gcc/ipa-polymorphic-call.c
1 /* Analysis of polymorphic call context.
2 Copyright (C) 2013-2020 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "tree-pass.h"
29 #include "tree-ssa-operands.h"
30 #include "streamer-hooks.h"
31 #include "cgraph.h"
32 #include "data-streamer.h"
33 #include "diagnostic.h"
34 #include "alias.h"
35 #include "fold-const.h"
36 #include "calls.h"
37 #include "ipa-utils.h"
38 #include "tree-dfa.h"
39 #include "gimple-pretty-print.h"
40 #include "tree-into-ssa.h"
42 /* Return true when TYPE contains a polymorphic type and thus is interesting
43 for devirtualization machinery. */
45 static bool contains_type_p (tree, HOST_WIDE_INT, tree,
46 bool consider_placement_new = true,
47 bool consider_bases = true);
49 bool
50 contains_polymorphic_type_p (const_tree type)
52 type = TYPE_MAIN_VARIANT (type);
54 if (RECORD_OR_UNION_TYPE_P (type))
56 if (TYPE_BINFO (type)
57 && polymorphic_type_binfo_p (TYPE_BINFO (type)))
58 return true;
59 for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
60 if (TREE_CODE (fld) == FIELD_DECL
61 && !DECL_ARTIFICIAL (fld)
62 && contains_polymorphic_type_p (TREE_TYPE (fld)))
63 return true;
64 return false;
66 if (TREE_CODE (type) == ARRAY_TYPE)
67 return contains_polymorphic_type_p (TREE_TYPE (type));
68 return false;
71 /* Return true if it seems valid to use placement new to build EXPECTED_TYPE
72 at position CUR_OFFSET within TYPE.
74 POD can be changed to an instance of a polymorphic type by
75 placement new. Here we play safe and assume that any
76 non-polymorphic type is POD. */
77 bool
78 possible_placement_new (tree type, tree expected_type,
79 HOST_WIDE_INT cur_offset)
81 if (cur_offset < 0)
82 return true;
83 return ((TREE_CODE (type) != RECORD_TYPE
84 || !TYPE_BINFO (type)
85 || cur_offset >= POINTER_SIZE
86 || !polymorphic_type_binfo_p (TYPE_BINFO (type)))
87 && (!TYPE_SIZE (type)
88 || !tree_fits_shwi_p (TYPE_SIZE (type))
89 || (cur_offset
90 + (expected_type ? tree_to_uhwi (TYPE_SIZE (expected_type))
91 : POINTER_SIZE)
92 <= tree_to_uhwi (TYPE_SIZE (type)))));
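/* Illustrative sketch only (hypothetical types, not taken from GCC itself):
   a plain POD buffer may later hold a polymorphic object created by
   placement new, which is why the check above stays conservative.

     struct Buf { char data[64]; };       // non-polymorphic POD
     struct Poly { virtual ~Poly (); };   // polymorphic type

     Buf b;
     Poly *p = new (b.data) Poly ();      // placement new into the POD buffer

   A context describing Buf must therefore still admit Poly at a matching
   offset unless CONSIDER_PLACEMENT_NEW is false.  */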
95 /* THIS->OUTER_TYPE is a type of memory object where an object of OTR_TYPE
96 is contained at THIS->OFFSET. Walk the memory representation of
97 THIS->OUTER_TYPE and find the outermost class type that matches
98 OTR_TYPE or contains OTR_TYPE as a base. Update THIS
99 to represent it.
101 If OTR_TYPE is NULL, just find outermost polymorphic type with
102 virtual table present at position OFFSET.
104 For example when THIS represents type
105 class A { int a; class B b; }
110 and we look for type at offset sizeof(int), we end up with B and offset 0.
111 If the same is produced by multiple inheritance, we end up with A and offset
112 sizeof(int).
114 If we cannot find a corresponding class, give up by setting
115 THIS->OUTER_TYPE to OTR_TYPE and THIS->OFFSET to NULL.
116 Return true when lookup was successful.
118 When CONSIDER_PLACEMENT_NEW is false, reject contexts that may be made
119 valid only via allocation of new polymorphic type inside by means
120 of placement new.
122 When CONSIDER_BASES is false, only look for actual fields, not base types
123 of TYPE. */
125 bool
126 ipa_polymorphic_call_context::restrict_to_inner_class (tree otr_type,
127 bool consider_placement_new,
128 bool consider_bases)
130 tree type = outer_type;
131 HOST_WIDE_INT cur_offset = offset;
132 bool speculative = false;
133 bool size_unknown = false;
134 unsigned HOST_WIDE_INT otr_type_size = POINTER_SIZE;
136 /* Update OUTER_TYPE to match EXPECTED_TYPE if it is not set. */
137 if (!outer_type)
139 clear_outer_type (otr_type);
140 type = otr_type;
141 cur_offset = 0;
143 /* See if OFFSET points inside OUTER_TYPE. If it does not, we know
144 that the context is either invalid, or the instance type must be
145 derived from OUTER_TYPE.
147 Because the instance type may contain a field whose type is OUTER_TYPE,
148 we cannot derive any effective information about it.
150 TODO: In the case we know all derived types, we can definitely do better
151 here. */
152 else if (TYPE_SIZE (outer_type)
153 && tree_fits_shwi_p (TYPE_SIZE (outer_type))
154 && tree_to_shwi (TYPE_SIZE (outer_type)) >= 0
155 && tree_to_shwi (TYPE_SIZE (outer_type)) <= offset)
157 bool der = maybe_derived_type; /* clear_outer_type will reset it. */
158 bool dyn = dynamic;
159 clear_outer_type (otr_type);
160 type = otr_type;
161 cur_offset = 0;
163 /* If a derived type is not allowed, we know that the context is invalid.
164 For dynamic types, we really do not have information about the
165 size of the memory location. It is possible that a completely
166 different type is stored after outer_type. */
167 if (!der && !dyn)
169 clear_speculation ();
170 invalid = true;
171 return false;
175 if (otr_type && TYPE_SIZE (otr_type)
176 && tree_fits_shwi_p (TYPE_SIZE (otr_type)))
177 otr_type_size = tree_to_uhwi (TYPE_SIZE (otr_type));
179 if (!type || offset < 0)
180 goto no_useful_type_info;
182 /* Find the sub-object the constant actually refers to and mark whether it is
183 an artificial one (as opposed to a user-defined one).
185 This loop is performed twice; first time for outer_type and second time
186 for speculative_outer_type. The second run has SPECULATIVE set. */
187 while (true)
189 unsigned HOST_WIDE_INT pos, size;
190 tree fld;
192 /* If we do not know size of TYPE, we need to be more conservative
193 about accepting cases where we cannot find EXPECTED_TYPE.
194 Generally the types that do matter here are of constant size.
195 Size_unknown case should be very rare. */
196 if (TYPE_SIZE (type)
197 && tree_fits_shwi_p (TYPE_SIZE (type))
198 && tree_to_shwi (TYPE_SIZE (type)) >= 0)
199 size_unknown = false;
200 else
201 size_unknown = true;
203 /* On a match, just return what we found. */
204 if ((otr_type
205 && types_odr_comparable (type, otr_type)
206 && types_same_for_odr (type, otr_type))
207 || (!otr_type
208 && TREE_CODE (type) == RECORD_TYPE
209 && TYPE_BINFO (type)
210 && polymorphic_type_binfo_p (TYPE_BINFO (type))))
212 if (speculative)
214 /* If we did not match the offset, just give up on speculation. */
215 if (cur_offset != 0
216 /* Also check if speculation did not end up being same as
217 non-speculation. */
218 || (types_must_be_same_for_odr (speculative_outer_type,
219 outer_type)
220 && (maybe_derived_type
221 == speculative_maybe_derived_type)))
222 clear_speculation ();
223 return true;
225 else
227 /* If type is known to be final, do not worry about derived
228 types. Testing it here may help us to avoid speculation. */
229 if (otr_type && TREE_CODE (outer_type) == RECORD_TYPE
230 && (!in_lto_p || odr_type_p (outer_type))
231 && type_with_linkage_p (outer_type)
232 && type_known_to_have_no_derivations_p (outer_type))
233 maybe_derived_type = false;
235 /* A type cannot contain itself at a non-zero offset. In that case
236 just give up. Still accept the case where the size is not known:
237 either the second copy may appear past the end of the type or within
238 the non-POD buffer located inside the variably sized type
239 itself. */
240 if (cur_offset != 0)
241 goto no_useful_type_info;
242 /* If we determined type precisely or we have no clue on
243 speculation, we are done. */
244 if (!maybe_derived_type || !speculative_outer_type
245 || !speculation_consistent_p (speculative_outer_type,
246 speculative_offset,
247 speculative_maybe_derived_type,
248 otr_type))
250 clear_speculation ();
251 return true;
253 /* Otherwise look into speculation now. */
254 else
256 speculative = true;
257 type = speculative_outer_type;
258 cur_offset = speculative_offset;
259 continue;
264 /* Walk the fields and find the corresponding one at OFFSET. */
265 if (TREE_CODE (type) == RECORD_TYPE)
267 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
269 if (TREE_CODE (fld) != FIELD_DECL
270 || TREE_TYPE (fld) == error_mark_node)
271 continue;
273 pos = int_bit_position (fld);
274 if (pos > (unsigned HOST_WIDE_INT)cur_offset)
275 continue;
277 /* Do not consider vptr itself. Not even for placement new. */
278 if (!pos && DECL_ARTIFICIAL (fld)
279 && POINTER_TYPE_P (TREE_TYPE (fld))
280 && TYPE_BINFO (type)
281 && polymorphic_type_binfo_p (TYPE_BINFO (type)))
282 continue;
284 if (!DECL_SIZE (fld) || !tree_fits_uhwi_p (DECL_SIZE (fld)))
285 goto no_useful_type_info;
286 size = tree_to_uhwi (DECL_SIZE (fld));
288 /* We can always skip types smaller than pointer size:
289 those cannot contain a virtual table pointer.
291 Disqualifying fields that are too small to fit OTR_TYPE
292 saves work needed to walk them for no benefit.
293 Because of the way the bases are packed into a class, the
294 field's size may be smaller than the type size, so it needs
295 to be done with care. */
297 if (pos <= (unsigned HOST_WIDE_INT)cur_offset
298 && (pos + size) >= (unsigned HOST_WIDE_INT)cur_offset
299 + POINTER_SIZE
300 && (!otr_type
301 || !TYPE_SIZE (TREE_TYPE (fld))
302 || !tree_fits_shwi_p (TYPE_SIZE (TREE_TYPE (fld)))
303 || (pos + tree_to_uhwi (TYPE_SIZE (TREE_TYPE (fld))))
304 >= cur_offset + otr_type_size))
305 break;
308 if (!fld)
309 goto no_useful_type_info;
311 type = TYPE_MAIN_VARIANT (TREE_TYPE (fld));
312 cur_offset -= pos;
313 /* DECL_ARTIFICIAL represents a basetype. */
314 if (!DECL_ARTIFICIAL (fld))
316 if (!speculative)
318 outer_type = type;
319 offset = cur_offset;
320 /* As soon as we see a field containing the type,
321 we know we are not looking for derivations. */
322 maybe_derived_type = false;
324 else
326 speculative_outer_type = type;
327 speculative_offset = cur_offset;
328 speculative_maybe_derived_type = false;
331 else if (!consider_bases)
332 goto no_useful_type_info;
334 else if (TREE_CODE (type) == ARRAY_TYPE)
336 tree subtype = TYPE_MAIN_VARIANT (TREE_TYPE (type));
338 /* Give up if we don't know the array field size.
339 Also give up on non-polymorphic types as they are used
340 as buffers for placement new. */
341 if (!TYPE_SIZE (subtype)
342 || !tree_fits_shwi_p (TYPE_SIZE (subtype))
343 || tree_to_shwi (TYPE_SIZE (subtype)) <= 0
344 || !contains_polymorphic_type_p (subtype))
345 goto no_useful_type_info;
347 HOST_WIDE_INT new_offset = cur_offset % tree_to_shwi (TYPE_SIZE (subtype));
349 /* We may see a buffer for placement new. In this case the expected type
350 can be bigger than the subtype. */
351 if (TYPE_SIZE (subtype)
352 && (cur_offset + otr_type_size
353 > tree_to_uhwi (TYPE_SIZE (subtype))))
354 goto no_useful_type_info;
356 cur_offset = new_offset;
357 type = TYPE_MAIN_VARIANT (subtype);
358 if (!speculative)
360 outer_type = type;
361 offset = cur_offset;
362 maybe_derived_type = false;
364 else
366 speculative_outer_type = type;
367 speculative_offset = cur_offset;
368 speculative_maybe_derived_type = false;
371 /* Give up on anything else. */
372 else
374 no_useful_type_info:
375 if (maybe_derived_type && !speculative
376 && TREE_CODE (outer_type) == RECORD_TYPE
377 && TREE_CODE (otr_type) == RECORD_TYPE
378 && TYPE_BINFO (otr_type)
379 && !offset
380 && get_binfo_at_offset (TYPE_BINFO (otr_type), 0, outer_type))
382 clear_outer_type (otr_type);
383 if (!speculative_outer_type
384 || !speculation_consistent_p (speculative_outer_type,
385 speculative_offset,
386 speculative_maybe_derived_type,
387 otr_type))
388 clear_speculation ();
389 if (speculative_outer_type)
391 speculative = true;
392 type = speculative_outer_type;
393 cur_offset = speculative_offset;
395 else
396 return true;
398 /* We found no way to embed EXPECTED_TYPE in TYPE.
399 We still permit two special cases - placement new and
400 the case of variadic types containing themselves. */
401 if (!speculative
402 && consider_placement_new
403 && (size_unknown || !type || maybe_derived_type
404 || possible_placement_new (type, otr_type, cur_offset)))
406 /* In these weird cases we want to accept the context.
407 In non-speculative run we have no useful outer_type info
408 (TODO: we may eventually want to record upper bound on the
409 type size that can be used to prune the walk),
410 but we still want to consider speculation that may
411 give useful info. */
412 if (!speculative)
414 clear_outer_type (otr_type);
415 if (!speculative_outer_type
416 || !speculation_consistent_p (speculative_outer_type,
417 speculative_offset,
418 speculative_maybe_derived_type,
419 otr_type))
420 clear_speculation ();
421 if (speculative_outer_type)
423 speculative = true;
424 type = speculative_outer_type;
425 cur_offset = speculative_offset;
427 else
428 return true;
430 else
432 clear_speculation ();
433 return true;
436 else
438 clear_speculation ();
439 if (speculative)
440 return true;
441 clear_outer_type (otr_type);
442 invalid = true;
443 return false;
449 /* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET.
450 CONSIDER_PLACEMENT_NEW makes the function accept cases where OTR_TYPE can
451 be built within OUTER_TYPE by means of placement new. CONSIDER_BASES makes
452 the function accept cases where OTR_TYPE appears as a base of OUTER_TYPE or as
453 a base of one of the fields of OUTER_TYPE. */
455 static bool
456 contains_type_p (tree outer_type, HOST_WIDE_INT offset,
457 tree otr_type,
458 bool consider_placement_new,
459 bool consider_bases)
461 ipa_polymorphic_call_context context;
463 /* Check that type is within range. */
464 if (offset < 0)
465 return false;
467 /* PR ipa/71207
468 As OUTER_TYPE can be a type with diamond virtual inheritance,
469 it is not necessary that INNER_TYPE will fit within OUTER_TYPE at
470 a given offset. It can happen that INNER_TYPE also contains a base object;
471 however, it would point to the same instance in the OUTER_TYPE. */
473 context.offset = offset;
474 context.outer_type = TYPE_MAIN_VARIANT (outer_type);
475 context.maybe_derived_type = false;
476 context.dynamic = false;
477 return context.restrict_to_inner_class (otr_type, consider_placement_new,
478 consider_bases);
482 /* Return a FUNCTION_DECL if FN represents a constructor or destructor.
483 If CHECK_CLONES is true, also check for clones of ctor/dtors. */
485 tree
486 polymorphic_ctor_dtor_p (tree fn, bool check_clones)
488 if (TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
489 || (!DECL_CXX_CONSTRUCTOR_P (fn) && !DECL_CXX_DESTRUCTOR_P (fn)))
491 if (!check_clones)
492 return NULL_TREE;
494 /* Watch for clones where we constant propagated the first
495 argument (pointer to the instance). */
496 fn = DECL_ABSTRACT_ORIGIN (fn);
497 if (!fn
498 || TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
499 || (!DECL_CXX_CONSTRUCTOR_P (fn) && !DECL_CXX_DESTRUCTOR_P (fn)))
500 return NULL_TREE;
503 if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
504 return NULL_TREE;
506 return fn;
509 /* Return a FUNCTION_DECL if BLOCK represents a constructor or destructor.
510 If CHECK_CLONES is true, also check for clones of ctor/dtors. */
512 tree
513 inlined_polymorphic_ctor_dtor_block_p (tree block, bool check_clones)
515 tree fn = block_ultimate_origin (block);
516 if (fn == NULL || TREE_CODE (fn) != FUNCTION_DECL)
517 return NULL_TREE;
519 return polymorphic_ctor_dtor_p (fn, check_clones);
523 /* We know that the instance is stored in a variable or parameter
524 (not dynamically allocated) and we want to disprove the fact
525 that it may be in construction at the invocation of CALL.
527 BASE represents the memory location where the instance is stored.
528 If BASE is NULL, it is assumed to be global memory.
529 OUTER_TYPE is the known type of the instance or NULL if not
530 known.
532 For the variable to be in construction we actually need to
533 be in the constructor of the corresponding global variable, or
534 the inline stack of CALL must contain the constructor.
535 Check this condition. This check works safely only before
536 IPA passes, because inline stacks may become out of date
537 later. */
539 bool
540 decl_maybe_in_construction_p (tree base, tree outer_type,
541 gimple *call, tree function)
543 if (outer_type)
544 outer_type = TYPE_MAIN_VARIANT (outer_type);
545 gcc_assert (!base || DECL_P (base));
547 /* After inlining, code unification optimizations may invalidate
548 inline stacks. Also we need to give up on global variables after
549 IPA, because addresses of these may have been propagated to their
550 constructors. */
551 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
552 return true;
554 /* Pure functions cannot change the dynamic type;
555 that requires writing to memory. */
556 if ((!base || !auto_var_in_fn_p (base, function))
557 && flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
558 return false;
560 bool check_clones = !base || is_global_var (base);
561 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
562 block = BLOCK_SUPERCONTEXT (block))
563 if (tree fn = inlined_polymorphic_ctor_dtor_block_p (block, check_clones))
565 tree type = TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
567 if (!outer_type || !types_odr_comparable (type, outer_type))
569 if (TREE_CODE (type) == RECORD_TYPE
570 && TYPE_BINFO (type)
571 && polymorphic_type_binfo_p (TYPE_BINFO (type)))
572 return true;
574 else if (types_same_for_odr (type, outer_type))
575 return true;
578 if (!base || (VAR_P (base) && is_global_var (base)))
580 if (TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
581 || (!DECL_CXX_CONSTRUCTOR_P (function)
582 && !DECL_CXX_DESTRUCTOR_P (function)))
584 if (!DECL_ABSTRACT_ORIGIN (function))
585 return false;
586 /* Watch for clones where we constant propagated the first
587 argument (pointer to the instance). */
588 function = DECL_ABSTRACT_ORIGIN (function);
589 if (!function
590 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
591 || (!DECL_CXX_CONSTRUCTOR_P (function)
592 && !DECL_CXX_DESTRUCTOR_P (function)))
593 return false;
595 tree type = TYPE_METHOD_BASETYPE (TREE_TYPE (function));
596 if (!outer_type || !types_odr_comparable (type, outer_type))
598 if (TREE_CODE (type) == RECORD_TYPE
599 && TYPE_BINFO (type)
600 && polymorphic_type_binfo_p (TYPE_BINFO (type)))
601 return true;
603 else if (types_same_for_odr (type, outer_type))
604 return true;
606 return false;
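/* Illustrative sketch only (hypothetical C++, not part of this file): the
   inline-stack walk above catches a virtual call that happens while the
   object is still being constructed.

     struct A { A () { foo (); }  virtual void foo (); };
     void caller () { A a; }   // A::A () may be inlined here

   If the call to foo () is reached through blocks whose ultimate origin is
   A::A (), decl_maybe_in_construction_p returns true and the dynamic type
   of the instance cannot be trusted yet.  */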
609 /* Dump human readable context to F. If NEWLINE is true, it will be terminated
610 by a newline. */
612 void
613 ipa_polymorphic_call_context::dump (FILE *f, bool newline) const
615 fprintf (f, " ");
616 if (invalid)
617 fprintf (f, "Call is known to be undefined");
618 else
620 if (useless_p ())
621 fprintf (f, "nothing known");
622 if (outer_type || offset)
624 fprintf (f, "Outer type%s:", dynamic ? " (dynamic)":"");
625 print_generic_expr (f, outer_type, TDF_SLIM);
626 if (maybe_derived_type)
627 fprintf (f, " (or a derived type)");
628 if (maybe_in_construction)
629 fprintf (f, " (maybe in construction)");
630 fprintf (f, " offset " HOST_WIDE_INT_PRINT_DEC,
631 offset);
633 if (speculative_outer_type)
635 if (outer_type || offset)
636 fprintf (f, " ");
637 fprintf (f, "Speculative outer type:");
638 print_generic_expr (f, speculative_outer_type, TDF_SLIM);
639 if (speculative_maybe_derived_type)
640 fprintf (f, " (or a derived type)");
641 fprintf (f, " at offset " HOST_WIDE_INT_PRINT_DEC,
642 speculative_offset);
645 if (newline)
646 fprintf(f, "\n");
649 /* Print context to stderr. */
651 void
652 ipa_polymorphic_call_context::debug () const
654 dump (stderr);
657 /* Stream out the context to OB. */
659 void
660 ipa_polymorphic_call_context::stream_out (struct output_block *ob) const
662 struct bitpack_d bp = bitpack_create (ob->main_stream);
664 bp_pack_value (&bp, invalid, 1);
665 bp_pack_value (&bp, maybe_in_construction, 1);
666 bp_pack_value (&bp, maybe_derived_type, 1);
667 bp_pack_value (&bp, speculative_maybe_derived_type, 1);
668 bp_pack_value (&bp, dynamic, 1);
669 bp_pack_value (&bp, outer_type != NULL, 1);
670 bp_pack_value (&bp, offset != 0, 1);
671 bp_pack_value (&bp, speculative_outer_type != NULL, 1);
672 streamer_write_bitpack (&bp);
674 if (outer_type != NULL)
675 stream_write_tree (ob, outer_type, true);
676 if (offset)
677 streamer_write_hwi (ob, offset);
678 if (speculative_outer_type != NULL)
680 stream_write_tree (ob, speculative_outer_type, true);
681 streamer_write_hwi (ob, speculative_offset);
683 else
684 gcc_assert (!speculative_offset);
687 /* Stream in the context from IB and DATA_IN. */
689 void
690 ipa_polymorphic_call_context::stream_in (class lto_input_block *ib,
691 class data_in *data_in)
693 struct bitpack_d bp = streamer_read_bitpack (ib);
695 invalid = bp_unpack_value (&bp, 1);
696 maybe_in_construction = bp_unpack_value (&bp, 1);
697 maybe_derived_type = bp_unpack_value (&bp, 1);
698 speculative_maybe_derived_type = bp_unpack_value (&bp, 1);
699 dynamic = bp_unpack_value (&bp, 1);
700 bool outer_type_p = bp_unpack_value (&bp, 1);
701 bool offset_p = bp_unpack_value (&bp, 1);
702 bool speculative_outer_type_p = bp_unpack_value (&bp, 1);
704 if (outer_type_p)
705 outer_type = stream_read_tree (ib, data_in);
706 else
707 outer_type = NULL;
708 if (offset_p)
709 offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
710 else
711 offset = 0;
712 if (speculative_outer_type_p)
714 speculative_outer_type = stream_read_tree (ib, data_in);
715 speculative_offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
717 else
719 speculative_outer_type = NULL;
720 speculative_offset = 0;
724 /* Produce a polymorphic call context for a call to a method of an instance
725 that is located within BASE (assumed to be a decl) at offset OFF. */
727 void
728 ipa_polymorphic_call_context::set_by_decl (tree base, HOST_WIDE_INT off)
730 gcc_assert (DECL_P (base));
731 clear_speculation ();
733 if (!contains_polymorphic_type_p (TREE_TYPE (base)))
735 clear_outer_type ();
736 offset = off;
737 return;
739 outer_type = TYPE_MAIN_VARIANT (TREE_TYPE (base));
740 offset = off;
741 /* Make very conservative assumption that all objects
742 may be in construction.
744 It is up to caller to revisit this via
745 get_dynamic_type or decl_maybe_in_construction_p. */
746 maybe_in_construction = true;
747 maybe_derived_type = false;
748 dynamic = false;
751 /* CST is an invariant (address of a decl); try to get a meaningful
752 polymorphic call context for a polymorphic call to a method
753 of an instance of OTR_TYPE that is located at offset OFF of this invariant.
754 Return FALSE if nothing meaningful can be found. */
756 bool
757 ipa_polymorphic_call_context::set_by_invariant (tree cst,
758 tree otr_type,
759 HOST_WIDE_INT off)
761 poly_int64 offset2, size, max_size;
762 bool reverse;
763 tree base;
765 invalid = false;
766 off = 0;
767 clear_outer_type (otr_type);
769 if (TREE_CODE (cst) != ADDR_EXPR)
770 return false;
772 cst = TREE_OPERAND (cst, 0);
773 base = get_ref_base_and_extent (cst, &offset2, &size, &max_size, &reverse);
774 if (!DECL_P (base) || !known_size_p (max_size) || maybe_ne (max_size, size))
775 return false;
777 /* Only type inconsistent programs can have otr_type that is
778 not part of outer type. */
779 if (otr_type && !contains_type_p (TREE_TYPE (base), off, otr_type))
780 return false;
782 set_by_decl (base, off);
783 return true;
786 /* See if OP is an SSA name initialized as a copy or by a single assignment.
787 If so, walk the SSA graph up. Because a simple conditional PHI is considered
788 a copy, GLOBAL_VISITED may be used to avoid an infinite loop when walking the SSA
789 graph. */
791 static tree
792 walk_ssa_copies (tree op, hash_set<tree> **global_visited = NULL)
794 hash_set <tree> *visited = NULL;
795 STRIP_NOPS (op);
796 while (TREE_CODE (op) == SSA_NAME
797 && !SSA_NAME_IS_DEFAULT_DEF (op)
798 /* We might be called via fold_stmt during cfgcleanup where
799 SSA form need not be up-to-date. */
800 && !name_registered_for_update_p (op)
801 && (gimple_assign_single_p (SSA_NAME_DEF_STMT (op))
802 || gimple_code (SSA_NAME_DEF_STMT (op)) == GIMPLE_PHI))
804 if (global_visited)
806 if (!*global_visited)
807 *global_visited = new hash_set<tree>;
808 if ((*global_visited)->add (op))
809 goto done;
811 else
813 if (!visited)
814 visited = new hash_set<tree>;
815 if (visited->add (op))
816 goto done;
818 /* Special case
819 if (ptr == 0)
820 ptr = 0;
821 else
822 ptr = ptr.foo;
823 This pattern is implicitly produced for casts to non-primary
824 bases. When doing context analysis, we do not really care
825 about the case where the pointer is NULL, because the call will be
826 undefined anyway. */
827 if (gimple_code (SSA_NAME_DEF_STMT (op)) == GIMPLE_PHI)
829 gimple *phi = SSA_NAME_DEF_STMT (op);
831 if (gimple_phi_num_args (phi) > 2)
832 goto done;
833 if (gimple_phi_num_args (phi) == 1)
834 op = gimple_phi_arg_def (phi, 0);
835 else if (integer_zerop (gimple_phi_arg_def (phi, 0)))
836 op = gimple_phi_arg_def (phi, 1);
837 else if (integer_zerop (gimple_phi_arg_def (phi, 1)))
838 op = gimple_phi_arg_def (phi, 0);
839 else
840 goto done;
842 else
844 if (gimple_assign_load_p (SSA_NAME_DEF_STMT (op)))
845 goto done;
846 op = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op));
848 STRIP_NOPS (op);
850 done:
851 if (visited)
852 delete (visited);
853 return op;
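/* Illustrative sketch only (made-up SSA names): walk_ssa_copies collapses
   chains such as

     b_2 = a_1;            // single non-load assignment, i.e. a copy
     c_3 = PHI <b_2, 0>;   // NULL-or-copy PHI produced for a non-primary base cast

   so that asking about c_3 yields a_1, the value the copies started from.  */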
856 /* Create polymorphic call context from IP invariant CST.
857 This is typically &global_var.
858 OTR_TYPE specifies the type of the polymorphic call or NULL if unknown; OFF
859 is the offset of the call. */
861 ipa_polymorphic_call_context::ipa_polymorphic_call_context (tree cst,
862 tree otr_type,
863 HOST_WIDE_INT off)
865 clear_speculation ();
866 set_by_invariant (cst, otr_type, off);
869 /* Build a context for pointer REF contained in FNDECL at statement STMT.
870 If INSTANCE is non-NULL, return a pointer to the object described by
871 the context, or the DECL in which the context is contained. */
873 ipa_polymorphic_call_context::ipa_polymorphic_call_context (tree fndecl,
874 tree ref,
875 gimple *stmt,
876 tree *instance)
878 tree otr_type = NULL;
879 tree base_pointer;
880 hash_set <tree> *visited = NULL;
882 if (TREE_CODE (ref) == OBJ_TYPE_REF)
884 otr_type = obj_type_ref_class (ref);
885 base_pointer = OBJ_TYPE_REF_OBJECT (ref);
887 else
888 base_pointer = ref;
890 /* Set up basic info in case we find nothing interesting in the analysis. */
891 clear_speculation ();
892 clear_outer_type (otr_type);
893 invalid = false;
895 /* Walk SSA for outer object. */
896 while (true)
898 base_pointer = walk_ssa_copies (base_pointer, &visited);
899 if (TREE_CODE (base_pointer) == ADDR_EXPR)
901 HOST_WIDE_INT offset2, size;
902 bool reverse;
903 tree base
904 = get_ref_base_and_extent_hwi (TREE_OPERAND (base_pointer, 0),
905 &offset2, &size, &reverse);
906 if (!base)
907 break;
909 combine_speculation_with (TYPE_MAIN_VARIANT (TREE_TYPE (base)),
910 offset + offset2,
911 true,
912 NULL /* Do not change outer type. */);
914 /* If this is a varying address, punt. */
915 if (TREE_CODE (base) == MEM_REF || DECL_P (base))
917 /* We found dereference of a pointer. Type of the pointer
918 and MEM_REF is meaningless, but we can look further. */
919 offset_int mem_offset;
920 if (TREE_CODE (base) == MEM_REF
921 && mem_ref_offset (base).is_constant (&mem_offset))
923 offset_int o = mem_offset * BITS_PER_UNIT;
924 o += offset;
925 o += offset2;
926 if (!wi::fits_shwi_p (o))
927 break;
928 base_pointer = TREE_OPERAND (base, 0);
929 offset = o.to_shwi ();
930 outer_type = NULL;
932 /* We found base object. In this case the outer_type
933 is known. */
934 else if (DECL_P (base))
936 if (visited)
937 delete (visited);
938 /* Only type inconsistent programs can have otr_type that is
939 not part of outer type. */
940 if (otr_type
941 && !contains_type_p (TREE_TYPE (base),
942 offset + offset2, otr_type))
944 invalid = true;
945 if (instance)
946 *instance = base_pointer;
947 return;
949 set_by_decl (base, offset + offset2);
950 if (outer_type && maybe_in_construction && stmt)
951 maybe_in_construction
952 = decl_maybe_in_construction_p (base,
953 outer_type,
954 stmt,
955 fndecl);
956 if (instance)
957 *instance = base;
958 return;
960 else
961 break;
963 else
964 break;
966 else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
967 && TREE_CODE (TREE_OPERAND (base_pointer, 1)) == INTEGER_CST)
969 offset_int o
970 = offset_int::from (wi::to_wide (TREE_OPERAND (base_pointer, 1)),
971 SIGNED);
972 o *= BITS_PER_UNIT;
973 o += offset;
974 if (!wi::fits_shwi_p (o))
975 break;
976 offset = o.to_shwi ();
977 base_pointer = TREE_OPERAND (base_pointer, 0);
979 else
980 break;
983 if (visited)
984 delete (visited);
986 /* Try to determine type of the outer object. */
987 if (TREE_CODE (base_pointer) == SSA_NAME
988 && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
989 && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
991 /* See if parameter is THIS pointer of a method. */
992 if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
993 && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
995 outer_type
996 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
997 cgraph_node *node = cgraph_node::get (current_function_decl);
998 gcc_assert (TREE_CODE (outer_type) == RECORD_TYPE
999 || TREE_CODE (outer_type) == UNION_TYPE);
1001 /* Handle the case where we inlined into a thunk. In this case the
1002 thunk has a THIS pointer of type bar, but it really receives the
1003 address of its base type foo which sits in bar at
1004 0-thunk.fixed_offset. It starts with code that adds
1005 thunk.fixed_offset to the pointer to compensate for this.
1007 Because we walked all the way to the beginning of the thunk, we now
1008 see the pointer &bar-thunk.fixed_offset and need to compensate
1009 for it. */
1010 if (node->thunk.fixed_offset)
1011 offset -= node->thunk.fixed_offset * BITS_PER_UNIT;
1013 /* Dynamic casting has possibly upcasted the type
1014 in the hierarchy. In this case the outer type is less
1015 informative than the inner type and we should forget
1016 about it. */
1017 if ((otr_type
1018 && !contains_type_p (outer_type, offset,
1019 otr_type))
1020 || !contains_polymorphic_type_p (outer_type)
1021 /* If we compile the thunk with a virtual offset, the THIS pointer
1022 is adjusted by an unknown value. We thus can't use the outer info
1023 at all. */
1024 || node->thunk.virtual_offset_p)
1026 outer_type = NULL;
1027 if (instance)
1028 *instance = base_pointer;
1029 return;
1032 dynamic = true;
1034 /* If the function is a constructor or destructor, then
1035 the type is possibly in construction, but we know
1036 it is not a derived type. */
1037 if (DECL_CXX_CONSTRUCTOR_P (fndecl)
1038 || DECL_CXX_DESTRUCTOR_P (fndecl))
1040 maybe_in_construction = true;
1041 maybe_derived_type = false;
1043 else
1045 maybe_derived_type = true;
1046 maybe_in_construction = false;
1048 if (instance)
1050 /* If method is expanded thunk, we need to apply thunk offset
1051 to instance pointer. */
1052 if (node->thunk.virtual_offset_p
1053 || node->thunk.fixed_offset)
1054 *instance = NULL;
1055 else
1056 *instance = base_pointer;
1058 return;
1060 /* Non-PODs passed by value are really passed by invisible
1061 reference. In this case we also know the type of the
1062 object. */
1063 if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
1065 outer_type
1066 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
1067 /* Only type inconsistent programs can have otr_type that is
1068 not part of outer type. */
1069 if (otr_type && !contains_type_p (outer_type, offset,
1070 otr_type))
1072 invalid = true;
1073 if (instance)
1074 *instance = base_pointer;
1075 return;
1077 /* Non-polymorphic types have no interest for us. */
1078 else if (!otr_type && !contains_polymorphic_type_p (outer_type))
1080 outer_type = NULL;
1081 if (instance)
1082 *instance = base_pointer;
1083 return;
1085 maybe_derived_type = false;
1086 maybe_in_construction = false;
1087 if (instance)
1088 *instance = base_pointer;
1089 return;
1093 tree base_type = TREE_TYPE (base_pointer);
1095 if (TREE_CODE (base_pointer) == SSA_NAME
1096 && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
1097 && !(TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL
1098 || TREE_CODE (SSA_NAME_VAR (base_pointer)) == RESULT_DECL))
1100 invalid = true;
1101 if (instance)
1102 *instance = base_pointer;
1103 return;
1105 if (TREE_CODE (base_pointer) == SSA_NAME
1106 && SSA_NAME_DEF_STMT (base_pointer)
1107 && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
1108 base_type = TREE_TYPE (gimple_assign_rhs1
1109 (SSA_NAME_DEF_STMT (base_pointer)));
1111 if (base_type && POINTER_TYPE_P (base_type))
1112 combine_speculation_with (TYPE_MAIN_VARIANT (TREE_TYPE (base_type)),
1113 offset,
1114 true, NULL /* Do not change type here */);
1115 /* TODO: There are multiple ways to derive a type. For instance
1116 BASE_POINTER may be passed to a constructor call prior to our reference.
1117 We do not do this kind of flow-sensitive analysis yet. */
1118 if (instance)
1119 *instance = base_pointer;
1120 return;
1123 /* Structure to be passed in between detect_type_change and
1124 check_stmt_for_type_change. */
1126 struct type_change_info
1128 /* Offset into the object where there is the virtual method pointer we are
1129 looking for. */
1130 HOST_WIDE_INT offset;
1131 /* The declaration or SSA_NAME pointer of the base that we are checking for
1132 type change. */
1133 tree instance;
1134 /* The reference to virtual table pointer used. */
1135 tree vtbl_ptr_ref;
1136 tree otr_type;
1137 /* If we actually can tell the type that the object has changed to, it is
1138 stored in this field. Otherwise it remains NULL_TREE. */
1139 tree known_current_type;
1140 HOST_WIDE_INT known_current_offset;
1142 /* Set to nonzero if we possibly missed some dynamic type changes and we
1143 should consider the set to be speculative. */
1144 unsigned speculative;
1146 /* Set to true if dynamic type change has been detected. */
1147 bool type_maybe_changed;
1148 /* Set to true if multiple types have been encountered. known_current_type
1149 must be disregarded in that case. */
1150 bool multiple_types_encountered;
1151 bool seen_unanalyzed_store;
1154 /* Return true if STMT is not a call and can modify a virtual method table pointer.
1155 We take advantage of the fact that vtable stores must appear within constructor
1156 and destructor functions. */
1158 static bool
1159 noncall_stmt_may_be_vtbl_ptr_store (gimple *stmt)
1161 if (is_gimple_assign (stmt))
1163 tree lhs = gimple_assign_lhs (stmt);
1165 if (gimple_clobber_p (stmt))
1166 return false;
1167 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
1169 if (flag_strict_aliasing
1170 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
1171 return false;
1173 if (TREE_CODE (lhs) == COMPONENT_REF
1174 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
1175 return false;
1176 /* In the future we might want to use get_ref_base_and_extent to find
1177 if there is a field corresponding to the offset and if so, proceed
1178 almost like if it was a component ref. */
1182 /* Code unification may mess with inline stacks. */
1183 if (cfun->after_inlining)
1184 return true;
1186 /* Walk the inline stack and watch out for ctors/dtors.
1187 TODO: Maybe we can require the store to appear in toplevel
1188 block of CTOR/DTOR. */
1189 for (tree block = gimple_block (stmt); block && TREE_CODE (block) == BLOCK;
1190 block = BLOCK_SUPERCONTEXT (block))
1191 if (BLOCK_ABSTRACT_ORIGIN (block)
1192 && TREE_CODE (block_ultimate_origin (block)) == FUNCTION_DECL)
1193 return inlined_polymorphic_ctor_dtor_block_p (block, false);
1194 return (TREE_CODE (TREE_TYPE (current_function_decl)) == METHOD_TYPE
1195 && (DECL_CXX_CONSTRUCTOR_P (current_function_decl)
1196 || DECL_CXX_DESTRUCTOR_P (current_function_decl)));
1199 /* If STMT can be proved to be an assignment to the virtual method table
1200 pointer of ANALYZED_OBJ and the type associated with the new table can be
1201 identified, return the type. Otherwise return NULL_TREE if the type changes
1202 in an unknown way, or ERROR_MARK_NODE if the type is unchanged. */
1204 static tree
1205 extr_type_from_vtbl_ptr_store (gimple *stmt, struct type_change_info *tci,
1206 HOST_WIDE_INT *type_offset)
1208 poly_int64 offset, size, max_size;
1209 tree lhs, rhs, base;
1210 bool reverse;
1212 if (!gimple_assign_single_p (stmt))
1213 return NULL_TREE;
1215 lhs = gimple_assign_lhs (stmt);
1216 rhs = gimple_assign_rhs1 (stmt);
1217 if (TREE_CODE (lhs) != COMPONENT_REF
1218 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
1220 if (dump_file)
1221 fprintf (dump_file, " LHS is not virtual table.\n");
1222 return NULL_TREE;
1225 if (tci->vtbl_ptr_ref && operand_equal_p (lhs, tci->vtbl_ptr_ref, 0))
1227 else
1229 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size, &reverse);
1230 if (DECL_P (tci->instance))
1232 if (base != tci->instance)
1234 if (dump_file)
1236 fprintf (dump_file, " base:");
1237 print_generic_expr (dump_file, base, TDF_SLIM);
1238 fprintf (dump_file, " does not match instance:");
1239 print_generic_expr (dump_file, tci->instance, TDF_SLIM);
1240 fprintf (dump_file, "\n");
1242 return NULL_TREE;
1245 else if (TREE_CODE (base) == MEM_REF)
1247 if (!operand_equal_p (tci->instance, TREE_OPERAND (base, 0), 0))
1249 if (dump_file)
1251 fprintf (dump_file, " base mem ref:");
1252 print_generic_expr (dump_file, base, TDF_SLIM);
1253 fprintf (dump_file, " does not match instance:");
1254 print_generic_expr (dump_file, tci->instance, TDF_SLIM);
1255 fprintf (dump_file, "\n");
1257 return NULL_TREE;
1259 if (!integer_zerop (TREE_OPERAND (base, 1)))
1261 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
1263 if (dump_file)
1265 fprintf (dump_file, " base mem ref:");
1266 print_generic_expr (dump_file, base, TDF_SLIM);
1267 fprintf (dump_file, " has non-representable offset:");
1268 print_generic_expr (dump_file, tci->instance, TDF_SLIM);
1269 fprintf (dump_file, "\n");
1271 return NULL_TREE;
1273 else
1274 offset += tree_to_shwi (TREE_OPERAND (base, 1)) * BITS_PER_UNIT;
1277 else if (!operand_equal_p (tci->instance, base, 0)
1278 || tci->offset)
1280 if (dump_file)
1282 fprintf (dump_file, " base:");
1283 print_generic_expr (dump_file, base, TDF_SLIM);
1284 fprintf (dump_file, " does not match instance:");
1285 print_generic_expr (dump_file, tci->instance, TDF_SLIM);
1286 fprintf (dump_file, " with offset %i\n", (int)tci->offset);
1288 return tci->offset > POINTER_SIZE ? error_mark_node : NULL_TREE;
1290 if (maybe_ne (offset, tci->offset)
1291 || maybe_ne (size, POINTER_SIZE)
1292 || maybe_ne (max_size, POINTER_SIZE))
1294 if (dump_file)
1296 fprintf (dump_file, " wrong offset ");
1297 print_dec (offset, dump_file);
1298 fprintf (dump_file, "!=%i or size ", (int) tci->offset);
1299 print_dec (size, dump_file);
1300 fprintf (dump_file, "\n");
1302 return (known_le (offset + POINTER_SIZE, tci->offset)
1303 || (known_size_p (max_size)
1304 && known_gt (tci->offset + POINTER_SIZE,
1305 offset + max_size))
1306 ? error_mark_node : NULL);
1310 tree vtable;
1311 unsigned HOST_WIDE_INT offset2;
1313 if (!vtable_pointer_value_to_vtable (rhs, &vtable, &offset2))
1315 if (dump_file)
1316 fprintf (dump_file, " Failed to lookup binfo\n");
1317 return NULL;
1320 tree binfo = subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
1321 offset2, vtable);
1322 if (!binfo)
1324 if (dump_file)
1325 fprintf (dump_file, " Construction vtable used\n");
1326 /* FIXME: We should support construction contexts. */
1327 return NULL;
1330 *type_offset = tree_to_shwi (BINFO_OFFSET (binfo)) * BITS_PER_UNIT;
1331 return DECL_CONTEXT (vtable);
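/* Illustrative sketch only (made-up identifiers): the kind of statement the
   function above recognizes is the inlined vtable pointer store emitted by a
   constructor, for example

     MEM[(struct A *)&instance]._vptr.A = &_ZTV1A + 16;

   Here vtable_pointer_value_to_vtable recovers _ZTV1A and its offset from the
   right-hand side, DECL_CONTEXT of the vtable gives the type A, and
   BINFO_OFFSET of the matching binfo gives the subobject offset.  */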
1334 /* Record dynamic type change of TCI to TYPE. */
1336 static void
1337 record_known_type (struct type_change_info *tci, tree type, HOST_WIDE_INT offset)
1339 if (dump_file)
1341 if (type)
1343 fprintf (dump_file, " Recording type: ");
1344 print_generic_expr (dump_file, type, TDF_SLIM);
1345 fprintf (dump_file, " at offset %i\n", (int)offset);
1347 else
1348 fprintf (dump_file, " Recording unknown type\n");
1351 /* If we found a constructor of a type that is not polymorphic or
1352 that may contain the type in question as a field (not as a base),
1353 restrict to the inner class first to make the type matching below
1354 happier. */
1355 if (type
1356 && (offset
1357 || (TREE_CODE (type) != RECORD_TYPE
1358 || !TYPE_BINFO (type)
1359 || !polymorphic_type_binfo_p (TYPE_BINFO (type)))))
1361 ipa_polymorphic_call_context context;
1363 context.offset = offset;
1364 context.outer_type = type;
1365 context.maybe_in_construction = false;
1366 context.maybe_derived_type = false;
1367 context.dynamic = true;
1368 /* If we failed to find the inner type, we know that the call
1369 would be undefined for type produced here. */
1370 if (!context.restrict_to_inner_class (tci->otr_type))
1372 if (dump_file)
1373 fprintf (dump_file, " Ignoring; does not contain otr_type\n");
1374 return;
1376 /* Watch for the case where we reached a POD type and anticipate placement
1377 new. */
1378 if (!context.maybe_derived_type)
1380 type = context.outer_type;
1381 offset = context.offset;
1384 if (tci->type_maybe_changed
1385 && (!types_same_for_odr (type, tci->known_current_type)
1386 || offset != tci->known_current_offset))
1387 tci->multiple_types_encountered = true;
1388 tci->known_current_type = TYPE_MAIN_VARIANT (type);
1389 tci->known_current_offset = offset;
1390 tci->type_maybe_changed = true;
1394 /* The maximum number of may-defs we visit when looking for a must-def
1395 that changes the dynamic type in check_stmt_for_type_change. Tuned
1396 after the PR12392 testcase which, when unlimited, spends 40% of its time within
1397 these alias walks and 8% with the following limit. */
1399 static inline bool
1400 csftc_abort_walking_p (unsigned speculative)
1402 unsigned max = param_max_speculative_devirt_maydefs;
1403 return speculative > max ? true : false;
1406 /* Callback of walk_aliased_vdefs and a helper function for
1407 detect_type_change to check whether a particular statement may modify
1408 the virtual table pointer, and if possible also determine the new type of
1409 the (sub-)object. It stores its result into DATA, which points to a
1410 type_change_info structure. */
1412 static bool
1413 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
1415 gimple *stmt = SSA_NAME_DEF_STMT (vdef);
1416 struct type_change_info *tci = (struct type_change_info *) data;
1417 tree fn;
1419 /* If we already gave up, just terminate the rest of walk. */
1420 if (tci->multiple_types_encountered)
1421 return true;
1423 if (is_gimple_call (stmt))
1425 if (gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE))
1426 return false;
1428 /* Check for a constructor call. */
1429 if ((fn = gimple_call_fndecl (stmt)) != NULL_TREE
1430 && DECL_CXX_CONSTRUCTOR_P (fn)
1431 && TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
1432 && gimple_call_num_args (stmt))
1434 tree op = walk_ssa_copies (gimple_call_arg (stmt, 0));
1435 tree type = TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
1436 HOST_WIDE_INT offset = 0;
1437 bool reverse;
1439 if (dump_file)
1441 fprintf (dump_file, " Checking constructor call: ");
1442 print_gimple_stmt (dump_file, stmt, 0);
1445 /* See if THIS parameter seems like instance pointer. */
1446 if (TREE_CODE (op) == ADDR_EXPR)
1448 HOST_WIDE_INT size;
1449 op = get_ref_base_and_extent_hwi (TREE_OPERAND (op, 0),
1450 &offset, &size, &reverse);
1451 if (!op)
1453 tci->speculative++;
1454 return csftc_abort_walking_p (tci->speculative);
1456 if (TREE_CODE (op) == MEM_REF)
1458 if (!tree_fits_shwi_p (TREE_OPERAND (op, 1)))
1460 tci->speculative++;
1461 return csftc_abort_walking_p (tci->speculative);
1463 offset += tree_to_shwi (TREE_OPERAND (op, 1))
1464 * BITS_PER_UNIT;
1465 op = TREE_OPERAND (op, 0);
1467 else if (DECL_P (op))
1469 else
1471 tci->speculative++;
1472 return csftc_abort_walking_p (tci->speculative);
1474 op = walk_ssa_copies (op);
1476 if (operand_equal_p (op, tci->instance, 0)
1477 && TYPE_SIZE (type)
1478 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
1479 && tree_fits_shwi_p (TYPE_SIZE (type))
1480 && tree_to_shwi (TYPE_SIZE (type)) + offset > tci->offset
1481 /* Some inlined constructors may look as follows:
1482 _3 = operator new (16);
1483 MEM[(struct &)_3] ={v} {CLOBBER};
1484 MEM[(struct CompositeClass *)_3]._vptr.CompositeClass
1485 = &MEM[(void *)&_ZTV14CompositeClass + 16B];
1486 _7 = &MEM[(struct CompositeClass *)_3].object;
1487 EmptyClass::EmptyClass (_7);
1489 When determining the dynamic type of _3, because we stop at the first
1490 dynamic type found, we would stop at EmptyClass::EmptyClass (_7).
1491 In this case EmptyClass is not even polymorphic and we would miss that
1492 it is contained in an outer type that is polymorphic. */
1494 && (tci->offset == offset || contains_polymorphic_type_p (type)))
1496 record_known_type (tci, type, tci->offset - offset);
1497 return true;
1500 /* Calls may possibly change the dynamic type by placement new. Assume
1501 it will not happen, but make the result speculative only. */
1502 if (dump_file)
1504 fprintf (dump_file, " Function call may change dynamic type:");
1505 print_gimple_stmt (dump_file, stmt, 0);
1507 tci->speculative++;
1508 return csftc_abort_walking_p (tci->speculative);
1510 /* Check for inlined virtual table store. */
1511 else if (noncall_stmt_may_be_vtbl_ptr_store (stmt))
1513 tree type;
1514 HOST_WIDE_INT offset = 0;
1515 if (dump_file)
1517 fprintf (dump_file, " Checking vtbl store: ");
1518 print_gimple_stmt (dump_file, stmt, 0);
1521 type = extr_type_from_vtbl_ptr_store (stmt, tci, &offset);
1522 if (type == error_mark_node)
1523 return false;
1524 gcc_assert (!type || TYPE_MAIN_VARIANT (type) == type);
1525 if (!type)
1527 if (dump_file)
1528 fprintf (dump_file, " Unanalyzed store may change type.\n");
1529 tci->seen_unanalyzed_store = true;
1530 tci->speculative++;
1532 else
1533 record_known_type (tci, type, offset);
1534 return true;
1536 else
1537 return false;
1540 /* THIS is polymorphic call context obtained from get_polymorphic_context.
1541 OTR_OBJECT is pointer to the instance returned by OBJ_TYPE_REF_OBJECT.
1542 INSTANCE is pointer to the outer instance as returned by
1543 get_polymorphic_context. To avoid creation of temporary expressions,
1544 INSTANCE may also be a declaration if get_polymorphic_context found the
1545 value to be in static storage.
1547 If the type of instance is not fully determined
1548 (either OUTER_TYPE is unknown or MAYBE_IN_CONSTRUCTION/INCLUDE_DERIVED_TYPES
1549 is set), try to walk memory writes and find the actual construction of the
1550 instance.
1552 Return true if memory is unchanged from function entry.
1554 We do not include this analysis in the context analysis itself, because
1555 it needs memory SSA to be fully built and the walk may be expensive.
1556 So it is not suitable for use within fold_stmt and similar uses.
1558 AA_WALK_BUDGET_P, if not NULL, is how many statements we should allow
1559 walk_aliased_vdefs to examine. The value should be decremented by the
1560 number of statements we examined or set to zero if exhausted. */
1562 bool
1563 ipa_polymorphic_call_context::get_dynamic_type (tree instance,
1564 tree otr_object,
1565 tree otr_type,
1566 gimple *call,
1567 unsigned *aa_walk_budget_p)
1569 struct type_change_info tci;
1570 ao_ref ao;
1571 bool function_entry_reached = false;
1572 tree instance_ref = NULL;
1573 gimple *stmt = call;
1574 /* Remember OFFSET before it is modified by restrict_to_inner_class.
1575 This is because we do not update INSTANCE when walking inwards. */
1576 HOST_WIDE_INT instance_offset = offset;
1577 tree instance_outer_type = outer_type;
1579 if (!instance)
1580 return false;
1582 if (otr_type)
1583 otr_type = TYPE_MAIN_VARIANT (otr_type);
1585 /* Walk into inner type. This may clear maybe_derived_type and save us
1586 from useless work. It also makes later comparisons with static type
1587 easier. */
1588 if (outer_type && otr_type)
1590 if (!restrict_to_inner_class (otr_type))
1591 return false;
1594 if (!maybe_in_construction && !maybe_derived_type)
1595 return false;
1597 /* If we are in fact not looking at any object or the instance is
1598 some placement new into a random load, give up straight away. */
1599 if (TREE_CODE (instance) == MEM_REF)
1600 return false;
1602 /* We need to obtain a reference to the virtual table pointer. It is better
1603 to look it up in the code rather than build our own. This requires a bit
1604 of pattern matching, but we end up verifying that what we found is
1605 correct.
1607 What we pattern match is:
1609 tmp = instance->_vptr.A; // vtbl ptr load
1610 tmp2 = tmp[otr_token]; // vtable lookup
1611 OBJ_TYPE_REF(tmp2;instance->0) (instance);
1613 We want to start alias oracle walk from vtbl pointer load,
1614 but we may not be able to identify it, for example, when PRE moved the
1615 load around. */
1617 if (gimple_code (call) == GIMPLE_CALL)
1619 tree ref = gimple_call_fn (call);
1620 bool reverse;
1622 if (TREE_CODE (ref) == OBJ_TYPE_REF)
1624 ref = OBJ_TYPE_REF_EXPR (ref);
1625 ref = walk_ssa_copies (ref);
1627 /* If call target is already known, no need to do the expensive
1628 memory walk. */
1629 if (is_gimple_min_invariant (ref))
1630 return false;
1632 /* Check if definition looks like vtable lookup. */
1633 if (TREE_CODE (ref) == SSA_NAME
1634 && !SSA_NAME_IS_DEFAULT_DEF (ref)
1635 && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref))
1636 && TREE_CODE (gimple_assign_rhs1
1637 (SSA_NAME_DEF_STMT (ref))) == MEM_REF)
1639 ref = get_base_address
1640 (TREE_OPERAND (gimple_assign_rhs1
1641 (SSA_NAME_DEF_STMT (ref)), 0));
1642 ref = walk_ssa_copies (ref);
1643 /* Find base address of the lookup and see if it looks like
1644 vptr load. */
1645 if (TREE_CODE (ref) == SSA_NAME
1646 && !SSA_NAME_IS_DEFAULT_DEF (ref)
1647 && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref)))
1649 HOST_WIDE_INT offset2, size;
1650 tree ref_exp = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (ref));
1651 tree base_ref
1652 = get_ref_base_and_extent_hwi (ref_exp, &offset2,
1653 &size, &reverse);
1655 /* Finally verify that what we found looks like read from
1656 OTR_OBJECT or from INSTANCE with offset OFFSET. */
1657 if (base_ref
1658 && ((TREE_CODE (base_ref) == MEM_REF
1659 && ((offset2 == instance_offset
1660 && TREE_OPERAND (base_ref, 0) == instance)
1661 || (!offset2
1662 && TREE_OPERAND (base_ref, 0)
1663 == otr_object)))
1664 || (DECL_P (instance) && base_ref == instance
1665 && offset2 == instance_offset)))
1667 stmt = SSA_NAME_DEF_STMT (ref);
1668 instance_ref = ref_exp;
1675 /* If we failed to look up the reference in code, build our own. */
1676 if (!instance_ref)
1678 /* If the statement in question does not use memory, we can't tell
1679 anything. */
1680 if (!gimple_vuse (stmt))
1681 return false;
1682 ao_ref_init_from_ptr_and_size (&ao, otr_object, NULL);
1684 else
1685 /* Otherwise use the real reference. */
1686 ao_ref_init (&ao, instance_ref);
1688 /* We look for vtbl pointer read. */
1689 ao.size = POINTER_SIZE;
1690 ao.max_size = ao.size;
1691 /* We are looking for stores to vptr pointer within the instance of
1692 outer type.
1693 TODO: The vptr pointer type is globally known, we probably should
1694 keep it and do that even when otr_type is unknown. */
1695 if (otr_type)
1697 ao.base_alias_set
1698 = get_alias_set (outer_type ? outer_type : otr_type);
1699 ao.ref_alias_set
1700 = get_alias_set (TREE_TYPE (BINFO_VTABLE (TYPE_BINFO (otr_type))));
1703 if (dump_file)
1705 fprintf (dump_file, "Determining dynamic type for call: ");
1706 print_gimple_stmt (dump_file, call, 0);
1707 fprintf (dump_file, " Starting walk at: ");
1708 print_gimple_stmt (dump_file, stmt, 0);
1709 fprintf (dump_file, " instance pointer: ");
1710 print_generic_expr (dump_file, otr_object, TDF_SLIM);
1711 fprintf (dump_file, " Outer instance pointer: ");
1712 print_generic_expr (dump_file, instance, TDF_SLIM);
1713 fprintf (dump_file, " offset: %i (bits)", (int)instance_offset);
1714 fprintf (dump_file, " vtbl reference: ");
1715 print_generic_expr (dump_file, instance_ref, TDF_SLIM);
1716 fprintf (dump_file, "\n");
1719 tci.offset = instance_offset;
1720 tci.instance = instance;
1721 tci.vtbl_ptr_ref = instance_ref;
1722 tci.known_current_type = NULL_TREE;
1723 tci.known_current_offset = 0;
1724 tci.otr_type = otr_type;
1725 tci.type_maybe_changed = false;
1726 tci.multiple_types_encountered = false;
1727 tci.speculative = 0;
1728 tci.seen_unanalyzed_store = false;
1730 unsigned aa_walk_budget = 0;
1731 if (aa_walk_budget_p)
1732 aa_walk_budget = *aa_walk_budget_p + 1;
1734 int walked
1735 = walk_aliased_vdefs (&ao, gimple_vuse (stmt), check_stmt_for_type_change,
1736 &tci, NULL, &function_entry_reached, aa_walk_budget);
1738 /* If we did not find any type changing statements, we may still drop the
1739 maybe_in_construction flag if the context already has an outer type.
1741 Here we make special assumptions about both constructors and
1742 destructors which are all the functions that are allowed to alter the
1743 VMT pointers. It assumes that destructors begin with assignment into
1744 all VMT pointers and that constructors essentially look in the
1745 following way:
1747 1) The very first thing they do is that they call constructors of
1748 ancestor sub-objects that have them.
1750 2) Then the VMT pointers of this and all its ancestors are set to new
1751 values corresponding to the type of the constructor.
1753 3) Only afterwards, other stuff such as constructor of member
1754 sub-objects and the code written by the user is run. Only this may
1755 include calling virtual functions, directly or indirectly.
1757 4) placement new cannot be used to change type of non-POD statically
1758 allocated variables.
1760 There is no way to call a constructor of an ancestor sub-object in any
1761 other way.
1763 This means that we do not have to care whether constructors get the
1764 correct type information because they will always change it (in fact,
1765 if we define the type to be given by the VMT pointer, it is undefined).
1767 The most important fact to derive from the above is that if, for some
1768 statement in the section 3, we try to detect whether the dynamic type
1769 has changed, we can safely ignore all calls as we examine the function
1770 body backwards until we reach statements in section 2 because these
1771 calls cannot be ancestor constructors or destructors (if the input is
1772 not bogus) and so do not change the dynamic type (this holds true only
1773 for automatically allocated objects but at the moment we devirtualize
1774 only these). We then must detect that statements in section 2 change
1775 the dynamic type and can try to derive the new type. That is enough
1776 and we can stop, we will never see the calls into constructors of
1777 sub-objects in this code.
1779 Therefore if the static outer type was found (outer_type)
1780 we can safely ignore tci.speculative that is set on calls and give up
1781 only if there was a dynamic type store that may affect the given variable
1782 (seen_unanalyzed_store). */
1784 if (walked < 0)
1786 if (dump_file)
1787 fprintf (dump_file, " AA walk budget exhausted.\n");
1788 *aa_walk_budget_p = 0;
1789 return false;
1791 else if (aa_walk_budget_p)
1792 *aa_walk_budget_p -= walked;
1794 if (!tci.type_maybe_changed
1795 || (outer_type
1796 && !dynamic
1797 && !tci.seen_unanalyzed_store
1798 && !tci.multiple_types_encountered
1799 && ((offset == tci.offset
1800 && types_same_for_odr (tci.known_current_type,
1801 outer_type))
1802 || (instance_offset == offset
1803 && types_same_for_odr (tci.known_current_type,
1804 instance_outer_type)))))
1806 if (!outer_type || tci.seen_unanalyzed_store)
1807 return false;
1808 if (maybe_in_construction)
1809 maybe_in_construction = false;
1810 if (dump_file)
1811 fprintf (dump_file, " No dynamic type change found.\n");
1812 return true;
1815 if (tci.known_current_type
1816 && !function_entry_reached
1817 && !tci.multiple_types_encountered)
1819 if (!tci.speculative)
1821 outer_type = TYPE_MAIN_VARIANT (tci.known_current_type);
1822 offset = tci.known_current_offset;
1823 dynamic = true;
1824 maybe_in_construction = false;
1825 maybe_derived_type = false;
1826 if (dump_file)
1827 fprintf (dump_file, " Determined dynamic type.\n");
1829 else if (!speculative_outer_type
1830 || speculative_maybe_derived_type)
1832 speculative_outer_type = TYPE_MAIN_VARIANT (tci.known_current_type);
1833 speculative_offset = tci.known_current_offset;
1834 speculative_maybe_derived_type = false;
1835 if (dump_file)
1836 fprintf (dump_file, " Determined speculative dynamic type.\n");
1839 else if (dump_file)
1841 fprintf (dump_file, " Found multiple types%s%s\n",
1842 function_entry_reached ? " (function entry reached)" : "",
1843 tci.multiple_types_encountered ? " (multiple types encountered)" : "");
1846 return false;
1849 /* See if speculation given by SPEC_OUTER_TYPE, SPEC_OFFSET and SPEC_MAYBE_DERIVED_TYPE
1850 seems consistent (and useful) with what we already have in the non-speculative context. */
1852 bool
1853 ipa_polymorphic_call_context::speculation_consistent_p (tree spec_outer_type,
1854 HOST_WIDE_INT spec_offset,
1855 bool spec_maybe_derived_type,
1856 tree otr_type) const
1858 if (!flag_devirtualize_speculatively)
1859 return false;
1861 /* Non-polymorphic types are useless for deriving likely polymorphic
1862 call targets. */
1863 if (!spec_outer_type || !contains_polymorphic_type_p (spec_outer_type))
1864 return false;
1866 /* If we know nothing, speculation is always good. */
1867 if (!outer_type)
1868 return true;
1870 /* Speculation is only useful to avoid derived types.
1871 This is not 100% true for placement new, where the outer context may
1872 turn out to be useless, but ignore these for now. */
1873 if (!maybe_derived_type)
1874 return false;
1876 /* If the types agree, speculation is consistent, but it makes sense only
1877 when it says something new. */
1878 if (types_must_be_same_for_odr (spec_outer_type, outer_type))
1879 return maybe_derived_type && !spec_maybe_derived_type;
1881 /* If speculation does not contain the type in question, ignore it. */
1882 if (otr_type
1883 && !contains_type_p (spec_outer_type, spec_offset, otr_type, false, true))
1884 return false;
1886 /* If the outer type already contains the speculated type as a field,
1887 the speculation is useless. We already know SPEC_OUTER_TYPE from
1888 OUTER_TYPE and that it is not in construction. */
1889 if (contains_type_p (outer_type, offset - spec_offset,
1890 spec_outer_type, false, false))
1891 return false;
1893 /* If the speculative outer type is not more specific than the outer
1894 type, just give up.
1895 We can only decide this safely if we can compare types with OUTER_TYPE. */
1897 if ((!in_lto_p || odr_type_p (outer_type))
1898 && !contains_type_p (spec_outer_type,
1899 spec_offset - offset,
1900 outer_type, false))
1901 return false;
1902 return true;
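/* A minimal example of a consistent speculation (hypothetical types, assuming
   -fdevirtualize-speculatively): with

     struct A { virtual void f (); };
     struct B : A { virtual void f (); };

   a non-speculative context with OUTER_TYPE == A, OFFSET == 0 and
   MAYBE_DERIVED_TYPE set is usefully refined by the speculation
   SPEC_OUTER_TYPE == B, SPEC_OFFSET == 0, SPEC_MAYBE_DERIVED_TYPE == false:
   B is polymorphic, contains A (and thus OTR_TYPE == A) as a base at offset 0,
   and the speculation rules out further derivations.  */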
1905 /* Improve THIS with speculation described by NEW_OUTER_TYPE, NEW_OFFSET and
1906 NEW_MAYBE_DERIVED_TYPE.
1907 If OTR_TYPE is set, assume the context is used with OTR_TYPE. */
1909 bool
1910 ipa_polymorphic_call_context::combine_speculation_with
1911 (tree new_outer_type, HOST_WIDE_INT new_offset, bool new_maybe_derived_type,
1912 tree otr_type)
1914 if (!new_outer_type)
1915 return false;
1917 /* restrict_to_inner_class may eliminate wrong speculation making our job
1918 easier. */
1919 if (otr_type)
1920 restrict_to_inner_class (otr_type);
1922 if (!speculation_consistent_p (new_outer_type, new_offset,
1923 new_maybe_derived_type, otr_type))
1924 return false;
1926 /* New speculation is a win in case we have no speculation or new
1927 speculation does not consider derivations. */
1928 if (!speculative_outer_type
1929 || (speculative_maybe_derived_type
1930 && !new_maybe_derived_type))
1932 speculative_outer_type = new_outer_type;
1933 speculative_offset = new_offset;
1934 speculative_maybe_derived_type = new_maybe_derived_type;
1935 return true;
1937 else if (types_must_be_same_for_odr (speculative_outer_type,
1938 new_outer_type))
1940 if (speculative_offset != new_offset)
1942 /* OK, we have two contexts that seem valid, but they disagree;
1943 just give up.
1945 This is not a lattice operation, so we may want to drop it later. */
1946 if (dump_file && (dump_flags & TDF_DETAILS))
1947 fprintf (dump_file,
1948 "Speculative outer types match, "
1949 "offset mismatch -> invalid speculation\n");
1950 clear_speculation ();
1951 return true;
1953 else
1955 if (speculative_maybe_derived_type && !new_maybe_derived_type)
1957 speculative_maybe_derived_type = false;
1958 return true;
1960 else
1961 return false;
1964 /* Choose type that contains the other. This one either contains the outer
1965 as a field (thus giving exactly one target) or is deeper in the type
1966 hierarchy. */
1967 else if (speculative_outer_type
1968 && speculative_maybe_derived_type
1969 && (new_offset > speculative_offset
1970 || (new_offset == speculative_offset
1971 && contains_type_p (new_outer_type,
1972 0, speculative_outer_type, false))))
1974 tree old_outer_type = speculative_outer_type;
1975 HOST_WIDE_INT old_offset = speculative_offset;
1976 bool old_maybe_derived_type = speculative_maybe_derived_type;
1978 speculative_outer_type = new_outer_type;
1979 speculative_offset = new_offset;
1980 speculative_maybe_derived_type = new_maybe_derived_type;
1982 if (otr_type)
1983 restrict_to_inner_class (otr_type);
1985 /* If the speculation turned out to make no sense, revert to the
1986 previous one. */
1987 if (!speculative_outer_type)
1989 speculative_outer_type = old_outer_type;
1990 speculative_offset = old_offset;
1991 speculative_maybe_derived_type = old_maybe_derived_type;
1992 return false;
1994 return (old_offset != speculative_offset
1995 || old_maybe_derived_type != speculative_maybe_derived_type
1996 || types_must_be_same_for_odr (speculative_outer_type,
1997 new_outer_type));
1999 return false;
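/* A small usage sketch (hypothetical caller and types, not code from this
   file): a context whose speculation allows derivations can be strengthened
   by new speculation that does not:

     // ctx.outer_type == NULL, ctx.speculative_outer_type == B,
     // ctx.speculative_offset == 0, ctx.speculative_maybe_derived_type == true
     bool changed = ctx.combine_speculation_with (B_type, 0, false, otr_type);
     // changed == true; the speculation is now exactly B, i.e.
     // speculative_maybe_derived_type == false.

   The new speculation wins because speculation that does not consider
   derivations is strictly more useful for finding likely targets.  */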
2002 /* Make speculation less specific so that
2003 NEW_OUTER_TYPE, NEW_OFFSET and NEW_MAYBE_DERIVED_TYPE are also included.
2004 If OTR_TYPE is set, assume the context is used with OTR_TYPE. */
2006 bool
2007 ipa_polymorphic_call_context::meet_speculation_with
2008 (tree new_outer_type, HOST_WIDE_INT new_offset, bool new_maybe_derived_type,
2009 tree otr_type)
2011 if (!new_outer_type && speculative_outer_type)
2013 clear_speculation ();
2014 return true;
2017 /* restrict_to_inner_class may eliminate wrong speculation making our job
2018 easier. */
2019 if (otr_type)
2020 restrict_to_inner_class (otr_type);
2022 if (!speculative_outer_type
2023 || !speculation_consistent_p (speculative_outer_type,
2024 speculative_offset,
2025 speculative_maybe_derived_type,
2026 otr_type))
2027 return false;
2029 if (!speculation_consistent_p (new_outer_type, new_offset,
2030 new_maybe_derived_type, otr_type))
2032 clear_speculation ();
2033 return true;
2036 else if (types_must_be_same_for_odr (speculative_outer_type,
2037 new_outer_type))
2039 if (speculative_offset != new_offset)
2041 clear_speculation ();
2042 return true;
2044 else
2046 if (!speculative_maybe_derived_type && new_maybe_derived_type)
2048 speculative_maybe_derived_type = true;
2049 return true;
2051 else
2052 return false;
2055 /* See if one type contains the other as a field (not base). */
2056 else if (contains_type_p (new_outer_type, new_offset - speculative_offset,
2057 speculative_outer_type, false, false))
2058 return false;
2059 else if (contains_type_p (speculative_outer_type,
2060 speculative_offset - new_offset,
2061 new_outer_type, false, false))
2063 speculative_outer_type = new_outer_type;
2064 speculative_offset = new_offset;
2065 speculative_maybe_derived_type = new_maybe_derived_type;
2066 return true;
2068 /* See if SPECULATIVE_OUTER_TYPE is a base of NEW_OUTER_TYPE. */
2069 else if (contains_type_p (new_outer_type,
2070 new_offset - speculative_offset,
2071 speculative_outer_type, false, true))
2073 if (!speculative_maybe_derived_type)
2075 speculative_maybe_derived_type = true;
2076 return true;
2078 return false;
2080 /* See if NEW_OUTER_TYPE is a base of SPECULATIVE_OUTER_TYPE. */
2081 else if (contains_type_p (speculative_outer_type,
2082 speculative_offset - new_offset, new_outer_type, false, true))
2084 speculative_outer_type = new_outer_type;
2085 speculative_offset = new_offset;
2086 speculative_maybe_derived_type = true;
2087 return true;
2089 else
2091 if (dump_file && (dump_flags & TDF_DETAILS))
2092 fprintf (dump_file, "Giving up on speculative meet\n");
2093 clear_speculation ();
2094 return true;
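/* For contrast with combine_speculation_with above (hypothetical values;
   ctx.outer_type == NULL so both speculations pass speculation_consistent_p):

     // ctx.speculative_outer_type == B, ctx.speculative_offset == 0,
     // ctx.speculative_maybe_derived_type == false
     bool changed = ctx.meet_speculation_with (B_type, 0, true, otr_type);
     // changed == true; speculative_maybe_derived_type is now true

   i.e. the meet only loses precision, so that both speculations remain
   covered by the result.  */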
2098 /* Assume that both THIS and the given context CTX are valid and strengthen
2099 THIS if possible. Return true if any strengthening was made.
2100 If the actual type the context is being used with is known, OTR_TYPE should
2101 be set accordingly. This improves the quality of the combined result. */
2103 bool
2104 ipa_polymorphic_call_context::combine_with (ipa_polymorphic_call_context ctx,
2105 tree otr_type)
2107 bool updated = false;
2109 if (ctx.useless_p () || invalid)
2110 return false;
2112 /* Restricting the context to the inner type makes merging easier; however,
2113 do not do that unless we know how the context is used (OTR_TYPE is non-NULL). */
2114 if (otr_type && !invalid && !ctx.invalid)
2116 restrict_to_inner_class (otr_type);
2117 ctx.restrict_to_inner_class (otr_type);
2118 if (invalid)
2119 return false;
2122 if (dump_file && (dump_flags & TDF_DETAILS))
2124 fprintf (dump_file, "Polymorphic call context combine:");
2125 dump (dump_file);
2126 fprintf (dump_file, "With context: ");
2127 ctx.dump (dump_file);
2128 if (otr_type)
2130 fprintf (dump_file, "To be used with type: ");
2131 print_generic_expr (dump_file, otr_type, TDF_SLIM);
2132 fprintf (dump_file, "\n");
2136 /* If call is known to be invalid, we are done. */
2137 if (ctx.invalid)
2139 if (dump_file && (dump_flags & TDF_DETAILS))
2140 fprintf (dump_file, "-> Invalid context\n");
2141 goto invalidate;
2144 if (!ctx.outer_type)
2146 else if (!outer_type)
2148 outer_type = ctx.outer_type;
2149 offset = ctx.offset;
2150 dynamic = ctx.dynamic;
2151 maybe_in_construction = ctx.maybe_in_construction;
2152 maybe_derived_type = ctx.maybe_derived_type;
2153 updated = true;
2155 /* If types are known to be same, merging is quite easy. */
2156 else if (types_must_be_same_for_odr (outer_type, ctx.outer_type))
2158 if (offset != ctx.offset
2159 && TYPE_SIZE (outer_type)
2160 && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST)
2162 if (dump_file && (dump_flags & TDF_DETAILS))
2163 fprintf (dump_file, "Outer types match, offset mismatch -> invalid\n");
2164 clear_speculation ();
2165 clear_outer_type ();
2166 invalid = true;
2167 return true;
2169 if (dump_file && (dump_flags & TDF_DETAILS))
2170 fprintf (dump_file, "Outer types match, merging flags\n");
2171 if (maybe_in_construction && !ctx.maybe_in_construction)
2173 updated = true;
2174 maybe_in_construction = false;
2176 if (maybe_derived_type && !ctx.maybe_derived_type)
2178 updated = true;
2179 maybe_derived_type = false;
2181 if (dynamic && !ctx.dynamic)
2183 updated = true;
2184 dynamic = false;
2187 /* If we know the type precisely, there is not much to improve. */
2188 else if (!maybe_derived_type && !maybe_in_construction
2189 && !ctx.maybe_derived_type && !ctx.maybe_in_construction)
2191 /* It may be easy to check whether the second context permits the first
2192 and set INVALID otherwise. This is not easy to do in general;
2193 contains_type_p may return false negatives for non-comparable
2194 types.
2196 If OTR_TYPE is known, however, we can expect that
2197 restrict_to_inner_class should have discovered the same base
2198 type. */
2199 if (otr_type && !ctx.maybe_in_construction && !ctx.maybe_derived_type)
2201 if (dump_file && (dump_flags & TDF_DETAILS))
2202 fprintf (dump_file, "Contexts disagree -> invalid\n");
2203 goto invalidate;
2206 /* See if one type contains the other as a field (not base).
2207 In this case we want to choose the wider type, because it contains
2208 more information. */
2209 else if (contains_type_p (ctx.outer_type, ctx.offset - offset,
2210 outer_type, false, false))
2212 if (dump_file && (dump_flags & TDF_DETAILS))
2213 fprintf (dump_file, "Second type contains the first as a field\n");
2215 if (maybe_derived_type)
2217 outer_type = ctx.outer_type;
2218 maybe_derived_type = ctx.maybe_derived_type;
2219 offset = ctx.offset;
2220 dynamic = ctx.dynamic;
2221 updated = true;
2224 /* If we do not know how the context is being used, we cannot
2225 clear MAYBE_IN_CONSTRUCTION because it may be offset
2226 to another component of OUTER_TYPE later and we know nothing
2227 about it. */
2228 if (otr_type && maybe_in_construction
2229 && !ctx.maybe_in_construction)
2231 maybe_in_construction = false;
2232 updated = true;
2235 else if (contains_type_p (outer_type, offset - ctx.offset,
2236 ctx.outer_type, false, false))
2238 if (dump_file && (dump_flags & TDF_DETAILS))
2239 fprintf (dump_file, "First type contains the second as a field\n");
2241 if (otr_type && maybe_in_construction
2242 && !ctx.maybe_in_construction)
2244 maybe_in_construction = false;
2245 updated = true;
2248 /* See if OUTER_TYPE is base of CTX.OUTER_TYPE. */
2249 else if (contains_type_p (ctx.outer_type,
2250 ctx.offset - offset, outer_type, false, true))
2252 if (dump_file && (dump_flags & TDF_DETAILS))
2253 fprintf (dump_file, "First type is base of second\n");
2254 if (!maybe_derived_type)
2256 if (!ctx.maybe_in_construction
2257 && types_odr_comparable (outer_type, ctx.outer_type))
2259 if (dump_file && (dump_flags & TDF_DETAILS))
2260 fprintf (dump_file, "Second context does not permit base -> invalid\n");
2261 goto invalidate;
2264 /* Pick variant deeper in the hierarchy. */
2265 else
2267 outer_type = ctx.outer_type;
2268 maybe_in_construction = ctx.maybe_in_construction;
2269 maybe_derived_type = ctx.maybe_derived_type;
2270 offset = ctx.offset;
2271 dynamic = ctx.dynamic;
2272 updated = true;
2275 /* See if CTX.OUTER_TYPE is base of OUTER_TYPE. */
2276 else if (contains_type_p (outer_type,
2277 offset - ctx.offset, ctx.outer_type, false, true))
2279 if (dump_file && (dump_flags & TDF_DETAILS))
2280 fprintf (dump_file, "Second type is base of first\n");
2281 if (!ctx.maybe_derived_type)
2283 if (!maybe_in_construction
2284 && types_odr_comparable (outer_type, ctx.outer_type))
2286 if (dump_file && (dump_flags & TDF_DETAILS))
2287 fprintf (dump_file, "First context does not permit base -> invalid\n");
2288 goto invalidate;
2290 /* Pick the base type. */
2291 else if (maybe_in_construction)
2293 outer_type = ctx.outer_type;
2294 maybe_in_construction = ctx.maybe_in_construction;
2295 maybe_derived_type = ctx.maybe_derived_type;
2296 offset = ctx.offset;
2297 dynamic = ctx.dynamic;
2298 updated = true;
2302 /* TODO handle merging using hierarchy. */
2303 else if (dump_file && (dump_flags & TDF_DETAILS))
2304 fprintf (dump_file, "Giving up on merge\n");
2306 updated |= combine_speculation_with (ctx.speculative_outer_type,
2307 ctx.speculative_offset,
2308 ctx.speculative_maybe_derived_type,
2309 otr_type);
2311 if (updated && dump_file && (dump_flags & TDF_DETAILS))
2313 fprintf (dump_file, "Updated as: ");
2314 dump (dump_file);
2315 fprintf (dump_file, "\n");
2317 return updated;
2319 invalidate:
2320 invalid = true;
2321 clear_speculation ();
2322 clear_outer_type ();
2323 return true;
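/* A small usage sketch (hypothetical contexts; A is a polymorphic class and
   otr_type the type of the call): combining a context that only knows "A or
   something derived from A" with one that knows the type exactly keeps the
   stronger information:

     // a: outer_type == A, offset == 0, maybe_derived_type == true
     // b: outer_type == A, offset == 0, maybe_derived_type == false
     bool changed = a.combine_with (b, otr_type);
     // changed == true; a now has maybe_derived_type == false

   Since both contexts are assumed valid, the combination can only become
   more restrictive, never less.  */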
2326 /* Take the non-speculative info, merge it with the speculative info and clear
2327 the speculation. Used when we can no longer keep track of the actual outer
2328 type, but we think it is still there.
2330 If OTR_TYPE is set, the transformation can be done more effectively assuming
2331 that the context is going to be used only that way. */
2333 void
2334 ipa_polymorphic_call_context::make_speculative (tree otr_type)
2336 tree spec_outer_type = outer_type;
2337 HOST_WIDE_INT spec_offset = offset;
2338 bool spec_maybe_derived_type = maybe_derived_type;
2340 if (invalid)
2342 invalid = false;
2343 clear_outer_type ();
2344 clear_speculation ();
2345 return;
2347 if (!outer_type)
2348 return;
2349 clear_outer_type ();
2350 combine_speculation_with (spec_outer_type, spec_offset,
2351 spec_maybe_derived_type,
2352 otr_type);
2355 /* Used when we cannot track a dynamic type change. This speculatively
2356 assumes the type change is not happening. */
2358 void
2359 ipa_polymorphic_call_context::possible_dynamic_type_change (bool in_poly_cdtor,
2360 tree otr_type)
2362 if (dynamic)
2363 make_speculative (otr_type);
2364 else if (in_poly_cdtor)
2365 maybe_in_construction = true;
2368 /* Return TRUE if this context conveys the same information as context X. */
2370 bool
2371 ipa_polymorphic_call_context::equal_to
2372 (const ipa_polymorphic_call_context &x) const
2374 if (useless_p ())
2375 return x.useless_p ();
2376 if (invalid)
2377 return x.invalid;
2378 if (x.useless_p () || x.invalid)
2379 return false;
2381 if (outer_type)
2383 if (!x.outer_type
2384 || !types_odr_comparable (outer_type, x.outer_type)
2385 || !types_same_for_odr (outer_type, x.outer_type)
2386 || offset != x.offset
2387 || maybe_in_construction != x.maybe_in_construction
2388 || maybe_derived_type != x.maybe_derived_type
2389 || dynamic != x.dynamic)
2390 return false;
2392 else if (x.outer_type)
2393 return false;
2396 if (speculative_outer_type
2397 && speculation_consistent_p (speculative_outer_type, speculative_offset,
2398 speculative_maybe_derived_type, NULL_TREE))
2400 if (!x.speculative_outer_type)
2401 return false;
2403 if (!types_odr_comparable (speculative_outer_type,
2404 x.speculative_outer_type)
2405 || !types_same_for_odr (speculative_outer_type,
2406 x.speculative_outer_type)
2407 || speculative_offset != x.speculative_offset
2408 || speculative_maybe_derived_type != x.speculative_maybe_derived_type)
2409 return false;
2411 else if (x.speculative_outer_type
2412 && x.speculation_consistent_p (x.speculative_outer_type,
2413 x.speculative_offset,
2414 x.speculative_maybe_derived_type,
2415 NULL))
2416 return false;
2418 return true;
2421 /* Modify this context to be no more restrictive than CTX (the meet operation); return true if changed. */
2423 bool
2424 ipa_polymorphic_call_context::meet_with (ipa_polymorphic_call_context ctx,
2425 tree otr_type)
2427 bool updated = false;
2429 if (useless_p () || ctx.invalid)
2430 return false;
2432 /* Restricting the context to the inner type makes merging easier; however,
2433 do not do that unless we know how the context is used (OTR_TYPE is non-NULL). */
2434 if (otr_type && !useless_p () && !ctx.useless_p ())
2436 restrict_to_inner_class (otr_type);
2437 ctx.restrict_to_inner_class (otr_type);
2438 if (invalid)
2439 return false;
2442 if (equal_to (ctx))
2443 return false;
2445 if (ctx.useless_p () || invalid)
2447 *this = ctx;
2448 return true;
2451 if (dump_file && (dump_flags & TDF_DETAILS))
2453 fprintf (dump_file, "Polymorphic call context meet:");
2454 dump (dump_file);
2455 fprintf (dump_file, "With context: ");
2456 ctx.dump (dump_file);
2457 if (otr_type)
2459 fprintf (dump_file, "To be used with type: ");
2460 print_generic_expr (dump_file, otr_type, TDF_SLIM);
2461 fprintf (dump_file, "\n");
2465 if (!dynamic && ctx.dynamic)
2467 dynamic = true;
2468 updated = true;
2472 /* If we know nothing about the outer type, we are done; if CTX knows nothing, forget ours too. */
2472 if (!outer_type)
2474 else if (!ctx.outer_type)
2476 clear_outer_type ();
2477 updated = true;
2479 /* If types are known to be same, merging is quite easy. */
2480 else if (types_must_be_same_for_odr (outer_type, ctx.outer_type))
2482 if (offset != ctx.offset
2483 && TYPE_SIZE (outer_type)
2484 && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST)
2486 if (dump_file && (dump_flags & TDF_DETAILS))
2487 fprintf (dump_file, "Outer types match, offset mismatch -> clearing\n");
2488 clear_outer_type ();
2489 return true;
2491 if (dump_file && (dump_flags & TDF_DETAILS))
2492 fprintf (dump_file, "Outer types match, merging flags\n");
2493 if (!maybe_in_construction && ctx.maybe_in_construction)
2495 updated = true;
2496 maybe_in_construction = true;
2498 if (!maybe_derived_type && ctx.maybe_derived_type)
2500 updated = true;
2501 maybe_derived_type = true;
2503 if (!dynamic && ctx.dynamic)
2505 updated = true;
2506 dynamic = true;
2509 /* See if one type contains the other as a field (not base). */
2510 else if (contains_type_p (ctx.outer_type, ctx.offset - offset,
2511 outer_type, false, false))
2513 if (dump_file && (dump_flags & TDF_DETAILS))
2514 fprintf (dump_file, "Second type contains the first as a field\n");
2516 /* The second type is more specific, so we keep the first.
2517 We need to set the DYNAMIC flag to avoid declaring the context INVALID
2518 if OFFSET ends up being out of range. */
2519 if (!dynamic
2520 && (ctx.dynamic
2521 || (!otr_type
2522 && (!TYPE_SIZE (ctx.outer_type)
2523 || !TYPE_SIZE (outer_type)
2524 || !operand_equal_p (TYPE_SIZE (ctx.outer_type),
2525 TYPE_SIZE (outer_type), 0)))))
2527 dynamic = true;
2528 updated = true;
2531 else if (contains_type_p (outer_type, offset - ctx.offset,
2532 ctx.outer_type, false, false))
2534 if (dump_file && (dump_flags & TDF_DETAILS))
2535 fprintf (dump_file, "First type contains the second as a field\n");
2537 if (!dynamic
2538 && (ctx.dynamic
2539 || (!otr_type
2540 && (!TYPE_SIZE (ctx.outer_type)
2541 || !TYPE_SIZE (outer_type)
2542 || !operand_equal_p (TYPE_SIZE (ctx.outer_type),
2543 TYPE_SIZE (outer_type), 0)))))
2544 dynamic = true;
2545 outer_type = ctx.outer_type;
2546 offset = ctx.offset;
2547 dynamic = ctx.dynamic;
2548 maybe_in_construction = ctx.maybe_in_construction;
2549 maybe_derived_type = ctx.maybe_derived_type;
2550 updated = true;
2552 /* See if OUTER_TYPE is base of CTX.OUTER_TYPE. */
2553 else if (contains_type_p (ctx.outer_type,
2554 ctx.offset - offset, outer_type, false, true))
2556 if (dump_file && (dump_flags & TDF_DETAILS))
2557 fprintf (dump_file, "First type is base of second\n");
2558 if (!maybe_derived_type)
2560 maybe_derived_type = true;
2561 updated = true;
2563 if (!maybe_in_construction && ctx.maybe_in_construction)
2565 maybe_in_construction = true;
2566 updated = true;
2568 if (!dynamic && ctx.dynamic)
2570 dynamic = true;
2571 updated = true;
2574 /* See if CTX.OUTER_TYPE is base of OUTER_TYPE. */
2575 else if (contains_type_p (outer_type,
2576 offset - ctx.offset, ctx.outer_type, false, true))
2578 if (dump_file && (dump_flags & TDF_DETAILS))
2579 fprintf (dump_file, "Second type is base of first\n");
2580 outer_type = ctx.outer_type;
2581 offset = ctx.offset;
2582 updated = true;
2583 if (!maybe_derived_type)
2584 maybe_derived_type = true;
2585 if (!maybe_in_construction && ctx.maybe_in_construction)
2586 maybe_in_construction = true;
2587 if (!dynamic && ctx.dynamic)
2588 dynamic = true;
2590 /* TODO handle merging using hierarchy. */
2591 else
2593 if (dump_file && (dump_flags & TDF_DETAILS))
2594 fprintf (dump_file, "Giving up on meet\n");
2595 clear_outer_type ();
2596 updated = true;
2599 updated |= meet_speculation_with (ctx.speculative_outer_type,
2600 ctx.speculative_offset,
2601 ctx.speculative_maybe_derived_type,
2602 otr_type);
2604 if (updated && dump_file && (dump_flags & TDF_DETAILS))
2606 fprintf (dump_file, "Updated as: ");
2607 dump (dump_file);
2608 fprintf (dump_file, "\n");
2610 return updated;
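/* A small usage sketch mirroring the combine_with example above
   (hypothetical contexts; A is a polymorphic class): the meet may only
   lose precision.

     // a: outer_type == A, offset == 0, maybe_derived_type == false
     // b: outer_type == A, offset == 0, maybe_derived_type == true
     bool changed = a.meet_with (b, otr_type);
     // changed == true; a now has maybe_derived_type == true

   so the resulting context is valid wherever either of the two inputs
   was valid.  */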