1 /* Analysis of polymorphic call context.
2 Copyright (C) 2013-2017 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "tree-pass.h"
29 #include "tree-ssa-operands.h"
30 #include "streamer-hooks.h"
31 #include "cgraph.h"
32 #include "data-streamer.h"
33 #include "diagnostic.h"
34 #include "alias.h"
35 #include "fold-const.h"
36 #include "calls.h"
37 #include "ipa-utils.h"
38 #include "tree-dfa.h"
39 #include "gimple-pretty-print.h"
40 #include "tree-into-ssa.h"
41 #include "params.h"
43 /* Return true when TYPE contains a polymorphic type and thus is interesting
44 for devirtualization machinery. */
46 static bool contains_type_p (tree, HOST_WIDE_INT, tree,
47 bool consider_placement_new = true,
48 bool consider_bases = true);
50 bool
51 contains_polymorphic_type_p (const_tree type)
53 type = TYPE_MAIN_VARIANT (type);
55 if (RECORD_OR_UNION_TYPE_P (type))
57 if (TYPE_BINFO (type)
58 && polymorphic_type_binfo_p (TYPE_BINFO (type)))
59 return true;
60 for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
61 if (TREE_CODE (fld) == FIELD_DECL
62 && !DECL_ARTIFICIAL (fld)
63 && contains_polymorphic_type_p (TREE_TYPE (fld)))
64 return true;
65 return false;
67 if (TREE_CODE (type) == ARRAY_TYPE)
68 return contains_polymorphic_type_p (TREE_TYPE (type));
69 return false;
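/* Illustrative sketch only (the types below are invented for this comment):
   contains_polymorphic_type_p is true for B, C and B[4], but false for A
   and D, since a mere pointer to a polymorphic type does not make the
   containing record interesting for devirtualization:

     struct A { int x; };		// no vtable
     struct B { virtual ~B (); };	// polymorphic
     struct C { int y; B b; };		// contains a polymorphic field
     struct D { B *p; };		// only points to one  */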
72 /* Return true if it seems valid to use placement new to build EXPECTED_TYPE
73 at position CUR_OFFSET within TYPE.
75 A POD can be changed to an instance of a polymorphic type by
76 placement new. Here we play it safe and assume that any
77 non-polymorphic type is a POD. */
78 bool
79 possible_placement_new (tree type, tree expected_type,
80 HOST_WIDE_INT cur_offset)
82 if (cur_offset < 0)
83 return true;
84 return ((TREE_CODE (type) != RECORD_TYPE
85 || !TYPE_BINFO (type)
86 || cur_offset >= POINTER_SIZE
87 || !polymorphic_type_binfo_p (TYPE_BINFO (type)))
88 && (!TYPE_SIZE (type)
89 || !tree_fits_shwi_p (TYPE_SIZE (type))
90 || (cur_offset
91 + (expected_type ? tree_to_uhwi (TYPE_SIZE (expected_type))
92 : POINTER_SIZE)
93 <= tree_to_uhwi (TYPE_SIZE (type)))));
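/* Illustrative sketch only (names invented for this comment): the reason for
   the conservatism above is that placement new can legally turn plain POD
   storage into a polymorphic object, for example:

     #include <new>
     struct Widget { virtual void draw () {} };
     struct Holder { alignas (Widget) char buf[sizeof (Widget)]; };

     void f (Holder *h)
     {
       Widget *w = new (h->buf) Widget;	// dynamic type of *H changes here
       w->draw ();
     }

   so a non-polymorphic field at CUR_OFFSET may still end up holding
   EXPECTED_TYPE at run time.  */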
96 /* THIS->OUTER_TYPE is the type of the memory object where an object of
97 OTR_TYPE is contained at THIS->OFFSET. Walk the memory representation of
98 THIS->OUTER_TYPE and find the outermost class type that matches
99 OTR_TYPE or contains OTR_TYPE as a base. Update THIS
100 to represent it.
102 If OTR_TYPE is NULL, just find the outermost polymorphic type with
103 a virtual table present at position OFFSET.
105 For example when THIS represents type
106 class A { int a; class B b; };
111 and we look for type at offset sizeof(int), we end up with B and offset 0.
112 If the same layout is produced by multiple inheritance, we end up with A
113 and offset sizeof(int).
115 If we cannot find the corresponding class, give up by setting
116 THIS->OUTER_TYPE to OTR_TYPE and THIS->OFFSET to NULL.
117 Return true when the lookup was successful.
119 When CONSIDER_PLACEMENT_NEW is false, reject contexts that may be made
120 valid only via allocation of a new polymorphic type inside by means
121 of placement new.
123 When CONSIDER_BASES is false, only look for actual fields, not base types
124 of TYPE. */
126 bool
127 ipa_polymorphic_call_context::restrict_to_inner_class (tree otr_type,
128 bool consider_placement_new,
129 bool consider_bases)
131 tree type = outer_type;
132 HOST_WIDE_INT cur_offset = offset;
133 bool speculative = false;
134 bool size_unknown = false;
135 unsigned HOST_WIDE_INT otr_type_size = POINTER_SIZE;
137 /* Update OUTER_TYPE to match EXPECTED_TYPE if it is not set. */
138 if (!outer_type)
140 clear_outer_type (otr_type);
141 type = otr_type;
142 cur_offset = 0;
144 /* See if OFFSET points inside OUTER_TYPE. If it does not, we know
145 that the context is either invalid, or the instance type must be
146 derived from OUTER_TYPE.
148 Because the instance type may contain a field whose type is OUTER_TYPE,
149 we cannot derive any effective information about it.
151 TODO: In the case where we know all derived types, we can definitely do
152 better here. */
153 else if (TYPE_SIZE (outer_type)
154 && tree_fits_shwi_p (TYPE_SIZE (outer_type))
155 && tree_to_shwi (TYPE_SIZE (outer_type)) >= 0
156 && tree_to_shwi (TYPE_SIZE (outer_type)) <= offset)
158 bool der = maybe_derived_type; /* clear_outer_type will reset it. */
159 bool dyn = dynamic;
160 clear_outer_type (otr_type);
161 type = otr_type;
162 cur_offset = 0;
164 /* If a derived type is not allowed, we know that the context is invalid.
165 For dynamic types, we really do not have information about the
166 size of the memory location. It is possible that a completely
167 different type is stored after outer_type. */
168 if (!der && !dyn)
170 clear_speculation ();
171 invalid = true;
172 return false;
176 if (otr_type && TYPE_SIZE (otr_type)
177 && tree_fits_shwi_p (TYPE_SIZE (otr_type)))
178 otr_type_size = tree_to_uhwi (TYPE_SIZE (otr_type));
180 if (!type || offset < 0)
181 goto no_useful_type_info;
183 /* Find the sub-object the constant actually refers to and mark whether it is
184 an artificial one (as opposed to a user-defined one).
186 This loop is performed twice; first time for outer_type and second time
187 for speculative_outer_type. The second run has SPECULATIVE set. */
188 while (true)
190 unsigned HOST_WIDE_INT pos, size;
191 tree fld;
193 /* If we do not know the size of TYPE, we need to be more conservative
194 about accepting cases where we cannot find EXPECTED_TYPE.
195 Generally the types that do matter here are of constant size.
196 The size_unknown case should be very rare. */
197 if (TYPE_SIZE (type)
198 && tree_fits_shwi_p (TYPE_SIZE (type))
199 && tree_to_shwi (TYPE_SIZE (type)) >= 0)
200 size_unknown = false;
201 else
202 size_unknown = true;
204 /* On a match, just return what we found. */
205 if ((otr_type
206 && types_odr_comparable (type, otr_type)
207 && types_same_for_odr (type, otr_type))
208 || (!otr_type
209 && TREE_CODE (type) == RECORD_TYPE
210 && TYPE_BINFO (type)
211 && polymorphic_type_binfo_p (TYPE_BINFO (type))))
213 if (speculative)
215 /* If we did not match the offset, just give up on speculation. */
216 if (cur_offset != 0
217 /* Also check if speculation did not end up being the same as
218 non-speculation. */
219 || (types_must_be_same_for_odr (speculative_outer_type,
220 outer_type)
221 && (maybe_derived_type
222 == speculative_maybe_derived_type)))
223 clear_speculation ();
224 return true;
226 else
228 /* If type is known to be final, do not worry about derived
229 types. Testing it here may help us to avoid speculation. */
230 if (otr_type && TREE_CODE (outer_type) == RECORD_TYPE
231 && (!in_lto_p || odr_type_p (outer_type))
232 && type_with_linkage_p (outer_type)
233 && type_known_to_have_no_derivations_p (outer_type))
234 maybe_derived_type = false;
236 /* A type cannot contain itself at a non-zero offset. In that case
237 just give up. Still accept the case where the size is not known.
238 Either the second copy may appear past the end of the type or within
239 the non-POD buffer located inside the variably sized type
240 itself. */
241 if (cur_offset != 0)
242 goto no_useful_type_info;
243 /* If we determined the type precisely or we have no clue on
244 speculation, we are done. */
245 if (!maybe_derived_type || !speculative_outer_type
246 || !speculation_consistent_p (speculative_outer_type,
247 speculative_offset,
248 speculative_maybe_derived_type,
249 otr_type))
251 clear_speculation ();
252 return true;
254 /* Otherwise look into speculation now. */
255 else
257 speculative = true;
258 type = speculative_outer_type;
259 cur_offset = speculative_offset;
260 continue;
265 /* Walk the fields and find the corresponding one at OFFSET. */
266 if (TREE_CODE (type) == RECORD_TYPE)
268 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
270 if (TREE_CODE (fld) != FIELD_DECL
271 || TREE_TYPE (fld) == error_mark_node)
272 continue;
274 pos = int_bit_position (fld);
275 if (pos > (unsigned HOST_WIDE_INT)cur_offset)
276 continue;
278 /* Do not consider vptr itself. Not even for placement new. */
279 if (!pos && DECL_ARTIFICIAL (fld)
280 && POINTER_TYPE_P (TREE_TYPE (fld))
281 && TYPE_BINFO (type)
282 && polymorphic_type_binfo_p (TYPE_BINFO (type)))
283 continue;
285 if (!DECL_SIZE (fld) || !tree_fits_uhwi_p (DECL_SIZE (fld)))
286 goto no_useful_type_info;
287 size = tree_to_uhwi (DECL_SIZE (fld));
289 /* We can always skip types smaller than pointer size:
290 those cannot contain a virtual table pointer.
292 Disqualifying fields that are too small to fit OTR_TYPE
293 saves work needed to walk them for no benefit.
294 Because of the way the bases are packed into a class, the
295 field's size may be smaller than the type size, so it needs
296 to be done with care. */
298 if (pos <= (unsigned HOST_WIDE_INT)cur_offset
299 && (pos + size) >= (unsigned HOST_WIDE_INT)cur_offset
300 + POINTER_SIZE
301 && (!otr_type
302 || !TYPE_SIZE (TREE_TYPE (fld))
303 || !tree_fits_shwi_p (TYPE_SIZE (TREE_TYPE (fld)))
304 || (pos + tree_to_uhwi (TYPE_SIZE (TREE_TYPE (fld))))
305 >= cur_offset + otr_type_size))
306 break;
309 if (!fld)
310 goto no_useful_type_info;
312 type = TYPE_MAIN_VARIANT (TREE_TYPE (fld));
313 cur_offset -= pos;
314 /* DECL_ARTIFICIAL represents a basetype. */
315 if (!DECL_ARTIFICIAL (fld))
317 if (!speculative)
319 outer_type = type;
320 offset = cur_offset;
321 /* As soon as we see a field containing the type,
322 we know we are not looking for derivations. */
323 maybe_derived_type = false;
325 else
327 speculative_outer_type = type;
328 speculative_offset = cur_offset;
329 speculative_maybe_derived_type = false;
332 else if (!consider_bases)
333 goto no_useful_type_info;
335 else if (TREE_CODE (type) == ARRAY_TYPE)
337 tree subtype = TYPE_MAIN_VARIANT (TREE_TYPE (type));
339 /* Give up if we do not know the size of the array element.
340 Also give up on non-polymorphic types as they are used
341 as buffers for placement new. */
342 if (!TYPE_SIZE (subtype)
343 || !tree_fits_shwi_p (TYPE_SIZE (subtype))
344 || tree_to_shwi (TYPE_SIZE (subtype)) <= 0
345 || !contains_polymorphic_type_p (subtype))
346 goto no_useful_type_info;
348 HOST_WIDE_INT new_offset = cur_offset % tree_to_shwi (TYPE_SIZE (subtype));
350 /* We may see a buffer for placement new. In this case the expected type
351 can be bigger than the subtype. */
352 if (TYPE_SIZE (subtype)
353 && (cur_offset + otr_type_size
354 > tree_to_uhwi (TYPE_SIZE (subtype))))
355 goto no_useful_type_info;
357 cur_offset = new_offset;
358 type = TYPE_MAIN_VARIANT (subtype);
359 if (!speculative)
361 outer_type = type;
362 offset = cur_offset;
363 maybe_derived_type = false;
365 else
367 speculative_outer_type = type;
368 speculative_offset = cur_offset;
369 speculative_maybe_derived_type = false;
372 /* Give up on anything else. */
373 else
375 no_useful_type_info:
376 if (maybe_derived_type && !speculative
377 && TREE_CODE (outer_type) == RECORD_TYPE
378 && TREE_CODE (otr_type) == RECORD_TYPE
379 && TYPE_BINFO (otr_type)
380 && !offset
381 && get_binfo_at_offset (TYPE_BINFO (otr_type), 0, outer_type))
383 clear_outer_type (otr_type);
384 if (!speculative_outer_type
385 || !speculation_consistent_p (speculative_outer_type,
386 speculative_offset,
387 speculative_maybe_derived_type,
388 otr_type))
389 clear_speculation ();
390 if (speculative_outer_type)
392 speculative = true;
393 type = speculative_outer_type;
394 cur_offset = speculative_offset;
396 else
397 return true;
399 /* We found no way to embed EXPECTED_TYPE in TYPE.
400 We still permit two special cases - placement new and
401 the case of variably sized types containing themselves. */
402 if (!speculative
403 && consider_placement_new
404 && (size_unknown || !type || maybe_derived_type
405 || possible_placement_new (type, otr_type, cur_offset)))
407 /* In these weird cases we want to accept the context.
408 In the non-speculative run we have no useful outer_type info
409 (TODO: we may eventually want to record an upper bound on the
410 type size that can be used to prune the walk),
411 but we still want to consider speculation that may
412 give useful info. */
413 if (!speculative)
415 clear_outer_type (otr_type);
416 if (!speculative_outer_type
417 || !speculation_consistent_p (speculative_outer_type,
418 speculative_offset,
419 speculative_maybe_derived_type,
420 otr_type))
421 clear_speculation ();
422 if (speculative_outer_type)
424 speculative = true;
425 type = speculative_outer_type;
426 cur_offset = speculative_offset;
428 else
429 return true;
431 else
433 clear_speculation ();
434 return true;
437 else
439 clear_speculation ();
440 if (speculative)
441 return true;
442 clear_outer_type (otr_type);
443 invalid = true;
444 return false;
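/* A worked example for the function above, for illustration only (the class
   names are invented for this comment): given

     struct Base { virtual void f (); };
     struct Mid : Base { int pad; };
     struct Outer { int a; Mid m; };

   calling restrict_to_inner_class with OUTER_TYPE == Outer, OFFSET equal to
   the bit position of the field M and OTR_TYPE == Base walks into the
   non-artificial field M (setting OUTER_TYPE to Mid, OFFSET to 0 and
   clearing maybe_derived_type), and then recognizes Base as a base of Mid,
   so the context ends up with OUTER_TYPE == Mid and OFFSET == 0.  */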
450 /* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET.
451 CONSIDER_PLACEMENT_NEW makes the function accept cases where OTR_TYPE can
452 be built within OUTER_TYPE by means of placement new. CONSIDER_BASES makes
453 the function accept cases where OTR_TYPE appears as a base of OUTER_TYPE or
454 as a base of one of the fields of OUTER_TYPE. */
456 static bool
457 contains_type_p (tree outer_type, HOST_WIDE_INT offset,
458 tree otr_type,
459 bool consider_placement_new,
460 bool consider_bases)
462 ipa_polymorphic_call_context context;
464 /* Check that type is within range. */
465 if (offset < 0)
466 return false;
468 /* PR ipa/71207
469 As OUTER_TYPE can be a type with diamond virtual inheritance,
470 it is not necessary that INNER_TYPE will fit within OUTER_TYPE at
471 a given offset. It can happen that INNER_TYPE also contains a base object,
472 but it would point to the same instance in the OUTER_TYPE. */
474 context.offset = offset;
475 context.outer_type = TYPE_MAIN_VARIANT (outer_type);
476 context.maybe_derived_type = false;
477 context.dynamic = false;
478 return context.restrict_to_inner_class (otr_type, consider_placement_new,
479 consider_bases);
483 /* Return a FUNCTION_DECL if FN represents a constructor or destructor.
484 If CHECK_CLONES is true, also check for clones of ctors/dtors. */
486 tree
487 polymorphic_ctor_dtor_p (tree fn, bool check_clones)
489 if (TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
490 || (!DECL_CXX_CONSTRUCTOR_P (fn) && !DECL_CXX_DESTRUCTOR_P (fn)))
492 if (!check_clones)
493 return NULL_TREE;
495 /* Watch for clones where we constant propagated the first
496 argument (pointer to the instance). */
497 fn = DECL_ABSTRACT_ORIGIN (fn);
498 if (!fn
499 || TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
500 || (!DECL_CXX_CONSTRUCTOR_P (fn) && !DECL_CXX_DESTRUCTOR_P (fn)))
501 return NULL_TREE;
504 if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
505 return NULL_TREE;
507 return fn;
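/* For illustration (the example class and mangled clone suffix are only
   assumed here): given

     struct S { S (int); virtual ~S (); };

   polymorphic_ctor_dtor_p returns FN for S::S(int) or S::~S(). With
   CHECK_CLONES it also accepts an IPA clone such as S::S(int)
   [clone .constprop.0] whose THIS pointer was constant propagated away,
   by looking through DECL_ABSTRACT_ORIGIN as done above.  */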
510 /* Return a FUNCTION_DECL if BLOCK represents a constructor or destructor.
511 If CHECK_CLONES is true, also check for clones of ctor/dtors. */
513 tree
514 inlined_polymorphic_ctor_dtor_block_p (tree block, bool check_clones)
516 tree fn = block_ultimate_origin (block);
517 if (fn == NULL || TREE_CODE (fn) != FUNCTION_DECL)
518 return NULL_TREE;
520 return polymorphic_ctor_dtor_p (fn, check_clones);
524 /* We know that the instance is stored in a variable or parameter
525 (not dynamically allocated) and we want to disprove the possibility
526 that it may be in construction at the invocation of CALL.
528 BASE represents the memory location where the instance is stored.
529 If BASE is NULL, it is assumed to be global memory.
530 OUTER_TYPE is the known type of the instance, or NULL if not
531 known.
533 For the variable to be in construction we either need to
534 be in the constructor of the corresponding global variable or
535 the inline stack of CALL must contain the constructor.
536 Check this condition. This check works safely only before
537 IPA passes, because inline stacks may become out of date
538 later. */
540 bool
541 decl_maybe_in_construction_p (tree base, tree outer_type,
542 gimple *call, tree function)
544 if (outer_type)
545 outer_type = TYPE_MAIN_VARIANT (outer_type);
546 gcc_assert (!base || DECL_P (base));
548 /* After inlining, code unification optimizations may invalidate
549 inline stacks. Also we need to give up on global variables after
550 IPA, because addresses of these may have been propagated to their
551 constructors. */
552 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
553 return true;
555 /* Pure functions cannot change the dynamic type;
556 that requires writing to memory. */
557 if ((!base || !auto_var_in_fn_p (base, function))
558 && flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
559 return false;
561 bool check_clones = !base || is_global_var (base);
562 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
563 block = BLOCK_SUPERCONTEXT (block))
564 if (tree fn = inlined_polymorphic_ctor_dtor_block_p (block, check_clones))
566 tree type = TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
568 if (!outer_type || !types_odr_comparable (type, outer_type))
570 if (TREE_CODE (type) == RECORD_TYPE
571 && TYPE_BINFO (type)
572 && polymorphic_type_binfo_p (TYPE_BINFO (type)))
573 return true;
575 else if (types_same_for_odr (type, outer_type))
576 return true;
579 if (!base || (VAR_P (base) && is_global_var (base)))
581 if (TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
582 || (!DECL_CXX_CONSTRUCTOR_P (function)
583 && !DECL_CXX_DESTRUCTOR_P (function)))
585 if (!DECL_ABSTRACT_ORIGIN (function))
586 return false;
587 /* Watch for clones where we constant propagated the first
588 argument (pointer to the instance). */
589 function = DECL_ABSTRACT_ORIGIN (function);
590 if (!function
591 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
592 || (!DECL_CXX_CONSTRUCTOR_P (function)
593 && !DECL_CXX_DESTRUCTOR_P (function)))
594 return false;
596 tree type = TYPE_METHOD_BASETYPE (TREE_TYPE (function));
597 if (!outer_type || !types_odr_comparable (type, outer_type))
599 if (TREE_CODE (type) == RECORD_TYPE
600 && TYPE_BINFO (type)
601 && polymorphic_type_binfo_p (TYPE_BINFO (type)))
602 return true;
604 else if (types_same_for_odr (type, outer_type))
605 return true;
607 return false;
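/* Illustrative example only (code invented for this comment): for

     struct S { S (); virtual void f (); };
     S global_s;			// constructed by a static initializer

     S::S () { f (); }			// virtual call while *THIS is built

   decl_maybe_in_construction_p must return true for a call on &global_s
   that is reached from S::S (directly, or through a block inlined from
   S::S), which is exactly what the inline-stack walk above detects.  */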
610 /* Dump human readable context to F. If NEWLINE is true, it will be terminated
611 by a newline. */
613 void
614 ipa_polymorphic_call_context::dump (FILE *f, bool newline) const
616 fprintf (f, " ");
617 if (invalid)
618 fprintf (f, "Call is known to be undefined");
619 else
621 if (useless_p ())
622 fprintf (f, "nothing known");
623 if (outer_type || offset)
625 fprintf (f, "Outer type%s:", dynamic ? " (dynamic)":"");
626 print_generic_expr (f, outer_type, TDF_SLIM);
627 if (maybe_derived_type)
628 fprintf (f, " (or a derived type)");
629 if (maybe_in_construction)
630 fprintf (f, " (maybe in construction)");
631 fprintf (f, " offset " HOST_WIDE_INT_PRINT_DEC,
632 offset);
634 if (speculative_outer_type)
636 if (outer_type || offset)
637 fprintf (f, " ");
638 fprintf (f, "Speculative outer type:");
639 print_generic_expr (f, speculative_outer_type, TDF_SLIM);
640 if (speculative_maybe_derived_type)
641 fprintf (f, " (or a derived type)");
642 fprintf (f, " at offset " HOST_WIDE_INT_PRINT_DEC,
643 speculative_offset);
646 if (newline)
647 fprintf(f, "\n");
650 /* Print context to stderr. */
652 void
653 ipa_polymorphic_call_context::debug () const
655 dump (stderr);
658 /* Stream out the context to OB. */
660 void
661 ipa_polymorphic_call_context::stream_out (struct output_block *ob) const
663 struct bitpack_d bp = bitpack_create (ob->main_stream);
665 bp_pack_value (&bp, invalid, 1);
666 bp_pack_value (&bp, maybe_in_construction, 1);
667 bp_pack_value (&bp, maybe_derived_type, 1);
668 bp_pack_value (&bp, speculative_maybe_derived_type, 1);
669 bp_pack_value (&bp, dynamic, 1);
670 bp_pack_value (&bp, outer_type != NULL, 1);
671 bp_pack_value (&bp, offset != 0, 1);
672 bp_pack_value (&bp, speculative_outer_type != NULL, 1);
673 streamer_write_bitpack (&bp);
675 if (outer_type != NULL)
676 stream_write_tree (ob, outer_type, true);
677 if (offset)
678 streamer_write_hwi (ob, offset);
679 if (speculative_outer_type != NULL)
681 stream_write_tree (ob, speculative_outer_type, true);
682 streamer_write_hwi (ob, speculative_offset);
684 else
685 gcc_assert (!speculative_offset);
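/* The bitpack produced above (and consumed by stream_in below) contains,
   in order, one bit each for: invalid, maybe_in_construction,
   maybe_derived_type, speculative_maybe_derived_type, dynamic,
   "outer_type present", "offset present" and "speculative_outer_type
   present", followed by the optional tree references and offsets.  */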
688 /* Stream in the context from IB and DATA_IN. */
690 void
691 ipa_polymorphic_call_context::stream_in (struct lto_input_block *ib,
692 struct data_in *data_in)
694 struct bitpack_d bp = streamer_read_bitpack (ib);
696 invalid = bp_unpack_value (&bp, 1);
697 maybe_in_construction = bp_unpack_value (&bp, 1);
698 maybe_derived_type = bp_unpack_value (&bp, 1);
699 speculative_maybe_derived_type = bp_unpack_value (&bp, 1);
700 dynamic = bp_unpack_value (&bp, 1);
701 bool outer_type_p = bp_unpack_value (&bp, 1);
702 bool offset_p = bp_unpack_value (&bp, 1);
703 bool speculative_outer_type_p = bp_unpack_value (&bp, 1);
705 if (outer_type_p)
706 outer_type = stream_read_tree (ib, data_in);
707 else
708 outer_type = NULL;
709 if (offset_p)
710 offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
711 else
712 offset = 0;
713 if (speculative_outer_type_p)
715 speculative_outer_type = stream_read_tree (ib, data_in);
716 speculative_offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
718 else
720 speculative_outer_type = NULL;
721 speculative_offset = 0;
725 /* Produce a polymorphic call context for a call to a method of an instance
726 that is located within BASE (that is assumed to be a decl) at offset OFF. */
728 void
729 ipa_polymorphic_call_context::set_by_decl (tree base, HOST_WIDE_INT off)
731 gcc_assert (DECL_P (base));
732 clear_speculation ();
734 if (!contains_polymorphic_type_p (TREE_TYPE (base)))
736 clear_outer_type ();
737 offset = off;
738 return;
740 outer_type = TYPE_MAIN_VARIANT (TREE_TYPE (base));
741 offset = off;
742 /* Make the very conservative assumption that all objects
743 may be in construction.
745 It is up to the caller to revisit this via
746 get_dynamic_type or decl_maybe_in_construction_p. */
747 maybe_in_construction = true;
748 maybe_derived_type = false;
749 dynamic = false;
752 /* CST is an invariant (address of a decl); try to get a meaningful
753 polymorphic call context for a polymorphic call of a method
754 of an instance of OTR_TYPE that is located at offset OFF of this invariant.
755 Return FALSE if nothing meaningful can be found. */
757 bool
758 ipa_polymorphic_call_context::set_by_invariant (tree cst,
759 tree otr_type,
760 HOST_WIDE_INT off)
762 HOST_WIDE_INT offset2, size, max_size;
763 bool reverse;
764 tree base;
766 invalid = false;
767 off = 0;
768 clear_outer_type (otr_type);
770 if (TREE_CODE (cst) != ADDR_EXPR)
771 return false;
773 cst = TREE_OPERAND (cst, 0);
774 base = get_ref_base_and_extent (cst, &offset2, &size, &max_size, &reverse);
775 if (!DECL_P (base) || max_size == -1 || max_size != size)
776 return false;
778 /* Only type inconsistent programs can have otr_type that is
779 not part of outer type. */
780 if (otr_type && !contains_type_p (TREE_TYPE (base), off, otr_type))
781 return false;
783 set_by_decl (base, off);
784 return true;
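/* For illustration only (the variable name is invented here): a typical
   invariant handled above is the address of a global instance,

     struct V { virtual void f (); };
     V global_v;
     // propagated constant: &global_v

   for which set_by_invariant strips the ADDR_EXPR, verifies that the whole
   of GLOBAL_V is referenced and then simply delegates to set_by_decl.
   A constant with a variable component or a non-decl base makes it
   return false instead.  */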
787 /* See if OP is an SSA name initialized as a copy or by a single assignment.
788 If so, walk the SSA graph up. Because a simple PHI conditional is considered
789 a copy, GLOBAL_VISITED may be used to avoid an infinite loop when walking
790 the SSA graph. */
792 static tree
793 walk_ssa_copies (tree op, hash_set<tree> **global_visited = NULL)
795 hash_set <tree> *visited = NULL;
796 STRIP_NOPS (op);
797 while (TREE_CODE (op) == SSA_NAME
798 && !SSA_NAME_IS_DEFAULT_DEF (op)
799 /* We might be called via fold_stmt during cfgcleanup where
800 SSA form need not be up-to-date. */
801 && !name_registered_for_update_p (op)
802 && (gimple_assign_single_p (SSA_NAME_DEF_STMT (op))
803 || gimple_code (SSA_NAME_DEF_STMT (op)) == GIMPLE_PHI))
805 if (global_visited)
807 if (!*global_visited)
808 *global_visited = new hash_set<tree>;
809 if ((*global_visited)->add (op))
810 goto done;
812 else
814 if (!visited)
815 visited = new hash_set<tree>;
816 if (visited->add (op))
817 goto done;
819 /* Special case
820 if (ptr == 0)
821 ptr = 0;
822 else
823 ptr = ptr.foo;
824 This pattern is implicitly produced for casts to non-primary
825 bases. When doing context analysis, we do not really care
826 about the case where the pointer is NULL, because the call will be
827 undefined anyway. */
828 if (gimple_code (SSA_NAME_DEF_STMT (op)) == GIMPLE_PHI)
830 gimple *phi = SSA_NAME_DEF_STMT (op);
832 if (gimple_phi_num_args (phi) > 2)
833 goto done;
834 if (gimple_phi_num_args (phi) == 1)
835 op = gimple_phi_arg_def (phi, 0);
836 else if (integer_zerop (gimple_phi_arg_def (phi, 0)))
837 op = gimple_phi_arg_def (phi, 1);
838 else if (integer_zerop (gimple_phi_arg_def (phi, 1)))
839 op = gimple_phi_arg_def (phi, 0);
840 else
841 goto done;
843 else
845 if (gimple_assign_load_p (SSA_NAME_DEF_STMT (op)))
846 goto done;
847 op = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op));
849 STRIP_NOPS (op);
851 done:
852 if (visited)
853 delete (visited);
854 return op;
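/* Illustration only (types invented for this comment): the PHI special case
   above is what a cast to a non-primary base produces, e.g.

     struct A { virtual void f (); };
     struct B { virtual void g (); };
     struct C : A, B { };

     void call (C *c) { B *b = c; b->g (); }

   where the conversion is emitted as "b = c != 0 ? (B *) ((char *) c
   + delta) : 0", i.e. a PHI with one zero argument that the walk above
   happily looks through.  */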
857 /* Create a polymorphic call context from IP invariant CST.
858 This is typically &global_var.
859 OTR_TYPE specifies the type of the polymorphic call or NULL if unknown;
860 OFF is the offset of the call. */
862 ipa_polymorphic_call_context::ipa_polymorphic_call_context (tree cst,
863 tree otr_type,
864 HOST_WIDE_INT off)
866 clear_speculation ();
867 set_by_invariant (cst, otr_type, off);
870 /* Build context for pointer REF contained in FNDECL at statement STMT.
871 If INSTANCE is non-NULL, return a pointer to the object described by
872 the context or the DECL that the context is contained in. */
874 ipa_polymorphic_call_context::ipa_polymorphic_call_context (tree fndecl,
875 tree ref,
876 gimple *stmt,
877 tree *instance)
879 tree otr_type = NULL;
880 tree base_pointer;
881 hash_set <tree> *visited = NULL;
883 if (TREE_CODE (ref) == OBJ_TYPE_REF)
885 otr_type = obj_type_ref_class (ref);
886 base_pointer = OBJ_TYPE_REF_OBJECT (ref);
888 else
889 base_pointer = ref;
891 /* Set up basic info in case we find nothing interesting in the analysis. */
892 clear_speculation ();
893 clear_outer_type (otr_type);
894 invalid = false;
896 /* Walk SSA for outer object. */
897 while (true)
899 base_pointer = walk_ssa_copies (base_pointer, &visited);
900 if (TREE_CODE (base_pointer) == ADDR_EXPR)
902 HOST_WIDE_INT size, max_size;
903 HOST_WIDE_INT offset2;
904 bool reverse;
905 tree base
906 = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
907 &offset2, &size, &max_size, &reverse);
909 if (max_size != -1 && max_size == size)
910 combine_speculation_with (TYPE_MAIN_VARIANT (TREE_TYPE (base)),
911 offset + offset2,
912 true,
913 NULL /* Do not change outer type. */);
915 /* If this is a varying address, punt. */
916 if ((TREE_CODE (base) == MEM_REF || DECL_P (base))
917 && max_size != -1
918 && max_size == size)
920 /* We found a dereference of a pointer. The type of the pointer
921 and of the MEM_REF is meaningless, but we can look further. */
922 if (TREE_CODE (base) == MEM_REF)
924 offset_int o = mem_ref_offset (base) * BITS_PER_UNIT;
925 o += offset;
926 o += offset2;
927 if (!wi::fits_shwi_p (o))
928 break;
929 base_pointer = TREE_OPERAND (base, 0);
930 offset = o.to_shwi ();
931 outer_type = NULL;
933 /* We found base object. In this case the outer_type
934 is known. */
935 else if (DECL_P (base))
937 if (visited)
938 delete (visited);
939 /* Only type inconsistent programs can have otr_type that is
940 not part of outer type. */
941 if (otr_type
942 && !contains_type_p (TREE_TYPE (base),
943 offset + offset2, otr_type))
945 invalid = true;
946 if (instance)
947 *instance = base_pointer;
948 return;
950 set_by_decl (base, offset + offset2);
951 if (outer_type && maybe_in_construction && stmt)
952 maybe_in_construction
953 = decl_maybe_in_construction_p (base,
954 outer_type,
955 stmt,
956 fndecl);
957 if (instance)
958 *instance = base;
959 return;
961 else
962 break;
964 else
965 break;
967 else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
968 && TREE_CODE (TREE_OPERAND (base_pointer, 1)) == INTEGER_CST)
970 offset_int o = offset_int::from (TREE_OPERAND (base_pointer, 1),
971 SIGNED);
972 o *= BITS_PER_UNIT;
973 o += offset;
974 if (!wi::fits_shwi_p (o))
975 break;
976 offset = o.to_shwi ();
977 base_pointer = TREE_OPERAND (base_pointer, 0);
979 else
980 break;
983 if (visited)
984 delete (visited);
986 /* Try to determine type of the outer object. */
987 if (TREE_CODE (base_pointer) == SSA_NAME
988 && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
989 && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
991 /* See if parameter is THIS pointer of a method. */
992 if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
993 && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
995 outer_type
996 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
997 gcc_assert (TREE_CODE (outer_type) == RECORD_TYPE
998 || TREE_CODE (outer_type) == UNION_TYPE);
1000 /* Dynamic casting has possibly upcasted the type
1001 in the hierarchy. In this case the outer type is less
1002 informative than the inner type and we should forget
1003 about it. */
1004 if ((otr_type
1005 && !contains_type_p (outer_type, offset,
1006 otr_type))
1007 || !contains_polymorphic_type_p (outer_type))
1009 outer_type = NULL;
1010 if (instance)
1011 *instance = base_pointer;
1012 return;
1015 dynamic = true;
1017 /* If the function is constructor or destructor, then
1018 the type is possibly in construction, but we know
1019 it is not derived type. */
1020 if (DECL_CXX_CONSTRUCTOR_P (fndecl)
1021 || DECL_CXX_DESTRUCTOR_P (fndecl))
1023 maybe_in_construction = true;
1024 maybe_derived_type = false;
1026 else
1028 maybe_derived_type = true;
1029 maybe_in_construction = false;
1031 if (instance)
1032 *instance = base_pointer;
1033 return;
1035 /* Non-PODs passed by value are really passed by invisible
1036 reference. In this case we also know the type of the
1037 object. */
1038 if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
1040 outer_type
1041 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
1042 /* Only type inconsistent programs can have otr_type that is
1043 not part of outer type. */
1044 if (otr_type && !contains_type_p (outer_type, offset,
1045 otr_type))
1047 invalid = true;
1048 if (instance)
1049 *instance = base_pointer;
1050 return;
1052 /* Non-polymorphic types are of no interest to us. */
1053 else if (!otr_type && !contains_polymorphic_type_p (outer_type))
1055 outer_type = NULL;
1056 if (instance)
1057 *instance = base_pointer;
1058 return;
1060 maybe_derived_type = false;
1061 maybe_in_construction = false;
1062 if (instance)
1063 *instance = base_pointer;
1064 return;
1068 tree base_type = TREE_TYPE (base_pointer);
1070 if (TREE_CODE (base_pointer) == SSA_NAME
1071 && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
1072 && !(TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL
1073 || TREE_CODE (SSA_NAME_VAR (base_pointer)) == RESULT_DECL))
1075 invalid = true;
1076 if (instance)
1077 *instance = base_pointer;
1078 return;
1080 if (TREE_CODE (base_pointer) == SSA_NAME
1081 && SSA_NAME_DEF_STMT (base_pointer)
1082 && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
1083 base_type = TREE_TYPE (gimple_assign_rhs1
1084 (SSA_NAME_DEF_STMT (base_pointer)));
1086 if (base_type && POINTER_TYPE_P (base_type))
1087 combine_speculation_with (TYPE_MAIN_VARIANT (TREE_TYPE (base_type)),
1088 offset,
1089 true, NULL /* Do not change type here */);
1090 /* TODO: There are multiple ways to derive a type. For instance
1091 if BASE_POINTER is passed to a constructor call prior to our reference.
1092 We do not do this kind of flow-sensitive analysis yet. */
1093 if (instance)
1094 *instance = base_pointer;
1095 return;
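/* Illustrative example only (the class is invented for this comment): inside

     struct S { virtual int get (); int call () { return get (); } };

   the OBJ_TYPE_REF for GET uses the incoming THIS parameter of S::call, so
   the constructor above derives outer_type == S with dynamic and
   maybe_derived_type set (the caller may pass an object of a class derived
   from S) and maybe_in_construction cleared, since S::call is neither a
   constructor nor a destructor.  */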
1098 /* Structure to be passed in between detect_type_change and
1099 check_stmt_for_type_change. */
1101 struct type_change_info
1103 /* Offset into the object where there is the virtual method pointer we are
1104 looking for. */
1105 HOST_WIDE_INT offset;
1106 /* The declaration or SSA_NAME pointer of the base that we are checking for
1107 type change. */
1108 tree instance;
1109 /* The reference to virtual table pointer used. */
1110 tree vtbl_ptr_ref;
1111 tree otr_type;
1112 /* If we actually can tell the type that the object has changed to, it is
1113 stored in this field. Otherwise it remains NULL_TREE. */
1114 tree known_current_type;
1115 HOST_WIDE_INT known_current_offset;
1117 /* Set to nonzero if we possibly missed some dynamic type changes and we
1118 should consider the set to be speculative. */
1119 unsigned speculative;
1121 /* Set to true if dynamic type change has been detected. */
1122 bool type_maybe_changed;
1123 /* Set to true if multiple types have been encountered. known_current_type
1124 must be disregarded in that case. */
1125 bool multiple_types_encountered;
1126 bool seen_unanalyzed_store;
1129 /* Return true if STMT is not a call and can modify a virtual method table
1130 pointer. We take advantage of the fact that vtable stores must appear within
1131 constructor and destructor functions. */
1133 static bool
1134 noncall_stmt_may_be_vtbl_ptr_store (gimple *stmt)
1136 if (is_gimple_assign (stmt))
1138 tree lhs = gimple_assign_lhs (stmt);
1140 if (gimple_clobber_p (stmt))
1141 return false;
1142 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
1144 if (flag_strict_aliasing
1145 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
1146 return false;
1148 if (TREE_CODE (lhs) == COMPONENT_REF
1149 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
1150 return false;
1151 /* In the future we might want to use get_base_ref_and_offset to find
1152 if there is a field corresponding to the offset and if so, proceed
1153 almost as if it were a component ref. */
1157 /* Code unification may mess with inline stacks. */
1158 if (cfun->after_inlining)
1159 return true;
1161 /* Walk the inline stack and watch out for ctors/dtors.
1162 TODO: Maybe we can require the store to appear in toplevel
1163 block of CTOR/DTOR. */
1164 for (tree block = gimple_block (stmt); block && TREE_CODE (block) == BLOCK;
1165 block = BLOCK_SUPERCONTEXT (block))
1166 if (BLOCK_ABSTRACT_ORIGIN (block)
1167 && TREE_CODE (block_ultimate_origin (block)) == FUNCTION_DECL)
1168 return inlined_polymorphic_ctor_dtor_block_p (block, false);
1169 return (TREE_CODE (TREE_TYPE (current_function_decl)) == METHOD_TYPE
1170 && (DECL_CXX_CONSTRUCTOR_P (current_function_decl)
1171 || DECL_CXX_DESTRUCTOR_P (current_function_decl)));
1174 /* If STMT can be proved to be an assignment to the virtual method table
1175 pointer of ANALYZED_OBJ and the type associated with the new table can be
1176 identified, return the type. Otherwise return NULL_TREE if the type changes
1177 in an unknown way or ERROR_MARK_NODE if the type is unchanged. */
1179 static tree
1180 extr_type_from_vtbl_ptr_store (gimple *stmt, struct type_change_info *tci,
1181 HOST_WIDE_INT *type_offset)
1183 HOST_WIDE_INT offset, size, max_size;
1184 tree lhs, rhs, base;
1185 bool reverse;
1187 if (!gimple_assign_single_p (stmt))
1188 return NULL_TREE;
1190 lhs = gimple_assign_lhs (stmt);
1191 rhs = gimple_assign_rhs1 (stmt);
1192 if (TREE_CODE (lhs) != COMPONENT_REF
1193 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
1195 if (dump_file)
1196 fprintf (dump_file, " LHS is not virtual table.\n");
1197 return NULL_TREE;
1200 if (tci->vtbl_ptr_ref && operand_equal_p (lhs, tci->vtbl_ptr_ref, 0))
1202 else
1204 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size, &reverse);
1205 if (DECL_P (tci->instance))
1207 if (base != tci->instance)
1209 if (dump_file)
1211 fprintf (dump_file, " base:");
1212 print_generic_expr (dump_file, base, TDF_SLIM);
1213 fprintf (dump_file, " does not match instance:");
1214 print_generic_expr (dump_file, tci->instance, TDF_SLIM);
1215 fprintf (dump_file, "\n");
1217 return NULL_TREE;
1220 else if (TREE_CODE (base) == MEM_REF)
1222 if (!operand_equal_p (tci->instance, TREE_OPERAND (base, 0), 0))
1224 if (dump_file)
1226 fprintf (dump_file, " base mem ref:");
1227 print_generic_expr (dump_file, base, TDF_SLIM);
1228 fprintf (dump_file, " does not match instance:");
1229 print_generic_expr (dump_file, tci->instance, TDF_SLIM);
1230 fprintf (dump_file, "\n");
1232 return NULL_TREE;
1234 if (!integer_zerop (TREE_OPERAND (base, 1)))
1236 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
1238 if (dump_file)
1240 fprintf (dump_file, " base mem ref:");
1241 print_generic_expr (dump_file, base, TDF_SLIM);
1242 fprintf (dump_file, " has non-representable offset:");
1243 print_generic_expr (dump_file, tci->instance, TDF_SLIM);
1244 fprintf (dump_file, "\n");
1246 return NULL_TREE;
1248 else
1249 offset += tree_to_shwi (TREE_OPERAND (base, 1)) * BITS_PER_UNIT;
1252 else if (!operand_equal_p (tci->instance, base, 0)
1253 || tci->offset)
1255 if (dump_file)
1257 fprintf (dump_file, " base:");
1258 print_generic_expr (dump_file, base, TDF_SLIM);
1259 fprintf (dump_file, " does not match instance:");
1260 print_generic_expr (dump_file, tci->instance, TDF_SLIM);
1261 fprintf (dump_file, " with offset %i\n", (int)tci->offset);
1263 return tci->offset > POINTER_SIZE ? error_mark_node : NULL_TREE;
1265 if (offset != tci->offset
1266 || size != POINTER_SIZE
1267 || max_size != POINTER_SIZE)
1269 if (dump_file)
1270 fprintf (dump_file, " wrong offset %i!=%i or size %i\n",
1271 (int)offset, (int)tci->offset, (int)size);
1272 return offset + POINTER_SIZE <= tci->offset
1273 || (max_size != -1
1274 && tci->offset + POINTER_SIZE > offset + max_size)
1275 ? error_mark_node : NULL;
1279 tree vtable;
1280 unsigned HOST_WIDE_INT offset2;
1282 if (!vtable_pointer_value_to_vtable (rhs, &vtable, &offset2))
1284 if (dump_file)
1285 fprintf (dump_file, " Failed to lookup binfo\n");
1286 return NULL;
1289 tree binfo = subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
1290 offset2, vtable);
1291 if (!binfo)
1293 if (dump_file)
1294 fprintf (dump_file, " Construction vtable used\n");
1295 /* FIXME: We should support construction contexts. */
1296 return NULL;
1299 *type_offset = tree_to_shwi (BINFO_OFFSET (binfo)) * BITS_PER_UNIT;
1300 return DECL_CONTEXT (vtable);
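/* A vtbl pointer store matched by the function above typically looks like
   the following GIMPLE (illustrative only; identifiers invented here):

     this_2(D)->_vptr.Widget = &MEM[(void *)&_ZTV6Widget + 16B];

   i.e. a COMPONENT_REF of a DECL_VIRTUAL_P field on the left-hand side and
   the address of a slot inside a vtable on the right-hand side, from which
   vtable_pointer_value_to_vtable recovers the vtable and offset.  */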
1303 /* Record dynamic type change of TCI to TYPE. */
1305 static void
1306 record_known_type (struct type_change_info *tci, tree type, HOST_WIDE_INT offset)
1308 if (dump_file)
1310 if (type)
1312 fprintf (dump_file, " Recording type: ");
1313 print_generic_expr (dump_file, type, TDF_SLIM);
1314 fprintf (dump_file, " at offset %i\n", (int)offset);
1316 else
1317 fprintf (dump_file, " Recording unknown type\n");
1320 /* If we found a constructor of a type that is not polymorphic or
1321 that may contain the type in question as a field (not as a base),
1322 restrict to the inner class first to make the type matching below
1323 happier. */
1324 if (type
1325 && (offset
1326 || (TREE_CODE (type) != RECORD_TYPE
1327 || !TYPE_BINFO (type)
1328 || !polymorphic_type_binfo_p (TYPE_BINFO (type)))))
1330 ipa_polymorphic_call_context context;
1332 context.offset = offset;
1333 context.outer_type = type;
1334 context.maybe_in_construction = false;
1335 context.maybe_derived_type = false;
1336 context.dynamic = true;
1337 /* If we failed to find the inner type, we know that the call
1338 would be undefined for type produced here. */
1339 if (!context.restrict_to_inner_class (tci->otr_type))
1341 if (dump_file)
1342 fprintf (dump_file, " Ignoring; does not contain otr_type\n");
1343 return;
1345 /* Watch for the case where we reached a POD type and anticipate placement
1346 new. */
1347 if (!context.maybe_derived_type)
1349 type = context.outer_type;
1350 offset = context.offset;
1353 if (tci->type_maybe_changed
1354 && (!types_same_for_odr (type, tci->known_current_type)
1355 || offset != tci->known_current_offset))
1356 tci->multiple_types_encountered = true;
1357 tci->known_current_type = TYPE_MAIN_VARIANT (type);
1358 tci->known_current_offset = offset;
1359 tci->type_maybe_changed = true;
1363 /* The maximum number of may-defs we visit when looking for a must-def
1364 that changes the dynamic type in check_stmt_for_type_change. Tuned
1365 after the PR12392 testcase which, when unlimited, spends 40% of its time
1366 within these alias walks and 8% with the following limit. */
1368 static inline bool
1369 csftc_abort_walking_p (unsigned speculative)
1371 unsigned max = PARAM_VALUE (PARAM_MAX_SPECULATIVE_DEVIRT_MAYDEFS);
1372 return speculative > max ? true : false;
1375 /* Callback of walk_aliased_vdefs and a helper function for
1376 detect_type_change to check whether a particular statement may modify
1377 the virtual table pointer, and if possible also determine the new type of
1378 the (sub-)object. It stores its result into DATA, which points to a
1379 type_change_info structure. */
1381 static bool
1382 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
1384 gimple *stmt = SSA_NAME_DEF_STMT (vdef);
1385 struct type_change_info *tci = (struct type_change_info *) data;
1386 tree fn;
1388 /* If we already gave up, just terminate the rest of walk. */
1389 if (tci->multiple_types_encountered)
1390 return true;
1392 if (is_gimple_call (stmt))
1394 if (gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE))
1395 return false;
1397 /* Check for a constructor call. */
1398 if ((fn = gimple_call_fndecl (stmt)) != NULL_TREE
1399 && DECL_CXX_CONSTRUCTOR_P (fn)
1400 && TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
1401 && gimple_call_num_args (stmt))
1403 tree op = walk_ssa_copies (gimple_call_arg (stmt, 0));
1404 tree type = TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
1405 HOST_WIDE_INT offset = 0, size, max_size;
1406 bool reverse;
1408 if (dump_file)
1410 fprintf (dump_file, " Checking constructor call: ");
1411 print_gimple_stmt (dump_file, stmt, 0);
1414 /* See if the THIS parameter seems like the instance pointer. */
1415 if (TREE_CODE (op) == ADDR_EXPR)
1417 op = get_ref_base_and_extent (TREE_OPERAND (op, 0), &offset,
1418 &size, &max_size, &reverse);
1419 if (size != max_size || max_size == -1)
1421 tci->speculative++;
1422 return csftc_abort_walking_p (tci->speculative);
1424 if (op && TREE_CODE (op) == MEM_REF)
1426 if (!tree_fits_shwi_p (TREE_OPERAND (op, 1)))
1428 tci->speculative++;
1429 return csftc_abort_walking_p (tci->speculative);
1431 offset += tree_to_shwi (TREE_OPERAND (op, 1))
1432 * BITS_PER_UNIT;
1433 op = TREE_OPERAND (op, 0);
1435 else if (DECL_P (op))
1437 else
1439 tci->speculative++;
1440 return csftc_abort_walking_p (tci->speculative);
1442 op = walk_ssa_copies (op);
1444 if (operand_equal_p (op, tci->instance, 0)
1445 && TYPE_SIZE (type)
1446 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
1447 && tree_fits_shwi_p (TYPE_SIZE (type))
1448 && tree_to_shwi (TYPE_SIZE (type)) + offset > tci->offset
1449 /* Some inlined constructors may look as follows:
1450 _3 = operator new (16);
1451 MEM[(struct &)_3] ={v} {CLOBBER};
1452 MEM[(struct CompositeClass *)_3]._vptr.CompositeClass
1453 = &MEM[(void *)&_ZTV14CompositeClass + 16B];
1454 _7 = &MEM[(struct CompositeClass *)_3].object;
1455 EmptyClass::EmptyClass (_7);
1457 When determining the dynamic type of _3, because we stop at the first
1458 dynamic type found, we would stop on EmptyClass::EmptyClass (_7).
1459 In this case EmptyClass is not even polymorphic and we would miss
1460 that it is contained in an outer type that is polymorphic. */
1462 && (tci->offset == offset || contains_polymorphic_type_p (type)))
1464 record_known_type (tci, type, tci->offset - offset);
1465 return true;
1468 /* Calls may possibly change the dynamic type by placement new. Assume
1469 it will not happen, but make the result speculative only. */
1470 if (dump_file)
1472 fprintf (dump_file, " Function call may change dynamic type:");
1473 print_gimple_stmt (dump_file, stmt, 0);
1475 tci->speculative++;
1476 return csftc_abort_walking_p (tci->speculative);
1478 /* Check for inlined virtual table store. */
1479 else if (noncall_stmt_may_be_vtbl_ptr_store (stmt))
1481 tree type;
1482 HOST_WIDE_INT offset = 0;
1483 if (dump_file)
1485 fprintf (dump_file, " Checking vtbl store: ");
1486 print_gimple_stmt (dump_file, stmt, 0);
1489 type = extr_type_from_vtbl_ptr_store (stmt, tci, &offset);
1490 if (type == error_mark_node)
1491 return false;
1492 gcc_assert (!type || TYPE_MAIN_VARIANT (type) == type);
1493 if (!type)
1495 if (dump_file)
1496 fprintf (dump_file, " Unanalyzed store may change type.\n");
1497 tci->seen_unanalyzed_store = true;
1498 tci->speculative++;
1500 else
1501 record_known_type (tci, type, offset);
1502 return true;
1504 else
1505 return false;
1508 /* THIS is a polymorphic call context obtained from get_polymorphic_context.
1509 OTR_OBJECT is a pointer to the instance returned by OBJ_TYPE_REF_OBJECT.
1510 INSTANCE is a pointer to the outer instance as returned by
1511 get_polymorphic_context. To avoid creation of temporary expressions,
1512 INSTANCE may also be a declaration if get_polymorphic_context found the
1513 value to be in static storage.
1515 If the type of the instance is not fully determined
1516 (either OUTER_TYPE is unknown or MAYBE_IN_CONSTRUCTION/INCLUDE_DERIVED_TYPES
1517 is set), try to walk memory writes and find the actual construction of the
1518 instance.
1520 Return true if memory is unchanged from function entry.
1522 We do not include this analysis in the context analysis itself, because
1523 it needs memory SSA to be fully built and the walk may be expensive.
1524 So it is not suitable for use within fold_stmt and similar uses. */
1526 bool
1527 ipa_polymorphic_call_context::get_dynamic_type (tree instance,
1528 tree otr_object,
1529 tree otr_type,
1530 gimple *call)
1532 struct type_change_info tci;
1533 ao_ref ao;
1534 bool function_entry_reached = false;
1535 tree instance_ref = NULL;
1536 gimple *stmt = call;
1537 /* Remember OFFSET before it is modified by restrict_to_inner_class.
1538 This is because we do not update INSTANCE when walking inwards. */
1539 HOST_WIDE_INT instance_offset = offset;
1540 tree instance_outer_type = outer_type;
1542 if (otr_type)
1543 otr_type = TYPE_MAIN_VARIANT (otr_type);
1545 /* Walk into the inner type. This may clear maybe_derived_type and save us
1546 from useless work. It also makes later comparisons with the static type
1547 easier. */
1548 if (outer_type && otr_type)
1550 if (!restrict_to_inner_class (otr_type))
1551 return false;
1554 if (!maybe_in_construction && !maybe_derived_type)
1555 return false;
1557 /* If we are in fact not looking at any object or the instance is
1558 some placement new into a random load, give up straight away. */
1559 if (TREE_CODE (instance) == MEM_REF)
1560 return false;
1562 /* We need to obtain a reference to the virtual table pointer. It is better
1563 to look it up in the code rather than build our own. This requires a bit
1564 of pattern matching, but we end up verifying that what we found is
1565 correct.
1567 What we pattern match is:
1569 tmp = instance->_vptr.A; // vtbl ptr load
1570 tmp2 = tmp[otr_token]; // vtable lookup
1571 OBJ_TYPE_REF(tmp2;instance->0) (instance);
1573 We want to start alias oracle walk from vtbl pointer load,
1574 but we may not be able to identify it, for example, when PRE moved the
1575 load around. */
1577 if (gimple_code (call) == GIMPLE_CALL)
1579 tree ref = gimple_call_fn (call);
1580 HOST_WIDE_INT offset2, size, max_size;
1581 bool reverse;
1583 if (TREE_CODE (ref) == OBJ_TYPE_REF)
1585 ref = OBJ_TYPE_REF_EXPR (ref);
1586 ref = walk_ssa_copies (ref);
1588 /* If call target is already known, no need to do the expensive
1589 memory walk. */
1590 if (is_gimple_min_invariant (ref))
1591 return false;
1593 /* Check if definition looks like vtable lookup. */
1594 if (TREE_CODE (ref) == SSA_NAME
1595 && !SSA_NAME_IS_DEFAULT_DEF (ref)
1596 && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref))
1597 && TREE_CODE (gimple_assign_rhs1
1598 (SSA_NAME_DEF_STMT (ref))) == MEM_REF)
1600 ref = get_base_address
1601 (TREE_OPERAND (gimple_assign_rhs1
1602 (SSA_NAME_DEF_STMT (ref)), 0));
1603 ref = walk_ssa_copies (ref);
1604 /* Find base address of the lookup and see if it looks like
1605 vptr load. */
1606 if (TREE_CODE (ref) == SSA_NAME
1607 && !SSA_NAME_IS_DEFAULT_DEF (ref)
1608 && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref)))
1610 tree ref_exp = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (ref));
1611 tree base_ref
1612 = get_ref_base_and_extent (ref_exp, &offset2, &size,
1613 &max_size, &reverse);
1615 /* Finally verify that what we found looks like read from
1616 OTR_OBJECT or from INSTANCE with offset OFFSET. */
1617 if (base_ref
1618 && ((TREE_CODE (base_ref) == MEM_REF
1619 && ((offset2 == instance_offset
1620 && TREE_OPERAND (base_ref, 0) == instance)
1621 || (!offset2
1622 && TREE_OPERAND (base_ref, 0)
1623 == otr_object)))
1624 || (DECL_P (instance) && base_ref == instance
1625 && offset2 == instance_offset)))
1627 stmt = SSA_NAME_DEF_STMT (ref);
1628 instance_ref = ref_exp;
1635 /* If we failed to look up the reference in code, build our own. */
1636 if (!instance_ref)
1638 /* If the statement in question does not use memory, we can't tell
1639 anything. */
1640 if (!gimple_vuse (stmt))
1641 return false;
1642 ao_ref_init_from_ptr_and_size (&ao, otr_object, NULL);
1644 else
1645 /* Otherwise use the real reference. */
1646 ao_ref_init (&ao, instance_ref);
1648 /* We look for vtbl pointer read. */
1649 ao.size = POINTER_SIZE;
1650 ao.max_size = ao.size;
1651 /* We are looking for stores to the vptr within the instance of
1652 the outer type.
1653 TODO: The vptr type is globally known, we probably should
1654 keep it and do that even when otr_type is unknown. */
1655 if (otr_type)
1657 ao.base_alias_set
1658 = get_alias_set (outer_type ? outer_type : otr_type);
1659 ao.ref_alias_set
1660 = get_alias_set (TREE_TYPE (BINFO_VTABLE (TYPE_BINFO (otr_type))));
1663 if (dump_file)
1665 fprintf (dump_file, "Determining dynamic type for call: ");
1666 print_gimple_stmt (dump_file, call, 0);
1667 fprintf (dump_file, " Starting walk at: ");
1668 print_gimple_stmt (dump_file, stmt, 0);
1669 fprintf (dump_file, " instance pointer: ");
1670 print_generic_expr (dump_file, otr_object, TDF_SLIM);
1671 fprintf (dump_file, " Outer instance pointer: ");
1672 print_generic_expr (dump_file, instance, TDF_SLIM);
1673 fprintf (dump_file, " offset: %i (bits)", (int)instance_offset);
1674 fprintf (dump_file, " vtbl reference: ");
1675 print_generic_expr (dump_file, instance_ref, TDF_SLIM);
1676 fprintf (dump_file, "\n");
1679 tci.offset = instance_offset;
1680 tci.instance = instance;
1681 tci.vtbl_ptr_ref = instance_ref;
1682 tci.known_current_type = NULL_TREE;
1683 tci.known_current_offset = 0;
1684 tci.otr_type = otr_type;
1685 tci.type_maybe_changed = false;
1686 tci.multiple_types_encountered = false;
1687 tci.speculative = 0;
1688 tci.seen_unanalyzed_store = false;
1690 walk_aliased_vdefs (&ao, gimple_vuse (stmt), check_stmt_for_type_change,
1691 &tci, NULL, &function_entry_reached);
1693 /* If we did not find any type changing statements, we may still drop
1694 the maybe_in_construction flag if the context already has an outer type.
1696 Here we make special assumptions about both constructors and
1697 destructors which are all the functions that are allowed to alter the
1698 VMT pointers. It assumes that destructors begin with assignment into
1699 all VMT pointers and that constructors essentially look in the
1700 following way:
1702 1) The very first thing they do is that they call constructors of
1703 ancestor sub-objects that have them.
1705 2) Then the VMT pointers of this and all its ancestors are set to new
1706 values corresponding to the type of the constructor.
1708 3) Only afterwards, other stuff such as constructor of member
1709 sub-objects and the code written by the user is run. Only this may
1710 include calling virtual functions, directly or indirectly.
1712 4) Placement new cannot be used to change the type of non-POD statically
1713 allocated variables.
1715 There is no way to call a constructor of an ancestor sub-object in any
1716 other way.
1718 This means that we do not have to care whether constructors get the
1719 correct type information because they will always change it (in fact,
1720 if we define the type to be given by the VMT pointer, it is undefined).
1722 The most important fact to derive from the above is that if, for some
1723 statement in the section 3, we try to detect whether the dynamic type
1724 has changed, we can safely ignore all calls as we examine the function
1725 body backwards until we reach statements in section 2 because these
1726 calls cannot be ancestor constructors or destructors (if the input is
1727 not bogus) and so do not change the dynamic type (this holds true only
1728 for automatically allocated objects but at the moment we devirtualize
1729 only these). We then must detect that statements in section 2 change
1730 the dynamic type and can try to derive the new type. That is enough
1731 and we can stop, we will never see the calls into constructors of
1732 sub-objects in this code.
1734 Therefore if the static outer type was found (outer_type)
1735 we can safely ignore tci.speculative that is set on calls and give up
1736 only if there was a dynamic type store that may affect the given variable
1737 (seen_unanalyzed_store). */
1739 if (!tci.type_maybe_changed
1740 || (outer_type
1741 && !dynamic
1742 && !tci.seen_unanalyzed_store
1743 && !tci.multiple_types_encountered
1744 && ((offset == tci.offset
1745 && types_same_for_odr (tci.known_current_type,
1746 outer_type))
1747 || (instance_offset == offset
1748 && types_same_for_odr (tci.known_current_type,
1749 instance_outer_type)))))
1751 if (!outer_type || tci.seen_unanalyzed_store)
1752 return false;
1753 if (maybe_in_construction)
1754 maybe_in_construction = false;
1755 if (dump_file)
1756 fprintf (dump_file, " No dynamic type change found.\n");
1757 return true;
1760 if (tci.known_current_type
1761 && !function_entry_reached
1762 && !tci.multiple_types_encountered)
1764 if (!tci.speculative)
1766 outer_type = TYPE_MAIN_VARIANT (tci.known_current_type);
1767 offset = tci.known_current_offset;
1768 dynamic = true;
1769 maybe_in_construction = false;
1770 maybe_derived_type = false;
1771 if (dump_file)
1772 fprintf (dump_file, " Determined dynamic type.\n");
1774 else if (!speculative_outer_type
1775 || speculative_maybe_derived_type)
1777 speculative_outer_type = TYPE_MAIN_VARIANT (tci.known_current_type);
1778 speculative_offset = tci.known_current_offset;
1779 speculative_maybe_derived_type = false;
1780 if (dump_file)
1781 fprintf (dump_file, " Determined speculative dynamic type.\n");
1784 else if (dump_file)
1786 fprintf (dump_file, " Found multiple types%s%s\n",
1787 function_entry_reached ? " (function entry reached)" : "",
1788 tci.multiple_types_encountered ? " (multiple types encountered)" : "");
1791 return false;
1794 /* See if speculation given by SPEC_OUTER_TYPE, SPEC_OFFSET and SPEC_MAYBE_DERIVED_TYPE
1795 seems consistent (and useful) with what we already have in the non-speculative context. */
1797 bool
1798 ipa_polymorphic_call_context::speculation_consistent_p (tree spec_outer_type,
1799 HOST_WIDE_INT spec_offset,
1800 bool spec_maybe_derived_type,
1801 tree otr_type) const
1803 if (!flag_devirtualize_speculatively)
1804 return false;
1806 /* Non-polymorphic types are useless for deriving likely polymorphic
1807 call targets. */
1808 if (!spec_outer_type || !contains_polymorphic_type_p (spec_outer_type))
1809 return false;
1811 /* If we know nothing, speculation is always good. */
1812 if (!outer_type)
1813 return true;
1815 /* Speculation is only useful to avoid derived types.
1816 This is not 100% true for placement new, where the outer context may
1817 turn out to be useless, but ignore these for now. */
1818 if (!maybe_derived_type)
1819 return false;
1821 /* If the types agree, speculation is consistent, but it makes sense only
1822 when it says something new. */
1823 if (types_must_be_same_for_odr (spec_outer_type, outer_type))
1824 return maybe_derived_type && !spec_maybe_derived_type;
1826 /* If speculation does not contain the type in question, ignore it. */
1827 if (otr_type
1828 && !contains_type_p (spec_outer_type, spec_offset, otr_type, false, true))
1829 return false;
1831 /* If the outer type already contains the speculative type as a field,
1832 the speculation is useless. We already know from OUTER_TYPE that
1833 SPEC_TYPE is present and that it is not in construction. */
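/* For illustration (hypothetical types, for illustration only): if
   OUTER_TYPE is A below and we already know the object lies within A at
   the offset of the field b,

     struct B { virtual void foo () {} };
     struct A { int i; B b; virtual void bar () {} };

   then speculating "it is really a B" only repeats what the offset
   already tells us, so such speculation is rejected here.  */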
1834 if (contains_type_p (outer_type, offset - spec_offset,
1835 spec_outer_type, false, false))
1836 return false;
1838 /* If the speculative outer type is no more specific than the outer
1839 type, just give up.
1840 We can only decide this safely if we can compare types with OUTER_TYPE. */
1842 if ((!in_lto_p || odr_type_p (outer_type))
1843 && !contains_type_p (spec_outer_type,
1844 spec_offset - offset,
1845 outer_type, false))
1846 return false;
1847 return true;
1850 /* Improve THIS with speculation described by NEW_OUTER_TYPE, NEW_OFFSET
1851 and NEW_MAYBE_DERIVED_TYPE.
1852 If OTR_TYPE is set, assume the context is used with OTR_TYPE. */
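/* For illustration (hypothetical types, for illustration only): if the
   current speculation is "somewhere within A, possibly a derived type"
   and the new speculation is "exactly B at the same position", where

     struct A { virtual void foo () {} };
     struct B : A { virtual void foo () {} };

   the new speculation wins because it no longer allows further
   derivation.  If both speculations name the same type but at different
   offsets, the speculation is dropped as invalid instead.  */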
1854 bool
1855 ipa_polymorphic_call_context::combine_speculation_with
1856 (tree new_outer_type, HOST_WIDE_INT new_offset, bool new_maybe_derived_type,
1857 tree otr_type)
1859 if (!new_outer_type)
1860 return false;
1862 /* restrict_to_inner_class may eliminate wrong speculation, making our job
1863 easier. */
1864 if (otr_type)
1865 restrict_to_inner_class (otr_type);
1867 if (!speculation_consistent_p (new_outer_type, new_offset,
1868 new_maybe_derived_type, otr_type))
1869 return false;
1871 /* New speculation is a win in case we have no speculation or new
1872 speculation does not consider derivations. */
1873 if (!speculative_outer_type
1874 || (speculative_maybe_derived_type
1875 && !new_maybe_derived_type))
1877 speculative_outer_type = new_outer_type;
1878 speculative_offset = new_offset;
1879 speculative_maybe_derived_type = new_maybe_derived_type;
1880 return true;
1882 else if (types_must_be_same_for_odr (speculative_outer_type,
1883 new_outer_type))
1885 if (speculative_offset != new_offset)
1887 /* OK, we have two contexts that seem valid but they disagree;
1888 just give up.
1890 This is not a lattice operation, so we may want to drop it later. */
1891 if (dump_file && (dump_flags & TDF_DETAILS))
1892 fprintf (dump_file,
1893 "Speculative outer types match, "
1894 "offset mismatch -> invalid speculation\n");
1895 clear_speculation ();
1896 return true;
1898 else
1900 if (speculative_maybe_derived_type && !new_maybe_derived_type)
1902 speculative_maybe_derived_type = false;
1903 return true;
1905 else
1906 return false;
1909 /* Choose the type that contains the other. This one either contains the outer
1910 as a field (thus giving exactly one target) or is deeper in the type
1911 hierarchy. */
1912 else if (speculative_outer_type
1913 && speculative_maybe_derived_type
1914 && (new_offset > speculative_offset
1915 || (new_offset == speculative_offset
1916 && contains_type_p (new_outer_type,
1917 0, speculative_outer_type, false))))
1919 tree old_outer_type = speculative_outer_type;
1920 HOST_WIDE_INT old_offset = speculative_offset;
1921 bool old_maybe_derived_type = speculative_maybe_derived_type;
1923 speculative_outer_type = new_outer_type;
1924 speculative_offset = new_offset;
1925 speculative_maybe_derived_type = new_maybe_derived_type;
1927 if (otr_type)
1928 restrict_to_inner_class (otr_type);
1930 /* If the speculation turned out to make no sense, revert to the
1931 previous one. */
1932 if (!speculative_outer_type)
1934 speculative_outer_type = old_outer_type;
1935 speculative_offset = old_offset;
1936 speculative_maybe_derived_type = old_maybe_derived_type;
1937 return false;
1939 return (old_offset != speculative_offset
1940 || old_maybe_derived_type != speculative_maybe_derived_type
1941 || types_must_be_same_for_odr (speculative_outer_type,
1942 new_outer_type));
1944 return false;
1947 /* Make the speculation less specific so that
1948 NEW_OUTER_TYPE, NEW_OFFSET and NEW_MAYBE_DERIVED_TYPE are also included.
1949 If OTR_TYPE is set, assume the context is used with OTR_TYPE. */
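/* For illustration (hypothetical types, for illustration only): if the
   current speculation is "exactly B" and the incoming speculation is
   its base "A at the corresponding offset", where

     struct A { virtual void foo () {} };
     struct B : A { virtual void foo () {} };

   the meet keeps the base A and allows derived types again, so both
   original speculations remain covered.  If the two speculations cannot
   be related at all, the speculation is cleared.  */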
1951 bool
1952 ipa_polymorphic_call_context::meet_speculation_with
1953 (tree new_outer_type, HOST_WIDE_INT new_offset, bool new_maybe_derived_type,
1954 tree otr_type)
1956 if (!new_outer_type && speculative_outer_type)
1958 clear_speculation ();
1959 return true;
1962 /* restrict_to_inner_class may eliminate wrong speculation, making our job
1963 easier. */
1964 if (otr_type)
1965 restrict_to_inner_class (otr_type);
1967 if (!speculative_outer_type
1968 || !speculation_consistent_p (speculative_outer_type,
1969 speculative_offset,
1970 speculative_maybe_derived_type,
1971 otr_type))
1972 return false;
1974 if (!speculation_consistent_p (new_outer_type, new_offset,
1975 new_maybe_derived_type, otr_type))
1977 clear_speculation ();
1978 return true;
1981 else if (types_must_be_same_for_odr (speculative_outer_type,
1982 new_outer_type))
1984 if (speculative_offset != new_offset)
1986 clear_speculation ();
1987 return true;
1989 else
1991 if (!speculative_maybe_derived_type && new_maybe_derived_type)
1993 speculative_maybe_derived_type = true;
1994 return true;
1996 else
1997 return false;
2000 /* See if one type contains the other as a field (not base). */
2001 else if (contains_type_p (new_outer_type, new_offset - speculative_offset,
2002 speculative_outer_type, false, false))
2003 return false;
2004 else if (contains_type_p (speculative_outer_type,
2005 speculative_offset - new_offset,
2006 new_outer_type, false, false))
2008 speculative_outer_type = new_outer_type;
2009 speculative_offset = new_offset;
2010 speculative_maybe_derived_type = new_maybe_derived_type;
2011 return true;
2013 /* See if SPECULATIVE_OUTER_TYPE is a base of NEW_OUTER_TYPE. */
2014 else if (contains_type_p (new_outer_type,
2015 new_offset - speculative_offset,
2016 speculative_outer_type, false, true))
2018 if (!speculative_maybe_derived_type)
2020 speculative_maybe_derived_type = true;
2021 return true;
2023 return false;
2025 /* See if NEW_OUTER_TYPE is a base of SPECULATIVE_OUTER_TYPE. */
2026 else if (contains_type_p (speculative_outer_type,
2027 speculative_offset - new_offset, new_outer_type, false, true))
2029 speculative_outer_type = new_outer_type;
2030 speculative_offset = new_offset;
2031 speculative_maybe_derived_type = true;
2032 return true;
2034 else
2036 if (dump_file && (dump_flags & TDF_DETAILS))
2037 fprintf (dump_file, "Giving up on speculative meet\n");
2038 clear_speculation ();
2039 return true;
2043 /* Assume that both THIS and the given context are valid and strengthen THIS
2044 if possible. Return true if any strengthening was made.
2045 If the actual type the context is being used in is known, OTR_TYPE should be
2046 set accordingly. This improves the quality of the combined result. */
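/* For illustration (hypothetical types, for illustration only): if
   THIS says "an object of A, possibly a derived type" and CTX says
   "an object of B", where

     struct A { virtual void foo () {} };
     struct B : A { virtual void foo () {} };

   the combined context can be strengthened to B, the type deeper in the
   hierarchy.  If instead both contexts name the same outer type but at
   incompatible offsets, the combination is known to be invalid.  */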
2048 bool
2049 ipa_polymorphic_call_context::combine_with (ipa_polymorphic_call_context ctx,
2050 tree otr_type)
2052 bool updated = false;
2054 if (ctx.useless_p () || invalid)
2055 return false;
2057 /* Restricting the context to the inner type makes merging easier; however, do not
2058 do that unless we know how the context is used (OTR_TYPE is non-NULL) */
2059 if (otr_type && !invalid && !ctx.invalid)
2061 restrict_to_inner_class (otr_type);
2062 ctx.restrict_to_inner_class (otr_type);
2063 if(invalid)
2064 return false;
2067 if (dump_file && (dump_flags & TDF_DETAILS))
2069 fprintf (dump_file, "Polymorphic call context combine:");
2070 dump (dump_file);
2071 fprintf (dump_file, "With context: ");
2072 ctx.dump (dump_file);
2073 if (otr_type)
2075 fprintf (dump_file, "To be used with type: ");
2076 print_generic_expr (dump_file, otr_type, TDF_SLIM);
2077 fprintf (dump_file, "\n");
2081 /* If call is known to be invalid, we are done. */
2082 if (ctx.invalid)
2084 if (dump_file && (dump_flags & TDF_DETAILS))
2085 fprintf (dump_file, "-> Invalid context\n");
2086 goto invalidate;
2089 if (!ctx.outer_type)
2091 else if (!outer_type)
2093 outer_type = ctx.outer_type;
2094 offset = ctx.offset;
2095 dynamic = ctx.dynamic;
2096 maybe_in_construction = ctx.maybe_in_construction;
2097 maybe_derived_type = ctx.maybe_derived_type;
2098 updated = true;
2100 /* If types are known to be the same, merging is quite easy. */
2101 else if (types_must_be_same_for_odr (outer_type, ctx.outer_type))
2103 if (offset != ctx.offset
2104 && TYPE_SIZE (outer_type)
2105 && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST)
2107 if (dump_file && (dump_flags & TDF_DETAILS))
2108 fprintf (dump_file, "Outer types match, offset mismatch -> invalid\n");
2109 clear_speculation ();
2110 clear_outer_type ();
2111 invalid = true;
2112 return true;
2114 if (dump_file && (dump_flags & TDF_DETAILS))
2115 fprintf (dump_file, "Outer types match, merging flags\n");
2116 if (maybe_in_construction && !ctx.maybe_in_construction)
2118 updated = true;
2119 maybe_in_construction = false;
2121 if (maybe_derived_type && !ctx.maybe_derived_type)
2123 updated = true;
2124 maybe_derived_type = false;
2126 if (dynamic && !ctx.dynamic)
2128 updated = true;
2129 dynamic = false;
2132 /* If we know the type precisely, there is not much to improve. */
2133 else if (!maybe_derived_type && !maybe_in_construction
2134 && !ctx.maybe_derived_type && !ctx.maybe_in_construction)
2136 /* It may be easy to check if second context permits the first
2137 and set INVALID otherwise. This is not easy to do in general;
2138 contains_type_p may return false negatives for non-comparable
2139 types.
2141 If OTR_TYPE is known, however, we can expect that
2142 restrict_to_inner_class should have discovered the same base
2143 type. */
2144 if (otr_type && !ctx.maybe_in_construction && !ctx.maybe_derived_type)
2146 if (dump_file && (dump_flags & TDF_DETAILS))
2147 fprintf (dump_file, "Contexts disagree -> invalid\n");
2148 goto invalidate;
2151 /* See if one type contains the other as a field (not base).
2152 In this case we want to choose the wider type, because it contains
2153 more information. */
2154 else if (contains_type_p (ctx.outer_type, ctx.offset - offset,
2155 outer_type, false, false))
2157 if (dump_file && (dump_flags & TDF_DETAILS))
2158 fprintf (dump_file, "Second type contains the first as a field\n");
2160 if (maybe_derived_type)
2162 outer_type = ctx.outer_type;
2163 maybe_derived_type = ctx.maybe_derived_type;
2164 offset = ctx.offset;
2165 dynamic = ctx.dynamic;
2166 updated = true;
2169 /* If we do not know how the context is being used, we cannot
2170 clear MAYBE_IN_CONSTRUCTION because it may be offset
2171 to another component of OUTER_TYPE later and we know nothing
2172 about it. */
2173 if (otr_type && maybe_in_construction
2174 && !ctx.maybe_in_construction)
2176 maybe_in_construction = false;
2177 updated = true;
2180 else if (contains_type_p (outer_type, offset - ctx.offset,
2181 ctx.outer_type, false, false))
2183 if (dump_file && (dump_flags & TDF_DETAILS))
2184 fprintf (dump_file, "First type contains the second as a field\n");
2186 if (otr_type && maybe_in_construction
2187 && !ctx.maybe_in_construction)
2189 maybe_in_construction = false;
2190 updated = true;
2193 /* See if OUTER_TYPE is base of CTX.OUTER_TYPE. */
2194 else if (contains_type_p (ctx.outer_type,
2195 ctx.offset - offset, outer_type, false, true))
2197 if (dump_file && (dump_flags & TDF_DETAILS))
2198 fprintf (dump_file, "First type is base of second\n");
2199 if (!maybe_derived_type)
2201 if (!ctx.maybe_in_construction
2202 && types_odr_comparable (outer_type, ctx.outer_type))
2204 if (dump_file && (dump_flags & TDF_DETAILS))
2205 fprintf (dump_file, "Second context does not permit base -> invalid\n");
2206 goto invalidate;
2209 /* Pick the variant deeper in the hierarchy. */
2210 else
2212 outer_type = ctx.outer_type;
2213 maybe_in_construction = ctx.maybe_in_construction;
2214 maybe_derived_type = ctx.maybe_derived_type;
2215 offset = ctx.offset;
2216 dynamic = ctx.dynamic;
2217 updated = true;
2220 /* See if CTX.OUTER_TYPE is base of OUTER_TYPE. */
2221 else if (contains_type_p (outer_type,
2222 offset - ctx.offset, ctx.outer_type, false, true))
2224 if (dump_file && (dump_flags & TDF_DETAILS))
2225 fprintf (dump_file, "Second type is base of first\n");
2226 if (!ctx.maybe_derived_type)
2228 if (!maybe_in_construction
2229 && types_odr_comparable (outer_type, ctx.outer_type))
2231 if (dump_file && (dump_flags & TDF_DETAILS))
2232 fprintf (dump_file, "First context does not permit base -> invalid\n");
2233 goto invalidate;
2235 /* Pick the base type. */
2236 else if (maybe_in_construction)
2238 outer_type = ctx.outer_type;
2239 maybe_in_construction = ctx.maybe_in_construction;
2240 maybe_derived_type = ctx.maybe_derived_type;
2241 offset = ctx.offset;
2242 dynamic = ctx.dynamic;
2243 updated = true;
2247 /* TODO handle merging using the hierarchy. */
2248 else if (dump_file && (dump_flags & TDF_DETAILS))
2249 fprintf (dump_file, "Giving up on merge\n");
2251 updated |= combine_speculation_with (ctx.speculative_outer_type,
2252 ctx.speculative_offset,
2253 ctx.speculative_maybe_derived_type,
2254 otr_type);
2256 if (updated && dump_file && (dump_flags & TDF_DETAILS))
2258 fprintf (dump_file, "Updated as: ");
2259 dump (dump_file);
2260 fprintf (dump_file, "\n");
2262 return updated;
2264 invalidate:
2265 invalid = true;
2266 clear_speculation ();
2267 clear_outer_type ();
2268 return true;
2271 /* Take the non-speculative info, merge it with the speculative info and clear
2272 the speculation. Used when we no longer manage to keep track of the actual
2273 outer type, but we think it is still there.
2275 If OTR_TYPE is set, the transformation can be done more effectively assuming
2276 that the context is going to be used only that way. */
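/* For illustration (hypothetical type, for illustration only): a
   context that currently says "an object of A at offset 0", with

     struct A { virtual void foo () {} };

   becomes a context with no known outer type but with the speculative
   assumption "probably still an A at offset 0", which later passes can
   still use to guess likely call targets.  */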
2278 void
2279 ipa_polymorphic_call_context::make_speculative (tree otr_type)
2281 tree spec_outer_type = outer_type;
2282 HOST_WIDE_INT spec_offset = offset;
2283 bool spec_maybe_derived_type = maybe_derived_type;
2285 if (invalid)
2287 invalid = false;
2288 clear_outer_type ();
2289 clear_speculation ();
2290 return;
2292 if (!outer_type)
2293 return;
2294 clear_outer_type ();
2295 combine_speculation_with (spec_outer_type, spec_offset,
2296 spec_maybe_derived_type,
2297 otr_type);
2300 /* Use when we cannot track a dynamic type change. This speculatively assumes
2301 the type change is not happening. */
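/* For illustration (hypothetical code, for illustration only): if the
   dynamic type of an object was derived from its constructor but the
   object then escapes into a call we cannot analyze,

     struct A { virtual void foo () {} };
     void opaque (A *);        // body unknown to the analysis

     void
     test ()
     {
       A a;                    // dynamic type A known from the constructor
       opaque (&a);            // may, in principle, placement-new over a
       a.foo ();               // A is now kept only as a speculation
     }

   the previously derived type is demoted to a speculation instead of
   being dropped entirely.  */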
2303 void
2304 ipa_polymorphic_call_context::possible_dynamic_type_change (bool in_poly_cdtor,
2305 tree otr_type)
2307 if (dynamic)
2308 make_speculative (otr_type);
2309 else if (in_poly_cdtor)
2310 maybe_in_construction = true;
2313 /* Return TRUE if this context conveys the same information as OTHER. */
2315 bool
2316 ipa_polymorphic_call_context::equal_to
2317 (const ipa_polymorphic_call_context &x) const
2319 if (useless_p ())
2320 return x.useless_p ();
2321 if (invalid)
2322 return x.invalid;
2323 if (x.useless_p () || x.invalid)
2324 return false;
2326 if (outer_type)
2328 if (!x.outer_type
2329 || !types_odr_comparable (outer_type, x.outer_type)
2330 || !types_same_for_odr (outer_type, x.outer_type)
2331 || offset != x.offset
2332 || maybe_in_construction != x.maybe_in_construction
2333 || maybe_derived_type != x.maybe_derived_type
2334 || dynamic != x.dynamic)
2335 return false;
2337 else if (x.outer_type)
2338 return false;
2341 if (speculative_outer_type
2342 && speculation_consistent_p (speculative_outer_type, speculative_offset,
2343 speculative_maybe_derived_type, NULL_TREE))
2345 if (!x.speculative_outer_type)
2346 return false;
2348 if (!types_odr_comparable (speculative_outer_type,
2349 x.speculative_outer_type)
2350 || !types_same_for_odr (speculative_outer_type,
2351 x.speculative_outer_type)
2352 || speculative_offset != x.speculative_offset
2353 || speculative_maybe_derived_type != x.speculative_maybe_derived_type)
2354 return false;
2356 else if (x.speculative_outer_type
2357 && x.speculation_consistent_p (x.speculative_outer_type,
2358 x.speculative_offset,
2359 x.speculative_maybe_derived_type,
2360 NULL))
2361 return false;
2363 return true;
2366 /* Modify the context so that it is no more restrictive than CTX
(the meet operation). Return true if anything changed. */
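/* For illustration (hypothetical types, for illustration only): the
   meet of a context saying "an object of B" with one saying "an object
   of A", where

     struct A { virtual void foo () {} };
     struct B : A { virtual void foo () {} };

   is "an object of A, possibly a derived type", which is general enough
   to cover both inputs.  When the two outer types cannot be related at
   all, the outer type is cleared altogether.  */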
2368 bool
2369 ipa_polymorphic_call_context::meet_with (ipa_polymorphic_call_context ctx,
2370 tree otr_type)
2372 bool updated = false;
2374 if (useless_p () || ctx.invalid)
2375 return false;
2377 /* Restricting the context to the inner type makes merging easier; however, do not
2378 do that unless we know how the context is used (OTR_TYPE is non-NULL) */
2379 if (otr_type && !useless_p () && !ctx.useless_p ())
2381 restrict_to_inner_class (otr_type);
2382 ctx.restrict_to_inner_class (otr_type);
2383 if(invalid)
2384 return false;
2387 if (equal_to (ctx))
2388 return false;
2390 if (ctx.useless_p () || invalid)
2392 *this = ctx;
2393 return true;
2396 if (dump_file && (dump_flags & TDF_DETAILS))
2398 fprintf (dump_file, "Polymorphic call context meet:");
2399 dump (dump_file);
2400 fprintf (dump_file, "With context: ");
2401 ctx.dump (dump_file);
2402 if (otr_type)
2404 fprintf (dump_file, "To be used with type: ");
2405 print_generic_expr (dump_file, otr_type, TDF_SLIM);
2406 fprintf (dump_file, "\n");
2410 if (!dynamic && ctx.dynamic)
2412 dynamic = true;
2413 updated = true;
2416 /* If we know no outer type, there is nothing to generalize; if CTX knows none, drop ours too. */
2417 if (!outer_type)
2419 else if (!ctx.outer_type)
2421 clear_outer_type ();
2422 updated = true;
2424 /* If types are known to be the same, merging is quite easy. */
2425 else if (types_must_be_same_for_odr (outer_type, ctx.outer_type))
2427 if (offset != ctx.offset
2428 && TYPE_SIZE (outer_type)
2429 && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST)
2431 if (dump_file && (dump_flags & TDF_DETAILS))
2432 fprintf (dump_file, "Outer types match, offset mismatch -> clearing\n");
2433 clear_outer_type ();
2434 return true;
2436 if (dump_file && (dump_flags & TDF_DETAILS))
2437 fprintf (dump_file, "Outer types match, merging flags\n");
2438 if (!maybe_in_construction && ctx.maybe_in_construction)
2440 updated = true;
2441 maybe_in_construction = true;
2443 if (!maybe_derived_type && ctx.maybe_derived_type)
2445 updated = true;
2446 maybe_derived_type = true;
2448 if (!dynamic && ctx.dynamic)
2450 updated = true;
2451 dynamic = true;
2454 /* See if one type contains the other as a field (not base). */
2455 else if (contains_type_p (ctx.outer_type, ctx.offset - offset,
2456 outer_type, false, false))
2458 if (dump_file && (dump_flags & TDF_DETAILS))
2459 fprintf (dump_file, "Second type contains the first as a field\n");
2461 /* The second type is more specific, so we keep the first.
2462 We need to set the DYNAMIC flag to avoid declaring the context INVALID
2463 if OFFSET ends up being out of range. */
2464 if (!dynamic
2465 && (ctx.dynamic
2466 || (!otr_type
2467 && (!TYPE_SIZE (ctx.outer_type)
2468 || !TYPE_SIZE (outer_type)
2469 || !operand_equal_p (TYPE_SIZE (ctx.outer_type),
2470 TYPE_SIZE (outer_type), 0)))))
2472 dynamic = true;
2473 updated = true;
2476 else if (contains_type_p (outer_type, offset - ctx.offset,
2477 ctx.outer_type, false, false))
2479 if (dump_file && (dump_flags & TDF_DETAILS))
2480 fprintf (dump_file, "First type contains the second as a field\n");
2482 if (!dynamic
2483 && (ctx.dynamic
2484 || (!otr_type
2485 && (!TYPE_SIZE (ctx.outer_type)
2486 || !TYPE_SIZE (outer_type)
2487 || !operand_equal_p (TYPE_SIZE (ctx.outer_type),
2488 TYPE_SIZE (outer_type), 0)))))
2489 dynamic = true;
2490 outer_type = ctx.outer_type;
2491 offset = ctx.offset;
2492 dynamic = ctx.dynamic;
2493 maybe_in_construction = ctx.maybe_in_construction;
2494 maybe_derived_type = ctx.maybe_derived_type;
2495 updated = true;
2497 /* See if OUTER_TYPE is base of CTX.OUTER_TYPE. */
2498 else if (contains_type_p (ctx.outer_type,
2499 ctx.offset - offset, outer_type, false, true))
2501 if (dump_file && (dump_flags & TDF_DETAILS))
2502 fprintf (dump_file, "First type is base of second\n");
2503 if (!maybe_derived_type)
2505 maybe_derived_type = true;
2506 updated = true;
2508 if (!maybe_in_construction && ctx.maybe_in_construction)
2510 maybe_in_construction = true;
2511 updated = true;
2513 if (!dynamic && ctx.dynamic)
2515 dynamic = true;
2516 updated = true;
2519 /* See if CTX.OUTER_TYPE is base of OUTER_TYPE. */
2520 else if (contains_type_p (outer_type,
2521 offset - ctx.offset, ctx.outer_type, false, true))
2523 if (dump_file && (dump_flags & TDF_DETAILS))
2524 fprintf (dump_file, "Second type is base of first\n");
2525 outer_type = ctx.outer_type;
2526 offset = ctx.offset;
2527 updated = true;
2528 if (!maybe_derived_type)
2529 maybe_derived_type = true;
2530 if (!maybe_in_construction && ctx.maybe_in_construction)
2531 maybe_in_construction = true;
2532 if (!dynamic && ctx.dynamic)
2533 dynamic = true;
2535 /* TODO handle merging using the hierarchy. */
2536 else
2538 if (dump_file && (dump_flags & TDF_DETAILS))
2539 fprintf (dump_file, "Giving up on meet\n");
2540 clear_outer_type ();
2541 updated = true;
2544 updated |= meet_speculation_with (ctx.speculative_outer_type,
2545 ctx.speculative_offset,
2546 ctx.speculative_maybe_derived_type,
2547 otr_type);
2549 if (updated && dump_file && (dump_flags & TDF_DETAILS))
2551 fprintf (dump_file, "Updated as: ");
2552 dump (dump_file);
2553 fprintf (dump_file, "\n");
2555 return updated;