1 /* Analysis of polymorphic call context.
2 Copyright (C) 2013-2016 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
28 #include "tree-pass.h"
29 #include "tree-ssa-operands.h"
30 #include "streamer-hooks.h"
32 #include "data-streamer.h"
33 #include "diagnostic.h"
35 #include "fold-const.h"
37 #include "ipa-utils.h"
39 #include "gimple-pretty-print.h"
40 #include "tree-into-ssa.h"
43 /* Return true when TYPE contains an polymorphic type and thus is interesting
44 for devirtualization machinery. */
46 static bool contains_type_p (tree
, HOST_WIDE_INT
, tree
,
47 bool consider_placement_new
= true,
48 bool consider_bases
= true);
51 contains_polymorphic_type_p (const_tree type
)
53 type
= TYPE_MAIN_VARIANT (type
);
55 if (RECORD_OR_UNION_TYPE_P (type
))
58 && polymorphic_type_binfo_p (TYPE_BINFO (type
)))
60 for (tree fld
= TYPE_FIELDS (type
); fld
; fld
= DECL_CHAIN (fld
))
61 if (TREE_CODE (fld
) == FIELD_DECL
62 && !DECL_ARTIFICIAL (fld
)
63 && contains_polymorphic_type_p (TREE_TYPE (fld
)))
67 if (TREE_CODE (type
) == ARRAY_TYPE
)
68 return contains_polymorphic_type_p (TREE_TYPE (type
));
72 /* Return true if it seems valid to use placement new to build EXPECTED_TYPE
73 at possition CUR_OFFSET within TYPE.
75 POD can be changed to an instance of a polymorphic type by
76 placement new. Here we play safe and assume that any
77 non-polymorphic type is POD. */
79 possible_placement_new (tree type
, tree expected_type
,
80 HOST_WIDE_INT cur_offset
)
84 return ((TREE_CODE (type
) != RECORD_TYPE
86 || cur_offset
>= POINTER_SIZE
87 || !polymorphic_type_binfo_p (TYPE_BINFO (type
)))
89 || !tree_fits_shwi_p (TYPE_SIZE (type
))
91 + (expected_type
? tree_to_uhwi (TYPE_SIZE (expected_type
))
93 <= tree_to_uhwi (TYPE_SIZE (type
)))));
96 /* THIS->OUTER_TYPE is a type of memory object where object of OTR_TYPE
97 is contained at THIS->OFFSET. Walk the memory representation of
98 THIS->OUTER_TYPE and find the outermost class type that match
99 OTR_TYPE or contain OTR_TYPE as a base. Update THIS
102 If OTR_TYPE is NULL, just find outermost polymorphic type with
103 virtual table present at possition OFFSET.
105 For example when THIS represents type
111 and we look for type at offset sizeof(int), we end up with B and offset 0.
112 If the same is produced by multiple inheritance, we end up with A and offset
115 If we can not find corresponding class, give up by setting
116 THIS->OUTER_TYPE to OTR_TYPE and THIS->OFFSET to NULL.
117 Return true when lookup was sucesful.
119 When CONSIDER_PLACEMENT_NEW is false, reject contexts that may be made
120 valid only via alocation of new polymorphic type inside by means
123 When CONSIDER_BASES is false, only look for actual fields, not base types
127 ipa_polymorphic_call_context::restrict_to_inner_class (tree otr_type
,
128 bool consider_placement_new
,
131 tree type
= outer_type
;
132 HOST_WIDE_INT cur_offset
= offset
;
133 bool speculative
= false;
134 bool size_unknown
= false;
135 unsigned HOST_WIDE_INT otr_type_size
= POINTER_SIZE
;
137 /* Update OUTER_TYPE to match EXPECTED_TYPE if it is not set. */
140 clear_outer_type (otr_type
);
144 /* See if OFFSET points inside OUTER_TYPE. If it does not, we know
145 that the context is either invalid, or the instance type must be
146 derived from OUTER_TYPE.
148 Because the instance type may contain field whose type is of OUTER_TYPE,
149 we can not derive any effective information about it.
151 TODO: In the case we know all derrived types, we can definitely do better
153 else if (TYPE_SIZE (outer_type
)
154 && tree_fits_shwi_p (TYPE_SIZE (outer_type
))
155 && tree_to_shwi (TYPE_SIZE (outer_type
)) >= 0
156 && tree_to_shwi (TYPE_SIZE (outer_type
)) <= offset
)
158 bool der
= maybe_derived_type
; /* clear_outer_type will reset it. */
160 clear_outer_type (otr_type
);
164 /* If derived type is not allowed, we know that the context is invalid.
165 For dynamic types, we really do not have information about
166 size of the memory location. It is possible that completely
167 different type is stored after outer_type. */
170 clear_speculation ();
176 if (otr_type
&& TYPE_SIZE (otr_type
)
177 && tree_fits_shwi_p (TYPE_SIZE (otr_type
)))
178 otr_type_size
= tree_to_uhwi (TYPE_SIZE (otr_type
));
180 if (!type
|| offset
< 0)
181 goto no_useful_type_info
;
183 /* Find the sub-object the constant actually refers to and mark whether it is
184 an artificial one (as opposed to a user-defined one).
186 This loop is performed twice; first time for outer_type and second time
187 for speculative_outer_type. The second run has SPECULATIVE set. */
190 unsigned HOST_WIDE_INT pos
, size
;
193 /* If we do not know size of TYPE, we need to be more conservative
194 about accepting cases where we can not find EXPECTED_TYPE.
195 Generally the types that do matter here are of constant size.
196 Size_unknown case should be very rare. */
198 && tree_fits_shwi_p (TYPE_SIZE (type
))
199 && tree_to_shwi (TYPE_SIZE (type
)) >= 0)
200 size_unknown
= false;
204 /* On a match, just return what we found. */
206 && types_odr_comparable (type
, otr_type
)
207 && types_same_for_odr (type
, otr_type
))
209 && TREE_CODE (type
) == RECORD_TYPE
211 && polymorphic_type_binfo_p (TYPE_BINFO (type
))))
215 /* If we did not match the offset, just give up on speculation. */
217 /* Also check if speculation did not end up being same as
219 || (types_must_be_same_for_odr (speculative_outer_type
,
221 && (maybe_derived_type
222 == speculative_maybe_derived_type
)))
223 clear_speculation ();
228 /* If type is known to be final, do not worry about derived
229 types. Testing it here may help us to avoid speculation. */
230 if (otr_type
&& TREE_CODE (outer_type
) == RECORD_TYPE
231 && (!in_lto_p
|| odr_type_p (outer_type
))
232 && type_with_linkage_p (outer_type
)
233 && type_known_to_have_no_derivations_p (outer_type
))
234 maybe_derived_type
= false;
236 /* Type can not contain itself on an non-zero offset. In that case
237 just give up. Still accept the case where size is now known.
238 Either the second copy may appear past the end of type or within
239 the non-POD buffer located inside the variably sized type
242 goto no_useful_type_info
;
243 /* If we determined type precisely or we have no clue on
244 speuclation, we are done. */
245 if (!maybe_derived_type
|| !speculative_outer_type
246 || !speculation_consistent_p (speculative_outer_type
,
248 speculative_maybe_derived_type
,
251 clear_speculation ();
254 /* Otherwise look into speculation now. */
258 type
= speculative_outer_type
;
259 cur_offset
= speculative_offset
;
265 /* Walk fields and find corresponding on at OFFSET. */
266 if (TREE_CODE (type
) == RECORD_TYPE
)
268 for (fld
= TYPE_FIELDS (type
); fld
; fld
= DECL_CHAIN (fld
))
270 if (TREE_CODE (fld
) != FIELD_DECL
)
273 pos
= int_bit_position (fld
);
274 if (pos
> (unsigned HOST_WIDE_INT
)cur_offset
)
277 /* Do not consider vptr itself. Not even for placement new. */
278 if (!pos
&& DECL_ARTIFICIAL (fld
)
279 && POINTER_TYPE_P (TREE_TYPE (fld
))
281 && polymorphic_type_binfo_p (TYPE_BINFO (type
)))
284 if (!DECL_SIZE (fld
) || !tree_fits_uhwi_p (DECL_SIZE (fld
)))
285 goto no_useful_type_info
;
286 size
= tree_to_uhwi (DECL_SIZE (fld
));
288 /* We can always skip types smaller than pointer size:
289 those can not contain a virtual table pointer.
291 Disqualifying fields that are too small to fit OTR_TYPE
292 saves work needed to walk them for no benefit.
293 Because of the way the bases are packed into a class, the
294 field's size may be smaller than type size, so it needs
295 to be done with a care. */
297 if (pos
<= (unsigned HOST_WIDE_INT
)cur_offset
298 && (pos
+ size
) >= (unsigned HOST_WIDE_INT
)cur_offset
301 || !TYPE_SIZE (TREE_TYPE (fld
))
302 || !tree_fits_shwi_p (TYPE_SIZE (TREE_TYPE (fld
)))
303 || (pos
+ tree_to_uhwi (TYPE_SIZE (TREE_TYPE (fld
))))
304 >= cur_offset
+ otr_type_size
))
309 goto no_useful_type_info
;
311 type
= TYPE_MAIN_VARIANT (TREE_TYPE (fld
));
313 /* DECL_ARTIFICIAL represents a basetype. */
314 if (!DECL_ARTIFICIAL (fld
))
320 /* As soon as we se an field containing the type,
321 we know we are not looking for derivations. */
322 maybe_derived_type
= false;
326 speculative_outer_type
= type
;
327 speculative_offset
= cur_offset
;
328 speculative_maybe_derived_type
= false;
331 else if (!consider_bases
)
332 goto no_useful_type_info
;
334 else if (TREE_CODE (type
) == ARRAY_TYPE
)
336 tree subtype
= TYPE_MAIN_VARIANT (TREE_TYPE (type
));
338 /* Give up if we don't know array field size.
339 Also give up on non-polymorphic types as they are used
340 as buffers for placement new. */
341 if (!TYPE_SIZE (subtype
)
342 || !tree_fits_shwi_p (TYPE_SIZE (subtype
))
343 || tree_to_shwi (TYPE_SIZE (subtype
)) <= 0
344 || !contains_polymorphic_type_p (subtype
))
345 goto no_useful_type_info
;
347 HOST_WIDE_INT new_offset
= cur_offset
% tree_to_shwi (TYPE_SIZE (subtype
));
349 /* We may see buffer for placement new. In this case the expected type
350 can be bigger than the subtype. */
351 if (TYPE_SIZE (subtype
)
352 && (cur_offset
+ otr_type_size
353 > tree_to_uhwi (TYPE_SIZE (subtype
))))
354 goto no_useful_type_info
;
356 cur_offset
= new_offset
;
357 type
= TYPE_MAIN_VARIANT (subtype
);
362 maybe_derived_type
= false;
366 speculative_outer_type
= type
;
367 speculative_offset
= cur_offset
;
368 speculative_maybe_derived_type
= false;
371 /* Give up on anything else. */
375 if (maybe_derived_type
&& !speculative
376 && TREE_CODE (outer_type
) == RECORD_TYPE
377 && TREE_CODE (otr_type
) == RECORD_TYPE
378 && TYPE_BINFO (otr_type
)
380 && get_binfo_at_offset (TYPE_BINFO (otr_type
), 0, outer_type
))
382 clear_outer_type (otr_type
);
383 if (!speculative_outer_type
384 || !speculation_consistent_p (speculative_outer_type
,
386 speculative_maybe_derived_type
,
388 clear_speculation ();
389 if (speculative_outer_type
)
392 type
= speculative_outer_type
;
393 cur_offset
= speculative_offset
;
398 /* We found no way to embedd EXPECTED_TYPE in TYPE.
399 We still permit two special cases - placement new and
400 the case of variadic types containing themselves. */
402 && consider_placement_new
403 && (size_unknown
|| !type
|| maybe_derived_type
404 || possible_placement_new (type
, otr_type
, cur_offset
)))
406 /* In these weird cases we want to accept the context.
407 In non-speculative run we have no useful outer_type info
408 (TODO: we may eventually want to record upper bound on the
409 type size that can be used to prune the walk),
410 but we still want to consider speculation that may
414 clear_outer_type (otr_type
);
415 if (!speculative_outer_type
416 || !speculation_consistent_p (speculative_outer_type
,
418 speculative_maybe_derived_type
,
420 clear_speculation ();
421 if (speculative_outer_type
)
424 type
= speculative_outer_type
;
425 cur_offset
= speculative_offset
;
432 clear_speculation ();
438 clear_speculation ();
441 clear_outer_type (otr_type
);
449 /* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET.
450 CONSIDER_PLACEMENT_NEW makes function to accept cases where OTR_TYPE can
451 be built within OUTER_TYPE by means of placement new. CONSIDER_BASES makes
452 function to accept cases where OTR_TYPE appears as base of OUTER_TYPE or as
453 base of one of fields of OUTER_TYPE. */
456 contains_type_p (tree outer_type
, HOST_WIDE_INT offset
,
458 bool consider_placement_new
,
461 ipa_polymorphic_call_context context
;
463 /* Check that type is within range. */
466 if (TYPE_SIZE (outer_type
) && TYPE_SIZE (otr_type
)
467 && TREE_CODE (TYPE_SIZE (outer_type
)) == INTEGER_CST
468 && TREE_CODE (TYPE_SIZE (otr_type
)) == INTEGER_CST
469 && wi::ltu_p (wi::to_offset (TYPE_SIZE (outer_type
)),
470 (wi::to_offset (TYPE_SIZE (otr_type
)) + offset
)))
473 context
.offset
= offset
;
474 context
.outer_type
= TYPE_MAIN_VARIANT (outer_type
);
475 context
.maybe_derived_type
= false;
476 context
.dynamic
= false;
477 return context
.restrict_to_inner_class (otr_type
, consider_placement_new
,
482 /* Return a FUNCTION_DECL if FN represent a constructor or destructor.
483 If CHECK_CLONES is true, also check for clones of ctor/dtors. */
486 polymorphic_ctor_dtor_p (tree fn
, bool check_clones
)
488 if (TREE_CODE (TREE_TYPE (fn
)) != METHOD_TYPE
489 || (!DECL_CXX_CONSTRUCTOR_P (fn
) && !DECL_CXX_DESTRUCTOR_P (fn
)))
494 /* Watch for clones where we constant propagated the first
495 argument (pointer to the instance). */
496 fn
= DECL_ABSTRACT_ORIGIN (fn
);
498 || TREE_CODE (TREE_TYPE (fn
)) != METHOD_TYPE
499 || (!DECL_CXX_CONSTRUCTOR_P (fn
) && !DECL_CXX_DESTRUCTOR_P (fn
)))
503 if (flags_from_decl_or_type (fn
) & (ECF_PURE
| ECF_CONST
))
509 /* Return a FUNCTION_DECL if BLOCK represents a constructor or destructor.
510 If CHECK_CLONES is true, also check for clones of ctor/dtors. */
513 inlined_polymorphic_ctor_dtor_block_p (tree block
, bool check_clones
)
515 tree fn
= block_ultimate_origin (block
);
516 if (fn
== NULL
|| TREE_CODE (fn
) != FUNCTION_DECL
)
519 return polymorphic_ctor_dtor_p (fn
, check_clones
);
523 /* We know that the instance is stored in variable or parameter
524 (not dynamically allocated) and we want to disprove the fact
525 that it may be in construction at invocation of CALL.
527 BASE represents memory location where instance is stored.
528 If BASE is NULL, it is assumed to be global memory.
529 OUTER_TYPE is known type of the instance or NULL if not
532 For the variable to be in construction we actually need to
533 be in constructor of corresponding global variable or
534 the inline stack of CALL must contain the constructor.
535 Check this condition. This check works safely only before
536 IPA passes, because inline stacks may become out of date
540 decl_maybe_in_construction_p (tree base
, tree outer_type
,
541 gimple
*call
, tree function
)
544 outer_type
= TYPE_MAIN_VARIANT (outer_type
);
545 gcc_assert (!base
|| DECL_P (base
));
547 /* After inlining the code unification optimizations may invalidate
548 inline stacks. Also we need to give up on global variables after
549 IPA, because addresses of these may have been propagated to their
551 if (DECL_STRUCT_FUNCTION (function
)->after_inlining
)
554 /* Pure functions can not do any changes on the dynamic type;
555 that require writting to memory. */
556 if ((!base
|| !auto_var_in_fn_p (base
, function
))
557 && flags_from_decl_or_type (function
) & (ECF_PURE
| ECF_CONST
))
560 bool check_clones
= !base
|| is_global_var (base
);
561 for (tree block
= gimple_block (call
); block
&& TREE_CODE (block
) == BLOCK
;
562 block
= BLOCK_SUPERCONTEXT (block
))
563 if (tree fn
= inlined_polymorphic_ctor_dtor_block_p (block
, check_clones
))
565 tree type
= TYPE_METHOD_BASETYPE (TREE_TYPE (fn
));
567 if (!outer_type
|| !types_odr_comparable (type
, outer_type
))
569 if (TREE_CODE (type
) == RECORD_TYPE
571 && polymorphic_type_binfo_p (TYPE_BINFO (type
)))
574 else if (types_same_for_odr (type
, outer_type
))
578 if (!base
|| (VAR_P (base
) && is_global_var (base
)))
580 if (TREE_CODE (TREE_TYPE (function
)) != METHOD_TYPE
581 || (!DECL_CXX_CONSTRUCTOR_P (function
)
582 && !DECL_CXX_DESTRUCTOR_P (function
)))
584 if (!DECL_ABSTRACT_ORIGIN (function
))
586 /* Watch for clones where we constant propagated the first
587 argument (pointer to the instance). */
588 function
= DECL_ABSTRACT_ORIGIN (function
);
590 || TREE_CODE (TREE_TYPE (function
)) != METHOD_TYPE
591 || (!DECL_CXX_CONSTRUCTOR_P (function
)
592 && !DECL_CXX_DESTRUCTOR_P (function
)))
595 tree type
= TYPE_METHOD_BASETYPE (TREE_TYPE (function
));
596 if (!outer_type
|| !types_odr_comparable (type
, outer_type
))
598 if (TREE_CODE (type
) == RECORD_TYPE
600 && polymorphic_type_binfo_p (TYPE_BINFO (type
)))
603 else if (types_same_for_odr (type
, outer_type
))
609 /* Dump human readable context to F. If NEWLINE is true, it will be terminated
613 ipa_polymorphic_call_context::dump (FILE *f
, bool newline
) const
617 fprintf (f
, "Call is known to be undefined");
621 fprintf (f
, "nothing known");
622 if (outer_type
|| offset
)
624 fprintf (f
, "Outer type%s:", dynamic
? " (dynamic)":"");
625 print_generic_expr (f
, outer_type
, TDF_SLIM
);
626 if (maybe_derived_type
)
627 fprintf (f
, " (or a derived type)");
628 if (maybe_in_construction
)
629 fprintf (f
, " (maybe in construction)");
630 fprintf (f
, " offset " HOST_WIDE_INT_PRINT_DEC
,
633 if (speculative_outer_type
)
635 if (outer_type
|| offset
)
637 fprintf (f
, "Speculative outer type:");
638 print_generic_expr (f
, speculative_outer_type
, TDF_SLIM
);
639 if (speculative_maybe_derived_type
)
640 fprintf (f
, " (or a derived type)");
641 fprintf (f
, " at offset " HOST_WIDE_INT_PRINT_DEC
,
649 /* Print context to stderr. */
652 ipa_polymorphic_call_context::debug () const
657 /* Stream out the context to OB. */
660 ipa_polymorphic_call_context::stream_out (struct output_block
*ob
) const
662 struct bitpack_d bp
= bitpack_create (ob
->main_stream
);
664 bp_pack_value (&bp
, invalid
, 1);
665 bp_pack_value (&bp
, maybe_in_construction
, 1);
666 bp_pack_value (&bp
, maybe_derived_type
, 1);
667 bp_pack_value (&bp
, speculative_maybe_derived_type
, 1);
668 bp_pack_value (&bp
, dynamic
, 1);
669 bp_pack_value (&bp
, outer_type
!= NULL
, 1);
670 bp_pack_value (&bp
, offset
!= 0, 1);
671 bp_pack_value (&bp
, speculative_outer_type
!= NULL
, 1);
672 streamer_write_bitpack (&bp
);
674 if (outer_type
!= NULL
)
675 stream_write_tree (ob
, outer_type
, true);
677 streamer_write_hwi (ob
, offset
);
678 if (speculative_outer_type
!= NULL
)
680 stream_write_tree (ob
, speculative_outer_type
, true);
681 streamer_write_hwi (ob
, speculative_offset
);
684 gcc_assert (!speculative_offset
);
687 /* Stream in the context from IB and DATA_IN. */
690 ipa_polymorphic_call_context::stream_in (struct lto_input_block
*ib
,
691 struct data_in
*data_in
)
693 struct bitpack_d bp
= streamer_read_bitpack (ib
);
695 invalid
= bp_unpack_value (&bp
, 1);
696 maybe_in_construction
= bp_unpack_value (&bp
, 1);
697 maybe_derived_type
= bp_unpack_value (&bp
, 1);
698 speculative_maybe_derived_type
= bp_unpack_value (&bp
, 1);
699 dynamic
= bp_unpack_value (&bp
, 1);
700 bool outer_type_p
= bp_unpack_value (&bp
, 1);
701 bool offset_p
= bp_unpack_value (&bp
, 1);
702 bool speculative_outer_type_p
= bp_unpack_value (&bp
, 1);
705 outer_type
= stream_read_tree (ib
, data_in
);
709 offset
= (HOST_WIDE_INT
) streamer_read_hwi (ib
);
712 if (speculative_outer_type_p
)
714 speculative_outer_type
= stream_read_tree (ib
, data_in
);
715 speculative_offset
= (HOST_WIDE_INT
) streamer_read_hwi (ib
);
719 speculative_outer_type
= NULL
;
720 speculative_offset
= 0;
724 /* Proudce polymorphic call context for call method of instance
725 that is located within BASE (that is assumed to be a decl) at offset OFF. */
728 ipa_polymorphic_call_context::set_by_decl (tree base
, HOST_WIDE_INT off
)
730 gcc_assert (DECL_P (base
));
731 clear_speculation ();
733 if (!contains_polymorphic_type_p (TREE_TYPE (base
)))
739 outer_type
= TYPE_MAIN_VARIANT (TREE_TYPE (base
));
741 /* Make very conservative assumption that all objects
742 may be in construction.
744 It is up to caller to revisit this via
745 get_dynamic_type or decl_maybe_in_construction_p. */
746 maybe_in_construction
= true;
747 maybe_derived_type
= false;
751 /* CST is an invariant (address of decl), try to get meaningful
752 polymorphic call context for polymorphic call of method
753 if instance of OTR_TYPE that is located at offset OFF of this invariant.
754 Return FALSE if nothing meaningful can be found. */
757 ipa_polymorphic_call_context::set_by_invariant (tree cst
,
761 HOST_WIDE_INT offset2
, size
, max_size
;
767 clear_outer_type (otr_type
);
769 if (TREE_CODE (cst
) != ADDR_EXPR
)
772 cst
= TREE_OPERAND (cst
, 0);
773 base
= get_ref_base_and_extent (cst
, &offset2
, &size
, &max_size
, &reverse
);
774 if (!DECL_P (base
) || max_size
== -1 || max_size
!= size
)
777 /* Only type inconsistent programs can have otr_type that is
778 not part of outer type. */
779 if (otr_type
&& !contains_type_p (TREE_TYPE (base
), off
, otr_type
))
782 set_by_decl (base
, off
);
786 /* See if OP is SSA name initialized as a copy or by single assignment.
787 If so, walk the SSA graph up. Because simple PHI conditional is considered
788 copy, GLOBAL_VISITED may be used to avoid infinite loop walking the SSA
792 walk_ssa_copies (tree op
, hash_set
<tree
> **global_visited
= NULL
)
794 hash_set
<tree
> *visited
= NULL
;
796 while (TREE_CODE (op
) == SSA_NAME
797 && !SSA_NAME_IS_DEFAULT_DEF (op
)
798 /* We might be called via fold_stmt during cfgcleanup where
799 SSA form need not be up-to-date. */
800 && !name_registered_for_update_p (op
)
801 && (gimple_assign_single_p (SSA_NAME_DEF_STMT (op
))
802 || gimple_code (SSA_NAME_DEF_STMT (op
)) == GIMPLE_PHI
))
806 if (!*global_visited
)
807 *global_visited
= new hash_set
<tree
>;
808 if ((*global_visited
)->add (op
))
814 visited
= new hash_set
<tree
>;
815 if (visited
->add (op
))
823 This pattern is implicitly produced for casts to non-primary
824 bases. When doing context analysis, we do not really care
825 about the case pointer is NULL, because the call will be
827 if (gimple_code (SSA_NAME_DEF_STMT (op
)) == GIMPLE_PHI
)
829 gimple
*phi
= SSA_NAME_DEF_STMT (op
);
831 if (gimple_phi_num_args (phi
) > 2)
833 if (gimple_phi_num_args (phi
) == 1)
834 op
= gimple_phi_arg_def (phi
, 0);
835 else if (integer_zerop (gimple_phi_arg_def (phi
, 0)))
836 op
= gimple_phi_arg_def (phi
, 1);
837 else if (integer_zerop (gimple_phi_arg_def (phi
, 1)))
838 op
= gimple_phi_arg_def (phi
, 0);
844 if (gimple_assign_load_p (SSA_NAME_DEF_STMT (op
)))
846 op
= gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op
));
856 /* Create polymorphic call context from IP invariant CST.
857 This is typically &global_var.
858 OTR_TYPE specify type of polymorphic call or NULL if unknown, OFF
859 is offset of call. */
861 ipa_polymorphic_call_context::ipa_polymorphic_call_context (tree cst
,
865 clear_speculation ();
866 set_by_invariant (cst
, otr_type
, off
);
869 /* Build context for pointer REF contained in FNDECL at statement STMT.
870 if INSTANCE is non-NULL, return pointer to the object described by
871 the context or DECL where context is contained in. */
873 ipa_polymorphic_call_context::ipa_polymorphic_call_context (tree fndecl
,
878 tree otr_type
= NULL
;
880 hash_set
<tree
> *visited
= NULL
;
882 if (TREE_CODE (ref
) == OBJ_TYPE_REF
)
884 otr_type
= obj_type_ref_class (ref
);
885 base_pointer
= OBJ_TYPE_REF_OBJECT (ref
);
890 /* Set up basic info in case we find nothing interesting in the analysis. */
891 clear_speculation ();
892 clear_outer_type (otr_type
);
895 /* Walk SSA for outer object. */
898 base_pointer
= walk_ssa_copies (base_pointer
, &visited
);
899 if (TREE_CODE (base_pointer
) == ADDR_EXPR
)
901 HOST_WIDE_INT size
, max_size
;
902 HOST_WIDE_INT offset2
;
905 = get_ref_base_and_extent (TREE_OPERAND (base_pointer
, 0),
906 &offset2
, &size
, &max_size
, &reverse
);
908 if (max_size
!= -1 && max_size
== size
)
909 combine_speculation_with (TYPE_MAIN_VARIANT (TREE_TYPE (base
)),
912 NULL
/* Do not change outer type. */);
914 /* If this is a varying address, punt. */
915 if ((TREE_CODE (base
) == MEM_REF
|| DECL_P (base
))
919 /* We found dereference of a pointer. Type of the pointer
920 and MEM_REF is meaningless, but we can look futher. */
921 if (TREE_CODE (base
) == MEM_REF
)
923 base_pointer
= TREE_OPERAND (base
, 0);
925 += offset2
+ mem_ref_offset (base
).to_short_addr () * BITS_PER_UNIT
;
928 /* We found base object. In this case the outer_type
930 else if (DECL_P (base
))
934 /* Only type inconsistent programs can have otr_type that is
935 not part of outer type. */
937 && !contains_type_p (TREE_TYPE (base
),
938 offset
+ offset2
, otr_type
))
942 *instance
= base_pointer
;
945 set_by_decl (base
, offset
+ offset2
);
946 if (outer_type
&& maybe_in_construction
&& stmt
)
947 maybe_in_construction
948 = decl_maybe_in_construction_p (base
,
962 else if (TREE_CODE (base_pointer
) == POINTER_PLUS_EXPR
963 && tree_fits_uhwi_p (TREE_OPERAND (base_pointer
, 1)))
965 offset
+= tree_to_shwi (TREE_OPERAND (base_pointer
, 1))
967 base_pointer
= TREE_OPERAND (base_pointer
, 0);
976 /* Try to determine type of the outer object. */
977 if (TREE_CODE (base_pointer
) == SSA_NAME
978 && SSA_NAME_IS_DEFAULT_DEF (base_pointer
)
979 && TREE_CODE (SSA_NAME_VAR (base_pointer
)) == PARM_DECL
)
981 /* See if parameter is THIS pointer of a method. */
982 if (TREE_CODE (TREE_TYPE (fndecl
)) == METHOD_TYPE
983 && SSA_NAME_VAR (base_pointer
) == DECL_ARGUMENTS (fndecl
))
986 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer
)));
987 gcc_assert (TREE_CODE (outer_type
) == RECORD_TYPE
988 || TREE_CODE (outer_type
) == UNION_TYPE
);
990 /* Dynamic casting has possibly upcasted the type
991 in the hiearchy. In this case outer type is less
992 informative than inner type and we should forget
995 && !contains_type_p (outer_type
, offset
,
997 || !contains_polymorphic_type_p (outer_type
))
1001 *instance
= base_pointer
;
1007 /* If the function is constructor or destructor, then
1008 the type is possibly in construction, but we know
1009 it is not derived type. */
1010 if (DECL_CXX_CONSTRUCTOR_P (fndecl
)
1011 || DECL_CXX_DESTRUCTOR_P (fndecl
))
1013 maybe_in_construction
= true;
1014 maybe_derived_type
= false;
1018 maybe_derived_type
= true;
1019 maybe_in_construction
= false;
1022 *instance
= base_pointer
;
1025 /* Non-PODs passed by value are really passed by invisible
1026 reference. In this case we also know the type of the
1028 if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer
)))
1031 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer
)));
1032 /* Only type inconsistent programs can have otr_type that is
1033 not part of outer type. */
1034 if (otr_type
&& !contains_type_p (outer_type
, offset
,
1039 *instance
= base_pointer
;
1042 /* Non-polymorphic types have no interest for us. */
1043 else if (!otr_type
&& !contains_polymorphic_type_p (outer_type
))
1047 *instance
= base_pointer
;
1050 maybe_derived_type
= false;
1051 maybe_in_construction
= false;
1053 *instance
= base_pointer
;
1058 tree base_type
= TREE_TYPE (base_pointer
);
1060 if (TREE_CODE (base_pointer
) == SSA_NAME
1061 && SSA_NAME_IS_DEFAULT_DEF (base_pointer
)
1062 && !(TREE_CODE (SSA_NAME_VAR (base_pointer
)) == PARM_DECL
1063 || TREE_CODE (SSA_NAME_VAR (base_pointer
)) == RESULT_DECL
))
1067 *instance
= base_pointer
;
1070 if (TREE_CODE (base_pointer
) == SSA_NAME
1071 && SSA_NAME_DEF_STMT (base_pointer
)
1072 && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer
)))
1073 base_type
= TREE_TYPE (gimple_assign_rhs1
1074 (SSA_NAME_DEF_STMT (base_pointer
)));
1076 if (base_type
&& POINTER_TYPE_P (base_type
))
1077 combine_speculation_with (TYPE_MAIN_VARIANT (TREE_TYPE (base_type
)),
1079 true, NULL
/* Do not change type here */);
1080 /* TODO: There are multiple ways to derive a type. For instance
1081 if BASE_POINTER is passed to an constructor call prior our refernece.
1082 We do not make this type of flow sensitive analysis yet. */
1084 *instance
= base_pointer
;
1088 /* Structure to be passed in between detect_type_change and
1089 check_stmt_for_type_change. */
1091 struct type_change_info
1093 /* Offset into the object where there is the virtual method pointer we are
1095 HOST_WIDE_INT offset
;
1096 /* The declaration or SSA_NAME pointer of the base that we are checking for
1099 /* The reference to virtual table pointer used. */
1102 /* If we actually can tell the type that the object has changed to, it is
1103 stored in this field. Otherwise it remains NULL_TREE. */
1104 tree known_current_type
;
1105 HOST_WIDE_INT known_current_offset
;
1107 /* Set to nonzero if we possibly missed some dynamic type changes and we
1108 should consider the set to be speculative. */
1109 unsigned speculative
;
1111 /* Set to true if dynamic type change has been detected. */
1112 bool type_maybe_changed
;
1113 /* Set to true if multiple types have been encountered. known_current_type
1114 must be disregarded in that case. */
1115 bool multiple_types_encountered
;
1116 bool seen_unanalyzed_store
;
1119 /* Return true if STMT is not call and can modify a virtual method table pointer.
1120 We take advantage of fact that vtable stores must appear within constructor
1121 and destructor functions. */
1124 noncall_stmt_may_be_vtbl_ptr_store (gimple
*stmt
)
1126 if (is_gimple_assign (stmt
))
1128 tree lhs
= gimple_assign_lhs (stmt
);
1130 if (gimple_clobber_p (stmt
))
1132 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs
)))
1134 if (flag_strict_aliasing
1135 && !POINTER_TYPE_P (TREE_TYPE (lhs
)))
1138 if (TREE_CODE (lhs
) == COMPONENT_REF
1139 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs
, 1)))
1141 /* In the future we might want to use get_base_ref_and_offset to find
1142 if there is a field corresponding to the offset and if so, proceed
1143 almost like if it was a component ref. */
1147 /* Code unification may mess with inline stacks. */
1148 if (cfun
->after_inlining
)
1151 /* Walk the inline stack and watch out for ctors/dtors.
1152 TODO: Maybe we can require the store to appear in toplevel
1153 block of CTOR/DTOR. */
1154 for (tree block
= gimple_block (stmt
); block
&& TREE_CODE (block
) == BLOCK
;
1155 block
= BLOCK_SUPERCONTEXT (block
))
1156 if (BLOCK_ABSTRACT_ORIGIN (block
)
1157 && TREE_CODE (block_ultimate_origin (block
)) == FUNCTION_DECL
)
1158 return inlined_polymorphic_ctor_dtor_block_p (block
, false);
1159 return (TREE_CODE (TREE_TYPE (current_function_decl
)) == METHOD_TYPE
1160 && (DECL_CXX_CONSTRUCTOR_P (current_function_decl
)
1161 || DECL_CXX_DESTRUCTOR_P (current_function_decl
)));
1164 /* If STMT can be proved to be an assignment to the virtual method table
1165 pointer of ANALYZED_OBJ and the type associated with the new table
1166 identified, return the type. Otherwise return NULL_TREE if type changes
1167 in unknown way or ERROR_MARK_NODE if type is unchanged. */
1170 extr_type_from_vtbl_ptr_store (gimple
*stmt
, struct type_change_info
*tci
,
1171 HOST_WIDE_INT
*type_offset
)
1173 HOST_WIDE_INT offset
, size
, max_size
;
1174 tree lhs
, rhs
, base
;
1177 if (!gimple_assign_single_p (stmt
))
1180 lhs
= gimple_assign_lhs (stmt
);
1181 rhs
= gimple_assign_rhs1 (stmt
);
1182 if (TREE_CODE (lhs
) != COMPONENT_REF
1183 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs
, 1)))
1186 fprintf (dump_file
, " LHS is not virtual table.\n");
1190 if (tci
->vtbl_ptr_ref
&& operand_equal_p (lhs
, tci
->vtbl_ptr_ref
, 0))
1194 base
= get_ref_base_and_extent (lhs
, &offset
, &size
, &max_size
, &reverse
);
1195 if (DECL_P (tci
->instance
))
1197 if (base
!= tci
->instance
)
1201 fprintf (dump_file
, " base:");
1202 print_generic_expr (dump_file
, base
, TDF_SLIM
);
1203 fprintf (dump_file
, " does not match instance:");
1204 print_generic_expr (dump_file
, tci
->instance
, TDF_SLIM
);
1205 fprintf (dump_file
, "\n");
1210 else if (TREE_CODE (base
) == MEM_REF
)
1212 if (!operand_equal_p (tci
->instance
, TREE_OPERAND (base
, 0), 0))
1216 fprintf (dump_file
, " base mem ref:");
1217 print_generic_expr (dump_file
, base
, TDF_SLIM
);
1218 fprintf (dump_file
, " does not match instance:");
1219 print_generic_expr (dump_file
, tci
->instance
, TDF_SLIM
);
1220 fprintf (dump_file
, "\n");
1224 if (!integer_zerop (TREE_OPERAND (base
, 1)))
1226 if (!tree_fits_shwi_p (TREE_OPERAND (base
, 1)))
1230 fprintf (dump_file
, " base mem ref:");
1231 print_generic_expr (dump_file
, base
, TDF_SLIM
);
1232 fprintf (dump_file
, " has non-representable offset:");
1233 print_generic_expr (dump_file
, tci
->instance
, TDF_SLIM
);
1234 fprintf (dump_file
, "\n");
1239 offset
+= tree_to_shwi (TREE_OPERAND (base
, 1)) * BITS_PER_UNIT
;
1242 else if (!operand_equal_p (tci
->instance
, base
, 0)
1247 fprintf (dump_file
, " base:");
1248 print_generic_expr (dump_file
, base
, TDF_SLIM
);
1249 fprintf (dump_file
, " does not match instance:");
1250 print_generic_expr (dump_file
, tci
->instance
, TDF_SLIM
);
1251 fprintf (dump_file
, " with offset %i\n", (int)tci
->offset
);
1253 return tci
->offset
> POINTER_SIZE
? error_mark_node
: NULL_TREE
;
1255 if (offset
!= tci
->offset
1256 || size
!= POINTER_SIZE
1257 || max_size
!= POINTER_SIZE
)
1260 fprintf (dump_file
, " wrong offset %i!=%i or size %i\n",
1261 (int)offset
, (int)tci
->offset
, (int)size
);
1262 return offset
+ POINTER_SIZE
<= tci
->offset
1264 && tci
->offset
+ POINTER_SIZE
> offset
+ max_size
)
1265 ? error_mark_node
: NULL
;
1270 unsigned HOST_WIDE_INT offset2
;
1272 if (!vtable_pointer_value_to_vtable (rhs
, &vtable
, &offset2
))
1275 fprintf (dump_file
, " Failed to lookup binfo\n");
1279 tree binfo
= subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable
)),
1284 fprintf (dump_file
, " Construction vtable used\n");
1285 /* FIXME: We should suport construction contexts. */
1289 *type_offset
= tree_to_shwi (BINFO_OFFSET (binfo
)) * BITS_PER_UNIT
;
1290 return DECL_CONTEXT (vtable
);
1293 /* Record dynamic type change of TCI to TYPE. */
1296 record_known_type (struct type_change_info
*tci
, tree type
, HOST_WIDE_INT offset
)
1302 fprintf (dump_file
, " Recording type: ");
1303 print_generic_expr (dump_file
, type
, TDF_SLIM
);
1304 fprintf (dump_file
, " at offset %i\n", (int)offset
);
1307 fprintf (dump_file
, " Recording unknown type\n");
1310 /* If we found a constructor of type that is not polymorphic or
1311 that may contain the type in question as a field (not as base),
1312 restrict to the inner class first to make type matching bellow
1316 || (TREE_CODE (type
) != RECORD_TYPE
1317 || !TYPE_BINFO (type
)
1318 || !polymorphic_type_binfo_p (TYPE_BINFO (type
)))))
1320 ipa_polymorphic_call_context context
;
1322 context
.offset
= offset
;
1323 context
.outer_type
= type
;
1324 context
.maybe_in_construction
= false;
1325 context
.maybe_derived_type
= false;
1326 context
.dynamic
= true;
1327 /* If we failed to find the inner type, we know that the call
1328 would be undefined for type produced here. */
1329 if (!context
.restrict_to_inner_class (tci
->otr_type
))
1332 fprintf (dump_file
, " Ignoring; does not contain otr_type\n");
1335 /* Watch for case we reached an POD type and anticipate placement
1337 if (!context
.maybe_derived_type
)
1339 type
= context
.outer_type
;
1340 offset
= context
.offset
;
1343 if (tci
->type_maybe_changed
1344 && (!types_same_for_odr (type
, tci
->known_current_type
)
1345 || offset
!= tci
->known_current_offset
))
1346 tci
->multiple_types_encountered
= true;
1347 tci
->known_current_type
= TYPE_MAIN_VARIANT (type
);
1348 tci
->known_current_offset
= offset
;
1349 tci
->type_maybe_changed
= true;
1353 /* The maximum number of may-defs we visit when looking for a must-def
1354 that changes the dynamic type in check_stmt_for_type_change. Tuned
1355 after the PR12392 testcase which unlimited spends 40% time within
1356 these alias walks and 8% with the following limit. */
1359 csftc_abort_walking_p (unsigned speculative
)
1361 unsigned max
= PARAM_VALUE (PARAM_MAX_SPECULATIVE_DEVIRT_MAYDEFS
);
1362 return speculative
> max
? true : false;
1365 /* Callback of walk_aliased_vdefs and a helper function for
1366 detect_type_change to check whether a particular statement may modify
1367 the virtual table pointer, and if possible also determine the new type of
1368 the (sub-)object. It stores its result into DATA, which points to a
1369 type_change_info structure. */
1372 check_stmt_for_type_change (ao_ref
*ao ATTRIBUTE_UNUSED
, tree vdef
, void *data
)
1374 gimple
*stmt
= SSA_NAME_DEF_STMT (vdef
);
1375 struct type_change_info
*tci
= (struct type_change_info
*) data
;
1378 /* If we already gave up, just terminate the rest of walk. */
1379 if (tci
->multiple_types_encountered
)
1382 if (is_gimple_call (stmt
))
1384 if (gimple_call_flags (stmt
) & (ECF_CONST
| ECF_PURE
))
1387 /* Check for a constructor call. */
1388 if ((fn
= gimple_call_fndecl (stmt
)) != NULL_TREE
1389 && DECL_CXX_CONSTRUCTOR_P (fn
)
1390 && TREE_CODE (TREE_TYPE (fn
)) == METHOD_TYPE
1391 && gimple_call_num_args (stmt
))
1393 tree op
= walk_ssa_copies (gimple_call_arg (stmt
, 0));
1394 tree type
= TYPE_METHOD_BASETYPE (TREE_TYPE (fn
));
1395 HOST_WIDE_INT offset
= 0, size
, max_size
;
1400 fprintf (dump_file
, " Checking constructor call: ");
1401 print_gimple_stmt (dump_file
, stmt
, 0, 0);
1404 /* See if THIS parameter seems like instance pointer. */
1405 if (TREE_CODE (op
) == ADDR_EXPR
)
1407 op
= get_ref_base_and_extent (TREE_OPERAND (op
, 0), &offset
,
1408 &size
, &max_size
, &reverse
);
1409 if (size
!= max_size
|| max_size
== -1)
1412 return csftc_abort_walking_p (tci
->speculative
);
1414 if (op
&& TREE_CODE (op
) == MEM_REF
)
1416 if (!tree_fits_shwi_p (TREE_OPERAND (op
, 1)))
1419 return csftc_abort_walking_p (tci
->speculative
);
1421 offset
+= tree_to_shwi (TREE_OPERAND (op
, 1))
1423 op
= TREE_OPERAND (op
, 0);
1425 else if (DECL_P (op
))
1430 return csftc_abort_walking_p (tci
->speculative
);
1432 op
= walk_ssa_copies (op
);
1434 if (operand_equal_p (op
, tci
->instance
, 0)
1436 && TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
1437 && tree_fits_shwi_p (TYPE_SIZE (type
))
1438 && tree_to_shwi (TYPE_SIZE (type
)) + offset
> tci
->offset
1439 /* Some inlined constructors may look as follows:
1440 _3 = operator new (16);
1441 MEM[(struct &)_3] ={v} {CLOBBER};
1442 MEM[(struct CompositeClass *)_3]._vptr.CompositeClass
1443 = &MEM[(void *)&_ZTV14CompositeClass + 16B];
1444 _7 = &MEM[(struct CompositeClass *)_3].object;
1445 EmptyClass::EmptyClass (_7);
1447 When determining dynamic type of _3 and because we stop at first
1448 dynamic type found, we would stop on EmptyClass::EmptyClass (_7).
1449 In this case the emptyclass is not even polymorphic and we miss
1450 it is contained in an outer type that is polymorphic. */
1452 && (tci
->offset
== offset
|| contains_polymorphic_type_p (type
)))
1454 record_known_type (tci
, type
, tci
->offset
- offset
);
1458 /* Calls may possibly change dynamic type by placement new. Assume
1459 it will not happen, but make result speculative only. */
1462 fprintf (dump_file
, " Function call may change dynamic type:");
1463 print_gimple_stmt (dump_file
, stmt
, 0, 0);
1466 return csftc_abort_walking_p (tci
->speculative
);
1468 /* Check for inlined virtual table store. */
1469 else if (noncall_stmt_may_be_vtbl_ptr_store (stmt
))
1472 HOST_WIDE_INT offset
= 0;
1475 fprintf (dump_file
, " Checking vtbl store: ");
1476 print_gimple_stmt (dump_file
, stmt
, 0, 0);
1479 type
= extr_type_from_vtbl_ptr_store (stmt
, tci
, &offset
);
1480 if (type
== error_mark_node
)
1482 gcc_assert (!type
|| TYPE_MAIN_VARIANT (type
) == type
);
1486 fprintf (dump_file
, " Unanalyzed store may change type.\n");
1487 tci
->seen_unanalyzed_store
= true;
1491 record_known_type (tci
, type
, offset
);
1498 /* THIS is polymorphic call context obtained from get_polymorphic_context.
1499 OTR_OBJECT is pointer to the instance returned by OBJ_TYPE_REF_OBJECT.
1500 INSTANCE is pointer to the outer instance as returned by
1501 get_polymorphic_context. To avoid creation of temporary expressions,
1502 INSTANCE may also be an declaration of get_polymorphic_context found the
1503 value to be in static storage.
1505 If the type of instance is not fully determined
1506 (either OUTER_TYPE is unknown or MAYBE_IN_CONSTRUCTION/INCLUDE_DERIVED_TYPES
1507 is set), try to walk memory writes and find the actual construction of the
1510 Return true if memory is unchanged from function entry.
1512 We do not include this analysis in the context analysis itself, because
1513 it needs memory SSA to be fully built and the walk may be expensive.
1514 So it is not suitable for use withing fold_stmt and similar uses. */
1517 ipa_polymorphic_call_context::get_dynamic_type (tree instance
,
1522 struct type_change_info tci
;
1524 bool function_entry_reached
= false;
1525 tree instance_ref
= NULL
;
1526 gimple
*stmt
= call
;
1527 /* Remember OFFSET before it is modified by restrict_to_inner_class.
1528 This is because we do not update INSTANCE when walking inwards. */
1529 HOST_WIDE_INT instance_offset
= offset
;
1530 tree instance_outer_type
= outer_type
;
1533 otr_type
= TYPE_MAIN_VARIANT (otr_type
);
1535 /* Walk into inner type. This may clear maybe_derived_type and save us
1536 from useless work. It also makes later comparsions with static type
1538 if (outer_type
&& otr_type
)
1540 if (!restrict_to_inner_class (otr_type
))
1544 if (!maybe_in_construction
&& !maybe_derived_type
)
1547 /* If we are in fact not looking at any object object or the instance is
1548 some placement new into a random load, give up straight away. */
1549 if (TREE_CODE (instance
) == MEM_REF
)
1552 /* We need to obtain refernce to virtual table pointer. It is better
1553 to look it up in the code rather than build our own. This require bit
1554 of pattern matching, but we end up verifying that what we found is
1557 What we pattern match is:
1559 tmp = instance->_vptr.A; // vtbl ptr load
1560 tmp2 = tmp[otr_token]; // vtable lookup
1561 OBJ_TYPE_REF(tmp2;instance->0) (instance);
1563 We want to start alias oracle walk from vtbl pointer load,
1564 but we may not be able to identify it, for example, when PRE moved the
1567 if (gimple_code (call
) == GIMPLE_CALL
)
1569 tree ref
= gimple_call_fn (call
);
1570 HOST_WIDE_INT offset2
, size
, max_size
;
1573 if (TREE_CODE (ref
) == OBJ_TYPE_REF
)
1575 ref
= OBJ_TYPE_REF_EXPR (ref
);
1576 ref
= walk_ssa_copies (ref
);
1578 /* If call target is already known, no need to do the expensive
1580 if (is_gimple_min_invariant (ref
))
1583 /* Check if definition looks like vtable lookup. */
1584 if (TREE_CODE (ref
) == SSA_NAME
1585 && !SSA_NAME_IS_DEFAULT_DEF (ref
)
1586 && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref
))
1587 && TREE_CODE (gimple_assign_rhs1
1588 (SSA_NAME_DEF_STMT (ref
))) == MEM_REF
)
1590 ref
= get_base_address
1591 (TREE_OPERAND (gimple_assign_rhs1
1592 (SSA_NAME_DEF_STMT (ref
)), 0));
1593 ref
= walk_ssa_copies (ref
);
1594 /* Find base address of the lookup and see if it looks like
1596 if (TREE_CODE (ref
) == SSA_NAME
1597 && !SSA_NAME_IS_DEFAULT_DEF (ref
)
1598 && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref
)))
1600 tree ref_exp
= gimple_assign_rhs1 (SSA_NAME_DEF_STMT (ref
));
1602 = get_ref_base_and_extent (ref_exp
, &offset2
, &size
,
1603 &max_size
, &reverse
);
1605 /* Finally verify that what we found looks like read from
1606 OTR_OBJECT or from INSTANCE with offset OFFSET. */
1608 && ((TREE_CODE (base_ref
) == MEM_REF
1609 && ((offset2
== instance_offset
1610 && TREE_OPERAND (base_ref
, 0) == instance
)
1612 && TREE_OPERAND (base_ref
, 0)
1614 || (DECL_P (instance
) && base_ref
== instance
1615 && offset2
== instance_offset
)))
1617 stmt
= SSA_NAME_DEF_STMT (ref
);
1618 instance_ref
= ref_exp
;
1625 /* If we failed to look up the reference in code, build our own. */
1628 /* If the statement in question does not use memory, we can't tell
1630 if (!gimple_vuse (stmt
))
1632 ao_ref_init_from_ptr_and_size (&ao
, otr_object
, NULL
);
1635 /* Otherwise use the real reference. */
1636 ao_ref_init (&ao
, instance_ref
);
1638 /* We look for vtbl pointer read. */
1639 ao
.size
= POINTER_SIZE
;
1640 ao
.max_size
= ao
.size
;
1641 /* We are looking for stores to vptr pointer within the instance of
1643 TODO: The vptr pointer type is globally known, we probably should
1644 keep it and do that even when otr_type is unknown. */
1648 = get_alias_set (outer_type
? outer_type
: otr_type
);
1650 = get_alias_set (TREE_TYPE (BINFO_VTABLE (TYPE_BINFO (otr_type
))));
1655 fprintf (dump_file
, "Determining dynamic type for call: ");
1656 print_gimple_stmt (dump_file
, call
, 0, 0);
1657 fprintf (dump_file
, " Starting walk at: ");
1658 print_gimple_stmt (dump_file
, stmt
, 0, 0);
1659 fprintf (dump_file
, " instance pointer: ");
1660 print_generic_expr (dump_file
, otr_object
, TDF_SLIM
);
1661 fprintf (dump_file
, " Outer instance pointer: ");
1662 print_generic_expr (dump_file
, instance
, TDF_SLIM
);
1663 fprintf (dump_file
, " offset: %i (bits)", (int)instance_offset
);
1664 fprintf (dump_file
, " vtbl reference: ");
1665 print_generic_expr (dump_file
, instance_ref
, TDF_SLIM
);
1666 fprintf (dump_file
, "\n");
1669 tci
.offset
= instance_offset
;
1670 tci
.instance
= instance
;
1671 tci
.vtbl_ptr_ref
= instance_ref
;
1672 tci
.known_current_type
= NULL_TREE
;
1673 tci
.known_current_offset
= 0;
1674 tci
.otr_type
= otr_type
;
1675 tci
.type_maybe_changed
= false;
1676 tci
.multiple_types_encountered
= false;
1677 tci
.speculative
= 0;
1678 tci
.seen_unanalyzed_store
= false;
1680 walk_aliased_vdefs (&ao
, gimple_vuse (stmt
), check_stmt_for_type_change
,
1681 &tci
, NULL
, &function_entry_reached
);
1683 /* If we did not find any type changing statements, we may still drop
1684 maybe_in_construction flag if the context already have outer type.
1686 Here we make special assumptions about both constructors and
1687 destructors which are all the functions that are allowed to alter the
1688 VMT pointers. It assumes that destructors begin with assignment into
1689 all VMT pointers and that constructors essentially look in the
1692 1) The very first thing they do is that they call constructors of
1693 ancestor sub-objects that have them.
1695 2) Then VMT pointers of this and all its ancestors is set to new
1696 values corresponding to the type corresponding to the constructor.
1698 3) Only afterwards, other stuff such as constructor of member
1699 sub-objects and the code written by the user is run. Only this may
1700 include calling virtual functions, directly or indirectly.
1702 4) placement new can not be used to change type of non-POD statically
1703 allocated variables.
1705 There is no way to call a constructor of an ancestor sub-object in any
1708 This means that we do not have to care whether constructors get the
1709 correct type information because they will always change it (in fact,
1710 if we define the type to be given by the VMT pointer, it is undefined).
1712 The most important fact to derive from the above is that if, for some
1713 statement in the section 3, we try to detect whether the dynamic type
1714 has changed, we can safely ignore all calls as we examine the function
1715 body backwards until we reach statements in section 2 because these
1716 calls cannot be ancestor constructors or destructors (if the input is
1717 not bogus) and so do not change the dynamic type (this holds true only
1718 for automatically allocated objects but at the moment we devirtualize
1719 only these). We then must detect that statements in section 2 change
1720 the dynamic type and can try to derive the new type. That is enough
1721 and we can stop, we will never see the calls into constructors of
1722 sub-objects in this code.
1724 Therefore if the static outer type was found (outer_type)
1725 we can safely ignore tci.speculative that is set on calls and give up
1726 only if there was dyanmic type store that may affect given variable
1727 (seen_unanalyzed_store) */
1729 if (!tci
.type_maybe_changed
1732 && !tci
.seen_unanalyzed_store
1733 && !tci
.multiple_types_encountered
1734 && ((offset
== tci
.offset
1735 && types_same_for_odr (tci
.known_current_type
,
1737 || (instance_offset
== offset
1738 && types_same_for_odr (tci
.known_current_type
,
1739 instance_outer_type
)))))
1741 if (!outer_type
|| tci
.seen_unanalyzed_store
)
1743 if (maybe_in_construction
)
1744 maybe_in_construction
= false;
1746 fprintf (dump_file
, " No dynamic type change found.\n");
1750 if (tci
.known_current_type
1751 && !function_entry_reached
1752 && !tci
.multiple_types_encountered
)
1754 if (!tci
.speculative
)
1756 outer_type
= TYPE_MAIN_VARIANT (tci
.known_current_type
);
1757 offset
= tci
.known_current_offset
;
1759 maybe_in_construction
= false;
1760 maybe_derived_type
= false;
1762 fprintf (dump_file
, " Determined dynamic type.\n");
1764 else if (!speculative_outer_type
1765 || speculative_maybe_derived_type
)
1767 speculative_outer_type
= TYPE_MAIN_VARIANT (tci
.known_current_type
);
1768 speculative_offset
= tci
.known_current_offset
;
1769 speculative_maybe_derived_type
= false;
1771 fprintf (dump_file
, " Determined speculative dynamic type.\n");
1776 fprintf (dump_file
, " Found multiple types%s%s\n",
1777 function_entry_reached
? " (function entry reached)" : "",
1778 function_entry_reached
? " (multiple types encountered)" : "");
1784 /* See if speculation given by SPEC_OUTER_TYPE, SPEC_OFFSET and SPEC_MAYBE_DERIVED_TYPE
1785 seems consistent (and useful) with what we already have in the non-speculative context. */
1788 ipa_polymorphic_call_context::speculation_consistent_p (tree spec_outer_type
,
1789 HOST_WIDE_INT spec_offset
,
1790 bool spec_maybe_derived_type
,
1791 tree otr_type
) const
1793 if (!flag_devirtualize_speculatively
)
1796 /* Non-polymorphic types are useless for deriving likely polymorphic
1798 if (!spec_outer_type
|| !contains_polymorphic_type_p (spec_outer_type
))
1801 /* If we know nothing, speculation is always good. */
1805 /* Speculation is only useful to avoid derived types.
1806 This is not 100% true for placement new, where the outer context may
1807 turn out to be useless, but ignore these for now. */
1808 if (!maybe_derived_type
)
1811 /* If types agrees, speculation is consistent, but it makes sense only
1812 when it says something new. */
1813 if (types_must_be_same_for_odr (spec_outer_type
, outer_type
))
1814 return maybe_derived_type
&& !spec_maybe_derived_type
;
1816 /* If speculation does not contain the type in question, ignore it. */
1818 && !contains_type_p (spec_outer_type
, spec_offset
, otr_type
, false, true))
1821 /* If outer type already contains speculation as a filed,
1822 it is useless. We already know from OUTER_TYPE
1823 SPEC_TYPE and that it is not in the construction. */
1824 if (contains_type_p (outer_type
, offset
- spec_offset
,
1825 spec_outer_type
, false, false))
1828 /* If speculative outer type is not more specified than outer
1830 We can only decide this safely if we can compare types with OUTER_TYPE.
1832 if ((!in_lto_p
|| odr_type_p (outer_type
))
1833 && !contains_type_p (spec_outer_type
,
1834 spec_offset
- offset
,
1840 /* Improve THIS with speculation described by NEW_OUTER_TYPE, NEW_OFFSET,
1841 NEW_MAYBE_DERIVED_TYPE
1842 If OTR_TYPE is set, assume the context is used with OTR_TYPE. */
1845 ipa_polymorphic_call_context::combine_speculation_with
1846 (tree new_outer_type
, HOST_WIDE_INT new_offset
, bool new_maybe_derived_type
,
1849 if (!new_outer_type
)
1852 /* restrict_to_inner_class may eliminate wrong speculation making our job
1855 restrict_to_inner_class (otr_type
);
1857 if (!speculation_consistent_p (new_outer_type
, new_offset
,
1858 new_maybe_derived_type
, otr_type
))
1861 /* New speculation is a win in case we have no speculation or new
1862 speculation does not consider derivations. */
1863 if (!speculative_outer_type
1864 || (speculative_maybe_derived_type
1865 && !new_maybe_derived_type
))
1867 speculative_outer_type
= new_outer_type
;
1868 speculative_offset
= new_offset
;
1869 speculative_maybe_derived_type
= new_maybe_derived_type
;
1872 else if (types_must_be_same_for_odr (speculative_outer_type
,
1875 if (speculative_offset
!= new_offset
)
1877 /* OK we have two contexts that seems valid but they disagree,
1880 This is not a lattice operation, so we may want to drop it later. */
1881 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1883 "Speculative outer types match, "
1884 "offset mismatch -> invalid speculation\n");
1885 clear_speculation ();
1890 if (speculative_maybe_derived_type
&& !new_maybe_derived_type
)
1892 speculative_maybe_derived_type
= false;
1899 /* Choose type that contains the other. This one either contains the outer
1900 as a field (thus giving exactly one target) or is deeper in the type
1902 else if (speculative_outer_type
1903 && speculative_maybe_derived_type
1904 && (new_offset
> speculative_offset
1905 || (new_offset
== speculative_offset
1906 && contains_type_p (new_outer_type
,
1907 0, speculative_outer_type
, false))))
1909 tree old_outer_type
= speculative_outer_type
;
1910 HOST_WIDE_INT old_offset
= speculative_offset
;
1911 bool old_maybe_derived_type
= speculative_maybe_derived_type
;
1913 speculative_outer_type
= new_outer_type
;
1914 speculative_offset
= new_offset
;
1915 speculative_maybe_derived_type
= new_maybe_derived_type
;
1918 restrict_to_inner_class (otr_type
);
1920 /* If the speculation turned out to make no sense, revert to sensible
1922 if (!speculative_outer_type
)
1924 speculative_outer_type
= old_outer_type
;
1925 speculative_offset
= old_offset
;
1926 speculative_maybe_derived_type
= old_maybe_derived_type
;
1929 return (old_offset
!= speculative_offset
1930 || old_maybe_derived_type
!= speculative_maybe_derived_type
1931 || types_must_be_same_for_odr (speculative_outer_type
,
1937 /* Make speculation less specific so
1938 NEW_OUTER_TYPE, NEW_OFFSET, NEW_MAYBE_DERIVED_TYPE is also included.
1939 If OTR_TYPE is set, assume the context is used with OTR_TYPE. */
1942 ipa_polymorphic_call_context::meet_speculation_with
1943 (tree new_outer_type
, HOST_WIDE_INT new_offset
, bool new_maybe_derived_type
,
1946 if (!new_outer_type
&& speculative_outer_type
)
1948 clear_speculation ();
1952 /* restrict_to_inner_class may eliminate wrong speculation making our job
1955 restrict_to_inner_class (otr_type
);
1957 if (!speculative_outer_type
1958 || !speculation_consistent_p (speculative_outer_type
,
1960 speculative_maybe_derived_type
,
1964 if (!speculation_consistent_p (new_outer_type
, new_offset
,
1965 new_maybe_derived_type
, otr_type
))
1967 clear_speculation ();
1971 else if (types_must_be_same_for_odr (speculative_outer_type
,
1974 if (speculative_offset
!= new_offset
)
1976 clear_speculation ();
1981 if (!speculative_maybe_derived_type
&& new_maybe_derived_type
)
1983 speculative_maybe_derived_type
= true;
1990 /* See if one type contains the other as a field (not base). */
1991 else if (contains_type_p (new_outer_type
, new_offset
- speculative_offset
,
1992 speculative_outer_type
, false, false))
1994 else if (contains_type_p (speculative_outer_type
,
1995 speculative_offset
- new_offset
,
1996 new_outer_type
, false, false))
1998 speculative_outer_type
= new_outer_type
;
1999 speculative_offset
= new_offset
;
2000 speculative_maybe_derived_type
= new_maybe_derived_type
;
2003 /* See if OUTER_TYPE is base of CTX.OUTER_TYPE. */
2004 else if (contains_type_p (new_outer_type
,
2005 new_offset
- speculative_offset
,
2006 speculative_outer_type
, false, true))
2008 if (!speculative_maybe_derived_type
)
2010 speculative_maybe_derived_type
= true;
2015 /* See if CTX.OUTER_TYPE is base of OUTER_TYPE. */
2016 else if (contains_type_p (speculative_outer_type
,
2017 speculative_offset
- new_offset
, new_outer_type
, false, true))
2019 speculative_outer_type
= new_outer_type
;
2020 speculative_offset
= new_offset
;
2021 speculative_maybe_derived_type
= true;
2026 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2027 fprintf (dump_file
, "Giving up on speculative meet\n");
2028 clear_speculation ();
2033 /* Assume that both THIS and a given context is valid and strenghten THIS
2034 if possible. Return true if any strenghtening was made.
2035 If actual type the context is being used in is known, OTR_TYPE should be
2036 set accordingly. This improves quality of combined result. */
2039 ipa_polymorphic_call_context::combine_with (ipa_polymorphic_call_context ctx
,
2042 bool updated
= false;
2044 if (ctx
.useless_p () || invalid
)
2047 /* Restricting context to inner type makes merging easier, however do not
2048 do that unless we know how the context is used (OTR_TYPE is non-NULL) */
2049 if (otr_type
&& !invalid
&& !ctx
.invalid
)
2051 restrict_to_inner_class (otr_type
);
2052 ctx
.restrict_to_inner_class (otr_type
);
2057 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2059 fprintf (dump_file
, "Polymorphic call context combine:");
2061 fprintf (dump_file
, "With context: ");
2062 ctx
.dump (dump_file
);
2065 fprintf (dump_file
, "To be used with type: ");
2066 print_generic_expr (dump_file
, otr_type
, TDF_SLIM
);
2067 fprintf (dump_file
, "\n");
2071 /* If call is known to be invalid, we are done. */
2074 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2075 fprintf (dump_file
, "-> Invalid context\n");
2079 if (!ctx
.outer_type
)
2081 else if (!outer_type
)
2083 outer_type
= ctx
.outer_type
;
2084 offset
= ctx
.offset
;
2085 dynamic
= ctx
.dynamic
;
2086 maybe_in_construction
= ctx
.maybe_in_construction
;
2087 maybe_derived_type
= ctx
.maybe_derived_type
;
2090 /* If types are known to be same, merging is quite easy. */
2091 else if (types_must_be_same_for_odr (outer_type
, ctx
.outer_type
))
2093 if (offset
!= ctx
.offset
2094 && TYPE_SIZE (outer_type
)
2095 && TREE_CODE (TYPE_SIZE (outer_type
)) == INTEGER_CST
)
2097 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2098 fprintf (dump_file
, "Outer types match, offset mismatch -> invalid\n");
2099 clear_speculation ();
2100 clear_outer_type ();
2104 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2105 fprintf (dump_file
, "Outer types match, merging flags\n");
2106 if (maybe_in_construction
&& !ctx
.maybe_in_construction
)
2109 maybe_in_construction
= false;
2111 if (maybe_derived_type
&& !ctx
.maybe_derived_type
)
2114 maybe_derived_type
= false;
2116 if (dynamic
&& !ctx
.dynamic
)
2122 /* If we know the type precisely, there is not much to improve. */
2123 else if (!maybe_derived_type
&& !maybe_in_construction
2124 && !ctx
.maybe_derived_type
&& !ctx
.maybe_in_construction
)
2126 /* It may be easy to check if second context permits the first
2127 and set INVALID otherwise. This is not easy to do in general;
2128 contains_type_p may return false negatives for non-comparable
2131 If OTR_TYPE is known, we however can expect that
2132 restrict_to_inner_class should have discovered the same base
2134 if (otr_type
&& !ctx
.maybe_in_construction
&& !ctx
.maybe_derived_type
)
2136 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2137 fprintf (dump_file
, "Contextes disagree -> invalid\n");
2141 /* See if one type contains the other as a field (not base).
2142 In this case we want to choose the wider type, because it contains
2143 more information. */
2144 else if (contains_type_p (ctx
.outer_type
, ctx
.offset
- offset
,
2145 outer_type
, false, false))
2147 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2148 fprintf (dump_file
, "Second type contain the first as a field\n");
2150 if (maybe_derived_type
)
2152 outer_type
= ctx
.outer_type
;
2153 maybe_derived_type
= ctx
.maybe_derived_type
;
2154 offset
= ctx
.offset
;
2155 dynamic
= ctx
.dynamic
;
2159 /* If we do not know how the context is being used, we can
2160 not clear MAYBE_IN_CONSTRUCTION because it may be offseted
2161 to other component of OUTER_TYPE later and we know nothing
2163 if (otr_type
&& maybe_in_construction
2164 && !ctx
.maybe_in_construction
)
2166 maybe_in_construction
= false;
2170 else if (contains_type_p (outer_type
, offset
- ctx
.offset
,
2171 ctx
.outer_type
, false, false))
2173 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2174 fprintf (dump_file
, "First type contain the second as a field\n");
2176 if (otr_type
&& maybe_in_construction
2177 && !ctx
.maybe_in_construction
)
2179 maybe_in_construction
= false;
2183 /* See if OUTER_TYPE is base of CTX.OUTER_TYPE. */
2184 else if (contains_type_p (ctx
.outer_type
,
2185 ctx
.offset
- offset
, outer_type
, false, true))
2187 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2188 fprintf (dump_file
, "First type is base of second\n");
2189 if (!maybe_derived_type
)
2191 if (!ctx
.maybe_in_construction
2192 && types_odr_comparable (outer_type
, ctx
.outer_type
))
2194 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2195 fprintf (dump_file
, "Second context does not permit base -> invalid\n");
2199 /* Pick variant deeper in the hiearchy. */
2202 outer_type
= ctx
.outer_type
;
2203 maybe_in_construction
= ctx
.maybe_in_construction
;
2204 maybe_derived_type
= ctx
.maybe_derived_type
;
2205 offset
= ctx
.offset
;
2206 dynamic
= ctx
.dynamic
;
2210 /* See if CTX.OUTER_TYPE is base of OUTER_TYPE. */
2211 else if (contains_type_p (outer_type
,
2212 offset
- ctx
.offset
, ctx
.outer_type
, false, true))
2214 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2215 fprintf (dump_file
, "Second type is base of first\n");
2216 if (!ctx
.maybe_derived_type
)
2218 if (!maybe_in_construction
2219 && types_odr_comparable (outer_type
, ctx
.outer_type
))
2221 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2222 fprintf (dump_file
, "First context does not permit base -> invalid\n");
2225 /* Pick the base type. */
2226 else if (maybe_in_construction
)
2228 outer_type
= ctx
.outer_type
;
2229 maybe_in_construction
= ctx
.maybe_in_construction
;
2230 maybe_derived_type
= ctx
.maybe_derived_type
;
2231 offset
= ctx
.offset
;
2232 dynamic
= ctx
.dynamic
;
2237 /* TODO handle merging using hiearchy. */
2238 else if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2239 fprintf (dump_file
, "Giving up on merge\n");
2241 updated
|= combine_speculation_with (ctx
.speculative_outer_type
,
2242 ctx
.speculative_offset
,
2243 ctx
.speculative_maybe_derived_type
,
2246 if (updated
&& dump_file
&& (dump_flags
& TDF_DETAILS
))
2248 fprintf (dump_file
, "Updated as: ");
2250 fprintf (dump_file
, "\n");
2256 clear_speculation ();
2257 clear_outer_type ();
2261 /* Take non-speculative info, merge it with speculative and clear speculation.
2262 Used when we no longer manage to keep track of actual outer type, but we
2263 think it is still there.
2265 If OTR_TYPE is set, the transformation can be done more effectively assuming
2266 that context is going to be used only that way. */
2269 ipa_polymorphic_call_context::make_speculative (tree otr_type
)
2271 tree spec_outer_type
= outer_type
;
2272 HOST_WIDE_INT spec_offset
= offset
;
2273 bool spec_maybe_derived_type
= maybe_derived_type
;
2278 clear_outer_type ();
2279 clear_speculation ();
2284 clear_outer_type ();
2285 combine_speculation_with (spec_outer_type
, spec_offset
,
2286 spec_maybe_derived_type
,
2290 /* Use when we can not track dynamic type change. This speculatively assume
2291 type change is not happening. */
2294 ipa_polymorphic_call_context::possible_dynamic_type_change (bool in_poly_cdtor
,
2298 make_speculative (otr_type
);
2299 else if (in_poly_cdtor
)
2300 maybe_in_construction
= true;
2303 /* Return TRUE if this context conveys the same information as OTHER. */
2306 ipa_polymorphic_call_context::equal_to
2307 (const ipa_polymorphic_call_context
&x
) const
2310 return x
.useless_p ();
2313 if (x
.useless_p () || x
.invalid
)
2319 || !types_odr_comparable (outer_type
, x
.outer_type
)
2320 || !types_same_for_odr (outer_type
, x
.outer_type
)
2321 || offset
!= x
.offset
2322 || maybe_in_construction
!= x
.maybe_in_construction
2323 || maybe_derived_type
!= x
.maybe_derived_type
2324 || dynamic
!= x
.dynamic
)
2327 else if (x
.outer_type
)
2331 if (speculative_outer_type
2332 && speculation_consistent_p (speculative_outer_type
, speculative_offset
,
2333 speculative_maybe_derived_type
, NULL_TREE
))
2335 if (!x
.speculative_outer_type
)
2338 if (!types_odr_comparable (speculative_outer_type
,
2339 x
.speculative_outer_type
)
2340 || !types_same_for_odr (speculative_outer_type
,
2341 x
.speculative_outer_type
)
2342 || speculative_offset
!= x
.speculative_offset
2343 || speculative_maybe_derived_type
!= x
.speculative_maybe_derived_type
)
2346 else if (x
.speculative_outer_type
2347 && x
.speculation_consistent_p (x
.speculative_outer_type
,
2348 x
.speculative_offset
,
2349 x
.speculative_maybe_derived_type
,
2356 /* Modify context to be strictly less restrictive than CTX. */
2359 ipa_polymorphic_call_context::meet_with (ipa_polymorphic_call_context ctx
,
2362 bool updated
= false;
2364 if (useless_p () || ctx
.invalid
)
2367 /* Restricting context to inner type makes merging easier, however do not
2368 do that unless we know how the context is used (OTR_TYPE is non-NULL) */
2369 if (otr_type
&& !useless_p () && !ctx
.useless_p ())
2371 restrict_to_inner_class (otr_type
);
2372 ctx
.restrict_to_inner_class (otr_type
);
2380 if (ctx
.useless_p () || invalid
)
2386 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2388 fprintf (dump_file
, "Polymorphic call context meet:");
2390 fprintf (dump_file
, "With context: ");
2391 ctx
.dump (dump_file
);
2394 fprintf (dump_file
, "To be used with type: ");
2395 print_generic_expr (dump_file
, otr_type
, TDF_SLIM
);
2396 fprintf (dump_file
, "\n");
2400 if (!dynamic
&& ctx
.dynamic
)
2406 /* If call is known to be invalid, we are done. */
2409 else if (!ctx
.outer_type
)
2411 clear_outer_type ();
2414 /* If types are known to be same, merging is quite easy. */
2415 else if (types_must_be_same_for_odr (outer_type
, ctx
.outer_type
))
2417 if (offset
!= ctx
.offset
2418 && TYPE_SIZE (outer_type
)
2419 && TREE_CODE (TYPE_SIZE (outer_type
)) == INTEGER_CST
)
2421 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2422 fprintf (dump_file
, "Outer types match, offset mismatch -> clearing\n");
2423 clear_outer_type ();
2426 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2427 fprintf (dump_file
, "Outer types match, merging flags\n");
2428 if (!maybe_in_construction
&& ctx
.maybe_in_construction
)
2431 maybe_in_construction
= true;
2433 if (!maybe_derived_type
&& ctx
.maybe_derived_type
)
2436 maybe_derived_type
= true;
2438 if (!dynamic
&& ctx
.dynamic
)
2444 /* See if one type contains the other as a field (not base). */
2445 else if (contains_type_p (ctx
.outer_type
, ctx
.offset
- offset
,
2446 outer_type
, false, false))
2448 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2449 fprintf (dump_file
, "Second type contain the first as a field\n");
2451 /* The second type is more specified, so we keep the first.
2452 We need to set DYNAMIC flag to avoid declaring context INVALID
2453 of OFFSET ends up being out of range. */
2457 && (!TYPE_SIZE (ctx
.outer_type
)
2458 || !TYPE_SIZE (outer_type
)
2459 || !operand_equal_p (TYPE_SIZE (ctx
.outer_type
),
2460 TYPE_SIZE (outer_type
), 0)))))
2466 else if (contains_type_p (outer_type
, offset
- ctx
.offset
,
2467 ctx
.outer_type
, false, false))
2469 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2470 fprintf (dump_file
, "First type contain the second as a field\n");
2475 && (!TYPE_SIZE (ctx
.outer_type
)
2476 || !TYPE_SIZE (outer_type
)
2477 || !operand_equal_p (TYPE_SIZE (ctx
.outer_type
),
2478 TYPE_SIZE (outer_type
), 0)))))
2480 outer_type
= ctx
.outer_type
;
2481 offset
= ctx
.offset
;
2482 dynamic
= ctx
.dynamic
;
2483 maybe_in_construction
= ctx
.maybe_in_construction
;
2484 maybe_derived_type
= ctx
.maybe_derived_type
;
2487 /* See if OUTER_TYPE is base of CTX.OUTER_TYPE. */
2488 else if (contains_type_p (ctx
.outer_type
,
2489 ctx
.offset
- offset
, outer_type
, false, true))
2491 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2492 fprintf (dump_file
, "First type is base of second\n");
2493 if (!maybe_derived_type
)
2495 maybe_derived_type
= true;
2498 if (!maybe_in_construction
&& ctx
.maybe_in_construction
)
2500 maybe_in_construction
= true;
2503 if (!dynamic
&& ctx
.dynamic
)
2509 /* See if CTX.OUTER_TYPE is base of OUTER_TYPE. */
2510 else if (contains_type_p (outer_type
,
2511 offset
- ctx
.offset
, ctx
.outer_type
, false, true))
2513 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2514 fprintf (dump_file
, "Second type is base of first\n");
2515 outer_type
= ctx
.outer_type
;
2516 offset
= ctx
.offset
;
2518 if (!maybe_derived_type
)
2519 maybe_derived_type
= true;
2520 if (!maybe_in_construction
&& ctx
.maybe_in_construction
)
2521 maybe_in_construction
= true;
2522 if (!dynamic
&& ctx
.dynamic
)
2525 /* TODO handle merging using hiearchy. */
2528 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2529 fprintf (dump_file
, "Giving up on meet\n");
2530 clear_outer_type ();
2534 updated
|= meet_speculation_with (ctx
.speculative_outer_type
,
2535 ctx
.speculative_offset
,
2536 ctx
.speculative_maybe_derived_type
,
2539 if (updated
&& dump_file
&& (dump_flags
& TDF_DETAILS
))
2541 fprintf (dump_file
, "Updated as: ");
2543 fprintf (dump_file
, "\n");