/* Analysis of polymorphic call context.
   Copyright (C) 2013-2014 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "print-tree.h"
#include "tree-pass.h"
#include "hash-table.h"
#include "tree-pretty-print.h"
#include "ipa-utils.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "ipa-inline.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "stor-layout.h"
#include "data-streamer.h"
#include "lto-streamer.h"
#include "streamer-hooks.h"
/* Return true when TYPE contains a polymorphic type and thus is interesting
   for the devirtualization machinery.  */
static bool contains_type_p (tree, HOST_WIDE_INT, tree,
			     bool consider_placement_new = true,
			     bool consider_bases = true);
bool
contains_polymorphic_type_p (const_tree type)
{
  type = TYPE_MAIN_VARIANT (type);

  if (RECORD_OR_UNION_TYPE_P (type))
    {
      if (TYPE_BINFO (type)
	  && polymorphic_type_binfo_p (TYPE_BINFO (type)))
	return true;
      for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	if (TREE_CODE (fld) == FIELD_DECL
	    && !DECL_ARTIFICIAL (fld)
	    && contains_polymorphic_type_p (TREE_TYPE (fld)))
	  return true;
      return false;
    }
  if (TREE_CODE (type) == ARRAY_TYPE)
    return contains_polymorphic_type_p (TREE_TYPE (type));
  return false;
}
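
/* Illustrative sketch (hypothetical C++ types, for exposition only): given

     struct Base    { virtual ~Base (); };
     struct Wrapper { int id; Base member; };
     struct Plain   { int id; double payload; };

   contains_polymorphic_type_p returns true for Wrapper, because the type of
   its field MEMBER carries a virtual table pointer, and false for Plain,
   which consists only of non-polymorphic scalars.  */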
/* Return true if it seems valid to use placement new to build EXPECTED_TYPE
   at position CUR_OFFSET within TYPE.

   A POD can be changed to an instance of a polymorphic type by
   placement new.  Here we play safe and assume that any
   non-polymorphic type is POD.  */
static bool
possible_placement_new (tree type, tree expected_type,
			HOST_WIDE_INT cur_offset)
{
  return ((TREE_CODE (type) != RECORD_TYPE
	   || !TYPE_BINFO (type)
	   || cur_offset >= BITS_PER_WORD
	   || !polymorphic_type_binfo_p (TYPE_BINFO (type)))
	  && (!TYPE_SIZE (type)
	      || !tree_fits_shwi_p (TYPE_SIZE (type))
	      || (cur_offset
		  + (expected_type ? tree_to_uhwi (TYPE_SIZE (expected_type))
		     : GET_MODE_BITSIZE (Pmode))
		  <= tree_to_uhwi (TYPE_SIZE (type)))));
}
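
/* Illustrative sketch (hypothetical C++ code, for exposition only):

     struct A { virtual void f (); };
     char buf[64];
     A *p = new (buf) A;	// placement new builds A inside a char buffer

   Since the element type of BUF is a plain POD, an object of a polymorphic
   type such as A may legitimately live at some offset within it; the check
   above is the conservative test for that possibility.  */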
/* THIS->OUTER_TYPE is a type of memory object where object of OTR_TYPE
   is contained at THIS->OFFSET.  Walk the memory representation of
   THIS->OUTER_TYPE and find the outermost class type that matches
   OTR_TYPE or contains OTR_TYPE as a base.  Update THIS accordingly.

   If OTR_TYPE is NULL, just find the outermost polymorphic type with
   a virtual table present at position OFFSET.

   For example when THIS represents type
     class A
       {
	 int a;
	 class B b;
       }
   and we look for type at offset sizeof(int), we end up with B and offset 0.
   If the same is produced by multiple inheritance, we end up with A and offset
   sizeof(int).

   If we can not find the corresponding class, give up by setting
   THIS->OUTER_TYPE to OTR_TYPE and THIS->OFFSET to NULL.
   Return true when lookup was successful.

   When CONSIDER_PLACEMENT_NEW is false, reject contexts that may be made
   valid only via allocation of a new polymorphic type inside by means
   of placement new.

   When CONSIDER_BASES is false, only look for actual fields, not base types
   of TYPE.  */
bool
ipa_polymorphic_call_context::restrict_to_inner_class (tree otr_type,
							bool consider_placement_new,
							bool consider_bases)
{
  tree type = outer_type;
  HOST_WIDE_INT cur_offset = offset;
  bool speculative = false;
  bool size_unknown = false;
  unsigned HOST_WIDE_INT otr_type_size = GET_MODE_BITSIZE (Pmode);

  /* Update OUTER_TYPE to match EXPECTED_TYPE if it is not set.  */
  if (!outer_type)
    clear_outer_type (otr_type);
  /* See if OFFSET points inside OUTER_TYPE.  If it does not, we know
     that the context is either invalid, or the instance type must be
     derived from OUTER_TYPE.

     Because the instance type may contain a field whose type is of OUTER_TYPE,
     we can not derive any effective information about it.

     TODO: In the case we know all derived types, we can definitely do better
     here.  */
  else if (TYPE_SIZE (outer_type)
	   && tree_fits_shwi_p (TYPE_SIZE (outer_type))
	   && tree_to_shwi (TYPE_SIZE (outer_type)) >= 0
	   && tree_to_shwi (TYPE_SIZE (outer_type)) <= offset)
    clear_outer_type (otr_type);

  /* If derived type is not allowed, we know that the context is invalid.
     For dynamic types, we really do not have information about the
     size of the memory location.  It is possible that a completely
     different type is stored after outer_type.  */
  if (!maybe_derived_type && !dynamic)
    clear_speculation ();

  if (otr_type && TYPE_SIZE (otr_type)
      && tree_fits_shwi_p (TYPE_SIZE (otr_type)))
    otr_type_size = tree_to_uhwi (TYPE_SIZE (otr_type));

  if (!type || offset < 0)
    goto no_useful_type_info;

  /* Find the sub-object the constant actually refers to and mark whether it is
     an artificial one (as opposed to a user-defined one).

     This loop is performed twice; first time for outer_type and second time
     for speculative_outer_type.  The second run has SPECULATIVE set.  */
  while (true)
    {
      unsigned HOST_WIDE_INT pos, size;
      tree fld;

      /* If we do not know the size of TYPE, we need to be more conservative
	 about accepting cases where we can not find EXPECTED_TYPE.
	 Generally the types that do matter here are of constant size.
	 The size_unknown case should be very rare.  */
      if (TYPE_SIZE (type)
	  && tree_fits_shwi_p (TYPE_SIZE (type))
	  && tree_to_shwi (TYPE_SIZE (type)) >= 0)
	size_unknown = false;

      /* On a match, just return what we found.  */
      if ((otr_type
	   && types_odr_comparable (type, otr_type)
	   && types_same_for_odr (type, otr_type))
	  || (!otr_type
	      && TREE_CODE (type) == RECORD_TYPE
	      && polymorphic_type_binfo_p (TYPE_BINFO (type))))
	{
	  /* If we did not match the offset, just give up on speculation.  */
	  if (cur_offset != 0
	      /* Also check if speculation did not end up being same as
		 the non-speculative outer type.  */
	      || (types_must_be_same_for_odr (speculative_outer_type,
					      outer_type)
		  && (maybe_derived_type
		      == speculative_maybe_derived_type)))
	    clear_speculation ();

	  /* If type is known to be final, do not worry about derived
	     types.  Testing it here may help us to avoid speculation.  */
	  if (otr_type && TREE_CODE (outer_type) == RECORD_TYPE
	      && (!in_lto_p || odr_type_p (outer_type))
	      && type_known_to_have_no_deriavations_p (outer_type))
	    maybe_derived_type = false;

	  /* Type can not contain itself on a non-zero offset.  In that case
	     just give up.  Still accept the case where size is now known.
	     Either the second copy may appear past the end of type or within
	     the non-POD buffer located inside the variably sized type
	     itself.  */
	    goto no_useful_type_info;

	  /* If we determined type precisely or we have no clue on
	     speculation, we are done.  */
	  if (!maybe_derived_type || !speculative_outer_type
	      || !speculation_consistent_p (speculative_outer_type,
					    speculative_offset,
					    speculative_maybe_derived_type,
					    otr_type))
	    clear_speculation ();
	  /* Otherwise look into speculation now.  */
	  else
	    {
	      speculative = true;
	      type = speculative_outer_type;
	      cur_offset = speculative_offset;
	      continue;
	    }
	}
      /* Walk fields and find the corresponding one at OFFSET.  */
      if (TREE_CODE (type) == RECORD_TYPE)
	{
	  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	    {
	      if (TREE_CODE (fld) != FIELD_DECL)
		continue;

	      pos = int_bit_position (fld);
	      if (pos > (unsigned HOST_WIDE_INT)cur_offset)
		continue;

	      /* Do not consider vptr itself.  Not even for placement new.  */
	      if (!pos && DECL_ARTIFICIAL (fld)
		  && POINTER_TYPE_P (TREE_TYPE (fld))
		  && polymorphic_type_binfo_p (TYPE_BINFO (type)))
		continue;

	      if (!DECL_SIZE (fld) || !tree_fits_uhwi_p (DECL_SIZE (fld)))
		goto no_useful_type_info;
	      size = tree_to_uhwi (DECL_SIZE (fld));

	      /* We can always skip types smaller than pointer size:
		 those can not contain a virtual table pointer.

		 Disqualifying fields that are too small to fit OTR_TYPE
		 saves work needed to walk them for no benefit.
		 Because of the way the bases are packed into a class, the
		 field's size may be smaller than the type size, so it needs
		 to be done with care.  */
	      if (pos <= (unsigned HOST_WIDE_INT)cur_offset
		  && (pos + size) >= (unsigned HOST_WIDE_INT)cur_offset
				     + GET_MODE_BITSIZE (Pmode)
		  && (!otr_type
		      || !TYPE_SIZE (TREE_TYPE (fld))
		      || !tree_fits_shwi_p (TYPE_SIZE (TREE_TYPE (fld)))
		      || (pos + tree_to_uhwi (TYPE_SIZE (TREE_TYPE (fld))))
			 >= cur_offset + otr_type_size))
		break;
	    }
	  if (!fld)
	    goto no_useful_type_info;

	  type = TYPE_MAIN_VARIANT (TREE_TYPE (fld));
	  cur_offset -= pos;
	  /* DECL_ARTIFICIAL represents a basetype.  */
	  if (!DECL_ARTIFICIAL (fld))
	    {
	      if (!speculative)
		{
		  /* As soon as we see a field containing the type,
		     we know we are not looking for derivations.  */
		  maybe_derived_type = false;
		}
	      else
		{
		  speculative_outer_type = type;
		  speculative_offset = cur_offset;
		  speculative_maybe_derived_type = false;
		}
	    }
	  else if (!consider_bases)
	    goto no_useful_type_info;
	}
      else if (TREE_CODE (type) == ARRAY_TYPE)
	{
	  tree subtype = TYPE_MAIN_VARIANT (TREE_TYPE (type));

	  /* Give up if we don't know the array field size.
	     Also give up on non-polymorphic types as they are used
	     as buffers for placement new.  */
	  if (!TYPE_SIZE (subtype)
	      || !tree_fits_shwi_p (TYPE_SIZE (subtype))
	      || tree_to_shwi (TYPE_SIZE (subtype)) <= 0
	      || !contains_polymorphic_type_p (subtype))
	    goto no_useful_type_info;

	  HOST_WIDE_INT new_offset
	    = cur_offset % tree_to_shwi (TYPE_SIZE (subtype));

	  /* We may see a buffer for placement new.  In this case the expected
	     type can be bigger than the subtype.  */
	  if (TYPE_SIZE (subtype)
	      && (cur_offset + otr_type_size
		  > tree_to_uhwi (TYPE_SIZE (subtype))))
	    goto no_useful_type_info;

	  cur_offset = new_offset;
	  type = subtype;
	  if (!speculative)
	    {
	      maybe_derived_type = false;
	    }
	  else
	    {
	      speculative_outer_type = type;
	      speculative_offset = cur_offset;
	      speculative_maybe_derived_type = false;
	    }
	}
      /* Give up on anything else.  */
      else
	goto no_useful_type_info;
    }

  if (maybe_derived_type && !speculative
      && TREE_CODE (outer_type) == RECORD_TYPE
      && TREE_CODE (otr_type) == RECORD_TYPE
      && TYPE_BINFO (otr_type)
      && get_binfo_at_offset (TYPE_BINFO (otr_type), 0, outer_type))
    {
      clear_outer_type (otr_type);
      if (!speculative_outer_type
	  || !speculation_consistent_p (speculative_outer_type,
					speculative_offset,
					speculative_maybe_derived_type,
					otr_type))
	clear_speculation ();
      if (speculative_outer_type)
	{
	  type = speculative_outer_type;
	  cur_offset = speculative_offset;
	}
    }

  /* We found no way to embed EXPECTED_TYPE in TYPE.
     We still permit two special cases - placement new and
     the case of variadic types containing themselves.  */
  if (consider_placement_new
      && (size_unknown || !type || maybe_derived_type
	  || possible_placement_new (type, otr_type, cur_offset)))
    {
      /* In these weird cases we want to accept the context.
	 In non-speculative run we have no useful outer_type info
	 (TODO: we may eventually want to record upper bound on the
	 type size that can be used to prune the walk),
	 but we still want to consider speculation that may
	 be useful.  */
      clear_outer_type (otr_type);
      if (!speculative_outer_type
	  || !speculation_consistent_p (speculative_outer_type,
					speculative_offset,
					speculative_maybe_derived_type,
					otr_type))
	clear_speculation ();
      if (speculative_outer_type)
	{
	  type = speculative_outer_type;
	  cur_offset = speculative_offset;
	}
      else
	clear_speculation ();
    }
  else
    clear_speculation ();

no_useful_type_info:
  clear_outer_type (otr_type);
/* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET.
   CONSIDER_PLACEMENT_NEW makes the function accept cases where OTR_TYPE can
   be built within OUTER_TYPE by means of placement new.  CONSIDER_BASES makes
   the function accept cases where OTR_TYPE appears as a base of OUTER_TYPE or
   as a base of one of the fields of OUTER_TYPE.  */
static bool
contains_type_p (tree outer_type, HOST_WIDE_INT offset,
		 tree otr_type,
		 bool consider_placement_new,
		 bool consider_bases)
{
  ipa_polymorphic_call_context context;

  /* Check that type is within range.  */
  if (TYPE_SIZE (outer_type) && TYPE_SIZE (otr_type)
      && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (otr_type)) == INTEGER_CST
      && wi::ltu_p (wi::to_offset (TYPE_SIZE (outer_type)),
		    (wi::to_offset (TYPE_SIZE (otr_type)) + offset)))
    return false;

  context.offset = offset;
  context.outer_type = TYPE_MAIN_VARIANT (outer_type);
  context.maybe_derived_type = false;
  return context.restrict_to_inner_class (otr_type, consider_placement_new,
					  consider_bases);
}
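
/* Illustrative sketch (hypothetical C++ types, for exposition only):

     struct B { virtual void f (); };
     struct D : B { };		// B is a base of D
     struct H { int i; B b; };	// B is a field of H

   contains_type_p (D, 0, B) holds only when CONSIDER_BASES is true, because
   within D the type B appears as an artificial base field, whereas
   contains_type_p (H, bit offset of H::b, B) holds even with CONSIDER_BASES
   false, because there B appears as a real user-declared field.  */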
/* We know that the instance is stored in a variable or parameter
   (not dynamically allocated) and we want to disprove the fact
   that it may be in construction at invocation of CALL.

   BASE represents the memory location where the instance is stored.
   If BASE is NULL, it is assumed to be global memory.
   OUTER_TYPE is the known type of the instance or NULL if not
   known.

   For the variable to be in construction we actually need to
   be in the constructor of the corresponding global variable or
   the inline stack of CALL must contain the constructor.
   Check this condition.  This check works safely only before
   IPA passes, because inline stacks may become out of date
   later.  */
bool
decl_maybe_in_construction_p (tree base, tree outer_type,
			      gimple call, tree function)
{
  if (outer_type)
    outer_type = TYPE_MAIN_VARIANT (outer_type);
  gcc_assert (!base || DECL_P (base));

  /* After inlining the code unification optimizations may invalidate
     inline stacks.  Also we need to give up on global variables after
     IPA, because addresses of these may have been propagated to their
     constructors.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;

  /* Pure functions can not do any changes on the dynamic type;
     that requires writing to memory.  */
  if ((!base || !auto_var_in_fn_p (base, function))
      && flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;

  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
       block = BLOCK_SUPERCONTEXT (block))
    if (BLOCK_ABSTRACT_ORIGIN (block)
	&& TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
      {
	tree fn = BLOCK_ABSTRACT_ORIGIN (block);

	if (TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
	    || (!DECL_CXX_CONSTRUCTOR_P (fn)
		&& !DECL_CXX_DESTRUCTOR_P (fn)))
	  {
	    /* Watch for clones where we constant propagated the first
	       argument (pointer to the instance).  */
	    fn = DECL_ABSTRACT_ORIGIN (fn);
	    if (!fn
		|| (base && !is_global_var (base))
		|| TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
		|| (!DECL_CXX_CONSTRUCTOR_P (fn)
		    && !DECL_CXX_DESTRUCTOR_P (fn)))
	      return false;
	  }
	if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
	  return false;

	tree type = TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (fn)));

	if (!outer_type || !types_odr_comparable (type, outer_type))
	  {
	    if (TREE_CODE (type) == RECORD_TYPE
		&& polymorphic_type_binfo_p (TYPE_BINFO (type)))
	      return true;
	  }
	else if (types_same_for_odr (type, outer_type))
	  return true;
      }

  if (!base || (TREE_CODE (base) == VAR_DECL && is_global_var (base)))
    {
      if (TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
	  || (!DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)))
	{
	  if (!DECL_ABSTRACT_ORIGIN (function))
	    return false;
	  /* Watch for clones where we constant propagated the first
	     argument (pointer to the instance).  */
	  function = DECL_ABSTRACT_ORIGIN (function);
	  if (!function
	      || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
	      || (!DECL_CXX_CONSTRUCTOR_P (function)
		  && !DECL_CXX_DESTRUCTOR_P (function)))
	    return false;
	}
      tree type = TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (function)));
      if (!outer_type || !types_odr_comparable (type, outer_type))
	{
	  if (TREE_CODE (type) == RECORD_TYPE
	      && polymorphic_type_binfo_p (TYPE_BINFO (type)))
	    return true;
	}
      else if (types_same_for_odr (type, outer_type))
	return true;
    }
  return false;
}
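
/* Illustrative sketch (hypothetical C++ code, for exposition only):

     struct A { A (); virtual void f (); };
     A global_instance;
     A::A () { f (); }		// virtual call made while *this is being built

   A virtual call reached from A::A (), directly or via the inline stack,
   may observe GLOBAL_INSTANCE while it is still in construction, which is
   precisely the situation the walk above tries to rule out.  */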
/* Dump human readable context to F.  */

void
ipa_polymorphic_call_context::dump (FILE *f) const
{
  if (invalid)
    fprintf (f, "Call is known to be undefined");
  else
    {
      if (useless_p ())
	fprintf (f, "nothing known");
      if (outer_type || offset)
	{
	  fprintf (f, "Outer type%s:", dynamic ? " (dynamic)":"");
	  print_generic_expr (f, outer_type, TDF_SLIM);
	  if (maybe_derived_type)
	    fprintf (f, " (or a derived type)");
	  if (maybe_in_construction)
	    fprintf (f, " (maybe in construction)");
	  fprintf (f, " offset "HOST_WIDE_INT_PRINT_DEC,
		   offset);
	}
      if (speculative_outer_type)
	{
	  if (outer_type || offset)
	    fprintf (f, " ");
	  fprintf (f, "Speculative outer type:");
	  print_generic_expr (f, speculative_outer_type, TDF_SLIM);
	  if (speculative_maybe_derived_type)
	    fprintf (f, " (or a derived type)");
	  fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC,
		   speculative_offset);
	}
    }
  fprintf (f, "\n");
}

/* Print context to stderr.  */

void
ipa_polymorphic_call_context::debug () const
{
  dump (stderr);
}
/* Stream out the context to OB.  */

void
ipa_polymorphic_call_context::stream_out (struct output_block *ob) const
{
  struct bitpack_d bp = bitpack_create (ob->main_stream);

  bp_pack_value (&bp, invalid, 1);
  bp_pack_value (&bp, maybe_in_construction, 1);
  bp_pack_value (&bp, maybe_derived_type, 1);
  bp_pack_value (&bp, speculative_maybe_derived_type, 1);
  bp_pack_value (&bp, dynamic, 1);
  bp_pack_value (&bp, outer_type != NULL, 1);
  bp_pack_value (&bp, offset != 0, 1);
  bp_pack_value (&bp, speculative_outer_type != NULL, 1);
  streamer_write_bitpack (&bp);

  if (outer_type != NULL)
    stream_write_tree (ob, outer_type, true);
  if (offset)
    streamer_write_hwi (ob, offset);
  if (speculative_outer_type != NULL)
    {
      stream_write_tree (ob, speculative_outer_type, true);
      streamer_write_hwi (ob, speculative_offset);
    }
  else
    gcc_assert (!speculative_offset);
}
/* Stream in the context from IB and DATA_IN.  */

void
ipa_polymorphic_call_context::stream_in (struct lto_input_block *ib,
					 struct data_in *data_in)
{
  struct bitpack_d bp = streamer_read_bitpack (ib);

  invalid = bp_unpack_value (&bp, 1);
  maybe_in_construction = bp_unpack_value (&bp, 1);
  maybe_derived_type = bp_unpack_value (&bp, 1);
  speculative_maybe_derived_type = bp_unpack_value (&bp, 1);
  dynamic = bp_unpack_value (&bp, 1);
  bool outer_type_p = bp_unpack_value (&bp, 1);
  bool offset_p = bp_unpack_value (&bp, 1);
  bool speculative_outer_type_p = bp_unpack_value (&bp, 1);

  if (outer_type_p)
    outer_type = stream_read_tree (ib, data_in);
  else
    outer_type = NULL;
  if (offset_p)
    offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  else
    offset = 0;
  if (speculative_outer_type_p)
    {
      speculative_outer_type = stream_read_tree (ib, data_in);
      speculative_offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
    }
  else
    {
      speculative_outer_type = NULL;
      speculative_offset = 0;
    }
}
/* Produce a polymorphic call context for a call to a method of an instance
   that is located within BASE (that is assumed to be a decl) at offset OFF.  */

void
ipa_polymorphic_call_context::set_by_decl (tree base, HOST_WIDE_INT off)
{
  gcc_assert (DECL_P (base));
  clear_speculation ();

  if (!contains_polymorphic_type_p (TREE_TYPE (base)))
    {
      clear_outer_type ();
      offset = off;
      return;
    }
  outer_type = TYPE_MAIN_VARIANT (TREE_TYPE (base));
  offset = off;
  /* Make the very conservative assumption that all objects
     may be in construction.

     It is up to the caller to revisit this via
     get_dynamic_type or decl_maybe_in_construction_p.  */
  maybe_in_construction = true;
  maybe_derived_type = false;
}
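
/* Illustrative sketch (hypothetical C++ code, for exposition only):

     struct A { virtual void f (); };
     static A singleton;
     void call_it () { singleton.f (); }

   Here the instance is the DECL of SINGLETON at offset 0, so set_by_decl
   records OUTER_TYPE == A; MAYBE_IN_CONSTRUCTION stays conservatively true
   until decl_maybe_in_construction_p or get_dynamic_type proves otherwise.  */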
/* CST is an invariant (address of decl), try to get a meaningful
   polymorphic call context for a polymorphic call of a method
   of an instance of OTR_TYPE that is located at offset OFF of this invariant.
   Return FALSE if nothing meaningful can be found.  */

bool
ipa_polymorphic_call_context::set_by_invariant (tree cst,
						tree otr_type,
						HOST_WIDE_INT off)
{
  HOST_WIDE_INT offset2, size, max_size;
  tree base;

  clear_outer_type (otr_type);

  if (TREE_CODE (cst) != ADDR_EXPR)
    return false;

  cst = TREE_OPERAND (cst, 0);
  base = get_ref_base_and_extent (cst, &offset2, &size, &max_size);
  if (!DECL_P (base) || max_size == -1 || max_size != size)
    return false;

  /* Only type inconsistent programs can have otr_type that is
     not part of outer type.  */
  if (otr_type && !contains_type_p (TREE_TYPE (base), off, otr_type))
    return false;

  set_by_decl (base, off);
  return true;
}
/* See if OP is an SSA name initialized as a copy or by a single assignment.
   If so, walk the SSA graph up.  Because a simple PHI conditional is
   considered a copy, GLOBAL_VISITED may be used to avoid infinite loops
   walking the SSA graph.  */

static tree
walk_ssa_copies (tree op, hash_set<tree> **global_visited = NULL)
{
  hash_set<tree> *visited = NULL;

  while (TREE_CODE (op) == SSA_NAME
	 && !SSA_NAME_IS_DEFAULT_DEF (op)
	 && SSA_NAME_DEF_STMT (op)
	 && (gimple_assign_single_p (SSA_NAME_DEF_STMT (op))
	     || gimple_code (SSA_NAME_DEF_STMT (op)) == GIMPLE_PHI))
    {
      if (global_visited)
	{
	  if (!*global_visited)
	    *global_visited = new hash_set<tree>;
	  if ((*global_visited)->add (op))
	    goto done;
	}
      else
	{
	  if (!visited)
	    visited = new hash_set<tree>;
	  if (visited->add (op))
	    goto done;
	}
      /* Special case of a PHI with one non-NULL argument.
	 This pattern is implicitly produced for casts to non-primary
	 bases.  When doing context analysis, we do not really care
	 about the case the pointer is NULL, because the call will be
	 undefined anyway.  */
      if (gimple_code (SSA_NAME_DEF_STMT (op)) == GIMPLE_PHI)
	{
	  gimple phi = SSA_NAME_DEF_STMT (op);

	  if (gimple_phi_num_args (phi) > 2)
	    goto done;
	  if (gimple_phi_num_args (phi) == 1)
	    op = gimple_phi_arg_def (phi, 0);
	  else if (integer_zerop (gimple_phi_arg_def (phi, 0)))
	    op = gimple_phi_arg_def (phi, 1);
	  else if (integer_zerop (gimple_phi_arg_def (phi, 1)))
	    op = gimple_phi_arg_def (phi, 0);
	  else
	    goto done;
	}
      else
	{
	  if (gimple_assign_load_p (SSA_NAME_DEF_STMT (op)))
	    goto done;
	  op = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op));
	}
    }
done:
  if (visited)
    delete (visited);
  return op;
}
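
/* Illustrative sketch (hypothetical C++ code, for exposition only):

     struct A { virtual void f (); };
     struct B { virtual void g (); };
     struct C : A, B { };

     B *as_b (C *p) { return p; }	// cast to the non-primary base B

   The conversion is lowered with a null check, so the resulting pointer is
   a PHI of the adjusted address and a zero constant.  walk_ssa_copies looks
   through such a PHI by ignoring the zero argument, since a call through a
   null pointer is undefined anyway.  */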
/* Create polymorphic call context from IP invariant CST.
   This is typically &global_var.
   OTR_TYPE specifies the type of the polymorphic call or NULL if unknown,
   OFF is the offset of the call.  */

ipa_polymorphic_call_context::ipa_polymorphic_call_context (tree cst,
							    tree otr_type,
							    HOST_WIDE_INT off)
{
  clear_speculation ();
  set_by_invariant (cst, otr_type, off);
}
/* Build context for pointer REF contained in FNDECL at statement STMT.
   If INSTANCE is non-NULL, return a pointer to the object described by
   the context or the DECL the context is contained in.  */

ipa_polymorphic_call_context::ipa_polymorphic_call_context (tree fndecl,
							    tree ref,
							    gimple stmt,
							    tree *instance)
{
  tree otr_type = NULL;
  tree base_pointer;
  hash_set <tree> *visited = NULL;

  if (TREE_CODE (ref) == OBJ_TYPE_REF)
    {
      otr_type = obj_type_ref_class (ref);
      base_pointer = OBJ_TYPE_REF_OBJECT (ref);
    }
  else
    base_pointer = ref;

  /* Set up basic info in case we find nothing interesting in the analysis.  */
  clear_speculation ();
  clear_outer_type (otr_type);

  /* Walk SSA for outer object.  */
  do
    {
      base_pointer = walk_ssa_copies (base_pointer, &visited);
      if (TREE_CODE (base_pointer) == ADDR_EXPR)
	{
	  HOST_WIDE_INT size, max_size;
	  HOST_WIDE_INT offset2;
	  tree base = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
					       &offset2, &size, &max_size);

	  if (max_size != -1 && max_size == size)
	    combine_speculation_with (TYPE_MAIN_VARIANT (TREE_TYPE (base)),
				      offset + offset2,
				      true,
				      NULL /* Do not change outer type.  */);

	  /* If this is a varying address, punt.  */
	  if ((TREE_CODE (base) == MEM_REF || DECL_P (base))
	      && max_size != -1
	      && max_size == size)
	    {
	      /* We found dereference of a pointer.  Type of the pointer
		 and MEM_REF is meaningless, but we can look further.  */
	      if (TREE_CODE (base) == MEM_REF)
		{
		  base_pointer = TREE_OPERAND (base, 0);
		  offset
		    += offset2 + mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
		}
	      /* We found the base object.  In this case the outer_type
		 is known.  */
	      else if (DECL_P (base))
		{
		  /* Only type inconsistent programs can have otr_type that is
		     not part of outer type.  */
		  if (otr_type
		      && !contains_type_p (TREE_TYPE (base),
					   offset + offset2, otr_type))
		    {
		      if (instance)
			*instance = base_pointer;
		      return;
		    }
		  set_by_decl (base, offset + offset2);
		  if (outer_type && maybe_in_construction && stmt)
		    maybe_in_construction
		      = decl_maybe_in_construction_p (base,
						      outer_type,
						      stmt,
						      fndecl);
		  if (instance)
		    *instance = base;
		  return;
		}
	    }
	}
      else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
	       && tree_fits_uhwi_p (TREE_OPERAND (base_pointer, 1)))
	{
	  offset += tree_to_shwi (TREE_OPERAND (base_pointer, 1))
		    * BITS_PER_UNIT;
	  base_pointer = TREE_OPERAND (base_pointer, 0);
	}
      else
	break;
    }
  while (true);
  /* Try to determine the type of the outer object.  */
  if (TREE_CODE (base_pointer) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
      && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
    {
      /* See if parameter is THIS pointer of a method.  */
      if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
	  && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
	{
	  outer_type
	    = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
	  gcc_assert (TREE_CODE (outer_type) == RECORD_TYPE
		      || TREE_CODE (outer_type) == UNION_TYPE);

	  /* Dynamic casting has possibly upcasted the type
	     in the hierarchy.  In this case outer type is less
	     informative than inner type and we should forget
	     about it.  */
	  if ((otr_type
	       && !contains_type_p (outer_type, offset,
				    otr_type))
	      || !contains_polymorphic_type_p (outer_type))
	    {
	      if (instance)
		*instance = base_pointer;
	      return;
	    }

	  /* If the function is constructor or destructor, then
	     the type is possibly in construction, but we know
	     it is not derived type.  */
	  if (DECL_CXX_CONSTRUCTOR_P (fndecl)
	      || DECL_CXX_DESTRUCTOR_P (fndecl))
	    {
	      maybe_in_construction = true;
	      maybe_derived_type = false;
	    }
	  else
	    {
	      maybe_derived_type = true;
	      maybe_in_construction = false;
	    }
	  if (instance)
	    *instance = base_pointer;
	  return;
	}
      /* Non-PODs passed by value are really passed by invisible
	 reference.  In this case we also know the type of the
	 object.  */
      if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
	{
	  outer_type
	    = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
	  /* Only type inconsistent programs can have otr_type that is
	     not part of outer type.  */
	  if (otr_type && !contains_type_p (outer_type, offset,
					    otr_type))
	    {
	      if (instance)
		*instance = base_pointer;
	      return;
	    }
	  /* Non-polymorphic types have no interest for us.  */
	  else if (!otr_type && !contains_polymorphic_type_p (outer_type))
	    {
	      if (instance)
		*instance = base_pointer;
	      return;
	    }
	  maybe_derived_type = false;
	  maybe_in_construction = false;
	  if (instance)
	    *instance = base_pointer;
	  return;
	}
    }

  tree base_type = TREE_TYPE (base_pointer);

  if (TREE_CODE (base_pointer) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
      && TREE_CODE (SSA_NAME_VAR (base_pointer)) != PARM_DECL)
    {
      if (instance)
	*instance = base_pointer;
      return;
    }
  if (TREE_CODE (base_pointer) == SSA_NAME
      && SSA_NAME_DEF_STMT (base_pointer)
      && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
    base_type = TREE_TYPE (gimple_assign_rhs1
			    (SSA_NAME_DEF_STMT (base_pointer)));

  if (POINTER_TYPE_P (base_type))
    combine_speculation_with (TYPE_MAIN_VARIANT (TREE_TYPE (base_type)),
			      offset,
			      true, NULL /* Do not change type here.  */);
  /* TODO: There are multiple ways to derive a type.  For instance
     if BASE_POINTER is passed to a constructor call prior to our reference.
     We do not make this type of flow sensitive analysis yet.  */
  if (instance)
    *instance = base_pointer;
}
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree instance;
  /* The reference to virtual table pointer used.  */
  tree vtbl_ptr_ref;
  tree otr_type;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  HOST_WIDE_INT known_current_offset;

  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
  /* Set to true if we possibly missed some dynamic type changes and we should
     consider the set to be speculative.  */
  bool speculative;
  bool seen_unanalyzed_store;
};
/* Return true if STMT is not a call and can modify the virtual method table
   pointer.  We take advantage of the fact that vtable stores must appear
   within constructor and destructor functions.  */

static bool
noncall_stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (gimple_clobber_p (stmt))
	return false;
      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }

  /* Code unification may mess with inline stacks.  */
  if (cfun->after_inlining)
    return true;

  /* Walk the inline stack and watch out for ctors/dtors.
     TODO: Maybe we can require the store to appear in the toplevel
     block of the CTOR/DTOR.  */
  for (tree block = gimple_block (stmt); block && TREE_CODE (block) == BLOCK;
       block = BLOCK_SUPERCONTEXT (block))
    if (BLOCK_ABSTRACT_ORIGIN (block)
	&& TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
      {
	tree fn = BLOCK_ABSTRACT_ORIGIN (block);

	if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
	  return false;
	return (TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
		&& (DECL_CXX_CONSTRUCTOR_P (fn)
		    || DECL_CXX_DESTRUCTOR_P (fn)));
      }
  return (TREE_CODE (TREE_TYPE (current_function_decl)) == METHOD_TYPE
	  && (DECL_CXX_CONSTRUCTOR_P (current_function_decl)
	      || DECL_CXX_DESTRUCTOR_P (current_function_decl)));
}
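
/* Illustrative sketch (hypothetical C++ source and simplified GIMPLE, for
   exposition only):

     struct A { A () {} virtual void f (); };
     void build (void *buf) { new (buf) A; }

   After the constructor is inlined, the statement stream contains a store
   roughly of the form (on a typical 64-bit Itanium-ABI target)

     MEM[(struct A *)buf]._vptr.A = &_ZTV1A + 16;

   It is this kind of assignment, reached through the inline stack of a
   constructor or destructor, that the predicate above is meant to admit.  */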
/* If STMT can be proved to be an assignment to the virtual method table
   pointer of ANALYZED_OBJ and the type associated with the new table
   identified, return the type.  Otherwise return NULL_TREE if the type
   changes in an unknown way or ERROR_MARK_NODE if the type is unchanged.  */

static tree
extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci,
			       HOST_WIDE_INT *type_offset)
{
  HOST_WIDE_INT offset, size, max_size;
  tree lhs, rhs, base;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
    {
      if (dump_file)
	fprintf (dump_file, "  LHS is not virtual table.\n");
      return NULL_TREE;
    }

  if (tci->vtbl_ptr_ref && operand_equal_p (lhs, tci->vtbl_ptr_ref, 0))
    ;
  else
    {
      base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
      if (DECL_P (tci->instance))
	{
	  if (base != tci->instance)
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "    base:");
		  print_generic_expr (dump_file, base, TDF_SLIM);
		  fprintf (dump_file, " does not match instance:");
		  print_generic_expr (dump_file, tci->instance, TDF_SLIM);
		  fprintf (dump_file, "\n");
		}
	      return NULL_TREE;
	    }
	}
      else if (TREE_CODE (base) == MEM_REF)
	{
	  if (!operand_equal_p (tci->instance, TREE_OPERAND (base, 0), 0))
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "    base mem ref:");
		  print_generic_expr (dump_file, base, TDF_SLIM);
		  fprintf (dump_file, " does not match instance:");
		  print_generic_expr (dump_file, tci->instance, TDF_SLIM);
		  fprintf (dump_file, "\n");
		}
	      return NULL_TREE;
	    }
	  if (!integer_zerop (TREE_OPERAND (base, 1)))
	    {
	      if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
		{
		  if (dump_file)
		    {
		      fprintf (dump_file, "    base mem ref:");
		      print_generic_expr (dump_file, base, TDF_SLIM);
		      fprintf (dump_file, " has non-representable offset:");
		      print_generic_expr (dump_file, tci->instance, TDF_SLIM);
		      fprintf (dump_file, "\n");
		    }
		  return NULL_TREE;
		}
	      else
		offset += tree_to_shwi (TREE_OPERAND (base, 1)) * BITS_PER_UNIT;
	    }
	}
      else if (!operand_equal_p (tci->instance, base, 0))
	{
	  if (dump_file)
	    {
	      fprintf (dump_file, "    base:");
	      print_generic_expr (dump_file, base, TDF_SLIM);
	      fprintf (dump_file, " does not match instance:");
	      print_generic_expr (dump_file, tci->instance, TDF_SLIM);
	      fprintf (dump_file, " with offset %i\n", (int)tci->offset);
	    }
	  return tci->offset > GET_MODE_BITSIZE (Pmode)
		 ? error_mark_node : NULL_TREE;
	}
      if (offset != tci->offset
	  || size != POINTER_SIZE
	  || max_size != POINTER_SIZE)
	{
	  if (dump_file)
	    fprintf (dump_file, "    wrong offset %i!=%i or size %i\n",
		     (int)offset, (int)tci->offset, (int)size);
	  return (offset + GET_MODE_BITSIZE (Pmode) <= tci->offset
		  && tci->offset + GET_MODE_BITSIZE (Pmode) > offset + max_size)
		 ? error_mark_node : NULL;
	}
    }

  tree vtable;
  unsigned HOST_WIDE_INT offset2;

  if (!vtable_pointer_value_to_vtable (rhs, &vtable, &offset2))
    {
      if (dump_file)
	fprintf (dump_file, "    Failed to lookup binfo\n");
      return NULL;
    }

  tree binfo = subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
					       offset2, vtable);
  if (!binfo)
    {
      if (dump_file)
	fprintf (dump_file, "    Construction vtable used\n");
      /* FIXME: We should support construction contexts.  */
      return NULL;
    }

  *type_offset = tree_to_shwi (BINFO_OFFSET (binfo)) * BITS_PER_UNIT;
  return DECL_CONTEXT (vtable);
}
/* Record dynamic type change of TCI to TYPE.  */

static void
record_known_type (struct type_change_info *tci, tree type, HOST_WIDE_INT offset)
{
  if (dump_file)
    {
      if (type)
	{
	  fprintf (dump_file, "  Recording type: ");
	  print_generic_expr (dump_file, type, TDF_SLIM);
	  fprintf (dump_file, " at offset %i\n", (int)offset);
	}
      else
	fprintf (dump_file, "  Recording unknown type\n");
    }

  /* If we found a constructor of a type that is not polymorphic or
     that may contain the type in question as a field (not as base),
     restrict to the inner class first to make type matching below
     happier.  */
  if (type
      && (offset
	  || (TREE_CODE (type) != RECORD_TYPE
	      || !polymorphic_type_binfo_p (TYPE_BINFO (type)))))
    {
      ipa_polymorphic_call_context context;

      context.offset = offset;
      context.outer_type = type;
      context.maybe_in_construction = false;
      context.maybe_derived_type = false;
      context.dynamic = true;
      /* If we failed to find the inner type, we know that the call
	 would be undefined for the type produced here.  */
      if (!context.restrict_to_inner_class (tci->otr_type))
	{
	  if (dump_file)
	    fprintf (dump_file, "  Ignoring; does not contain otr_type\n");
	  return;
	}
      /* Watch for the case we reached a POD type and anticipate placement
	 new.  */
      if (!context.maybe_derived_type)
	{
	  type = context.outer_type;
	  offset = context.offset;
	}
    }
  if (tci->type_maybe_changed
      && (!types_same_for_odr (type, tci->known_current_type)
	  || offset != tci->known_current_offset))
    tci->multiple_types_encountered = true;
  tci->known_current_type = TYPE_MAIN_VARIANT (type);
  tci->known_current_offset = offset;
  tci->type_maybe_changed = true;
}
/* Callback of walk_aliased_vdefs and a helper function for
   detect_type_change to check whether a particular statement may modify
   the virtual table pointer, and if possible also determine the new type of
   the (sub-)object.  It stores its result into DATA, which points to a
   type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct type_change_info *tci = (struct type_change_info *) data;
  tree fn;

  /* If we already gave up, just terminate the rest of walk.  */
  if (tci->multiple_types_encountered)
    return true;

  if (is_gimple_call (stmt))
    {
      if (gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE))
	return false;

      /* Check for a constructor call.  */
      if ((fn = gimple_call_fndecl (stmt)) != NULL_TREE
	  && DECL_CXX_CONSTRUCTOR_P (fn)
	  && TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
	  && gimple_call_num_args (stmt))
	{
	  tree op = walk_ssa_copies (gimple_call_arg (stmt, 0));
	  tree type = method_class_type (TREE_TYPE (fn));
	  HOST_WIDE_INT offset = 0, size, max_size;

	  if (dump_file)
	    {
	      fprintf (dump_file, "  Checking constructor call: ");
	      print_gimple_stmt (dump_file, stmt, 0, 0);
	    }

	  /* See if THIS parameter seems like an instance pointer.  */
	  if (TREE_CODE (op) == ADDR_EXPR)
	    {
	      op = get_ref_base_and_extent (TREE_OPERAND (op, 0),
					    &offset, &size, &max_size);
	      if (size != max_size || max_size == -1)
		{
		  tci->speculative = true;
		  return false;
		}
	      if (op && TREE_CODE (op) == MEM_REF)
		{
		  if (!tree_fits_shwi_p (TREE_OPERAND (op, 1)))
		    {
		      tci->speculative = true;
		      return false;
		    }
		  offset += tree_to_shwi (TREE_OPERAND (op, 1))
			    * BITS_PER_UNIT;
		  op = TREE_OPERAND (op, 0);
		}
	      else if (DECL_P (op))
		;
	      else
		{
		  tci->speculative = true;
		  return false;
		}
	      op = walk_ssa_copies (op);
	    }
	  if (operand_equal_p (op, tci->instance, 0)
	      && TYPE_SIZE (type)
	      && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
	      && tree_fits_shwi_p (TYPE_SIZE (type))
	      && tree_to_shwi (TYPE_SIZE (type)) + offset > tci->offset)
	    {
	      record_known_type (tci, type, tci->offset - offset);
	      return true;
	    }
	}
      /* Calls may possibly change the dynamic type by placement new.  Assume
	 it will not happen, but make the result speculative only.  */
      if (dump_file)
	{
	  fprintf (dump_file, "  Function call may change dynamic type:");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	}
      tci->speculative = true;
      return false;
    }
  /* Check for inlined virtual table store.  */
  else if (noncall_stmt_may_be_vtbl_ptr_store (stmt))
    {
      tree type;
      HOST_WIDE_INT offset = 0;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "  Checking vtbl store: ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	}

      type = extr_type_from_vtbl_ptr_store (stmt, tci, &offset);
      if (type == error_mark_node)
	return false;
      gcc_assert (!type || TYPE_MAIN_VARIANT (type) == type);
      if (!type)
	{
	  if (dump_file)
	    fprintf (dump_file, "  Unanalyzed store may change type.\n");
	  tci->seen_unanalyzed_store = true;
	  tci->speculative = true;
	}
      else
	record_known_type (tci, type, offset);
      return true;
    }
  else
    return false;
}
/* THIS is polymorphic call context obtained from get_polymorphic_context.
   OTR_OBJECT is pointer to the instance returned by OBJ_TYPE_REF_OBJECT.
   INSTANCE is pointer to the outer instance as returned by
   get_polymorphic_context.  To avoid creation of temporary expressions,
   INSTANCE may also be a declaration, if get_polymorphic_context found the
   value to be in static storage.

   If the type of the instance is not fully determined
   (either OUTER_TYPE is unknown or MAYBE_IN_CONSTRUCTION/INCLUDE_DERIVED_TYPES
   is set), try to walk memory writes and find the actual construction of the
   instance.

   Return true if memory is unchanged from function entry.

   We do not include this analysis in the context analysis itself, because
   it needs memory SSA to be fully built and the walk may be expensive.
   So it is not suitable for use within fold_stmt and similar uses.  */
bool
ipa_polymorphic_call_context::get_dynamic_type (tree instance,
						tree otr_object,
						tree otr_type,
						gimple call)
{
  struct type_change_info tci;
  ao_ref ao;
  bool function_entry_reached = false;
  tree instance_ref = NULL;
  gimple stmt = call;
  /* Remember OFFSET before it is modified by restrict_to_inner_class.
     This is because we do not update INSTANCE when walking inwards.  */
  HOST_WIDE_INT instance_offset = offset;

  if (otr_type)
    otr_type = TYPE_MAIN_VARIANT (otr_type);

  /* Walk into inner type.  This may clear maybe_derived_type and save us
     from useless work.  It also makes later comparisons with the static type
     easier.  */
  if (outer_type && otr_type)
    {
      if (!restrict_to_inner_class (otr_type))
	return false;
    }

  if (!maybe_in_construction && !maybe_derived_type)
    return false;

  /* We need to obtain a reference to the virtual table pointer.  It is better
     to look it up in the code rather than build our own.  This requires a bit
     of pattern matching, but we end up verifying that what we found is
     correct.

     What we pattern match is:

       tmp = instance->_vptr.A;    // vtbl ptr load
       tmp2 = tmp[otr_token];	   // vtable lookup
       OBJ_TYPE_REF(tmp2;instance->0) (instance);

     We want to start the alias oracle walk from the vtbl pointer load,
     but we may not be able to identify it, for example, when PRE moved the
     load around.  */

  if (gimple_code (call) == GIMPLE_CALL)
    {
      tree ref = gimple_call_fn (call);
      HOST_WIDE_INT offset2, size, max_size;

      if (TREE_CODE (ref) == OBJ_TYPE_REF)
	{
	  ref = OBJ_TYPE_REF_EXPR (ref);
	  ref = walk_ssa_copies (ref);

	  /* Check if definition looks like vtable lookup.  */
	  if (TREE_CODE (ref) == SSA_NAME
	      && !SSA_NAME_IS_DEFAULT_DEF (ref)
	      && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref))
	      && TREE_CODE (gimple_assign_rhs1
			     (SSA_NAME_DEF_STMT (ref))) == MEM_REF)
	    {
	      ref = get_base_address
		     (TREE_OPERAND (gimple_assign_rhs1
				     (SSA_NAME_DEF_STMT (ref)), 0));
	      ref = walk_ssa_copies (ref);
	      /* Find base address of the lookup and see if it looks like
		 a vptr load.  */
	      if (TREE_CODE (ref) == SSA_NAME
		  && !SSA_NAME_IS_DEFAULT_DEF (ref)
		  && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref)))
		{
		  tree ref_exp = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (ref));
		  tree base_ref = get_ref_base_and_extent
				   (ref_exp, &offset2, &size, &max_size);

		  /* Finally verify that what we found looks like a read from
		     OTR_OBJECT or from INSTANCE with offset OFFSET.  */
		  if (base_ref
		      && ((TREE_CODE (base_ref) == MEM_REF
			   && ((offset2 == instance_offset
				&& TREE_OPERAND (base_ref, 0) == instance)
			       || (!offset2
				   && TREE_OPERAND (base_ref, 0) == otr_object)))
			  || (DECL_P (instance) && base_ref == instance
			      && offset2 == instance_offset)))
		    {
		      stmt = SSA_NAME_DEF_STMT (ref);
		      instance_ref = ref_exp;
		    }
		}
	    }
	}
    }
  /* If we failed to look up the reference in code, build our own.  */
  if (!instance_ref)
    {
      /* If the statement in question does not use memory, we can't tell
	 anything.  */
      if (!gimple_vuse (stmt))
	return false;
      ao_ref_init_from_ptr_and_size (&ao, otr_object, NULL);
    }
  else
    /* Otherwise use the real reference.  */
    ao_ref_init (&ao, instance_ref);

  /* We look for the vtbl pointer read.  */
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;
  if (otr_type)
    ao.ref_alias_set
      = get_deref_alias_set (TREE_TYPE (BINFO_VTABLE (TYPE_BINFO (otr_type))));

  if (dump_file)
    {
      fprintf (dump_file, "Determining dynamic type for call: ");
      print_gimple_stmt (dump_file, call, 0, 0);
      fprintf (dump_file, "  Starting walk at: ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
      fprintf (dump_file, "  instance pointer: ");
      print_generic_expr (dump_file, otr_object, TDF_SLIM);
      fprintf (dump_file, "  Outer instance pointer: ");
      print_generic_expr (dump_file, instance, TDF_SLIM);
      fprintf (dump_file, " offset: %i (bits)", (int)offset);
      fprintf (dump_file, " vtbl reference: ");
      print_generic_expr (dump_file, instance_ref, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  tci.offset = offset;
  tci.instance = instance;
  tci.vtbl_ptr_ref = instance_ref;
  gcc_assert (TREE_CODE (instance) != MEM_REF);
  tci.known_current_type = NULL_TREE;
  tci.known_current_offset = 0;
  tci.otr_type = otr_type;
  tci.type_maybe_changed = false;
  tci.multiple_types_encountered = false;
  tci.speculative = false;
  tci.seen_unanalyzed_store = false;

  walk_aliased_vdefs (&ao, gimple_vuse (stmt), check_stmt_for_type_change,
		      &tci, NULL, &function_entry_reached);
  /* If we did not find any type changing statements, we may still drop
     maybe_in_construction flag if the context already has an outer type.

     Here we make special assumptions about both constructors and
     destructors which are all the functions that are allowed to alter the
     VMT pointers.  It assumes that destructors begin with assignment into
     all VMT pointers and that constructors essentially look in the
     following way:

     1) The very first thing they do is that they call constructors of
	ancestor sub-objects that have them.

     2) Then VMT pointers of this and all its ancestors are set to new
	values corresponding to the type corresponding to the constructor.

     3) Only afterwards, other stuff such as constructor of member
	sub-objects and the code written by the user is run.  Only this may
	include calling virtual functions, directly or indirectly.

     4) placement new can not be used to change type of non-POD statically
	allocated variables.

     There is no way to call a constructor of an ancestor sub-object in any
     other way.

     This means that we do not have to care whether constructors get the
     correct type information because they will always change it (in fact,
     if we define the type to be given by the VMT pointer, it is undefined).

     The most important fact to derive from the above is that if, for some
     statement in the section 3, we try to detect whether the dynamic type
     has changed, we can safely ignore all calls as we examine the function
     body backwards until we reach statements in section 2 because these
     calls cannot be ancestor constructors or destructors (if the input is
     not bogus) and so do not change the dynamic type (this holds true only
     for automatically allocated objects but at the moment we devirtualize
     only these).  We then must detect that statements in section 2 change
     the dynamic type and can try to derive the new type.  That is enough
     and we can stop, we will never see the calls into constructors of
     sub-objects in this code.

     Therefore if the static outer type was found (outer_type)
     we can safely ignore tci.speculative that is set on calls and give up
     only if there was dynamic type store that may affect given variable
     (seen_unanalyzed_store).  */

  if (!tci.type_maybe_changed
      || (outer_type
	  && !tci.seen_unanalyzed_store
	  && !tci.multiple_types_encountered
	  && offset == tci.offset
	  && types_same_for_odr (tci.known_current_type,
				 outer_type)))
    {
      if (!outer_type || tci.seen_unanalyzed_store)
	return false;
      if (maybe_in_construction)
	maybe_in_construction = false;
      if (dump_file)
	fprintf (dump_file, "  No dynamic type change found.\n");
      return true;
    }

  if (tci.known_current_type
      && !function_entry_reached
      && !tci.multiple_types_encountered)
    {
      if (!tci.speculative)
	{
	  outer_type = TYPE_MAIN_VARIANT (tci.known_current_type);
	  offset = tci.known_current_offset;
	  maybe_in_construction = false;
	  maybe_derived_type = false;
	  if (dump_file)
	    fprintf (dump_file, "  Determined dynamic type.\n");
	}
      else if (!speculative_outer_type
	       || speculative_maybe_derived_type)
	{
	  speculative_outer_type = TYPE_MAIN_VARIANT (tci.known_current_type);
	  speculative_offset = tci.known_current_offset;
	  speculative_maybe_derived_type = false;
	  if (dump_file)
	    fprintf (dump_file, "  Determined speculative dynamic type.\n");
	}
    }
  else if (dump_file)
    {
      fprintf (dump_file, "  Found multiple types%s%s\n",
	       function_entry_reached ? " (function entry reached)" : "",
	       tci.multiple_types_encountered
	       ? " (multiple types encountered)" : "");
    }

  return false;
}
/* See if speculation given by SPEC_OUTER_TYPE, SPEC_OFFSET and
   SPEC_MAYBE_DERIVED_TYPE seems consistent (and useful) with what we already
   have in the non-speculative context.  */

bool
ipa_polymorphic_call_context::speculation_consistent_p (tree spec_outer_type,
							HOST_WIDE_INT spec_offset,
							bool spec_maybe_derived_type,
							tree otr_type) const
{
  if (!flag_devirtualize_speculatively)
    return false;

  /* Non-polymorphic types are useless for deriving likely polymorphic
     call targets.  */
  if (!spec_outer_type || !contains_polymorphic_type_p (spec_outer_type))
    return false;

  /* If we know nothing, speculation is always good.  */
  if (!outer_type)
    return true;

  /* Speculation is only useful to avoid derived types.
     This is not 100% true for placement new, where the outer context may
     turn out to be useless, but ignore these for now.  */
  if (!maybe_derived_type)
    return false;

  /* If types agree, speculation is consistent, but it makes sense only
     when it says something new.  */
  if (types_must_be_same_for_odr (spec_outer_type, outer_type))
    return maybe_derived_type && !spec_maybe_derived_type;

  /* If speculation does not contain the type in question, ignore it.  */
  if (otr_type
      && !contains_type_p (spec_outer_type, spec_offset, otr_type, false, true))
    return false;

  /* If outer type already contains speculation as a field,
     it is useless.  We already know from OUTER_TYPE
     SPEC_TYPE and that it is not in the construction.  */
  if (contains_type_p (outer_type, offset - spec_offset,
		       spec_outer_type, false, false))
    return false;

  /* If speculative outer type is not more specified than outer
     type, the speculation is useless.

     We can only decide this safely if we can compare types with OUTER_TYPE.  */
  if ((!in_lto_p || odr_type_p (outer_type))
      && !contains_type_p (spec_outer_type,
			   spec_offset - offset,
			   outer_type))
    return false;
  return true;
}
/* Improve THIS with speculation described by NEW_OUTER_TYPE, NEW_OFFSET and
   NEW_MAYBE_DERIVED_TYPE.
   If OTR_TYPE is set, assume the context is used with OTR_TYPE.  */

bool
ipa_polymorphic_call_context::combine_speculation_with
   (tree new_outer_type, HOST_WIDE_INT new_offset, bool new_maybe_derived_type,
    tree otr_type)
{
  if (!new_outer_type)
    return false;

  /* restrict_to_inner_class may eliminate wrong speculation making our job
     easier.  */
  if (otr_type)
    restrict_to_inner_class (otr_type);

  if (!speculation_consistent_p (new_outer_type, new_offset,
				 new_maybe_derived_type, otr_type))
    return false;

  /* New speculation is a win in case we have no speculation or new
     speculation does not consider derivations.  */
  if (!speculative_outer_type
      || (speculative_maybe_derived_type
	  && !new_maybe_derived_type))
    {
      speculative_outer_type = new_outer_type;
      speculative_offset = new_offset;
      speculative_maybe_derived_type = new_maybe_derived_type;
      return true;
    }
  else if (types_must_be_same_for_odr (speculative_outer_type,
				       new_outer_type))
    {
      if (speculative_offset != new_offset)
	{
	  /* OK we have two contexts that seem valid but they disagree,
	     just give up.

	     This is not a lattice operation, so we may want to drop it later.  */
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "Speculative outer types match, "
		     "offset mismatch -> invalid speculation\n");
	  clear_speculation ();
	  return true;
	}
      else
	{
	  if (speculative_maybe_derived_type && !new_maybe_derived_type)
	    {
	      speculative_maybe_derived_type = false;
	      return true;
	    }
	  else
	    return false;
	}
    }
  /* Choose the type that contains the other.  This one either contains the
     outer as a field (thus giving exactly one target) or is deeper in the
     type hierarchy.  */
  else if (speculative_outer_type
	   && speculative_maybe_derived_type
	   && (new_offset > speculative_offset
	       || (new_offset == speculative_offset
		   && contains_type_p (new_outer_type,
				       0, speculative_outer_type, false))))
    {
      tree old_outer_type = speculative_outer_type;
      HOST_WIDE_INT old_offset = speculative_offset;
      bool old_maybe_derived_type = speculative_maybe_derived_type;

      speculative_outer_type = new_outer_type;
      speculative_offset = new_offset;
      speculative_maybe_derived_type = new_maybe_derived_type;

      if (otr_type)
	restrict_to_inner_class (otr_type);

      /* If the speculation turned out to make no sense, revert to the
	 sensible one.  */
      if (!speculative_outer_type)
	{
	  speculative_outer_type = old_outer_type;
	  speculative_offset = old_offset;
	  speculative_maybe_derived_type = old_maybe_derived_type;
	  return false;
	}
      return (old_offset != speculative_offset
	      || old_maybe_derived_type != speculative_maybe_derived_type
	      || types_must_be_same_for_odr (speculative_outer_type,
					     new_outer_type));
    }
  return false;
}
/* Assume that both THIS and a given context are valid and strengthen THIS
   if possible.  Return true if any strengthening was made.
   If the actual type the context is being used in is known, OTR_TYPE should
   be set accordingly.  This improves quality of the combined result.  */

bool
ipa_polymorphic_call_context::combine_with (ipa_polymorphic_call_context ctx,
					    tree otr_type)
{
  bool updated = false;

  if (ctx.useless_p () || invalid)
    return false;

  /* Restricting context to inner type makes merging easier, however do not
     do that unless we know how the context is used (OTR_TYPE is non-NULL).  */
  if (otr_type && !invalid && !ctx.invalid)
    {
      restrict_to_inner_class (otr_type);
      ctx.restrict_to_inner_class (otr_type);
      if (invalid)
	goto invalidate;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Polymorphic call context combine:");
      dump (dump_file);
      fprintf (dump_file, "With context: ");
      ctx.dump (dump_file);
      if (otr_type)
	{
	  fprintf (dump_file, "To be used with type: ");
	  print_generic_expr (dump_file, otr_type, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}
    }

  /* If call is known to be invalid, we are done.  */
  if (ctx.invalid)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "-> Invalid context\n");
      goto invalidate;
    }

  if (!ctx.outer_type)
    ;
  else if (!outer_type)
    {
      outer_type = ctx.outer_type;
      offset = ctx.offset;
      dynamic = ctx.dynamic;
      maybe_in_construction = ctx.maybe_in_construction;
      maybe_derived_type = ctx.maybe_derived_type;
    }
  /* If types are known to be same, merging is quite easy.  */
  else if (types_must_be_same_for_odr (outer_type, ctx.outer_type))
    {
      if (offset != ctx.offset
	  && TYPE_SIZE (outer_type)
	  && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Outer types match, offset mismatch -> invalid\n");
	  clear_speculation ();
	  clear_outer_type ();
	  invalid = true;
	  return true;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Outer types match, merging flags\n");
      if (maybe_in_construction && !ctx.maybe_in_construction)
	{
	  updated = true;
	  maybe_in_construction = false;
	}
      if (maybe_derived_type && !ctx.maybe_derived_type)
	{
	  updated = true;
	  maybe_derived_type = false;
	}
      if (dynamic && !ctx.dynamic)
	{
	  updated = true;
	  dynamic = false;
	}
    }
  /* If we know the type precisely, there is not much to improve.  */
  else if (!maybe_derived_type && !maybe_in_construction
	   && !ctx.maybe_derived_type && !ctx.maybe_in_construction)
    {
      /* It may be easy to check if the second context permits the first
	 and set INVALID otherwise.  This is not easy to do in general;
	 contains_type_p may return false negatives for non-comparable
	 types.

	 If OTR_TYPE is known, we however can expect that
	 restrict_to_inner_class should have discovered the same base
	 type.  */
      if (otr_type && !ctx.maybe_in_construction && !ctx.maybe_derived_type)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Contexts disagree -> invalid\n");
	  goto invalidate;
	}
    }
  /* See if one type contains the other as a field (not base).
     In this case we want to choose the wider type, because it contains
     more information.  */
  else if (contains_type_p (ctx.outer_type, ctx.offset - offset,
			    outer_type, false, false))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Second type contains the first as a field\n");

      if (maybe_derived_type)
	{
	  outer_type = ctx.outer_type;
	  maybe_derived_type = ctx.maybe_derived_type;
	  offset = ctx.offset;
	  dynamic = ctx.dynamic;
	  updated = true;
	}

      /* If we do not know how the context is being used, we can
	 not clear MAYBE_IN_CONSTRUCTION because it may be offseted
	 to other component of OUTER_TYPE later and we know nothing
	 about it.  */
      if (otr_type && maybe_in_construction
	  && !ctx.maybe_in_construction)
	{
	  maybe_in_construction = false;
	  updated = true;
	}
    }
  else if (contains_type_p (outer_type, offset - ctx.offset,
			    ctx.outer_type, false, false))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "First type contains the second as a field\n");

      if (otr_type && maybe_in_construction
	  && !ctx.maybe_in_construction)
	{
	  maybe_in_construction = false;
	  updated = true;
	}
    }
  /* See if OUTER_TYPE is base of CTX.OUTER_TYPE.  */
  else if (contains_type_p (ctx.outer_type,
			    ctx.offset - offset, outer_type, false, true))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "First type is base of second\n");
      if (!maybe_derived_type)
	{
	  if (!ctx.maybe_in_construction
	      && types_odr_comparable (outer_type, ctx.outer_type))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "Second context does not permit base -> invalid\n");
	      goto invalidate;
	    }
	}
      /* Pick the variant deeper in the hierarchy.  */
      else
	{
	  outer_type = ctx.outer_type;
	  maybe_in_construction = ctx.maybe_in_construction;
	  maybe_derived_type = ctx.maybe_derived_type;
	  offset = ctx.offset;
	  dynamic = ctx.dynamic;
	  updated = true;
	}
    }
  /* See if CTX.OUTER_TYPE is base of OUTER_TYPE.  */
  else if (contains_type_p (outer_type,
			    offset - ctx.offset, ctx.outer_type, false, true))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Second type is base of first\n");
      if (!ctx.maybe_derived_type)
	{
	  if (!maybe_in_construction
	      && types_odr_comparable (outer_type, ctx.outer_type))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "First context does not permit base -> invalid\n");
	      goto invalidate;
	    }
	}
    }
  /* TODO handle merging using hierarchy.  */
  else if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Giving up on merge\n");

  updated |= combine_speculation_with (ctx.speculative_outer_type,
				       ctx.speculative_offset,
				       ctx.speculative_maybe_derived_type,
				       otr_type);

  if (updated && dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Updated as: ");
      dump (dump_file);
      fprintf (dump_file, "\n");
    }
  return updated;

invalidate:
  clear_speculation ();
  clear_outer_type ();
  invalid = true;
  return true;
}
/* Take non-speculative info, merge it with speculative and clear speculation.
   Used when we no longer manage to keep track of the actual outer type, but
   we think it is still there.

   If OTR_TYPE is set, the transformation can be done more effectively assuming
   that the context is going to be used only that way.  */

void
ipa_polymorphic_call_context::make_speculative (tree otr_type)
{
  tree spec_outer_type = outer_type;
  HOST_WIDE_INT spec_offset = offset;
  bool spec_maybe_derived_type = maybe_derived_type;

  if (invalid)
    {
      clear_outer_type ();
      clear_speculation ();
      return;
    }
  if (!outer_type)
    return;
  clear_outer_type ();
  combine_speculation_with (spec_outer_type, spec_offset,
			    spec_maybe_derived_type,
			    otr_type);
}
/* Use when we can not track dynamic type change.  This speculatively assumes
   type change is not happening.  */

void
ipa_polymorphic_call_context::possible_dynamic_type_change (bool in_poly_cdtor,
							     tree otr_type)
{
  if (dynamic)
    make_speculative (otr_type);
  else if (in_poly_cdtor)
    maybe_in_construction = true;
}
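
/* Illustrative sketch (hypothetical C++ code, for exposition only):

     struct A { virtual void f (); };
     void use (A *p) { p->f (); unknown_call (p); p->f (); }

   Between the two virtual calls nothing we can track changes *p, yet the
   unknown callee could in principle destroy the object and placement-new a
   different one into the same storage.  In such situations the caller uses
   possible_dynamic_type_change so that the previously known outer type is
   kept only as a speculation.  */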