1 /* Analysis of polymorphic call context.
2 Copyright (C) 2013-2014 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
26 #include "print-tree.h"
29 #include "tree-pass.h"
32 #include "hash-table.h"
34 #include "tree-pretty-print.h"
36 #include "basic-block.h"
39 #include "plugin-api.h"
43 #include "hard-reg-set.h"
48 #include "ipa-utils.h"
49 #include "tree-ssa-alias.h"
50 #include "internal-fn.h"
51 #include "gimple-fold.h"
52 #include "gimple-expr.h"
54 #include "alloc-pool.h"
56 #include "ipa-inline.h"
57 #include "diagnostic.h"
61 #include "gimple-pretty-print.h"
62 #include "stor-layout.h"
64 #include "data-streamer.h"
65 #include "lto-streamer.h"
66 #include "streamer-hooks.h"
68 /* Return true when TYPE contains an polymorphic type and thus is interesting
69 for devirtualization machinery. */
71 static bool contains_type_p (tree
, HOST_WIDE_INT
, tree
,
72 bool consider_placement_new
= true,
73 bool consider_bases
= true);
76 contains_polymorphic_type_p (const_tree type
)
78 type
= TYPE_MAIN_VARIANT (type
);
80 if (RECORD_OR_UNION_TYPE_P (type
))
83 && polymorphic_type_binfo_p (TYPE_BINFO (type
)))
85 for (tree fld
= TYPE_FIELDS (type
); fld
; fld
= DECL_CHAIN (fld
))
86 if (TREE_CODE (fld
) == FIELD_DECL
87 && !DECL_ARTIFICIAL (fld
)
88 && contains_polymorphic_type_p (TREE_TYPE (fld
)))
92 if (TREE_CODE (type
) == ARRAY_TYPE
)
93 return contains_polymorphic_type_p (TREE_TYPE (type
));
97 /* Return true if it seems valid to use placement new to build EXPECTED_TYPE
98 at possition CUR_OFFSET within TYPE.
100 POD can be changed to an instance of a polymorphic type by
101 placement new. Here we play safe and assume that any
102 non-polymorphic type is POD. */
104 possible_placement_new (tree type
, tree expected_type
,
105 HOST_WIDE_INT cur_offset
)
107 return ((TREE_CODE (type
) != RECORD_TYPE
108 || !TYPE_BINFO (type
)
109 || cur_offset
>= BITS_PER_WORD
110 || !polymorphic_type_binfo_p (TYPE_BINFO (type
)))
111 && (!TYPE_SIZE (type
)
112 || !tree_fits_shwi_p (TYPE_SIZE (type
))
114 + (expected_type
? tree_to_uhwi (TYPE_SIZE (expected_type
))
115 : GET_MODE_BITSIZE (Pmode
))
116 <= tree_to_uhwi (TYPE_SIZE (type
)))));
/* THIS->OUTER_TYPE is a type of memory object where object of OTR_TYPE
   is contained at THIS->OFFSET.  Walk the memory representation of
   THIS->OUTER_TYPE and find the outermost class type that matches
   OTR_TYPE or contains OTR_TYPE as a base.  Update THIS
   to represent it.

   If OTR_TYPE is NULL, just find the outermost polymorphic type with
   a virtual table present at position OFFSET.

   For example when THIS represents type
   class A
     {
       int a;
       class B b;
     }
   and we look for type at offset sizeof(int), we end up with B and offset 0.
   If the same is produced by multiple inheritance, we end up with A and offset
   sizeof(int).

   If we can not find the corresponding class, give up by setting
   THIS->OUTER_TYPE to OTR_TYPE and THIS->OFFSET to NULL.
   Return true when lookup was successful.

   When CONSIDER_PLACEMENT_NEW is false, reject contexts that may be made
   valid only via allocation of a new polymorphic type inside by means
   of placement new.

   When CONSIDER_BASES is false, only look for actual fields, not base types
   of TYPE.  */
150 ipa_polymorphic_call_context::restrict_to_inner_class (tree otr_type
,
151 bool consider_placement_new
,
154 tree type
= outer_type
;
155 HOST_WIDE_INT cur_offset
= offset
;
156 bool speculative
= false;
157 bool size_unknown
= false;
158 unsigned HOST_WIDE_INT otr_type_size
= GET_MODE_BITSIZE (Pmode
);
160 /* Update OUTER_TYPE to match EXPECTED_TYPE if it is not set. */
163 clear_outer_type (otr_type
);
167 /* See if OFFSET points inside OUTER_TYPE. If it does not, we know
168 that the context is either invalid, or the instance type must be
169 derived from OUTER_TYPE.
171 Because the instance type may contain field whose type is of OUTER_TYPE,
172 we can not derive any effective information about it.
174 TODO: In the case we know all derrived types, we can definitely do better
176 else if (TYPE_SIZE (outer_type
)
177 && tree_fits_shwi_p (TYPE_SIZE (outer_type
))
178 && tree_to_shwi (TYPE_SIZE (outer_type
)) >= 0
179 && tree_to_shwi (TYPE_SIZE (outer_type
)) <= offset
)
181 clear_outer_type (otr_type
);
185 /* If derived type is not allowed, we know that the context is invalid.
186 For dynamic types, we really do not have information about
187 size of the memory location. It is possible that completely
188 different type is stored after outer_type. */
189 if (!maybe_derived_type
&& !dynamic
)
191 clear_speculation ();
197 if (otr_type
&& TYPE_SIZE (otr_type
)
198 && tree_fits_shwi_p (TYPE_SIZE (otr_type
)))
199 otr_type_size
= tree_to_uhwi (TYPE_SIZE (otr_type
));
201 if (!type
|| offset
< 0)
202 goto no_useful_type_info
;
204 /* Find the sub-object the constant actually refers to and mark whether it is
205 an artificial one (as opposed to a user-defined one).
207 This loop is performed twice; first time for outer_type and second time
208 for speculative_outer_type. The second run has SPECULATIVE set. */
211 unsigned HOST_WIDE_INT pos
, size
;
214 /* If we do not know size of TYPE, we need to be more conservative
215 about accepting cases where we can not find EXPECTED_TYPE.
216 Generally the types that do matter here are of constant size.
217 Size_unknown case should be very rare. */
219 && tree_fits_shwi_p (TYPE_SIZE (type
))
220 && tree_to_shwi (TYPE_SIZE (type
)) >= 0)
221 size_unknown
= false;
225 /* On a match, just return what we found. */
227 && types_odr_comparable (type
, otr_type
)
228 && types_same_for_odr (type
, otr_type
))
230 && TREE_CODE (type
) == RECORD_TYPE
232 && polymorphic_type_binfo_p (TYPE_BINFO (type
))))
236 /* If we did not match the offset, just give up on speculation. */
238 /* Also check if speculation did not end up being same as
240 || (types_must_be_same_for_odr (speculative_outer_type
,
242 && (maybe_derived_type
243 == speculative_maybe_derived_type
)))
244 clear_speculation ();
249 /* If type is known to be final, do not worry about derived
250 types. Testing it here may help us to avoid speculation. */
251 if (otr_type
&& TREE_CODE (outer_type
) == RECORD_TYPE
252 && (!in_lto_p
|| odr_type_p (outer_type
))
253 && type_known_to_have_no_deriavations_p (outer_type
))
254 maybe_derived_type
= false;
256 /* Type can not contain itself on an non-zero offset. In that case
257 just give up. Still accept the case where size is now known.
258 Either the second copy may appear past the end of type or within
259 the non-POD buffer located inside the variably sized type
262 goto no_useful_type_info
;
263 /* If we determined type precisely or we have no clue on
264 speuclation, we are done. */
265 if (!maybe_derived_type
|| !speculative_outer_type
266 || !speculation_consistent_p (speculative_outer_type
,
268 speculative_maybe_derived_type
,
271 clear_speculation ();
274 /* Otherwise look into speculation now. */
278 type
= speculative_outer_type
;
279 cur_offset
= speculative_offset
;
285 /* Walk fields and find corresponding on at OFFSET. */
286 if (TREE_CODE (type
) == RECORD_TYPE
)
288 for (fld
= TYPE_FIELDS (type
); fld
; fld
= DECL_CHAIN (fld
))
290 if (TREE_CODE (fld
) != FIELD_DECL
)
293 pos
= int_bit_position (fld
);
294 if (pos
> (unsigned HOST_WIDE_INT
)cur_offset
)
297 /* Do not consider vptr itself. Not even for placement new. */
298 if (!pos
&& DECL_ARTIFICIAL (fld
)
299 && POINTER_TYPE_P (TREE_TYPE (fld
))
301 && polymorphic_type_binfo_p (TYPE_BINFO (type
)))
304 if (!DECL_SIZE (fld
) || !tree_fits_uhwi_p (DECL_SIZE (fld
)))
305 goto no_useful_type_info
;
306 size
= tree_to_uhwi (DECL_SIZE (fld
));
308 /* We can always skip types smaller than pointer size:
309 those can not contain a virtual table pointer.
311 Disqualifying fields that are too small to fit OTR_TYPE
312 saves work needed to walk them for no benefit.
313 Because of the way the bases are packed into a class, the
314 field's size may be smaller than type size, so it needs
315 to be done with a care. */
317 if (pos
<= (unsigned HOST_WIDE_INT
)cur_offset
318 && (pos
+ size
) >= (unsigned HOST_WIDE_INT
)cur_offset
319 + GET_MODE_BITSIZE (Pmode
)
321 || !TYPE_SIZE (TREE_TYPE (fld
))
322 || !tree_fits_shwi_p (TYPE_SIZE (TREE_TYPE (fld
)))
323 || (pos
+ tree_to_uhwi (TYPE_SIZE (TREE_TYPE (fld
))))
324 >= cur_offset
+ otr_type_size
))
329 goto no_useful_type_info
;
331 type
= TYPE_MAIN_VARIANT (TREE_TYPE (fld
));
333 /* DECL_ARTIFICIAL represents a basetype. */
334 if (!DECL_ARTIFICIAL (fld
))
340 /* As soon as we se an field containing the type,
341 we know we are not looking for derivations. */
342 maybe_derived_type
= false;
346 speculative_outer_type
= type
;
347 speculative_offset
= cur_offset
;
348 speculative_maybe_derived_type
= false;
351 else if (!consider_bases
)
352 goto no_useful_type_info
;
354 else if (TREE_CODE (type
) == ARRAY_TYPE
)
356 tree subtype
= TYPE_MAIN_VARIANT (TREE_TYPE (type
));
358 /* Give up if we don't know array field size.
359 Also give up on non-polymorphic types as they are used
360 as buffers for placement new. */
361 if (!TYPE_SIZE (subtype
)
362 || !tree_fits_shwi_p (TYPE_SIZE (subtype
))
363 || tree_to_shwi (TYPE_SIZE (subtype
)) <= 0
364 || !contains_polymorphic_type_p (subtype
))
365 goto no_useful_type_info
;
367 HOST_WIDE_INT new_offset
= cur_offset
% tree_to_shwi (TYPE_SIZE (subtype
));
369 /* We may see buffer for placement new. In this case the expected type
370 can be bigger than the subtype. */
371 if (TYPE_SIZE (subtype
)
372 && (cur_offset
+ otr_type_size
373 > tree_to_uhwi (TYPE_SIZE (subtype
))))
374 goto no_useful_type_info
;
376 cur_offset
= new_offset
;
382 maybe_derived_type
= false;
386 speculative_outer_type
= type
;
387 speculative_offset
= cur_offset
;
388 speculative_maybe_derived_type
= false;
391 /* Give up on anything else. */
395 if (maybe_derived_type
&& !speculative
396 && TREE_CODE (outer_type
) == RECORD_TYPE
397 && TREE_CODE (otr_type
) == RECORD_TYPE
398 && TYPE_BINFO (otr_type
)
400 && get_binfo_at_offset (TYPE_BINFO (otr_type
), 0, outer_type
))
402 clear_outer_type (otr_type
);
403 if (!speculative_outer_type
404 || !speculation_consistent_p (speculative_outer_type
,
406 speculative_maybe_derived_type
,
408 clear_speculation ();
409 if (speculative_outer_type
)
412 type
= speculative_outer_type
;
413 cur_offset
= speculative_offset
;
418 /* We found no way to embedd EXPECTED_TYPE in TYPE.
419 We still permit two special cases - placement new and
420 the case of variadic types containing themselves. */
422 && consider_placement_new
423 && (size_unknown
|| !type
|| maybe_derived_type
424 || possible_placement_new (type
, otr_type
, cur_offset
)))
426 /* In these weird cases we want to accept the context.
427 In non-speculative run we have no useful outer_type info
428 (TODO: we may eventually want to record upper bound on the
429 type size that can be used to prune the walk),
430 but we still want to consider speculation that may
434 clear_outer_type (otr_type
);
435 if (!speculative_outer_type
436 || !speculation_consistent_p (speculative_outer_type
,
438 speculative_maybe_derived_type
,
440 clear_speculation ();
441 if (speculative_outer_type
)
444 type
= speculative_outer_type
;
445 cur_offset
= speculative_offset
;
451 clear_speculation ();
456 clear_speculation ();
459 clear_outer_type (otr_type
);
467 /* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET.
468 CONSIDER_PLACEMENT_NEW makes function to accept cases where OTR_TYPE can
469 be built within OUTER_TYPE by means of placement new. CONSIDER_BASES makes
470 function to accept cases where OTR_TYPE appears as base of OUTER_TYPE or as
471 base of one of fields of OUTER_TYPE. */
474 contains_type_p (tree outer_type
, HOST_WIDE_INT offset
,
476 bool consider_placement_new
,
479 ipa_polymorphic_call_context context
;
481 /* Check that type is within range. */
484 if (TYPE_SIZE (outer_type
) && TYPE_SIZE (otr_type
)
485 && TREE_CODE (outer_type
) == INTEGER_CST
486 && TREE_CODE (otr_type
) == INTEGER_CST
487 && wi::ltu_p (wi::to_offset (outer_type
), (wi::to_offset (otr_type
) + offset
)))
490 context
.offset
= offset
;
491 context
.outer_type
= TYPE_MAIN_VARIANT (outer_type
);
492 context
.maybe_derived_type
= false;
493 return context
.restrict_to_inner_class (otr_type
, consider_placement_new
, consider_bases
);
497 /* We know that the instance is stored in variable or parameter
498 (not dynamically allocated) and we want to disprove the fact
499 that it may be in construction at invocation of CALL.
501 BASE represents memory location where instance is stored.
502 If BASE is NULL, it is assumed to be global memory.
503 OUTER_TYPE is known type of the instance or NULL if not
506 For the variable to be in construction we actually need to
507 be in constructor of corresponding global variable or
508 the inline stack of CALL must contain the constructor.
509 Check this condition. This check works safely only before
510 IPA passes, because inline stacks may become out of date
514 decl_maybe_in_construction_p (tree base
, tree outer_type
,
515 gimple call
, tree function
)
518 outer_type
= TYPE_MAIN_VARIANT (outer_type
);
519 gcc_assert (!base
|| DECL_P (base
));
521 /* After inlining the code unification optimizations may invalidate
522 inline stacks. Also we need to give up on global variables after
523 IPA, because addresses of these may have been propagated to their
525 if (DECL_STRUCT_FUNCTION (function
)->after_inlining
)
528 /* Pure functions can not do any changes on the dynamic type;
529 that require writting to memory. */
530 if ((!base
|| !auto_var_in_fn_p (base
, function
))
531 && flags_from_decl_or_type (function
) & (ECF_PURE
| ECF_CONST
))
534 for (tree block
= gimple_block (call
); block
&& TREE_CODE (block
) == BLOCK
;
535 block
= BLOCK_SUPERCONTEXT (block
))
536 if (BLOCK_ABSTRACT_ORIGIN (block
)
537 && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block
)) == FUNCTION_DECL
)
539 tree fn
= BLOCK_ABSTRACT_ORIGIN (block
);
541 if (TREE_CODE (TREE_TYPE (fn
)) != METHOD_TYPE
542 || (!DECL_CXX_CONSTRUCTOR_P (fn
)
543 && !DECL_CXX_DESTRUCTOR_P (fn
)))
545 /* Watch for clones where we constant propagated the first
546 argument (pointer to the instance). */
547 fn
= DECL_ABSTRACT_ORIGIN (fn
);
549 || (base
&& !is_global_var (base
))
550 || TREE_CODE (TREE_TYPE (fn
)) != METHOD_TYPE
551 || (!DECL_CXX_CONSTRUCTOR_P (fn
)
552 && !DECL_CXX_DESTRUCTOR_P (fn
)))
555 if (flags_from_decl_or_type (fn
) & (ECF_PURE
| ECF_CONST
))
558 tree type
= TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (fn
)));
560 if (!outer_type
|| !types_odr_comparable (type
, outer_type
))
562 if (TREE_CODE (type
) == RECORD_TYPE
564 && polymorphic_type_binfo_p (TYPE_BINFO (type
)))
567 else if (types_same_for_odr (type
, outer_type
))
571 if (!base
|| (TREE_CODE (base
) == VAR_DECL
&& is_global_var (base
)))
573 if (TREE_CODE (TREE_TYPE (function
)) != METHOD_TYPE
574 || (!DECL_CXX_CONSTRUCTOR_P (function
)
575 && !DECL_CXX_DESTRUCTOR_P (function
)))
577 if (!DECL_ABSTRACT_ORIGIN (function
))
579 /* Watch for clones where we constant propagated the first
580 argument (pointer to the instance). */
581 function
= DECL_ABSTRACT_ORIGIN (function
);
583 || TREE_CODE (TREE_TYPE (function
)) != METHOD_TYPE
584 || (!DECL_CXX_CONSTRUCTOR_P (function
)
585 && !DECL_CXX_DESTRUCTOR_P (function
)))
588 tree type
= TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (function
)));
589 if (!outer_type
|| !types_odr_comparable (type
, outer_type
))
591 if (TREE_CODE (type
) == RECORD_TYPE
593 && polymorphic_type_binfo_p (TYPE_BINFO (type
)))
596 else if (types_same_for_odr (type
, outer_type
))
602 /* Dump human readable context to F. */
605 ipa_polymorphic_call_context::dump (FILE *f
) const
609 fprintf (f
, "Call is known to be undefined");
613 fprintf (f
, "nothing known");
614 if (outer_type
|| offset
)
616 fprintf (f
, "Outer type%s:", dynamic
? " (dynamic)":"");
617 print_generic_expr (f
, outer_type
, TDF_SLIM
);
618 if (maybe_derived_type
)
619 fprintf (f
, " (or a derived type)");
620 if (maybe_in_construction
)
621 fprintf (f
, " (maybe in construction)");
622 fprintf (f
, " offset "HOST_WIDE_INT_PRINT_DEC
,
625 if (speculative_outer_type
)
627 if (outer_type
|| offset
)
629 fprintf (f
, "Speculative outer type:");
630 print_generic_expr (f
, speculative_outer_type
, TDF_SLIM
);
631 if (speculative_maybe_derived_type
)
632 fprintf (f
, " (or a derived type)");
633 fprintf (f
, " at offset "HOST_WIDE_INT_PRINT_DEC
,
640 /* Print context to stderr. */
643 ipa_polymorphic_call_context::debug () const
648 /* Stream out the context to OB. */
651 ipa_polymorphic_call_context::stream_out (struct output_block
*ob
) const
653 struct bitpack_d bp
= bitpack_create (ob
->main_stream
);
655 bp_pack_value (&bp
, invalid
, 1);
656 bp_pack_value (&bp
, maybe_in_construction
, 1);
657 bp_pack_value (&bp
, maybe_derived_type
, 1);
658 bp_pack_value (&bp
, speculative_maybe_derived_type
, 1);
659 bp_pack_value (&bp
, dynamic
, 1);
660 bp_pack_value (&bp
, outer_type
!= NULL
, 1);
661 bp_pack_value (&bp
, offset
!= 0, 1);
662 bp_pack_value (&bp
, speculative_outer_type
!= NULL
, 1);
663 streamer_write_bitpack (&bp
);
665 if (outer_type
!= NULL
)
666 stream_write_tree (ob
, outer_type
, true);
668 streamer_write_hwi (ob
, offset
);
669 if (speculative_outer_type
!= NULL
)
671 stream_write_tree (ob
, speculative_outer_type
, true);
672 streamer_write_hwi (ob
, speculative_offset
);
675 gcc_assert (!speculative_offset
);
678 /* Stream in the context from IB and DATA_IN. */
681 ipa_polymorphic_call_context::stream_in (struct lto_input_block
*ib
,
682 struct data_in
*data_in
)
684 struct bitpack_d bp
= streamer_read_bitpack (ib
);
686 invalid
= bp_unpack_value (&bp
, 1);
687 maybe_in_construction
= bp_unpack_value (&bp
, 1);
688 maybe_derived_type
= bp_unpack_value (&bp
, 1);
689 speculative_maybe_derived_type
= bp_unpack_value (&bp
, 1);
690 dynamic
= bp_unpack_value (&bp
, 1);
691 bool outer_type_p
= bp_unpack_value (&bp
, 1);
692 bool offset_p
= bp_unpack_value (&bp
, 1);
693 bool speculative_outer_type_p
= bp_unpack_value (&bp
, 1);
696 outer_type
= stream_read_tree (ib
, data_in
);
700 offset
= (HOST_WIDE_INT
) streamer_read_hwi (ib
);
703 if (speculative_outer_type_p
)
705 speculative_outer_type
= stream_read_tree (ib
, data_in
);
706 speculative_offset
= (HOST_WIDE_INT
) streamer_read_hwi (ib
);
710 speculative_outer_type
= NULL
;
711 speculative_offset
= 0;
715 /* Proudce polymorphic call context for call method of instance
716 that is located within BASE (that is assumed to be a decl) at offset OFF. */
719 ipa_polymorphic_call_context::set_by_decl (tree base
, HOST_WIDE_INT off
)
721 gcc_assert (DECL_P (base
));
722 clear_speculation ();
724 if (!contains_polymorphic_type_p (TREE_TYPE (base
)))
730 outer_type
= TYPE_MAIN_VARIANT (TREE_TYPE (base
));
732 /* Make very conservative assumption that all objects
733 may be in construction.
735 It is up to caller to revisit this via
736 get_dynamic_type or decl_maybe_in_construction_p. */
737 maybe_in_construction
= true;
738 maybe_derived_type
= false;
742 /* CST is an invariant (address of decl), try to get meaningful
743 polymorphic call context for polymorphic call of method
744 if instance of OTR_TYPE that is located at offset OFF of this invariant.
745 Return FALSE if nothing meaningful can be found. */
748 ipa_polymorphic_call_context::set_by_invariant (tree cst
,
752 HOST_WIDE_INT offset2
, size
, max_size
;
758 clear_outer_type (otr_type
);
760 if (TREE_CODE (cst
) != ADDR_EXPR
)
763 cst
= TREE_OPERAND (cst
, 0);
764 base
= get_ref_base_and_extent (cst
, &offset2
, &size
, &max_size
, &reverse
);
765 if (!DECL_P (base
) || max_size
== -1 || max_size
!= size
)
768 /* Only type inconsistent programs can have otr_type that is
769 not part of outer type. */
770 if (otr_type
&& !contains_type_p (TREE_TYPE (base
), off
, otr_type
))
773 set_by_decl (base
, off
);
777 /* See if OP is SSA name initialized as a copy or by single assignment.
778 If so, walk the SSA graph up. Because simple PHI conditional is considered
779 copy, GLOBAL_VISITED may be used to avoid infinite loop walking the SSA
783 walk_ssa_copies (tree op
, hash_set
<tree
> **global_visited
= NULL
)
785 hash_set
<tree
> *visited
= NULL
;
787 while (TREE_CODE (op
) == SSA_NAME
788 && !SSA_NAME_IS_DEFAULT_DEF (op
)
789 && SSA_NAME_DEF_STMT (op
)
790 && (gimple_assign_single_p (SSA_NAME_DEF_STMT (op
))
791 || gimple_code (SSA_NAME_DEF_STMT (op
)) == GIMPLE_PHI
))
795 if (!*global_visited
)
796 *global_visited
= new hash_set
<tree
>;
797 if ((*global_visited
)->add (op
))
803 visited
= new hash_set
<tree
>;
804 if (visited
->add (op
))
812 This pattern is implicitly produced for casts to non-primary
813 bases. When doing context analysis, we do not really care
814 about the case pointer is NULL, becuase the call will be
816 if (gimple_code (SSA_NAME_DEF_STMT (op
)) == GIMPLE_PHI
)
818 gimple phi
= SSA_NAME_DEF_STMT (op
);
820 if (gimple_phi_num_args (phi
) > 2)
822 if (gimple_phi_num_args (phi
) == 1)
823 op
= gimple_phi_arg_def (phi
, 0);
824 else if (integer_zerop (gimple_phi_arg_def (phi
, 0)))
825 op
= gimple_phi_arg_def (phi
, 1);
826 else if (integer_zerop (gimple_phi_arg_def (phi
, 1)))
827 op
= gimple_phi_arg_def (phi
, 0);
833 if (gimple_assign_load_p (SSA_NAME_DEF_STMT (op
)))
835 op
= gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op
));
845 /* Create polymorphic call context from IP invariant CST.
846 This is typically &global_var.
847 OTR_TYPE specify type of polymorphic call or NULL if unknown, OFF
848 is offset of call. */
850 ipa_polymorphic_call_context::ipa_polymorphic_call_context (tree cst
,
854 clear_speculation ();
855 set_by_invariant (cst
, otr_type
, off
);
858 /* Build context for pointer REF contained in FNDECL at statement STMT.
859 if INSTANCE is non-NULL, return pointer to the object described by
860 the context or DECL where context is contained in. */
862 ipa_polymorphic_call_context::ipa_polymorphic_call_context (tree fndecl
,
867 tree otr_type
= NULL
;
869 hash_set
<tree
> *visited
= NULL
;
871 if (TREE_CODE (ref
) == OBJ_TYPE_REF
)
873 otr_type
= obj_type_ref_class (ref
);
874 base_pointer
= OBJ_TYPE_REF_OBJECT (ref
);
879 /* Set up basic info in case we find nothing interesting in the analysis. */
880 clear_speculation ();
881 clear_outer_type (otr_type
);
884 /* Walk SSA for outer object. */
887 base_pointer
= walk_ssa_copies (base_pointer
, &visited
);
888 if (TREE_CODE (base_pointer
) == ADDR_EXPR
)
890 HOST_WIDE_INT size
, max_size
;
891 HOST_WIDE_INT offset2
;
894 = get_ref_base_and_extent (TREE_OPERAND (base_pointer
, 0),
895 &offset2
, &size
, &max_size
, &reverse
);
897 if (max_size
!= -1 && max_size
== size
)
898 combine_speculation_with (TYPE_MAIN_VARIANT (TREE_TYPE (base
)),
901 NULL
/* Do not change outer type. */);
903 /* If this is a varying address, punt. */
904 if ((TREE_CODE (base
) == MEM_REF
|| DECL_P (base
))
908 /* We found dereference of a pointer. Type of the pointer
909 and MEM_REF is meaningless, but we can look futher. */
910 if (TREE_CODE (base
) == MEM_REF
)
912 base_pointer
= TREE_OPERAND (base
, 0);
914 += offset2
+ mem_ref_offset (base
).to_short_addr () * BITS_PER_UNIT
;
917 /* We found base object. In this case the outer_type
919 else if (DECL_P (base
))
923 /* Only type inconsistent programs can have otr_type that is
924 not part of outer type. */
926 && !contains_type_p (TREE_TYPE (base
),
927 offset
+ offset2
, otr_type
))
931 *instance
= base_pointer
;
934 set_by_decl (base
, offset
+ offset2
);
935 if (outer_type
&& maybe_in_construction
&& stmt
)
936 maybe_in_construction
937 = decl_maybe_in_construction_p (base
,
951 else if (TREE_CODE (base_pointer
) == POINTER_PLUS_EXPR
952 && tree_fits_uhwi_p (TREE_OPERAND (base_pointer
, 1)))
954 offset
+= tree_to_shwi (TREE_OPERAND (base_pointer
, 1))
956 base_pointer
= TREE_OPERAND (base_pointer
, 0);
965 /* Try to determine type of the outer object. */
966 if (TREE_CODE (base_pointer
) == SSA_NAME
967 && SSA_NAME_IS_DEFAULT_DEF (base_pointer
)
968 && TREE_CODE (SSA_NAME_VAR (base_pointer
)) == PARM_DECL
)
970 /* See if parameter is THIS pointer of a method. */
971 if (TREE_CODE (TREE_TYPE (fndecl
)) == METHOD_TYPE
972 && SSA_NAME_VAR (base_pointer
) == DECL_ARGUMENTS (fndecl
))
975 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer
)));
976 gcc_assert (TREE_CODE (outer_type
) == RECORD_TYPE
977 || TREE_CODE (outer_type
) == UNION_TYPE
);
979 /* Dynamic casting has possibly upcasted the type
980 in the hiearchy. In this case outer type is less
981 informative than inner type and we should forget
984 && !contains_type_p (outer_type
, offset
,
986 || !contains_polymorphic_type_p (outer_type
))
990 *instance
= base_pointer
;
996 /* If the function is constructor or destructor, then
997 the type is possibly in construction, but we know
998 it is not derived type. */
999 if (DECL_CXX_CONSTRUCTOR_P (fndecl
)
1000 || DECL_CXX_DESTRUCTOR_P (fndecl
))
1002 maybe_in_construction
= true;
1003 maybe_derived_type
= false;
1007 maybe_derived_type
= true;
1008 maybe_in_construction
= false;
1011 *instance
= base_pointer
;
1014 /* Non-PODs passed by value are really passed by invisible
1015 reference. In this case we also know the type of the
1017 if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer
)))
1020 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer
)));
1021 /* Only type inconsistent programs can have otr_type that is
1022 not part of outer type. */
1023 if (otr_type
&& !contains_type_p (outer_type
, offset
,
1028 *instance
= base_pointer
;
1031 /* Non-polymorphic types have no interest for us. */
1032 else if (!otr_type
&& !contains_polymorphic_type_p (outer_type
))
1036 *instance
= base_pointer
;
1039 maybe_derived_type
= false;
1040 maybe_in_construction
= false;
1042 *instance
= base_pointer
;
1047 tree base_type
= TREE_TYPE (base_pointer
);
1049 if (TREE_CODE (base_pointer
) == SSA_NAME
1050 && SSA_NAME_IS_DEFAULT_DEF (base_pointer
)
1051 && TREE_CODE (SSA_NAME_VAR (base_pointer
)) != PARM_DECL
)
1055 *instance
= base_pointer
;
1058 if (TREE_CODE (base_pointer
) == SSA_NAME
1059 && SSA_NAME_DEF_STMT (base_pointer
)
1060 && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer
)))
1061 base_type
= TREE_TYPE (gimple_assign_rhs1
1062 (SSA_NAME_DEF_STMT (base_pointer
)));
1064 if (POINTER_TYPE_P (base_type
))
1065 combine_speculation_with (TYPE_MAIN_VARIANT (TREE_TYPE (base_type
)),
1067 true, NULL
/* Do not change type here */);
1068 /* TODO: There are multiple ways to derive a type. For instance
1069 if BASE_POINTER is passed to an constructor call prior our refernece.
1070 We do not make this type of flow sensitive analysis yet. */
1072 *instance
= base_pointer
;
1076 /* Structure to be passed in between detect_type_change and
1077 check_stmt_for_type_change. */
1079 struct type_change_info
1081 /* Offset into the object where there is the virtual method pointer we are
1083 HOST_WIDE_INT offset
;
1084 /* The declaration or SSA_NAME pointer of the base that we are checking for
1087 /* The reference to virtual table pointer used. */
1090 /* If we actually can tell the type that the object has changed to, it is
1091 stored in this field. Otherwise it remains NULL_TREE. */
1092 tree known_current_type
;
1093 HOST_WIDE_INT known_current_offset
;
1095 /* Set to true if dynamic type change has been detected. */
1096 bool type_maybe_changed
;
1097 /* Set to true if multiple types have been encountered. known_current_type
1098 must be disregarded in that case. */
1099 bool multiple_types_encountered
;
1100 /* Set to true if we possibly missed some dynamic type changes and we should
1101 consider the set to be speculative. */
1103 bool seen_unanalyzed_store
;
1106 /* Return true if STMT is not call and can modify a virtual method table pointer.
1107 We take advantage of fact that vtable stores must appear within constructor
1108 and destructor functions. */
1111 noncall_stmt_may_be_vtbl_ptr_store (gimple stmt
)
1113 if (is_gimple_assign (stmt
))
1115 tree lhs
= gimple_assign_lhs (stmt
);
1117 if (gimple_clobber_p (stmt
))
1119 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs
)))
1121 if (flag_strict_aliasing
1122 && !POINTER_TYPE_P (TREE_TYPE (lhs
)))
1125 if (TREE_CODE (lhs
) == COMPONENT_REF
1126 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs
, 1)))
1128 /* In the future we might want to use get_base_ref_and_offset to find
1129 if there is a field corresponding to the offset and if so, proceed
1130 almost like if it was a component ref. */
1134 /* Code unification may mess with inline stacks. */
1135 if (cfun
->after_inlining
)
1138 /* Walk the inline stack and watch out for ctors/dtors.
1139 TODO: Maybe we can require the store to appear in toplevel
1140 block of CTOR/DTOR. */
1141 for (tree block
= gimple_block (stmt
); block
&& TREE_CODE (block
) == BLOCK
;
1142 block
= BLOCK_SUPERCONTEXT (block
))
1143 if (BLOCK_ABSTRACT_ORIGIN (block
)
1144 && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block
)) == FUNCTION_DECL
)
1146 tree fn
= BLOCK_ABSTRACT_ORIGIN (block
);
1148 if (flags_from_decl_or_type (fn
) & (ECF_PURE
| ECF_CONST
))
1150 return (TREE_CODE (TREE_TYPE (fn
)) == METHOD_TYPE
1151 && (DECL_CXX_CONSTRUCTOR_P (fn
)
1152 || DECL_CXX_DESTRUCTOR_P (fn
)));
1154 return (TREE_CODE (TREE_TYPE (current_function_decl
)) == METHOD_TYPE
1155 && (DECL_CXX_CONSTRUCTOR_P (current_function_decl
)
1156 || DECL_CXX_DESTRUCTOR_P (current_function_decl
)));
1159 /* If STMT can be proved to be an assignment to the virtual method table
1160 pointer of ANALYZED_OBJ and the type associated with the new table
1161 identified, return the type. Otherwise return NULL_TREE if type changes
1162 in unknown way or ERROR_MARK_NODE if type is unchanged. */
/* NOTE(review): this chunk is a garbled extraction -- braces, several
   returns and some guard lines appear to be missing.  Code tokens are
   kept verbatim; verify against upstream before relying on structure.  */
1165 extr_type_from_vtbl_ptr_store (gimple stmt
, struct type_change_info
*tci
,
1166 HOST_WIDE_INT
*type_offset
)
1168 HOST_WIDE_INT offset
, size
, max_size
;
1169 tree lhs
, rhs
, base
;
/* Only a plain single-RHS assignment can be a vtbl pointer store.  */
1172 if (!gimple_assign_single_p (stmt
))
1175 lhs
= gimple_assign_lhs (stmt
);
1176 rhs
= gimple_assign_rhs1 (stmt
);
/* The LHS must be a COMPONENT_REF of an artificial virtual-table-pointer
   field (DECL_VIRTUAL_P); anything else is not a vtbl store.  */
1177 if (TREE_CODE (lhs
) != COMPONENT_REF
1178 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs
, 1)))
1181 fprintf (dump_file
, " LHS is not virtual table.\n");
/* Fast path: the store writes exactly the vtbl pointer reference
   tracked in TCI->vtbl_ptr_ref.  */
1185 if (tci
->vtbl_ptr_ref
&& operand_equal_p (lhs
, tci
->vtbl_ptr_ref
, 0))
/* Otherwise decompose the LHS and check that its base matches
   TCI->instance (as a DECL, a MEM_REF of it, or directly).  */
1189 base
= get_ref_base_and_extent (lhs
, &offset
, &size
, &max_size
, &reverse
);
1190 if (DECL_P (tci
->instance
))
1192 if (base
!= tci
->instance
)
1196 fprintf (dump_file
, " base:");
1197 print_generic_expr (dump_file
, base
, TDF_SLIM
);
1198 fprintf (dump_file
, " does not match instance:");
1199 print_generic_expr (dump_file
, tci
->instance
, TDF_SLIM
);
1200 fprintf (dump_file
, "\n");
1205 else if (TREE_CODE (base
) == MEM_REF
)
1207 if (!operand_equal_p (tci
->instance
, TREE_OPERAND (base
, 0), 0))
1211 fprintf (dump_file
, " base mem ref:");
1212 print_generic_expr (dump_file
, base
, TDF_SLIM
);
1213 fprintf (dump_file
, " does not match instance:");
1214 print_generic_expr (dump_file
, tci
->instance
, TDF_SLIM
);
1215 fprintf (dump_file
, "\n");
/* A nonzero MEM_REF offset is folded into OFFSET (in bits) when it is
   host-representable; otherwise we cannot reason about the store.  */
1219 if (!integer_zerop (TREE_OPERAND (base
, 1)))
1221 if (!tree_fits_shwi_p (TREE_OPERAND (base
, 1)))
1225 fprintf (dump_file
, " base mem ref:");
1226 print_generic_expr (dump_file
, base
, TDF_SLIM
);
1227 fprintf (dump_file
, " has non-representable offset:");
1228 print_generic_expr (dump_file
, tci
->instance
, TDF_SLIM
);
1229 fprintf (dump_file
, "\n");
1234 offset
+= tree_to_shwi (TREE_OPERAND (base
, 1)) * BITS_PER_UNIT
;
1237 else if (!operand_equal_p (tci
->instance
, base
, 0)
1242 fprintf (dump_file
, " base:");
1243 print_generic_expr (dump_file
, base
, TDF_SLIM
);
1244 fprintf (dump_file
, " does not match instance:");
1245 print_generic_expr (dump_file
, tci
->instance
, TDF_SLIM
);
1246 fprintf (dump_file
, " with offset %i\n", (int)tci
->offset
);
1248 return tci
->offset
> GET_MODE_BITSIZE (Pmode
) ? error_mark_node
: NULL_TREE
;
/* The store must hit exactly the tracked vtbl pointer slot: same
   offset as TCI->offset and exactly pointer-sized.  */
1250 if (offset
!= tci
->offset
1251 || size
!= POINTER_SIZE
1252 || max_size
!= POINTER_SIZE
)
1255 fprintf (dump_file
, " wrong offset %i!=%i or size %i\n",
1256 (int)offset
, (int)tci
->offset
, (int)size
);
1257 return offset
+ GET_MODE_BITSIZE (Pmode
) <= tci
->offset
1259 && tci
->offset
+ GET_MODE_BITSIZE (Pmode
) > offset
+ max_size
)
1260 ? error_mark_node
: NULL
;
/* The RHS must resolve to a known vtable address; translate it back to
   the class type that owns the vtable.  */
1265 unsigned HOST_WIDE_INT offset2
;
1267 if (!vtable_pointer_value_to_vtable (rhs
, &vtable
, &offset2
))
1270 fprintf (dump_file
, " Failed to lookup binfo\n");
1274 tree binfo
= subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable
)),
1279 fprintf (dump_file
, " Construction vtable used\n");
1280 /* FIXME: We should support construction contexts. */
1284 *type_offset
= tree_to_shwi (BINFO_OFFSET (binfo
)) * BITS_PER_UNIT
;
1285 return DECL_CONTEXT (vtable
);
1288 /* Record dynamic type change of TCI to TYPE. */
/* NOTE(review): garbled extraction -- some lines (braces, dump_file
   guards) are missing; code tokens kept verbatim.  */
1291 record_known_type (struct type_change_info
*tci
, tree type
, HOST_WIDE_INT offset
)
1297 fprintf (dump_file
, " Recording type: ");
1298 print_generic_expr (dump_file
, type
, TDF_SLIM
);
1299 fprintf (dump_file
, " at offset %i\n", (int)offset
);
1302 fprintf (dump_file
, " Recording unknown type\n");
1305 /* If we found a constructor of type that is not polymorphic or
1306 that may contain the type in question as a field (not as base),
1307 restrict to the inner class first to make type matching below
1311 || (TREE_CODE (type
) != RECORD_TYPE
1312 || !polymorphic_type_binfo_p (TYPE_BINFO (type
)))))
/* Build a throwaway context describing TYPE at OFFSET so that
   restrict_to_inner_class can walk inward to the class actually
   containing the tracked vtbl pointer.  */
1314 ipa_polymorphic_call_context context
;
1316 context
.offset
= offset
;
1317 context
.outer_type
= type
;
1318 context
.maybe_in_construction
= false;
1319 context
.maybe_derived_type
= false;
1320 context
.dynamic
= true;
1321 /* If we failed to find the inner type, we know that the call
1322 would be undefined for type produced here. */
1323 if (!context
.restrict_to_inner_class (tci
->otr_type
))
1326 fprintf (dump_file
, " Ignoring; does not contain otr_type\n");
1329 /* Watch for case we reached a POD type and anticipate placement
1331 if (!context
.maybe_derived_type
)
1333 type
= context
.outer_type
;
1334 offset
= context
.offset
;
/* If a different (type, offset) was already recorded, remember the
   conflict -- the walk result is then unusable.  */
1337 if (tci
->type_maybe_changed
1338 && (!types_same_for_odr (type
, tci
->known_current_type
)
1339 || offset
!= tci
->known_current_offset
))
1340 tci
->multiple_types_encountered
= true;
1341 tci
->known_current_type
= TYPE_MAIN_VARIANT (type
);
1342 tci
->known_current_offset
= offset
;
1343 tci
->type_maybe_changed
= true;
1346 /* Callback of walk_aliased_vdefs and a helper function for
1347 detect_type_change to check whether a particular statement may modify
1348 the virtual table pointer, and if possible also determine the new type of
1349 the (sub-)object. It stores its result into DATA, which points to a
1350 type_change_info structure. */
/* NOTE(review): garbled extraction -- returns and braces are missing in
   places; code tokens kept verbatim.  */
1353 check_stmt_for_type_change (ao_ref
*ao ATTRIBUTE_UNUSED
, tree vdef
, void *data
)
1355 gimple stmt
= SSA_NAME_DEF_STMT (vdef
);
1356 struct type_change_info
*tci
= (struct type_change_info
*) data
;
1359 /* If we already gave up, just terminate the rest of walk. */
1360 if (tci
->multiple_types_encountered
)
/* Calls: const/pure calls cannot store into the object; a C++
   constructor call, however, establishes a new dynamic type.  */
1363 if (is_gimple_call (stmt
))
1365 if (gimple_call_flags (stmt
) & (ECF_CONST
| ECF_PURE
))
1368 /* Check for a constructor call. */
1369 if ((fn
= gimple_call_fndecl (stmt
)) != NULL_TREE
1370 && DECL_CXX_CONSTRUCTOR_P (fn
)
1371 && TREE_CODE (TREE_TYPE (fn
)) == METHOD_TYPE
1372 && gimple_call_num_args (stmt
))
1374 tree op
= walk_ssa_copies (gimple_call_arg (stmt
, 0));
1375 tree type
= method_class_type (TREE_TYPE (fn
));
1376 HOST_WIDE_INT offset
= 0, size
, max_size
;
1381 fprintf (dump_file
, " Checking constructor call: ");
1382 print_gimple_stmt (dump_file
, stmt
, 0, 0);
1385 /* See if THIS parameter seems like instance pointer. */
1386 if (TREE_CODE (op
) == ADDR_EXPR
)
1388 op
= get_ref_base_and_extent (TREE_OPERAND (op
, 0), &offset
,
1389 &size
, &max_size
, &reverse
);
/* A variable-sized or unknown-extent reference cannot be matched
   reliably; the result becomes speculative only.  */
1390 if (size
!= max_size
|| max_size
== -1)
1392 tci
->speculative
= true;
1395 if (op
&& TREE_CODE (op
) == MEM_REF
)
1397 if (!tree_fits_shwi_p (TREE_OPERAND (op
, 1)))
1399 tci
->speculative
= true;
1402 offset
+= tree_to_shwi (TREE_OPERAND (op
, 1))
1404 op
= TREE_OPERAND (op
, 0);
1406 else if (DECL_P (op
))
1410 tci
->speculative
= true;
1413 op
= walk_ssa_copies (op
);
/* The constructor sets the type only if its THIS points into
   TCI->instance and the constructed object covers the tracked
   offset.  */
1415 if (operand_equal_p (op
, tci
->instance
, 0)
1417 && TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
1418 && tree_fits_shwi_p (TYPE_SIZE (type
))
1419 && tree_to_shwi (TYPE_SIZE (type
)) + offset
> tci
->offset
)
1421 record_known_type (tci
, type
, tci
->offset
- offset
);
1425 /* Calls may possibly change dynamic type by placement new. Assume
1426 it will not happen, but make result speculative only. */
1429 fprintf (dump_file
, " Function call may change dynamic type:");
1430 print_gimple_stmt (dump_file
, stmt
, 0, 0);
1432 tci
->speculative
= true;
1435 /* Check for inlined virtual table store. */
1436 else if (noncall_stmt_may_be_vtbl_ptr_store (stmt
))
1439 HOST_WIDE_INT offset
= 0;
1442 fprintf (dump_file
, " Checking vtbl store: ");
1443 print_gimple_stmt (dump_file
, stmt
, 0, 0);
/* error_mark_node from extr_type_from_vtbl_ptr_store means "type
   unchanged"; NULL means the store could not be analyzed.  */
1446 type
= extr_type_from_vtbl_ptr_store (stmt
, tci
, &offset
);
1447 if (type
== error_mark_node
)
1449 gcc_assert (!type
|| TYPE_MAIN_VARIANT (type
) == type
);
1453 fprintf (dump_file
, " Unanalyzed store may change type.\n");
1454 tci
->seen_unanalyzed_store
= true;
1455 tci
->speculative
= true;
1458 record_known_type (tci
, type
, offset
);
1465 /* THIS is polymorphic call context obtained from get_polymorphic_context.
1466 OTR_OBJECT is pointer to the instance returned by OBJ_TYPE_REF_OBJECT.
1467 INSTANCE is pointer to the outer instance as returned by
1468 get_polymorphic_context. To avoid creation of temporary expressions,
1469 INSTANCE may also be a declaration if get_polymorphic_context found the
1470 value to be in static storage.
1472 If the type of instance is not fully determined
1473 (either OUTER_TYPE is unknown or MAYBE_IN_CONSTRUCTION/INCLUDE_DERIVED_TYPES
1474 is set), try to walk memory writes and find the actual construction of the
1477 Return true if memory is unchanged from function entry.
1479 We do not include this analysis in the context analysis itself, because
1480 it needs memory SSA to be fully built and the walk may be expensive.
1481 So it is not suitable for use within fold_stmt and similar uses. */
/* NOTE(review): garbled extraction -- braces, early returns and several
   guard lines are missing; code tokens kept verbatim.  */
1484 ipa_polymorphic_call_context::get_dynamic_type (tree instance
,
1489 struct type_change_info tci
;
1491 bool function_entry_reached
= false;
1492 tree instance_ref
= NULL
;
1494 /* Remember OFFSET before it is modified by restrict_to_inner_class.
1495 This is because we do not update INSTANCE when walking inwards. */
1496 HOST_WIDE_INT instance_offset
= offset
;
1499 otr_type
= TYPE_MAIN_VARIANT (otr_type
);
1501 /* Walk into inner type. This may clear maybe_derived_type and save us
1502 from useless work. It also makes later comparisons with static type
1504 if (outer_type
&& otr_type
)
1506 if (!restrict_to_inner_class (otr_type
))
1510 if (!maybe_in_construction
&& !maybe_derived_type
)
1513 /* We need to obtain reference to virtual table pointer. It is better
1514 to look it up in the code rather than build our own. This requires a bit
1515 of pattern matching, but we end up verifying that what we found is
1518 What we pattern match is:
1520 tmp = instance->_vptr.A; // vtbl ptr load
1521 tmp2 = tmp[otr_token]; // vtable lookup
1522 OBJ_TYPE_REF(tmp2;instance->0) (instance);
1524 We want to start alias oracle walk from vtbl pointer load,
1525 but we may not be able to identify it, for example, when PRE moved the
1528 if (gimple_code (call
) == GIMPLE_CALL
)
1530 tree ref
= gimple_call_fn (call
);
1531 HOST_WIDE_INT offset2
, size
, max_size
;
1534 if (TREE_CODE (ref
) == OBJ_TYPE_REF
)
1536 ref
= OBJ_TYPE_REF_EXPR (ref
);
1537 ref
= walk_ssa_copies (ref
);
1539 /* Check if definition looks like vtable lookup. */
1540 if (TREE_CODE (ref
) == SSA_NAME
1541 && !SSA_NAME_IS_DEFAULT_DEF (ref
)
1542 && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref
))
1543 && TREE_CODE (gimple_assign_rhs1
1544 (SSA_NAME_DEF_STMT (ref
))) == MEM_REF
)
1546 ref
= get_base_address
1547 (TREE_OPERAND (gimple_assign_rhs1
1548 (SSA_NAME_DEF_STMT (ref
)), 0));
1549 ref
= walk_ssa_copies (ref
);
1550 /* Find base address of the lookup and see if it looks like
1552 if (TREE_CODE (ref
) == SSA_NAME
1553 && !SSA_NAME_IS_DEFAULT_DEF (ref
)
1554 && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref
)))
1556 tree ref_exp
= gimple_assign_rhs1 (SSA_NAME_DEF_STMT (ref
));
1558 = get_ref_base_and_extent (ref_exp
, &offset2
, &size
,
1559 &max_size
, &reverse
);
1561 /* Finally verify that what we found looks like read from OTR_OBJECT
1562 or from INSTANCE with offset OFFSET. */
1564 && ((TREE_CODE (base_ref
) == MEM_REF
1565 && ((offset2
== instance_offset
1566 && TREE_OPERAND (base_ref
, 0) == instance
)
1567 || (!offset2
&& TREE_OPERAND (base_ref
, 0) == otr_object
)))
1568 || (DECL_P (instance
) && base_ref
== instance
1569 && offset2
== instance_offset
)))
1571 stmt
= SSA_NAME_DEF_STMT (ref
);
1572 instance_ref
= ref_exp
;
1579 /* If we failed to look up the reference in code, build our own. */
1582 /* If the statement in question does not use memory, we can't tell
1584 if (!gimple_vuse (stmt
))
1586 ao_ref_init_from_ptr_and_size (&ao
, otr_object
, NULL
);
1589 /* Otherwise use the real reference. */
1590 ao_ref_init (&ao
, instance_ref
);
1592 /* We look for vtbl pointer read. */
1593 ao
.size
= POINTER_SIZE
;
1594 ao
.max_size
= ao
.size
;
1597 = get_deref_alias_set (TREE_TYPE (BINFO_VTABLE (TYPE_BINFO (otr_type
))));
1601 fprintf (dump_file
, "Determining dynamic type for call: ");
1602 print_gimple_stmt (dump_file
, call
, 0, 0);
1603 fprintf (dump_file
, " Starting walk at: ");
1604 print_gimple_stmt (dump_file
, stmt
, 0, 0);
1605 fprintf (dump_file
, " instance pointer: ");
1606 print_generic_expr (dump_file
, otr_object
, TDF_SLIM
);
1607 fprintf (dump_file
, " Outer instance pointer: ");
1608 print_generic_expr (dump_file
, instance
, TDF_SLIM
);
1609 fprintf (dump_file
, " offset: %i (bits)", (int)offset
);
1610 fprintf (dump_file
, " vtbl reference: ");
1611 print_generic_expr (dump_file
, instance_ref
, TDF_SLIM
);
1612 fprintf (dump_file
, "\n");
/* Set up the walk descriptor consumed by check_stmt_for_type_change.  */
1615 tci
.offset
= offset
;
1616 tci
.instance
= instance
;
1617 tci
.vtbl_ptr_ref
= instance_ref
;
1618 gcc_assert (TREE_CODE (instance
) != MEM_REF
);
1619 tci
.known_current_type
= NULL_TREE
;
1620 tci
.known_current_offset
= 0;
1621 tci
.otr_type
= otr_type
;
1622 tci
.type_maybe_changed
= false;
1623 tci
.multiple_types_encountered
= false;
1624 tci
.speculative
= false;
1625 tci
.seen_unanalyzed_store
= false;
1627 walk_aliased_vdefs (&ao
, gimple_vuse (stmt
), check_stmt_for_type_change
,
1628 &tci
, NULL
, &function_entry_reached
);
1630 /* If we did not find any type changing statements, we may still drop
1631 maybe_in_construction flag if the context already has outer type.
1633 Here we make special assumptions about both constructors and
1634 destructors which are all the functions that are allowed to alter the
1635 VMT pointers. It assumes that destructors begin with assignment into
1636 all VMT pointers and that constructors essentially look in the
1639 1) The very first thing they do is that they call constructors of
1640 ancestor sub-objects that have them.
1642 2) Then VMT pointers of this and all its ancestors is set to new
1643 values corresponding to the type corresponding to the constructor.
1645 3) Only afterwards, other stuff such as constructor of member
1646 sub-objects and the code written by the user is run. Only this may
1647 include calling virtual functions, directly or indirectly.
1649 4) placement new can not be used to change type of non-POD statically
1650 allocated variables.
1652 There is no way to call a constructor of an ancestor sub-object in any
1655 This means that we do not have to care whether constructors get the
1656 correct type information because they will always change it (in fact,
1657 if we define the type to be given by the VMT pointer, it is undefined).
1659 The most important fact to derive from the above is that if, for some
1660 statement in the section 3, we try to detect whether the dynamic type
1661 has changed, we can safely ignore all calls as we examine the function
1662 body backwards until we reach statements in section 2 because these
1663 calls cannot be ancestor constructors or destructors (if the input is
1664 not bogus) and so do not change the dynamic type (this holds true only
1665 for automatically allocated objects but at the moment we devirtualize
1666 only these). We then must detect that statements in section 2 change
1667 the dynamic type and can try to derive the new type. That is enough
1668 and we can stop, we will never see the calls into constructors of
1669 sub-objects in this code.
1671 Therefore if the static outer type was found (outer_type)
1672 we can safely ignore tci.speculative that is set on calls and give up
1673 only if there was dynamic type store that may affect given variable
1674 (seen_unanalyzed_store) */
1676 if (!tci
.type_maybe_changed
1679 && !tci
.seen_unanalyzed_store
1680 && !tci
.multiple_types_encountered
1681 && offset
== tci
.offset
1682 && types_same_for_odr (tci
.known_current_type
,
1685 if (!outer_type
|| tci
.seen_unanalyzed_store
)
1687 if (maybe_in_construction
)
1688 maybe_in_construction
= false;
1690 fprintf (dump_file
, " No dynamic type change found.\n");
/* A single consistent type was recorded: commit it -- as the real
   outer type when not speculative, as a speculative type otherwise.  */
1694 if (tci
.known_current_type
1695 && !function_entry_reached
1696 && !tci
.multiple_types_encountered
)
1698 if (!tci
.speculative
)
1700 outer_type
= TYPE_MAIN_VARIANT (tci
.known_current_type
)
;
1701 offset
= tci
.known_current_offset
;
1703 maybe_in_construction
= false;
1704 maybe_derived_type
= false;
1706 fprintf (dump_file
, " Determined dynamic type.\n");
1708 else if (!speculative_outer_type
1709 || speculative_maybe_derived_type
)
1711 speculative_outer_type
= TYPE_MAIN_VARIANT (tci
.known_current_type
)
;
1712 speculative_offset
= tci
.known_current_offset
;
1713 speculative_maybe_derived_type
= false;
1715 fprintf (dump_file
, " Determined speculative dynamic type.\n");
/* NOTE(review): both %s arguments below test FUNCTION_ENTRY_REACHED;
   given the second string text, the second condition was presumably
   meant to test tci.multiple_types_encountered -- verify upstream.  */
1720 fprintf (dump_file
, " Found multiple types%s%s\n",
1721 function_entry_reached
? " (function entry reached)" : "",
1722 function_entry_reached
? " (multiple types encountered)" : "");
1728 /* See if speculation given by SPEC_OUTER_TYPE, SPEC_OFFSET and SPEC_MAYBE_DERIVED_TYPE
1729 seems consistent (and useful) with what we already have in the non-speculative context. */
/* NOTE(review): garbled extraction -- return statements between the
   checks are missing; code tokens kept verbatim.  */
1732 ipa_polymorphic_call_context::speculation_consistent_p (tree spec_outer_type
,
1733 HOST_WIDE_INT spec_offset
,
1734 bool spec_maybe_derived_type
,
/* Speculative devirtualization must be enabled at all.  */
1737 if (!flag_devirtualize_speculatively
)
1740 /* Non-polymorphic types are useless for deriving likely polymorphic
1742 if (!spec_outer_type
|| !contains_polymorphic_type_p (spec_outer_type
))
1745 /* If we know nothing, speculation is always good. */
1749 /* Speculation is only useful to avoid derived types.
1750 This is not 100% true for placement new, where the outer context may
1751 turn out to be useless, but ignore these for now. */
1752 if (!maybe_derived_type
)
1755 /* If types agree, speculation is consistent, but it makes sense only
1756 when it says something new. */
1757 if (types_must_be_same_for_odr (spec_outer_type
, outer_type
))
1758 return maybe_derived_type
&& !spec_maybe_derived_type
;
1760 /* If speculation does not contain the type in question, ignore it. */
1762 && !contains_type_p (spec_outer_type
, spec_offset
, otr_type
, false, true))
1765 /* If outer type already contains speculation as a field,
1766 it is useless. We already know from OUTER_TYPE
1767 SPEC_TYPE and that it is not in the construction. */
1768 if (contains_type_p (outer_type
, offset
- spec_offset
,
1769 spec_outer_type
, false, false))
1772 /* If speculative outer type is not more specified than outer
1774 We can only decide this safely if we can compare types with OUTER_TYPE.
1776 if ((!in_lto_p
|| odr_type_p (outer_type
))
1777 && !contains_type_p (spec_outer_type
,
1778 spec_offset
- offset
,
1784 /* Improve THIS with speculation described by NEW_OUTER_TYPE, NEW_OFFSET,
1785 NEW_MAYBE_DERIVED_TYPE
1786 If OTR_TYPE is set, assume the context is used with OTR_TYPE. */
/* NOTE(review): garbled extraction -- early returns, braces and the tail
   of the final return expression are missing; code tokens kept
   verbatim.  Returns true when the speculation was changed.  */
1789 ipa_polymorphic_call_context::combine_speculation_with
1790 (tree new_outer_type
, HOST_WIDE_INT new_offset
, bool new_maybe_derived_type
,
/* Nothing to combine with.  */
1793 if (!new_outer_type
)
1796 /* restrict_to_inner_class may eliminate wrong speculation making our job
1799 restrict_to_inner_class (otr_type
);
/* Reject speculation that is inconsistent with the non-speculative
   part of this context.  */
1801 if (!speculation_consistent_p (new_outer_type
, new_offset
,
1802 new_maybe_derived_type
, otr_type
))
1805 /* New speculation is a win in case we have no speculation or new
1806 speculation does not consider derivations. */
1807 if (!speculative_outer_type
1808 || (speculative_maybe_derived_type
1809 && !new_maybe_derived_type
))
1811 speculative_outer_type
= new_outer_type
;
1812 speculative_offset
= new_offset
;
1813 speculative_maybe_derived_type
= new_maybe_derived_type
;
1816 else if (types_must_be_same_for_odr (speculative_outer_type
,
1819 if (speculative_offset
!= new_offset
)
1821 /* OK we have two contexts that seems valid but they disagree,
1824 This is not a lattice operation, so we may want to drop it later. */
1825 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1827 "Speculative outer types match, "
1828 "offset mismatch -> invalid speculation\n");
1829 clear_speculation ();
1834 if (speculative_maybe_derived_type
&& !new_maybe_derived_type
)
1836 speculative_maybe_derived_type
= false;
1843 /* Choose type that contains the other. This one either contains the outer
1844 as a field (thus giving exactly one target) or is deeper in the type
1846 else if (speculative_outer_type
1847 && speculative_maybe_derived_type
1848 && (new_offset
> speculative_offset
1849 || (new_offset
== speculative_offset
1850 && contains_type_p (new_outer_type
,
1851 0, speculative_outer_type
, false))))
/* Save the old speculation so it can be restored if the new one is
   rejected by restrict_to_inner_class below.  */
1853 tree old_outer_type
= speculative_outer_type
;
1854 HOST_WIDE_INT old_offset
= speculative_offset
;
1855 bool old_maybe_derived_type
= speculative_maybe_derived_type
;
1857 speculative_outer_type
= new_outer_type
;
1858 speculative_offset
= new_offset
;
1859 speculative_maybe_derived_type
= new_maybe_derived_type
;
1862 restrict_to_inner_class (otr_type
);
1864 /* If the speculation turned out to make no sense, revert to sensible
1866 if (!speculative_outer_type
)
1868 speculative_outer_type
= old_outer_type
;
1869 speculative_offset
= old_offset
;
1870 speculative_maybe_derived_type
= old_maybe_derived_type
;
1873 return (old_offset
!= speculative_offset
1874 || old_maybe_derived_type
!= speculative_maybe_derived_type
1875 || types_must_be_same_for_odr (speculative_outer_type
,
1881 /* Assume that both THIS and a given context is valid and strengthen THIS
1882 if possible. Return true if any strengthening was made.
1883 If actual type the context is being used in is known, OTR_TYPE should be
1884 set accordingly. This improves quality of combined result. */
/* NOTE(review): garbled extraction -- braces, goto/return lines and some
   assignments are missing; code tokens kept verbatim.  */
1887 ipa_polymorphic_call_context::combine_with (ipa_polymorphic_call_context ctx
,
1890 bool updated
= false;
/* A useless incoming context, or an already-invalid THIS, cannot be
   improved.  */
1892 if (ctx
.useless_p () || invalid
)
1895 /* Restricting context to inner type makes merging easier, however do not
1896 do that unless we know how the context is used (OTR_TYPE is non-NULL) */
1897 if (otr_type
&& !invalid
&& !ctx
.invalid
)
1899 restrict_to_inner_class (otr_type
);
1900 ctx
.restrict_to_inner_class (otr_type
);
1905 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1907 fprintf (dump_file
, "Polymorphic call context combine:");
1909 fprintf (dump_file
, "With context: ");
1910 ctx
.dump (dump_file
);
1913 fprintf (dump_file
, "To be used with type: ");
1914 print_generic_expr (dump_file
, otr_type
, TDF_SLIM
);
1915 fprintf (dump_file
, "\n");
1919 /* If call is known to be invalid, we are done. */
1922 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1923 fprintf (dump_file
, "-> Invalid context\n");
1927 if (!ctx
.outer_type
)
/* THIS has no outer type yet: adopt CTX's wholesale.  */
1929 else if (!outer_type
)
1931 outer_type
= ctx
.outer_type
;
1932 offset
= ctx
.offset
;
1933 dynamic
= ctx
.dynamic
;
1934 maybe_in_construction
= ctx
.maybe_in_construction
;
1935 maybe_derived_type
= ctx
.maybe_derived_type
;
1938 /* If types are known to be same, merging is quite easy. */
1939 else if (types_must_be_same_for_odr (outer_type
, ctx
.outer_type
))
1941 if (offset
!= ctx
.offset
1942 && TYPE_SIZE (outer_type
)
1943 && TREE_CODE (TYPE_SIZE (outer_type
)) == INTEGER_CST
)
1945 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1946 fprintf (dump_file
, "Outer types match, offset mismatch -> invalid\n");
1947 clear_speculation ();
1948 clear_outer_type ();
1952 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1953 fprintf (dump_file
, "Outer types match, merging flags\n");
/* Flags only get cleared (strengthened), never set, during a merge.  */
1954 if (maybe_in_construction
&& !ctx
.maybe_in_construction
)
1957 maybe_in_construction
= false;
1959 if (maybe_derived_type
&& !ctx
.maybe_derived_type
)
1962 maybe_derived_type
= false;
1964 if (dynamic
&& !ctx
.dynamic
)
1970 /* If we know the type precisely, there is not much to improve. */
1971 else if (!maybe_derived_type
&& !maybe_in_construction
1972 && !ctx
.maybe_derived_type
&& !ctx
.maybe_in_construction
)
1974 /* It may be easy to check if second context permits the first
1975 and set INVALID otherwise. This is not easy to do in general;
1976 contains_type_p may return false negatives for non-comparable
1979 If OTR_TYPE is known, we however can expect that
1980 restrict_to_inner_class should have discovered the same base
1982 if (otr_type
&& !ctx
.maybe_in_construction
&& !ctx
.maybe_derived_type
)
1984 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1985 fprintf (dump_file
, "Contextes disagree -> invalid\n");
1989 /* See if one type contains the other as a field (not base).
1990 In this case we want to choose the wider type, because it contains
1991 more information. */
1992 else if (contains_type_p (ctx
.outer_type
, ctx
.offset
- offset
,
1993 outer_type
, false, false))
1995 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1996 fprintf (dump_file
, "Second type contain the first as a field\n");
1998 if (maybe_derived_type
)
2000 outer_type
= ctx
.outer_type
;
2001 maybe_derived_type
= ctx
.maybe_derived_type
;
2002 offset
= ctx
.offset
;
2003 dynamic
= ctx
.dynamic
;
2007 /* If we do not know how the context is being used, we can
2008 not clear MAYBE_IN_CONSTRUCTION because it may be offset
2009 to other component of OUTER_TYPE later and we know nothing
2011 if (otr_type
&& maybe_in_construction
2012 && !ctx
.maybe_in_construction
)
2014 maybe_in_construction
= false;
2018 else if (contains_type_p (outer_type
, offset
- ctx
.offset
,
2019 ctx
.outer_type
, false, false))
2021 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2022 fprintf (dump_file
, "First type contain the second as a field\n");
2024 if (otr_type
&& maybe_in_construction
2025 && !ctx
.maybe_in_construction
)
2027 maybe_in_construction
= false;
2031 /* See if OUTER_TYPE is base of CTX.OUTER_TYPE. */
2032 else if (contains_type_p (ctx
.outer_type
,
2033 ctx
.offset
- offset
, outer_type
, false, true))
2035 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2036 fprintf (dump_file
, "First type is base of second\n");
2037 if (!maybe_derived_type
)
2039 if (!ctx
.maybe_in_construction
2040 && types_odr_comparable (outer_type
, ctx
.outer_type
))
2042 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2043 fprintf (dump_file
, "Second context does not permit base -> invalid\n");
2047 /* Pick variant deeper in the hierarchy. */
2050 outer_type
= ctx
.outer_type
;
2051 maybe_in_construction
= ctx
.maybe_in_construction
;
2052 maybe_derived_type
= ctx
.maybe_derived_type
;
2053 offset
= ctx
.offset
;
2054 dynamic
= ctx
.dynamic
;
2058 /* See if CTX.OUTER_TYPE is base of OUTER_TYPE. */
2059 else if (contains_type_p (outer_type
,
2060 offset
- ctx
.offset
, ctx
.outer_type
, false, true))
2062 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2063 fprintf (dump_file
, "Second type is base of first\n");
2064 if (!ctx
.maybe_derived_type
)
2066 if (!maybe_in_construction
2067 && types_odr_comparable (outer_type
, ctx
.outer_type
))
2069 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2070 fprintf (dump_file
, "First context does not permit base -> invalid\n");
2075 /* TODO handle merging using hierarchy. */
2076 else if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2077 fprintf (dump_file
, "Giving up on merge\n");
/* Independently of the non-speculative merge above, fold in CTX's
   speculation.  */
2079 updated
|= combine_speculation_with (ctx
.speculative_outer_type
,
2080 ctx
.speculative_offset
,
2081 ctx
.speculative_maybe_derived_type
,
2084 if (updated
&& dump_file
&& (dump_flags
& TDF_DETAILS
))
2086 fprintf (dump_file
, "Updated as: ");
2088 fprintf (dump_file
, "\n");
2094 clear_speculation ();
2095 clear_outer_type ();
2099 /* Take non-speculative info, merge it with speculative and clear speculation.
2100 Used when we no longer manage to keep track of actual outer type, but we
2101 think it is still there.
2103 If OTR_TYPE is set, the transformation can be done more effectively assuming
2104 that context is going to be used only that way. */
/* NOTE(review): garbled extraction -- the guard branches around the two
   clear_outer_type () calls and the tail of the combine_speculation_with
   call are missing; code tokens kept verbatim.  */
2107 ipa_polymorphic_call_context::make_speculative (tree otr_type
)
/* Stash the current non-speculative info before clearing it, so it can
   be re-installed as speculation below.  */
2109 tree spec_outer_type
= outer_type
;
2110 HOST_WIDE_INT spec_offset
= offset
;
2111 bool spec_maybe_derived_type
= maybe_derived_type
;
2116 clear_outer_type ();
2117 clear_speculation ();
2122 clear_outer_type ();
2123 combine_speculation_with (spec_outer_type
, spec_offset
,
2124 spec_maybe_derived_type
,
2128 /* Use when we can not track dynamic type change. This speculatively assume
2129 type change is not happening. */
2132 ipa_polymorphic_call_context::possible_dynamic_type_change (bool in_poly_cdtor
,
2136 make_speculative (otr_type
);
2137 else if (in_poly_cdtor
)
2138 maybe_in_construction
= true;