/* Analysis of polymorphic call context.
   Copyright (C) 2013-2014 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
23 #include "coretypes.h"
26 #include "print-tree.h"
29 #include "tree-pass.h"
32 #include "hash-table.h"
34 #include "tree-pretty-print.h"
36 #include "basic-block.h"
39 #include "plugin-api.h"
43 #include "hard-reg-set.h"
48 #include "ipa-utils.h"
49 #include "tree-ssa-alias.h"
50 #include "internal-fn.h"
51 #include "gimple-fold.h"
52 #include "gimple-expr.h"
54 #include "alloc-pool.h"
56 #include "ipa-inline.h"
57 #include "diagnostic.h"
61 #include "gimple-pretty-print.h"
62 #include "stor-layout.h"
64 #include "data-streamer.h"
65 #include "lto-streamer.h"
66 #include "streamer-hooks.h"
68 /* Return true when TYPE contains an polymorphic type and thus is interesting
69 for devirtualization machinery. */
71 static bool contains_type_p (tree
, HOST_WIDE_INT
, tree
,
72 bool consider_placement_new
= true,
73 bool consider_bases
= true);
76 contains_polymorphic_type_p (const_tree type
)
78 type
= TYPE_MAIN_VARIANT (type
);
80 if (RECORD_OR_UNION_TYPE_P (type
))
83 && polymorphic_type_binfo_p (TYPE_BINFO (type
)))
85 for (tree fld
= TYPE_FIELDS (type
); fld
; fld
= DECL_CHAIN (fld
))
86 if (TREE_CODE (fld
) == FIELD_DECL
87 && !DECL_ARTIFICIAL (fld
)
88 && contains_polymorphic_type_p (TREE_TYPE (fld
)))
92 if (TREE_CODE (type
) == ARRAY_TYPE
)
93 return contains_polymorphic_type_p (TREE_TYPE (type
));
97 /* Return true if it seems valid to use placement new to build EXPECTED_TYPE
98 at possition CUR_OFFSET within TYPE.
100 POD can be changed to an instance of a polymorphic type by
101 placement new. Here we play safe and assume that any
102 non-polymorphic type is POD. */
104 possible_placement_new (tree type
, tree expected_type
,
105 HOST_WIDE_INT cur_offset
)
107 return ((TREE_CODE (type
) != RECORD_TYPE
108 || !TYPE_BINFO (type
)
109 || cur_offset
>= BITS_PER_WORD
110 || !polymorphic_type_binfo_p (TYPE_BINFO (type
)))
111 && (!TYPE_SIZE (type
)
112 || !tree_fits_shwi_p (TYPE_SIZE (type
))
114 + (expected_type
? tree_to_uhwi (TYPE_SIZE (expected_type
))
115 : GET_MODE_BITSIZE (Pmode
))
116 <= tree_to_uhwi (TYPE_SIZE (type
)))));
119 /* THIS->OUTER_TYPE is a type of memory object where object of OTR_TYPE
120 is contained at THIS->OFFSET. Walk the memory representation of
121 THIS->OUTER_TYPE and find the outermost class type that match
122 OTR_TYPE or contain OTR_TYPE as a base. Update THIS
125 If OTR_TYPE is NULL, just find outermost polymorphic type with
126 virtual table present at possition OFFSET.
128 For example when THIS represents type
134 and we look for type at offset sizeof(int), we end up with B and offset 0.
135 If the same is produced by multiple inheritance, we end up with A and offset
138 If we can not find corresponding class, give up by setting
139 THIS->OUTER_TYPE to OTR_TYPE and THIS->OFFSET to NULL.
140 Return true when lookup was sucesful.
142 When CONSIDER_PLACEMENT_NEW is false, reject contexts that may be made
143 valid only via alocation of new polymorphic type inside by means
146 When CONSIDER_BASES is false, only look for actual fields, not base types
150 ipa_polymorphic_call_context::restrict_to_inner_class (tree otr_type
,
151 bool consider_placement_new
,
154 tree type
= outer_type
;
155 HOST_WIDE_INT cur_offset
= offset
;
156 bool speculative
= false;
157 bool size_unknown
= false;
158 unsigned HOST_WIDE_INT otr_type_size
= GET_MODE_BITSIZE (Pmode
);
160 /* Update OUTER_TYPE to match EXPECTED_TYPE if it is not set. */
163 clear_outer_type (otr_type
);
167 /* See if OFFSET points inside OUTER_TYPE. If it does not, we know
168 that the context is either invalid, or the instance type must be
169 derived from OUTER_TYPE.
171 Because the instance type may contain field whose type is of OUTER_TYPE,
172 we can not derive any effective information about it.
174 TODO: In the case we know all derrived types, we can definitely do better
176 else if (TYPE_SIZE (outer_type
)
177 && tree_fits_shwi_p (TYPE_SIZE (outer_type
))
178 && tree_to_shwi (TYPE_SIZE (outer_type
)) >= 0
179 && tree_to_shwi (TYPE_SIZE (outer_type
)) <= offset
)
181 clear_outer_type (otr_type
);
185 /* If derived type is not allowed, we know that the context is invalid.
186 For dynamic types, we really do not have information about
187 size of the memory location. It is possible that completely
188 different type is stored after outer_type. */
189 if (!maybe_derived_type
&& !dynamic
)
191 clear_speculation ();
197 if (otr_type
&& TYPE_SIZE (otr_type
)
198 && tree_fits_shwi_p (TYPE_SIZE (otr_type
)))
199 otr_type_size
= tree_to_uhwi (TYPE_SIZE (otr_type
));
201 if (!type
|| offset
< 0)
202 goto no_useful_type_info
;
204 /* Find the sub-object the constant actually refers to and mark whether it is
205 an artificial one (as opposed to a user-defined one).
207 This loop is performed twice; first time for outer_type and second time
208 for speculative_outer_type. The second run has SPECULATIVE set. */
211 unsigned HOST_WIDE_INT pos
, size
;
214 /* If we do not know size of TYPE, we need to be more conservative
215 about accepting cases where we can not find EXPECTED_TYPE.
216 Generally the types that do matter here are of constant size.
217 Size_unknown case should be very rare. */
219 && tree_fits_shwi_p (TYPE_SIZE (type
))
220 && tree_to_shwi (TYPE_SIZE (type
)) >= 0)
221 size_unknown
= false;
225 /* On a match, just return what we found. */
227 && types_odr_comparable (type
, otr_type
)
228 && types_same_for_odr (type
, otr_type
))
230 && TREE_CODE (type
) == RECORD_TYPE
232 && polymorphic_type_binfo_p (TYPE_BINFO (type
))))
236 /* If we did not match the offset, just give up on speculation. */
238 /* Also check if speculation did not end up being same as
240 || (types_must_be_same_for_odr (speculative_outer_type
,
242 && (maybe_derived_type
243 == speculative_maybe_derived_type
)))
244 clear_speculation ();
249 /* If type is known to be final, do not worry about derived
250 types. Testing it here may help us to avoid speculation. */
251 if (otr_type
&& TREE_CODE (outer_type
) == RECORD_TYPE
252 && (!in_lto_p
|| odr_type_p (outer_type
))
253 && type_known_to_have_no_deriavations_p (outer_type
))
254 maybe_derived_type
= false;
256 /* Type can not contain itself on an non-zero offset. In that case
257 just give up. Still accept the case where size is now known.
258 Either the second copy may appear past the end of type or within
259 the non-POD buffer located inside the variably sized type
262 goto no_useful_type_info
;
263 /* If we determined type precisely or we have no clue on
264 speuclation, we are done. */
265 if (!maybe_derived_type
|| !speculative_outer_type
266 || !speculation_consistent_p (speculative_outer_type
,
268 speculative_maybe_derived_type
,
271 clear_speculation ();
274 /* Otherwise look into speculation now. */
278 type
= speculative_outer_type
;
279 cur_offset
= speculative_offset
;
285 /* Walk fields and find corresponding on at OFFSET. */
286 if (TREE_CODE (type
) == RECORD_TYPE
)
288 for (fld
= TYPE_FIELDS (type
); fld
; fld
= DECL_CHAIN (fld
))
290 if (TREE_CODE (fld
) != FIELD_DECL
)
293 pos
= int_bit_position (fld
);
294 if (pos
> (unsigned HOST_WIDE_INT
)cur_offset
)
297 /* Do not consider vptr itself. Not even for placement new. */
298 if (!pos
&& DECL_ARTIFICIAL (fld
)
299 && POINTER_TYPE_P (TREE_TYPE (fld
))
301 && polymorphic_type_binfo_p (TYPE_BINFO (type
)))
304 if (!DECL_SIZE (fld
) || !tree_fits_uhwi_p (DECL_SIZE (fld
)))
305 goto no_useful_type_info
;
306 size
= tree_to_uhwi (DECL_SIZE (fld
));
308 /* We can always skip types smaller than pointer size:
309 those can not contain a virtual table pointer.
311 Disqualifying fields that are too small to fit OTR_TYPE
312 saves work needed to walk them for no benefit.
313 Because of the way the bases are packed into a class, the
314 field's size may be smaller than type size, so it needs
315 to be done with a care. */
317 if (pos
<= (unsigned HOST_WIDE_INT
)cur_offset
318 && (pos
+ size
) >= (unsigned HOST_WIDE_INT
)cur_offset
319 + GET_MODE_BITSIZE (Pmode
)
321 || !TYPE_SIZE (TREE_TYPE (fld
))
322 || !tree_fits_shwi_p (TYPE_SIZE (TREE_TYPE (fld
)))
323 || (pos
+ tree_to_uhwi (TYPE_SIZE (TREE_TYPE (fld
))))
324 >= cur_offset
+ otr_type_size
))
329 goto no_useful_type_info
;
331 type
= TYPE_MAIN_VARIANT (TREE_TYPE (fld
));
333 /* DECL_ARTIFICIAL represents a basetype. */
334 if (!DECL_ARTIFICIAL (fld
))
340 /* As soon as we se an field containing the type,
341 we know we are not looking for derivations. */
342 maybe_derived_type
= false;
346 speculative_outer_type
= type
;
347 speculative_offset
= cur_offset
;
348 speculative_maybe_derived_type
= false;
351 else if (!consider_bases
)
352 goto no_useful_type_info
;
354 else if (TREE_CODE (type
) == ARRAY_TYPE
)
356 tree subtype
= TYPE_MAIN_VARIANT (TREE_TYPE (type
));
358 /* Give up if we don't know array field size.
359 Also give up on non-polymorphic types as they are used
360 as buffers for placement new. */
361 if (!TYPE_SIZE (subtype
)
362 || !tree_fits_shwi_p (TYPE_SIZE (subtype
))
363 || tree_to_shwi (TYPE_SIZE (subtype
)) <= 0
364 || !contains_polymorphic_type_p (subtype
))
365 goto no_useful_type_info
;
367 HOST_WIDE_INT new_offset
= cur_offset
% tree_to_shwi (TYPE_SIZE (subtype
));
369 /* We may see buffer for placement new. In this case the expected type
370 can be bigger than the subtype. */
371 if (TYPE_SIZE (subtype
)
372 && (cur_offset
+ otr_type_size
373 > tree_to_uhwi (TYPE_SIZE (subtype
))))
374 goto no_useful_type_info
;
376 cur_offset
= new_offset
;
382 maybe_derived_type
= false;
386 speculative_outer_type
= type
;
387 speculative_offset
= cur_offset
;
388 speculative_maybe_derived_type
= false;
391 /* Give up on anything else. */
395 if (maybe_derived_type
&& !speculative
396 && TREE_CODE (outer_type
) == RECORD_TYPE
397 && TREE_CODE (otr_type
) == RECORD_TYPE
398 && TYPE_BINFO (otr_type
)
400 && get_binfo_at_offset (TYPE_BINFO (otr_type
), 0, outer_type
))
402 clear_outer_type (otr_type
);
403 if (!speculative_outer_type
404 || !speculation_consistent_p (speculative_outer_type
,
406 speculative_maybe_derived_type
,
408 clear_speculation ();
409 if (speculative_outer_type
)
412 type
= speculative_outer_type
;
413 cur_offset
= speculative_offset
;
418 /* We found no way to embedd EXPECTED_TYPE in TYPE.
419 We still permit two special cases - placement new and
420 the case of variadic types containing themselves. */
422 && consider_placement_new
423 && (size_unknown
|| !type
|| maybe_derived_type
424 || possible_placement_new (type
, otr_type
, cur_offset
)))
426 /* In these weird cases we want to accept the context.
427 In non-speculative run we have no useful outer_type info
428 (TODO: we may eventually want to record upper bound on the
429 type size that can be used to prune the walk),
430 but we still want to consider speculation that may
434 clear_outer_type (otr_type
);
435 if (!speculative_outer_type
436 || !speculation_consistent_p (speculative_outer_type
,
438 speculative_maybe_derived_type
,
440 clear_speculation ();
441 if (speculative_outer_type
)
444 type
= speculative_outer_type
;
445 cur_offset
= speculative_offset
;
451 clear_speculation ();
456 clear_speculation ();
459 clear_outer_type (otr_type
);
467 /* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET.
468 CONSIDER_PLACEMENT_NEW makes function to accept cases where OTR_TYPE can
469 be built within OUTER_TYPE by means of placement new. CONSIDER_BASES makes
470 function to accept cases where OTR_TYPE appears as base of OUTER_TYPE or as
471 base of one of fields of OUTER_TYPE. */
474 contains_type_p (tree outer_type
, HOST_WIDE_INT offset
,
476 bool consider_placement_new
,
479 ipa_polymorphic_call_context context
;
481 /* Check that type is within range. */
484 if (TYPE_SIZE (outer_type
) && TYPE_SIZE (otr_type
)
485 && TREE_CODE (outer_type
) == INTEGER_CST
486 && TREE_CODE (otr_type
) == INTEGER_CST
487 && wi::ltu_p (wi::to_offset (outer_type
), (wi::to_offset (otr_type
) + offset
)))
490 context
.offset
= offset
;
491 context
.outer_type
= TYPE_MAIN_VARIANT (outer_type
);
492 context
.maybe_derived_type
= false;
493 return context
.restrict_to_inner_class (otr_type
, consider_placement_new
, consider_bases
);
497 /* We know that the instance is stored in variable or parameter
498 (not dynamically allocated) and we want to disprove the fact
499 that it may be in construction at invocation of CALL.
501 BASE represents memory location where instance is stored.
502 If BASE is NULL, it is assumed to be global memory.
503 OUTER_TYPE is known type of the instance or NULL if not
506 For the variable to be in construction we actually need to
507 be in constructor of corresponding global variable or
508 the inline stack of CALL must contain the constructor.
509 Check this condition. This check works safely only before
510 IPA passes, because inline stacks may become out of date
514 decl_maybe_in_construction_p (tree base
, tree outer_type
,
515 gimple call
, tree function
)
518 outer_type
= TYPE_MAIN_VARIANT (outer_type
);
519 gcc_assert (!base
|| DECL_P (base
));
521 /* After inlining the code unification optimizations may invalidate
522 inline stacks. Also we need to give up on global variables after
523 IPA, because addresses of these may have been propagated to their
525 if (DECL_STRUCT_FUNCTION (function
)->after_inlining
)
528 /* Pure functions can not do any changes on the dynamic type;
529 that require writting to memory. */
530 if ((!base
|| !auto_var_in_fn_p (base
, function
))
531 && flags_from_decl_or_type (function
) & (ECF_PURE
| ECF_CONST
))
534 for (tree block
= gimple_block (call
); block
&& TREE_CODE (block
) == BLOCK
;
535 block
= BLOCK_SUPERCONTEXT (block
))
536 if (BLOCK_ABSTRACT_ORIGIN (block
)
537 && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block
)) == FUNCTION_DECL
)
539 tree fn
= BLOCK_ABSTRACT_ORIGIN (block
);
541 if (TREE_CODE (TREE_TYPE (fn
)) != METHOD_TYPE
542 || (!DECL_CXX_CONSTRUCTOR_P (fn
)
543 && !DECL_CXX_DESTRUCTOR_P (fn
)))
545 /* Watch for clones where we constant propagated the first
546 argument (pointer to the instance). */
547 fn
= DECL_ABSTRACT_ORIGIN (fn
);
549 || (base
&& !is_global_var (base
))
550 || TREE_CODE (TREE_TYPE (fn
)) != METHOD_TYPE
551 || (!DECL_CXX_CONSTRUCTOR_P (fn
)
552 && !DECL_CXX_DESTRUCTOR_P (fn
)))
555 if (flags_from_decl_or_type (fn
) & (ECF_PURE
| ECF_CONST
))
558 tree type
= TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (fn
)));
560 if (!outer_type
|| !types_odr_comparable (type
, outer_type
))
562 if (TREE_CODE (type
) == RECORD_TYPE
564 && polymorphic_type_binfo_p (TYPE_BINFO (type
)))
567 else if (types_same_for_odr (type
, outer_type
))
571 if (!base
|| (TREE_CODE (base
) == VAR_DECL
&& is_global_var (base
)))
573 if (TREE_CODE (TREE_TYPE (function
)) != METHOD_TYPE
574 || (!DECL_CXX_CONSTRUCTOR_P (function
)
575 && !DECL_CXX_DESTRUCTOR_P (function
)))
577 if (!DECL_ABSTRACT_ORIGIN (function
))
579 /* Watch for clones where we constant propagated the first
580 argument (pointer to the instance). */
581 function
= DECL_ABSTRACT_ORIGIN (function
);
583 || TREE_CODE (TREE_TYPE (function
)) != METHOD_TYPE
584 || (!DECL_CXX_CONSTRUCTOR_P (function
)
585 && !DECL_CXX_DESTRUCTOR_P (function
)))
588 tree type
= TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (function
)));
589 if (!outer_type
|| !types_odr_comparable (type
, outer_type
))
591 if (TREE_CODE (type
) == RECORD_TYPE
593 && polymorphic_type_binfo_p (TYPE_BINFO (type
)))
596 else if (types_same_for_odr (type
, outer_type
))
602 /* Dump human readable context to F. */
605 ipa_polymorphic_call_context::dump (FILE *f
) const
609 fprintf (f
, "Call is known to be undefined");
613 fprintf (f
, "nothing known");
614 if (outer_type
|| offset
)
616 fprintf (f
, "Outer type%s:", dynamic
? " (dynamic)":"");
617 print_generic_expr (f
, outer_type
, TDF_SLIM
);
618 if (maybe_derived_type
)
619 fprintf (f
, " (or a derived type)");
620 if (maybe_in_construction
)
621 fprintf (f
, " (maybe in construction)");
622 fprintf (f
, " offset "HOST_WIDE_INT_PRINT_DEC
,
625 if (speculative_outer_type
)
627 if (outer_type
|| offset
)
629 fprintf (f
, "Speculative outer type:");
630 print_generic_expr (f
, speculative_outer_type
, TDF_SLIM
);
631 if (speculative_maybe_derived_type
)
632 fprintf (f
, " (or a derived type)");
633 fprintf (f
, " at offset "HOST_WIDE_INT_PRINT_DEC
,
640 /* Print context to stderr. */
643 ipa_polymorphic_call_context::debug () const
648 /* Stream out the context to OB. */
651 ipa_polymorphic_call_context::stream_out (struct output_block
*ob
) const
653 struct bitpack_d bp
= bitpack_create (ob
->main_stream
);
655 bp_pack_value (&bp
, invalid
, 1);
656 bp_pack_value (&bp
, maybe_in_construction
, 1);
657 bp_pack_value (&bp
, maybe_derived_type
, 1);
658 bp_pack_value (&bp
, speculative_maybe_derived_type
, 1);
659 bp_pack_value (&bp
, dynamic
, 1);
660 bp_pack_value (&bp
, outer_type
!= NULL
, 1);
661 bp_pack_value (&bp
, offset
!= 0, 1);
662 bp_pack_value (&bp
, speculative_outer_type
!= NULL
, 1);
663 streamer_write_bitpack (&bp
);
665 if (outer_type
!= NULL
)
666 stream_write_tree (ob
, outer_type
, true);
668 streamer_write_hwi (ob
, offset
);
669 if (speculative_outer_type
!= NULL
)
671 stream_write_tree (ob
, speculative_outer_type
, true);
672 streamer_write_hwi (ob
, speculative_offset
);
675 gcc_assert (!speculative_offset
);
678 /* Stream in the context from IB and DATA_IN. */
681 ipa_polymorphic_call_context::stream_in (struct lto_input_block
*ib
,
682 struct data_in
*data_in
)
684 struct bitpack_d bp
= streamer_read_bitpack (ib
);
686 invalid
= bp_unpack_value (&bp
, 1);
687 maybe_in_construction
= bp_unpack_value (&bp
, 1);
688 maybe_derived_type
= bp_unpack_value (&bp
, 1);
689 speculative_maybe_derived_type
= bp_unpack_value (&bp
, 1);
690 dynamic
= bp_unpack_value (&bp
, 1);
691 bool outer_type_p
= bp_unpack_value (&bp
, 1);
692 bool offset_p
= bp_unpack_value (&bp
, 1);
693 bool speculative_outer_type_p
= bp_unpack_value (&bp
, 1);
696 outer_type
= stream_read_tree (ib
, data_in
);
700 offset
= (HOST_WIDE_INT
) streamer_read_hwi (ib
);
703 if (speculative_outer_type_p
)
705 speculative_outer_type
= stream_read_tree (ib
, data_in
);
706 speculative_offset
= (HOST_WIDE_INT
) streamer_read_hwi (ib
);
710 speculative_outer_type
= NULL
;
711 speculative_offset
= 0;
715 /* Proudce polymorphic call context for call method of instance
716 that is located within BASE (that is assumed to be a decl) at offset OFF. */
719 ipa_polymorphic_call_context::set_by_decl (tree base
, HOST_WIDE_INT off
)
721 gcc_assert (DECL_P (base
));
722 clear_speculation ();
724 if (!contains_polymorphic_type_p (TREE_TYPE (base
)))
730 outer_type
= TYPE_MAIN_VARIANT (TREE_TYPE (base
));
732 /* Make very conservative assumption that all objects
733 may be in construction.
735 It is up to caller to revisit this via
736 get_dynamic_type or decl_maybe_in_construction_p. */
737 maybe_in_construction
= true;
738 maybe_derived_type
= false;
742 /* CST is an invariant (address of decl), try to get meaningful
743 polymorphic call context for polymorphic call of method
744 if instance of OTR_TYPE that is located at offset OFF of this invariant.
745 Return FALSE if nothing meaningful can be found. */
748 ipa_polymorphic_call_context::set_by_invariant (tree cst
,
752 HOST_WIDE_INT offset2
, size
, max_size
;
757 clear_outer_type (otr_type
);
759 if (TREE_CODE (cst
) != ADDR_EXPR
)
762 cst
= TREE_OPERAND (cst
, 0);
763 base
= get_ref_base_and_extent (cst
, &offset2
, &size
, &max_size
);
764 if (!DECL_P (base
) || max_size
== -1 || max_size
!= size
)
767 /* Only type inconsistent programs can have otr_type that is
768 not part of outer type. */
769 if (otr_type
&& !contains_type_p (TREE_TYPE (base
), off
, otr_type
))
772 set_by_decl (base
, off
);
776 /* See if OP is SSA name initialized as a copy or by single assignment.
777 If so, walk the SSA graph up. Because simple PHI conditional is considered
778 copy, GLOBAL_VISITED may be used to avoid infinite loop walking the SSA
782 walk_ssa_copies (tree op
, hash_set
<tree
> **global_visited
= NULL
)
784 hash_set
<tree
> *visited
= NULL
;
786 while (TREE_CODE (op
) == SSA_NAME
787 && !SSA_NAME_IS_DEFAULT_DEF (op
)
788 && SSA_NAME_DEF_STMT (op
)
789 && (gimple_assign_single_p (SSA_NAME_DEF_STMT (op
))
790 || gimple_code (SSA_NAME_DEF_STMT (op
)) == GIMPLE_PHI
))
794 if (!*global_visited
)
795 *global_visited
= new hash_set
<tree
>;
796 if ((*global_visited
)->add (op
))
802 visited
= new hash_set
<tree
>;
803 if (visited
->add (op
))
811 This pattern is implicitly produced for casts to non-primary
812 bases. When doing context analysis, we do not really care
813 about the case pointer is NULL, becuase the call will be
815 if (gimple_code (SSA_NAME_DEF_STMT (op
)) == GIMPLE_PHI
)
817 gimple phi
= SSA_NAME_DEF_STMT (op
);
819 if (gimple_phi_num_args (phi
) > 2)
821 if (gimple_phi_num_args (phi
) == 1)
822 op
= gimple_phi_arg_def (phi
, 0);
823 else if (integer_zerop (gimple_phi_arg_def (phi
, 0)))
824 op
= gimple_phi_arg_def (phi
, 1);
825 else if (integer_zerop (gimple_phi_arg_def (phi
, 1)))
826 op
= gimple_phi_arg_def (phi
, 0);
832 if (gimple_assign_load_p (SSA_NAME_DEF_STMT (op
)))
834 op
= gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op
));
844 /* Create polymorphic call context from IP invariant CST.
845 This is typically &global_var.
846 OTR_TYPE specify type of polymorphic call or NULL if unknown, OFF
847 is offset of call. */
849 ipa_polymorphic_call_context::ipa_polymorphic_call_context (tree cst
,
853 clear_speculation ();
854 set_by_invariant (cst
, otr_type
, off
);
857 /* Build context for pointer REF contained in FNDECL at statement STMT.
858 if INSTANCE is non-NULL, return pointer to the object described by
859 the context or DECL where context is contained in. */
861 ipa_polymorphic_call_context::ipa_polymorphic_call_context (tree fndecl
,
866 tree otr_type
= NULL
;
868 hash_set
<tree
> *visited
= NULL
;
870 if (TREE_CODE (ref
) == OBJ_TYPE_REF
)
872 otr_type
= obj_type_ref_class (ref
);
873 base_pointer
= OBJ_TYPE_REF_OBJECT (ref
);
878 /* Set up basic info in case we find nothing interesting in the analysis. */
879 clear_speculation ();
880 clear_outer_type (otr_type
);
883 /* Walk SSA for outer object. */
886 base_pointer
= walk_ssa_copies (base_pointer
, &visited
);
887 if (TREE_CODE (base_pointer
) == ADDR_EXPR
)
889 HOST_WIDE_INT size
, max_size
;
890 HOST_WIDE_INT offset2
;
891 tree base
= get_ref_base_and_extent (TREE_OPERAND (base_pointer
, 0),
892 &offset2
, &size
, &max_size
);
894 if (max_size
!= -1 && max_size
== size
)
895 combine_speculation_with (TYPE_MAIN_VARIANT (TREE_TYPE (base
)),
898 NULL
/* Do not change outer type. */);
900 /* If this is a varying address, punt. */
901 if ((TREE_CODE (base
) == MEM_REF
|| DECL_P (base
))
905 /* We found dereference of a pointer. Type of the pointer
906 and MEM_REF is meaningless, but we can look futher. */
907 if (TREE_CODE (base
) == MEM_REF
)
909 base_pointer
= TREE_OPERAND (base
, 0);
911 += offset2
+ mem_ref_offset (base
).to_short_addr () * BITS_PER_UNIT
;
914 /* We found base object. In this case the outer_type
916 else if (DECL_P (base
))
920 /* Only type inconsistent programs can have otr_type that is
921 not part of outer type. */
923 && !contains_type_p (TREE_TYPE (base
),
924 offset
+ offset2
, otr_type
))
928 *instance
= base_pointer
;
931 set_by_decl (base
, offset
+ offset2
);
932 if (outer_type
&& maybe_in_construction
&& stmt
)
933 maybe_in_construction
934 = decl_maybe_in_construction_p (base
,
948 else if (TREE_CODE (base_pointer
) == POINTER_PLUS_EXPR
949 && tree_fits_uhwi_p (TREE_OPERAND (base_pointer
, 1)))
951 offset
+= tree_to_shwi (TREE_OPERAND (base_pointer
, 1))
953 base_pointer
= TREE_OPERAND (base_pointer
, 0);
962 /* Try to determine type of the outer object. */
963 if (TREE_CODE (base_pointer
) == SSA_NAME
964 && SSA_NAME_IS_DEFAULT_DEF (base_pointer
)
965 && TREE_CODE (SSA_NAME_VAR (base_pointer
)) == PARM_DECL
)
967 /* See if parameter is THIS pointer of a method. */
968 if (TREE_CODE (TREE_TYPE (fndecl
)) == METHOD_TYPE
969 && SSA_NAME_VAR (base_pointer
) == DECL_ARGUMENTS (fndecl
))
972 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer
)));
973 gcc_assert (TREE_CODE (outer_type
) == RECORD_TYPE
974 || TREE_CODE (outer_type
) == UNION_TYPE
);
976 /* Dynamic casting has possibly upcasted the type
977 in the hiearchy. In this case outer type is less
978 informative than inner type and we should forget
981 && !contains_type_p (outer_type
, offset
,
983 || !contains_polymorphic_type_p (outer_type
))
987 *instance
= base_pointer
;
993 /* If the function is constructor or destructor, then
994 the type is possibly in construction, but we know
995 it is not derived type. */
996 if (DECL_CXX_CONSTRUCTOR_P (fndecl
)
997 || DECL_CXX_DESTRUCTOR_P (fndecl
))
999 maybe_in_construction
= true;
1000 maybe_derived_type
= false;
1004 maybe_derived_type
= true;
1005 maybe_in_construction
= false;
1008 *instance
= base_pointer
;
1011 /* Non-PODs passed by value are really passed by invisible
1012 reference. In this case we also know the type of the
1014 if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer
)))
1017 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer
)));
1018 /* Only type inconsistent programs can have otr_type that is
1019 not part of outer type. */
1020 if (otr_type
&& !contains_type_p (outer_type
, offset
,
1025 *instance
= base_pointer
;
1028 /* Non-polymorphic types have no interest for us. */
1029 else if (!otr_type
&& !contains_polymorphic_type_p (outer_type
))
1033 *instance
= base_pointer
;
1036 maybe_derived_type
= false;
1037 maybe_in_construction
= false;
1039 *instance
= base_pointer
;
1044 tree base_type
= TREE_TYPE (base_pointer
);
1046 if (TREE_CODE (base_pointer
) == SSA_NAME
1047 && SSA_NAME_IS_DEFAULT_DEF (base_pointer
)
1048 && TREE_CODE (SSA_NAME_VAR (base_pointer
)) != PARM_DECL
)
1052 *instance
= base_pointer
;
1055 if (TREE_CODE (base_pointer
) == SSA_NAME
1056 && SSA_NAME_DEF_STMT (base_pointer
)
1057 && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer
)))
1058 base_type
= TREE_TYPE (gimple_assign_rhs1
1059 (SSA_NAME_DEF_STMT (base_pointer
)));
1061 if (POINTER_TYPE_P (base_type
))
1062 combine_speculation_with (TYPE_MAIN_VARIANT (TREE_TYPE (base_type
)),
1064 true, NULL
/* Do not change type here */);
1065 /* TODO: There are multiple ways to derive a type. For instance
1066 if BASE_POINTER is passed to an constructor call prior our refernece.
1067 We do not make this type of flow sensitive analysis yet. */
1069 *instance
= base_pointer
;
1073 /* Structure to be passed in between detect_type_change and
1074 check_stmt_for_type_change. */
1076 struct type_change_info
1078 /* Offset into the object where there is the virtual method pointer we are
1080 HOST_WIDE_INT offset
;
1081 /* The declaration or SSA_NAME pointer of the base that we are checking for
1084 /* The reference to virtual table pointer used. */
1087 /* If we actually can tell the type that the object has changed to, it is
1088 stored in this field. Otherwise it remains NULL_TREE. */
1089 tree known_current_type
;
1090 HOST_WIDE_INT known_current_offset
;
1092 /* Set to true if dynamic type change has been detected. */
1093 bool type_maybe_changed
;
1094 /* Set to true if multiple types have been encountered. known_current_type
1095 must be disregarded in that case. */
1096 bool multiple_types_encountered
;
1097 /* Set to true if we possibly missed some dynamic type changes and we should
1098 consider the set to be speculative. */
1100 bool seen_unanalyzed_store
;
1103 /* Return true if STMT is not call and can modify a virtual method table pointer.
1104 We take advantage of fact that vtable stores must appear within constructor
1105 and destructor functions. */
1108 noncall_stmt_may_be_vtbl_ptr_store (gimple stmt
)
1110 if (is_gimple_assign (stmt
))
1112 tree lhs
= gimple_assign_lhs (stmt
);
1114 if (gimple_clobber_p (stmt
))
1116 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs
)))
1118 if (flag_strict_aliasing
1119 && !POINTER_TYPE_P (TREE_TYPE (lhs
)))
1122 if (TREE_CODE (lhs
) == COMPONENT_REF
1123 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs
, 1)))
1125 /* In the future we might want to use get_base_ref_and_offset to find
1126 if there is a field corresponding to the offset and if so, proceed
1127 almost like if it was a component ref. */
1131 /* Code unification may mess with inline stacks. */
1132 if (cfun
->after_inlining
)
1135 /* Walk the inline stack and watch out for ctors/dtors.
1136 TODO: Maybe we can require the store to appear in toplevel
1137 block of CTOR/DTOR. */
1138 for (tree block
= gimple_block (stmt
); block
&& TREE_CODE (block
) == BLOCK
;
1139 block
= BLOCK_SUPERCONTEXT (block
))
1140 if (BLOCK_ABSTRACT_ORIGIN (block
)
1141 && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block
)) == FUNCTION_DECL
)
1143 tree fn
= BLOCK_ABSTRACT_ORIGIN (block
);
1145 if (flags_from_decl_or_type (fn
) & (ECF_PURE
| ECF_CONST
))
1147 return (TREE_CODE (TREE_TYPE (fn
)) == METHOD_TYPE
1148 && (DECL_CXX_CONSTRUCTOR_P (fn
)
1149 || DECL_CXX_DESTRUCTOR_P (fn
)));
1151 return (TREE_CODE (TREE_TYPE (current_function_decl
)) == METHOD_TYPE
1152 && (DECL_CXX_CONSTRUCTOR_P (current_function_decl
)
1153 || DECL_CXX_DESTRUCTOR_P (current_function_decl
)));
1156 /* If STMT can be proved to be an assignment to the virtual method table
1157 pointer of ANALYZED_OBJ and the type associated with the new table
1158 identified, return the type. Otherwise return NULL_TREE if type changes
1159 in unknown way or ERROR_MARK_NODE if type is unchanged. */
1162 extr_type_from_vtbl_ptr_store (gimple stmt
, struct type_change_info
*tci
,
1163 HOST_WIDE_INT
*type_offset
)
1165 HOST_WIDE_INT offset
, size
, max_size
;
1166 tree lhs
, rhs
, base
;
1168 if (!gimple_assign_single_p (stmt
))
1171 lhs
= gimple_assign_lhs (stmt
);
1172 rhs
= gimple_assign_rhs1 (stmt
);
1173 if (TREE_CODE (lhs
) != COMPONENT_REF
1174 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs
, 1)))
1177 fprintf (dump_file
, " LHS is not virtual table.\n");
1181 if (tci
->vtbl_ptr_ref
&& operand_equal_p (lhs
, tci
->vtbl_ptr_ref
, 0))
1185 base
= get_ref_base_and_extent (lhs
, &offset
, &size
, &max_size
);
1186 if (DECL_P (tci
->instance
))
1188 if (base
!= tci
->instance
)
1192 fprintf (dump_file
, " base:");
1193 print_generic_expr (dump_file
, base
, TDF_SLIM
);
1194 fprintf (dump_file
, " does not match instance:");
1195 print_generic_expr (dump_file
, tci
->instance
, TDF_SLIM
);
1196 fprintf (dump_file
, "\n");
1201 else if (TREE_CODE (base
) == MEM_REF
)
1203 if (!operand_equal_p (tci
->instance
, TREE_OPERAND (base
, 0), 0))
1207 fprintf (dump_file
, " base mem ref:");
1208 print_generic_expr (dump_file
, base
, TDF_SLIM
);
1209 fprintf (dump_file
, " does not match instance:");
1210 print_generic_expr (dump_file
, tci
->instance
, TDF_SLIM
);
1211 fprintf (dump_file
, "\n");
1215 if (!integer_zerop (TREE_OPERAND (base
, 1)))
1217 if (!tree_fits_shwi_p (TREE_OPERAND (base
, 1)))
1221 fprintf (dump_file
, " base mem ref:");
1222 print_generic_expr (dump_file
, base
, TDF_SLIM
);
1223 fprintf (dump_file
, " has non-representable offset:");
1224 print_generic_expr (dump_file
, tci
->instance
, TDF_SLIM
);
1225 fprintf (dump_file
, "\n");
1230 offset
+= tree_to_shwi (TREE_OPERAND (base
, 1)) * BITS_PER_UNIT
;
1233 else if (!operand_equal_p (tci
->instance
, base
, 0)
1238 fprintf (dump_file
, " base:");
1239 print_generic_expr (dump_file
, base
, TDF_SLIM
);
1240 fprintf (dump_file
, " does not match instance:");
1241 print_generic_expr (dump_file
, tci
->instance
, TDF_SLIM
);
1242 fprintf (dump_file
, " with offset %i\n", (int)tci
->offset
);
1244 return tci
->offset
> GET_MODE_BITSIZE (Pmode
) ? error_mark_node
: NULL_TREE
;
1246 if (offset
!= tci
->offset
1247 || size
!= POINTER_SIZE
1248 || max_size
!= POINTER_SIZE
)
1251 fprintf (dump_file
, " wrong offset %i!=%i or size %i\n",
1252 (int)offset
, (int)tci
->offset
, (int)size
);
1253 return offset
+ GET_MODE_BITSIZE (Pmode
) <= tci
->offset
1255 && tci
->offset
+ GET_MODE_BITSIZE (Pmode
) > offset
+ max_size
)
1256 ? error_mark_node
: NULL
;
1261 unsigned HOST_WIDE_INT offset2
;
1263 if (!vtable_pointer_value_to_vtable (rhs
, &vtable
, &offset2
))
1266 fprintf (dump_file
, " Failed to lookup binfo\n");
1270 tree binfo
= subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable
)),
1275 fprintf (dump_file
, " Construction vtable used\n");
1276 /* FIXME: We should suport construction contexts. */
1280 *type_offset
= tree_to_shwi (BINFO_OFFSET (binfo
)) * BITS_PER_UNIT
;
1281 return DECL_CONTEXT (vtable
);
1284 /* Record dynamic type change of TCI to TYPE. */
1287 record_known_type (struct type_change_info
*tci
, tree type
, HOST_WIDE_INT offset
)
1293 fprintf (dump_file
, " Recording type: ");
1294 print_generic_expr (dump_file
, type
, TDF_SLIM
);
1295 fprintf (dump_file
, " at offset %i\n", (int)offset
);
1298 fprintf (dump_file
, " Recording unknown type\n");
1301 /* If we found a constructor of type that is not polymorphic or
1302 that may contain the type in question as a field (not as base),
1303 restrict to the inner class first to make type matching bellow
1307 || (TREE_CODE (type
) != RECORD_TYPE
1308 || !polymorphic_type_binfo_p (TYPE_BINFO (type
)))))
1310 ipa_polymorphic_call_context context
;
1312 context
.offset
= offset
;
1313 context
.outer_type
= type
;
1314 context
.maybe_in_construction
= false;
1315 context
.maybe_derived_type
= false;
1316 context
.dynamic
= true;
1317 /* If we failed to find the inner type, we know that the call
1318 would be undefined for type produced here. */
1319 if (!context
.restrict_to_inner_class (tci
->otr_type
))
1322 fprintf (dump_file
, " Ignoring; does not contain otr_type\n");
1325 /* Watch for case we reached an POD type and anticipate placement
1327 if (!context
.maybe_derived_type
)
1329 type
= context
.outer_type
;
1330 offset
= context
.offset
;
1333 if (tci
->type_maybe_changed
1334 && (!types_same_for_odr (type
, tci
->known_current_type
)
1335 || offset
!= tci
->known_current_offset
))
1336 tci
->multiple_types_encountered
= true;
1337 tci
->known_current_type
= TYPE_MAIN_VARIANT (type
);
1338 tci
->known_current_offset
= offset
;
1339 tci
->type_maybe_changed
= true;
1342 /* Callback of walk_aliased_vdefs and a helper function for
1343 detect_type_change to check whether a particular statement may modify
1344 the virtual table pointer, and if possible also determine the new type of
1345 the (sub-)object. It stores its result into DATA, which points to a
1346 type_change_info structure. */
1349 check_stmt_for_type_change (ao_ref
*ao ATTRIBUTE_UNUSED
, tree vdef
, void *data
)
1351 gimple stmt
= SSA_NAME_DEF_STMT (vdef
);
1352 struct type_change_info
*tci
= (struct type_change_info
*) data
;
1355 /* If we already gave up, just terminate the rest of walk. */
1356 if (tci
->multiple_types_encountered
)
1359 if (is_gimple_call (stmt
))
1361 if (gimple_call_flags (stmt
) & (ECF_CONST
| ECF_PURE
))
1364 /* Check for a constructor call. */
1365 if ((fn
= gimple_call_fndecl (stmt
)) != NULL_TREE
1366 && DECL_CXX_CONSTRUCTOR_P (fn
)
1367 && TREE_CODE (TREE_TYPE (fn
)) == METHOD_TYPE
1368 && gimple_call_num_args (stmt
))
1370 tree op
= walk_ssa_copies (gimple_call_arg (stmt
, 0));
1371 tree type
= method_class_type (TREE_TYPE (fn
));
1372 HOST_WIDE_INT offset
= 0, size
, max_size
;
1376 fprintf (dump_file
, " Checking constructor call: ");
1377 print_gimple_stmt (dump_file
, stmt
, 0, 0);
1380 /* See if THIS parameter seems like instance pointer. */
1381 if (TREE_CODE (op
) == ADDR_EXPR
)
1383 op
= get_ref_base_and_extent (TREE_OPERAND (op
, 0),
1384 &offset
, &size
, &max_size
);
1385 if (size
!= max_size
|| max_size
== -1)
1387 tci
->speculative
= true;
1390 if (op
&& TREE_CODE (op
) == MEM_REF
)
1392 if (!tree_fits_shwi_p (TREE_OPERAND (op
, 1)))
1394 tci
->speculative
= true;
1397 offset
+= tree_to_shwi (TREE_OPERAND (op
, 1))
1399 op
= TREE_OPERAND (op
, 0);
1401 else if (DECL_P (op
))
1405 tci
->speculative
= true;
1408 op
= walk_ssa_copies (op
);
1410 if (operand_equal_p (op
, tci
->instance
, 0)
1412 && TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
1413 && tree_fits_shwi_p (TYPE_SIZE (type
))
1414 && tree_to_shwi (TYPE_SIZE (type
)) + offset
> tci
->offset
)
1416 record_known_type (tci
, type
, tci
->offset
- offset
);
1420 /* Calls may possibly change dynamic type by placement new. Assume
1421 it will not happen, but make result speculative only. */
1424 fprintf (dump_file
, " Function call may change dynamic type:");
1425 print_gimple_stmt (dump_file
, stmt
, 0, 0);
1427 tci
->speculative
= true;
1430 /* Check for inlined virtual table store. */
1431 else if (noncall_stmt_may_be_vtbl_ptr_store (stmt
))
1434 HOST_WIDE_INT offset
= 0;
1437 fprintf (dump_file
, " Checking vtbl store: ");
1438 print_gimple_stmt (dump_file
, stmt
, 0, 0);
1441 type
= extr_type_from_vtbl_ptr_store (stmt
, tci
, &offset
);
1442 if (type
== error_mark_node
)
1444 gcc_assert (!type
|| TYPE_MAIN_VARIANT (type
) == type
);
1448 fprintf (dump_file
, " Unanalyzed store may change type.\n");
1449 tci
->seen_unanalyzed_store
= true;
1450 tci
->speculative
= true;
1453 record_known_type (tci
, type
, offset
);
1460 /* THIS is polymorphic call context obtained from get_polymorphic_context.
1461 OTR_OBJECT is pointer to the instance returned by OBJ_TYPE_REF_OBJECT.
1462 INSTANCE is pointer to the outer instance as returned by
1463 get_polymorphic_context. To avoid creation of temporary expressions,
1464 INSTANCE may also be an declaration of get_polymorphic_context found the
1465 value to be in static storage.
1467 If the type of instance is not fully determined
1468 (either OUTER_TYPE is unknown or MAYBE_IN_CONSTRUCTION/INCLUDE_DERIVED_TYPES
1469 is set), try to walk memory writes and find the actual construction of the
1472 Return true if memory is unchanged from function entry.
1474 We do not include this analysis in the context analysis itself, because
1475 it needs memory SSA to be fully built and the walk may be expensive.
1476 So it is not suitable for use withing fold_stmt and similar uses. */
1479 ipa_polymorphic_call_context::get_dynamic_type (tree instance
,
1484 struct type_change_info tci
;
1486 bool function_entry_reached
= false;
1487 tree instance_ref
= NULL
;
1489 /* Remember OFFSET before it is modified by restrict_to_inner_class.
1490 This is because we do not update INSTANCE when walking inwards. */
1491 HOST_WIDE_INT instance_offset
= offset
;
1494 otr_type
= TYPE_MAIN_VARIANT (otr_type
);
1496 /* Walk into inner type. This may clear maybe_derived_type and save us
1497 from useless work. It also makes later comparsions with static type
1499 if (outer_type
&& otr_type
)
1501 if (!restrict_to_inner_class (otr_type
))
1505 if (!maybe_in_construction
&& !maybe_derived_type
)
1508 /* We need to obtain refernce to virtual table pointer. It is better
1509 to look it up in the code rather than build our own. This require bit
1510 of pattern matching, but we end up verifying that what we found is
1513 What we pattern match is:
1515 tmp = instance->_vptr.A; // vtbl ptr load
1516 tmp2 = tmp[otr_token]; // vtable lookup
1517 OBJ_TYPE_REF(tmp2;instance->0) (instance);
1519 We want to start alias oracle walk from vtbl pointer load,
1520 but we may not be able to identify it, for example, when PRE moved the
1523 if (gimple_code (call
) == GIMPLE_CALL
)
1525 tree ref
= gimple_call_fn (call
);
1526 HOST_WIDE_INT offset2
, size
, max_size
;
1528 if (TREE_CODE (ref
) == OBJ_TYPE_REF
)
1530 ref
= OBJ_TYPE_REF_EXPR (ref
);
1531 ref
= walk_ssa_copies (ref
);
1533 /* Check if definition looks like vtable lookup. */
1534 if (TREE_CODE (ref
) == SSA_NAME
1535 && !SSA_NAME_IS_DEFAULT_DEF (ref
)
1536 && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref
))
1537 && TREE_CODE (gimple_assign_rhs1
1538 (SSA_NAME_DEF_STMT (ref
))) == MEM_REF
)
1540 ref
= get_base_address
1541 (TREE_OPERAND (gimple_assign_rhs1
1542 (SSA_NAME_DEF_STMT (ref
)), 0));
1543 ref
= walk_ssa_copies (ref
);
1544 /* Find base address of the lookup and see if it looks like
1546 if (TREE_CODE (ref
) == SSA_NAME
1547 && !SSA_NAME_IS_DEFAULT_DEF (ref
)
1548 && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref
)))
1550 tree ref_exp
= gimple_assign_rhs1 (SSA_NAME_DEF_STMT (ref
));
1551 tree base_ref
= get_ref_base_and_extent
1552 (ref_exp
, &offset2
, &size
, &max_size
);
1554 /* Finally verify that what we found looks like read from OTR_OBJECT
1555 or from INSTANCE with offset OFFSET. */
1557 && ((TREE_CODE (base_ref
) == MEM_REF
1558 && ((offset2
== instance_offset
1559 && TREE_OPERAND (base_ref
, 0) == instance
)
1560 || (!offset2
&& TREE_OPERAND (base_ref
, 0) == otr_object
)))
1561 || (DECL_P (instance
) && base_ref
== instance
1562 && offset2
== instance_offset
)))
1564 stmt
= SSA_NAME_DEF_STMT (ref
);
1565 instance_ref
= ref_exp
;
1572 /* If we failed to look up the refernece in code, build our own. */
1575 /* If the statement in question does not use memory, we can't tell
1577 if (!gimple_vuse (stmt
))
1579 ao_ref_init_from_ptr_and_size (&ao
, otr_object
, NULL
);
1582 /* Otherwise use the real reference. */
1583 ao_ref_init (&ao
, instance_ref
);
1585 /* We look for vtbl pointer read. */
1586 ao
.size
= POINTER_SIZE
;
1587 ao
.max_size
= ao
.size
;
1590 = get_deref_alias_set (TREE_TYPE (BINFO_VTABLE (TYPE_BINFO (otr_type
))));
1594 fprintf (dump_file
, "Determining dynamic type for call: ");
1595 print_gimple_stmt (dump_file
, call
, 0, 0);
1596 fprintf (dump_file
, " Starting walk at: ");
1597 print_gimple_stmt (dump_file
, stmt
, 0, 0);
1598 fprintf (dump_file
, " instance pointer: ");
1599 print_generic_expr (dump_file
, otr_object
, TDF_SLIM
);
1600 fprintf (dump_file
, " Outer instance pointer: ");
1601 print_generic_expr (dump_file
, instance
, TDF_SLIM
);
1602 fprintf (dump_file
, " offset: %i (bits)", (int)offset
);
1603 fprintf (dump_file
, " vtbl reference: ");
1604 print_generic_expr (dump_file
, instance_ref
, TDF_SLIM
);
1605 fprintf (dump_file
, "\n");
1608 tci
.offset
= offset
;
1609 tci
.instance
= instance
;
1610 tci
.vtbl_ptr_ref
= instance_ref
;
1611 gcc_assert (TREE_CODE (instance
) != MEM_REF
);
1612 tci
.known_current_type
= NULL_TREE
;
1613 tci
.known_current_offset
= 0;
1614 tci
.otr_type
= otr_type
;
1615 tci
.type_maybe_changed
= false;
1616 tci
.multiple_types_encountered
= false;
1617 tci
.speculative
= false;
1618 tci
.seen_unanalyzed_store
= false;
1620 walk_aliased_vdefs (&ao
, gimple_vuse (stmt
), check_stmt_for_type_change
,
1621 &tci
, NULL
, &function_entry_reached
);
1623 /* If we did not find any type changing statements, we may still drop
1624 maybe_in_construction flag if the context already have outer type.
1626 Here we make special assumptions about both constructors and
1627 destructors which are all the functions that are allowed to alter the
1628 VMT pointers. It assumes that destructors begin with assignment into
1629 all VMT pointers and that constructors essentially look in the
1632 1) The very first thing they do is that they call constructors of
1633 ancestor sub-objects that have them.
1635 2) Then VMT pointers of this and all its ancestors is set to new
1636 values corresponding to the type corresponding to the constructor.
1638 3) Only afterwards, other stuff such as constructor of member
1639 sub-objects and the code written by the user is run. Only this may
1640 include calling virtual functions, directly or indirectly.
1642 4) placement new can not be used to change type of non-POD statically
1643 allocated variables.
1645 There is no way to call a constructor of an ancestor sub-object in any
1648 This means that we do not have to care whether constructors get the
1649 correct type information because they will always change it (in fact,
1650 if we define the type to be given by the VMT pointer, it is undefined).
1652 The most important fact to derive from the above is that if, for some
1653 statement in the section 3, we try to detect whether the dynamic type
1654 has changed, we can safely ignore all calls as we examine the function
1655 body backwards until we reach statements in section 2 because these
1656 calls cannot be ancestor constructors or destructors (if the input is
1657 not bogus) and so do not change the dynamic type (this holds true only
1658 for automatically allocated objects but at the moment we devirtualize
1659 only these). We then must detect that statements in section 2 change
1660 the dynamic type and can try to derive the new type. That is enough
1661 and we can stop, we will never see the calls into constructors of
1662 sub-objects in this code.
1664 Therefore if the static outer type was found (outer_type)
1665 we can safely ignore tci.speculative that is set on calls and give up
1666 only if there was dyanmic type store that may affect given variable
1667 (seen_unanalyzed_store) */
1669 if (!tci
.type_maybe_changed
1672 && !tci
.seen_unanalyzed_store
1673 && !tci
.multiple_types_encountered
1674 && offset
== tci
.offset
1675 && types_same_for_odr (tci
.known_current_type
,
1678 if (!outer_type
|| tci
.seen_unanalyzed_store
)
1680 if (maybe_in_construction
)
1681 maybe_in_construction
= false;
1683 fprintf (dump_file
, " No dynamic type change found.\n");
1687 if (tci
.known_current_type
1688 && !function_entry_reached
1689 && !tci
.multiple_types_encountered
)
1691 if (!tci
.speculative
)
1693 outer_type
= TYPE_MAIN_VARIANT (tci
.known_current_type
);
1694 offset
= tci
.known_current_offset
;
1696 maybe_in_construction
= false;
1697 maybe_derived_type
= false;
1699 fprintf (dump_file
, " Determined dynamic type.\n");
1701 else if (!speculative_outer_type
1702 || speculative_maybe_derived_type
)
1704 speculative_outer_type
= TYPE_MAIN_VARIANT (tci
.known_current_type
);
1705 speculative_offset
= tci
.known_current_offset
;
1706 speculative_maybe_derived_type
= false;
1708 fprintf (dump_file
, " Determined speculative dynamic type.\n");
1713 fprintf (dump_file
, " Found multiple types%s%s\n",
1714 function_entry_reached
? " (function entry reached)" : "",
1715 function_entry_reached
? " (multiple types encountered)" : "");
1721 /* See if speculation given by SPEC_OUTER_TYPE, SPEC_OFFSET and SPEC_MAYBE_DERIVED_TYPE
1722 seems consistent (and useful) with what we already have in the non-speculative context. */
1725 ipa_polymorphic_call_context::speculation_consistent_p (tree spec_outer_type
,
1726 HOST_WIDE_INT spec_offset
,
1727 bool spec_maybe_derived_type
,
1730 if (!flag_devirtualize_speculatively
)
1733 /* Non-polymorphic types are useless for deriving likely polymorphic
1735 if (!spec_outer_type
|| !contains_polymorphic_type_p (spec_outer_type
))
1738 /* If we know nothing, speculation is always good. */
1742 /* Speculation is only useful to avoid derived types.
1743 This is not 100% true for placement new, where the outer context may
1744 turn out to be useless, but ignore these for now. */
1745 if (!maybe_derived_type
)
1748 /* If types agrees, speculation is consistent, but it makes sense only
1749 when it says something new. */
1750 if (types_must_be_same_for_odr (spec_outer_type
, outer_type
))
1751 return maybe_derived_type
&& !spec_maybe_derived_type
;
1753 /* If speculation does not contain the type in question, ignore it. */
1755 && !contains_type_p (spec_outer_type
, spec_offset
, otr_type
, false, true))
1758 /* If outer type already contains speculation as a filed,
1759 it is useless. We already know from OUTER_TYPE
1760 SPEC_TYPE and that it is not in the construction. */
1761 if (contains_type_p (outer_type
, offset
- spec_offset
,
1762 spec_outer_type
, false, false))
1765 /* If speculative outer type is not more specified than outer
1767 We can only decide this safely if we can compare types with OUTER_TYPE.
1769 if ((!in_lto_p
|| odr_type_p (outer_type
))
1770 && !contains_type_p (spec_outer_type
,
1771 spec_offset
- offset
,
1777 /* Improve THIS with speculation described by NEW_OUTER_TYPE, NEW_OFFSET,
1778 NEW_MAYBE_DERIVED_TYPE
1779 If OTR_TYPE is set, assume the context is used with OTR_TYPE. */
1782 ipa_polymorphic_call_context::combine_speculation_with
1783 (tree new_outer_type
, HOST_WIDE_INT new_offset
, bool new_maybe_derived_type
,
1786 if (!new_outer_type
)
1789 /* restrict_to_inner_class may eliminate wrong speculation making our job
1792 restrict_to_inner_class (otr_type
);
1794 if (!speculation_consistent_p (new_outer_type
, new_offset
,
1795 new_maybe_derived_type
, otr_type
))
1798 /* New speculation is a win in case we have no speculation or new
1799 speculation does not consider derivations. */
1800 if (!speculative_outer_type
1801 || (speculative_maybe_derived_type
1802 && !new_maybe_derived_type
))
1804 speculative_outer_type
= new_outer_type
;
1805 speculative_offset
= new_offset
;
1806 speculative_maybe_derived_type
= new_maybe_derived_type
;
1809 else if (types_must_be_same_for_odr (speculative_outer_type
,
1812 if (speculative_offset
!= new_offset
)
1814 /* OK we have two contexts that seems valid but they disagree,
1817 This is not a lattice operation, so we may want to drop it later. */
1818 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1820 "Speculative outer types match, "
1821 "offset mismatch -> invalid speculation\n");
1822 clear_speculation ();
1827 if (speculative_maybe_derived_type
&& !new_maybe_derived_type
)
1829 speculative_maybe_derived_type
= false;
1836 /* Choose type that contains the other. This one either contains the outer
1837 as a field (thus giving exactly one target) or is deeper in the type
1839 else if (speculative_outer_type
1840 && speculative_maybe_derived_type
1841 && (new_offset
> speculative_offset
1842 || (new_offset
== speculative_offset
1843 && contains_type_p (new_outer_type
,
1844 0, speculative_outer_type
, false))))
1846 tree old_outer_type
= speculative_outer_type
;
1847 HOST_WIDE_INT old_offset
= speculative_offset
;
1848 bool old_maybe_derived_type
= speculative_maybe_derived_type
;
1850 speculative_outer_type
= new_outer_type
;
1851 speculative_offset
= new_offset
;
1852 speculative_maybe_derived_type
= new_maybe_derived_type
;
1855 restrict_to_inner_class (otr_type
);
1857 /* If the speculation turned out to make no sense, revert to sensible
1859 if (!speculative_outer_type
)
1861 speculative_outer_type
= old_outer_type
;
1862 speculative_offset
= old_offset
;
1863 speculative_maybe_derived_type
= old_maybe_derived_type
;
1866 return (old_offset
!= speculative_offset
1867 || old_maybe_derived_type
!= speculative_maybe_derived_type
1868 || types_must_be_same_for_odr (speculative_outer_type
,
1874 /* Assume that both THIS and a given context is valid and strenghten THIS
1875 if possible. Return true if any strenghtening was made.
1876 If actual type the context is being used in is known, OTR_TYPE should be
1877 set accordingly. This improves quality of combined result. */
1880 ipa_polymorphic_call_context::combine_with (ipa_polymorphic_call_context ctx
,
1883 bool updated
= false;
1885 if (ctx
.useless_p () || invalid
)
1888 /* Restricting context to inner type makes merging easier, however do not
1889 do that unless we know how the context is used (OTR_TYPE is non-NULL) */
1890 if (otr_type
&& !invalid
&& !ctx
.invalid
)
1892 restrict_to_inner_class (otr_type
);
1893 ctx
.restrict_to_inner_class (otr_type
);
1898 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1900 fprintf (dump_file
, "Polymorphic call context combine:");
1902 fprintf (dump_file
, "With context: ");
1903 ctx
.dump (dump_file
);
1906 fprintf (dump_file
, "To be used with type: ");
1907 print_generic_expr (dump_file
, otr_type
, TDF_SLIM
);
1908 fprintf (dump_file
, "\n");
1912 /* If call is known to be invalid, we are done. */
1915 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1916 fprintf (dump_file
, "-> Invalid context\n");
1920 if (!ctx
.outer_type
)
1922 else if (!outer_type
)
1924 outer_type
= ctx
.outer_type
;
1925 offset
= ctx
.offset
;
1926 dynamic
= ctx
.dynamic
;
1927 maybe_in_construction
= ctx
.maybe_in_construction
;
1928 maybe_derived_type
= ctx
.maybe_derived_type
;
1931 /* If types are known to be same, merging is quite easy. */
1932 else if (types_must_be_same_for_odr (outer_type
, ctx
.outer_type
))
1934 if (offset
!= ctx
.offset
1935 && TYPE_SIZE (outer_type
)
1936 && TREE_CODE (TYPE_SIZE (outer_type
)) == INTEGER_CST
)
1938 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1939 fprintf (dump_file
, "Outer types match, offset mismatch -> invalid\n");
1940 clear_speculation ();
1941 clear_outer_type ();
1945 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1946 fprintf (dump_file
, "Outer types match, merging flags\n");
1947 if (maybe_in_construction
&& !ctx
.maybe_in_construction
)
1950 maybe_in_construction
= false;
1952 if (maybe_derived_type
&& !ctx
.maybe_derived_type
)
1955 maybe_derived_type
= false;
1957 if (dynamic
&& !ctx
.dynamic
)
1963 /* If we know the type precisely, there is not much to improve. */
1964 else if (!maybe_derived_type
&& !maybe_in_construction
1965 && !ctx
.maybe_derived_type
&& !ctx
.maybe_in_construction
)
1967 /* It may be easy to check if second context permits the first
1968 and set INVALID otherwise. This is not easy to do in general;
1969 contains_type_p may return false negatives for non-comparable
1972 If OTR_TYPE is known, we however can expect that
1973 restrict_to_inner_class should have discovered the same base
1975 if (otr_type
&& !ctx
.maybe_in_construction
&& !ctx
.maybe_derived_type
)
1977 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1978 fprintf (dump_file
, "Contextes disagree -> invalid\n");
1982 /* See if one type contains the other as a field (not base).
1983 In this case we want to choose the wider type, because it contains
1984 more information. */
1985 else if (contains_type_p (ctx
.outer_type
, ctx
.offset
- offset
,
1986 outer_type
, false, false))
1988 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1989 fprintf (dump_file
, "Second type contain the first as a field\n");
1991 if (maybe_derived_type
)
1993 outer_type
= ctx
.outer_type
;
1994 maybe_derived_type
= ctx
.maybe_derived_type
;
1995 offset
= ctx
.offset
;
1996 dynamic
= ctx
.dynamic
;
2000 /* If we do not know how the context is being used, we can
2001 not clear MAYBE_IN_CONSTRUCTION because it may be offseted
2002 to other component of OUTER_TYPE later and we know nothing
2004 if (otr_type
&& maybe_in_construction
2005 && !ctx
.maybe_in_construction
)
2007 maybe_in_construction
= false;
2011 else if (contains_type_p (outer_type
, offset
- ctx
.offset
,
2012 ctx
.outer_type
, false, false))
2014 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2015 fprintf (dump_file
, "First type contain the second as a field\n");
2017 if (otr_type
&& maybe_in_construction
2018 && !ctx
.maybe_in_construction
)
2020 maybe_in_construction
= false;
2024 /* See if OUTER_TYPE is base of CTX.OUTER_TYPE. */
2025 else if (contains_type_p (ctx
.outer_type
,
2026 ctx
.offset
- offset
, outer_type
, false, true))
2028 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2029 fprintf (dump_file
, "First type is base of second\n");
2030 if (!maybe_derived_type
)
2032 if (!ctx
.maybe_in_construction
2033 && types_odr_comparable (outer_type
, ctx
.outer_type
))
2035 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2036 fprintf (dump_file
, "Second context does not permit base -> invalid\n");
2040 /* Pick variant deeper in the hiearchy. */
2043 outer_type
= ctx
.outer_type
;
2044 maybe_in_construction
= ctx
.maybe_in_construction
;
2045 maybe_derived_type
= ctx
.maybe_derived_type
;
2046 offset
= ctx
.offset
;
2047 dynamic
= ctx
.dynamic
;
2051 /* See if CTX.OUTER_TYPE is base of OUTER_TYPE. */
2052 else if (contains_type_p (outer_type
,
2053 offset
- ctx
.offset
, ctx
.outer_type
, false, true))
2055 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2056 fprintf (dump_file
, "Second type is base of first\n");
2057 if (!ctx
.maybe_derived_type
)
2059 if (!maybe_in_construction
2060 && types_odr_comparable (outer_type
, ctx
.outer_type
))
2062 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2063 fprintf (dump_file
, "First context does not permit base -> invalid\n");
2068 /* TODO handle merging using hiearchy. */
2069 else if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2070 fprintf (dump_file
, "Giving up on merge\n");
2072 updated
|= combine_speculation_with (ctx
.speculative_outer_type
,
2073 ctx
.speculative_offset
,
2074 ctx
.speculative_maybe_derived_type
,
2077 if (updated
&& dump_file
&& (dump_flags
& TDF_DETAILS
))
2079 fprintf (dump_file
, "Updated as: ");
2081 fprintf (dump_file
, "\n");
2087 clear_speculation ();
2088 clear_outer_type ();
2092 /* Take non-speculative info, merge it with speculative and clear speculation.
2093 Used when we no longer manage to keep track of actual outer type, but we
2094 think it is still there.
2096 If OTR_TYPE is set, the transformation can be done more effectively assuming
2097 that context is going to be used only that way. */
2100 ipa_polymorphic_call_context::make_speculative (tree otr_type
)
2102 tree spec_outer_type
= outer_type
;
2103 HOST_WIDE_INT spec_offset
= offset
;
2104 bool spec_maybe_derived_type
= maybe_derived_type
;
2109 clear_outer_type ();
2110 clear_speculation ();
2115 clear_outer_type ();
2116 combine_speculation_with (spec_outer_type
, spec_offset
,
2117 spec_maybe_derived_type
,
2121 /* Use when we can not track dynamic type change. This speculatively assume
2122 type change is not happening. */
2125 ipa_polymorphic_call_context::possible_dynamic_type_change (bool in_poly_cdtor
,
2129 make_speculative (otr_type
);
2130 else if (in_poly_cdtor
)
2131 maybe_in_construction
= true;