/* Analysis of polymorphic call context.
   Copyright (C) 2013-2015 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "double-int.h"
#include "fold-const.h"
#include "print-tree.h"
#include "hard-reg-set.h"
#include "statistics.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "tree-pass.h"
#include "tree-pretty-print.h"
#include "basic-block.h"
#include "plugin-api.h"
#include "ipa-utils.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "ipa-inline.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "stor-layout.h"
#include "data-streamer.h"
#include "lto-streamer.h"
#include "streamer-hooks.h"
/* Return true when TYPE contains a polymorphic type and thus is interesting
   for devirtualization machinery.  */

static bool contains_type_p (tree, HOST_WIDE_INT, tree,
			     bool consider_placement_new = true,
			     bool consider_bases = true);
bool
contains_polymorphic_type_p (const_tree type)
{
  type = TYPE_MAIN_VARIANT (type);

  if (RECORD_OR_UNION_TYPE_P (type))
    {
      if (TYPE_BINFO (type)
	  && polymorphic_type_binfo_p (TYPE_BINFO (type)))
	return true;
      for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	if (TREE_CODE (fld) == FIELD_DECL
	    && !DECL_ARTIFICIAL (fld)
	    && contains_polymorphic_type_p (TREE_TYPE (fld)))
	  return true;
      return false;
    }
  if (TREE_CODE (type) == ARRAY_TYPE)
    return contains_polymorphic_type_p (TREE_TYPE (type));
  return false;
}
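/* Illustrative note (not part of the original source): a type "contains a
   polymorphic type" when a vtable pointer can appear anywhere in its layout,
   directly or through a user-declared field or array element.  Assuming the
   C++ input below, contains_polymorphic_type_p is expected to return true
   for A, B and C, but false for P:

     struct A { virtual void f (); };	// polymorphic itself
     struct B { int x; A a; };		// contains A as a field
     struct C { B b[4]; };		// contains A via an array element
     struct P { int x; A *ptr; };	// only points to A; nothing contained  */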
/* Return true if it seems valid to use placement new to build EXPECTED_TYPE
   at position CUR_OFFSET within TYPE.

   POD can be changed to an instance of a polymorphic type by
   placement new.  Here we play safe and assume that any
   non-polymorphic type is POD.  */
static bool
possible_placement_new (tree type, tree expected_type,
			HOST_WIDE_INT cur_offset)
{
  return ((TREE_CODE (type) != RECORD_TYPE
	   || !TYPE_BINFO (type)
	   || cur_offset >= POINTER_SIZE
	   || !polymorphic_type_binfo_p (TYPE_BINFO (type)))
	  && (!TYPE_SIZE (type)
	      || !tree_fits_shwi_p (TYPE_SIZE (type))
	      || (cur_offset
		  + (expected_type ? tree_to_uhwi (TYPE_SIZE (expected_type))
		     : POINTER_SIZE)
		  <= tree_to_uhwi (TYPE_SIZE (type)))));
}
/* THIS->OUTER_TYPE is a type of memory object where object of OTR_TYPE
   is contained at THIS->OFFSET.  Walk the memory representation of
   THIS->OUTER_TYPE and find the outermost class type that matches
   OTR_TYPE or contains OTR_TYPE as a base.  Update THIS
   to represent it.

   If OTR_TYPE is NULL, just find the outermost polymorphic type with
   a virtual table present at position OFFSET.

   For example when THIS represents type
     class A
       {
	 int a;
	 class B b;
       }
   and we look for type at offset sizeof(int), we end up with B and offset 0.
   If the same is produced by multiple inheritance, we end up with A and offset
   sizeof(int).

   If we can not find the corresponding class, give up by setting
   THIS->OUTER_TYPE to OTR_TYPE and THIS->OFFSET to NULL.
   Return true when the lookup was successful.

   When CONSIDER_PLACEMENT_NEW is false, reject contexts that may be made
   valid only via allocation of a new polymorphic type inside by means
   of placement new.

   When CONSIDER_BASES is false, only look for actual fields, not base types
   of TYPE.  */

bool
ipa_polymorphic_call_context::restrict_to_inner_class (tree otr_type,
							bool consider_placement_new,
							bool consider_bases)
{
  tree type = outer_type;
  HOST_WIDE_INT cur_offset = offset;
  bool speculative = false;
  bool size_unknown = false;
  unsigned HOST_WIDE_INT otr_type_size = POINTER_SIZE;
177 /* Update OUTER_TYPE to match EXPECTED_TYPE if it is not set. */
180 clear_outer_type (otr_type
);
184 /* See if OFFSET points inside OUTER_TYPE. If it does not, we know
185 that the context is either invalid, or the instance type must be
186 derived from OUTER_TYPE.
     Because the instance type may contain a field whose type is of OUTER_TYPE,
     we can not derive any effective information about it.

     TODO: In the case we know all derived types, we can definitely do better
193 else if (TYPE_SIZE (outer_type
)
194 && tree_fits_shwi_p (TYPE_SIZE (outer_type
))
195 && tree_to_shwi (TYPE_SIZE (outer_type
)) >= 0
196 && tree_to_shwi (TYPE_SIZE (outer_type
)) <= offset
)
198 clear_outer_type (otr_type
);
202 /* If derived type is not allowed, we know that the context is invalid.
203 For dynamic types, we really do not have information about
204 size of the memory location. It is possible that completely
205 different type is stored after outer_type. */
206 if (!maybe_derived_type
&& !dynamic
)
208 clear_speculation ();
214 if (otr_type
&& TYPE_SIZE (otr_type
)
215 && tree_fits_shwi_p (TYPE_SIZE (otr_type
)))
216 otr_type_size
= tree_to_uhwi (TYPE_SIZE (otr_type
));
218 if (!type
|| offset
< 0)
219 goto no_useful_type_info
;
221 /* Find the sub-object the constant actually refers to and mark whether it is
222 an artificial one (as opposed to a user-defined one).
224 This loop is performed twice; first time for outer_type and second time
225 for speculative_outer_type. The second run has SPECULATIVE set. */
228 unsigned HOST_WIDE_INT pos
, size
;
231 /* If we do not know size of TYPE, we need to be more conservative
232 about accepting cases where we can not find EXPECTED_TYPE.
233 Generally the types that do matter here are of constant size.
234 Size_unknown case should be very rare. */
236 && tree_fits_shwi_p (TYPE_SIZE (type
))
237 && tree_to_shwi (TYPE_SIZE (type
)) >= 0)
238 size_unknown
= false;
242 /* On a match, just return what we found. */
244 && types_odr_comparable (type
, otr_type
)
245 && types_same_for_odr (type
, otr_type
))
247 && TREE_CODE (type
) == RECORD_TYPE
249 && polymorphic_type_binfo_p (TYPE_BINFO (type
))))
253 /* If we did not match the offset, just give up on speculation. */
255 /* Also check if speculation did not end up being same as
257 || (types_must_be_same_for_odr (speculative_outer_type
,
259 && (maybe_derived_type
260 == speculative_maybe_derived_type
)))
261 clear_speculation ();
266 /* If type is known to be final, do not worry about derived
267 types. Testing it here may help us to avoid speculation. */
268 if (otr_type
&& TREE_CODE (outer_type
) == RECORD_TYPE
269 && (!in_lto_p
|| odr_type_p (outer_type
))
270 && type_known_to_have_no_deriavations_p (outer_type
))
271 maybe_derived_type
= false;
      /* Type can not contain itself on a non-zero offset.  In that case
	 just give up.  Still accept the case where size is now known.
	 Either the second copy may appear past the end of type or within
	 the non-POD buffer located inside the variably sized type
	 itself.  */
	goto no_useful_type_info;
      /* If we determined the type precisely or we have no clue on
	 speculation, we are done.  */
282 if (!maybe_derived_type
|| !speculative_outer_type
283 || !speculation_consistent_p (speculative_outer_type
,
285 speculative_maybe_derived_type
,
288 clear_speculation ();
291 /* Otherwise look into speculation now. */
295 type
= speculative_outer_type
;
296 cur_offset
= speculative_offset
;
      /* Walk fields and find the corresponding one at OFFSET.  */
303 if (TREE_CODE (type
) == RECORD_TYPE
)
305 for (fld
= TYPE_FIELDS (type
); fld
; fld
= DECL_CHAIN (fld
))
307 if (TREE_CODE (fld
) != FIELD_DECL
)
310 pos
= int_bit_position (fld
);
311 if (pos
> (unsigned HOST_WIDE_INT
)cur_offset
)
314 /* Do not consider vptr itself. Not even for placement new. */
315 if (!pos
&& DECL_ARTIFICIAL (fld
)
316 && POINTER_TYPE_P (TREE_TYPE (fld
))
318 && polymorphic_type_binfo_p (TYPE_BINFO (type
)))
321 if (!DECL_SIZE (fld
) || !tree_fits_uhwi_p (DECL_SIZE (fld
)))
322 goto no_useful_type_info
;
323 size
= tree_to_uhwi (DECL_SIZE (fld
));
	      /* We can always skip types smaller than pointer size:
		 those can not contain a virtual table pointer.

		 Disqualifying fields that are too small to fit OTR_TYPE
		 saves work needed to walk them for no benefit.
		 Because of the way the bases are packed into a class, the
		 field's size may be smaller than the type size, so it needs
		 to be done with care.  */
334 if (pos
<= (unsigned HOST_WIDE_INT
)cur_offset
335 && (pos
+ size
) >= (unsigned HOST_WIDE_INT
)cur_offset
338 || !TYPE_SIZE (TREE_TYPE (fld
))
339 || !tree_fits_shwi_p (TYPE_SIZE (TREE_TYPE (fld
)))
340 || (pos
+ tree_to_uhwi (TYPE_SIZE (TREE_TYPE (fld
))))
341 >= cur_offset
+ otr_type_size
))
346 goto no_useful_type_info
;
348 type
= TYPE_MAIN_VARIANT (TREE_TYPE (fld
));
350 /* DECL_ARTIFICIAL represents a basetype. */
351 if (!DECL_ARTIFICIAL (fld
))
	      /* As soon as we see a field containing the type,
		 we know we are not looking for derivations.  */
359 maybe_derived_type
= false;
363 speculative_outer_type
= type
;
364 speculative_offset
= cur_offset
;
365 speculative_maybe_derived_type
= false;
368 else if (!consider_bases
)
369 goto no_useful_type_info
;
371 else if (TREE_CODE (type
) == ARRAY_TYPE
)
373 tree subtype
= TYPE_MAIN_VARIANT (TREE_TYPE (type
));
375 /* Give up if we don't know array field size.
376 Also give up on non-polymorphic types as they are used
377 as buffers for placement new. */
378 if (!TYPE_SIZE (subtype
)
379 || !tree_fits_shwi_p (TYPE_SIZE (subtype
))
380 || tree_to_shwi (TYPE_SIZE (subtype
)) <= 0
381 || !contains_polymorphic_type_p (subtype
))
382 goto no_useful_type_info
;
384 HOST_WIDE_INT new_offset
= cur_offset
% tree_to_shwi (TYPE_SIZE (subtype
));
	  /* We may see a buffer for placement new.  In this case the expected type
	     can be bigger than the subtype.  */
388 if (TYPE_SIZE (subtype
)
389 && (cur_offset
+ otr_type_size
390 > tree_to_uhwi (TYPE_SIZE (subtype
))))
391 goto no_useful_type_info
;
393 cur_offset
= new_offset
;
399 maybe_derived_type
= false;
403 speculative_outer_type
= type
;
404 speculative_offset
= cur_offset
;
405 speculative_maybe_derived_type
= false;
408 /* Give up on anything else. */
412 if (maybe_derived_type
&& !speculative
413 && TREE_CODE (outer_type
) == RECORD_TYPE
414 && TREE_CODE (otr_type
) == RECORD_TYPE
415 && TYPE_BINFO (otr_type
)
417 && get_binfo_at_offset (TYPE_BINFO (otr_type
), 0, outer_type
))
419 clear_outer_type (otr_type
);
420 if (!speculative_outer_type
421 || !speculation_consistent_p (speculative_outer_type
,
423 speculative_maybe_derived_type
,
425 clear_speculation ();
426 if (speculative_outer_type
)
429 type
= speculative_outer_type
;
430 cur_offset
= speculative_offset
;
  /* We found no way to embed EXPECTED_TYPE in TYPE.
     We still permit two special cases - placement new and
     the case of variadic types containing themselves.  */
439 && consider_placement_new
440 && (size_unknown
|| !type
|| maybe_derived_type
441 || possible_placement_new (type
, otr_type
, cur_offset
)))
443 /* In these weird cases we want to accept the context.
444 In non-speculative run we have no useful outer_type info
445 (TODO: we may eventually want to record upper bound on the
446 type size that can be used to prune the walk),
447 but we still want to consider speculation that may
451 clear_outer_type (otr_type
);
452 if (!speculative_outer_type
453 || !speculation_consistent_p (speculative_outer_type
,
455 speculative_maybe_derived_type
,
457 clear_speculation ();
458 if (speculative_outer_type
)
461 type
= speculative_outer_type
;
462 cur_offset
= speculative_offset
;
468 clear_speculation ();
473 clear_speculation ();
476 clear_outer_type (otr_type
);
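/* Illustrative sketch (not part of the original source) of how the walk above
   behaves for the class A/B example in the comment before this function,
   assuming sizeof (int) == 4; A_type and B_type are hypothetical trees for
   the two classes:

     ipa_polymorphic_call_context ctx;
     ctx.outer_type = A_type;			// memory known to hold an A
     ctx.offset = 4 * BITS_PER_UNIT;		// points at the field b
     ctx.maybe_derived_type = false;
     if (ctx.restrict_to_inner_class (B_type))	// walk inward towards B
       gcc_assert (ctx.offset == 0);		// now described relative to B

   On success the context describes the same memory as an instance of B.  */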
/* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET.
   CONSIDER_PLACEMENT_NEW makes the function accept cases where OTR_TYPE can
   be built within OUTER_TYPE by means of placement new.  CONSIDER_BASES makes
   the function accept cases where OTR_TYPE appears as a base of OUTER_TYPE or
   as a base of one of the fields of OUTER_TYPE.  */

static bool
contains_type_p (tree outer_type, HOST_WIDE_INT offset,
		 tree otr_type,
		 bool consider_placement_new,
		 bool consider_bases)
{
  ipa_polymorphic_call_context context;
498 /* Check that type is within range. */
501 if (TYPE_SIZE (outer_type
) && TYPE_SIZE (otr_type
)
502 && TREE_CODE (outer_type
) == INTEGER_CST
503 && TREE_CODE (otr_type
) == INTEGER_CST
504 && wi::ltu_p (wi::to_offset (outer_type
), (wi::to_offset (otr_type
) + offset
)))
  context.offset = offset;
  context.outer_type = TYPE_MAIN_VARIANT (outer_type);
  context.maybe_derived_type = false;
  return context.restrict_to_inner_class (otr_type, consider_placement_new, consider_bases);
}
514 /* We know that the instance is stored in variable or parameter
515 (not dynamically allocated) and we want to disprove the fact
516 that it may be in construction at invocation of CALL.
518 BASE represents memory location where instance is stored.
519 If BASE is NULL, it is assumed to be global memory.
520 OUTER_TYPE is known type of the instance or NULL if not
523 For the variable to be in construction we actually need to
524 be in constructor of corresponding global variable or
525 the inline stack of CALL must contain the constructor.
526 Check this condition. This check works safely only before
527 IPA passes, because inline stacks may become out of date
531 decl_maybe_in_construction_p (tree base
, tree outer_type
,
532 gimple call
, tree function
)
535 outer_type
= TYPE_MAIN_VARIANT (outer_type
);
536 gcc_assert (!base
|| DECL_P (base
));
538 /* After inlining the code unification optimizations may invalidate
539 inline stacks. Also we need to give up on global variables after
540 IPA, because addresses of these may have been propagated to their
542 if (DECL_STRUCT_FUNCTION (function
)->after_inlining
)
  /* Pure functions can not do any changes on the dynamic type;
     that requires writing to memory.  */
547 if ((!base
|| !auto_var_in_fn_p (base
, function
))
548 && flags_from_decl_or_type (function
) & (ECF_PURE
| ECF_CONST
))
551 for (tree block
= gimple_block (call
); block
&& TREE_CODE (block
) == BLOCK
;
552 block
= BLOCK_SUPERCONTEXT (block
))
553 if (BLOCK_ABSTRACT_ORIGIN (block
)
554 && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block
)) == FUNCTION_DECL
)
556 tree fn
= BLOCK_ABSTRACT_ORIGIN (block
);
558 if (TREE_CODE (TREE_TYPE (fn
)) != METHOD_TYPE
559 || (!DECL_CXX_CONSTRUCTOR_P (fn
)
560 && !DECL_CXX_DESTRUCTOR_P (fn
)))
562 /* Watch for clones where we constant propagated the first
563 argument (pointer to the instance). */
564 fn
= DECL_ABSTRACT_ORIGIN (fn
);
566 || (base
&& !is_global_var (base
))
567 || TREE_CODE (TREE_TYPE (fn
)) != METHOD_TYPE
568 || (!DECL_CXX_CONSTRUCTOR_P (fn
)
569 && !DECL_CXX_DESTRUCTOR_P (fn
)))
572 if (flags_from_decl_or_type (fn
) & (ECF_PURE
| ECF_CONST
))
575 tree type
= TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (fn
)));
577 if (!outer_type
|| !types_odr_comparable (type
, outer_type
))
579 if (TREE_CODE (type
) == RECORD_TYPE
581 && polymorphic_type_binfo_p (TYPE_BINFO (type
)))
584 else if (types_same_for_odr (type
, outer_type
))
588 if (!base
|| (TREE_CODE (base
) == VAR_DECL
&& is_global_var (base
)))
590 if (TREE_CODE (TREE_TYPE (function
)) != METHOD_TYPE
591 || (!DECL_CXX_CONSTRUCTOR_P (function
)
592 && !DECL_CXX_DESTRUCTOR_P (function
)))
594 if (!DECL_ABSTRACT_ORIGIN (function
))
596 /* Watch for clones where we constant propagated the first
597 argument (pointer to the instance). */
598 function
= DECL_ABSTRACT_ORIGIN (function
);
600 || TREE_CODE (TREE_TYPE (function
)) != METHOD_TYPE
601 || (!DECL_CXX_CONSTRUCTOR_P (function
)
602 && !DECL_CXX_DESTRUCTOR_P (function
)))
605 tree type
= TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (function
)));
606 if (!outer_type
|| !types_odr_comparable (type
, outer_type
))
608 if (TREE_CODE (type
) == RECORD_TYPE
610 && polymorphic_type_binfo_p (TYPE_BINFO (type
)))
613 else if (types_same_for_odr (type
, outer_type
))
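/* Illustrative example (not part of the original source) of the situation the
   checks above guard against: a virtual call made while the object is still
   being constructed dispatches according to the constructor's own type, not
   the final dynamic type:

     struct A
     {
       virtual void f ();
       A () { f (); }			// calls A::f even when building a B
     };
     struct B : A { void f () override; };
     B b;				// global; A::A runs during B's construction

   Hence a call whose inline stack contains a constructor or destructor of
   the variable's type must be treated as possibly "in construction".  */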
619 /* Dump human readable context to F. If NEWLINE is true, it will be terminated
623 ipa_polymorphic_call_context::dump (FILE *f
, bool newline
) const
627 fprintf (f
, "Call is known to be undefined");
631 fprintf (f
, "nothing known");
632 if (outer_type
|| offset
)
634 fprintf (f
, "Outer type%s:", dynamic
? " (dynamic)":"");
635 print_generic_expr (f
, outer_type
, TDF_SLIM
);
636 if (maybe_derived_type
)
637 fprintf (f
, " (or a derived type)");
638 if (maybe_in_construction
)
639 fprintf (f
, " (maybe in construction)");
640 fprintf (f
, " offset "HOST_WIDE_INT_PRINT_DEC
,
643 if (speculative_outer_type
)
645 if (outer_type
|| offset
)
647 fprintf (f
, "Speculative outer type:");
648 print_generic_expr (f
, speculative_outer_type
, TDF_SLIM
);
649 if (speculative_maybe_derived_type
)
650 fprintf (f
, " (or a derived type)");
651 fprintf (f
, " at offset "HOST_WIDE_INT_PRINT_DEC
,
659 /* Print context to stderr. */
662 ipa_polymorphic_call_context::debug () const
667 /* Stream out the context to OB. */
670 ipa_polymorphic_call_context::stream_out (struct output_block
*ob
) const
672 struct bitpack_d bp
= bitpack_create (ob
->main_stream
);
674 bp_pack_value (&bp
, invalid
, 1);
675 bp_pack_value (&bp
, maybe_in_construction
, 1);
676 bp_pack_value (&bp
, maybe_derived_type
, 1);
677 bp_pack_value (&bp
, speculative_maybe_derived_type
, 1);
678 bp_pack_value (&bp
, dynamic
, 1);
679 bp_pack_value (&bp
, outer_type
!= NULL
, 1);
680 bp_pack_value (&bp
, offset
!= 0, 1);
681 bp_pack_value (&bp
, speculative_outer_type
!= NULL
, 1);
682 streamer_write_bitpack (&bp
);
684 if (outer_type
!= NULL
)
685 stream_write_tree (ob
, outer_type
, true);
687 streamer_write_hwi (ob
, offset
);
688 if (speculative_outer_type
!= NULL
)
690 stream_write_tree (ob
, speculative_outer_type
, true);
691 streamer_write_hwi (ob
, speculative_offset
);
694 gcc_assert (!speculative_offset
);
697 /* Stream in the context from IB and DATA_IN. */
700 ipa_polymorphic_call_context::stream_in (struct lto_input_block
*ib
,
701 struct data_in
*data_in
)
703 struct bitpack_d bp
= streamer_read_bitpack (ib
);
705 invalid
= bp_unpack_value (&bp
, 1);
706 maybe_in_construction
= bp_unpack_value (&bp
, 1);
707 maybe_derived_type
= bp_unpack_value (&bp
, 1);
708 speculative_maybe_derived_type
= bp_unpack_value (&bp
, 1);
709 dynamic
= bp_unpack_value (&bp
, 1);
710 bool outer_type_p
= bp_unpack_value (&bp
, 1);
711 bool offset_p
= bp_unpack_value (&bp
, 1);
712 bool speculative_outer_type_p
= bp_unpack_value (&bp
, 1);
715 outer_type
= stream_read_tree (ib
, data_in
);
719 offset
= (HOST_WIDE_INT
) streamer_read_hwi (ib
);
722 if (speculative_outer_type_p
)
724 speculative_outer_type
= stream_read_tree (ib
, data_in
);
725 speculative_offset
= (HOST_WIDE_INT
) streamer_read_hwi (ib
);
729 speculative_outer_type
= NULL
;
730 speculative_offset
= 0;
/* Produce a polymorphic call context for a call to a method of an instance
   that is located within BASE (that is assumed to be a decl) at offset OFF.  */
738 ipa_polymorphic_call_context::set_by_decl (tree base
, HOST_WIDE_INT off
)
740 gcc_assert (DECL_P (base
));
741 clear_speculation ();
743 if (!contains_polymorphic_type_p (TREE_TYPE (base
)))
749 outer_type
= TYPE_MAIN_VARIANT (TREE_TYPE (base
));
751 /* Make very conservative assumption that all objects
752 may be in construction.
754 It is up to caller to revisit this via
755 get_dynamic_type or decl_maybe_in_construction_p. */
756 maybe_in_construction
= true;
757 maybe_derived_type
= false;
/* CST is an invariant (address of decl), try to get a meaningful
   polymorphic call context for a polymorphic call of a method
   of an instance of OTR_TYPE that is located at offset OFF of this invariant.
   Return FALSE if nothing meaningful can be found.  */
767 ipa_polymorphic_call_context::set_by_invariant (tree cst
,
771 HOST_WIDE_INT offset2
, size
, max_size
;
776 clear_outer_type (otr_type
);
778 if (TREE_CODE (cst
) != ADDR_EXPR
)
781 cst
= TREE_OPERAND (cst
, 0);
782 base
= get_ref_base_and_extent (cst
, &offset2
, &size
, &max_size
);
783 if (!DECL_P (base
) || max_size
== -1 || max_size
!= size
)
786 /* Only type inconsistent programs can have otr_type that is
787 not part of outer type. */
788 if (otr_type
&& !contains_type_p (TREE_TYPE (base
), off
, otr_type
))
791 set_by_decl (base
, off
);
/* See if OP is an SSA name initialized as a copy or by a single assignment.
   If so, walk the SSA graph up.  Because a simple PHI conditional is considered
   a copy, GLOBAL_VISITED may be used to avoid an infinite loop walking the SSA
801 walk_ssa_copies (tree op
, hash_set
<tree
> **global_visited
= NULL
)
803 hash_set
<tree
> *visited
= NULL
;
805 while (TREE_CODE (op
) == SSA_NAME
806 && !SSA_NAME_IS_DEFAULT_DEF (op
)
807 && SSA_NAME_DEF_STMT (op
)
808 && (gimple_assign_single_p (SSA_NAME_DEF_STMT (op
))
809 || gimple_code (SSA_NAME_DEF_STMT (op
)) == GIMPLE_PHI
))
813 if (!*global_visited
)
814 *global_visited
= new hash_set
<tree
>;
815 if ((*global_visited
)->add (op
))
821 visited
= new hash_set
<tree
>;
822 if (visited
->add (op
))
	     This pattern is implicitly produced for casts to non-primary
	     bases.  When doing context analysis, we do not really care
	     about the case the pointer is NULL, because the call will be
834 if (gimple_code (SSA_NAME_DEF_STMT (op
)) == GIMPLE_PHI
)
836 gimple phi
= SSA_NAME_DEF_STMT (op
);
838 if (gimple_phi_num_args (phi
) > 2)
840 if (gimple_phi_num_args (phi
) == 1)
841 op
= gimple_phi_arg_def (phi
, 0);
842 else if (integer_zerop (gimple_phi_arg_def (phi
, 0)))
843 op
= gimple_phi_arg_def (phi
, 1);
844 else if (integer_zerop (gimple_phi_arg_def (phi
, 1)))
845 op
= gimple_phi_arg_def (phi
, 0);
851 if (gimple_assign_load_p (SSA_NAME_DEF_STMT (op
)))
853 op
= gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op
));
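/* Illustrative sketch (not part of the original source) of the GIMPLE shape
   the PHI handling above looks through.  A cast to a non-primary base is
   commonly expanded as a NULL-guarded pointer adjustment, which in dumps
   looks roughly like (names and offsets made up for the example):

     iftmp_3 = &obj_2->D.2100;			// adjust to the base subobject
     # ptr_1 = PHI <iftmp_3, 0B>		// the other arm is a literal zero
     OBJ_TYPE_REF (_4; ptr_1->2) (ptr_1);

   Because the NULL arm would make the call undefined anyway, the walk simply
   follows the non-zero PHI argument back to the original object pointer.  */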
863 /* Create polymorphic call context from IP invariant CST.
864 This is typically &global_var.
865 OTR_TYPE specify type of polymorphic call or NULL if unknown, OFF
866 is offset of call. */
868 ipa_polymorphic_call_context::ipa_polymorphic_call_context (tree cst
,
872 clear_speculation ();
873 set_by_invariant (cst
, otr_type
, off
);
/* Build context for pointer REF contained in FNDECL at statement STMT.
   If INSTANCE is non-NULL, return a pointer to the object described by
   the context or the DECL the context is contained in.  */
880 ipa_polymorphic_call_context::ipa_polymorphic_call_context (tree fndecl
,
885 tree otr_type
= NULL
;
887 hash_set
<tree
> *visited
= NULL
;
889 if (TREE_CODE (ref
) == OBJ_TYPE_REF
)
891 otr_type
= obj_type_ref_class (ref
);
892 base_pointer
= OBJ_TYPE_REF_OBJECT (ref
);
897 /* Set up basic info in case we find nothing interesting in the analysis. */
898 clear_speculation ();
899 clear_outer_type (otr_type
);
902 /* Walk SSA for outer object. */
905 base_pointer
= walk_ssa_copies (base_pointer
, &visited
);
906 if (TREE_CODE (base_pointer
) == ADDR_EXPR
)
908 HOST_WIDE_INT size
, max_size
;
909 HOST_WIDE_INT offset2
;
910 tree base
= get_ref_base_and_extent (TREE_OPERAND (base_pointer
, 0),
911 &offset2
, &size
, &max_size
);
913 if (max_size
!= -1 && max_size
== size
)
914 combine_speculation_with (TYPE_MAIN_VARIANT (TREE_TYPE (base
)),
917 NULL
/* Do not change outer type. */);
919 /* If this is a varying address, punt. */
920 if ((TREE_CODE (base
) == MEM_REF
|| DECL_P (base
))
	  /* We found a dereference of a pointer.  The type of the pointer
	     and MEM_REF is meaningless, but we can look further.  */
926 if (TREE_CODE (base
) == MEM_REF
)
928 base_pointer
= TREE_OPERAND (base
, 0);
930 += offset2
+ mem_ref_offset (base
).to_short_addr () * BITS_PER_UNIT
;
933 /* We found base object. In this case the outer_type
935 else if (DECL_P (base
))
939 /* Only type inconsistent programs can have otr_type that is
940 not part of outer type. */
942 && !contains_type_p (TREE_TYPE (base
),
943 offset
+ offset2
, otr_type
))
947 *instance
= base_pointer
;
950 set_by_decl (base
, offset
+ offset2
);
951 if (outer_type
&& maybe_in_construction
&& stmt
)
952 maybe_in_construction
953 = decl_maybe_in_construction_p (base
,
967 else if (TREE_CODE (base_pointer
) == POINTER_PLUS_EXPR
968 && tree_fits_uhwi_p (TREE_OPERAND (base_pointer
, 1)))
970 offset
+= tree_to_shwi (TREE_OPERAND (base_pointer
, 1))
972 base_pointer
= TREE_OPERAND (base_pointer
, 0);
981 /* Try to determine type of the outer object. */
982 if (TREE_CODE (base_pointer
) == SSA_NAME
983 && SSA_NAME_IS_DEFAULT_DEF (base_pointer
)
984 && TREE_CODE (SSA_NAME_VAR (base_pointer
)) == PARM_DECL
)
986 /* See if parameter is THIS pointer of a method. */
987 if (TREE_CODE (TREE_TYPE (fndecl
)) == METHOD_TYPE
988 && SSA_NAME_VAR (base_pointer
) == DECL_ARGUMENTS (fndecl
))
991 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer
)));
992 gcc_assert (TREE_CODE (outer_type
) == RECORD_TYPE
993 || TREE_CODE (outer_type
) == UNION_TYPE
);
	  /* Dynamic casting has possibly upcasted the type
	     in the hierarchy.  In this case the outer type is less
	     informative than the inner type and we should forget
1000 && !contains_type_p (outer_type
, offset
,
1002 || !contains_polymorphic_type_p (outer_type
))
1006 *instance
= base_pointer
;
1012 /* If the function is constructor or destructor, then
1013 the type is possibly in construction, but we know
1014 it is not derived type. */
1015 if (DECL_CXX_CONSTRUCTOR_P (fndecl
)
1016 || DECL_CXX_DESTRUCTOR_P (fndecl
))
1018 maybe_in_construction
= true;
1019 maybe_derived_type
= false;
1023 maybe_derived_type
= true;
1024 maybe_in_construction
= false;
1027 *instance
= base_pointer
;
1030 /* Non-PODs passed by value are really passed by invisible
1031 reference. In this case we also know the type of the
1033 if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer
)))
1036 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer
)));
1037 /* Only type inconsistent programs can have otr_type that is
1038 not part of outer type. */
1039 if (otr_type
&& !contains_type_p (outer_type
, offset
,
1044 *instance
= base_pointer
;
1047 /* Non-polymorphic types have no interest for us. */
1048 else if (!otr_type
&& !contains_polymorphic_type_p (outer_type
))
1052 *instance
= base_pointer
;
1055 maybe_derived_type
= false;
1056 maybe_in_construction
= false;
1058 *instance
= base_pointer
;
1063 tree base_type
= TREE_TYPE (base_pointer
);
1065 if (TREE_CODE (base_pointer
) == SSA_NAME
1066 && SSA_NAME_IS_DEFAULT_DEF (base_pointer
)
1067 && !(TREE_CODE (SSA_NAME_VAR (base_pointer
)) == PARM_DECL
1068 || TREE_CODE (SSA_NAME_VAR (base_pointer
)) == RESULT_DECL
))
1072 *instance
= base_pointer
;
1075 if (TREE_CODE (base_pointer
) == SSA_NAME
1076 && SSA_NAME_DEF_STMT (base_pointer
)
1077 && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer
)))
1078 base_type
= TREE_TYPE (gimple_assign_rhs1
1079 (SSA_NAME_DEF_STMT (base_pointer
)));
1081 if (POINTER_TYPE_P (base_type
))
1082 combine_speculation_with (TYPE_MAIN_VARIANT (TREE_TYPE (base_type
)),
1084 true, NULL
/* Do not change type here */);
  /* TODO: There are multiple ways to derive a type.  For instance
     if BASE_POINTER is passed to a constructor call prior to our reference.
     We do not make this type of flow-sensitive analysis yet.  */
1089 *instance
= base_pointer
;
1093 /* Structure to be passed in between detect_type_change and
1094 check_stmt_for_type_change. */
1096 struct type_change_info
1098 /* Offset into the object where there is the virtual method pointer we are
1100 HOST_WIDE_INT offset
;
1101 /* The declaration or SSA_NAME pointer of the base that we are checking for
1104 /* The reference to virtual table pointer used. */
1107 /* If we actually can tell the type that the object has changed to, it is
1108 stored in this field. Otherwise it remains NULL_TREE. */
1109 tree known_current_type
;
1110 HOST_WIDE_INT known_current_offset
;
1112 /* Set to true if dynamic type change has been detected. */
1113 bool type_maybe_changed
;
1114 /* Set to true if multiple types have been encountered. known_current_type
1115 must be disregarded in that case. */
1116 bool multiple_types_encountered
;
1117 /* Set to true if we possibly missed some dynamic type changes and we should
1118 consider the set to be speculative. */
1120 bool seen_unanalyzed_store
;
/* Return true if STMT is not a call and can modify a virtual method table pointer.
   We take advantage of the fact that vtable stores must appear within constructor
   and destructor functions.  */
1128 noncall_stmt_may_be_vtbl_ptr_store (gimple stmt
)
1130 if (is_gimple_assign (stmt
))
1132 tree lhs
= gimple_assign_lhs (stmt
);
1134 if (gimple_clobber_p (stmt
))
1136 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs
)))
1138 if (flag_strict_aliasing
1139 && !POINTER_TYPE_P (TREE_TYPE (lhs
)))
1142 if (TREE_CODE (lhs
) == COMPONENT_REF
1143 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs
, 1)))
1145 /* In the future we might want to use get_base_ref_and_offset to find
1146 if there is a field corresponding to the offset and if so, proceed
1147 almost like if it was a component ref. */
1151 /* Code unification may mess with inline stacks. */
1152 if (cfun
->after_inlining
)
1155 /* Walk the inline stack and watch out for ctors/dtors.
1156 TODO: Maybe we can require the store to appear in toplevel
1157 block of CTOR/DTOR. */
1158 for (tree block
= gimple_block (stmt
); block
&& TREE_CODE (block
) == BLOCK
;
1159 block
= BLOCK_SUPERCONTEXT (block
))
1160 if (BLOCK_ABSTRACT_ORIGIN (block
)
1161 && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block
)) == FUNCTION_DECL
)
1163 tree fn
= BLOCK_ABSTRACT_ORIGIN (block
);
1165 if (flags_from_decl_or_type (fn
) & (ECF_PURE
| ECF_CONST
))
1167 return (TREE_CODE (TREE_TYPE (fn
)) == METHOD_TYPE
1168 && (DECL_CXX_CONSTRUCTOR_P (fn
)
1169 || DECL_CXX_DESTRUCTOR_P (fn
)));
1171 return (TREE_CODE (TREE_TYPE (current_function_decl
)) == METHOD_TYPE
1172 && (DECL_CXX_CONSTRUCTOR_P (current_function_decl
)
1173 || DECL_CXX_DESTRUCTOR_P (current_function_decl
)));
1176 /* If STMT can be proved to be an assignment to the virtual method table
1177 pointer of ANALYZED_OBJ and the type associated with the new table
1178 identified, return the type. Otherwise return NULL_TREE if type changes
1179 in unknown way or ERROR_MARK_NODE if type is unchanged. */
1182 extr_type_from_vtbl_ptr_store (gimple stmt
, struct type_change_info
*tci
,
1183 HOST_WIDE_INT
*type_offset
)
1185 HOST_WIDE_INT offset
, size
, max_size
;
1186 tree lhs
, rhs
, base
;
1188 if (!gimple_assign_single_p (stmt
))
1191 lhs
= gimple_assign_lhs (stmt
);
1192 rhs
= gimple_assign_rhs1 (stmt
);
1193 if (TREE_CODE (lhs
) != COMPONENT_REF
1194 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs
, 1)))
1197 fprintf (dump_file
, " LHS is not virtual table.\n");
1201 if (tci
->vtbl_ptr_ref
&& operand_equal_p (lhs
, tci
->vtbl_ptr_ref
, 0))
1205 base
= get_ref_base_and_extent (lhs
, &offset
, &size
, &max_size
);
1206 if (DECL_P (tci
->instance
))
1208 if (base
!= tci
->instance
)
1212 fprintf (dump_file
, " base:");
1213 print_generic_expr (dump_file
, base
, TDF_SLIM
);
1214 fprintf (dump_file
, " does not match instance:");
1215 print_generic_expr (dump_file
, tci
->instance
, TDF_SLIM
);
1216 fprintf (dump_file
, "\n");
1221 else if (TREE_CODE (base
) == MEM_REF
)
1223 if (!operand_equal_p (tci
->instance
, TREE_OPERAND (base
, 0), 0))
1227 fprintf (dump_file
, " base mem ref:");
1228 print_generic_expr (dump_file
, base
, TDF_SLIM
);
1229 fprintf (dump_file
, " does not match instance:");
1230 print_generic_expr (dump_file
, tci
->instance
, TDF_SLIM
);
1231 fprintf (dump_file
, "\n");
1235 if (!integer_zerop (TREE_OPERAND (base
, 1)))
1237 if (!tree_fits_shwi_p (TREE_OPERAND (base
, 1)))
1241 fprintf (dump_file
, " base mem ref:");
1242 print_generic_expr (dump_file
, base
, TDF_SLIM
);
1243 fprintf (dump_file
, " has non-representable offset:");
1244 print_generic_expr (dump_file
, tci
->instance
, TDF_SLIM
);
1245 fprintf (dump_file
, "\n");
1250 offset
+= tree_to_shwi (TREE_OPERAND (base
, 1)) * BITS_PER_UNIT
;
1253 else if (!operand_equal_p (tci
->instance
, base
, 0)
1258 fprintf (dump_file
, " base:");
1259 print_generic_expr (dump_file
, base
, TDF_SLIM
);
1260 fprintf (dump_file
, " does not match instance:");
1261 print_generic_expr (dump_file
, tci
->instance
, TDF_SLIM
);
1262 fprintf (dump_file
, " with offset %i\n", (int)tci
->offset
);
1264 return tci
->offset
> POINTER_SIZE
? error_mark_node
: NULL_TREE
;
1266 if (offset
!= tci
->offset
1267 || size
!= POINTER_SIZE
1268 || max_size
!= POINTER_SIZE
)
1271 fprintf (dump_file
, " wrong offset %i!=%i or size %i\n",
1272 (int)offset
, (int)tci
->offset
, (int)size
);
1273 return offset
+ POINTER_SIZE
<= tci
->offset
1275 && tci
->offset
+ POINTER_SIZE
> offset
+ max_size
)
1276 ? error_mark_node
: NULL
;
1281 unsigned HOST_WIDE_INT offset2
;
1283 if (!vtable_pointer_value_to_vtable (rhs
, &vtable
, &offset2
))
1286 fprintf (dump_file
, " Failed to lookup binfo\n");
1290 tree binfo
= subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable
)),
1295 fprintf (dump_file
, " Construction vtable used\n");
      /* FIXME: We should support construction contexts.  */
1300 *type_offset
= tree_to_shwi (BINFO_OFFSET (binfo
)) * BITS_PER_UNIT
;
1301 return DECL_CONTEXT (vtable
);
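/* Illustrative example (not part of the original source): the store this
   function recognizes is the inlined vptr initialization emitted at the start
   of a constructor, which in GIMPLE dumps looks roughly like

     this_2->_vptr.A = &_ZTV1A + 16;		// install A's vtable address

   The right-hand side is mapped back to the vtable declaration, and
   DECL_CONTEXT of that vtable gives the type the object is changing to.  */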
1304 /* Record dynamic type change of TCI to TYPE. */
1307 record_known_type (struct type_change_info
*tci
, tree type
, HOST_WIDE_INT offset
)
1313 fprintf (dump_file
, " Recording type: ");
1314 print_generic_expr (dump_file
, type
, TDF_SLIM
);
1315 fprintf (dump_file
, " at offset %i\n", (int)offset
);
1318 fprintf (dump_file
, " Recording unknown type\n");
  /* If we found a constructor of a type that is not polymorphic or
     that may contain the type in question as a field (not as a base),
     restrict to the inner class first to make type matching below
1327 || (TREE_CODE (type
) != RECORD_TYPE
1328 || !TYPE_BINFO (type
)
1329 || !polymorphic_type_binfo_p (TYPE_BINFO (type
)))))
1331 ipa_polymorphic_call_context context
;
1333 context
.offset
= offset
;
1334 context
.outer_type
= type
;
1335 context
.maybe_in_construction
= false;
1336 context
.maybe_derived_type
= false;
1337 context
.dynamic
= true;
1338 /* If we failed to find the inner type, we know that the call
1339 would be undefined for type produced here. */
1340 if (!context
.restrict_to_inner_class (tci
->otr_type
))
1343 fprintf (dump_file
, " Ignoring; does not contain otr_type\n");
      /* Watch for the case we reached a POD type and anticipate placement
1348 if (!context
.maybe_derived_type
)
1350 type
= context
.outer_type
;
1351 offset
= context
.offset
;
1354 if (tci
->type_maybe_changed
1355 && (!types_same_for_odr (type
, tci
->known_current_type
)
1356 || offset
!= tci
->known_current_offset
))
1357 tci
->multiple_types_encountered
= true;
1358 tci
->known_current_type
= TYPE_MAIN_VARIANT (type
);
1359 tci
->known_current_offset
= offset
;
1360 tci
->type_maybe_changed
= true;
1363 /* Callback of walk_aliased_vdefs and a helper function for
1364 detect_type_change to check whether a particular statement may modify
1365 the virtual table pointer, and if possible also determine the new type of
1366 the (sub-)object. It stores its result into DATA, which points to a
1367 type_change_info structure. */
1370 check_stmt_for_type_change (ao_ref
*ao ATTRIBUTE_UNUSED
, tree vdef
, void *data
)
1372 gimple stmt
= SSA_NAME_DEF_STMT (vdef
);
1373 struct type_change_info
*tci
= (struct type_change_info
*) data
;
1376 /* If we already gave up, just terminate the rest of walk. */
1377 if (tci
->multiple_types_encountered
)
1380 if (is_gimple_call (stmt
))
1382 if (gimple_call_flags (stmt
) & (ECF_CONST
| ECF_PURE
))
1385 /* Check for a constructor call. */
1386 if ((fn
= gimple_call_fndecl (stmt
)) != NULL_TREE
1387 && DECL_CXX_CONSTRUCTOR_P (fn
)
1388 && TREE_CODE (TREE_TYPE (fn
)) == METHOD_TYPE
1389 && gimple_call_num_args (stmt
))
1391 tree op
= walk_ssa_copies (gimple_call_arg (stmt
, 0));
1392 tree type
= method_class_type (TREE_TYPE (fn
));
1393 HOST_WIDE_INT offset
= 0, size
, max_size
;
1397 fprintf (dump_file
, " Checking constructor call: ");
1398 print_gimple_stmt (dump_file
, stmt
, 0, 0);
1401 /* See if THIS parameter seems like instance pointer. */
1402 if (TREE_CODE (op
) == ADDR_EXPR
)
1404 op
= get_ref_base_and_extent (TREE_OPERAND (op
, 0),
1405 &offset
, &size
, &max_size
);
1406 if (size
!= max_size
|| max_size
== -1)
1408 tci
->speculative
= true;
1411 if (op
&& TREE_CODE (op
) == MEM_REF
)
1413 if (!tree_fits_shwi_p (TREE_OPERAND (op
, 1)))
1415 tci
->speculative
= true;
1418 offset
+= tree_to_shwi (TREE_OPERAND (op
, 1))
1420 op
= TREE_OPERAND (op
, 0);
1422 else if (DECL_P (op
))
1426 tci
->speculative
= true;
1429 op
= walk_ssa_copies (op
);
1431 if (operand_equal_p (op
, tci
->instance
, 0)
1433 && TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
1434 && tree_fits_shwi_p (TYPE_SIZE (type
))
1435 && tree_to_shwi (TYPE_SIZE (type
)) + offset
> tci
->offset
)
1437 record_known_type (tci
, type
, tci
->offset
- offset
);
1441 /* Calls may possibly change dynamic type by placement new. Assume
1442 it will not happen, but make result speculative only. */
1445 fprintf (dump_file
, " Function call may change dynamic type:");
1446 print_gimple_stmt (dump_file
, stmt
, 0, 0);
1448 tci
->speculative
= true;
1451 /* Check for inlined virtual table store. */
1452 else if (noncall_stmt_may_be_vtbl_ptr_store (stmt
))
1455 HOST_WIDE_INT offset
= 0;
1458 fprintf (dump_file
, " Checking vtbl store: ");
1459 print_gimple_stmt (dump_file
, stmt
, 0, 0);
1462 type
= extr_type_from_vtbl_ptr_store (stmt
, tci
, &offset
);
1463 if (type
== error_mark_node
)
1465 gcc_assert (!type
|| TYPE_MAIN_VARIANT (type
) == type
);
1469 fprintf (dump_file
, " Unanalyzed store may change type.\n");
1470 tci
->seen_unanalyzed_store
= true;
1471 tci
->speculative
= true;
1474 record_known_type (tci
, type
, offset
);
1481 /* THIS is polymorphic call context obtained from get_polymorphic_context.
1482 OTR_OBJECT is pointer to the instance returned by OBJ_TYPE_REF_OBJECT.
1483 INSTANCE is pointer to the outer instance as returned by
1484 get_polymorphic_context. To avoid creation of temporary expressions,
   INSTANCE may also be a declaration, if get_polymorphic_context found the
   value to be in static storage.
1488 If the type of instance is not fully determined
1489 (either OUTER_TYPE is unknown or MAYBE_IN_CONSTRUCTION/INCLUDE_DERIVED_TYPES
1490 is set), try to walk memory writes and find the actual construction of the
1493 Return true if memory is unchanged from function entry.
1495 We do not include this analysis in the context analysis itself, because
1496 it needs memory SSA to be fully built and the walk may be expensive.
   So it is not suitable for use within fold_stmt and similar uses.  */
1500 ipa_polymorphic_call_context::get_dynamic_type (tree instance
,
1505 struct type_change_info tci
;
1507 bool function_entry_reached
= false;
1508 tree instance_ref
= NULL
;
1510 /* Remember OFFSET before it is modified by restrict_to_inner_class.
1511 This is because we do not update INSTANCE when walking inwards. */
1512 HOST_WIDE_INT instance_offset
= offset
;
1515 otr_type
= TYPE_MAIN_VARIANT (otr_type
);
  /* Walk into the inner type.  This may clear maybe_derived_type and save us
     from useless work.  It also makes later comparisons with the static type
1520 if (outer_type
&& otr_type
)
1522 if (!restrict_to_inner_class (otr_type
))
1526 if (!maybe_in_construction
&& !maybe_derived_type
)
  /* We need to obtain a reference to the virtual table pointer.  It is better
     to look it up in the code rather than build our own.  This requires a bit
     of pattern matching, but we end up verifying that what we found is
1534 What we pattern match is:
1536 tmp = instance->_vptr.A; // vtbl ptr load
1537 tmp2 = tmp[otr_token]; // vtable lookup
1538 OBJ_TYPE_REF(tmp2;instance->0) (instance);
1540 We want to start alias oracle walk from vtbl pointer load,
1541 but we may not be able to identify it, for example, when PRE moved the
1544 if (gimple_code (call
) == GIMPLE_CALL
)
1546 tree ref
= gimple_call_fn (call
);
1547 HOST_WIDE_INT offset2
, size
, max_size
;
1549 if (TREE_CODE (ref
) == OBJ_TYPE_REF
)
1551 ref
= OBJ_TYPE_REF_EXPR (ref
);
1552 ref
= walk_ssa_copies (ref
);
1554 /* Check if definition looks like vtable lookup. */
1555 if (TREE_CODE (ref
) == SSA_NAME
1556 && !SSA_NAME_IS_DEFAULT_DEF (ref
)
1557 && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref
))
1558 && TREE_CODE (gimple_assign_rhs1
1559 (SSA_NAME_DEF_STMT (ref
))) == MEM_REF
)
1561 ref
= get_base_address
1562 (TREE_OPERAND (gimple_assign_rhs1
1563 (SSA_NAME_DEF_STMT (ref
)), 0));
1564 ref
= walk_ssa_copies (ref
);
1565 /* Find base address of the lookup and see if it looks like
1567 if (TREE_CODE (ref
) == SSA_NAME
1568 && !SSA_NAME_IS_DEFAULT_DEF (ref
)
1569 && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref
)))
1571 tree ref_exp
= gimple_assign_rhs1 (SSA_NAME_DEF_STMT (ref
));
1572 tree base_ref
= get_ref_base_and_extent
1573 (ref_exp
, &offset2
, &size
, &max_size
);
1575 /* Finally verify that what we found looks like read from OTR_OBJECT
1576 or from INSTANCE with offset OFFSET. */
1578 && ((TREE_CODE (base_ref
) == MEM_REF
1579 && ((offset2
== instance_offset
1580 && TREE_OPERAND (base_ref
, 0) == instance
)
1581 || (!offset2
&& TREE_OPERAND (base_ref
, 0) == otr_object
)))
1582 || (DECL_P (instance
) && base_ref
== instance
1583 && offset2
== instance_offset
)))
1585 stmt
= SSA_NAME_DEF_STMT (ref
);
1586 instance_ref
= ref_exp
;
  /* If we failed to look up the reference in the code, build our own.  */
1596 /* If the statement in question does not use memory, we can't tell
1598 if (!gimple_vuse (stmt
))
1600 ao_ref_init_from_ptr_and_size (&ao
, otr_object
, NULL
);
1603 /* Otherwise use the real reference. */
1604 ao_ref_init (&ao
, instance_ref
);
1606 /* We look for vtbl pointer read. */
1607 ao
.size
= POINTER_SIZE
;
1608 ao
.max_size
= ao
.size
;
1611 = get_deref_alias_set (TREE_TYPE (BINFO_VTABLE (TYPE_BINFO (otr_type
))));
1615 fprintf (dump_file
, "Determining dynamic type for call: ");
1616 print_gimple_stmt (dump_file
, call
, 0, 0);
1617 fprintf (dump_file
, " Starting walk at: ");
1618 print_gimple_stmt (dump_file
, stmt
, 0, 0);
1619 fprintf (dump_file
, " instance pointer: ");
1620 print_generic_expr (dump_file
, otr_object
, TDF_SLIM
);
1621 fprintf (dump_file
, " Outer instance pointer: ");
1622 print_generic_expr (dump_file
, instance
, TDF_SLIM
);
1623 fprintf (dump_file
, " offset: %i (bits)", (int)offset
);
1624 fprintf (dump_file
, " vtbl reference: ");
1625 print_generic_expr (dump_file
, instance_ref
, TDF_SLIM
);
1626 fprintf (dump_file
, "\n");
1629 tci
.offset
= offset
;
1630 tci
.instance
= instance
;
1631 tci
.vtbl_ptr_ref
= instance_ref
;
1632 gcc_assert (TREE_CODE (instance
) != MEM_REF
);
1633 tci
.known_current_type
= NULL_TREE
;
1634 tci
.known_current_offset
= 0;
1635 tci
.otr_type
= otr_type
;
1636 tci
.type_maybe_changed
= false;
1637 tci
.multiple_types_encountered
= false;
1638 tci
.speculative
= false;
1639 tci
.seen_unanalyzed_store
= false;
1641 walk_aliased_vdefs (&ao
, gimple_vuse (stmt
), check_stmt_for_type_change
,
1642 &tci
, NULL
, &function_entry_reached
);
  /* If we did not find any type changing statements, we may still drop
     the maybe_in_construction flag if the context already has an outer type.
1647 Here we make special assumptions about both constructors and
1648 destructors which are all the functions that are allowed to alter the
1649 VMT pointers. It assumes that destructors begin with assignment into
1650 all VMT pointers and that constructors essentially look in the
1653 1) The very first thing they do is that they call constructors of
1654 ancestor sub-objects that have them.
1656 2) Then VMT pointers of this and all its ancestors is set to new
1657 values corresponding to the type corresponding to the constructor.
1659 3) Only afterwards, other stuff such as constructor of member
1660 sub-objects and the code written by the user is run. Only this may
1661 include calling virtual functions, directly or indirectly.
1663 4) placement new can not be used to change type of non-POD statically
1664 allocated variables.
1666 There is no way to call a constructor of an ancestor sub-object in any
1669 This means that we do not have to care whether constructors get the
1670 correct type information because they will always change it (in fact,
1671 if we define the type to be given by the VMT pointer, it is undefined).
1673 The most important fact to derive from the above is that if, for some
1674 statement in the section 3, we try to detect whether the dynamic type
1675 has changed, we can safely ignore all calls as we examine the function
1676 body backwards until we reach statements in section 2 because these
1677 calls cannot be ancestor constructors or destructors (if the input is
1678 not bogus) and so do not change the dynamic type (this holds true only
1679 for automatically allocated objects but at the moment we devirtualize
1680 only these). We then must detect that statements in section 2 change
1681 the dynamic type and can try to derive the new type. That is enough
1682 and we can stop, we will never see the calls into constructors of
1683 sub-objects in this code.
     Therefore if the static outer type was found (outer_type)
     we can safely ignore tci.speculative that is set on calls and give up
     only if there was a dynamic type store that may affect the given variable
     (seen_unanalyzed_store).  */
1690 if (!tci
.type_maybe_changed
1693 && !tci
.seen_unanalyzed_store
1694 && !tci
.multiple_types_encountered
1695 && offset
== tci
.offset
1696 && types_same_for_odr (tci
.known_current_type
,
1699 if (!outer_type
|| tci
.seen_unanalyzed_store
)
1701 if (maybe_in_construction
)
1702 maybe_in_construction
= false;
1704 fprintf (dump_file
, " No dynamic type change found.\n");
1708 if (tci
.known_current_type
1709 && !function_entry_reached
1710 && !tci
.multiple_types_encountered
)
1712 if (!tci
.speculative
)
1714 outer_type
= TYPE_MAIN_VARIANT (tci
.known_current_type
);
1715 offset
= tci
.known_current_offset
;
1717 maybe_in_construction
= false;
1718 maybe_derived_type
= false;
1720 fprintf (dump_file
, " Determined dynamic type.\n");
1722 else if (!speculative_outer_type
1723 || speculative_maybe_derived_type
)
1725 speculative_outer_type
= TYPE_MAIN_VARIANT (tci
.known_current_type
);
1726 speculative_offset
= tci
.known_current_offset
;
1727 speculative_maybe_derived_type
= false;
1729 fprintf (dump_file
, " Determined speculative dynamic type.\n");
1734 fprintf (dump_file
, " Found multiple types%s%s\n",
1735 function_entry_reached
? " (function entry reached)" : "",
1736 function_entry_reached
? " (multiple types encountered)" : "");
1742 /* See if speculation given by SPEC_OUTER_TYPE, SPEC_OFFSET and SPEC_MAYBE_DERIVED_TYPE
1743 seems consistent (and useful) with what we already have in the non-speculative context. */
1746 ipa_polymorphic_call_context::speculation_consistent_p (tree spec_outer_type
,
1747 HOST_WIDE_INT spec_offset
,
1748 bool spec_maybe_derived_type
,
1749 tree otr_type
) const
1751 if (!flag_devirtualize_speculatively
)
1754 /* Non-polymorphic types are useless for deriving likely polymorphic
1756 if (!spec_outer_type
|| !contains_polymorphic_type_p (spec_outer_type
))
1759 /* If we know nothing, speculation is always good. */
1763 /* Speculation is only useful to avoid derived types.
1764 This is not 100% true for placement new, where the outer context may
1765 turn out to be useless, but ignore these for now. */
1766 if (!maybe_derived_type
)
  /* If the types agree, speculation is consistent, but it makes sense only
1770 when it says something new. */
1771 if (types_must_be_same_for_odr (spec_outer_type
, outer_type
))
1772 return maybe_derived_type
&& !spec_maybe_derived_type
;
1774 /* If speculation does not contain the type in question, ignore it. */
1776 && !contains_type_p (spec_outer_type
, spec_offset
, otr_type
, false, true))
  /* If the outer type already contains the speculation as a field,
     it is useless.  We already know from OUTER_TYPE
     SPEC_TYPE and that it is not in the construction.  */
1782 if (contains_type_p (outer_type
, offset
- spec_offset
,
1783 spec_outer_type
, false, false))
1786 /* If speculative outer type is not more specified than outer
1788 We can only decide this safely if we can compare types with OUTER_TYPE.
1790 if ((!in_lto_p
|| odr_type_p (outer_type
))
1791 && !contains_type_p (spec_outer_type
,
1792 spec_offset
- offset
,
1798 /* Improve THIS with speculation described by NEW_OUTER_TYPE, NEW_OFFSET,
1799 NEW_MAYBE_DERIVED_TYPE
1800 If OTR_TYPE is set, assume the context is used with OTR_TYPE. */
1803 ipa_polymorphic_call_context::combine_speculation_with
1804 (tree new_outer_type
, HOST_WIDE_INT new_offset
, bool new_maybe_derived_type
,
1807 if (!new_outer_type
)
1810 /* restrict_to_inner_class may eliminate wrong speculation making our job
1813 restrict_to_inner_class (otr_type
);
1815 if (!speculation_consistent_p (new_outer_type
, new_offset
,
1816 new_maybe_derived_type
, otr_type
))
1819 /* New speculation is a win in case we have no speculation or new
1820 speculation does not consider derivations. */
1821 if (!speculative_outer_type
1822 || (speculative_maybe_derived_type
1823 && !new_maybe_derived_type
))
1825 speculative_outer_type
= new_outer_type
;
1826 speculative_offset
= new_offset
;
1827 speculative_maybe_derived_type
= new_maybe_derived_type
;
1830 else if (types_must_be_same_for_odr (speculative_outer_type
,
1833 if (speculative_offset
!= new_offset
)
	  /* OK, we have two contexts that seem valid but they disagree,
1838 This is not a lattice operation, so we may want to drop it later. */
1839 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1841 "Speculative outer types match, "
1842 "offset mismatch -> invalid speculation\n");
1843 clear_speculation ();
1848 if (speculative_maybe_derived_type
&& !new_maybe_derived_type
)
1850 speculative_maybe_derived_type
= false;
1857 /* Choose type that contains the other. This one either contains the outer
1858 as a field (thus giving exactly one target) or is deeper in the type
1860 else if (speculative_outer_type
1861 && speculative_maybe_derived_type
1862 && (new_offset
> speculative_offset
1863 || (new_offset
== speculative_offset
1864 && contains_type_p (new_outer_type
,
1865 0, speculative_outer_type
, false))))
1867 tree old_outer_type
= speculative_outer_type
;
1868 HOST_WIDE_INT old_offset
= speculative_offset
;
1869 bool old_maybe_derived_type
= speculative_maybe_derived_type
;
1871 speculative_outer_type
= new_outer_type
;
1872 speculative_offset
= new_offset
;
1873 speculative_maybe_derived_type
= new_maybe_derived_type
;
1876 restrict_to_inner_class (otr_type
);
1878 /* If the speculation turned out to make no sense, revert to sensible
1880 if (!speculative_outer_type
)
1882 speculative_outer_type
= old_outer_type
;
1883 speculative_offset
= old_offset
;
1884 speculative_maybe_derived_type
= old_maybe_derived_type
;
1887 return (old_offset
!= speculative_offset
1888 || old_maybe_derived_type
!= speculative_maybe_derived_type
1889 || types_must_be_same_for_odr (speculative_outer_type
,
1895 /* Make speculation less specific so
1896 NEW_OUTER_TYPE, NEW_OFFSET, NEW_MAYBE_DERIVED_TYPE is also included.
1897 If OTR_TYPE is set, assume the context is used with OTR_TYPE. */
1900 ipa_polymorphic_call_context::meet_speculation_with
1901 (tree new_outer_type
, HOST_WIDE_INT new_offset
, bool new_maybe_derived_type
,
1904 if (!new_outer_type
&& speculative_outer_type
)
1906 clear_speculation ();
1910 /* restrict_to_inner_class may eliminate wrong speculation making our job
1913 restrict_to_inner_class (otr_type
);
1915 if (!speculative_outer_type
1916 || !speculation_consistent_p (speculative_outer_type
,
1918 speculative_maybe_derived_type
,
1922 if (!speculation_consistent_p (new_outer_type
, new_offset
,
1923 new_maybe_derived_type
, otr_type
))
1925 clear_speculation ();
1929 else if (types_must_be_same_for_odr (speculative_outer_type
,
1932 if (speculative_offset
!= new_offset
)
1934 clear_speculation ();
1939 if (!speculative_maybe_derived_type
&& new_maybe_derived_type
)
1941 speculative_maybe_derived_type
= true;
1948 /* See if one type contains the other as a field (not base). */
1949 else if (contains_type_p (new_outer_type
, new_offset
- speculative_offset
,
1950 speculative_outer_type
, false, false))
1952 else if (contains_type_p (speculative_outer_type
,
1953 speculative_offset
- new_offset
,
1954 new_outer_type
, false, false))
1956 speculative_outer_type
= new_outer_type
;
1957 speculative_offset
= new_offset
;
1958 speculative_maybe_derived_type
= new_maybe_derived_type
;
1961 /* See if OUTER_TYPE is base of CTX.OUTER_TYPE. */
1962 else if (contains_type_p (new_outer_type
,
1963 new_offset
- speculative_offset
,
1964 speculative_outer_type
, false, true))
1966 if (!speculative_maybe_derived_type
)
1968 speculative_maybe_derived_type
= true;
1973 /* See if CTX.OUTER_TYPE is base of OUTER_TYPE. */
1974 else if (contains_type_p (speculative_outer_type
,
1975 speculative_offset
- new_offset
, new_outer_type
, false, true))
1977 speculative_outer_type
= new_outer_type
;
1978 speculative_offset
= new_offset
;
1979 speculative_maybe_derived_type
= true;
1984 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1985 fprintf (dump_file
, "Giving up on speculative meet\n");
1986 clear_speculation ();
/* Assume that both THIS and a given context are valid and strengthen THIS
   if possible.  Return true if any strengthening was made.
   If the actual type the context is being used in is known, OTR_TYPE should be
   set accordingly.  This improves the quality of the combined result.  */
bool
ipa_polymorphic_call_context::combine_with (ipa_polymorphic_call_context ctx,
                                            tree otr_type)
{
  bool updated = false;

  if (ctx.useless_p () || invalid)
    return false;

  /* Restricting the context to the inner type makes merging easier, however
     do not do that unless we know how the context is used (OTR_TYPE is
     non-NULL).  */
  if (otr_type && !invalid && !ctx.invalid)
    {
      restrict_to_inner_class (otr_type);
      ctx.restrict_to_inner_class (otr_type);
      if (invalid)
        return false;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Polymorphic call context combine:");
      dump (dump_file);
      fprintf (dump_file, "With context: ");
      ctx.dump (dump_file);
      if (otr_type)
        {
          fprintf (dump_file, "To be used with type: ");
          print_generic_expr (dump_file, otr_type, TDF_SLIM);
          fprintf (dump_file, "\n");
        }
    }

  /* If the call is known to be invalid, we are done.  */
  if (ctx.invalid)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "-> Invalid context\n");
      goto invalidate;
    }

  if (!ctx.outer_type)
    ;
  else if (!outer_type)
    {
      outer_type = ctx.outer_type;
      offset = ctx.offset;
      dynamic = ctx.dynamic;
      maybe_in_construction = ctx.maybe_in_construction;
      maybe_derived_type = ctx.maybe_derived_type;
      updated = true;
    }
  /* If types are known to be the same, merging is quite easy.  */
  else if (types_must_be_same_for_odr (outer_type, ctx.outer_type))
    {
      if (offset != ctx.offset
          && TYPE_SIZE (outer_type)
          && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file,
                     "Outer types match, offset mismatch -> invalid\n");
          clear_speculation ();
          clear_outer_type ();
          invalid = true;
          return true;
        }
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Outer types match, merging flags\n");
      if (maybe_in_construction && !ctx.maybe_in_construction)
        {
          updated = true;
          maybe_in_construction = false;
        }
      if (maybe_derived_type && !ctx.maybe_derived_type)
        {
          updated = true;
          maybe_derived_type = false;
        }
      if (dynamic && !ctx.dynamic)
        {
          updated = true;
          dynamic = false;
        }
    }
  /* If we know the type precisely, there is not much to improve.  */
  else if (!maybe_derived_type && !maybe_in_construction
           && !ctx.maybe_derived_type && !ctx.maybe_in_construction)
    {
      /* It may be easy to check whether the second context permits the first
         and set INVALID otherwise.  This is not easy to do in general;
         contains_type_p may return false negatives for non-comparable
         types.

         If OTR_TYPE is known, we however can expect that
         restrict_to_inner_class should have discovered the same base
         type.  */
      if (otr_type && !ctx.maybe_in_construction && !ctx.maybe_derived_type)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "Contexts disagree -> invalid\n");
          goto invalidate;
        }
    }
  /* See if one type contains the other as a field (not base).
     In this case we want to choose the wider type, because it contains
     more information.  */
  else if (contains_type_p (ctx.outer_type, ctx.offset - offset,
                            outer_type, false, false))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Second type contains the first as a field\n");

      if (maybe_derived_type)
        {
          outer_type = ctx.outer_type;
          maybe_derived_type = ctx.maybe_derived_type;
          offset = ctx.offset;
          dynamic = ctx.dynamic;
          updated = true;
        }

      /* If we do not know how the context is being used, we can not clear
         MAYBE_IN_CONSTRUCTION because it may be offset to another component
         of OUTER_TYPE later and we know nothing about it.  */
      if (otr_type && maybe_in_construction
          && !ctx.maybe_in_construction)
        {
          maybe_in_construction = false;
          updated = true;
        }
    }
  else if (contains_type_p (outer_type, offset - ctx.offset,
                            ctx.outer_type, false, false))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "First type contains the second as a field\n");

      if (otr_type && maybe_in_construction
          && !ctx.maybe_in_construction)
        {
          maybe_in_construction = false;
          updated = true;
        }
    }
  /* See if OUTER_TYPE is base of CTX.OUTER_TYPE.  */
  else if (contains_type_p (ctx.outer_type,
                            ctx.offset - offset, outer_type, false, true))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "First type is base of second\n");
      if (!maybe_derived_type)
        {
          if (!ctx.maybe_in_construction
              && types_odr_comparable (outer_type, ctx.outer_type))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file,
                         "Second context does not permit base -> invalid\n");
              goto invalidate;
            }
        }
      /* Pick the variant deeper in the hierarchy.  */
      else
        {
          outer_type = ctx.outer_type;
          maybe_in_construction = ctx.maybe_in_construction;
          maybe_derived_type = ctx.maybe_derived_type;
          offset = ctx.offset;
          dynamic = ctx.dynamic;
          updated = true;
        }
    }
  /* See if CTX.OUTER_TYPE is base of OUTER_TYPE.  */
  else if (contains_type_p (outer_type,
                            offset - ctx.offset, ctx.outer_type, false, true))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Second type is base of first\n");
      if (!ctx.maybe_derived_type)
        {
          if (!maybe_in_construction
              && types_odr_comparable (outer_type, ctx.outer_type))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file,
                         "First context does not permit base -> invalid\n");
              goto invalidate;
            }
          /* Pick the base type.  */
          else if (maybe_in_construction)
            {
              outer_type = ctx.outer_type;
              maybe_in_construction = ctx.maybe_in_construction;
              maybe_derived_type = ctx.maybe_derived_type;
              offset = ctx.offset;
              dynamic = ctx.dynamic;
              updated = true;
            }
        }
    }
  /* TODO: handle merging using the hierarchy.  */
  else if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Giving up on merge\n");

  updated |= combine_speculation_with (ctx.speculative_outer_type,
                                       ctx.speculative_offset,
                                       ctx.speculative_maybe_derived_type,
                                       otr_type);

  if (updated && dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Updated as: ");
      dump (dump_file);
      fprintf (dump_file, "\n");
    }
  return updated;

invalidate:
  invalid = true;
  clear_speculation ();
  clear_outer_type ();
  return true;
}
/* Take the non-speculative info, merge it with the speculative info and
   clear the speculation.  Used when we no longer manage to keep track of
   the actual outer type, but we think it is still there.

   If OTR_TYPE is set, the transformation can be done more effectively
   assuming that the context is going to be used only that way.  */
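/* For instance (an illustrative sketch, not tied to any particular caller):
   if the context knows the outer type at offset 0 but the code being
   analyzed may change the dynamic type of the memory, make_speculative
   moves OUTER_TYPE, OFFSET and MAYBE_DERIVED_TYPE into the speculative part
   of the context and leaves the non-speculative part empty, so later
   propagation can still use the guess without relying on it for
   correctness.  */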
void
ipa_polymorphic_call_context::make_speculative (tree otr_type)
{
  tree spec_outer_type = outer_type;
  HOST_WIDE_INT spec_offset = offset;
  bool spec_maybe_derived_type = maybe_derived_type;

  if (invalid)
    {
      clear_outer_type ();
      clear_speculation ();
      return;
    }
  if (!outer_type)
    return;
  clear_outer_type ();
  combine_speculation_with (spec_outer_type, spec_offset,
                            spec_maybe_derived_type,
                            otr_type);
}
/* Use this when we cannot track a dynamic type change.  It speculatively
   assumes that no type change is happening.  */
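/* For example (illustrative only): after a point where the object could, in
   principle, be destroyed and a different type constructed in the same
   storage by placement new, a dynamic context is demoted to a speculation
   via make_speculative; inside a constructor or destructor of a polymorphic
   type (IN_POLY_CDTOR) we instead record that the object may still be under
   construction.  */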
void
ipa_polymorphic_call_context::possible_dynamic_type_change (bool in_poly_cdtor,
                                                             tree otr_type)
{
  if (dynamic)
    make_speculative (otr_type);
  else if (in_poly_cdtor)
    maybe_in_construction = true;
}
/* Return TRUE if this context conveys the same information as X.  */

bool
ipa_polymorphic_call_context::equal_to
    (const ipa_polymorphic_call_context &x) const
{
  if (useless_p ())
    return x.useless_p ();
  if (invalid)
    return x.invalid;
  if (x.useless_p () || x.invalid)
    return false;

  if (outer_type)
    {
      if (!x.outer_type
          || !types_odr_comparable (outer_type, x.outer_type)
          || !types_same_for_odr (outer_type, x.outer_type)
          || offset != x.offset
          || maybe_in_construction != x.maybe_in_construction
          || maybe_derived_type != x.maybe_derived_type
          || dynamic != x.dynamic)
        return false;
    }
  else if (x.outer_type)
    return false;

  if (speculative_outer_type
      && speculation_consistent_p (speculative_outer_type, speculative_offset,
                                   speculative_maybe_derived_type, NULL_TREE))
    {
      if (!x.speculative_outer_type)
        return false;

      if (!types_odr_comparable (speculative_outer_type,
                                 x.speculative_outer_type)
          || !types_same_for_odr (speculative_outer_type,
                                  x.speculative_outer_type)
          || speculative_offset != x.speculative_offset
          || speculative_maybe_derived_type != x.speculative_maybe_derived_type)
        return false;
    }
  else if (x.speculative_outer_type
           && x.speculation_consistent_p (x.speculative_outer_type,
                                          x.speculative_offset,
                                          x.speculative_maybe_derived_type,
                                          NULL_TREE))
    return false;

  return true;
}
/* Modify the context to be strictly less restrictive than CTX.  */
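/* An illustrative example (hypothetical types, mirroring the example given
   for combine_with):

     struct A {virtual void foo ();};
     struct B : A {virtual void foo ();};

   If THIS says the outer type is exactly B while CTX only promises A or one
   of its derivatives, the meet weakens THIS to outer type A with
   MAYBE_DERIVED_TYPE set, since that describes every object admitted by
   either context.  This is the natural operation when the same value may
   originate from several places and the resulting context has to be correct
   for all of them.  */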
bool
ipa_polymorphic_call_context::meet_with (ipa_polymorphic_call_context ctx,
                                         tree otr_type)
{
  bool updated = false;

  if (useless_p () || ctx.invalid)
    return false;

  /* Restricting the context to the inner type makes merging easier, however
     do not do that unless we know how the context is used (OTR_TYPE is
     non-NULL).  */
  if (otr_type && !useless_p () && !ctx.useless_p ())
    {
      restrict_to_inner_class (otr_type);
      ctx.restrict_to_inner_class (otr_type);
      if (invalid)
        return false;
    }

  if (ctx.useless_p () || invalid)
    {
      *this = ctx;
      return true;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Polymorphic call context meet:");
      dump (dump_file);
      fprintf (dump_file, "With context: ");
      ctx.dump (dump_file);
      if (otr_type)
        {
          fprintf (dump_file, "To be used with type: ");
          print_generic_expr (dump_file, otr_type, TDF_SLIM);
          fprintf (dump_file, "\n");
        }
    }

  if (!dynamic && ctx.dynamic)
    {
      dynamic = true;
      updated = true;
    }

  /* If we know nothing about the outer type, there is nothing to weaken.  */
  if (!outer_type)
    ;
  else if (!ctx.outer_type)
    {
      clear_outer_type ();
      updated = true;
    }
  /* If types are known to be the same, merging is quite easy.  */
  else if (types_must_be_same_for_odr (outer_type, ctx.outer_type))
    {
      if (offset != ctx.offset
          && TYPE_SIZE (outer_type)
          && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file,
                     "Outer types match, offset mismatch -> clearing\n");
          clear_outer_type ();
          return true;
        }
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Outer types match, merging flags\n");
      if (!maybe_in_construction && ctx.maybe_in_construction)
        {
          updated = true;
          maybe_in_construction = true;
        }
      if (!maybe_derived_type && ctx.maybe_derived_type)
        {
          updated = true;
          maybe_derived_type = true;
        }
      if (!dynamic && ctx.dynamic)
        {
          updated = true;
          dynamic = true;
        }
    }
  /* See if one type contains the other as a field (not base).  */
  else if (contains_type_p (ctx.outer_type, ctx.offset - offset,
                            outer_type, false, false))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Second type contains the first as a field\n");

      /* The second type is more specific, so we keep the first.
         We need to set the DYNAMIC flag to avoid declaring the context
         INVALID if OFFSET ends up being out of range.  */
      if (!dynamic
          && (ctx.dynamic
              || (!otr_type
                  && (!TYPE_SIZE (ctx.outer_type)
                      || !TYPE_SIZE (outer_type)
                      || !operand_equal_p (TYPE_SIZE (ctx.outer_type),
                                           TYPE_SIZE (outer_type), 0)))))
        {
          dynamic = true;
          updated = true;
        }
    }
  else if (contains_type_p (outer_type, offset - ctx.offset,
                            ctx.outer_type, false, false))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "First type contains the second as a field\n");

      /* Switch to the inner type, which is less restrictive; again set the
         DYNAMIC flag if OFFSET may end up out of its range.  */
      if (!dynamic
          && (ctx.dynamic
              || (!otr_type
                  && (!TYPE_SIZE (ctx.outer_type)
                      || !TYPE_SIZE (outer_type)
                      || !operand_equal_p (TYPE_SIZE (ctx.outer_type),
                                           TYPE_SIZE (outer_type), 0)))))
        dynamic = true;
      outer_type = ctx.outer_type;
      offset = ctx.offset;
      dynamic |= ctx.dynamic;
      maybe_in_construction = ctx.maybe_in_construction;
      maybe_derived_type = ctx.maybe_derived_type;
      updated = true;
    }
  /* See if OUTER_TYPE is base of CTX.OUTER_TYPE.  */
  else if (contains_type_p (ctx.outer_type,
                            ctx.offset - offset, outer_type, false, true))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "First type is base of second\n");
      if (!maybe_derived_type)
        {
          maybe_derived_type = true;
          updated = true;
        }
      if (!maybe_in_construction && ctx.maybe_in_construction)
        {
          maybe_in_construction = true;
          updated = true;
        }
      if (!dynamic && ctx.dynamic)
        {
          dynamic = true;
          updated = true;
        }
    }
  /* See if CTX.OUTER_TYPE is base of OUTER_TYPE.  */
  else if (contains_type_p (outer_type,
                            offset - ctx.offset, ctx.outer_type, false, true))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Second type is base of first\n");
      outer_type = ctx.outer_type;
      offset = ctx.offset;
      updated = true;
      if (!maybe_derived_type)
        maybe_derived_type = true;
      if (!maybe_in_construction && ctx.maybe_in_construction)
        maybe_in_construction = true;
      if (!dynamic && ctx.dynamic)
        dynamic = true;
    }
  /* TODO: handle merging using the hierarchy.  */
  else
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Giving up on meet\n");
      clear_outer_type ();
      updated = true;
    }

  updated |= meet_speculation_with (ctx.speculative_outer_type,
                                    ctx.speculative_offset,
                                    ctx.speculative_maybe_derived_type,
                                    otr_type);

  if (updated && dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Updated as: ");
      dump (dump_file);
      fprintf (dump_file, "\n");
    }
  return updated;
}