gcc/ipa-polymorphic-call.c
1 /* Analysis of polymorphic call context.
2 Copyright (C) 2013-2014 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "print-tree.h"
27 #include "calls.h"
28 #include "expr.h"
29 #include "tree-pass.h"
30 #include "hash-set.h"
31 #include "target.h"
32 #include "hash-table.h"
33 #include "inchash.h"
34 #include "tree-pretty-print.h"
35 #include "ipa-utils.h"
36 #include "tree-ssa-alias.h"
37 #include "internal-fn.h"
38 #include "gimple-fold.h"
39 #include "gimple-expr.h"
40 #include "gimple.h"
41 #include "ipa-inline.h"
42 #include "diagnostic.h"
43 #include "tree-dfa.h"
44 #include "demangle.h"
45 #include "dbgcnt.h"
46 #include "gimple-pretty-print.h"
47 #include "stor-layout.h"
48 #include "intl.h"
49 #include "data-streamer.h"
50 #include "lto-streamer.h"
51 #include "streamer-hooks.h"
53 /* Return true when TYPE contains a polymorphic type and thus is interesting
54 for devirtualization machinery. */
56 static bool contains_type_p (tree, HOST_WIDE_INT, tree,
57 bool consider_placement_new = true,
58 bool consider_bases = true);
60 bool
61 contains_polymorphic_type_p (const_tree type)
63 type = TYPE_MAIN_VARIANT (type);
65 if (RECORD_OR_UNION_TYPE_P (type))
67 if (TYPE_BINFO (type)
68 && polymorphic_type_binfo_p (TYPE_BINFO (type)))
69 return true;
70 for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
71 if (TREE_CODE (fld) == FIELD_DECL
72 && !DECL_ARTIFICIAL (fld)
73 && contains_polymorphic_type_p (TREE_TYPE (fld)))
74 return true;
75 return false;
77 if (TREE_CODE (type) == ARRAY_TYPE)
78 return contains_polymorphic_type_p (TREE_TYPE (type));
79 return false;
82 /* Return true if it seems valid to use placement new to build EXPECTED_TYPE
83 at position CUR_OFFSET within TYPE.
85 POD can be changed to an instance of a polymorphic type by
86 placement new. Here we play safe and assume that any
87 non-polymorphic type is POD. */
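/* A minimal illustrative sketch (hypothetical types, not part of GCC) of
   the situation the predicate below guards against:

     struct Buf { char data[64]; };              // non-polymorphic, treated as POD
     struct Obj { virtual void f (); };

     Buf b;
     Obj *o = new (&b.data) Obj;                 // placement new builds Obj inside b

   Here the dynamic type at offset 0 of `b' may legally become Obj at run
   time, so a context describing `b' cannot rule that out.  */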
88 bool
89 possible_placement_new (tree type, tree expected_type,
90 HOST_WIDE_INT cur_offset)
92 return ((TREE_CODE (type) != RECORD_TYPE
93 || !TYPE_BINFO (type)
94 || cur_offset >= BITS_PER_WORD
95 || !polymorphic_type_binfo_p (TYPE_BINFO (type)))
96 && (!TYPE_SIZE (type)
97 || !tree_fits_shwi_p (TYPE_SIZE (type))
98 || (cur_offset
99 + (expected_type ? tree_to_uhwi (TYPE_SIZE (expected_type))
100 : GET_MODE_BITSIZE (Pmode))
101 <= tree_to_uhwi (TYPE_SIZE (type)))));
104 /* THIS->OUTER_TYPE is a type of memory object where object of OTR_TYPE
105 is contained at THIS->OFFSET. Walk the memory representation of
106 THIS->OUTER_TYPE and find the outermost class type that matches
107 OTR_TYPE or contain OTR_TYPE as a base. Update THIS
108 to represent it.
110 If OTR_TYPE is NULL, just find outermost polymorphic type with
111 virtual table present at position OFFSET.
113 For example when THIS represents type
114 class A
116 int a;
117 class B b;
119 and we look for type at offset sizeof(int), we end up with B and offset 0.
120 If the same is produced by multiple inheritance, we end up with A and offset
121 sizeof(int).
123 If we cannot find the corresponding class, give up by setting
124 THIS->OUTER_TYPE to OTR_TYPE and THIS->OFFSET to NULL.
125 Return true when the lookup was successful.
127 When CONSIDER_PLACEMENT_NEW is false, reject contexts that may be made
128 valid only via allocation of a new polymorphic type inside by means
129 of placement new.
131 When CONSIDER_BASES is false, only look for actual fields, not base types
132 of TYPE. */
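/* An illustrative sketch of the cases above (hypothetical classes, not
   part of GCC):

     struct B { virtual void f (); int x; };
     struct C { virtual void g (); };
     struct A : C { int a; B b; };

   For OUTER_TYPE == A and OFFSET == the bit offset of field `b', the walk
   below reaches the user-defined field `b', so the context is narrowed to
   OUTER_TYPE == B, OFFSET == 0 and MAYBE_DERIVED_TYPE is cleared.  If
   OFFSET instead points into the C base (a DECL_ARTIFICIAL field), the
   recorded OUTER_TYPE and OFFSET are left at A, since a base at that
   offset may still be part of a larger derived object.  */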
134 bool
135 ipa_polymorphic_call_context::restrict_to_inner_class (tree otr_type,
136 bool consider_placement_new,
137 bool consider_bases)
139 tree type = outer_type;
140 HOST_WIDE_INT cur_offset = offset;
141 bool speculative = false;
142 bool size_unknown = false;
143 unsigned HOST_WIDE_INT otr_type_size = GET_MODE_BITSIZE (Pmode);
145 /* Update OUTER_TYPE to match EXPECTED_TYPE if it is not set. */
146 if (!outer_type)
148 clear_outer_type (otr_type);
149 type = otr_type;
150 cur_offset = 0;
152 /* See if OFFSET points inside OUTER_TYPE. If it does not, we know
153 that the context is either invalid, or the instance type must be
154 derived from OUTER_TYPE.
156 Because the instance type may contain a field whose type is OUTER_TYPE,
157 we cannot derive any effective information about it.
159 TODO: In the case we know all derived types, we can definitely do better
160 here. */
161 else if (TYPE_SIZE (outer_type)
162 && tree_fits_shwi_p (TYPE_SIZE (outer_type))
163 && tree_to_shwi (TYPE_SIZE (outer_type)) >= 0
164 && tree_to_shwi (TYPE_SIZE (outer_type)) <= offset)
166 clear_outer_type (otr_type);
167 type = otr_type;
168 cur_offset = 0;
170 /* If derived type is not allowed, we know that the context is invalid.
171 For dynamic types, we really do not have information about
172 size of the memory location. It is possible that completely
173 different type is stored after outer_type. */
174 if (!maybe_derived_type && !dynamic)
176 clear_speculation ();
177 invalid = true;
178 return false;
182 if (otr_type && TYPE_SIZE (otr_type)
183 && tree_fits_shwi_p (TYPE_SIZE (otr_type)))
184 otr_type_size = tree_to_uhwi (TYPE_SIZE (otr_type));
186 if (!type || offset < 0)
187 goto no_useful_type_info;
189 /* Find the sub-object the constant actually refers to and mark whether it is
190 an artificial one (as opposed to a user-defined one).
192 This loop is performed twice; first time for outer_type and second time
193 for speculative_outer_type. The second run has SPECULATIVE set. */
194 while (true)
196 unsigned HOST_WIDE_INT pos, size;
197 tree fld;
199 /* If we do not know the size of TYPE, we need to be more conservative
200 about accepting cases where we cannot find EXPECTED_TYPE.
201 Generally the types that do matter here are of constant size.
202 Size_unknown case should be very rare. */
203 if (TYPE_SIZE (type)
204 && tree_fits_shwi_p (TYPE_SIZE (type))
205 && tree_to_shwi (TYPE_SIZE (type)) >= 0)
206 size_unknown = false;
207 else
208 size_unknown = true;
210 /* On a match, just return what we found. */
211 if ((otr_type
212 && types_odr_comparable (type, otr_type)
213 && types_same_for_odr (type, otr_type))
214 || (!otr_type
215 && TREE_CODE (type) == RECORD_TYPE
216 && TYPE_BINFO (type)
217 && polymorphic_type_binfo_p (TYPE_BINFO (type))))
219 if (speculative)
221 /* If we did not match the offset, just give up on speculation. */
222 if (cur_offset != 0
223 /* Also check if speculation did not end up being same as
224 non-speculation. */
225 || (types_must_be_same_for_odr (speculative_outer_type,
226 outer_type)
227 && (maybe_derived_type
228 == speculative_maybe_derived_type)))
229 clear_speculation ();
230 return true;
232 else
234 /* If type is known to be final, do not worry about derived
235 types. Testing it here may help us to avoid speculation. */
236 if (otr_type && TREE_CODE (outer_type) == RECORD_TYPE
237 && (!in_lto_p || odr_type_p (outer_type))
238 && type_known_to_have_no_deriavations_p (outer_type))
239 maybe_derived_type = false;
241 /* A type cannot contain itself at a non-zero offset. In that case
242 just give up. Still accept the case where the size is not known.
243 Either the second copy may appear past the end of type or within
244 the non-POD buffer located inside the variably sized type
245 itself. */
246 if (cur_offset != 0)
247 goto no_useful_type_info;
248 /* If we determined the type precisely or we have no clue on
249 speculation, we are done. */
250 if (!maybe_derived_type || !speculative_outer_type
251 || !speculation_consistent_p (speculative_outer_type,
252 speculative_offset,
253 speculative_maybe_derived_type,
254 otr_type))
256 clear_speculation ();
257 return true;
259 /* Otherwise look into speculation now. */
260 else
262 speculative = true;
263 type = speculative_outer_type;
264 cur_offset = speculative_offset;
265 continue;
270 /* Walk the fields and find the corresponding one at OFFSET. */
271 if (TREE_CODE (type) == RECORD_TYPE)
273 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
275 if (TREE_CODE (fld) != FIELD_DECL)
276 continue;
278 pos = int_bit_position (fld);
279 if (pos > (unsigned HOST_WIDE_INT)cur_offset)
280 continue;
282 /* Do not consider vptr itself. Not even for placement new. */
283 if (!pos && DECL_ARTIFICIAL (fld)
284 && POINTER_TYPE_P (TREE_TYPE (fld))
285 && TYPE_BINFO (type)
286 && polymorphic_type_binfo_p (TYPE_BINFO (type)))
287 continue;
289 if (!DECL_SIZE (fld) || !tree_fits_uhwi_p (DECL_SIZE (fld)))
290 goto no_useful_type_info;
291 size = tree_to_uhwi (DECL_SIZE (fld));
293 /* We can always skip types smaller than pointer size:
294 those cannot contain a virtual table pointer.
296 Disqualifying fields that are too small to fit OTR_TYPE
297 saves work needed to walk them for no benefit.
298 Because of the way the bases are packed into a class, the
299 field's size may be smaller than type size, so it needs
300 to be done with care. */
302 if (pos <= (unsigned HOST_WIDE_INT)cur_offset
303 && (pos + size) >= (unsigned HOST_WIDE_INT)cur_offset
304 + GET_MODE_BITSIZE (Pmode)
305 && (!otr_type
306 || !TYPE_SIZE (TREE_TYPE (fld))
307 || !tree_fits_shwi_p (TYPE_SIZE (TREE_TYPE (fld)))
308 || (pos + tree_to_uhwi (TYPE_SIZE (TREE_TYPE (fld))))
309 >= cur_offset + otr_type_size))
310 break;
313 if (!fld)
314 goto no_useful_type_info;
316 type = TYPE_MAIN_VARIANT (TREE_TYPE (fld));
317 cur_offset -= pos;
318 /* DECL_ARTIFICIAL represents a basetype. */
319 if (!DECL_ARTIFICIAL (fld))
321 if (!speculative)
323 outer_type = type;
324 offset = cur_offset;
325 /* As soon as we see a field containing the type,
326 we know we are not looking for derivations. */
327 maybe_derived_type = false;
329 else
331 speculative_outer_type = type;
332 speculative_offset = cur_offset;
333 speculative_maybe_derived_type = false;
336 else if (!consider_bases)
337 goto no_useful_type_info;
339 else if (TREE_CODE (type) == ARRAY_TYPE)
341 tree subtype = TYPE_MAIN_VARIANT (TREE_TYPE (type));
343 /* Give up if we don't know the size of the array element.
344 Also give up on non-polymorphic types as they are used
345 as buffers for placement new. */
346 if (!TYPE_SIZE (subtype)
347 || !tree_fits_shwi_p (TYPE_SIZE (subtype))
348 || tree_to_shwi (TYPE_SIZE (subtype)) <= 0
349 || !contains_polymorphic_type_p (subtype))
350 goto no_useful_type_info;
352 HOST_WIDE_INT new_offset = cur_offset % tree_to_shwi (TYPE_SIZE (subtype));
354 /* We may see a buffer for placement new. In this case the expected type
355 can be bigger than the subtype. */
356 if (TYPE_SIZE (subtype)
357 && (cur_offset + otr_type_size
358 > tree_to_uhwi (TYPE_SIZE (subtype))))
359 goto no_useful_type_info;
361 cur_offset = new_offset;
362 type = subtype;
363 if (!speculative)
365 outer_type = type;
366 offset = cur_offset;
367 maybe_derived_type = false;
369 else
371 speculative_outer_type = type;
372 speculative_offset = cur_offset;
373 speculative_maybe_derived_type = false;
376 /* Give up on anything else. */
377 else
379 no_useful_type_info:
380 if (maybe_derived_type && !speculative
381 && TREE_CODE (outer_type) == RECORD_TYPE
382 && TREE_CODE (otr_type) == RECORD_TYPE
383 && TYPE_BINFO (otr_type)
384 && !offset
385 && get_binfo_at_offset (TYPE_BINFO (otr_type), 0, outer_type))
387 clear_outer_type (otr_type);
388 if (!speculative_outer_type
389 || !speculation_consistent_p (speculative_outer_type,
390 speculative_offset,
391 speculative_maybe_derived_type,
392 otr_type))
393 clear_speculation ();
394 if (speculative_outer_type)
396 speculative = true;
397 type = speculative_outer_type;
398 cur_offset = speculative_offset;
400 else
401 return true;
403 /* We found no way to embed EXPECTED_TYPE in TYPE.
404 We still permit two special cases - placement new and
405 the case of variably sized types containing themselves. */
406 if (!speculative
407 && consider_placement_new
408 && (size_unknown || !type || maybe_derived_type
409 || possible_placement_new (type, otr_type, cur_offset)))
411 /* In these weird cases we want to accept the context.
412 In non-speculative run we have no useful outer_type info
413 (TODO: we may eventually want to record upper bound on the
414 type size that can be used to prune the walk),
415 but we still want to consider speculation that may
416 give useful info. */
417 if (!speculative)
419 clear_outer_type (otr_type);
420 if (!speculative_outer_type
421 || !speculation_consistent_p (speculative_outer_type,
422 speculative_offset,
423 speculative_maybe_derived_type,
424 otr_type))
425 clear_speculation ();
426 if (speculative_outer_type)
428 speculative = true;
429 type = speculative_outer_type;
430 cur_offset = speculative_offset;
432 else
433 return true;
435 else
436 clear_speculation ();
437 return true;
439 else
441 clear_speculation ();
442 if (speculative)
443 return true;
444 clear_outer_type (otr_type);
445 invalid = true;
446 return false;
452 /* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET.
453 CONSIDER_PLACEMENT_NEW makes the function accept cases where OTR_TYPE can
454 be built within OUTER_TYPE by means of placement new. CONSIDER_BASES makes
455 the function accept cases where OTR_TYPE appears as a base of OUTER_TYPE or
456 as a base of one of the fields of OUTER_TYPE. */
458 static bool
459 contains_type_p (tree outer_type, HOST_WIDE_INT offset,
460 tree otr_type,
461 bool consider_placement_new,
462 bool consider_bases)
464 ipa_polymorphic_call_context context;
466 /* Check that type is within range. */
467 if (offset < 0)
468 return false;
469 if (TYPE_SIZE (outer_type) && TYPE_SIZE (otr_type)
470 && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST
471 && TREE_CODE (TYPE_SIZE (otr_type)) == INTEGER_CST
472 && wi::ltu_p (wi::to_offset (TYPE_SIZE (outer_type)), (wi::to_offset (TYPE_SIZE (otr_type)) + offset)))
473 return false;
475 context.offset = offset;
476 context.outer_type = TYPE_MAIN_VARIANT (outer_type);
477 context.maybe_derived_type = false;
478 return context.restrict_to_inner_class (otr_type, consider_placement_new, consider_bases);
482 /* We know that the instance is stored in variable or parameter
483 (not dynamically allocated) and we want to disprove the fact
484 that it may be in construction at invocation of CALL.
486 BASE represents memory location where instance is stored.
487 If BASE is NULL, it is assumed to be global memory.
488 OUTER_TYPE is known type of the instance or NULL if not
489 known.
491 For the variable to be in construction we actually need to
492 be in the constructor of the corresponding global variable, or
493 the inline stack of CALL must contain the constructor.
494 Check this condition. This check works safely only before
495 IPA passes, because inline stacks may become out of date
496 later. */
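/* An illustrative sketch (hypothetical code, not part of GCC):

     struct S { S () { f (); } virtual void f (); };
     static S s;                                 // global instance

   A virtual call on `s' made from (or inlined from) S::S may still observe
   the object in construction, so for such calls the function below keeps
   returning true.  For a call reached from an unrelated function whose
   inline stack contains no constructor or destructor it can return false,
   allowing MAYBE_IN_CONSTRUCTION to be dropped.  */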
498 bool
499 decl_maybe_in_construction_p (tree base, tree outer_type,
500 gimple call, tree function)
502 if (outer_type)
503 outer_type = TYPE_MAIN_VARIANT (outer_type);
504 gcc_assert (!base || DECL_P (base));
506 /* After inlining, code unification optimizations may invalidate
507 inline stacks. Also we need to give up on global variables after
508 IPA, because addresses of these may have been propagated to their
509 constructors. */
510 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
511 return true;
513 /* Pure functions cannot change the dynamic type;
514 that requires writing to memory. */
515 if ((!base || !auto_var_in_fn_p (base, function))
516 && flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
517 return false;
519 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
520 block = BLOCK_SUPERCONTEXT (block))
521 if (BLOCK_ABSTRACT_ORIGIN (block)
522 && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
524 tree fn = BLOCK_ABSTRACT_ORIGIN (block);
526 if (TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
527 || (!DECL_CXX_CONSTRUCTOR_P (fn)
528 && !DECL_CXX_DESTRUCTOR_P (fn)))
530 /* Watch for clones where we constant propagated the first
531 argument (pointer to the instance). */
532 fn = DECL_ABSTRACT_ORIGIN (fn);
533 if (!fn
534 || (base && !is_global_var (base))
535 || TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
536 || (!DECL_CXX_CONSTRUCTOR_P (fn)
537 && !DECL_CXX_DESTRUCTOR_P (fn)))
538 continue;
540 if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
541 continue;
543 tree type = TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (fn)));
545 if (!outer_type || !types_odr_comparable (type, outer_type))
547 if (TREE_CODE (type) == RECORD_TYPE
548 && TYPE_BINFO (type)
549 && polymorphic_type_binfo_p (TYPE_BINFO (type)))
550 return true;
552 else if (types_same_for_odr (type, outer_type))
553 return true;
556 if (!base || (TREE_CODE (base) == VAR_DECL && is_global_var (base)))
558 if (TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
559 || (!DECL_CXX_CONSTRUCTOR_P (function)
560 && !DECL_CXX_DESTRUCTOR_P (function)))
562 if (!DECL_ABSTRACT_ORIGIN (function))
563 return false;
564 /* Watch for clones where we constant propagated the first
565 argument (pointer to the instance). */
566 function = DECL_ABSTRACT_ORIGIN (function);
567 if (!function
568 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
569 || (!DECL_CXX_CONSTRUCTOR_P (function)
570 && !DECL_CXX_DESTRUCTOR_P (function)))
571 return false;
573 tree type = TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (function)));
574 if (!outer_type || !types_odr_comparable (type, outer_type))
576 if (TREE_CODE (type) == RECORD_TYPE
577 && TYPE_BINFO (type)
578 && polymorphic_type_binfo_p (TYPE_BINFO (type)))
579 return true;
581 else if (types_same_for_odr (type, outer_type))
582 return true;
584 return false;
587 /* Dump human readable context to F. */
589 void
590 ipa_polymorphic_call_context::dump (FILE *f) const
592 fprintf (f, " ");
593 if (invalid)
594 fprintf (f, "Call is known to be undefined");
595 else
597 if (useless_p ())
598 fprintf (f, "nothing known");
599 if (outer_type || offset)
601 fprintf (f, "Outer type%s:", dynamic ? " (dynamic)":"");
602 print_generic_expr (f, outer_type, TDF_SLIM);
603 if (maybe_derived_type)
604 fprintf (f, " (or a derived type)");
605 if (maybe_in_construction)
606 fprintf (f, " (maybe in construction)");
607 fprintf (f, " offset "HOST_WIDE_INT_PRINT_DEC,
608 offset);
610 if (speculative_outer_type)
612 if (outer_type || offset)
613 fprintf (f, " ");
614 fprintf (f, "Speculative outer type:");
615 print_generic_expr (f, speculative_outer_type, TDF_SLIM);
616 if (speculative_maybe_derived_type)
617 fprintf (f, " (or a derived type)");
618 fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC,
619 speculative_offset);
622 fprintf(f, "\n");
625 /* Print context to stderr. */
627 void
628 ipa_polymorphic_call_context::debug () const
630 dump (stderr);
633 /* Stream out the context to OB. */
635 void
636 ipa_polymorphic_call_context::stream_out (struct output_block *ob) const
638 struct bitpack_d bp = bitpack_create (ob->main_stream);
640 bp_pack_value (&bp, invalid, 1);
641 bp_pack_value (&bp, maybe_in_construction, 1);
642 bp_pack_value (&bp, maybe_derived_type, 1);
643 bp_pack_value (&bp, speculative_maybe_derived_type, 1);
644 bp_pack_value (&bp, dynamic, 1);
645 bp_pack_value (&bp, outer_type != NULL, 1);
646 bp_pack_value (&bp, offset != 0, 1);
647 bp_pack_value (&bp, speculative_outer_type != NULL, 1);
648 streamer_write_bitpack (&bp);
650 if (outer_type != NULL)
651 stream_write_tree (ob, outer_type, true);
652 if (offset)
653 streamer_write_hwi (ob, offset);
654 if (speculative_outer_type != NULL)
656 stream_write_tree (ob, speculative_outer_type, true);
657 streamer_write_hwi (ob, speculative_offset);
659 else
660 gcc_assert (!speculative_offset);
663 /* Stream in the context from IB and DATA_IN. */
665 void
666 ipa_polymorphic_call_context::stream_in (struct lto_input_block *ib,
667 struct data_in *data_in)
669 struct bitpack_d bp = streamer_read_bitpack (ib);
671 invalid = bp_unpack_value (&bp, 1);
672 maybe_in_construction = bp_unpack_value (&bp, 1);
673 maybe_derived_type = bp_unpack_value (&bp, 1);
674 speculative_maybe_derived_type = bp_unpack_value (&bp, 1);
675 dynamic = bp_unpack_value (&bp, 1);
676 bool outer_type_p = bp_unpack_value (&bp, 1);
677 bool offset_p = bp_unpack_value (&bp, 1);
678 bool speculative_outer_type_p = bp_unpack_value (&bp, 1);
680 if (outer_type_p)
681 outer_type = stream_read_tree (ib, data_in);
682 else
683 outer_type = NULL;
684 if (offset_p)
685 offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
686 else
687 offset = 0;
688 if (speculative_outer_type_p)
690 speculative_outer_type = stream_read_tree (ib, data_in);
691 speculative_offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
693 else
695 speculative_outer_type = NULL;
696 speculative_offset = 0;
700 /* Produce a polymorphic call context for a call to a method of an instance
701 that is located within BASE (assumed to be a decl) at offset OFF. */
703 void
704 ipa_polymorphic_call_context::set_by_decl (tree base, HOST_WIDE_INT off)
706 gcc_assert (DECL_P (base));
707 clear_speculation ();
709 if (!contains_polymorphic_type_p (TREE_TYPE (base)))
711 clear_outer_type ();
712 offset = off;
713 return;
715 outer_type = TYPE_MAIN_VARIANT (TREE_TYPE (base));
716 offset = off;
717 /* Make very conservative assumption that all objects
718 may be in construction.
720 It is up to caller to revisit this via
721 get_dynamic_type or decl_maybe_in_construction_p. */
722 maybe_in_construction = true;
723 maybe_derived_type = false;
724 dynamic = false;
727 /* CST is an invariant (address of a decl); try to get a meaningful
728 polymorphic call context for a polymorphic call of a method
729 of an instance of OTR_TYPE that is located at offset OFF of this invariant.
730 Return FALSE if nothing meaningful can be found. */
732 bool
733 ipa_polymorphic_call_context::set_by_invariant (tree cst,
734 tree otr_type,
735 HOST_WIDE_INT off)
737 HOST_WIDE_INT offset2, size, max_size;
738 tree base;
740 invalid = false;
741 off = 0;
742 clear_outer_type (otr_type);
744 if (TREE_CODE (cst) != ADDR_EXPR)
745 return false;
747 cst = TREE_OPERAND (cst, 0);
748 base = get_ref_base_and_extent (cst, &offset2, &size, &max_size);
749 if (!DECL_P (base) || max_size == -1 || max_size != size)
750 return false;
752 /* Only type inconsistent programs can have otr_type that is
753 not part of outer type. */
754 if (otr_type && !contains_type_p (TREE_TYPE (base), off, otr_type))
755 return false;
757 set_by_decl (base, off);
758 return true;
761 /* See if OP is an SSA name initialized as a copy or by a single assignment.
762 If so, walk the SSA graph up. Because a simple PHI conditional is considered
763 a copy, GLOBAL_VISITED may be used to avoid an infinite loop walking the SSA
764 graph. */
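/* An illustrative sketch of the kind of GIMPLE chain that is walked
   (hypothetical SSA names, not part of GCC):

     ptr_1 = &global;
     ptr_2 = ptr_1;                              // single-assignment copy
     # ptr_3 = PHI <ptr_2, 0B>                   // conditional against NULL
     ... use of ptr_3 ...

   Starting from ptr_3, the walk follows the PHI to ptr_2, then the copy to
   ptr_1, and finally returns the ADDR_EXPR &global.  */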
766 static tree
767 walk_ssa_copies (tree op, hash_set<tree> **global_visited = NULL)
769 hash_set <tree> *visited = NULL;
770 STRIP_NOPS (op);
771 while (TREE_CODE (op) == SSA_NAME
772 && !SSA_NAME_IS_DEFAULT_DEF (op)
773 && SSA_NAME_DEF_STMT (op)
774 && (gimple_assign_single_p (SSA_NAME_DEF_STMT (op))
775 || gimple_code (SSA_NAME_DEF_STMT (op)) == GIMPLE_PHI))
777 if (global_visited)
779 if (!*global_visited)
780 *global_visited = new hash_set<tree>;
781 if ((*global_visited)->add (op))
782 goto done;
784 else
786 if (!visited)
787 visited = new hash_set<tree>;
788 if (visited->add (op))
789 goto done;
791 /* Special case
792 if (ptr == 0)
793 ptr = 0;
794 else
795 ptr = ptr.foo;
796 This pattern is implicitly produced for casts to non-primary
797 bases. When doing context analysis, we do not really care
798 about the case where the pointer is NULL, because the call will be
799 undefined anyway. */
800 if (gimple_code (SSA_NAME_DEF_STMT (op)) == GIMPLE_PHI)
802 gimple phi = SSA_NAME_DEF_STMT (op);
804 if (gimple_phi_num_args (phi) > 2)
805 goto done;
806 if (gimple_phi_num_args (phi) == 1)
807 op = gimple_phi_arg_def (phi, 0);
808 else if (integer_zerop (gimple_phi_arg_def (phi, 0)))
809 op = gimple_phi_arg_def (phi, 1);
810 else if (integer_zerop (gimple_phi_arg_def (phi, 1)))
811 op = gimple_phi_arg_def (phi, 0);
812 else
813 goto done;
815 else
817 if (gimple_assign_load_p (SSA_NAME_DEF_STMT (op)))
818 goto done;
819 op = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op));
821 STRIP_NOPS (op);
823 done:
824 if (visited)
825 delete (visited);
826 return op;
829 /* Create polymorphic call context from IP invariant CST.
830 This is typically &global_var.
831 OTR_TYPE specifies the type of the polymorphic call or NULL if unknown; OFF
832 is the offset of the call. */
834 ipa_polymorphic_call_context::ipa_polymorphic_call_context (tree cst,
835 tree otr_type,
836 HOST_WIDE_INT off)
838 clear_speculation ();
839 set_by_invariant (cst, otr_type, off);
842 /* Build context for pointer REF contained in FNDECL at statement STMT.
843 If INSTANCE is non-NULL, return a pointer to the object described by
844 the context, or the DECL in which the context is contained. */
846 ipa_polymorphic_call_context::ipa_polymorphic_call_context (tree fndecl,
847 tree ref,
848 gimple stmt,
849 tree *instance)
851 tree otr_type = NULL;
852 tree base_pointer;
853 hash_set <tree> *visited = NULL;
855 if (TREE_CODE (ref) == OBJ_TYPE_REF)
857 otr_type = obj_type_ref_class (ref);
858 base_pointer = OBJ_TYPE_REF_OBJECT (ref);
860 else
861 base_pointer = ref;
863 /* Set up basic info in case we find nothing interesting in the analysis. */
864 clear_speculation ();
865 clear_outer_type (otr_type);
866 invalid = false;
868 /* Walk SSA for outer object. */
869 while (true)
871 base_pointer = walk_ssa_copies (base_pointer, &visited);
872 if (TREE_CODE (base_pointer) == ADDR_EXPR)
874 HOST_WIDE_INT size, max_size;
875 HOST_WIDE_INT offset2;
876 tree base = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
877 &offset2, &size, &max_size);
879 if (max_size != -1 && max_size == size)
880 combine_speculation_with (TYPE_MAIN_VARIANT (TREE_TYPE (base)),
881 offset + offset2,
882 true,
883 NULL /* Do not change outer type. */);
885 /* If this is a varying address, punt. */
886 if ((TREE_CODE (base) == MEM_REF || DECL_P (base))
887 && max_size != -1
888 && max_size == size)
890 /* We found a dereference of a pointer. The type of the pointer
891 and of the MEM_REF is meaningless, but we can look further. */
892 if (TREE_CODE (base) == MEM_REF)
894 base_pointer = TREE_OPERAND (base, 0);
895 offset
896 += offset2 + mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
897 outer_type = NULL;
899 /* We found base object. In this case the outer_type
900 is known. */
901 else if (DECL_P (base))
903 if (visited)
904 delete (visited);
905 /* Only type inconsistent programs can have otr_type that is
906 not part of outer type. */
907 if (otr_type
908 && !contains_type_p (TREE_TYPE (base),
909 offset + offset2, otr_type))
911 invalid = true;
912 if (instance)
913 *instance = base_pointer;
914 return;
916 set_by_decl (base, offset + offset2);
917 if (outer_type && maybe_in_construction && stmt)
918 maybe_in_construction
919 = decl_maybe_in_construction_p (base,
920 outer_type,
921 stmt,
922 fndecl);
923 if (instance)
924 *instance = base;
925 return;
927 else
928 break;
930 else
931 break;
933 else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
934 && tree_fits_uhwi_p (TREE_OPERAND (base_pointer, 1)))
936 offset += tree_to_shwi (TREE_OPERAND (base_pointer, 1))
937 * BITS_PER_UNIT;
938 base_pointer = TREE_OPERAND (base_pointer, 0);
940 else
941 break;
944 if (visited)
945 delete (visited);
947 /* Try to determine type of the outer object. */
948 if (TREE_CODE (base_pointer) == SSA_NAME
949 && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
950 && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
952 /* See if parameter is THIS pointer of a method. */
953 if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
954 && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
956 outer_type
957 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
958 gcc_assert (TREE_CODE (outer_type) == RECORD_TYPE
959 || TREE_CODE (outer_type) == UNION_TYPE);
961 /* Dynamic casting has possibly upcasted the type
962 in the hierarchy. In this case the outer type is less
963 informative than the inner type and we should forget
964 about it. */
965 if ((otr_type
966 && !contains_type_p (outer_type, offset,
967 otr_type))
968 || !contains_polymorphic_type_p (outer_type))
970 outer_type = NULL;
971 if (instance)
972 *instance = base_pointer;
973 return;
976 dynamic = true;
978 /* If the function is constructor or destructor, then
979 the type is possibly in construction, but we know
980 it is not derived type. */
981 if (DECL_CXX_CONSTRUCTOR_P (fndecl)
982 || DECL_CXX_DESTRUCTOR_P (fndecl))
984 maybe_in_construction = true;
985 maybe_derived_type = false;
987 else
989 maybe_derived_type = true;
990 maybe_in_construction = false;
992 if (instance)
993 *instance = base_pointer;
994 return;
996 /* Non-PODs passed by value are really passed by invisible
997 reference. In this case we also know the type of the
998 object. */
999 if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
1001 outer_type
1002 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
1003 /* Only type inconsistent programs can have otr_type that is
1004 not part of outer type. */
1005 if (otr_type && !contains_type_p (outer_type, offset,
1006 otr_type))
1008 invalid = true;
1009 if (instance)
1010 *instance = base_pointer;
1011 return;
1013 /* Non-polymorphic types have no interest for us. */
1014 else if (!otr_type && !contains_polymorphic_type_p (outer_type))
1016 outer_type = NULL;
1017 if (instance)
1018 *instance = base_pointer;
1019 return;
1021 maybe_derived_type = false;
1022 maybe_in_construction = false;
1023 if (instance)
1024 *instance = base_pointer;
1025 return;
1029 tree base_type = TREE_TYPE (base_pointer);
1031 if (TREE_CODE (base_pointer) == SSA_NAME
1032 && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
1033 && TREE_CODE (SSA_NAME_VAR (base_pointer)) != PARM_DECL)
1035 invalid = true;
1036 if (instance)
1037 *instance = base_pointer;
1038 return;
1040 if (TREE_CODE (base_pointer) == SSA_NAME
1041 && SSA_NAME_DEF_STMT (base_pointer)
1042 && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
1043 base_type = TREE_TYPE (gimple_assign_rhs1
1044 (SSA_NAME_DEF_STMT (base_pointer)));
1046 if (POINTER_TYPE_P (base_type))
1047 combine_speculation_with (TYPE_MAIN_VARIANT (TREE_TYPE (base_type)),
1048 offset,
1049 true, NULL /* Do not change type here */);
1050 /* TODO: There are multiple ways to derive a type. For instance
1051 if BASE_POINTER is passed to a constructor call prior to our reference.
1052 We do not make this type of flow-sensitive analysis yet. */
1053 if (instance)
1054 *instance = base_pointer;
1055 return;
1058 /* Structure to be passed in between detect_type_change and
1059 check_stmt_for_type_change. */
1061 struct type_change_info
1063 /* Offset into the object where there is the virtual method pointer we are
1064 looking for. */
1065 HOST_WIDE_INT offset;
1066 /* The declaration or SSA_NAME pointer of the base that we are checking for
1067 type change. */
1068 tree instance;
1069 /* The reference to virtual table pointer used. */
1070 tree vtbl_ptr_ref;
1071 tree otr_type;
1072 /* If we actually can tell the type that the object has changed to, it is
1073 stored in this field. Otherwise it remains NULL_TREE. */
1074 tree known_current_type;
1075 HOST_WIDE_INT known_current_offset;
1077 /* Set to true if dynamic type change has been detected. */
1078 bool type_maybe_changed;
1079 /* Set to true if multiple types have been encountered. known_current_type
1080 must be disregarded in that case. */
1081 bool multiple_types_encountered;
1082 /* Set to true if we possibly missed some dynamic type changes and we should
1083 consider the set to be speculative. */
1084 bool speculative;
1085 bool seen_unanalyzed_store;
1088 /* Return true if STMT is not a call and may modify a virtual method table pointer.
1089 We take advantage of the fact that vtable stores must appear within constructor
1090 and destructor functions. */
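/* An illustrative sketch (hypothetical C++ and simplified GIMPLE, not part
   of GCC) of the kind of store this predicate looks for:

     struct A { virtual void f (); };

     A::A ()                                     // constructor body
     {
       this->_vptr.A = &_ZTV1A + 16;             // virtual table pointer store
     }

   Such assignments are emitted only inside constructors and destructors,
   which is why the inline stack is inspected below.  */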
1092 static bool
1093 noncall_stmt_may_be_vtbl_ptr_store (gimple stmt)
1095 if (is_gimple_assign (stmt))
1097 tree lhs = gimple_assign_lhs (stmt);
1099 if (gimple_clobber_p (stmt))
1100 return false;
1101 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
1103 if (flag_strict_aliasing
1104 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
1105 return false;
1107 if (TREE_CODE (lhs) == COMPONENT_REF
1108 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
1109 return false;
1110 /* In the future we might want to use get_base_ref_and_offset to find
1111 if there is a field corresponding to the offset and if so, proceed
1112 almost like if it was a component ref. */
1116 /* Code unification may mess with inline stacks. */
1117 if (cfun->after_inlining)
1118 return true;
1120 /* Walk the inline stack and watch out for ctors/dtors.
1121 TODO: Maybe we can require the store to appear in toplevel
1122 block of CTOR/DTOR. */
1123 for (tree block = gimple_block (stmt); block && TREE_CODE (block) == BLOCK;
1124 block = BLOCK_SUPERCONTEXT (block))
1125 if (BLOCK_ABSTRACT_ORIGIN (block)
1126 && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
1128 tree fn = BLOCK_ABSTRACT_ORIGIN (block);
1130 if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
1131 return false;
1132 return (TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
1133 && (DECL_CXX_CONSTRUCTOR_P (fn)
1134 || DECL_CXX_DESTRUCTOR_P (fn)));
1136 return (TREE_CODE (TREE_TYPE (current_function_decl)) == METHOD_TYPE
1137 && (DECL_CXX_CONSTRUCTOR_P (current_function_decl)
1138 || DECL_CXX_DESTRUCTOR_P (current_function_decl)));
1141 /* If STMT can be proved to be an assignment to the virtual method table
1142 pointer of ANALYZED_OBJ and the type associated with the new table
1143 identified, return the type. Otherwise return NULL_TREE if the type changes
1144 in an unknown way, or ERROR_MARK_NODE if the type is unchanged. */
1146 static tree
1147 extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci,
1148 HOST_WIDE_INT *type_offset)
1150 HOST_WIDE_INT offset, size, max_size;
1151 tree lhs, rhs, base;
1153 if (!gimple_assign_single_p (stmt))
1154 return NULL_TREE;
1156 lhs = gimple_assign_lhs (stmt);
1157 rhs = gimple_assign_rhs1 (stmt);
1158 if (TREE_CODE (lhs) != COMPONENT_REF
1159 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
1161 if (dump_file)
1162 fprintf (dump_file, " LHS is not virtual table.\n");
1163 return NULL_TREE;
1166 if (tci->vtbl_ptr_ref && operand_equal_p (lhs, tci->vtbl_ptr_ref, 0))
1168 else
1170 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
1171 if (DECL_P (tci->instance))
1173 if (base != tci->instance)
1175 if (dump_file)
1177 fprintf (dump_file, " base:");
1178 print_generic_expr (dump_file, base, TDF_SLIM);
1179 fprintf (dump_file, " does not match instance:");
1180 print_generic_expr (dump_file, tci->instance, TDF_SLIM);
1181 fprintf (dump_file, "\n");
1183 return NULL_TREE;
1186 else if (TREE_CODE (base) == MEM_REF)
1188 if (!operand_equal_p (tci->instance, TREE_OPERAND (base, 0), 0))
1190 if (dump_file)
1192 fprintf (dump_file, " base mem ref:");
1193 print_generic_expr (dump_file, base, TDF_SLIM);
1194 fprintf (dump_file, " does not match instance:");
1195 print_generic_expr (dump_file, tci->instance, TDF_SLIM);
1196 fprintf (dump_file, "\n");
1198 return NULL_TREE;
1200 if (!integer_zerop (TREE_OPERAND (base, 1)))
1202 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
1204 if (dump_file)
1206 fprintf (dump_file, " base mem ref:");
1207 print_generic_expr (dump_file, base, TDF_SLIM);
1208 fprintf (dump_file, " has non-representable offset:");
1209 print_generic_expr (dump_file, tci->instance, TDF_SLIM);
1210 fprintf (dump_file, "\n");
1212 return NULL_TREE;
1214 else
1215 offset += tree_to_shwi (TREE_OPERAND (base, 1)) * BITS_PER_UNIT;
1218 else if (!operand_equal_p (tci->instance, base, 0)
1219 || tci->offset)
1221 if (dump_file)
1223 fprintf (dump_file, " base:");
1224 print_generic_expr (dump_file, base, TDF_SLIM);
1225 fprintf (dump_file, " does not match instance:");
1226 print_generic_expr (dump_file, tci->instance, TDF_SLIM);
1227 fprintf (dump_file, " with offset %i\n", (int)tci->offset);
1229 return tci->offset > GET_MODE_BITSIZE (Pmode) ? error_mark_node : NULL_TREE;
1231 if (offset != tci->offset
1232 || size != POINTER_SIZE
1233 || max_size != POINTER_SIZE)
1235 if (dump_file)
1236 fprintf (dump_file, " wrong offset %i!=%i or size %i\n",
1237 (int)offset, (int)tci->offset, (int)size);
1238 return offset + GET_MODE_BITSIZE (Pmode) <= tci->offset
1239 || (max_size != -1
1240 && tci->offset + GET_MODE_BITSIZE (Pmode) > offset + max_size)
1241 ? error_mark_node : NULL;
1245 tree vtable;
1246 unsigned HOST_WIDE_INT offset2;
1248 if (!vtable_pointer_value_to_vtable (rhs, &vtable, &offset2))
1250 if (dump_file)
1251 fprintf (dump_file, " Failed to lookup binfo\n");
1252 return NULL;
1255 tree binfo = subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
1256 offset2, vtable);
1257 if (!binfo)
1259 if (dump_file)
1260 fprintf (dump_file, " Construction vtable used\n");
1261 /* FIXME: We should support construction contexts. */
1262 return NULL;
1265 *type_offset = tree_to_shwi (BINFO_OFFSET (binfo)) * BITS_PER_UNIT;
1266 return DECL_CONTEXT (vtable);
1269 /* Record dynamic type change of TCI to TYPE. */
1271 static void
1272 record_known_type (struct type_change_info *tci, tree type, HOST_WIDE_INT offset)
1274 if (dump_file)
1276 if (type)
1278 fprintf (dump_file, " Recording type: ");
1279 print_generic_expr (dump_file, type, TDF_SLIM);
1280 fprintf (dump_file, " at offset %i\n", (int)offset);
1282 else
1283 fprintf (dump_file, " Recording unknown type\n");
1286 /* If we found a constructor of type that is not polymorphic or
1287 that may contain the type in question as a field (not as base),
1288 restrict to the inner class first to make type matching below
1289 happier. */
1290 if (type
1291 && (offset
1292 || (TREE_CODE (type) != RECORD_TYPE
1293 || !polymorphic_type_binfo_p (TYPE_BINFO (type)))))
1295 ipa_polymorphic_call_context context;
1297 context.offset = offset;
1298 context.outer_type = type;
1299 context.maybe_in_construction = false;
1300 context.maybe_derived_type = false;
1301 context.dynamic = true;
1302 /* If we failed to find the inner type, we know that the call
1303 would be undefined for type produced here. */
1304 if (!context.restrict_to_inner_class (tci->otr_type))
1306 if (dump_file)
1307 fprintf (dump_file, " Ignoring; does not contain otr_type\n");
1308 return;
1310 /* Watch for the case where we reached a POD type and anticipate placement
1311 new. */
1312 if (!context.maybe_derived_type)
1314 type = context.outer_type;
1315 offset = context.offset;
1318 if (tci->type_maybe_changed
1319 && (!types_same_for_odr (type, tci->known_current_type)
1320 || offset != tci->known_current_offset))
1321 tci->multiple_types_encountered = true;
1322 tci->known_current_type = TYPE_MAIN_VARIANT (type);
1323 tci->known_current_offset = offset;
1324 tci->type_maybe_changed = true;
1327 /* Callback of walk_aliased_vdefs and a helper function for
1328 detect_type_change to check whether a particular statement may modify
1329 the virtual table pointer, and if possible also determine the new type of
1330 the (sub-)object. It stores its result into DATA, which points to a
1331 type_change_info structure. */
1333 static bool
1334 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
1336 gimple stmt = SSA_NAME_DEF_STMT (vdef);
1337 struct type_change_info *tci = (struct type_change_info *) data;
1338 tree fn;
1340 /* If we already gave up, just terminate the rest of walk. */
1341 if (tci->multiple_types_encountered)
1342 return true;
1344 if (is_gimple_call (stmt))
1346 if (gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE))
1347 return false;
1349 /* Check for a constructor call. */
1350 if ((fn = gimple_call_fndecl (stmt)) != NULL_TREE
1351 && DECL_CXX_CONSTRUCTOR_P (fn)
1352 && TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
1353 && gimple_call_num_args (stmt))
1355 tree op = walk_ssa_copies (gimple_call_arg (stmt, 0));
1356 tree type = method_class_type (TREE_TYPE (fn));
1357 HOST_WIDE_INT offset = 0, size, max_size;
1359 if (dump_file)
1361 fprintf (dump_file, " Checking constructor call: ");
1362 print_gimple_stmt (dump_file, stmt, 0, 0);
1365 /* See if THIS parameter seems like instance pointer. */
1366 if (TREE_CODE (op) == ADDR_EXPR)
1368 op = get_ref_base_and_extent (TREE_OPERAND (op, 0),
1369 &offset, &size, &max_size);
1370 if (size != max_size || max_size == -1)
1372 tci->speculative = true;
1373 return false;
1375 if (op && TREE_CODE (op) == MEM_REF)
1377 if (!tree_fits_shwi_p (TREE_OPERAND (op, 1)))
1379 tci->speculative = true;
1380 return false;
1382 offset += tree_to_shwi (TREE_OPERAND (op, 1))
1383 * BITS_PER_UNIT;
1384 op = TREE_OPERAND (op, 0);
1386 else if (DECL_P (op))
1388 else
1390 tci->speculative = true;
1391 return false;
1393 op = walk_ssa_copies (op);
1395 if (operand_equal_p (op, tci->instance, 0)
1396 && TYPE_SIZE (type)
1397 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
1398 && tree_fits_shwi_p (TYPE_SIZE (type))
1399 && tree_to_shwi (TYPE_SIZE (type)) + offset > tci->offset)
1401 record_known_type (tci, type, tci->offset - offset);
1402 return true;
1405 /* Calls may possibly change the dynamic type by placement new. Assume
1406 it will not happen, but make the result speculative only. */
1407 if (dump_file)
1409 fprintf (dump_file, " Function call may change dynamic type:");
1410 print_gimple_stmt (dump_file, stmt, 0, 0);
1412 tci->speculative = true;
1413 return false;
1415 /* Check for inlined virtual table store. */
1416 else if (noncall_stmt_may_be_vtbl_ptr_store (stmt))
1418 tree type;
1419 HOST_WIDE_INT offset = 0;
1420 if (dump_file)
1422 fprintf (dump_file, " Checking vtbl store: ");
1423 print_gimple_stmt (dump_file, stmt, 0, 0);
1426 type = extr_type_from_vtbl_ptr_store (stmt, tci, &offset);
1427 if (type == error_mark_node)
1428 return false;
1429 gcc_assert (!type || TYPE_MAIN_VARIANT (type) == type);
1430 if (!type)
1432 if (dump_file)
1433 fprintf (dump_file, " Unanalyzed store may change type.\n");
1434 tci->seen_unanalyzed_store = true;
1435 tci->speculative = true;
1437 else
1438 record_known_type (tci, type, offset);
1439 return true;
1441 else
1442 return false;
1445 /* THIS is polymorphic call context obtained from get_polymorphic_context.
1446 OTR_OBJECT is pointer to the instance returned by OBJ_TYPE_REF_OBJECT.
1447 INSTANCE is pointer to the outer instance as returned by
1448 get_polymorphic_context. To avoid creation of temporary expressions,
1449 INSTANCE may also be a declaration, if get_polymorphic_context found the
1450 value to be in static storage.
1452 If the type of instance is not fully determined
1453 (either OUTER_TYPE is unknown or MAYBE_IN_CONSTRUCTION/INCLUDE_DERIVED_TYPES
1454 is set), try to walk memory writes and find the actual construction of the
1455 instance.
1457 Return true if memory is unchanged from function entry.
1459 We do not include this analysis in the context analysis itself, because
1460 it needs memory SSA to be fully built and the walk may be expensive.
1461 So it is not suitable for use within fold_stmt and similar uses. */
1463 bool
1464 ipa_polymorphic_call_context::get_dynamic_type (tree instance,
1465 tree otr_object,
1466 tree otr_type,
1467 gimple call)
1469 struct type_change_info tci;
1470 ao_ref ao;
1471 bool function_entry_reached = false;
1472 tree instance_ref = NULL;
1473 gimple stmt = call;
1474 /* Remember OFFSET before it is modified by restrict_to_inner_class.
1475 This is because we do not update INSTANCE when walking inwards. */
1476 HOST_WIDE_INT instance_offset = offset;
1478 if (otr_type)
1479 otr_type = TYPE_MAIN_VARIANT (otr_type);
1481 /* Walk into inner type. This may clear maybe_derived_type and save us
1482 from useless work. It also makes later comparisons with the static type
1483 easier. */
1484 if (outer_type && otr_type)
1486 if (!restrict_to_inner_class (otr_type))
1487 return false;
1490 if (!maybe_in_construction && !maybe_derived_type)
1491 return false;
1493 /* We need to obtain a reference to the virtual table pointer. It is better
1494 to look it up in the code rather than build our own. This requires a bit
1495 of pattern matching, but we end up verifying that what we found is
1496 correct.
1498 What we pattern match is:
1500 tmp = instance->_vptr.A; // vtbl ptr load
1501 tmp2 = tmp[otr_token]; // vtable lookup
1502 OBJ_TYPE_REF(tmp2;instance->0) (instance);
1504 We want to start alias oracle walk from vtbl pointer load,
1505 but we may not be able to identify it, for example, when PRE moved the
1506 load around. */
1508 if (gimple_code (call) == GIMPLE_CALL)
1510 tree ref = gimple_call_fn (call);
1511 HOST_WIDE_INT offset2, size, max_size;
1513 if (TREE_CODE (ref) == OBJ_TYPE_REF)
1515 ref = OBJ_TYPE_REF_EXPR (ref);
1516 ref = walk_ssa_copies (ref);
1518 /* Check if definition looks like vtable lookup. */
1519 if (TREE_CODE (ref) == SSA_NAME
1520 && !SSA_NAME_IS_DEFAULT_DEF (ref)
1521 && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref))
1522 && TREE_CODE (gimple_assign_rhs1
1523 (SSA_NAME_DEF_STMT (ref))) == MEM_REF)
1525 ref = get_base_address
1526 (TREE_OPERAND (gimple_assign_rhs1
1527 (SSA_NAME_DEF_STMT (ref)), 0));
1528 ref = walk_ssa_copies (ref);
1529 /* Find base address of the lookup and see if it looks like
1530 vptr load. */
1531 if (TREE_CODE (ref) == SSA_NAME
1532 && !SSA_NAME_IS_DEFAULT_DEF (ref)
1533 && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref)))
1535 tree ref_exp = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (ref));
1536 tree base_ref = get_ref_base_and_extent
1537 (ref_exp, &offset2, &size, &max_size);
1539 /* Finally verify that what we found looks like read from OTR_OBJECT
1540 or from INSTANCE with offset OFFSET. */
1541 if (base_ref
1542 && ((TREE_CODE (base_ref) == MEM_REF
1543 && ((offset2 == instance_offset
1544 && TREE_OPERAND (base_ref, 0) == instance)
1545 || (!offset2 && TREE_OPERAND (base_ref, 0) == otr_object)))
1546 || (DECL_P (instance) && base_ref == instance
1547 && offset2 == instance_offset)))
1549 stmt = SSA_NAME_DEF_STMT (ref);
1550 instance_ref = ref_exp;
1557 /* If we failed to look up the reference in the code, build our own. */
1558 if (!instance_ref)
1560 /* If the statement in question does not use memory, we can't tell
1561 anything. */
1562 if (!gimple_vuse (stmt))
1563 return false;
1564 ao_ref_init_from_ptr_and_size (&ao, otr_object, NULL);
1566 else
1567 /* Otherwise use the real reference. */
1568 ao_ref_init (&ao, instance_ref);
1570 /* We look for vtbl pointer read. */
1571 ao.size = POINTER_SIZE;
1572 ao.max_size = ao.size;
1573 if (otr_type)
1574 ao.ref_alias_set
1575 = get_deref_alias_set (TREE_TYPE (BINFO_VTABLE (TYPE_BINFO (otr_type))));
1577 if (dump_file)
1579 fprintf (dump_file, "Determining dynamic type for call: ");
1580 print_gimple_stmt (dump_file, call, 0, 0);
1581 fprintf (dump_file, " Starting walk at: ");
1582 print_gimple_stmt (dump_file, stmt, 0, 0);
1583 fprintf (dump_file, " instance pointer: ");
1584 print_generic_expr (dump_file, otr_object, TDF_SLIM);
1585 fprintf (dump_file, " Outer instance pointer: ");
1586 print_generic_expr (dump_file, instance, TDF_SLIM);
1587 fprintf (dump_file, " offset: %i (bits)", (int)offset);
1588 fprintf (dump_file, " vtbl reference: ");
1589 print_generic_expr (dump_file, instance_ref, TDF_SLIM);
1590 fprintf (dump_file, "\n");
1593 tci.offset = offset;
1594 tci.instance = instance;
1595 tci.vtbl_ptr_ref = instance_ref;
1596 gcc_assert (TREE_CODE (instance) != MEM_REF);
1597 tci.known_current_type = NULL_TREE;
1598 tci.known_current_offset = 0;
1599 tci.otr_type = otr_type;
1600 tci.type_maybe_changed = false;
1601 tci.multiple_types_encountered = false;
1602 tci.speculative = false;
1603 tci.seen_unanalyzed_store = false;
1605 walk_aliased_vdefs (&ao, gimple_vuse (stmt), check_stmt_for_type_change,
1606 &tci, NULL, &function_entry_reached);
1608 /* If we did not find any type changing statements, we may still drop
1609 the maybe_in_construction flag if the context already has an outer type.
1611 Here we make special assumptions about both constructors and
1612 destructors which are all the functions that are allowed to alter the
1613 VMT pointers. It assumes that destructors begin with assignment into
1614 all VMT pointers and that constructors essentially look in the
1615 following way:
1617 1) The very first thing they do is that they call constructors of
1618 ancestor sub-objects that have them.
1620 2) Then the VMT pointers of this and all its ancestors are set to new
1621 values corresponding to the type of the constructor.
1623 3) Only afterwards, other stuff such as constructor of member
1624 sub-objects and the code written by the user is run. Only this may
1625 include calling virtual functions, directly or indirectly.
1627 4) placement new cannot be used to change the type of non-POD statically
1628 allocated variables.
1630 There is no way to call a constructor of an ancestor sub-object in any
1631 other way.
1633 This means that we do not have to care whether constructors get the
1634 correct type information because they will always change it (in fact,
1635 if we define the type to be given by the VMT pointer, it is undefined).
1637 The most important fact to derive from the above is that if, for some
1638 statement in the section 3, we try to detect whether the dynamic type
1639 has changed, we can safely ignore all calls as we examine the function
1640 body backwards until we reach statements in section 2 because these
1641 calls cannot be ancestor constructors or destructors (if the input is
1642 not bogus) and so do not change the dynamic type (this holds true only
1643 for automatically allocated objects but at the moment we devirtualize
1644 only these). We then must detect that statements in section 2 change
1645 the dynamic type and can try to derive the new type. That is enough
1646 and we can stop, we will never see the calls into constructors of
1647 sub-objects in this code.
1649 Therefore if the static outer type was found (outer_type)
1650 we can safely ignore tci.speculative that is set on calls and give up
1651 only if there was a dynamic type store that may affect the given variable
1652 (seen_unanalyzed_store). */
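/* An illustrative sketch of the constructor behavior described above
   (hypothetical classes, not part of GCC):

     struct B { virtual void f (); };
     struct D : B { D (); virtual void f (); };

     D::D ()
     {
       // 1) B::B () runs first and installs B's vtable pointer.
       // 2) D's vtable pointer is then stored, changing the dynamic type.
       // 3) Only afterwards may user code call virtual functions; those
       //    calls dispatch on D, never on a further derived type.
     }
*/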
1654 if (!tci.type_maybe_changed
1655 || (outer_type
1656 && !dynamic
1657 && !tci.seen_unanalyzed_store
1658 && !tci.multiple_types_encountered
1659 && offset == tci.offset
1660 && types_same_for_odr (tci.known_current_type,
1661 outer_type)))
1663 if (!outer_type || tci.seen_unanalyzed_store)
1664 return false;
1665 if (maybe_in_construction)
1666 maybe_in_construction = false;
1667 if (dump_file)
1668 fprintf (dump_file, " No dynamic type change found.\n");
1669 return true;
1672 if (tci.known_current_type
1673 && !function_entry_reached
1674 && !tci.multiple_types_encountered)
1676 if (!tci.speculative)
1678 outer_type = TYPE_MAIN_VARIANT (tci.known_current_type);
1679 offset = tci.known_current_offset;
1680 dynamic = true;
1681 maybe_in_construction = false;
1682 maybe_derived_type = false;
1683 if (dump_file)
1684 fprintf (dump_file, " Determined dynamic type.\n");
1686 else if (!speculative_outer_type
1687 || speculative_maybe_derived_type)
1689 speculative_outer_type = TYPE_MAIN_VARIANT (tci.known_current_type);
1690 speculative_offset = tci.known_current_offset;
1691 speculative_maybe_derived_type = false;
1692 if (dump_file)
1693 fprintf (dump_file, " Determined speculative dynamic type.\n");
1696 else if (dump_file)
1698 fprintf (dump_file, " Found multiple types%s%s\n",
1699 function_entry_reached ? " (function entry reached)" : "",
1700 function_entry_reached ? " (multiple types encountered)" : "");
1703 return false;
1706 /* See if speculation given by SPEC_OUTER_TYPE, SPEC_OFFSET and SPEC_MAYBE_DERIVED_TYPE
1707 seems consistent (and useful) with what we already have in the non-speculative context. */
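/* An illustrative sketch (hypothetical classes, not part of GCC):

     struct B { virtual void f (); };
     struct D : B { virtual void f (); };

   With a non-speculative OUTER_TYPE == B and MAYBE_DERIVED_TYPE set, a
   speculative type D at a matching offset is useful: it narrows the likely
   targets to D::f and overriders in types derived from D.  A speculation
   that merely repeats B with MAYBE_DERIVED_TYPE still set, or one whose
   type does not contain OTR_TYPE at SPEC_OFFSET, is rejected below.  */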
1709 bool
1710 ipa_polymorphic_call_context::speculation_consistent_p (tree spec_outer_type,
1711 HOST_WIDE_INT spec_offset,
1712 bool spec_maybe_derived_type,
1713 tree otr_type)
1715 if (!flag_devirtualize_speculatively)
1716 return false;
1718 /* Non-polymorphic types are useless for deriving likely polymorphic
1719 call targets. */
1720 if (!spec_outer_type || !contains_polymorphic_type_p (spec_outer_type))
1721 return false;
1723 /* If we know nothing, speculation is always good. */
1724 if (!outer_type)
1725 return true;
1727 /* Speculation is only useful to avoid derived types.
1728 This is not 100% true for placement new, where the outer context may
1729 turn out to be useless, but ignore these for now. */
1730 if (!maybe_derived_type)
1731 return false;
1733 /* If the types agree, speculation is consistent, but it makes sense only
1734 when it says something new. */
1735 if (types_must_be_same_for_odr (spec_outer_type, outer_type))
1736 return maybe_derived_type && !spec_maybe_derived_type;
1738 /* If speculation does not contain the type in question, ignore it. */
1739 if (otr_type
1740 && !contains_type_p (spec_outer_type, spec_offset, otr_type, false, true))
1741 return false;
1743 /* If the outer type already contains the speculation as a field,
1744 it is useless. We already know SPEC_TYPE from OUTER_TYPE
1745 and that it is not in construction. */
1746 if (contains_type_p (outer_type, offset - spec_offset,
1747 spec_outer_type, false, false))
1748 return false;
1750 /* If speculative outer type is not more specified than outer
1751 type, just give up.
1752 We can only decide this safely if we can compare types with OUTER_TYPE. */
1754 if ((!in_lto_p || odr_type_p (outer_type))
1755 && !contains_type_p (spec_outer_type,
1756 spec_offset - offset,
1757 outer_type, false))
1758 return false;
1759 return true;
1762 /* Improve THIS with speculation described by NEW_OUTER_TYPE, NEW_OFFSET,
1763 NEW_MAYBE_DERIVED_TYPE
1764 If OTR_TYPE is set, assume the context is used with OTR_TYPE. */
1766 bool
1767 ipa_polymorphic_call_context::combine_speculation_with
1768 (tree new_outer_type, HOST_WIDE_INT new_offset, bool new_maybe_derived_type,
1769 tree otr_type)
1771 if (!new_outer_type)
1772 return false;
1774 /* restrict_to_inner_class may eliminate wrong speculation making our job
1775 easier. */
1776 if (otr_type)
1777 restrict_to_inner_class (otr_type);
1779 if (!speculation_consistent_p (new_outer_type, new_offset,
1780 new_maybe_derived_type, otr_type))
1781 return false;
1783 /* New speculation is a win in case we have no speculation or new
1784 speculation does not consider derivations. */
1785 if (!speculative_outer_type
1786 || (speculative_maybe_derived_type
1787 && !new_maybe_derived_type))
1789 speculative_outer_type = new_outer_type;
1790 speculative_offset = new_offset;
1791 speculative_maybe_derived_type = new_maybe_derived_type;
1792 return true;
1794 else if (types_must_be_same_for_odr (speculative_outer_type,
1795 new_outer_type))
1797 if (speculative_offset != new_offset)
1799 /* OK, we have two contexts that seem valid but they disagree;
1800 just give up.
1802 This is not a lattice operation, so we may want to drop it later. */
1803 if (dump_file && (dump_flags & TDF_DETAILS))
1804 fprintf (dump_file,
1805 "Speculative outer types match, "
1806 "offset mismatch -> invalid speculation\n");
1807 clear_speculation ();
1808 return true;
1810 else
1812 if (speculative_maybe_derived_type && !new_maybe_derived_type)
1814 speculative_maybe_derived_type = false;
1815 return true;
1817 else
1818 return false;
1821 /* Choose type that contains the other. This one either contains the outer
1822 as a field (thus giving exactly one target) or is deeper in the type
1823 hierarchy. */
1824 else if (speculative_outer_type
1825 && speculative_maybe_derived_type
1826 && (new_offset > speculative_offset
1827 || (new_offset == speculative_offset
1828 && contains_type_p (new_outer_type,
1829 0, speculative_outer_type, false))))
1831 tree old_outer_type = speculative_outer_type;
1832 HOST_WIDE_INT old_offset = speculative_offset;
1833 bool old_maybe_derived_type = speculative_maybe_derived_type;
1835 speculative_outer_type = new_outer_type;
1836 speculative_offset = new_offset;
1837 speculative_maybe_derived_type = new_maybe_derived_type;
1839 if (otr_type)
1840 restrict_to_inner_class (otr_type);
1842 /* If the speculation turned out to make no sense, revert to the
1843 previous one. */
1844 if (!speculative_outer_type)
1846 speculative_outer_type = old_outer_type;
1847 speculative_offset = old_offset;
1848 speculative_maybe_derived_type = old_maybe_derived_type;
1849 return false;
1851 return (old_offset != speculative_offset
1852 || old_maybe_derived_type != speculative_maybe_derived_type
1853 || types_must_be_same_for_odr (speculative_outer_type,
1854 new_outer_type));
1856 return false;
1859 /* Assume that both THIS and the given context are valid and strengthen THIS
1860 if possible. Return true if any strengthening was made.
1861 If the actual type the context is being used in is known, OTR_TYPE should be
1862 set accordingly. This improves the quality of the combined result. */
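/* Informal sketch of the merge below:  when only one of the two contexts
   knows the outer type, that type is copied; when both name the same outer
   type, the MAYBE_IN_CONSTRUCTION, MAYBE_DERIVED_TYPE and DYNAMIC flags are
   intersected (and an offset mismatch within a fixed-size type makes the
   call invalid); when one outer type contains the other as a field or as a
   base, the more informative variant is chosen; otherwise the merge gives up
   and only the speculative parts are combined.  */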
1864 bool
1865 ipa_polymorphic_call_context::combine_with (ipa_polymorphic_call_context ctx,
1866 tree otr_type)
1868 bool updated = false;
1870 if (ctx.useless_p () || invalid)
1871 return false;
1873 /* Restricting the context to the inner type makes merging easier; however,
1874 do not do that unless we know how the context is used (OTR_TYPE is non-NULL). */
1875 if (otr_type && !invalid && !ctx.invalid)
1877 restrict_to_inner_class (otr_type);
1878 ctx.restrict_to_inner_class (otr_type);
1879 if (invalid)
1880 return false;
1883 if (dump_file && (dump_flags & TDF_DETAILS))
1885 fprintf (dump_file, "Polymorphic call context combine:");
1886 dump (dump_file);
1887 fprintf (dump_file, "With context: ");
1888 ctx.dump (dump_file);
1889 if (otr_type)
1891 fprintf (dump_file, "To be used with type: ");
1892 print_generic_expr (dump_file, otr_type, TDF_SLIM);
1893 fprintf (dump_file, "\n");
1897 /* If call is known to be invalid, we are done. */
1898 if (ctx.invalid)
1900 if (dump_file && (dump_flags & TDF_DETAILS))
1901 fprintf (dump_file, "-> Invalid context\n");
1902 goto invalidate;
1905 if (!ctx.outer_type)
1907 else if (!outer_type)
1909 outer_type = ctx.outer_type;
1910 offset = ctx.offset;
1911 dynamic = ctx.dynamic;
1912 maybe_in_construction = ctx.maybe_in_construction;
1913 maybe_derived_type = ctx.maybe_derived_type;
1914 updated = true;
1916 /* If the types are known to be the same, merging is quite easy. */
1917 else if (types_must_be_same_for_odr (outer_type, ctx.outer_type))
1919 if (offset != ctx.offset
1920 && TYPE_SIZE (outer_type)
1921 && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST)
1923 if (dump_file && (dump_flags & TDF_DETAILS))
1924 fprintf (dump_file, "Outer types match, offset mismatch -> invalid\n");
1925 clear_speculation ();
1926 clear_outer_type ();
1927 invalid = true;
1928 return true;
1930 if (dump_file && (dump_flags & TDF_DETAILS))
1931 fprintf (dump_file, "Outer types match, merging flags\n");
1932 if (maybe_in_construction && !ctx.maybe_in_construction)
1934 updated = true;
1935 maybe_in_construction = false;
1937 if (maybe_derived_type && !ctx.maybe_derived_type)
1939 updated = true;
1940 maybe_derived_type = false;
1942 if (dynamic && !ctx.dynamic)
1944 updated = true;
1945 dynamic = false;
1948 /* If we know the type precisely, there is not much to improve. */
1949 else if (!maybe_derived_type && !maybe_in_construction
1950 && !ctx.maybe_derived_type && !ctx.maybe_in_construction)
1952 /* One could try to check whether the second context permits the first
1953 and set INVALID otherwise, but this is not easy to do in general;
1954 contains_type_p may return false negatives for non-comparable
1955 types.
1957 If OTR_TYPE is known, however, we can expect that
1958 restrict_to_inner_class should have discovered the same base
1959 type. */
1960 if (otr_type && !ctx.maybe_in_construction && !ctx.maybe_derived_type)
1962 if (dump_file && (dump_flags & TDF_DETAILS))
1963 fprintf (dump_file, "Contexts disagree -> invalid\n");
1964 goto invalidate;
1967 /* See if one type contains the other as a field (not base).
1968 In this case we want to choose the wider type, because it contains
1969 more information. */
1970 else if (contains_type_p (ctx.outer_type, ctx.offset - offset,
1971 outer_type, false, false))
1973 if (dump_file && (dump_flags & TDF_DETAILS))
1974 fprintf (dump_file, "Second type contains the first as a field\n");
1976 if (maybe_derived_type)
1978 outer_type = ctx.outer_type;
1979 maybe_derived_type = ctx.maybe_derived_type;
1980 offset = ctx.offset;
1981 dynamic = ctx.dynamic;
1982 updated = true;
1985 /* If we do not know how the context is being used, we cannot
1986 clear MAYBE_IN_CONSTRUCTION because it may be offset
1987 to another component of OUTER_TYPE later and we know nothing
1988 about it. */
1989 if (otr_type && maybe_in_construction
1990 && !ctx.maybe_in_construction)
1992 maybe_in_construction = false;
1993 updated = true;
1996 else if (contains_type_p (outer_type, offset - ctx.offset,
1997 ctx.outer_type, false, false))
1999 if (dump_file && (dump_flags & TDF_DETAILS))
2000 fprintf (dump_file, "First type contains the second as a field\n");
2002 if (otr_type && maybe_in_construction
2003 && !ctx.maybe_in_construction)
2005 maybe_in_construction = false;
2006 updated = true;
2009 /* See if OUTER_TYPE is base of CTX.OUTER_TYPE. */
2010 else if (contains_type_p (ctx.outer_type,
2011 ctx.offset - offset, outer_type, false, true))
2013 if (dump_file && (dump_flags & TDF_DETAILS))
2014 fprintf (dump_file, "First type is base of second\n");
2015 if (!maybe_derived_type)
2017 if (!ctx.maybe_in_construction
2018 && types_odr_comparable (outer_type, ctx.outer_type))
2020 if (dump_file && (dump_flags & TDF_DETAILS))
2021 fprintf (dump_file, "Second context does not permit base -> invalid\n");
2022 goto invalidate;
2025 /* Pick the variant deeper in the hierarchy. */
2026 else
2028 outer_type = ctx.outer_type;
2029 maybe_in_construction = ctx.maybe_in_construction;
2030 maybe_derived_type = ctx.maybe_derived_type;
2031 offset = ctx.offset;
2032 dynamic = ctx.dynamic;
2033 updated = true;
2036 /* See if CTX.OUTER_TYPE is base of OUTER_TYPE. */
2037 else if (contains_type_p (outer_type,
2038 offset - ctx.offset, ctx.outer_type, false, true))
2040 if (dump_file && (dump_flags & TDF_DETAILS))
2041 fprintf (dump_file, "Second type is base of first\n");
2042 if (!ctx.maybe_derived_type)
2044 if (!maybe_in_construction
2045 && types_odr_comparable (outer_type, ctx.outer_type))
2047 if (dump_file && (dump_flags & TDF_DETAILS))
2048 fprintf (dump_file, "First context does not permit base -> invalid\n");
2049 goto invalidate;
2053 /* TODO: handle merging using the type hierarchy. */
2054 else if (dump_file && (dump_flags & TDF_DETAILS))
2055 fprintf (dump_file, "Giving up on merge\n");
2057 updated |= combine_speculation_with (ctx.speculative_outer_type,
2058 ctx.speculative_offset,
2059 ctx.speculative_maybe_derived_type,
2060 otr_type);
2062 if (updated && dump_file && (dump_flags & TDF_DETAILS))
2064 fprintf (dump_file, "Updated as: ");
2065 dump (dump_file);
2066 fprintf (dump_file, "\n");
2068 return updated;
2070 invalidate:
2071 invalid = true;
2072 clear_speculation ();
2073 clear_outer_type ();
2074 return true;
2077 /* Take the non-speculative info, merge it with the speculative info and clear
2078 the speculation. Used when we no longer manage to keep track of the actual
2079 outer type, but we think it is still there.
2081 If OTR_TYPE is set, the transformation can be done more effectively assuming
2082 that the context is going to be used only that way. */
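/* Roughly:  the current OUTER_TYPE, OFFSET and MAYBE_DERIVED_TYPE are saved,
   the non-speculative part of the context is cleared, and the saved values
   are re-added as a speculation via combine_speculation_with.  An invalid
   context is simply reset to a useless one.  */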
2084 void
2085 ipa_polymorphic_call_context::make_speculative (tree otr_type)
2087 tree spec_outer_type = outer_type;
2088 HOST_WIDE_INT spec_offset = offset;
2089 bool spec_maybe_derived_type = maybe_derived_type;
2091 if (invalid)
2093 invalid = false;
2094 clear_outer_type ();
2095 clear_speculation ();
2096 return;
2098 if (!outer_type)
2099 return;
2100 clear_outer_type ();
2101 combine_speculation_with (spec_outer_type, spec_offset,
2102 spec_maybe_derived_type,
2103 otr_type);
2106 /* Use when we cannot track dynamic type changes. This speculatively assumes
2107 that no type change is happening. */
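/* In short:  if the type is dynamic and may therefore have changed, the known
   outer type is demoted to a speculation via make_speculative; otherwise, if
   we are within a constructor or destructor of a polymorphic type
   (IN_POLY_CDTOR), the context is merely marked as possibly being in
   construction.  */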
2109 void
2110 ipa_polymorphic_call_context::possible_dynamic_type_change (bool in_poly_cdtor,
2111 tree otr_type)
2113 if (dynamic)
2114 make_speculative (otr_type);
2115 else if (in_poly_cdtor)
2116 maybe_in_construction = true;