compiler: don't generate stubs for ambiguous direct interface methods
[official-gcc.git] / gcc / ipa-prop.cc
blob c037668e7d8ac6701a043a80209708ec46511004
1 /* Interprocedural analyses.
2 Copyright (C) 2005-2022 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "alloc-pool.h"
28 #include "tree-pass.h"
29 #include "ssa.h"
30 #include "tree-streamer.h"
31 #include "cgraph.h"
32 #include "diagnostic.h"
33 #include "fold-const.h"
34 #include "gimple-iterator.h"
35 #include "gimple-fold.h"
36 #include "tree-eh.h"
37 #include "calls.h"
38 #include "stor-layout.h"
39 #include "print-tree.h"
40 #include "gimplify.h"
41 #include "gimplify-me.h"
42 #include "gimple-walk.h"
43 #include "symbol-summary.h"
44 #include "ipa-prop.h"
45 #include "tree-cfg.h"
46 #include "tree-dfa.h"
47 #include "tree-inline.h"
48 #include "ipa-fnsummary.h"
49 #include "gimple-pretty-print.h"
50 #include "ipa-utils.h"
51 #include "dbgcnt.h"
52 #include "domwalk.h"
53 #include "builtins.h"
54 #include "tree-cfgcleanup.h"
55 #include "options.h"
56 #include "symtab-clones.h"
57 #include "attr-fnspec.h"
58 #include "gimple-range.h"
60 /* Function summary where the parameter infos are actually stored. */
61 ipa_node_params_t *ipa_node_params_sum = NULL;
63 function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;
65 /* Edge summary for IPA-CP edge information. */
66 ipa_edge_args_sum_t *ipa_edge_args_sum;
68 /* Traits for a hash table for reusing already existing ipa_bits. */
70 struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
72 typedef ipa_bits *value_type;
73 typedef ipa_bits *compare_type;
74 static hashval_t
75 hash (const ipa_bits *p)
77 hashval_t t = (hashval_t) p->value.to_shwi ();
78 return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
80 static bool
81 equal (const ipa_bits *a, const ipa_bits *b)
83 return a->value == b->value && a->mask == b->mask;
85 static const bool empty_zero_p = true;
86 static void
87 mark_empty (ipa_bits *&p)
89 p = NULL;
91 static bool
92 is_empty (const ipa_bits *p)
94 return p == NULL;
96 static bool
97 is_deleted (const ipa_bits *p)
99 return p == reinterpret_cast<const ipa_bits *> (1);
101 static void
102 mark_deleted (ipa_bits *&p)
104 p = reinterpret_cast<ipa_bits *> (1);
108 /* Hash table to avoid repeated allocations of equal ipa_bits. */
109 static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
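/* For illustration, a canonicalizing lookup over this table can be written
   roughly as follows; this is a sketch with a hypothetical helper name
   (compare ipa_get_ipa_bits_for_value further down in this file):

     static ipa_bits *
     lookup_or_create_ipa_bits (const widest_int &value, const widest_int &mask)
     {
       ipa_bits tmp;
       tmp.value = value;
       tmp.mask = mask;
       ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
       if (!*slot)
         {
           ipa_bits *res = ggc_alloc<ipa_bits> ();
           res->value = value;
           res->mask = mask;
           *slot = res;
         }
       return *slot;
     }  */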
111 /* Traits for a hash table for reusing value_ranges used for IPA. Note that
112 the equiv bitmap is not hashed and is expected to be NULL. */
114 struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range *>
116 typedef value_range *value_type;
117 typedef value_range *compare_type;
118 static hashval_t
119 hash (const value_range *p)
121 inchash::hash hstate (p->kind ());
122 inchash::add_expr (p->min (), hstate);
123 inchash::add_expr (p->max (), hstate);
124 return hstate.end ();
126 static bool
127 equal (const value_range *a, const value_range *b)
129 return (a->equal_p (*b)
130 && types_compatible_p (a->type (), b->type ()));
132 static const bool empty_zero_p = true;
133 static void
134 mark_empty (value_range *&p)
136 p = NULL;
138 static bool
139 is_empty (const value_range *p)
141 return p == NULL;
143 static bool
144 is_deleted (const value_range *p)
146 return p == reinterpret_cast<const value_range *> (1);
148 static void
149 mark_deleted (value_range *&p)
151 p = reinterpret_cast<value_range *> (1);
156 /* Hash table to avoid repeated allocations of equal value_ranges. */
156 static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
158 /* Holders of ipa cgraph hooks: */
159 static struct cgraph_node_hook_list *function_insertion_hook_holder;
161 /* Description of a reference to an IPA constant. */
162 struct ipa_cst_ref_desc
164 /* Edge that corresponds to the statement which took the reference. */
165 struct cgraph_edge *cs;
166 /* Linked list of duplicates created when call graph edges are cloned. */
167 struct ipa_cst_ref_desc *next_duplicate;
168 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
169 is out of control. */
170 int refcount;
173 /* Allocation pool for reference descriptions. */
175 static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
176 ("IPA-PROP ref descriptions");
178 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
179 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
181 static bool
182 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
184 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
186 if (!fs_opts)
187 return false;
188 return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
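/* For example, a function compiled with per-function options that disable
   optimization is skipped by IPA-CP; a hypothetical

     __attribute__ ((optimize (0)))
     int keep_unanalyzed (int x)
     {
       return x + 1;
     }

   has a DECL_FUNCTION_SPECIFIC_OPTIMIZATION with optimize == 0, so the
   predicate above returns true for it. */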
191 /* Return index of the formal whose tree is PTREE in the function whose
192 parameters are described by DESCRIPTORS. */
194 static int
195 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
196 tree ptree)
198 int i, count;
200 count = vec_safe_length (descriptors);
201 for (i = 0; i < count; i++)
202 if ((*descriptors)[i].decl_or_type == ptree)
203 return i;
205 return -1;
208 /* Return index of the formal whose tree is PTREE in the function that
209 corresponds to INFO. */
212 ipa_get_param_decl_index (class ipa_node_params *info, tree ptree)
214 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
217 /* Populate the decl_or_type fields in parameter DESCRIPTORS that correspond
218 to the formal parameters of NODE. */
220 static void
221 ipa_populate_param_decls (struct cgraph_node *node,
222 vec<ipa_param_descriptor, va_gc> &descriptors)
224 tree fndecl;
225 tree fnargs;
226 tree parm;
227 int param_num;
229 fndecl = node->decl;
230 gcc_assert (gimple_has_body_p (fndecl));
231 fnargs = DECL_ARGUMENTS (fndecl);
232 param_num = 0;
233 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
235 descriptors[param_num].decl_or_type = parm;
236 unsigned int cost = estimate_move_cost (TREE_TYPE (parm), true);
237 descriptors[param_num].move_cost = cost;
238 /* Watch overflow, move_cost is a bitfield. */
239 gcc_checking_assert (cost == descriptors[param_num].move_cost);
240 param_num++;
244 /* Return how many formal parameters FNDECL has. */
247 count_formal_params (tree fndecl)
249 tree parm;
250 int count = 0;
251 gcc_assert (gimple_has_body_p (fndecl));
253 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
254 count++;
256 return count;
259 /* Dump information about the Ith formal parameter of the function
260 corresponding to INFO to FILE. Note there is no setter function as the
261 descriptor array is built just once using ipa_initialize_node_params. */
263 void
264 ipa_dump_param (FILE *file, class ipa_node_params *info, int i)
266 fprintf (file, "param #%i", i);
267 if ((*info->descriptors)[i].decl_or_type)
269 fprintf (file, " ");
270 print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
274 /* If necessary, allocate vector of parameter descriptors in info of NODE.
275 Return true if they were allocated, false if not. */
277 static bool
278 ipa_alloc_node_params (struct cgraph_node *node, int param_count)
280 ipa_node_params *info = ipa_node_params_sum->get_create (node);
282 if (!info->descriptors && param_count)
284 vec_safe_grow_cleared (info->descriptors, param_count, true);
285 return true;
287 else
288 return false;
291 /* Initialize the ipa_node_params structure associated with NODE by counting
292 the function parameters, creating the descriptors and populating their
293 param_decls. */
295 void
296 ipa_initialize_node_params (struct cgraph_node *node)
298 ipa_node_params *info = ipa_node_params_sum->get_create (node);
300 if (!info->descriptors
301 && ipa_alloc_node_params (node, count_formal_params (node->decl)))
302 ipa_populate_param_decls (node, *info->descriptors);
305 /* Print the jump functions associated with call graph edge CS to file F. */
307 static void
308 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
310 ipa_edge_args *args = ipa_edge_args_sum->get (cs);
311 int count = ipa_get_cs_argument_count (args);
313 for (int i = 0; i < count; i++)
315 struct ipa_jump_func *jump_func;
316 enum jump_func_type type;
318 jump_func = ipa_get_ith_jump_func (args, i);
319 type = jump_func->type;
321 fprintf (f, " param %d: ", i);
322 if (type == IPA_JF_UNKNOWN)
323 fprintf (f, "UNKNOWN\n");
324 else if (type == IPA_JF_CONST)
326 tree val = jump_func->value.constant.value;
327 fprintf (f, "CONST: ");
328 print_generic_expr (f, val);
329 if (TREE_CODE (val) == ADDR_EXPR
330 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
332 fprintf (f, " -> ");
333 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
335 fprintf (f, "\n");
337 else if (type == IPA_JF_PASS_THROUGH)
339 fprintf (f, "PASS THROUGH: ");
340 fprintf (f, "%d, op %s",
341 jump_func->value.pass_through.formal_id,
342 get_tree_code_name(jump_func->value.pass_through.operation));
343 if (jump_func->value.pass_through.operation != NOP_EXPR)
345 fprintf (f, " ");
346 print_generic_expr (f, jump_func->value.pass_through.operand);
348 if (jump_func->value.pass_through.agg_preserved)
349 fprintf (f, ", agg_preserved");
350 fprintf (f, "\n");
352 else if (type == IPA_JF_ANCESTOR)
354 fprintf (f, "ANCESTOR: ");
355 fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
356 jump_func->value.ancestor.formal_id,
357 jump_func->value.ancestor.offset);
358 if (jump_func->value.ancestor.agg_preserved)
359 fprintf (f, ", agg_preserved");
360 if (jump_func->value.ancestor.keep_null)
361 fprintf (f, ", keep_null");
362 fprintf (f, "\n");
365 if (jump_func->agg.items)
367 struct ipa_agg_jf_item *item;
368 int j;
370 fprintf (f, " Aggregate passed by %s:\n",
371 jump_func->agg.by_ref ? "reference" : "value");
372 FOR_EACH_VEC_ELT (*jump_func->agg.items, j, item)
374 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
375 item->offset);
376 fprintf (f, "type: ");
377 print_generic_expr (f, item->type);
378 fprintf (f, ", ");
379 if (item->jftype == IPA_JF_PASS_THROUGH)
380 fprintf (f, "PASS THROUGH: %d,",
381 item->value.pass_through.formal_id);
382 else if (item->jftype == IPA_JF_LOAD_AGG)
384 fprintf (f, "LOAD AGG: %d",
385 item->value.pass_through.formal_id);
386 fprintf (f, " [offset: " HOST_WIDE_INT_PRINT_DEC ", by %s],",
387 item->value.load_agg.offset,
388 item->value.load_agg.by_ref ? "reference"
389 : "value");
392 if (item->jftype == IPA_JF_PASS_THROUGH
393 || item->jftype == IPA_JF_LOAD_AGG)
395 fprintf (f, " op %s",
396 get_tree_code_name (item->value.pass_through.operation));
397 if (item->value.pass_through.operation != NOP_EXPR)
399 fprintf (f, " ");
400 print_generic_expr (f, item->value.pass_through.operand);
403 else if (item->jftype == IPA_JF_CONST)
405 fprintf (f, "CONST: ");
406 print_generic_expr (f, item->value.constant);
408 else if (item->jftype == IPA_JF_UNKNOWN)
409 fprintf (f, "UNKNOWN: " HOST_WIDE_INT_PRINT_DEC " bits",
410 tree_to_uhwi (TYPE_SIZE (item->type)));
411 fprintf (f, "\n");
415 class ipa_polymorphic_call_context *ctx
416 = ipa_get_ith_polymorhic_call_context (args, i);
417 if (ctx && !ctx->useless_p ())
419 fprintf (f, " Context: ");
420 ctx->dump (dump_file);
423 if (jump_func->bits)
425 fprintf (f, " value: ");
426 print_hex (jump_func->bits->value, f);
427 fprintf (f, ", mask: ");
428 print_hex (jump_func->bits->mask, f);
429 fprintf (f, "\n");
431 else
432 fprintf (f, " Unknown bits\n");
434 if (jump_func->m_vr)
436 fprintf (f, " VR ");
437 fprintf (f, "%s[",
438 (jump_func->m_vr->kind () == VR_ANTI_RANGE) ? "~" : "");
439 print_decs (wi::to_wide (jump_func->m_vr->min ()), f);
440 fprintf (f, ", ");
441 print_decs (wi::to_wide (jump_func->m_vr->max ()), f);
442 fprintf (f, "]\n");
444 else
445 fprintf (f, " Unknown VR\n");
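/* For illustration, for a call site passing the constant 4 as its first
   argument, the dump produced by the function above resembles (made-up
   values, whitespace approximate):

     param 0: CONST: 4
         value: 0x4, mask: 0x0
         VR [4, 4]  */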
450 /* Print the jump functions of all arguments on all call graph edges going from
451 NODE to file F. */
453 void
454 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
456 struct cgraph_edge *cs;
458 fprintf (f, " Jump functions of caller %s:\n", node->dump_name ());
459 for (cs = node->callees; cs; cs = cs->next_callee)
462 fprintf (f, " callsite %s -> %s : \n",
463 node->dump_name (),
464 cs->callee->dump_name ());
465 if (!ipa_edge_args_info_available_for_edge_p (cs))
466 fprintf (f, " no arg info\n");
467 else
468 ipa_print_node_jump_functions_for_edge (f, cs);
471 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
473 class cgraph_indirect_call_info *ii;
475 ii = cs->indirect_info;
476 if (ii->agg_contents)
477 fprintf (f, " indirect %s callsite, calling param %i, "
478 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
479 ii->member_ptr ? "member ptr" : "aggregate",
480 ii->param_index, ii->offset,
481 ii->by_ref ? "by reference" : "by value");
482 else
483 fprintf (f, " indirect %s callsite, calling param %i, "
484 "offset " HOST_WIDE_INT_PRINT_DEC,
485 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
486 ii->offset);
488 if (cs->call_stmt)
490 fprintf (f, ", for stmt ");
491 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
493 else
494 fprintf (f, "\n");
495 if (ii->polymorphic)
496 ii->context.dump (f);
497 if (!ipa_edge_args_info_available_for_edge_p (cs))
498 fprintf (f, " no arg info\n");
499 else
500 ipa_print_node_jump_functions_for_edge (f, cs);
504 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
506 void
507 ipa_print_all_jump_functions (FILE *f)
509 struct cgraph_node *node;
511 fprintf (f, "\nJump functions:\n");
512 FOR_EACH_FUNCTION (node)
514 ipa_print_node_jump_functions (f, node);
518 /* Set JFUNC to be an unknown jump function, one that tells us really nothing. */
520 static void
521 ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
523 jfunc->type = IPA_JF_UNKNOWN;
526 /* Set DST to be a copy of another jump function SRC (to be used by the jump
527 function combination code). The two functions will share their rdesc. */
529 static void
530 ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
531 struct ipa_jump_func *src)
534 gcc_checking_assert (src->type == IPA_JF_CONST);
535 dst->type = IPA_JF_CONST;
536 dst->value.constant = src->value.constant;
539 /* Set JFUNC to be a constant jump function. */
541 static void
542 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
543 struct cgraph_edge *cs)
545 jfunc->type = IPA_JF_CONST;
546 jfunc->value.constant.value = unshare_expr_without_location (constant);
548 if (TREE_CODE (constant) == ADDR_EXPR
549 && (TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL
550 || (TREE_CODE (TREE_OPERAND (constant, 0)) == VAR_DECL
551 && TREE_STATIC (TREE_OPERAND (constant, 0)))))
553 struct ipa_cst_ref_desc *rdesc;
555 rdesc = ipa_refdesc_pool.allocate ();
556 rdesc->cs = cs;
557 rdesc->next_duplicate = NULL;
558 rdesc->refcount = 1;
559 jfunc->value.constant.rdesc = rdesc;
561 else
562 jfunc->value.constant.rdesc = NULL;
565 /* Set JFUNC to be a simple pass-through jump function. */
566 static void
567 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
568 bool agg_preserved)
570 jfunc->type = IPA_JF_PASS_THROUGH;
571 jfunc->value.pass_through.operand = NULL_TREE;
572 jfunc->value.pass_through.formal_id = formal_id;
573 jfunc->value.pass_through.operation = NOP_EXPR;
574 jfunc->value.pass_through.agg_preserved = agg_preserved;
577 /* Set JFUNC to be a unary pass-through jump function. */
579 static void
580 ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
581 enum tree_code operation)
583 jfunc->type = IPA_JF_PASS_THROUGH;
584 jfunc->value.pass_through.operand = NULL_TREE;
585 jfunc->value.pass_through.formal_id = formal_id;
586 jfunc->value.pass_through.operation = operation;
587 jfunc->value.pass_through.agg_preserved = false;
589 /* Set JFUNC to be an arithmetic pass-through jump function. */
591 static void
592 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
593 tree operand, enum tree_code operation)
595 jfunc->type = IPA_JF_PASS_THROUGH;
596 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
597 jfunc->value.pass_through.formal_id = formal_id;
598 jfunc->value.pass_through.operation = operation;
599 jfunc->value.pass_through.agg_preserved = false;
602 /* Set JFUNC to be an ancestor jump function. */
604 static void
605 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
606 int formal_id, bool agg_preserved, bool keep_null)
608 jfunc->type = IPA_JF_ANCESTOR;
609 jfunc->value.ancestor.formal_id = formal_id;
610 jfunc->value.ancestor.offset = offset;
611 jfunc->value.ancestor.agg_preserved = agg_preserved;
612 jfunc->value.ancestor.keep_null = keep_null;
615 /* Get IPA BB information about the given BB. FBI is the context of analysis
616 of this function body. */
618 static struct ipa_bb_info *
619 ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
621 gcc_checking_assert (fbi);
622 return &fbi->bb_infos[bb->index];
625 /* Structure to be passed in between detect_type_change and
626 check_stmt_for_type_change. */
628 struct prop_type_change_info
630 /* Offset into the object where there is the virtual method pointer we are
631 looking for. */
632 HOST_WIDE_INT offset;
633 /* The declaration or SSA_NAME pointer of the base that we are checking for
634 type change. */
635 tree object;
636 /* Set to true if dynamic type change has been detected. */
637 bool type_maybe_changed;
640 /* Return true if STMT can modify a virtual method table pointer.
642 This function makes special assumptions about both constructors and
643 destructors which are all the functions that are allowed to alter the VMT
644 pointers. It assumes that destructors begin with assignment into all VMT
645 pointers and that constructors essentially look as follows:
647 1) The very first thing they do is that they call constructors of ancestor
648 sub-objects that have them.
650 2) Then the VMT pointers of this object and all its ancestors are set to
651 new values corresponding to the type of the constructor.
653 3) Only afterwards, other stuff such as constructors of member sub-objects
654 and the code written by the user is run. Only this may include calling
655 virtual functions, directly or indirectly.
657 There is no way to call a constructor of an ancestor sub-object in any
658 other way.
660 This means that we do not have to care whether constructors get the correct
661 type information because they will always change it (in fact, if we define
662 the type to be given by the VMT pointer, it is undefined).
664 The most important fact to derive from the above is that if, for some
665 statement in section 3, we try to detect whether the dynamic type has
666 changed, we can safely ignore all calls as we examine the function body
667 backwards until we reach statements in section 2 because these calls cannot
668 be ancestor constructors or destructors (if the input is not bogus) and so
669 do not change the dynamic type (this holds true only for automatically
670 allocated objects but at the moment we devirtualize only these). We then
671 must detect that statements in section 2 change the dynamic type and can try
672 to derive the new type. That is enough and we can stop, we will never see
673 the calls into constructors of sub-objects in this code. Therefore we can
674 safely ignore all call statements that we traverse.
677 static bool
678 stmt_may_be_vtbl_ptr_store (gimple *stmt)
680 if (is_gimple_call (stmt))
681 return false;
682 if (gimple_clobber_p (stmt))
683 return false;
684 else if (is_gimple_assign (stmt))
686 tree lhs = gimple_assign_lhs (stmt);
688 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
690 if (flag_strict_aliasing
691 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
692 return false;
694 if (TREE_CODE (lhs) == COMPONENT_REF
695 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
696 return false;
697 /* In the future we might want to use get_ref_base_and_extent to find
698 if there is a field corresponding to the offset and if so, proceed
699 almost as if it were a component ref. */
702 return true;
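/* For illustration, in a hypothetical C++ constructor

     B::B () : A (), m ()
     {
       use (this);
     }

   the call to A::A () is section 1 above, the compiler-inserted stores that
   set the VMT pointers of B and all its ancestors are section 2, and both
   the construction of member m and the call to use () belong to section 3. */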
705 /* Callback of walk_aliased_vdefs and a helper function for detect_type_change
706 to check whether a particular statement may modify the virtual table
707 pointer. It stores its result into DATA, which points to a
708 prop_type_change_info structure. */
710 static bool
711 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
713 gimple *stmt = SSA_NAME_DEF_STMT (vdef);
714 struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
716 if (stmt_may_be_vtbl_ptr_store (stmt))
718 tci->type_maybe_changed = true;
719 return true;
721 else
722 return false;
725 /* See if ARG is a PARAM_DECL describing an instance passed by pointer
726 or reference in FUNCTION. Return true if the dynamic type may change
727 between the beginning of the function and the point where CALL is invoked.
729 Generally functions are not allowed to change the type of such instances,
730 but they may call destructors. We assume that methods cannot destroy the
731 THIS pointer. Also, as special cases, constructors and destructors may
732 change the type of the THIS pointer. */
734 static bool
735 param_type_may_change_p (tree function, tree arg, gimple *call)
737 /* Pure functions cannot do any changes on the dynamic type; such
738 changes require writing to memory. */
739 if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
740 return false;
741 /* We need to check if we are within an inlined constructor
742 or destructor (ideally we would have a way to check that the
743 inlined cdtor is actually working on ARG, but we don't have an
744 easy tie on this, so punt on all non-pure cdtors).
745 We may also record the types of cdtors and, once we know the type
746 of the instance, match them.
748 Also, code unification optimizations may merge calls from
749 different blocks, making return values unreliable. So
750 do nothing during late optimization.
751 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
752 return true;
753 if (TREE_CODE (arg) == SSA_NAME
754 && SSA_NAME_IS_DEFAULT_DEF (arg)
755 && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
757 /* Normal (non-THIS) argument. */
758 if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
759 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
760 /* THIS pointer of an method - here we want to watch constructors
761 and destructors as those definitely may change the dynamic
762 type. */
763 || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
764 && !DECL_CXX_CONSTRUCTOR_P (function)
765 && !DECL_CXX_DESTRUCTOR_P (function)
766 && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
768 /* Walk the inline stack and watch out for ctors/dtors. */
769 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
770 block = BLOCK_SUPERCONTEXT (block))
771 if (inlined_polymorphic_ctor_dtor_block_p (block, false))
772 return true;
773 return false;
776 return true;
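/* For example, given the hypothetical

     struct A { virtual void f (); };
     void foo (A *a) { a->f (); }

   the dynamic type of *a may not change between the beginning of foo and
   the call (foo is neither a cdtor nor past inlining, and no inlined cdtor
   shows up on the inline stack of the call), so the function above returns
   false for it. */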
779 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
780 callsite CALL) by looking for assignments to its virtual table pointer. If
782 it has, return true. ARG is the object itself (not a pointer
782 to it, unless dereferenced). BASE is the base of the memory access as
783 returned by get_ref_base_and_extent, as is the offset.
785 This is a helper function for detect_type_change and detect_type_change_ssa
786 that does the heavy work, which is usually unnecessary. */
788 static bool
789 detect_type_change_from_memory_writes (ipa_func_body_info *fbi, tree arg,
790 tree base, tree comp_type, gcall *call,
791 HOST_WIDE_INT offset)
793 struct prop_type_change_info tci;
794 ao_ref ao;
796 gcc_checking_assert (DECL_P (arg)
797 || TREE_CODE (arg) == MEM_REF
798 || handled_component_p (arg));
800 comp_type = TYPE_MAIN_VARIANT (comp_type);
802 /* Const calls cannot call virtual methods through VMT and so type changes do
803 not matter. */
804 if (!flag_devirtualize || !gimple_vuse (call)
805 /* Be sure expected_type is polymorphic. */
806 || !comp_type
807 || TREE_CODE (comp_type) != RECORD_TYPE
808 || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
809 || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
810 return true;
812 if (fbi->aa_walk_budget == 0)
813 return false;
815 ao_ref_init (&ao, arg);
816 ao.base = base;
817 ao.offset = offset;
818 ao.size = POINTER_SIZE;
819 ao.max_size = ao.size;
821 tci.offset = offset;
822 tci.object = get_base_address (arg);
823 tci.type_maybe_changed = false;
825 int walked
826 = walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
827 &tci, NULL, NULL, fbi->aa_walk_budget);
828 if (walked >= 0)
829 fbi->aa_walk_budget -= walked;
830 else
831 fbi->aa_walk_budget = 0;
833 if (walked >= 0 && !tci.type_maybe_changed)
834 return false;
836 return true;
839 /* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
840 If so, return true. ARG is the object itself (not a pointer
841 to it, unless dereferenced). BASE is the base of the memory access as
842 returned by get_ref_base_and_extent, as is the offset. */
844 static bool
845 detect_type_change (ipa_func_body_info *fbi, tree arg, tree base,
846 tree comp_type, gcall *call,
847 HOST_WIDE_INT offset)
849 if (!flag_devirtualize)
850 return false;
852 if (TREE_CODE (base) == MEM_REF
853 && !param_type_may_change_p (current_function_decl,
854 TREE_OPERAND (base, 0),
855 call))
856 return false;
857 return detect_type_change_from_memory_writes (fbi, arg, base, comp_type,
858 call, offset);
861 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
862 SSA name (its dereference will become the base and the offset is assumed to
863 be zero). */
865 static bool
866 detect_type_change_ssa (ipa_func_body_info *fbi, tree arg, tree comp_type,
867 gcall *call)
869 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
870 if (!flag_devirtualize
871 || !POINTER_TYPE_P (TREE_TYPE (arg)))
872 return false;
874 if (!param_type_may_change_p (current_function_decl, arg, call))
875 return false;
877 arg = build2 (MEM_REF, ptr_type_node, arg,
878 build_int_cst (ptr_type_node, 0));
880 return detect_type_change_from_memory_writes (fbi, arg, arg, comp_type,
881 call, 0);
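/* In other words, for a pointer argument p_1 the function above performs
   the check on the dereference

     MEM[(void *) p_1 + 0B]

   i.e. on the pointed-to object at offset 0, which is exactly what the
   MEM_REF built by build2 above represents. */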
884 /* Callback of walk_aliased_vdefs. Records the fact that it was invoked
885 into the boolean variable pointed to by DATA. */
887 static bool
888 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
889 void *data)
891 bool *b = (bool *) data;
892 *b = true;
893 return true;
896 /* Find the nearest valid aa status for parameter specified by INDEX that
897 dominates BB. */
899 static struct ipa_param_aa_status *
900 find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
901 int index)
903 while (true)
905 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
906 if (!bb)
907 return NULL;
908 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
909 if (!bi->param_aa_statuses.is_empty ()
910 && bi->param_aa_statuses[index].valid)
911 return &bi->param_aa_statuses[index];
915 /* Get AA status structure for the given BB and parameter with INDEX. Allocate
916 structures and/or initialize the result with a dominating description as
917 necessary. */
919 static struct ipa_param_aa_status *
920 parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
921 int index)
923 gcc_checking_assert (fbi);
924 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
925 if (bi->param_aa_statuses.is_empty ())
926 bi->param_aa_statuses.safe_grow_cleared (fbi->param_count, true);
927 struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
928 if (!paa->valid)
930 gcc_checking_assert (!paa->parm_modified
931 && !paa->ref_modified
932 && !paa->pt_modified);
933 struct ipa_param_aa_status *dom_paa;
934 dom_paa = find_dominating_aa_status (fbi, bb, index);
935 if (dom_paa)
936 *paa = *dom_paa;
937 else
938 paa->valid = true;
941 return paa;
944 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
945 a value known not to be modified in this function before reaching the
946 statement STMT. FBI holds information about the function that we have
947 gathered so far but that does not survive the summary-building stage. */
949 static bool
950 parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
951 gimple *stmt, tree parm_load)
953 struct ipa_param_aa_status *paa;
954 bool modified = false;
955 ao_ref refd;
957 tree base = get_base_address (parm_load);
958 gcc_assert (TREE_CODE (base) == PARM_DECL);
959 if (TREE_READONLY (base))
960 return true;
962 gcc_checking_assert (fbi);
963 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
964 if (paa->parm_modified || fbi->aa_walk_budget == 0)
965 return false;
967 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
968 ao_ref_init (&refd, parm_load);
969 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
970 &modified, NULL, NULL,
971 fbi->aa_walk_budget);
972 if (walked < 0)
974 modified = true;
975 fbi->aa_walk_budget = 0;
977 else
978 fbi->aa_walk_budget -= walked;
979 if (paa && modified)
980 paa->parm_modified = true;
981 return !modified;
984 /* If STMT is an assignment that loads a value from a parameter declaration
985 which has not been modified, return the index of the parameter in
986 ipa_node_params. Otherwise return -1. */
988 static int
989 load_from_unmodified_param (struct ipa_func_body_info *fbi,
990 vec<ipa_param_descriptor, va_gc> *descriptors,
991 gimple *stmt)
993 int index;
994 tree op1;
996 if (!gimple_assign_single_p (stmt))
997 return -1;
999 op1 = gimple_assign_rhs1 (stmt);
1000 if (TREE_CODE (op1) != PARM_DECL)
1001 return -1;
1003 index = ipa_get_param_decl_index_1 (descriptors, op1);
1004 if (index < 0
1005 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
1006 return -1;
1008 return index;
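/* E.g. for a parameter "int a" that is not a gimple register, a statement
   such as the hypothetical GIMPLE

     a.0_2 = a;

   yields the index of a, provided the alias-oracle walk above can show that
   nothing may have stored to a before this statement. */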
1011 /* Return true if memory reference REF (which must be a load through parameter
1012 with INDEX) loads data that are known to be unmodified in this function
1013 before reaching statement STMT. */
1015 static bool
1016 parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
1017 int index, gimple *stmt, tree ref)
1019 struct ipa_param_aa_status *paa;
1020 bool modified = false;
1021 ao_ref refd;
1023 gcc_checking_assert (fbi);
1024 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
1025 if (paa->ref_modified || fbi->aa_walk_budget == 0)
1026 return false;
1028 gcc_checking_assert (gimple_vuse (stmt));
1029 ao_ref_init (&refd, ref);
1030 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
1031 &modified, NULL, NULL,
1032 fbi->aa_walk_budget);
1033 if (walked < 0)
1035 modified = true;
1036 fbi->aa_walk_budget = 0;
1038 else
1039 fbi->aa_walk_budget -= walked;
1040 if (modified)
1041 paa->ref_modified = true;
1042 return !modified;
1045 /* Return true if the data pointed to by PARM (which is a parameter with INDEX)
1046 is known to be unmodified in this function before reaching call statement
1047 CALL into which it is passed. FBI describes the function body. */
1049 static bool
1050 parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
1051 gimple *call, tree parm)
1053 bool modified = false;
1054 ao_ref refd;
1056 /* It's unnecessary to calculate anything about memory contents for a const
1057 function because it is not going to use it. But do not cache the result
1058 either. Also, no such calculations for non-pointers. */
1059 if (!gimple_vuse (call)
1060 || !POINTER_TYPE_P (TREE_TYPE (parm)))
1061 return false;
1063 struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
1064 gimple_bb (call),
1065 index);
1066 if (paa->pt_modified || fbi->aa_walk_budget == 0)
1067 return false;
1069 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
1070 int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
1071 &modified, NULL, NULL,
1072 fbi->aa_walk_budget);
1073 if (walked < 0)
1075 fbi->aa_walk_budget = 0;
1076 modified = true;
1078 else
1079 fbi->aa_walk_budget -= walked;
1080 if (modified)
1081 paa->pt_modified = true;
1082 return !modified;
1085 /* Return true if we can prove that OP is a memory reference loading
1086 data from an aggregate passed as a parameter.
1088 The function works in two modes. If GUARANTEED_UNMODIFIED is NULL, it returns
1089 false if it cannot prove that the value has not been modified before the
1090 load in STMT. If GUARANTEED_UNMODIFIED is not NULL, it will return true even
1091 if it cannot prove the value has not been modified; in that case it will
1092 store false to *GUARANTEED_UNMODIFIED, otherwise it will store true there.
1094 INFO and PARMS_AINFO describe parameters of the current function (but the
1095 latter can be NULL), STMT is the load statement. If function returns true,
1096 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
1097 within the aggregate and whether it is a load from a value passed by
1098 reference respectively. */
1100 bool
1101 ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
1102 vec<ipa_param_descriptor, va_gc> *descriptors,
1103 gimple *stmt, tree op, int *index_p,
1104 HOST_WIDE_INT *offset_p, poly_int64 *size_p,
1105 bool *by_ref_p, bool *guaranteed_unmodified)
1107 int index;
1108 HOST_WIDE_INT size;
1109 bool reverse;
1110 tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);
1112 if (!base)
1113 return false;
1115 /* We cannot propagate across volatile loads. */
1116 if (TREE_THIS_VOLATILE (op))
1117 return false;
1119 if (DECL_P (base))
1121 int index = ipa_get_param_decl_index_1 (descriptors, base);
1122 if (index >= 0
1123 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
1125 *index_p = index;
1126 *by_ref_p = false;
1127 if (size_p)
1128 *size_p = size;
1129 if (guaranteed_unmodified)
1130 *guaranteed_unmodified = true;
1131 return true;
1133 return false;
1136 if (TREE_CODE (base) != MEM_REF
1137 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
1138 || !integer_zerop (TREE_OPERAND (base, 1)))
1139 return false;
1141 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
1143 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
1144 index = ipa_get_param_decl_index_1 (descriptors, parm);
1146 else
1148 /* This branch catches situations where a pointer parameter is not a
1149 gimple register, for example:
1151 void hip7(S*) (struct S * p)
1153 void (*<T2e4>) (struct S *) D.1867;
1154 struct S * p.1;
1156 <bb 2>:
1157 p.1_1 = p;
1158 D.1867_2 = p.1_1->f;
1159 D.1867_2 ();
1160 gdp = &p;
1163 gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
1164 index = load_from_unmodified_param (fbi, descriptors, def);
1167 if (index >= 0)
1169 bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
1170 if (!data_preserved && !guaranteed_unmodified)
1171 return false;
1173 *index_p = index;
1174 *by_ref_p = true;
1175 if (size_p)
1176 *size_p = size;
1177 if (guaranteed_unmodified)
1178 *guaranteed_unmodified = data_preserved;
1179 return true;
1181 return false;
1184 /* If STMT is an assignment that loads a value from a parameter declaration,
1185 or from an aggregate passed as the parameter either by value or reference,
1186 return the index of the parameter in ipa_node_params. Otherwise return -1.
1188 FBI holds gathered information about the function. INFO describes
1189 parameters of the function, STMT is the assignment statement. If it is a
1190 memory load from an aggregate, *OFFSET_P is filled with offset within the
1191 aggregate, and *BY_REF_P specifies whether the aggregate is passed by
1192 reference. */
1194 static int
1195 load_from_unmodified_param_or_agg (struct ipa_func_body_info *fbi,
1196 class ipa_node_params *info,
1197 gimple *stmt,
1198 HOST_WIDE_INT *offset_p,
1199 bool *by_ref_p)
1201 int index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1202 poly_int64 size;
1204 /* Load value from a parameter declaration. */
1205 if (index >= 0)
1207 *offset_p = -1;
1208 return index;
1211 if (!gimple_assign_load_p (stmt))
1212 return -1;
1214 tree rhs = gimple_assign_rhs1 (stmt);
1216 /* Skip memory reference containing VIEW_CONVERT_EXPR. */
1217 for (tree t = rhs; handled_component_p (t); t = TREE_OPERAND (t, 0))
1218 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
1219 return -1;
1221 /* Skip memory reference containing bit-field. */
1222 if (TREE_CODE (rhs) == BIT_FIELD_REF
1223 || contains_bitfld_component_ref_p (rhs))
1224 return -1;
1226 if (!ipa_load_from_parm_agg (fbi, info->descriptors, stmt, rhs, &index,
1227 offset_p, &size, by_ref_p))
1228 return -1;
1230 gcc_assert (!maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (rhs))),
1231 size));
1232 if (!*by_ref_p)
1234 tree param_type = ipa_get_type (info, index);
1236 if (!param_type || !AGGREGATE_TYPE_P (param_type))
1237 return -1;
1239 else if (TREE_THIS_VOLATILE (rhs))
1240 return -1;
1242 return index;
1245 /* Walk pointer adjustments from OP (such as POINTER_PLUS and ADDR_EXPR)
1246 to find the original pointer. Initialize RET to the pointer which results
1247 from the walk.
1248 If the offset is known, return true and initialize OFFSET_RET. */
1250 bool
1251 unadjusted_ptr_and_unit_offset (tree op, tree *ret, poly_int64 *offset_ret)
1253 poly_int64 offset = 0;
1254 bool offset_known = true;
1255 int i;
1257 for (i = 0; i < param_ipa_jump_function_lookups; i++)
1259 if (TREE_CODE (op) == ADDR_EXPR)
1261 poly_int64 extra_offset = 0;
1262 tree base = get_addr_base_and_unit_offset (TREE_OPERAND (op, 0),
1263 &extra_offset);
1264 if (!base)
1266 base = get_base_address (TREE_OPERAND (op, 0));
1267 if (TREE_CODE (base) != MEM_REF)
1268 break;
1269 offset_known = false;
1271 else
1273 if (TREE_CODE (base) != MEM_REF)
1274 break;
1275 offset += extra_offset;
1277 op = TREE_OPERAND (base, 0);
1278 if (mem_ref_offset (base).to_shwi (&extra_offset))
1279 offset += extra_offset;
1280 else
1281 offset_known = false;
1283 else if (TREE_CODE (op) == SSA_NAME
1284 && !SSA_NAME_IS_DEFAULT_DEF (op))
1286 gimple *pstmt = SSA_NAME_DEF_STMT (op);
1288 if (gimple_assign_single_p (pstmt))
1289 op = gimple_assign_rhs1 (pstmt);
1290 else if (is_gimple_assign (pstmt)
1291 && gimple_assign_rhs_code (pstmt) == POINTER_PLUS_EXPR)
1293 poly_int64 extra_offset = 0;
1294 if (ptrdiff_tree_p (gimple_assign_rhs2 (pstmt),
1295 &extra_offset))
1296 offset += extra_offset;
1297 else
1298 offset_known = false;
1299 op = gimple_assign_rhs1 (pstmt);
1301 else
1302 break;
1304 else
1305 break;
1307 *ret = op;
1308 *offset_ret = offset;
1309 return offset_known;
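/* For example (hypothetical GIMPLE), starting from q_2 in

     p_1 = &MEM[(char *) s_5 + 8B];
     q_2 = p_1 + 16;

   the walk above sets *RET to s_5 and *OFFSET_RET to 24 (bytes) and returns
   true; had the POINTER_PLUS_EXPR added a non-constant amount, *RET would
   still be initialized but the function would return false. */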
1312 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
1313 of an assignment statement STMT, try to determine whether we are actually
1314 handling any of the following cases and construct an appropriate jump
1315 function into JFUNC if so:
1317 1) The passed value is loaded from a formal parameter which is not a gimple
1318 register (most probably because it is addressable, the value has to be
1319 scalar) and we can guarantee the value has not changed. This case can
1320 therefore be described by a simple pass-through jump function. For example:
1322 foo (int a)
1324 int a.0;
1326 a.0_2 = a;
1327 bar (a.0_2);
1329 2) The passed value can be described by a simple arithmetic pass-through
1330 jump function. E.g.
1332 foo (int a)
1334 int D.2064;
1336 D.2064_4 = a.1(D) + 4;
1337 bar (D.2064_4);
1339 This case can also occur in combination of the previous one, e.g.:
1341 foo (int a, int z)
1343 int a.0;
1344 int D.2064;
1346 a.0_3 = a;
1347 D.2064_4 = a.0_3 + 4;
1348 foo (D.2064_4);
1350 3) The passed value is an address of an object within another one (which
1351 also passed by reference). Such situations are described by an ancestor
1352 jump function and describe situations such as:
1354 B::foo() (struct B * const this)
1356 struct A * D.1845;
1358 D.1845_2 = &this_1(D)->D.1748;
1359 A::bar (D.1845_2);
1361 INFO is the structure describing how individual parameters are accessed in
1362 different stages of IPA optimizations. PARMS_AINFO contains the information
1363 that is only needed for intraprocedural analysis. */
1365 static void
1366 compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
1367 class ipa_node_params *info,
1368 struct ipa_jump_func *jfunc,
1369 gcall *call, gimple *stmt, tree name,
1370 tree param_type)
1372 HOST_WIDE_INT offset, size;
1373 tree op1, tc_ssa, base, ssa;
1374 bool reverse;
1375 int index;
1377 op1 = gimple_assign_rhs1 (stmt);
1379 if (TREE_CODE (op1) == SSA_NAME)
1381 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1382 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1383 else
1384 index = load_from_unmodified_param (fbi, info->descriptors,
1385 SSA_NAME_DEF_STMT (op1));
1386 tc_ssa = op1;
1388 else
1390 index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1391 tc_ssa = gimple_assign_lhs (stmt);
1394 if (index >= 0)
1396 switch (gimple_assign_rhs_class (stmt))
1398 case GIMPLE_BINARY_RHS:
1400 tree op2 = gimple_assign_rhs2 (stmt);
1401 if (!is_gimple_ip_invariant (op2)
1402 || ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
1403 != tcc_comparison)
1404 && !useless_type_conversion_p (TREE_TYPE (name),
1405 TREE_TYPE (op1))))
1406 return;
1408 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1409 gimple_assign_rhs_code (stmt));
1410 break;
1412 case GIMPLE_SINGLE_RHS:
1414 bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
1415 tc_ssa);
1416 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1417 break;
1419 case GIMPLE_UNARY_RHS:
1420 if (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
1421 ipa_set_jf_unary_pass_through (jfunc, index,
1422 gimple_assign_rhs_code (stmt));
1423 default:;
1425 return;
1428 if (TREE_CODE (op1) != ADDR_EXPR)
1429 return;
1430 op1 = TREE_OPERAND (op1, 0);
1431 base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
1432 offset_int mem_offset;
1433 if (!base
1434 || TREE_CODE (base) != MEM_REF
1435 || !mem_ref_offset (base).is_constant (&mem_offset))
1436 return;
1437 offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
1438 ssa = TREE_OPERAND (base, 0);
1439 if (TREE_CODE (ssa) != SSA_NAME
1440 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
1441 || offset < 0)
1442 return;
1444 /* Dynamic types are changed in constructors and destructors. */
1445 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
1446 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
1447 ipa_set_ancestor_jf (jfunc, offset, index,
1448 parm_ref_data_pass_through_p (fbi, index, call, ssa),
1449 false);
1452 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1453 it looks like:
1455 iftmp.1_3 = &obj_2(D)->D.1762;
1457 The base of the MEM_REF must be a default definition SSA NAME of a
1458 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
1459 whole MEM_REF expression is returned and the offset calculated from any
1460 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1461 RHS stripped of the ADDR_EXPR is stored into *OBJ_P. */
1463 static tree
1464 get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
1466 HOST_WIDE_INT size;
1467 tree expr, parm, obj;
1468 bool reverse;
1470 if (!gimple_assign_single_p (assign))
1471 return NULL_TREE;
1472 expr = gimple_assign_rhs1 (assign);
1474 if (TREE_CODE (expr) != ADDR_EXPR)
1475 return NULL_TREE;
1476 expr = TREE_OPERAND (expr, 0);
1477 obj = expr;
1478 expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);
1480 offset_int mem_offset;
1481 if (!expr
1482 || TREE_CODE (expr) != MEM_REF
1483 || !mem_ref_offset (expr).is_constant (&mem_offset))
1484 return NULL_TREE;
1485 parm = TREE_OPERAND (expr, 0);
1486 if (TREE_CODE (parm) != SSA_NAME
1487 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1488 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1489 return NULL_TREE;
1491 *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
1492 *obj_p = obj;
1493 return expr;
1497 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1498 statement PHI, try to find out whether NAME is in fact a
1499 multiple-inheritance typecast from a descendant into an ancestor of a formal
1500 parameter and thus can be described by an ancestor jump function and if so,
1501 write the appropriate function into JFUNC.
1503 Essentially we want to match the following pattern:
1505 if (obj_2(D) != 0B)
1506 goto <bb 3>;
1507 else
1508 goto <bb 4>;
1510 <bb 3>:
1511 iftmp.1_3 = &obj_2(D)->D.1762;
1513 <bb 4>:
1514 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1515 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1516 return D.1879_6; */
1518 static void
1519 compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
1520 class ipa_node_params *info,
1521 struct ipa_jump_func *jfunc,
1522 gcall *call, gphi *phi)
1524 HOST_WIDE_INT offset;
1525 gimple *assign, *cond;
1526 basic_block phi_bb, assign_bb, cond_bb;
1527 tree tmp, parm, expr, obj;
1528 int index, i;
1530 if (gimple_phi_num_args (phi) != 2)
1531 return;
1533 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1534 tmp = PHI_ARG_DEF (phi, 0);
1535 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1536 tmp = PHI_ARG_DEF (phi, 1);
1537 else
1538 return;
1539 if (TREE_CODE (tmp) != SSA_NAME
1540 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1541 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1542 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1543 return;
1545 assign = SSA_NAME_DEF_STMT (tmp);
1546 assign_bb = gimple_bb (assign);
1547 if (!single_pred_p (assign_bb))
1548 return;
1549 expr = get_ancestor_addr_info (assign, &obj, &offset);
1550 if (!expr)
1551 return;
1552 parm = TREE_OPERAND (expr, 0);
1553 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1554 if (index < 0)
1555 return;
1557 cond_bb = single_pred (assign_bb);
1558 cond = last_stmt (cond_bb);
1559 if (!cond
1560 || gimple_code (cond) != GIMPLE_COND
1561 || gimple_cond_code (cond) != NE_EXPR
1562 || gimple_cond_lhs (cond) != parm
1563 || !integer_zerop (gimple_cond_rhs (cond)))
1564 return;
1566 phi_bb = gimple_bb (phi);
1567 for (i = 0; i < 2; i++)
1569 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1570 if (pred != assign_bb && pred != cond_bb)
1571 return;
1574 ipa_set_ancestor_jf (jfunc, offset, index,
1575 parm_ref_data_pass_through_p (fbi, index, call, parm),
1576 true);
1579 /* Inspect the given TYPE and return true iff it has the same structure (the
1580 same number of fields of the same types) as a C++ member pointer. If
1581 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1582 corresponding fields there. */
1584 static bool
1585 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1587 tree fld;
1589 if (TREE_CODE (type) != RECORD_TYPE)
1590 return false;
1592 fld = TYPE_FIELDS (type);
1593 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1594 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1595 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1596 return false;
1598 if (method_ptr)
1599 *method_ptr = fld;
1601 fld = DECL_CHAIN (fld);
1602 if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
1603 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1604 return false;
1605 if (delta)
1606 *delta = fld;
1608 if (DECL_CHAIN (fld))
1609 return false;
1611 return true;
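/* For illustration, the layout recognized above corresponds to the common
   (e.g. Itanium C++ ABI) lowering of a C++ pointer to member function:

     struct
     {
       void (T::*__pfn) ();  // a pointer to a METHOD_TYPE
       ptrdiff_t __delta;    // integral adjustment of the THIS pointer
     };

   where __pfn matches the method-pointer check on the first field and
   __delta the integral check on the second. */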
1614 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1615 return the rhs of its defining statement, and this statement is stored in
1616 *RHS_STMT. Otherwise return RHS as it is. */
1618 static inline tree
1619 get_ssa_def_if_simple_copy (tree rhs, gimple **rhs_stmt)
1621 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1623 gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
1625 if (gimple_assign_single_p (def_stmt))
1626 rhs = gimple_assign_rhs1 (def_stmt);
1627 else
1628 break;
1629 *rhs_stmt = def_stmt;
1631 return rhs;
1634 /* Simple linked list, describing contents of an aggregate before call. */
1636 struct ipa_known_agg_contents_list
1638 /* Offset and size of the described part of the aggregate. */
1639 HOST_WIDE_INT offset, size;
1641 /* Type of the described part of the aggregate. */
1642 tree type;
1644 /* Known constant value or jump function data describing contents. */
1645 struct ipa_load_agg_data value;
1647 /* Pointer to the next structure in the list. */
1648 struct ipa_known_agg_contents_list *next;
1651 /* Add an aggregate content item into a linked list of
1652 ipa_known_agg_contents_list structure, in which all elements
1653 are sorted by offset in ascending order. */
1655 static inline void
1656 add_to_agg_contents_list (struct ipa_known_agg_contents_list **plist,
1657 struct ipa_known_agg_contents_list *item)
1659 struct ipa_known_agg_contents_list *list = *plist;
1661 for (; list; list = list->next)
1663 if (list->offset >= item->offset)
1664 break;
1666 plist = &list->next;
1669 item->next = list;
1670 *plist = item;
1673 /* Check whether a given aggregate content is clobbered by a certain element
1674 in a linked list of ipa_known_agg_contents_list. */
1676 static inline bool
1677 clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list *list,
1678 struct ipa_known_agg_contents_list *item)
1680 for (; list; list = list->next)
1682 if (list->offset >= item->offset)
1683 return list->offset < item->offset + item->size;
1685 if (list->offset + list->size > item->offset)
1686 return true;
1689 return false;
1692 /* Build aggregate jump function from LIST, assuming there are exactly
1693 VALUE_COUNT entries there and that the offset of the passed argument
1694 is ARG_OFFSET, and store it into JFUNC. */
1696 static void
1697 build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1698 int value_count, HOST_WIDE_INT arg_offset,
1699 struct ipa_jump_func *jfunc)
1701 vec_safe_reserve (jfunc->agg.items, value_count, true);
1702 for (; list; list = list->next)
1704 struct ipa_agg_jf_item item;
1705 tree operand = list->value.pass_through.operand;
1707 if (list->value.pass_through.formal_id >= 0)
1709 /* Content value is derived from some formal parameter. */
1710 if (list->value.offset >= 0)
1711 item.jftype = IPA_JF_LOAD_AGG;
1712 else
1713 item.jftype = IPA_JF_PASS_THROUGH;
1715 item.value.load_agg = list->value;
1716 if (operand)
1717 item.value.pass_through.operand
1718 = unshare_expr_without_location (operand);
1720 else if (operand)
1722 /* Content value is known constant. */
1723 item.jftype = IPA_JF_CONST;
1724 item.value.constant = unshare_expr_without_location (operand);
1726 else
1727 continue;
1729 item.type = list->type;
1730 gcc_assert (tree_to_shwi (TYPE_SIZE (list->type)) == list->size);
1732 item.offset = list->offset - arg_offset;
1733 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1735 jfunc->agg.items->quick_push (item);
1739 /* Given an assignment statement STMT, try to collect information into
1740 AGG_VALUE that will be used to construct a jump function for the RHS of the
1741 assignment, from which the content value of an aggregate part comes.
1743 Besides constant and simple pass-through jump functions, also try to
1744 identify whether it matches the following pattern that can be described by
1745 a load-value-from-aggregate jump function, which is a derivative of simple
1746 pass-through jump function.
1748 foo (int *p)
1752 *(q_5 + 4) = *(p_3(D) + 28) op 1;
1753 bar (q_5);
1756 Here IPA_LOAD_AGG_DATA data structure is informative enough to describe
1757 constant, simple pass-through and load-value-from-aggregate. If the value
1758 is constant, it will be kept in field OPERAND, and field FORMAL_ID is
1759 set to -1. For simple pass-through and load-value-from-aggregate, field
1760 FORMAL_ID specifies the related formal parameter index, and field
1761 OFFSET can be used to distinguish them, -1 means simple pass-through,
1762 otherwise means load-value-from-aggregate. */
1764 static void
1765 analyze_agg_content_value (struct ipa_func_body_info *fbi,
1766 struct ipa_load_agg_data *agg_value,
1767 gimple *stmt)
1769 tree lhs = gimple_assign_lhs (stmt);
1770 tree rhs1 = gimple_assign_rhs1 (stmt);
1771 enum tree_code code;
1772 int index = -1;
1774 /* Initialize jump function data for the aggregate part. */
1775 memset (agg_value, 0, sizeof (*agg_value));
1776 agg_value->pass_through.operation = NOP_EXPR;
1777 agg_value->pass_through.formal_id = -1;
1778 agg_value->offset = -1;
1780 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs)) /* TODO: Support aggregate type. */
1781 || TREE_THIS_VOLATILE (lhs)
1782 || TREE_CODE (lhs) == BIT_FIELD_REF
1783 || contains_bitfld_component_ref_p (lhs))
1784 return;
1786 /* Skip SSA copies. */
1787 while (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
1789 if (TREE_CODE (rhs1) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (rhs1))
1790 break;
1792 stmt = SSA_NAME_DEF_STMT (rhs1);
1793 if (!is_gimple_assign (stmt))
1794 break;
1796 rhs1 = gimple_assign_rhs1 (stmt);
1799 if (gphi *phi = dyn_cast<gphi *> (stmt))
1801 /* Also handle special cases like the following (a is a formal parameter):
1803 _12 = *a_11(D).dim[0].stride;
1805 # iftmp.22_9 = PHI <_12(2), 1(3)>
1807 parm.6.dim[0].stride = iftmp.22_9;
1809 __x_MOD_foo (&parm.6, b_31(D));
1811 The aggregate function describing parm.6.dim[0].stride is encoded as a
1812 PASS-THROUGH jump function with ASSERT_EXPR operation with operand 1
1813 (the constant from the PHI node). */
1815 if (gimple_phi_num_args (phi) != 2)
1816 return;
1817 tree arg0 = gimple_phi_arg_def (phi, 0);
1818 tree arg1 = gimple_phi_arg_def (phi, 1);
1819 tree operand;
1821 if (is_gimple_ip_invariant (arg1))
1823 operand = arg1;
1824 rhs1 = arg0;
1826 else if (is_gimple_ip_invariant (arg0))
1828 operand = arg0;
1829 rhs1 = arg1;
1831 else
1832 return;
1834 rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
1835 if (!is_gimple_assign (stmt))
1836 return;
1838 code = ASSERT_EXPR;
1839 agg_value->pass_through.operand = operand;
1841 else if (is_gimple_assign (stmt))
1843 code = gimple_assign_rhs_code (stmt);
1844 switch (gimple_assign_rhs_class (stmt))
1846 case GIMPLE_SINGLE_RHS:
1847 if (is_gimple_ip_invariant (rhs1))
1849 agg_value->pass_through.operand = rhs1;
1850 return;
1852 code = NOP_EXPR;
1853 break;
1855 case GIMPLE_UNARY_RHS:
1856 /* NOTE: A GIMPLE_UNARY_RHS operation might not be tcc_unary
1857 (TRUTH_NOT_EXPR is an example), and GIMPLE_BINARY_RHS does not imply
1858 tcc_binary; this subtlety is somewhat misleading.
1860 Since tcc_unary is widely used in IPA-CP code to check an operation
1861 with one operand, here we only allow tcc_unary operations to avoid
1862 possible problems. Then we can use (opclass == tcc_unary) or not to
1863 distinguish unary and binary. */
1864 if (TREE_CODE_CLASS (code) != tcc_unary || CONVERT_EXPR_CODE_P (code))
1865 return;
1867 rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
1868 break;
1870 case GIMPLE_BINARY_RHS:
1872 gimple *rhs1_stmt = stmt;
1873 gimple *rhs2_stmt = stmt;
1874 tree rhs2 = gimple_assign_rhs2 (stmt);
1876 rhs1 = get_ssa_def_if_simple_copy (rhs1, &rhs1_stmt);
1877 rhs2 = get_ssa_def_if_simple_copy (rhs2, &rhs2_stmt);
1879 if (is_gimple_ip_invariant (rhs2))
1881 agg_value->pass_through.operand = rhs2;
1882 stmt = rhs1_stmt;
1884 else if (is_gimple_ip_invariant (rhs1))
1886 if (TREE_CODE_CLASS (code) == tcc_comparison)
1887 code = swap_tree_comparison (code);
1888 else if (!commutative_tree_code (code))
1889 return;
1891 agg_value->pass_through.operand = rhs1;
1892 stmt = rhs2_stmt;
1893 rhs1 = rhs2;
1895 else
1896 return;
1898 if (TREE_CODE_CLASS (code) != tcc_comparison
1899 && !useless_type_conversion_p (TREE_TYPE (lhs),
1900 TREE_TYPE (rhs1)))
1901 return;
1903 break;
1905 default:
1906 return;
1909 else
1910 return;
1912 if (TREE_CODE (rhs1) != SSA_NAME)
1913 index = load_from_unmodified_param_or_agg (fbi, fbi->info, stmt,
1914 &agg_value->offset,
1915 &agg_value->by_ref);
1916 else if (SSA_NAME_IS_DEFAULT_DEF (rhs1))
1917 index = ipa_get_param_decl_index (fbi->info, SSA_NAME_VAR (rhs1));
1919 if (index >= 0)
1921 if (agg_value->offset >= 0)
1922 agg_value->type = TREE_TYPE (rhs1);
1923 agg_value->pass_through.formal_id = index;
1924 agg_value->pass_through.operation = code;
1926 else
1927 agg_value->pass_through.operand = NULL_TREE;
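/* For example, for the hypothetical GIMPLE comparison

     _5 = 4 < p_3(D);

   where the first operand is the invariant, the code above swaps the
   comparison so that the recorded jump function encodes p_3 > 4, keeping
   the formal parameter as the first operand. */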
1930 /* If STMT is a memory store to the object whose address is BASE, extract
1931 information (offset, size, and value) into CONTENT and return true;
1932 otherwise conservatively assume the whole object is modified with
1933 unknown content and return false. CHECK_REF means that the access to the
1934 object is expected to be in the form of a MEM_REF expression. */
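/* A hypothetical illustration (the SSA names below are made up):

     parm.6.dim[0].stride = iftmp.22_9;

   with CHECK_REF false and BASE being parm.6 records the offset and size
   of the stride field together with the value iftmp.22_9, whereas

     *p_1(D) = 4;

   with CHECK_REF true and BASE being p_1(D) records offset 0, the size of
   the store and the constant 4.  */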
1936 static bool
1937 extract_mem_content (struct ipa_func_body_info *fbi,
1938 gimple *stmt, tree base, bool check_ref,
1939 struct ipa_known_agg_contents_list *content)
1941 HOST_WIDE_INT lhs_offset, lhs_size;
1942 bool reverse;
1944 if (!is_gimple_assign (stmt))
1945 return false;
1947 tree lhs = gimple_assign_lhs (stmt);
1948 tree lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset, &lhs_size,
1949 &reverse);
1950 if (!lhs_base)
1951 return false;
1953 if (check_ref)
1955 if (TREE_CODE (lhs_base) != MEM_REF
1956 || TREE_OPERAND (lhs_base, 0) != base
1957 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1958 return false;
1960 else if (lhs_base != base)
1961 return false;
1963 content->offset = lhs_offset;
1964 content->size = lhs_size;
1965 content->type = TREE_TYPE (lhs);
1966 content->next = NULL;
1968 analyze_agg_content_value (fbi, &content->value, stmt);
1969 return true;
1972 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1973 in ARG is filled in with constants or values that are derived from the
1974 caller's formal parameters in the way described by some kinds of jump
1975 functions. FBI is the context of the caller function for interprocedural
1976 analysis. ARG can either be an aggregate expression or a pointer to an
1977 aggregate. ARG_TYPE is the type of the aggregate; JFUNC is the jump function for the aggregate. */
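/* A hypothetical example (the names are made up) of an aggregate argument
   this function can describe:

     struct S { int a; int b; };
     extern void bar (struct S);

     void
     foo (int x)
     {
       struct S s;
       s.a = 4;
       s.b = x;
       bar (s);
     }

   Here s.a would be described by an aggregate jump function item holding
   the constant 4 and s.b by a pass-through of foo's parameter x.  */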
1979 static void
1980 determine_known_aggregate_parts (struct ipa_func_body_info *fbi,
1981 gcall *call, tree arg,
1982 tree arg_type,
1983 struct ipa_jump_func *jfunc)
1985 struct ipa_known_agg_contents_list *list = NULL, *all_list = NULL;
1986 bitmap visited = NULL;
1987 int item_count = 0, value_count = 0;
1988 HOST_WIDE_INT arg_offset, arg_size;
1989 tree arg_base;
1990 bool check_ref, by_ref;
1991 ao_ref r;
1992 int max_agg_items = opt_for_fn (fbi->node->decl, param_ipa_max_agg_items);
1994 if (max_agg_items == 0)
1995 return;
1997 /* The function operates in three stages. First, we prepare check_ref, r,
1998 arg_base and arg_offset based on the form of the actual
1999 argument. */
2001 if (POINTER_TYPE_P (arg_type))
2003 by_ref = true;
2004 if (TREE_CODE (arg) == SSA_NAME)
2006 tree type_size;
2007 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type)))
2008 || !POINTER_TYPE_P (TREE_TYPE (arg)))
2009 return;
2010 check_ref = true;
2011 arg_base = arg;
2012 arg_offset = 0;
2013 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
2014 arg_size = tree_to_uhwi (type_size);
2015 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
2017 else if (TREE_CODE (arg) == ADDR_EXPR)
2019 bool reverse;
2021 arg = TREE_OPERAND (arg, 0);
2022 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
2023 &arg_size, &reverse);
2024 if (!arg_base)
2025 return;
2026 if (DECL_P (arg_base))
2028 check_ref = false;
2029 ao_ref_init (&r, arg_base);
2031 else
2032 return;
2034 else
2035 return;
2037 else
2039 bool reverse;
2041 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
2043 by_ref = false;
2044 check_ref = false;
2045 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
2046 &arg_size, &reverse);
2047 if (!arg_base)
2048 return;
2050 ao_ref_init (&r, arg);
2053 /* The second stage traverses the virtual SSA web backwards starting from
2054 the call statement and only looks at individual dominating virtual
2055 operands (whose definitions dominate the call). As long as it is confident
2056 that the content of the aggregate is affected by the definition of a
2057 virtual operand, it builds a sorted linked list of ipa_known_agg_contents_list describing that. */
2059 for (tree dom_vuse = gimple_vuse (call);
2060 dom_vuse && fbi->aa_walk_budget > 0;)
2062 gimple *stmt = SSA_NAME_DEF_STMT (dom_vuse);
2064 if (gimple_code (stmt) == GIMPLE_PHI)
2066 dom_vuse = get_continuation_for_phi (stmt, &r, true,
2067 fbi->aa_walk_budget,
2068 &visited, false, NULL, NULL);
2069 continue;
2072 fbi->aa_walk_budget--;
2073 if (stmt_may_clobber_ref_p_1 (stmt, &r))
2075 struct ipa_known_agg_contents_list *content
2076 = XALLOCA (struct ipa_known_agg_contents_list);
2078 if (!extract_mem_content (fbi, stmt, arg_base, check_ref, content))
2079 break;
2081 /* Now we have a dominating virtual operand and need to check
2082 whether its value is clobbered by any other dominating one. */
2083 if ((content->value.pass_through.formal_id >= 0
2084 || content->value.pass_through.operand)
2085 && !clobber_by_agg_contents_list_p (all_list, content))
2087 struct ipa_known_agg_contents_list *copy
2088 = XALLOCA (struct ipa_known_agg_contents_list);
2090 /* Add to the list consisting only of dominating virtual
2091 operands whose definitions can finally reach the call. */
2092 add_to_agg_contents_list (&list, (*copy = *content, copy));
2094 if (++value_count == max_agg_items)
2095 break;
2098 /* Add to the list consisting of all dominating virtual operands. */
2099 add_to_agg_contents_list (&all_list, content);
2101 if (++item_count == 2 * max_agg_items)
2102 break;
2104 dom_vuse = gimple_vuse (stmt);
2107 if (visited)
2108 BITMAP_FREE (visited);
2110 /* The third stage just goes over the list and creates an appropriate vector
2111 of ipa_agg_jf_item structures out of it, of course only if there are
2112 any meaningful items to begin with. */
2114 if (value_count)
2116 jfunc->agg.by_ref = by_ref;
2117 build_agg_jump_func_from_list (list, value_count, arg_offset, jfunc);
2122 /* Return the Ith param type of the callee associated with call graph
2123 edge E. */
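/* For instance (an illustrative sketch): for a callee declared as
   void f (int, double), I == 1 yields the double type node.  If the type
   list runs out, e.g. for unprototyped declarations, the DECL_ARGUMENTS
   fallback below is used instead.  */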
2125 tree
2126 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
2128 int n;
2129 tree type = (e->callee
2130 ? TREE_TYPE (e->callee->decl)
2131 : gimple_call_fntype (e->call_stmt));
2132 tree t = TYPE_ARG_TYPES (type);
2134 for (n = 0; n < i; n++)
2136 if (!t)
2137 break;
2138 t = TREE_CHAIN (t);
2140 if (t)
2141 return TREE_VALUE (t);
2142 if (!e->callee)
2143 return NULL;
2144 t = DECL_ARGUMENTS (e->callee->decl);
2145 for (n = 0; n < i; n++)
2147 if (!t)
2148 return NULL;
2149 t = TREE_CHAIN (t);
2151 if (t)
2152 return TREE_TYPE (t);
2153 return NULL;
2156 /* Return ipa_bits with VALUE and MASK values, which can be either a newly
2157 allocated structure or a previously existing one shared with other jump
2158 functions and/or transformation summaries. */
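/* A hypothetical illustration, assuming the CCP-style encoding in which
   mask bits set to 1 are unknown and mask bits set to 0 are known to equal
   the corresponding bit of VALUE: a pointer known to be congruent to 4
   modulo 8 would be represented by VALUE == 4 and MASK == ~7 (within the
   precision of the type).  */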
2160 ipa_bits *
2161 ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
2163 ipa_bits tmp;
2164 tmp.value = value;
2165 tmp.mask = mask;
2167 ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
2168 if (*slot)
2169 return *slot;
2171 ipa_bits *res = ggc_alloc<ipa_bits> ();
2172 res->value = value;
2173 res->mask = mask;
2174 *slot = res;
2176 return res;
2179 /* Assign to JF a pointer to an ipa_bits structure with VALUE and MASK. Use
2180 a hash table in order to avoid creating multiple identical ipa_bits structures. */
2182 static void
2183 ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
2184 const widest_int &mask)
2186 jf->bits = ipa_get_ipa_bits_for_value (value, mask);
2189 /* Return a pointer to a value_range just like *TMP, but either find it in
2190 ipa_vr_hash_table or allocate it in GC memory. TMP->equiv must be NULL. */
2192 static value_range *
2193 ipa_get_value_range (value_range *tmp)
2195 value_range **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
2196 if (*slot)
2197 return *slot;
2199 value_range *vr = new (ggc_alloc<value_range> ()) value_range;
2200 *vr = *tmp;
2201 *slot = vr;
2203 return vr;
2206 /* Return a pointer to a value range consisting of KIND, MIN, MAX and an empty
2207 equiv set. Use a hash table in order to avoid creating multiple identical
2208 copies of value_ranges. */
2210 static value_range *
2211 ipa_get_value_range (enum value_range_kind kind, tree min, tree max)
2213 value_range tmp (min, max, kind);
2214 return ipa_get_value_range (&tmp);
2217 /* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
2218 a NULL equiv bitmap. Use a hash table in order to avoid creating multiple
2219 identical value_range structures. */
2221 static void
2222 ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_kind type,
2223 tree min, tree max)
2225 jf->m_vr = ipa_get_value_range (type, min, max);
2228 /* Assign to JF a pointer to a value_range just like TMP but either fetch a
2229 copy from ipa_vr_hash_table or allocate a new one in GC memory. */
2231 static void
2232 ipa_set_jfunc_vr (ipa_jump_func *jf, value_range *tmp)
2234 jf->m_vr = ipa_get_value_range (tmp);
2237 /* Compute jump function for all arguments of callsite CS and insert the
2238 information in the jump_functions array in the ipa_edge_args corresponding
2239 to this callsite. */
2241 static void
2242 ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
2243 struct cgraph_edge *cs)
2245 ipa_node_params *info = ipa_node_params_sum->get (cs->caller);
2246 ipa_edge_args *args = ipa_edge_args_sum->get_create (cs);
2247 gcall *call = cs->call_stmt;
2248 int n, arg_num = gimple_call_num_args (call);
2249 bool useful_context = false;
2250 value_range vr;
2252 if (arg_num == 0 || args->jump_functions)
2253 return;
2254 vec_safe_grow_cleared (args->jump_functions, arg_num, true);
2255 if (flag_devirtualize)
2256 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num, true);
2258 if (gimple_call_internal_p (call))
2259 return;
2260 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
2261 return;
2263 for (n = 0; n < arg_num; n++)
2265 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
2266 tree arg = gimple_call_arg (call, n);
2267 tree param_type = ipa_get_callee_param_type (cs, n);
2268 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
2270 tree instance;
2271 class ipa_polymorphic_call_context context (cs->caller->decl,
2272 arg, cs->call_stmt,
2273 &instance);
2274 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt,
2275 &fbi->aa_walk_budget);
2276 *ipa_get_ith_polymorhic_call_context (args, n) = context;
2277 if (!context.useless_p ())
2278 useful_context = true;
2281 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2283 bool addr_nonzero = false;
2284 bool strict_overflow = false;
2286 if (TREE_CODE (arg) == SSA_NAME
2287 && param_type
2288 && get_range_query (cfun)->range_of_expr (vr, arg)
2289 && vr.nonzero_p ())
2290 addr_nonzero = true;
2291 else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
2292 addr_nonzero = true;
2294 if (addr_nonzero)
2296 tree z = build_int_cst (TREE_TYPE (arg), 0);
2297 ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
2299 else
2300 gcc_assert (!jfunc->m_vr);
2302 else
2304 if (TREE_CODE (arg) == SSA_NAME
2305 && param_type
2306 && get_range_query (cfun)->range_of_expr (vr, arg)
2307 && !vr.undefined_p ())
2309 value_range resvr;
2310 range_fold_unary_expr (&resvr, NOP_EXPR, param_type,
2311 &vr, TREE_TYPE (arg));
2312 if (!resvr.undefined_p () && !resvr.varying_p ())
2313 ipa_set_jfunc_vr (jfunc, &resvr);
2314 else
2315 gcc_assert (!jfunc->m_vr);
2317 else
2318 gcc_assert (!jfunc->m_vr);
2321 if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
2322 && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
2324 if (TREE_CODE (arg) == SSA_NAME)
2325 ipa_set_jfunc_bits (jfunc, 0,
2326 widest_int::from (get_nonzero_bits (arg),
2327 TYPE_SIGN (TREE_TYPE (arg))));
2328 else
2329 ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
2331 else if (POINTER_TYPE_P (TREE_TYPE (arg)))
2333 unsigned HOST_WIDE_INT bitpos;
2334 unsigned align;
2336 get_pointer_alignment_1 (arg, &align, &bitpos);
2337 widest_int mask = wi::bit_and_not
2338 (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
2339 align / BITS_PER_UNIT - 1);
2340 widest_int value = bitpos / BITS_PER_UNIT;
2341 ipa_set_jfunc_bits (jfunc, value, mask);
2343 else
2344 gcc_assert (!jfunc->bits);
2346 if (is_gimple_ip_invariant (arg)
2347 || (VAR_P (arg)
2348 && is_global_var (arg)
2349 && TREE_READONLY (arg)))
2350 ipa_set_jf_constant (jfunc, arg, cs);
2351 else if (!is_gimple_reg_type (TREE_TYPE (arg))
2352 && TREE_CODE (arg) == PARM_DECL)
2354 int index = ipa_get_param_decl_index (info, arg);
2356 gcc_assert (index >= 0);
2357 /* Aggregate passed by value; check for pass-through, otherwise we
2358 will attempt to fill in the aggregate contents later in this
2359 loop. */
2360 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
2362 ipa_set_jf_simple_pass_through (jfunc, index, false);
2363 continue;
2366 else if (TREE_CODE (arg) == SSA_NAME)
2368 if (SSA_NAME_IS_DEFAULT_DEF (arg))
2370 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
2371 if (index >= 0)
2373 bool agg_p;
2374 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
2375 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
2378 else
2380 gimple *stmt = SSA_NAME_DEF_STMT (arg);
2381 if (is_gimple_assign (stmt))
2382 compute_complex_assign_jump_func (fbi, info, jfunc,
2383 call, stmt, arg, param_type);
2384 else if (gimple_code (stmt) == GIMPLE_PHI)
2385 compute_complex_ancestor_jump_func (fbi, info, jfunc,
2386 call,
2387 as_a <gphi *> (stmt));
2391 /* If ARG is a pointer, we cannot use its type to determine the type of the
2392 aggregate passed (because type conversions are ignored in gimple). Usually
2393 we can safely get the type from the function declaration, but in the case
2394 of K&R prototypes or variadic functions we can try our luck with the type
2395 of the pointer passed. TODO: Since we look for the actual initialization
2396 of the memory object, we might do better to work out the type based on the memory stores we find. */
2397 if (!param_type)
2398 param_type = TREE_TYPE (arg);
2400 if ((jfunc->type != IPA_JF_PASS_THROUGH
2401 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
2402 && (jfunc->type != IPA_JF_ANCESTOR
2403 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
2404 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
2405 || POINTER_TYPE_P (param_type)))
2406 determine_known_aggregate_parts (fbi, call, arg, param_type, jfunc);
2408 if (!useful_context)
2409 vec_free (args->polymorphic_call_contexts);
2412 /* Compute jump functions for all edges - both direct and indirect - outgoing
2413 from BB. */
2415 static void
2416 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
2418 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2419 int i;
2420 struct cgraph_edge *cs;
2422 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
2424 struct cgraph_node *callee = cs->callee;
2426 if (callee)
2428 callee = callee->ultimate_alias_target ();
2429 /* We do not need to bother analyzing calls to unknown functions
2430 unless they may become known during lto/whopr. */
2431 if (!callee->definition && !flag_lto
2432 && !gimple_call_fnspec (cs->call_stmt).known_p ())
2433 continue;
2435 ipa_compute_jump_functions_for_edge (fbi, cs);
2439 /* If STMT looks like a statement loading a value from a member pointer formal
2440 parameter, return that parameter and store the offset of the field to
2441 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2442 might be clobbered). If USE_DELTA, then we look for a use of the delta
2443 field rather than the pfn. */
2445 static tree
2446 ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
2447 HOST_WIDE_INT *offset_p)
2449 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2451 if (!gimple_assign_single_p (stmt))
2452 return NULL_TREE;
2454 rhs = gimple_assign_rhs1 (stmt);
2455 if (TREE_CODE (rhs) == COMPONENT_REF)
2457 ref_field = TREE_OPERAND (rhs, 1);
2458 rhs = TREE_OPERAND (rhs, 0);
2460 else
2461 ref_field = NULL_TREE;
2462 if (TREE_CODE (rhs) != MEM_REF)
2463 return NULL_TREE;
2464 rec = TREE_OPERAND (rhs, 0);
2465 if (TREE_CODE (rec) != ADDR_EXPR)
2466 return NULL_TREE;
2467 rec = TREE_OPERAND (rec, 0);
2468 if (TREE_CODE (rec) != PARM_DECL
2469 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2470 return NULL_TREE;
2471 ref_offset = TREE_OPERAND (rhs, 1);
2473 if (use_delta)
2474 fld = delta_field;
2475 else
2476 fld = ptr_field;
2477 if (offset_p)
2478 *offset_p = int_bit_position (fld);
2480 if (ref_field)
2482 if (integer_nonzerop (ref_offset))
2483 return NULL_TREE;
2484 return ref_field == fld ? rec : NULL_TREE;
2486 else
2487 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2488 : NULL_TREE;
2491 /* Returns true iff T is an SSA_NAME defined by a statement. */
2493 static bool
2494 ipa_is_ssa_with_stmt_def (tree t)
2496 if (TREE_CODE (t) == SSA_NAME
2497 && !SSA_NAME_IS_DEFAULT_DEF (t))
2498 return true;
2499 else
2500 return false;
2503 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2504 call to parameter number PARAM_INDEX. NODE is the caller. Return the
2505 indirect call graph edge.
2506 If POLYMORPHIC is true, record it as a destination of a polymorphic call. */
2508 static struct cgraph_edge *
2509 ipa_note_param_call (struct cgraph_node *node, int param_index,
2510 gcall *stmt, bool polymorphic)
2512 struct cgraph_edge *cs;
2514 cs = node->get_edge (stmt);
2515 cs->indirect_info->param_index = param_index;
2516 cs->indirect_info->agg_contents = 0;
2517 cs->indirect_info->member_ptr = 0;
2518 cs->indirect_info->guaranteed_unmodified = 0;
2519 ipa_node_params *info = ipa_node_params_sum->get (node);
2520 ipa_set_param_used_by_indirect_call (info, param_index, true);
2521 if (cs->indirect_info->polymorphic || polymorphic)
2522 ipa_set_param_used_by_polymorphic_call (info, param_index, true);
2523 return cs;
2526 /* Analyze the CALL and examine uses of formal parameters of the caller
2527 FBI->node (described by FBI->info). Currently it checks
2529 whether the call calls a pointer that is a formal parameter and if so, the
2530 parameter is marked with the called flag and an indirect call graph edge
2531 describing the call is created. This is very simple for ordinary pointers
2532 represented in SSA but not-so-nice when it comes to member pointers. The
2533 ugly part of this function does nothing more than trying to match the
2534 pattern of such a call. An example of such a pattern is the gimple dump
2535 below, the call is on the last line:
2537 <bb 2>:
2538 f$__delta_5 = f.__delta;
2539 f$__pfn_24 = f.__pfn;
2542 <bb 2>:
2543 f$__delta_5 = MEM[(struct *)&f];
2544 f$__pfn_24 = MEM[(struct *)&f + 4B];
2546 and a few lines below:
2548 <bb 5>
2549 D.2496_3 = (int) f$__pfn_24;
2550 D.2497_4 = D.2496_3 & 1;
2551 if (D.2497_4 != 0)
2552 goto <bb 3>;
2553 else
2554 goto <bb 4>;
2556 <bb 6>:
2557 D.2500_7 = (unsigned int) f$__delta_5;
2558 D.2501_8 = &S + D.2500_7;
2559 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2560 D.2503_10 = *D.2502_9;
2561 D.2504_12 = f$__pfn_24 + -1;
2562 D.2505_13 = (unsigned int) D.2504_12;
2563 D.2506_14 = D.2503_10 + D.2505_13;
2564 D.2507_15 = *D.2506_14;
2565 iftmp.11_16 = (String:: *) D.2507_15;
2567 <bb 7>:
2568 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2569 D.2500_19 = (unsigned int) f$__delta_5;
2570 D.2508_20 = &S + D.2500_19;
2571 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2573 Such patterns are results of simple calls to a member pointer:
2575 int doprinting (int (MyString::* f)(int) const)
2577 MyString S ("somestring");
2579 return (S.*f)(4);
2582 Moreover, the function looks for called pointers loaded from aggregates
2583 passed by value or reference. */
2585 static void
2586 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
2587 tree target)
2589 class ipa_node_params *info = fbi->info;
2590 HOST_WIDE_INT offset;
2591 bool by_ref;
2593 if (SSA_NAME_IS_DEFAULT_DEF (target))
2595 tree var = SSA_NAME_VAR (target);
2596 int index = ipa_get_param_decl_index (info, var);
2597 if (index >= 0)
2598 ipa_note_param_call (fbi->node, index, call, false);
2599 return;
2602 int index;
2603 gimple *def = SSA_NAME_DEF_STMT (target);
2604 bool guaranteed_unmodified;
2605 if (gimple_assign_single_p (def)
2606 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2607 gimple_assign_rhs1 (def), &index, &offset,
2608 NULL, &by_ref, &guaranteed_unmodified))
2610 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2611 call, false);
2612 cs->indirect_info->offset = offset;
2613 cs->indirect_info->agg_contents = 1;
2614 cs->indirect_info->by_ref = by_ref;
2615 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
2616 return;
2619 /* Now we need to try to match the complex pattern of calling a member
2620 pointer. */
2621 if (gimple_code (def) != GIMPLE_PHI
2622 || gimple_phi_num_args (def) != 2
2623 || !POINTER_TYPE_P (TREE_TYPE (target))
2624 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2625 return;
2627 /* First, we need to check whether one of these is a load from a member
2628 pointer that is a parameter to this function. */
2629 tree n1 = PHI_ARG_DEF (def, 0);
2630 tree n2 = PHI_ARG_DEF (def, 1);
2631 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2632 return;
2633 gimple *d1 = SSA_NAME_DEF_STMT (n1);
2634 gimple *d2 = SSA_NAME_DEF_STMT (n2);
2636 tree rec;
2637 basic_block bb, virt_bb;
2638 basic_block join = gimple_bb (def);
2639 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2641 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2642 return;
2644 bb = EDGE_PRED (join, 0)->src;
2645 virt_bb = gimple_bb (d2);
2647 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2649 bb = EDGE_PRED (join, 1)->src;
2650 virt_bb = gimple_bb (d1);
2652 else
2653 return;
2655 /* Second, we need to check that the basic blocks are laid out in the way
2656 corresponding to the pattern. */
2658 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2659 || single_pred (virt_bb) != bb
2660 || single_succ (virt_bb) != join)
2661 return;
2663 /* Third, let's see that the branching is done depending on the least
2664 significant bit of the pfn. */
2666 gimple *branch = last_stmt (bb);
2667 if (!branch || gimple_code (branch) != GIMPLE_COND)
2668 return;
2670 if ((gimple_cond_code (branch) != NE_EXPR
2671 && gimple_cond_code (branch) != EQ_EXPR)
2672 || !integer_zerop (gimple_cond_rhs (branch)))
2673 return;
2675 tree cond = gimple_cond_lhs (branch);
2676 if (!ipa_is_ssa_with_stmt_def (cond))
2677 return;
2679 def = SSA_NAME_DEF_STMT (cond);
2680 if (!is_gimple_assign (def)
2681 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2682 || !integer_onep (gimple_assign_rhs2 (def)))
2683 return;
2685 cond = gimple_assign_rhs1 (def);
2686 if (!ipa_is_ssa_with_stmt_def (cond))
2687 return;
2689 def = SSA_NAME_DEF_STMT (cond);
2691 if (is_gimple_assign (def)
2692 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2694 cond = gimple_assign_rhs1 (def);
2695 if (!ipa_is_ssa_with_stmt_def (cond))
2696 return;
2697 def = SSA_NAME_DEF_STMT (cond);
2700 tree rec2;
2701 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2702 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2703 == ptrmemfunc_vbit_in_delta),
2704 NULL);
2705 if (rec != rec2)
2706 return;
2708 index = ipa_get_param_decl_index (info, rec);
2709 if (index >= 0
2710 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2712 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2713 call, false);
2714 cs->indirect_info->offset = offset;
2715 cs->indirect_info->agg_contents = 1;
2716 cs->indirect_info->member_ptr = 1;
2717 cs->indirect_info->guaranteed_unmodified = 1;
2720 return;
2723 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2724 object referenced in the expression is a formal parameter of the caller
2725 FBI->node (described by FBI->info), create a call note for the
2726 statement. */
2728 static void
2729 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2730 gcall *call, tree target)
2732 tree obj = OBJ_TYPE_REF_OBJECT (target);
2733 int index;
2734 HOST_WIDE_INT anc_offset;
2736 if (!flag_devirtualize)
2737 return;
2739 if (TREE_CODE (obj) != SSA_NAME)
2740 return;
2742 class ipa_node_params *info = fbi->info;
2743 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2745 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2746 return;
2748 anc_offset = 0;
2749 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2750 gcc_assert (index >= 0);
2751 if (detect_type_change_ssa (fbi, obj, obj_type_ref_class (target),
2752 call))
2753 return;
2755 else
2757 gimple *stmt = SSA_NAME_DEF_STMT (obj);
2758 tree expr;
2760 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2761 if (!expr)
2762 return;
2763 index = ipa_get_param_decl_index (info,
2764 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2765 gcc_assert (index >= 0);
2766 if (detect_type_change (fbi, obj, expr, obj_type_ref_class (target),
2767 call, anc_offset))
2768 return;
2771 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2772 call, true);
2773 class cgraph_indirect_call_info *ii = cs->indirect_info;
2774 ii->offset = anc_offset;
2775 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2776 ii->otr_type = obj_type_ref_class (target);
2777 ii->polymorphic = 1;
2780 /* Analyze a call statement CALL and determine whether and how it utilizes
2781 formal parameters of the caller (described by FBI->info). */
2784 static void
2785 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2787 tree target = gimple_call_fn (call);
2789 if (!target
2790 || (TREE_CODE (target) != SSA_NAME
2791 && !virtual_method_call_p (target)))
2792 return;
2794 struct cgraph_edge *cs = fbi->node->get_edge (call);
2795 /* If we previously turned the call into a direct call, there is
2796 no need to analyze. */
2797 if (cs && !cs->indirect_unknown_callee)
2798 return;
2800 if (cs->indirect_info->polymorphic && flag_devirtualize)
2802 tree instance;
2803 tree target = gimple_call_fn (call);
2804 ipa_polymorphic_call_context context (current_function_decl,
2805 target, call, &instance);
2807 gcc_checking_assert (cs->indirect_info->otr_type
2808 == obj_type_ref_class (target));
2809 gcc_checking_assert (cs->indirect_info->otr_token
2810 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2812 cs->indirect_info->vptr_changed
2813 = !context.get_dynamic_type (instance,
2814 OBJ_TYPE_REF_OBJECT (target),
2815 obj_type_ref_class (target), call,
2816 &fbi->aa_walk_budget);
2817 cs->indirect_info->context = context;
2820 if (TREE_CODE (target) == SSA_NAME)
2821 ipa_analyze_indirect_call_uses (fbi, call, target);
2822 else if (virtual_method_call_p (target))
2823 ipa_analyze_virtual_call_uses (fbi, call, target);
2827 /* Analyze the call statement STMT with respect to formal parameters
2828 (described in FBI->info) of the caller given by FBI->node. Currently it
2829 only checks whether formal parameters are called. */
2831 static void
2832 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2834 if (is_gimple_call (stmt))
2835 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2838 /* Callback of walk_stmt_load_store_addr_ops, used below to visit loads,
2839 stores and taken addresses. If OP is a parameter declaration, mark it as
2840 used in the info structure passed in DATA. */
2842 static bool
2843 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2845 class ipa_node_params *info = (class ipa_node_params *) data;
2847 op = get_base_address (op);
2848 if (op
2849 && TREE_CODE (op) == PARM_DECL)
2851 int index = ipa_get_param_decl_index (info, op);
2852 gcc_assert (index >= 0);
2853 ipa_set_param_used (info, index, true);
2856 return false;
2859 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2860 the findings in various fields of the associated ipa_node_params
2861 structure, such as parameter flags, notes etc. FBI holds various data about
2862 the function being analyzed. */
2864 static void
2865 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2867 gimple_stmt_iterator gsi;
2868 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2870 gimple *stmt = gsi_stmt (gsi);
2872 if (is_gimple_debug (stmt))
2873 continue;
2875 ipa_analyze_stmt_uses (fbi, stmt);
2876 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2877 visit_ref_for_mod_analysis,
2878 visit_ref_for_mod_analysis,
2879 visit_ref_for_mod_analysis);
2881 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2882 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2883 visit_ref_for_mod_analysis,
2884 visit_ref_for_mod_analysis,
2885 visit_ref_for_mod_analysis);
2888 /* Return true if EXPR is a load from a dereference of SSA_NAME NAME. */
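/* For instance (illustrative): with NAME being p_1, both *p_1 and
   MEM[(struct s *)p_1].f qualify, because the base of each reference is a
   MEM_REF whose first operand is p_1.  */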
2890 static bool
2891 load_from_dereferenced_name (tree expr, tree name)
2893 tree base = get_base_address (expr);
2894 return (TREE_CODE (base) == MEM_REF
2895 && TREE_OPERAND (base, 0) == name);
2898 /* Calculate controlled uses of parameters of NODE. */
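/* A hypothetical illustration (the names are made up):

     void
     foo (int *p)
     {
       bar (p);
       baz (*p);
     }

   Passing p to bar () is a counted call use and the load from *p merely
   sets the load_dereferenced flag, so p ends up with one controlled use.
   If p also escaped, for instance through global_ptr = p, its uses would
   become IPA_UNDESCRIBED_USE.  */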
2900 static void
2901 ipa_analyze_controlled_uses (struct cgraph_node *node)
2903 ipa_node_params *info = ipa_node_params_sum->get (node);
2905 for (int i = 0; i < ipa_get_param_count (info); i++)
2907 tree parm = ipa_get_param (info, i);
2908 int call_uses = 0;
2909 bool load_dereferenced = false;
2911 /* For SSA regs see if the parameter is used. For non-SSA parameters
2912 we compute the flag during modification analysis. */
2913 if (is_gimple_reg (parm))
2915 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2916 parm);
2917 if (ddef && !has_zero_uses (ddef))
2919 imm_use_iterator imm_iter;
2920 gimple *stmt;
2922 ipa_set_param_used (info, i, true);
2923 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, ddef)
2925 if (is_gimple_debug (stmt))
2926 continue;
2928 int all_stmt_uses = 0;
2929 use_operand_p use_p;
2930 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
2931 all_stmt_uses++;
2933 if (is_gimple_call (stmt))
2935 if (gimple_call_internal_p (stmt))
2937 call_uses = IPA_UNDESCRIBED_USE;
2938 break;
2940 int recognized_stmt_uses;
2941 if (gimple_call_fn (stmt) == ddef)
2942 recognized_stmt_uses = 1;
2943 else
2944 recognized_stmt_uses = 0;
2945 unsigned arg_count = gimple_call_num_args (stmt);
2946 for (unsigned i = 0; i < arg_count; i++)
2948 tree arg = gimple_call_arg (stmt, i);
2949 if (arg == ddef)
2950 recognized_stmt_uses++;
2951 else if (load_from_dereferenced_name (arg, ddef))
2953 load_dereferenced = true;
2954 recognized_stmt_uses++;
2958 if (recognized_stmt_uses != all_stmt_uses)
2960 call_uses = IPA_UNDESCRIBED_USE;
2961 break;
2963 if (call_uses >= 0)
2964 call_uses += all_stmt_uses;
2966 else if (gimple_assign_single_p (stmt))
2968 tree rhs = gimple_assign_rhs1 (stmt);
2969 if (all_stmt_uses != 1
2970 || !load_from_dereferenced_name (rhs, ddef))
2972 call_uses = IPA_UNDESCRIBED_USE;
2973 break;
2975 load_dereferenced = true;
2977 else
2979 call_uses = IPA_UNDESCRIBED_USE;
2980 break;
2984 else
2985 call_uses = 0;
2987 else
2988 call_uses = IPA_UNDESCRIBED_USE;
2989 ipa_set_controlled_uses (info, i, call_uses);
2990 ipa_set_param_load_dereferenced (info, i, load_dereferenced);
2994 /* Free stuff in BI. */
2996 static void
2997 free_ipa_bb_info (struct ipa_bb_info *bi)
2999 bi->cg_edges.release ();
3000 bi->param_aa_statuses.release ();
3003 /* Dominator walker driving the analysis. */
3005 class analysis_dom_walker : public dom_walker
3007 public:
3008 analysis_dom_walker (struct ipa_func_body_info *fbi)
3009 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
3011 virtual edge before_dom_children (basic_block);
3013 private:
3014 struct ipa_func_body_info *m_fbi;
3017 edge
3018 analysis_dom_walker::before_dom_children (basic_block bb)
3020 ipa_analyze_params_uses_in_bb (m_fbi, bb);
3021 ipa_compute_jump_functions_for_bb (m_fbi, bb);
3022 return NULL;
3025 /* Release body info FBI. */
3027 void
3028 ipa_release_body_info (struct ipa_func_body_info *fbi)
3030 int i;
3031 struct ipa_bb_info *bi;
3033 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
3034 free_ipa_bb_info (bi);
3035 fbi->bb_infos.release ();
3038 /* Initialize the array describing properties of formal parameters
3039 of NODE, analyze their uses and compute jump functions associated
3040 with actual arguments of calls from within NODE. */
3042 void
3043 ipa_analyze_node (struct cgraph_node *node)
3045 struct ipa_func_body_info fbi;
3046 class ipa_node_params *info;
3048 ipa_check_create_node_params ();
3049 ipa_check_create_edge_args ();
3050 info = ipa_node_params_sum->get_create (node);
3052 if (info->analysis_done)
3053 return;
3054 info->analysis_done = 1;
3056 if (ipa_func_spec_opts_forbid_analysis_p (node))
3058 for (int i = 0; i < ipa_get_param_count (info); i++)
3060 ipa_set_param_used (info, i, true);
3061 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
3063 return;
3066 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
3067 push_cfun (func);
3068 calculate_dominance_info (CDI_DOMINATORS);
3069 ipa_initialize_node_params (node);
3070 ipa_analyze_controlled_uses (node);
3072 fbi.node = node;
3073 fbi.info = info;
3074 fbi.bb_infos = vNULL;
3075 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
3076 fbi.param_count = ipa_get_param_count (info);
3077 fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);
3079 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
3081 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
3082 bi->cg_edges.safe_push (cs);
3085 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
3087 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
3088 bi->cg_edges.safe_push (cs);
3091 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
3093 ipa_release_body_info (&fbi);
3094 free_dominance_info (CDI_DOMINATORS);
3095 pop_cfun ();
3098 /* Update the jump functions associated with call graph edge E when the call
3099 graph edge CS is being inlined, assuming that E->caller is already (possibly
3100 indirectly) inlined into CS->callee and that E has not been inlined. */
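/* A hypothetical illustration of the composition performed here (the
   names are made up):

     void helper (int);
     static void callee (int x) { helper (x); }
     void caller (void) { callee (4); }

   Before inlining, edge E from callee to helper carries a pass-through of
   callee's parameter x and edge CS from caller to callee carries the
   constant 4.  Once CS is inlined, E's jump function is composed with
   CS's and becomes the constant 4.  */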
3102 static void
3103 update_jump_functions_after_inlining (struct cgraph_edge *cs,
3104 struct cgraph_edge *e)
3106 ipa_edge_args *top = ipa_edge_args_sum->get (cs);
3107 ipa_edge_args *args = ipa_edge_args_sum->get (e);
3108 if (!args)
3109 return;
3110 int count = ipa_get_cs_argument_count (args);
3111 int i;
3113 for (i = 0; i < count; i++)
3115 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
3116 class ipa_polymorphic_call_context *dst_ctx
3117 = ipa_get_ith_polymorhic_call_context (args, i);
3119 if (dst->agg.items)
3121 struct ipa_agg_jf_item *item;
3122 int j;
3124 FOR_EACH_VEC_ELT (*dst->agg.items, j, item)
3126 int dst_fid;
3127 struct ipa_jump_func *src;
3129 if (item->jftype != IPA_JF_PASS_THROUGH
3130 && item->jftype != IPA_JF_LOAD_AGG)
3131 continue;
3133 dst_fid = item->value.pass_through.formal_id;
3134 if (!top || dst_fid >= ipa_get_cs_argument_count (top))
3136 item->jftype = IPA_JF_UNKNOWN;
3137 continue;
3140 item->value.pass_through.formal_id = -1;
3141 src = ipa_get_ith_jump_func (top, dst_fid);
3142 if (src->type == IPA_JF_CONST)
3144 if (item->jftype == IPA_JF_PASS_THROUGH
3145 && item->value.pass_through.operation == NOP_EXPR)
3147 item->jftype = IPA_JF_CONST;
3148 item->value.constant = src->value.constant.value;
3149 continue;
3152 else if (src->type == IPA_JF_PASS_THROUGH
3153 && src->value.pass_through.operation == NOP_EXPR)
3155 if (item->jftype == IPA_JF_PASS_THROUGH
3156 || !item->value.load_agg.by_ref
3157 || src->value.pass_through.agg_preserved)
3158 item->value.pass_through.formal_id
3159 = src->value.pass_through.formal_id;
3161 else if (src->type == IPA_JF_ANCESTOR)
3163 if (item->jftype == IPA_JF_PASS_THROUGH)
3165 if (!src->value.ancestor.offset)
3166 item->value.pass_through.formal_id
3167 = src->value.ancestor.formal_id;
3169 else if (src->value.ancestor.agg_preserved)
3171 gcc_checking_assert (item->value.load_agg.by_ref);
3173 item->value.pass_through.formal_id
3174 = src->value.ancestor.formal_id;
3175 item->value.load_agg.offset
3176 += src->value.ancestor.offset;
3180 if (item->value.pass_through.formal_id < 0)
3181 item->jftype = IPA_JF_UNKNOWN;
3185 if (!top)
3187 ipa_set_jf_unknown (dst);
3188 continue;
3191 if (dst->type == IPA_JF_ANCESTOR)
3193 struct ipa_jump_func *src;
3194 int dst_fid = dst->value.ancestor.formal_id;
3195 class ipa_polymorphic_call_context *src_ctx
3196 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
3198 /* A variable number of arguments can cause havoc if we try to access
3199 an argument that does not exist in the inlined edge. So make sure we
3200 don't. */
3201 if (dst_fid >= ipa_get_cs_argument_count (top))
3203 ipa_set_jf_unknown (dst);
3204 continue;
3207 src = ipa_get_ith_jump_func (top, dst_fid);
3209 if (src_ctx && !src_ctx->useless_p ())
3211 class ipa_polymorphic_call_context ctx = *src_ctx;
3213 /* TODO: Make type preserved safe WRT contexts. */
3214 if (!ipa_get_jf_ancestor_type_preserved (dst))
3215 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
3216 ctx.offset_by (dst->value.ancestor.offset);
3217 if (!ctx.useless_p ())
3219 if (!dst_ctx)
3221 vec_safe_grow_cleared (args->polymorphic_call_contexts,
3222 count, true);
3223 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3226 dst_ctx->combine_with (ctx);
3230 /* The parameter and argument in an ancestor jump function must have pointer
3231 type, which means the access to the aggregate must be by reference. */
3232 gcc_assert (!src->agg.items || src->agg.by_ref);
3234 if (src->agg.items && dst->value.ancestor.agg_preserved)
3236 struct ipa_agg_jf_item *item;
3237 int j;
3239 /* Currently we do not produce clobber aggregate jump functions,
3240 replace with merging when we do. */
3241 gcc_assert (!dst->agg.items);
3243 dst->agg.items = vec_safe_copy (src->agg.items);
3244 dst->agg.by_ref = src->agg.by_ref;
3245 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
3246 item->offset -= dst->value.ancestor.offset;
3249 if (src->type == IPA_JF_PASS_THROUGH
3250 && src->value.pass_through.operation == NOP_EXPR)
3252 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
3253 dst->value.ancestor.agg_preserved &=
3254 src->value.pass_through.agg_preserved;
3256 else if (src->type == IPA_JF_ANCESTOR)
3258 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
3259 dst->value.ancestor.offset += src->value.ancestor.offset;
3260 dst->value.ancestor.agg_preserved &=
3261 src->value.ancestor.agg_preserved;
3262 dst->value.ancestor.keep_null |= src->value.ancestor.keep_null;
3264 else
3265 ipa_set_jf_unknown (dst);
3267 else if (dst->type == IPA_JF_PASS_THROUGH)
3269 struct ipa_jump_func *src;
3270 /* We must check the range due to calls with a variable number of arguments,
3271 and we cannot combine jump functions with operations. */
3272 if (dst->value.pass_through.operation == NOP_EXPR
3273 && (top && dst->value.pass_through.formal_id
3274 < ipa_get_cs_argument_count (top)))
3276 int dst_fid = dst->value.pass_through.formal_id;
3277 src = ipa_get_ith_jump_func (top, dst_fid);
3278 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
3279 class ipa_polymorphic_call_context *src_ctx
3280 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
3282 if (src_ctx && !src_ctx->useless_p ())
3284 class ipa_polymorphic_call_context ctx = *src_ctx;
3286 /* TODO: Make type preserved safe WRT contexts. */
3287 if (!ipa_get_jf_pass_through_type_preserved (dst))
3288 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
3289 if (!ctx.useless_p ())
3291 if (!dst_ctx)
3293 vec_safe_grow_cleared (args->polymorphic_call_contexts,
3294 count, true);
3295 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3297 dst_ctx->combine_with (ctx);
3300 switch (src->type)
3302 case IPA_JF_UNKNOWN:
3303 ipa_set_jf_unknown (dst);
3304 break;
3305 case IPA_JF_CONST:
3306 ipa_set_jf_cst_copy (dst, src);
3307 break;
3309 case IPA_JF_PASS_THROUGH:
3311 int formal_id = ipa_get_jf_pass_through_formal_id (src);
3312 enum tree_code operation;
3313 operation = ipa_get_jf_pass_through_operation (src);
3315 if (operation == NOP_EXPR)
3317 bool agg_p;
3318 agg_p = dst_agg_p
3319 && ipa_get_jf_pass_through_agg_preserved (src);
3320 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
3322 else if (TREE_CODE_CLASS (operation) == tcc_unary)
3323 ipa_set_jf_unary_pass_through (dst, formal_id, operation);
3324 else
3326 tree operand = ipa_get_jf_pass_through_operand (src);
3327 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
3328 operation);
3330 break;
3332 case IPA_JF_ANCESTOR:
3334 bool agg_p;
3335 agg_p = dst_agg_p
3336 && ipa_get_jf_ancestor_agg_preserved (src);
3337 ipa_set_ancestor_jf (dst,
3338 ipa_get_jf_ancestor_offset (src),
3339 ipa_get_jf_ancestor_formal_id (src),
3340 agg_p,
3341 ipa_get_jf_ancestor_keep_null (src));
3342 break;
3344 default:
3345 gcc_unreachable ();
3348 if (src->agg.items
3349 && (dst_agg_p || !src->agg.by_ref))
3351 /* Currently we do not produce clobber aggregate jump
3352 functions, replace with merging when we do. */
3353 gcc_assert (!dst->agg.items);
3355 dst->agg.by_ref = src->agg.by_ref;
3356 dst->agg.items = vec_safe_copy (src->agg.items);
3359 else
3360 ipa_set_jf_unknown (dst);
3365 /* If TARGET is an addr_expr of a function declaration, make it the
3366 (SPECULATIVE) destination of an indirect edge IE and return the edge.
3367 Otherwise, return NULL. */
3369 struct cgraph_edge *
3370 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
3371 bool speculative)
3373 struct cgraph_node *callee;
3374 bool unreachable = false;
3376 if (TREE_CODE (target) == ADDR_EXPR)
3377 target = TREE_OPERAND (target, 0);
3378 if (TREE_CODE (target) != FUNCTION_DECL)
3380 target = canonicalize_constructor_val (target, NULL);
3381 if (!target || TREE_CODE (target) != FUNCTION_DECL)
3383 /* Member pointer call that goes through a VMT lookup. */
3384 if (ie->indirect_info->member_ptr
3385 /* Or if target is not an invariant expression and we do not
3386 know if it will evaluate to a function at runtime.
3387 This can happen when folding through &VAR, where &VAR
3388 is IP invariant, but VAR itself is not.
3390 TODO: Revisit this when GCC 5 is branched. It seems that
3391 the member_ptr check is not needed and that we may try to fold
3392 the expression and see if VAR is readonly. */
3393 || !is_gimple_ip_invariant (target))
3395 if (dump_enabled_p ())
3397 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3398 "discovered direct call non-invariant %s\n",
3399 ie->caller->dump_name ());
3401 return NULL;
3405 if (dump_enabled_p ())
3407 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3408 "discovered direct call to non-function in %s, "
3409 "making it __builtin_unreachable\n",
3410 ie->caller->dump_name ());
3413 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3414 callee = cgraph_node::get_create (target);
3415 unreachable = true;
3417 else
3418 callee = cgraph_node::get (target);
3420 else
3421 callee = cgraph_node::get (target);
3423 /* Because may-edges are not explicitly represented and the vtable may be
3424 external, we may create the first reference to the object in the unit. */
3425 if (!callee || callee->inlined_to)
3428 /* We had better ensure we can refer to it.
3429 In the case of static functions we are out of luck, since we have already
3430 removed the body. In the case of public functions we may or may
3431 not introduce the reference. */
3432 if (!canonicalize_constructor_val (target, NULL)
3433 || !TREE_PUBLIC (target))
3435 if (dump_file)
3436 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
3437 "(%s -> %s) but cannot refer to it. Giving up.\n",
3438 ie->caller->dump_name (),
3439 ie->callee->dump_name ());
3440 return NULL;
3442 callee = cgraph_node::get_create (target);
3445 /* If the edge is already speculated. */
3446 if (speculative && ie->speculative)
3448 if (dump_file)
3450 cgraph_edge *e2 = ie->speculative_call_for_target (callee);
3451 if (!e2)
3453 if (dump_file)
3454 fprintf (dump_file, "ipa-prop: Discovered call to a "
3455 "speculative target (%s -> %s) but the call is "
3456 "already speculated to different target. "
3457 "Giving up.\n",
3458 ie->caller->dump_name (), callee->dump_name ());
3460 else
3462 if (dump_file)
3463 fprintf (dump_file,
3464 "ipa-prop: Discovered call to a speculative target "
3465 "(%s -> %s) this agree with previous speculation.\n",
3466 ie->caller->dump_name (), callee->dump_name ());
3469 return NULL;
3472 if (!dbg_cnt (devirt))
3473 return NULL;
3475 ipa_check_create_node_params ();
3477 /* We cannot make edges to inline clones. It is a bug that someone removed
3478 the cgraph node too early. */
3479 gcc_assert (!callee->inlined_to);
3481 if (dump_file && !unreachable)
3483 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
3484 "(%s -> %s), for stmt ",
3485 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
3486 speculative ? "speculative" : "known",
3487 ie->caller->dump_name (),
3488 callee->dump_name ());
3489 if (ie->call_stmt)
3490 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
3491 else
3492 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
3494 if (dump_enabled_p ())
3496 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3497 "converting indirect call in %s to direct call to %s\n",
3498 ie->caller->dump_name (), callee->dump_name ());
3500 if (!speculative)
3502 struct cgraph_edge *orig = ie;
3503 ie = cgraph_edge::make_direct (ie, callee);
3504 /* If we resolved a speculative edge, the cost is already up to date
3505 for the direct call (adjusted by inline_edge_duplication_hook). */
3506 if (ie == orig)
3508 ipa_call_summary *es = ipa_call_summaries->get (ie);
3509 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
3510 - eni_size_weights.call_cost);
3511 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
3512 - eni_time_weights.call_cost);
3515 else
3517 if (!callee->can_be_discarded_p ())
3519 cgraph_node *alias;
3520 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
3521 if (alias)
3522 callee = alias;
3524 /* make_speculative will update ie's cost to direct call cost. */
3525 ie = ie->make_speculative
3526 (callee, ie->count.apply_scale (8, 10));
3529 return ie;
3532 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
3533 CONSTRUCTOR and return it. Return NULL if the search fails for some
3534 reason. */
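/* For instance (illustrative, assuming 32-bit int): given

     static const int a[4] = { 10, 11, 12, 13 };

   searching the CONSTRUCTOR of a with REQ_OFFSET == 64 bits returns the
   constant 12.  */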
3536 static tree
3537 find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
3539 tree type = TREE_TYPE (constructor);
3540 if (TREE_CODE (type) != ARRAY_TYPE
3541 && TREE_CODE (type) != RECORD_TYPE)
3542 return NULL;
3544 unsigned ix;
3545 tree index, val;
3546 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
3548 HOST_WIDE_INT elt_offset;
3549 if (TREE_CODE (type) == ARRAY_TYPE)
3551 offset_int off;
3552 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
3553 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3555 if (index)
3557 if (TREE_CODE (index) == RANGE_EXPR)
3558 off = wi::to_offset (TREE_OPERAND (index, 0));
3559 else
3560 off = wi::to_offset (index);
3561 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
3563 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
3564 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3565 off = wi::sext (off - wi::to_offset (low_bound),
3566 TYPE_PRECISION (TREE_TYPE (index)));
3568 off *= wi::to_offset (unit_size);
3569 /* ??? Handle more than just the first index of a
3570 RANGE_EXPR. */
3572 else
3573 off = wi::to_offset (unit_size) * ix;
3575 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
3576 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
3577 continue;
3578 elt_offset = off.to_shwi ();
3580 else if (TREE_CODE (type) == RECORD_TYPE)
3582 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
3583 if (DECL_BIT_FIELD (index))
3584 continue;
3585 elt_offset = int_bit_position (index);
3587 else
3588 gcc_unreachable ();
3590 if (elt_offset > req_offset)
3591 return NULL;
3593 if (TREE_CODE (val) == CONSTRUCTOR)
3594 return find_constructor_constant_at_offset (val,
3595 req_offset - elt_offset);
3597 if (elt_offset == req_offset
3598 && is_gimple_reg_type (TREE_TYPE (val))
3599 && is_gimple_ip_invariant (val))
3600 return val;
3602 return NULL;
3605 /* Check whether SCALAR could be used to look up an aggregate interprocedural
3606 invariant from a static constructor and if so, return it. Otherwise return
3607 NULL. */
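/* For instance (illustrative): with

     static const struct { int x; int y; } s = { 7, 9 };

   SCALAR == &s, BY_REF == true and OFFSET == 0 would yield the
   constant 7.  */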
3609 static tree
3610 ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
3612 if (by_ref)
3614 if (TREE_CODE (scalar) != ADDR_EXPR)
3615 return NULL;
3616 scalar = TREE_OPERAND (scalar, 0);
3619 if (!VAR_P (scalar)
3620 || !is_global_var (scalar)
3621 || !TREE_READONLY (scalar)
3622 || !DECL_INITIAL (scalar)
3623 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
3624 return NULL;
3626 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
3629 /* Retrieve a value from AGG, a set of known offset/value pairs for an
3630 aggregate, or from the static initializer of SCALAR (which can be NULL) at
3631 the given OFFSET; return NULL if there is none. BY_REF specifies whether the value has to be
3632 passed by reference or by value. If FROM_GLOBAL_CONSTANT is non-NULL, then
3633 the boolean it points to is set to true if the value comes from an
3634 initializer of a constant. */
3636 tree
3637 ipa_find_agg_cst_for_param (const ipa_agg_value_set *agg, tree scalar,
3638 HOST_WIDE_INT offset, bool by_ref,
3639 bool *from_global_constant)
3641 struct ipa_agg_value *item;
3642 int i;
3644 if (scalar)
3646 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
3647 if (res)
3649 if (from_global_constant)
3650 *from_global_constant = true;
3651 return res;
3655 if (!agg
3656 || by_ref != agg->by_ref)
3657 return NULL;
3659 FOR_EACH_VEC_ELT (agg->items, i, item)
3660 if (item->offset == offset)
3662 /* Currently we do not have clobber values, return NULL for them once
3663 we do. */
3664 gcc_checking_assert (is_gimple_ip_invariant (item->value));
3665 if (from_global_constant)
3666 *from_global_constant = false;
3667 return item->value;
3669 return NULL;
3672 /* Remove a reference to SYMBOL from the list of references of a node given by
3673 reference description RDESC. Return true if the reference has been
3674 successfully found and removed. */
3676 static bool
3677 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
3679 struct ipa_ref *to_del;
3680 struct cgraph_edge *origin;
3682 origin = rdesc->cs;
3683 if (!origin)
3684 return false;
3685 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3686 origin->lto_stmt_uid);
3687 if (!to_del)
3688 return false;
3690 to_del->remove_reference ();
3691 if (dump_file)
3692 fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
3693 origin->caller->dump_name (), symbol->dump_name ());
3694 return true;
3697 /* If JFUNC has a reference description with refcount different from
3698 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3699 NULL. JFUNC must be a constant jump function. */
3701 static struct ipa_cst_ref_desc *
3702 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3704 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3705 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3706 return rdesc;
3707 else
3708 return NULL;
3711 /* If the value of constant jump function JFUNC is an address of a function
3712 or variable declaration, return the associated symtab node. Otherwise
3713 return NULL. */
3715 static symtab_node *
3716 symtab_node_for_jfunc (struct ipa_jump_func *jfunc)
3718 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3719 tree cst = ipa_get_jf_constant (jfunc);
3720 if (TREE_CODE (cst) != ADDR_EXPR
3721 || (TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL
3722 && TREE_CODE (TREE_OPERAND (cst, 0)) != VAR_DECL))
3723 return NULL;
3725 return symtab_node::get (TREE_OPERAND (cst, 0));
3729 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3730 refcount, and if it hits zero, remove the reference to the associated symbol
3731 from the caller of the edge specified in the rdesc. Return false if either
3732 the symbol or the reference could not be found, otherwise return true. */
3734 static bool
3735 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3737 struct ipa_cst_ref_desc *rdesc;
3738 if (jfunc->type == IPA_JF_CONST
3739 && (rdesc = jfunc_rdesc_usable (jfunc))
3740 && --rdesc->refcount == 0)
3742 symtab_node *symbol = symtab_node_for_jfunc (jfunc);
3743 if (!symbol)
3744 return false;
3746 return remove_described_reference (symbol, rdesc);
3748 return true;
3751 /* Try to find a destination for indirect edge IE that corresponds to a simple
3752 call or a call of a member function pointer and where the destination is a
3753 pointer formal parameter described by jump function JFUNC. TARGET_TYPE is
3754 the type of the parameter to which the result of JFUNC is passed. If it can
3755 be determined, return the newly direct edge, otherwise return NULL.
3756 NEW_ROOT and NEW_ROOT_INFO are the node and its info that JFUNC lattices are
3757 relative to. */
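/* A hypothetical illustration (the names are made up):

     void run (void (*cb) (void)) { cb (); }
     extern void f (void);
     ... run (f); ...

   When JFUNC shows that the constant f flows into the parameter cb, the
   indirect call cb () can be turned into a direct call to f.  */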
3759 static struct cgraph_edge *
3760 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3761 struct ipa_jump_func *jfunc, tree target_type,
3762 struct cgraph_node *new_root,
3763 class ipa_node_params *new_root_info)
3765 struct cgraph_edge *cs;
3766 tree target;
3767 bool agg_contents = ie->indirect_info->agg_contents;
3768 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
3769 if (agg_contents)
3771 bool from_global_constant;
3772 ipa_agg_value_set agg = ipa_agg_value_set_from_jfunc (new_root_info,
3773 new_root,
3774 &jfunc->agg);
3775 target = ipa_find_agg_cst_for_param (&agg, scalar,
3776 ie->indirect_info->offset,
3777 ie->indirect_info->by_ref,
3778 &from_global_constant);
3779 agg.release ();
3780 if (target
3781 && !from_global_constant
3782 && !ie->indirect_info->guaranteed_unmodified)
3783 return NULL;
3785 else
3786 target = scalar;
3787 if (!target)
3788 return NULL;
3789 cs = ipa_make_edge_direct_to_target (ie, target);
3791 if (cs && !agg_contents)
3793 bool ok;
3794 gcc_checking_assert (cs->callee
3795 && (cs != ie
3796 || jfunc->type != IPA_JF_CONST
3797 || !symtab_node_for_jfunc (jfunc)
3798 || cs->callee == symtab_node_for_jfunc (jfunc)));
3799 ok = try_decrement_rdesc_refcount (jfunc);
3800 gcc_checking_assert (ok);
3803 return cs;
3806 /* Return the target to be used in cases of impossible devirtualization. IE
3807 and TARGET (the latter can be NULL) are dumped when dumping is enabled. */
3809 tree
3810 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3812 if (dump_file)
3814 if (target)
3815 fprintf (dump_file,
3816 "Type inconsistent devirtualization: %s->%s\n",
3817 ie->caller->dump_name (),
3818 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3819 else
3820 fprintf (dump_file,
3821 "No devirtualization target in %s\n",
3822 ie->caller->dump_name ());
3824 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3825 cgraph_node::get_create (new_target);
3826 return new_target;
3829 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3830 call based on a formal parameter which is described by jump function JFUNC
3831 and if it can be determined, make it direct and return the direct edge.
3832 Otherwise, return NULL. CTX describes the polymorphic context that the
3833 parameter the call is based on brings along with it. NEW_ROOT and
3834 NEW_ROOT_INFO are the node and its info that JFUNC lattices are relative
3835 to. */
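/* A hypothetical illustration (the names are made up):

     struct A { virtual int f (); };
     int call (A *p) { return p->f (); }

   If JFUNC and CTX prove that the dynamic type of the object p points to
   is A, the OBJ_TYPE_REF call can be devirtualized into a direct call to
   A::f.  */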
3837 static struct cgraph_edge *
3838 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3839 struct ipa_jump_func *jfunc,
3840 class ipa_polymorphic_call_context ctx,
3841 struct cgraph_node *new_root,
3842 class ipa_node_params *new_root_info)
3844 tree target = NULL;
3845 bool speculative = false;
3847 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
3848 return NULL;
3850 gcc_assert (!ie->indirect_info->by_ref);
3852 /* Try to do a lookup via a known virtual table pointer value. */
3853 if (!ie->indirect_info->vptr_changed
3854 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
3856 tree vtable;
3857 unsigned HOST_WIDE_INT offset;
3858 tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3859 : NULL;
3860 ipa_agg_value_set agg = ipa_agg_value_set_from_jfunc (new_root_info,
3861 new_root,
3862 &jfunc->agg);
3863 tree t = ipa_find_agg_cst_for_param (&agg, scalar,
3864 ie->indirect_info->offset,
3865 true);
3866 agg.release ();
3867 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3869 bool can_refer;
3870 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3871 vtable, offset, &can_refer);
3872 if (can_refer)
3874 if (!t
3875 || fndecl_built_in_p (t, BUILT_IN_UNREACHABLE)
3876 || !possible_polymorphic_call_target_p
3877 (ie, cgraph_node::get (t)))
3879 /* Do not speculate to builtin_unreachable; that would be pointless. */
3880 if (!ie->indirect_info->vptr_changed)
3881 target = ipa_impossible_devirt_target (ie, target);
3882 else
3883 target = NULL;
3885 else
3887 target = t;
3888 speculative = ie->indirect_info->vptr_changed;
3894 ipa_polymorphic_call_context ie_context (ie);
3895 vec <cgraph_node *> targets;
3896 bool final;
3898 ctx.offset_by (ie->indirect_info->offset);
3899 if (ie->indirect_info->vptr_changed)
3900 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3901 ie->indirect_info->otr_type);
3902 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3903 targets = possible_polymorphic_call_targets
3904 (ie->indirect_info->otr_type,
3905 ie->indirect_info->otr_token,
3906 ctx, &final);
3907 if (final && targets.length () <= 1)
3909 speculative = false;
3910 if (targets.length () == 1)
3911 target = targets[0]->decl;
3912 else
3913 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3915 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3916 && !ie->speculative && ie->maybe_hot_p ())
3918 cgraph_node *n;
3919 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3920 ie->indirect_info->otr_token,
3921 ie->indirect_info->context);
3922 if (n)
3924 target = n->decl;
3925 speculative = true;
3929 if (target)
3931 if (!possible_polymorphic_call_target_p
3932 (ie, cgraph_node::get_create (target)))
3934 if (speculative)
3935 return NULL;
3936 target = ipa_impossible_devirt_target (ie, target);
3938 return ipa_make_edge_direct_to_target (ie, target, speculative);
3940 else
3941 return NULL;
3944 /* Update the param called notes associated with NODE when CS is being inlined,
3945 assuming NODE is (potentially indirectly) inlined into CS->callee.
3946 Moreover, if the callee is discovered to be constant, create a new cgraph
3947 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3948 unless NEW_EDGES is NULL. Return true iff any new edges were created. */
3950 static bool
3951 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3952 struct cgraph_node *node,
3953 vec<cgraph_edge *> *new_edges)
3955 class ipa_edge_args *top;
3956 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3957 struct cgraph_node *new_root;
3958 class ipa_node_params *new_root_info, *inlined_node_info;
3959 bool res = false;
3961 ipa_check_create_edge_args ();
3962 top = ipa_edge_args_sum->get (cs);
3963 new_root = cs->caller->inlined_to
3964 ? cs->caller->inlined_to : cs->caller;
3965 new_root_info = ipa_node_params_sum->get (new_root);
3966 inlined_node_info = ipa_node_params_sum->get (cs->callee->function_symbol ());
3968 for (ie = node->indirect_calls; ie; ie = next_ie)
3970 class cgraph_indirect_call_info *ici = ie->indirect_info;
3971 struct ipa_jump_func *jfunc;
3972 int param_index;
3974 next_ie = ie->next_callee;
3976 if (ici->param_index == -1)
3977 continue;
3979 /* We must check the range because of calls with a variable number of arguments. */
3980 if (!top || ici->param_index >= ipa_get_cs_argument_count (top))
3982 ici->param_index = -1;
3983 continue;
3986 param_index = ici->param_index;
3987 jfunc = ipa_get_ith_jump_func (top, param_index);
3989 auto_vec<cgraph_node *, 4> spec_targets;
3990 if (ie->speculative)
3991 for (cgraph_edge *direct = ie->first_speculative_call_target ();
3992 direct;
3993 direct = direct->next_speculative_call_target ())
3994 spec_targets.safe_push (direct->callee);
3996 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3997 new_direct_edge = NULL;
3998 else if (ici->polymorphic)
4000 ipa_polymorphic_call_context ctx;
4001 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
4002 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx,
4003 new_root,
4004 new_root_info);
4006 else
4008 tree target_type = ipa_get_type (inlined_node_info, param_index);
4009 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
4010 target_type,
4011 new_root,
4012 new_root_info);
4015 /* If speculation was removed, then we need to do nothing. */
4016 if (new_direct_edge && new_direct_edge != ie
4017 && spec_targets.contains (new_direct_edge->callee))
4019 new_direct_edge->indirect_inlining_edge = 1;
4020 res = true;
4021 if (!new_direct_edge->speculative)
4022 continue;
4024 else if (new_direct_edge)
4026 new_direct_edge->indirect_inlining_edge = 1;
4027 if (new_edges)
4029 new_edges->safe_push (new_direct_edge);
4030 res = true;
4032 /* If a speculative edge was introduced, we still need to update
4033 the call info of the indirect edge. */
4034 if (!new_direct_edge->speculative)
4035 continue;
4037 if (jfunc->type == IPA_JF_PASS_THROUGH
4038 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
4040 if (ici->agg_contents
4041 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
4042 && !ici->polymorphic)
4043 ici->param_index = -1;
4044 else
4046 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
4047 if (ici->polymorphic
4048 && !ipa_get_jf_pass_through_type_preserved (jfunc))
4049 ici->vptr_changed = true;
4050 ipa_set_param_used_by_indirect_call (new_root_info,
4051 ici->param_index, true);
4052 if (ici->polymorphic)
4053 ipa_set_param_used_by_polymorphic_call (new_root_info,
4054 ici->param_index, true);
4057 else if (jfunc->type == IPA_JF_ANCESTOR)
4059 if (ici->agg_contents
4060 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
4061 && !ici->polymorphic)
4062 ici->param_index = -1;
4063 else
4065 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
4066 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
4067 if (ici->polymorphic
4068 && !ipa_get_jf_ancestor_type_preserved (jfunc))
4069 ici->vptr_changed = true;
4070 ipa_set_param_used_by_indirect_call (new_root_info,
4071 ici->param_index, true);
4072 if (ici->polymorphic)
4073 ipa_set_param_used_by_polymorphic_call (new_root_info,
4074 ici->param_index, true);
4077 else
4078 /* Either we can find a destination for this edge now or never. */
4079 ici->param_index = -1;
4082 return res;
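/* A sketch of the re-mapping above, using hypothetical functions: suppose
   foo () calls bar (x), bar makes an indirect call through its first formal
   parameter, and the jump function of that formal is a NOP_EXPR pass-through
   of foo's second parameter.  After bar is inlined into foo, ici->param_index
   is rewritten from bar's formal 0 to foo's parameter 1, so later IPA-CP of
   foo can still make the call direct.  */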
4085 /* Recursively traverse subtree of NODE (including node) made of inlined
4086 cgraph_edges when CS has been inlined and invoke
4087 update_indirect_edges_after_inlining on all nodes and
4088 update_jump_functions_after_inlining on all non-inlined edges that lead out
4089 of this subtree. Newly discovered indirect edges will be added to
4090 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
4091 created. */
4093 static bool
4094 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
4095 struct cgraph_node *node,
4096 vec<cgraph_edge *> *new_edges)
4098 struct cgraph_edge *e;
4099 bool res;
4101 res = update_indirect_edges_after_inlining (cs, node, new_edges);
4103 for (e = node->callees; e; e = e->next_callee)
4104 if (!e->inline_failed)
4105 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
4106 else
4107 update_jump_functions_after_inlining (cs, e);
4108 for (e = node->indirect_calls; e; e = e->next_callee)
4109 update_jump_functions_after_inlining (cs, e);
4111 return res;
4114 /* Combine two controlled uses counts as done during inlining. */
4116 static int
4117 combine_controlled_uses_counters (int c, int d)
4119 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
4120 return IPA_UNDESCRIBED_USE;
4121 else
4122 return c + d - 1;
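/* For example, combining c == 2 described uses in the caller with d == 3
   uses in the callee yields 2 + 3 - 1 == 4: the use at the call site itself,
   counted in c, disappears with inlining and is replaced by the callee's d
   uses.  IPA_UNDESCRIBED_USE on either side keeps the result undescribed.  */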
4125 /* Propagate the number of controlled uses from CS->callee to the new root of the
4126 tree of inlined nodes. */
4128 static void
4129 propagate_controlled_uses (struct cgraph_edge *cs)
4131 ipa_edge_args *args = ipa_edge_args_sum->get (cs);
4132 if (!args)
4133 return;
4134 struct cgraph_node *new_root = cs->caller->inlined_to
4135 ? cs->caller->inlined_to : cs->caller;
4136 ipa_node_params *new_root_info = ipa_node_params_sum->get (new_root);
4137 ipa_node_params *old_root_info = ipa_node_params_sum->get (cs->callee);
4138 int count, i;
4140 if (!old_root_info)
4141 return;
4143 count = MIN (ipa_get_cs_argument_count (args),
4144 ipa_get_param_count (old_root_info));
4145 for (i = 0; i < count; i++)
4147 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
4148 struct ipa_cst_ref_desc *rdesc;
4150 if (jf->type == IPA_JF_PASS_THROUGH)
4152 int src_idx, c, d;
4153 src_idx = ipa_get_jf_pass_through_formal_id (jf);
4154 c = ipa_get_controlled_uses (new_root_info, src_idx);
4155 d = ipa_get_controlled_uses (old_root_info, i);
4157 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
4158 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
4159 c = combine_controlled_uses_counters (c, d);
4160 ipa_set_controlled_uses (new_root_info, src_idx, c);
4161 bool lderef = true;
4162 if (c != IPA_UNDESCRIBED_USE)
4164 lderef = (ipa_get_param_load_dereferenced (new_root_info, src_idx)
4165 || ipa_get_param_load_dereferenced (old_root_info, i));
4166 ipa_set_param_load_dereferenced (new_root_info, src_idx, lderef);
4169 if (c == 0 && !lderef && new_root_info->ipcp_orig_node)
4171 struct cgraph_node *n;
4172 struct ipa_ref *ref;
4173 tree t = new_root_info->known_csts[src_idx];
4175 if (t && TREE_CODE (t) == ADDR_EXPR
4176 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
4177 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
4178 && (ref = new_root->find_reference (n, NULL, 0)))
4180 if (dump_file)
4181 fprintf (dump_file, "ipa-prop: Removing cloning-created "
4182 "reference from %s to %s.\n",
4183 new_root->dump_name (),
4184 n->dump_name ());
4185 ref->remove_reference ();
4189 else if (jf->type == IPA_JF_CONST
4190 && (rdesc = jfunc_rdesc_usable (jf)))
4192 int d = ipa_get_controlled_uses (old_root_info, i);
4193 int c = rdesc->refcount;
4194 tree cst = ipa_get_jf_constant (jf);
4195 rdesc->refcount = combine_controlled_uses_counters (c, d);
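/* The rdesc refcount effectively plays the role of the caller-side
   controlled-uses count for this particular constant, so the same
   c + d - 1 combination rule applies here.  */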
4196 if (rdesc->refcount != IPA_UNDESCRIBED_USE
4197 && ipa_get_param_load_dereferenced (old_root_info, i)
4198 && TREE_CODE (cst) == ADDR_EXPR
4199 && TREE_CODE (TREE_OPERAND (cst, 0)) == VAR_DECL)
4201 symtab_node *n = symtab_node::get (TREE_OPERAND (cst, 0));
4202 new_root->create_reference (n, IPA_REF_LOAD, NULL);
4203 if (dump_file)
4204 fprintf (dump_file, "ipa-prop: Address IPA constant will reach "
4205 "a load so adding LOAD reference from %s to %s.\n",
4206 new_root->dump_name (), n->dump_name ());
4208 if (rdesc->refcount == 0)
4210 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
4211 && ((TREE_CODE (TREE_OPERAND (cst, 0))
4212 == FUNCTION_DECL)
4213 || (TREE_CODE (TREE_OPERAND (cst, 0))
4214 == VAR_DECL)));
4216 symtab_node *n = symtab_node::get (TREE_OPERAND (cst, 0));
4217 if (n)
4219 remove_described_reference (n, rdesc);
4220 cgraph_node *clone = cs->caller;
4221 while (clone->inlined_to
4222 && clone->ipcp_clone
4223 && clone != rdesc->cs->caller)
4225 struct ipa_ref *ref;
4226 ref = clone->find_reference (n, NULL, 0);
4227 if (ref)
4229 if (dump_file)
4230 fprintf (dump_file, "ipa-prop: Removing "
4231 "cloning-created reference "
4232 "from %s to %s.\n",
4233 clone->dump_name (),
4234 n->dump_name ());
4235 ref->remove_reference ();
4237 clone = clone->callers->caller;
4244 for (i = ipa_get_param_count (old_root_info);
4245 i < ipa_get_cs_argument_count (args);
4246 i++)
4248 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
4250 if (jf->type == IPA_JF_CONST)
4252 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
4253 if (rdesc)
4254 rdesc->refcount = IPA_UNDESCRIBED_USE;
4256 else if (jf->type == IPA_JF_PASS_THROUGH)
4257 ipa_set_controlled_uses (new_root_info,
4258 jf->value.pass_through.formal_id,
4259 IPA_UNDESCRIBED_USE);
4263 /* Update jump functions and call note functions on inlining the call site CS.
4264 CS is expected to lead to a node already cloned by
4265 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
4266 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
4267 created. */
4269 bool
4270 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
4271 vec<cgraph_edge *> *new_edges)
4273 bool changed;
4274 /* Do nothing if the preparation phase has not been carried out yet
4275 (i.e. during early inlining). */
4276 if (!ipa_node_params_sum)
4277 return false;
4278 gcc_assert (ipa_edge_args_sum);
4280 propagate_controlled_uses (cs);
4281 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
4282 ipa_node_params_sum->remove (cs->callee);
4284 ipa_edge_args *args = ipa_edge_args_sum->get (cs);
4285 if (args)
4287 bool ok = true;
4288 if (args->jump_functions)
4290 struct ipa_jump_func *jf;
4291 int i;
4292 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
4293 if (jf->type == IPA_JF_CONST
4294 && ipa_get_jf_constant_rdesc (jf))
4296 ok = false;
4297 break;
4300 if (ok)
4301 ipa_edge_args_sum->remove (cs);
4303 if (ipcp_transformation_sum)
4304 ipcp_transformation_sum->remove (cs->callee);
4306 return changed;
4309 /* Ensure that the array of edge argument infos is big enough to accommodate a
4310 structure for all edges and reallocate it if not. Also, allocate the
4311 associated hash tables if they do not already exist. */
4313 void
4314 ipa_check_create_edge_args (void)
4316 if (!ipa_edge_args_sum)
4317 ipa_edge_args_sum
4318 = (new (ggc_alloc_no_dtor<ipa_edge_args_sum_t> ())
4319 ipa_edge_args_sum_t (symtab, true));
4320 if (!ipa_bits_hash_table)
4321 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
4322 if (!ipa_vr_hash_table)
4323 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
4326 /* Free all ipa_edge structures. */
4328 void
4329 ipa_free_all_edge_args (void)
4331 if (!ipa_edge_args_sum)
4332 return;
4334 ggc_delete (ipa_edge_args_sum);
4335 ipa_edge_args_sum = NULL;
4338 /* Free all ipa_node_params structures. */
4340 void
4341 ipa_free_all_node_params (void)
4343 if (ipa_node_params_sum)
4344 ggc_delete (ipa_node_params_sum);
4345 ipa_node_params_sum = NULL;
4348 /* Initialize IPA CP transformation summary and also allocate any necessary hash
4349 tables if they do not already exist. */
4351 void
4352 ipcp_transformation_initialize (void)
4354 if (!ipa_bits_hash_table)
4355 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
4356 if (!ipa_vr_hash_table)
4357 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
4358 if (ipcp_transformation_sum == NULL)
4360 ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
4361 ipcp_transformation_sum->disable_insertion_hook ();
4365 /* Release the IPA CP transformation summary. */
4367 void
4368 ipcp_free_transformation_sum (void)
4370 if (!ipcp_transformation_sum)
4371 return;
4373 ipcp_transformation_sum->~function_summary<ipcp_transformation *> ();
4374 ggc_free (ipcp_transformation_sum);
4375 ipcp_transformation_sum = NULL;
4378 /* Set the aggregate replacements of NODE to be AGGVALS. */
4380 void
4381 ipa_set_node_agg_value_chain (struct cgraph_node *node,
4382 struct ipa_agg_replacement_value *aggvals)
4384 ipcp_transformation_initialize ();
4385 ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
4386 s->agg_values = aggvals;
4389 /* Hook that is called by cgraph.cc when an edge is removed. Adjust reference
4390 count data structures accordingly. */
4392 void
4393 ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
4395 if (args->jump_functions)
4397 struct ipa_jump_func *jf;
4398 int i;
4399 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
4401 struct ipa_cst_ref_desc *rdesc;
4402 try_decrement_rdesc_refcount (jf);
4403 if (jf->type == IPA_JF_CONST
4404 && (rdesc = ipa_get_jf_constant_rdesc (jf))
4405 && rdesc->cs == cs)
4406 rdesc->cs = NULL;
4411 /* Method invoked when an edge is duplicated. Copy ipa_edge_args and adjust
4412 reference count data structures accordingly. */
4414 void
4415 ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
4416 ipa_edge_args *old_args, ipa_edge_args *new_args)
4418 unsigned int i;
4420 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
4421 if (old_args->polymorphic_call_contexts)
4422 new_args->polymorphic_call_contexts
4423 = vec_safe_copy (old_args->polymorphic_call_contexts);
4425 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
4427 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
4428 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
4430 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
4432 if (src_jf->type == IPA_JF_CONST)
4434 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
4436 if (!src_rdesc)
4437 dst_jf->value.constant.rdesc = NULL;
4438 else if (src->caller == dst->caller)
4440 /* Creation of a speculative edge. If the source edge is the one
4441 grabbing a reference, we must create a new (duplicate)
4442 reference description. Otherwise they refer to the same
4443 description corresponding to a reference taken in a function
4444 src->caller is inlined to. In that case we only need to
4445 increment the refcount. */
4446 if (src_rdesc->cs == src)
4448 symtab_node *n = symtab_node_for_jfunc (src_jf);
4449 gcc_checking_assert (n);
4450 ipa_ref *ref
4451 = src->caller->find_reference (n, src->call_stmt,
4452 src->lto_stmt_uid);
4453 gcc_checking_assert (ref);
4454 dst->caller->clone_reference (ref, ref->stmt);
4456 ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
4457 dst_rdesc->cs = dst;
4458 dst_rdesc->refcount = src_rdesc->refcount;
4459 dst_rdesc->next_duplicate = NULL;
4460 dst_jf->value.constant.rdesc = dst_rdesc;
4462 else
4464 src_rdesc->refcount++;
4465 dst_jf->value.constant.rdesc = src_rdesc;
4468 else if (src_rdesc->cs == src)
4470 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
4471 dst_rdesc->cs = dst;
4472 dst_rdesc->refcount = src_rdesc->refcount;
4473 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
4474 src_rdesc->next_duplicate = dst_rdesc;
4475 dst_jf->value.constant.rdesc = dst_rdesc;
4477 else
4479 struct ipa_cst_ref_desc *dst_rdesc;
4480 /* This can happen during inlining, when a JFUNC refers to a
4481 reference taken in a function up in the tree of inline clones.
4482 We need to find the duplicate that refers to our tree of
4483 inline clones. */
4485 gcc_assert (dst->caller->inlined_to);
4486 for (dst_rdesc = src_rdesc->next_duplicate;
4487 dst_rdesc;
4488 dst_rdesc = dst_rdesc->next_duplicate)
4490 struct cgraph_node *top;
4491 top = dst_rdesc->cs->caller->inlined_to
4492 ? dst_rdesc->cs->caller->inlined_to
4493 : dst_rdesc->cs->caller;
4494 if (dst->caller->inlined_to == top)
4495 break;
4497 gcc_assert (dst_rdesc);
4498 dst_jf->value.constant.rdesc = dst_rdesc;
4501 else if (dst_jf->type == IPA_JF_PASS_THROUGH
4502 && src->caller == dst->caller)
4504 struct cgraph_node *inline_root = dst->caller->inlined_to
4505 ? dst->caller->inlined_to : dst->caller;
4506 ipa_node_params *root_info = ipa_node_params_sum->get (inline_root);
4507 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
4509 int c = ipa_get_controlled_uses (root_info, idx);
4510 if (c != IPA_UNDESCRIBED_USE)
4512 c++;
4513 ipa_set_controlled_uses (root_info, idx, c);
4519 /* Analyze a function newly added to the callgraph. */
4521 static void
4522 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
4524 if (node->has_gimple_body_p ())
4525 ipa_analyze_node (node);
4528 /* Hook that is called by summary when a node is duplicated. */
4530 void
4531 ipa_node_params_t::duplicate (cgraph_node *src, cgraph_node *dst,
4532 ipa_node_params *old_info,
4533 ipa_node_params *new_info)
4535 ipa_agg_replacement_value *old_av, *new_av;
4537 new_info->descriptors = vec_safe_copy (old_info->descriptors);
4538 new_info->lattices = NULL;
4539 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
4540 new_info->known_csts = old_info->known_csts.copy ();
4541 new_info->known_contexts = old_info->known_contexts.copy ();
4543 new_info->analysis_done = old_info->analysis_done;
4544 new_info->node_enqueued = old_info->node_enqueued;
4545 new_info->versionable = old_info->versionable;
4547 old_av = ipa_get_agg_replacements_for_node (src);
4548 if (old_av)
4550 new_av = NULL;
4551 while (old_av)
4553 struct ipa_agg_replacement_value *v;
4555 v = ggc_alloc<ipa_agg_replacement_value> ();
4556 memcpy (v, old_av, sizeof (*v));
4557 v->next = new_av;
4558 new_av = v;
4559 old_av = old_av->next;
4561 ipa_set_node_agg_value_chain (dst, new_av);
4565 /* Duplication of ipcp transformation summaries. */
4567 void
4568 ipcp_transformation_t::duplicate (cgraph_node *, cgraph_node *dst,
4569 ipcp_transformation *src_trans,
4570 ipcp_transformation *dst_trans)
4572 /* Avoid redundant work of duplicating vectors we will never use. */
4573 if (dst->inlined_to)
4574 return;
4575 dst_trans->bits = vec_safe_copy (src_trans->bits);
4576 dst_trans->m_vr = vec_safe_copy (src_trans->m_vr);
4577 ipa_agg_replacement_value *agg = src_trans->agg_values,
4578 **aggptr = &dst_trans->agg_values;
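/* AGGPTR always addresses the head pointer or the 'next' field to be filled
   in next, so the copied chain preserves the order of the original.  */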
4579 while (agg)
4581 *aggptr = ggc_alloc<ipa_agg_replacement_value> ();
4582 **aggptr = *agg;
4583 agg = agg->next;
4584 aggptr = &(*aggptr)->next;
4588 /* Register our cgraph hooks if they are not already there. */
4590 void
4591 ipa_register_cgraph_hooks (void)
4593 ipa_check_create_node_params ();
4594 ipa_check_create_edge_args ();
4596 function_insertion_hook_holder =
4597 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
4600 /* Unregister our cgraph hooks if they have been registered. */
4602 static void
4603 ipa_unregister_cgraph_hooks (void)
4605 if (function_insertion_hook_holder)
4606 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
4607 function_insertion_hook_holder = NULL;
4610 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4611 longer needed after ipa-cp. */
4613 void
4614 ipa_free_all_structures_after_ipa_cp (void)
4616 if (!optimize && !in_lto_p)
4618 ipa_free_all_edge_args ();
4619 ipa_free_all_node_params ();
4620 ipcp_sources_pool.release ();
4621 ipcp_cst_values_pool.release ();
4622 ipcp_poly_ctx_values_pool.release ();
4623 ipcp_agg_lattice_pool.release ();
4624 ipa_unregister_cgraph_hooks ();
4625 ipa_refdesc_pool.release ();
4629 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4630 longer needed after indirect inlining. */
4632 void
4633 ipa_free_all_structures_after_iinln (void)
4635 ipa_free_all_edge_args ();
4636 ipa_free_all_node_params ();
4637 ipa_unregister_cgraph_hooks ();
4638 ipcp_sources_pool.release ();
4639 ipcp_cst_values_pool.release ();
4640 ipcp_poly_ctx_values_pool.release ();
4641 ipcp_agg_lattice_pool.release ();
4642 ipa_refdesc_pool.release ();
4645 /* Print the ipa_tree_map data structures (parameter descriptors) of
4646 function NODE to F. */
4648 void
4649 ipa_print_node_params (FILE *f, struct cgraph_node *node)
4651 int i, count;
4652 class ipa_node_params *info;
4654 if (!node->definition)
4655 return;
4656 info = ipa_node_params_sum->get (node);
4657 fprintf (f, " function %s parameter descriptors:\n", node->dump_name ());
4658 if (!info)
4660 fprintf (f, " no params return\n");
4661 return;
4663 count = ipa_get_param_count (info);
4664 for (i = 0; i < count; i++)
4666 int c;
4668 fprintf (f, " ");
4669 ipa_dump_param (f, info, i);
4670 if (ipa_is_param_used (info, i))
4671 fprintf (f, " used");
4672 if (ipa_is_param_used_by_ipa_predicates (info, i))
4673 fprintf (f, " used_by_ipa_predicates");
4674 if (ipa_is_param_used_by_indirect_call (info, i))
4675 fprintf (f, " used_by_indirect_call");
4676 if (ipa_is_param_used_by_polymorphic_call (info, i))
4677 fprintf (f, " used_by_polymorphic_call");
4678 c = ipa_get_controlled_uses (info, i);
4679 if (c == IPA_UNDESCRIBED_USE)
4680 fprintf (f, " undescribed_use");
4681 else
4682 fprintf (f, " controlled_uses=%i %s", c,
4683 ipa_get_param_load_dereferenced (info, i)
4684 ? "(load_dereferenced)" : "");
4685 fprintf (f, "\n");
4689 /* Print ipa_tree_map data structures of all functions in the
4690 callgraph to F. */
4692 void
4693 ipa_print_all_params (FILE * f)
4695 struct cgraph_node *node;
4697 fprintf (f, "\nFunction parameters:\n");
4698 FOR_EACH_FUNCTION (node)
4699 ipa_print_node_params (f, node);
4702 /* Dump the AV linked list. */
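/* For instance, a chain with (index, offset, value) items (0, 0, 1) and
   (1, 64, x) is printed as " Aggregate replacements: 0[0]=1, 1[64]=x".  */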
4704 void
4705 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4707 bool comma = false;
4708 fprintf (f, " Aggregate replacements:");
4709 for (; av; av = av->next)
4711 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4712 av->index, av->offset);
4713 print_generic_expr (f, av->value);
4714 comma = true;
4716 fprintf (f, "\n");
4719 /* Stream out jump function JUMP_FUNC to OB. */
4721 static void
4722 ipa_write_jump_function (struct output_block *ob,
4723 struct ipa_jump_func *jump_func)
4725 struct ipa_agg_jf_item *item;
4726 struct bitpack_d bp;
4727 int i, count;
4728 int flag = 0;
4730 /* ADDR_EXPRs are very common IP invariants; save some streamer data
4731 as well as WPA memory by handling them specially. */
4732 if (jump_func->type == IPA_JF_CONST
4733 && TREE_CODE (jump_func->value.constant.value) == ADDR_EXPR)
4734 flag = 1;
4736 streamer_write_uhwi (ob, jump_func->type * 2 + flag);
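/* The uhwi written above packs two fields into one value:
   tag = 2 * type + flag.  ipa_read_jump_function recovers them with
   flag = tag & 1 and type = tag / 2.  */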
4737 switch (jump_func->type)
4739 case IPA_JF_UNKNOWN:
4740 break;
4741 case IPA_JF_CONST:
4742 gcc_assert (
4743 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4744 stream_write_tree (ob,
4745 flag
4746 ? TREE_OPERAND (jump_func->value.constant.value, 0)
4747 : jump_func->value.constant.value, true);
4748 break;
4749 case IPA_JF_PASS_THROUGH:
4750 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4751 if (jump_func->value.pass_through.operation == NOP_EXPR)
4753 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4754 bp = bitpack_create (ob->main_stream);
4755 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4756 streamer_write_bitpack (&bp);
4758 else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
4759 == tcc_unary)
4760 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4761 else
4763 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4764 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4766 break;
4767 case IPA_JF_ANCESTOR:
4768 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4769 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4770 bp = bitpack_create (ob->main_stream);
4771 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4772 bp_pack_value (&bp, jump_func->value.ancestor.keep_null, 1);
4773 streamer_write_bitpack (&bp);
4774 break;
4775 default:
4776 fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
4779 count = vec_safe_length (jump_func->agg.items);
4780 streamer_write_uhwi (ob, count);
4781 if (count)
4783 bp = bitpack_create (ob->main_stream);
4784 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4785 streamer_write_bitpack (&bp);
4788 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4790 stream_write_tree (ob, item->type, true);
4791 streamer_write_uhwi (ob, item->offset);
4792 streamer_write_uhwi (ob, item->jftype);
4793 switch (item->jftype)
4795 case IPA_JF_UNKNOWN:
4796 break;
4797 case IPA_JF_CONST:
4798 stream_write_tree (ob, item->value.constant, true);
4799 break;
4800 case IPA_JF_PASS_THROUGH:
4801 case IPA_JF_LOAD_AGG:
4802 streamer_write_uhwi (ob, item->value.pass_through.operation);
4803 streamer_write_uhwi (ob, item->value.pass_through.formal_id);
4804 if (TREE_CODE_CLASS (item->value.pass_through.operation)
4805 != tcc_unary)
4806 stream_write_tree (ob, item->value.pass_through.operand, true);
4807 if (item->jftype == IPA_JF_LOAD_AGG)
4809 stream_write_tree (ob, item->value.load_agg.type, true);
4810 streamer_write_uhwi (ob, item->value.load_agg.offset);
4811 bp = bitpack_create (ob->main_stream);
4812 bp_pack_value (&bp, item->value.load_agg.by_ref, 1);
4813 streamer_write_bitpack (&bp);
4815 break;
4816 default:
4817 fatal_error (UNKNOWN_LOCATION,
4818 "invalid jump function in LTO stream");
4822 bp = bitpack_create (ob->main_stream);
4823 bp_pack_value (&bp, !!jump_func->bits, 1);
4824 streamer_write_bitpack (&bp);
4825 if (jump_func->bits)
4827 streamer_write_widest_int (ob, jump_func->bits->value);
4828 streamer_write_widest_int (ob, jump_func->bits->mask);
4830 bp_pack_value (&bp, !!jump_func->m_vr, 1);
4831 streamer_write_bitpack (&bp);
4832 if (jump_func->m_vr)
4834 streamer_write_enum (ob->main_stream, value_range_kind,
4835 VR_LAST, jump_func->m_vr->kind ());
4836 stream_write_tree (ob, jump_func->m_vr->min (), true);
4837 stream_write_tree (ob, jump_func->m_vr->max (), true);
4841 /* Read in jump function JUMP_FUNC from IB. */
4843 static void
4844 ipa_read_jump_function (class lto_input_block *ib,
4845 struct ipa_jump_func *jump_func,
4846 struct cgraph_edge *cs,
4847 class data_in *data_in,
4848 bool prevails)
4850 enum jump_func_type jftype;
4851 enum tree_code operation;
4852 int i, count;
4853 int val = streamer_read_uhwi (ib);
4854 bool flag = val & 1;
4856 jftype = (enum jump_func_type) (val / 2);
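/* This undoes the 2 * type + flag packing performed by
   ipa_write_jump_function.  */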
4857 switch (jftype)
4859 case IPA_JF_UNKNOWN:
4860 ipa_set_jf_unknown (jump_func);
4861 break;
4862 case IPA_JF_CONST:
4864 tree t = stream_read_tree (ib, data_in);
4865 if (flag && prevails)
4866 t = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (t)), t);
4867 ipa_set_jf_constant (jump_func, t, cs);
4869 break;
4870 case IPA_JF_PASS_THROUGH:
4871 operation = (enum tree_code) streamer_read_uhwi (ib);
4872 if (operation == NOP_EXPR)
4874 int formal_id = streamer_read_uhwi (ib);
4875 struct bitpack_d bp = streamer_read_bitpack (ib);
4876 bool agg_preserved = bp_unpack_value (&bp, 1);
4877 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4879 else if (TREE_CODE_CLASS (operation) == tcc_unary)
4881 int formal_id = streamer_read_uhwi (ib);
4882 ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4884 else
4886 tree operand = stream_read_tree (ib, data_in);
4887 int formal_id = streamer_read_uhwi (ib);
4888 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4889 operation);
4891 break;
4892 case IPA_JF_ANCESTOR:
4894 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4895 int formal_id = streamer_read_uhwi (ib);
4896 struct bitpack_d bp = streamer_read_bitpack (ib);
4897 bool agg_preserved = bp_unpack_value (&bp, 1);
4898 bool keep_null = bp_unpack_value (&bp, 1);
4899 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved,
4900 keep_null);
4901 break;
4903 default:
4904 fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
4907 count = streamer_read_uhwi (ib);
4908 if (prevails)
4910 jump_func->agg.items = NULL;
4911 vec_safe_reserve (jump_func->agg.items, count, true);
4913 if (count)
4915 struct bitpack_d bp = streamer_read_bitpack (ib);
4916 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4918 for (i = 0; i < count; i++)
4920 struct ipa_agg_jf_item item;
4921 item.type = stream_read_tree (ib, data_in);
4922 item.offset = streamer_read_uhwi (ib);
4923 item.jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4925 switch (item.jftype)
4927 case IPA_JF_UNKNOWN:
4928 break;
4929 case IPA_JF_CONST:
4930 item.value.constant = stream_read_tree (ib, data_in);
4931 break;
4932 case IPA_JF_PASS_THROUGH:
4933 case IPA_JF_LOAD_AGG:
4934 operation = (enum tree_code) streamer_read_uhwi (ib);
4935 item.value.pass_through.operation = operation;
4936 item.value.pass_through.formal_id = streamer_read_uhwi (ib);
4937 if (TREE_CODE_CLASS (operation) == tcc_unary)
4938 item.value.pass_through.operand = NULL_TREE;
4939 else
4940 item.value.pass_through.operand = stream_read_tree (ib, data_in);
4941 if (item.jftype == IPA_JF_LOAD_AGG)
4943 struct bitpack_d bp;
4944 item.value.load_agg.type = stream_read_tree (ib, data_in);
4945 item.value.load_agg.offset = streamer_read_uhwi (ib);
4946 bp = streamer_read_bitpack (ib);
4947 item.value.load_agg.by_ref = bp_unpack_value (&bp, 1);
4949 break;
4950 default:
4951 fatal_error (UNKNOWN_LOCATION,
4952 "invalid jump function in LTO stream");
4954 if (prevails)
4955 jump_func->agg.items->quick_push (item);
4958 struct bitpack_d bp = streamer_read_bitpack (ib);
4959 bool bits_known = bp_unpack_value (&bp, 1);
4960 if (bits_known)
4962 widest_int value = streamer_read_widest_int (ib);
4963 widest_int mask = streamer_read_widest_int (ib);
4964 if (prevails)
4965 ipa_set_jfunc_bits (jump_func, value, mask);
4967 else
4968 jump_func->bits = NULL;
4970 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4971 bool vr_known = bp_unpack_value (&vr_bp, 1);
4972 if (vr_known)
4974 enum value_range_kind type = streamer_read_enum (ib, value_range_kind,
4975 VR_LAST);
4976 tree min = stream_read_tree (ib, data_in);
4977 tree max = stream_read_tree (ib, data_in);
4978 if (prevails)
4979 ipa_set_jfunc_vr (jump_func, type, min, max);
4981 else
4982 jump_func->m_vr = NULL;
4985 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4986 relevant to indirect inlining to OB. */
4988 static void
4989 ipa_write_indirect_edge_info (struct output_block *ob,
4990 struct cgraph_edge *cs)
4992 class cgraph_indirect_call_info *ii = cs->indirect_info;
4993 struct bitpack_d bp;
4995 streamer_write_hwi (ob, ii->param_index);
4996 bp = bitpack_create (ob->main_stream);
4997 bp_pack_value (&bp, ii->polymorphic, 1);
4998 bp_pack_value (&bp, ii->agg_contents, 1);
4999 bp_pack_value (&bp, ii->member_ptr, 1);
5000 bp_pack_value (&bp, ii->by_ref, 1);
5001 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
5002 bp_pack_value (&bp, ii->vptr_changed, 1);
5003 streamer_write_bitpack (&bp);
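/* ipa_read_indirect_edge_info must unpack these six flags in exactly this
   order; a bitpack carries no self-description.  */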
5004 if (ii->agg_contents || ii->polymorphic)
5005 streamer_write_hwi (ob, ii->offset);
5006 else
5007 gcc_assert (ii->offset == 0);
5009 if (ii->polymorphic)
5011 streamer_write_hwi (ob, ii->otr_token);
5012 stream_write_tree (ob, ii->otr_type, true);
5013 ii->context.stream_out (ob);
5017 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
5018 relevant to indirect inlining from IB. */
5020 static void
5021 ipa_read_indirect_edge_info (class lto_input_block *ib,
5022 class data_in *data_in,
5023 struct cgraph_edge *cs,
5024 class ipa_node_params *info)
5026 class cgraph_indirect_call_info *ii = cs->indirect_info;
5027 struct bitpack_d bp;
5029 ii->param_index = (int) streamer_read_hwi (ib);
5030 bp = streamer_read_bitpack (ib);
5031 ii->polymorphic = bp_unpack_value (&bp, 1);
5032 ii->agg_contents = bp_unpack_value (&bp, 1);
5033 ii->member_ptr = bp_unpack_value (&bp, 1);
5034 ii->by_ref = bp_unpack_value (&bp, 1);
5035 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
5036 ii->vptr_changed = bp_unpack_value (&bp, 1);
5037 if (ii->agg_contents || ii->polymorphic)
5038 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
5039 else
5040 ii->offset = 0;
5041 if (ii->polymorphic)
5043 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
5044 ii->otr_type = stream_read_tree (ib, data_in);
5045 ii->context.stream_in (ib, data_in);
5047 if (info && ii->param_index >= 0)
5049 if (ii->polymorphic)
5050 ipa_set_param_used_by_polymorphic_call (info,
5051 ii->param_index, true);
5052 ipa_set_param_used_by_indirect_call (info,
5053 ii->param_index, true);
5057 /* Stream out NODE info to OB. */
5059 static void
5060 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
5062 int node_ref;
5063 lto_symtab_encoder_t encoder;
5064 ipa_node_params *info = ipa_node_params_sum->get (node);
5065 int j;
5066 struct cgraph_edge *e;
5067 struct bitpack_d bp;
5069 encoder = ob->decl_state->symtab_node_encoder;
5070 node_ref = lto_symtab_encoder_encode (encoder, node);
5071 streamer_write_uhwi (ob, node_ref);
5073 streamer_write_uhwi (ob, ipa_get_param_count (info));
5074 for (j = 0; j < ipa_get_param_count (info); j++)
5075 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
5076 bp = bitpack_create (ob->main_stream);
5077 gcc_assert (info->analysis_done
5078 || ipa_get_param_count (info) == 0);
5079 gcc_assert (!info->node_enqueued);
5080 gcc_assert (!info->ipcp_orig_node);
5081 for (j = 0; j < ipa_get_param_count (info); j++)
5083 /* TODO: We could just not stream the bit in the undescribed case. */
5084 bool d = (ipa_get_controlled_uses (info, j) != IPA_UNDESCRIBED_USE)
5085 ? ipa_get_param_load_dereferenced (info, j) : true;
5086 bp_pack_value (&bp, d, 1);
5087 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
5089 streamer_write_bitpack (&bp);
5090 for (j = 0; j < ipa_get_param_count (info); j++)
5092 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
5093 stream_write_tree (ob, ipa_get_type (info, j), true);
5095 for (e = node->callees; e; e = e->next_callee)
5097 ipa_edge_args *args = ipa_edge_args_sum->get (e);
5099 if (!args)
5101 streamer_write_uhwi (ob, 0);
5102 continue;
5105 streamer_write_uhwi (ob,
5106 ipa_get_cs_argument_count (args) * 2
5107 + (args->polymorphic_call_contexts != NULL));
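/* As with jump function tags, the argument count is doubled and its low bit
   records whether polymorphic call contexts follow; ipa_read_edge_info
   undoes this with count & 1 and count / 2.  */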
5108 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
5110 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
5111 if (args->polymorphic_call_contexts != NULL)
5112 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
5115 for (e = node->indirect_calls; e; e = e->next_callee)
5117 ipa_edge_args *args = ipa_edge_args_sum->get (e);
5118 if (!args)
5119 streamer_write_uhwi (ob, 0);
5120 else
5122 streamer_write_uhwi (ob,
5123 ipa_get_cs_argument_count (args) * 2
5124 + (args->polymorphic_call_contexts != NULL));
5125 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
5127 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
5128 if (args->polymorphic_call_contexts != NULL)
5129 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
5132 ipa_write_indirect_edge_info (ob, e);
5136 /* Stream in edge E from IB. */
5138 static void
5139 ipa_read_edge_info (class lto_input_block *ib,
5140 class data_in *data_in,
5141 struct cgraph_edge *e, bool prevails)
5143 int count = streamer_read_uhwi (ib);
5144 bool contexts_computed = count & 1;
5146 count /= 2;
5147 if (!count)
5148 return;
5149 if (prevails
5150 && (e->possibly_call_in_translation_unit_p ()
5151 /* Also stream in jump functions to builtins in the hope that they
5152 will get fnspecs. */
5153 || fndecl_built_in_p (e->callee->decl, BUILT_IN_NORMAL)))
5155 ipa_edge_args *args = ipa_edge_args_sum->get_create (e);
5156 vec_safe_grow_cleared (args->jump_functions, count, true);
5157 if (contexts_computed)
5158 vec_safe_grow_cleared (args->polymorphic_call_contexts, count, true);
5159 for (int k = 0; k < count; k++)
5161 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5162 data_in, prevails);
5163 if (contexts_computed)
5164 ipa_get_ith_polymorhic_call_context (args, k)->stream_in
5165 (ib, data_in);
5168 else
5170 for (int k = 0; k < count; k++)
5172 struct ipa_jump_func dummy;
5173 ipa_read_jump_function (ib, &dummy, e,
5174 data_in, prevails);
5175 if (contexts_computed)
5177 class ipa_polymorphic_call_context ctx;
5178 ctx.stream_in (ib, data_in);
5184 /* Stream in NODE info from IB. */
5186 static void
5187 ipa_read_node_info (class lto_input_block *ib, struct cgraph_node *node,
5188 class data_in *data_in)
5190 int k;
5191 struct cgraph_edge *e;
5192 struct bitpack_d bp;
5193 bool prevails = node->prevailing_p ();
5194 ipa_node_params *info
5195 = prevails ? ipa_node_params_sum->get_create (node) : NULL;
5197 int param_count = streamer_read_uhwi (ib);
5198 if (prevails)
5200 ipa_alloc_node_params (node, param_count);
5201 for (k = 0; k < param_count; k++)
5202 (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
5203 if (ipa_get_param_count (info) != 0)
5204 info->analysis_done = true;
5205 info->node_enqueued = false;
5207 else
5208 for (k = 0; k < param_count; k++)
5209 streamer_read_uhwi (ib);
5211 bp = streamer_read_bitpack (ib);
5212 for (k = 0; k < param_count; k++)
5214 bool load_dereferenced = bp_unpack_value (&bp, 1);
5215 bool used = bp_unpack_value (&bp, 1);
5217 if (prevails)
5219 ipa_set_param_load_dereferenced (info, k, load_dereferenced);
5220 ipa_set_param_used (info, k, used);
5223 for (k = 0; k < param_count; k++)
5225 int nuses = streamer_read_hwi (ib);
5226 tree type = stream_read_tree (ib, data_in);
5228 if (prevails)
5230 ipa_set_controlled_uses (info, k, nuses);
5231 (*info->descriptors)[k].decl_or_type = type;
5234 for (e = node->callees; e; e = e->next_callee)
5235 ipa_read_edge_info (ib, data_in, e, prevails);
5236 for (e = node->indirect_calls; e; e = e->next_callee)
5238 ipa_read_edge_info (ib, data_in, e, prevails);
5239 ipa_read_indirect_edge_info (ib, data_in, e, info);
5243 /* Write jump functions for all analyzed nodes in the current partition. */
5245 void
5246 ipa_prop_write_jump_functions (void)
5248 struct output_block *ob;
5249 unsigned int count = 0;
5250 lto_symtab_encoder_iterator lsei;
5251 lto_symtab_encoder_t encoder;
5253 if (!ipa_node_params_sum || !ipa_edge_args_sum)
5254 return;
5256 ob = create_output_block (LTO_section_jump_functions);
5257 encoder = ob->decl_state->symtab_node_encoder;
5258 ob->symbol = NULL;
5259 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5260 lsei_next_function_in_partition (&lsei))
5262 cgraph_node *node = lsei_cgraph_node (lsei);
5263 if (node->has_gimple_body_p ()
5264 && ipa_node_params_sum->get (node) != NULL)
5265 count++;
5268 streamer_write_uhwi (ob, count);
5270 /* Process all of the functions. */
5271 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5272 lsei_next_function_in_partition (&lsei))
5274 cgraph_node *node = lsei_cgraph_node (lsei);
5275 if (node->has_gimple_body_p ()
5276 && ipa_node_params_sum->get (node) != NULL)
5277 ipa_write_node_info (ob, node);
5279 streamer_write_char_stream (ob->main_stream, 0);
5280 produce_asm (ob, NULL);
5281 destroy_output_block (ob);
5284 /* Read section in file FILE_DATA of length LEN with data DATA. */
5286 static void
5287 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
5288 size_t len)
5290 const struct lto_function_header *header =
5291 (const struct lto_function_header *) data;
5292 const int cfg_offset = sizeof (struct lto_function_header);
5293 const int main_offset = cfg_offset + header->cfg_size;
5294 const int string_offset = main_offset + header->main_size;
5295 class data_in *data_in;
5296 unsigned int i;
5297 unsigned int count;
5299 lto_input_block ib_main ((const char *) data + main_offset,
5300 header->main_size, file_data->mode_table);
5302 data_in =
5303 lto_data_in_create (file_data, (const char *) data + string_offset,
5304 header->string_size, vNULL);
5305 count = streamer_read_uhwi (&ib_main);
5307 for (i = 0; i < count; i++)
5309 unsigned int index;
5310 struct cgraph_node *node;
5311 lto_symtab_encoder_t encoder;
5313 index = streamer_read_uhwi (&ib_main);
5314 encoder = file_data->symtab_node_encoder;
5315 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5316 index));
5317 gcc_assert (node->definition);
5318 ipa_read_node_info (&ib_main, node, data_in);
5320 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5321 len);
5322 lto_data_in_delete (data_in);
5325 /* Read ipcp jump functions. */
5327 void
5328 ipa_prop_read_jump_functions (void)
5330 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5331 struct lto_file_decl_data *file_data;
5332 unsigned int j = 0;
5334 ipa_check_create_node_params ();
5335 ipa_check_create_edge_args ();
5336 ipa_register_cgraph_hooks ();
5338 while ((file_data = file_data_vec[j++]))
5340 size_t len;
5341 const char *data
5342 = lto_get_summary_section_data (file_data, LTO_section_jump_functions,
5343 &len);
5344 if (data)
5345 ipa_prop_read_section (file_data, data, len);
5349 void
5350 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
5352 int node_ref;
5353 unsigned int count = 0;
5354 lto_symtab_encoder_t encoder;
5355 struct ipa_agg_replacement_value *aggvals, *av;
5357 aggvals = ipa_get_agg_replacements_for_node (node);
5358 encoder = ob->decl_state->symtab_node_encoder;
5359 node_ref = lto_symtab_encoder_encode (encoder, node);
5360 streamer_write_uhwi (ob, node_ref);
5362 for (av = aggvals; av; av = av->next)
5363 count++;
5364 streamer_write_uhwi (ob, count);
5366 for (av = aggvals; av; av = av->next)
5368 struct bitpack_d bp;
5370 streamer_write_uhwi (ob, av->offset);
5371 streamer_write_uhwi (ob, av->index);
5372 stream_write_tree (ob, av->value, true);
5374 bp = bitpack_create (ob->main_stream);
5375 bp_pack_value (&bp, av->by_ref, 1);
5376 streamer_write_bitpack (&bp);
5379 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5380 if (ts && vec_safe_length (ts->m_vr) > 0)
5382 count = ts->m_vr->length ();
5383 streamer_write_uhwi (ob, count);
5384 for (unsigned i = 0; i < count; ++i)
5386 struct bitpack_d bp;
5387 ipa_vr *parm_vr = &(*ts->m_vr)[i];
5388 bp = bitpack_create (ob->main_stream);
5389 bp_pack_value (&bp, parm_vr->known, 1);
5390 streamer_write_bitpack (&bp);
5391 if (parm_vr->known)
5393 streamer_write_enum (ob->main_stream, value_range_kind,
5394 VR_LAST, parm_vr->type);
5395 streamer_write_wide_int (ob, parm_vr->min);
5396 streamer_write_wide_int (ob, parm_vr->max);
5400 else
5401 streamer_write_uhwi (ob, 0);
5403 if (ts && vec_safe_length (ts->bits) > 0)
5405 count = ts->bits->length ();
5406 streamer_write_uhwi (ob, count);
5408 for (unsigned i = 0; i < count; ++i)
5410 const ipa_bits *bits_jfunc = (*ts->bits)[i];
5411 struct bitpack_d bp = bitpack_create (ob->main_stream);
5412 bp_pack_value (&bp, !!bits_jfunc, 1);
5413 streamer_write_bitpack (&bp);
5414 if (bits_jfunc)
5416 streamer_write_widest_int (ob, bits_jfunc->value);
5417 streamer_write_widest_int (ob, bits_jfunc->mask);
5421 else
5422 streamer_write_uhwi (ob, 0);
5425 /* Stream in the IPA-CP transformation info for NODE from IB. */
5427 static void
5428 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
5429 data_in *data_in)
5431 struct ipa_agg_replacement_value *aggvals = NULL;
5432 unsigned int count, i;
5434 count = streamer_read_uhwi (ib);
5435 for (i = 0; i < count; i++)
5437 struct ipa_agg_replacement_value *av;
5438 struct bitpack_d bp;
5440 av = ggc_alloc<ipa_agg_replacement_value> ();
5441 av->offset = streamer_read_uhwi (ib);
5442 av->index = streamer_read_uhwi (ib);
5443 av->value = stream_read_tree (ib, data_in);
5444 bp = streamer_read_bitpack (ib);
5445 av->by_ref = bp_unpack_value (&bp, 1);
5446 av->next = aggvals;
5447 aggvals = av;
5449 ipa_set_node_agg_value_chain (node, aggvals);
5451 count = streamer_read_uhwi (ib);
5452 if (count > 0)
5454 ipcp_transformation_initialize ();
5455 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
5456 vec_safe_grow_cleared (ts->m_vr, count, true);
5457 for (i = 0; i < count; i++)
5459 ipa_vr *parm_vr;
5460 parm_vr = &(*ts->m_vr)[i];
5461 struct bitpack_d bp;
5462 bp = streamer_read_bitpack (ib);
5463 parm_vr->known = bp_unpack_value (&bp, 1);
5464 if (parm_vr->known)
5466 parm_vr->type = streamer_read_enum (ib, value_range_kind,
5467 VR_LAST);
5468 parm_vr->min = streamer_read_wide_int (ib);
5469 parm_vr->max = streamer_read_wide_int (ib);
5473 count = streamer_read_uhwi (ib);
5474 if (count > 0)
5476 ipcp_transformation_initialize ();
5477 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
5478 vec_safe_grow_cleared (ts->bits, count, true);
5480 for (i = 0; i < count; i++)
5482 struct bitpack_d bp = streamer_read_bitpack (ib);
5483 bool known = bp_unpack_value (&bp, 1);
5484 if (known)
5486 const widest_int value = streamer_read_widest_int (ib);
5487 const widest_int mask = streamer_read_widest_int (ib);
5488 ipa_bits *bits
5489 = ipa_get_ipa_bits_for_value (value, mask);
5490 (*ts->bits)[i] = bits;
5496 /* Write all aggregate replacements for nodes in the current partition. */
5498 void
5499 ipcp_write_transformation_summaries (void)
5501 struct cgraph_node *node;
5502 struct output_block *ob;
5503 unsigned int count = 0;
5504 lto_symtab_encoder_iterator lsei;
5505 lto_symtab_encoder_t encoder;
5507 ob = create_output_block (LTO_section_ipcp_transform);
5508 encoder = ob->decl_state->symtab_node_encoder;
5509 ob->symbol = NULL;
5510 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5511 lsei_next_function_in_partition (&lsei))
5513 node = lsei_cgraph_node (lsei);
5514 if (node->has_gimple_body_p ())
5515 count++;
5518 streamer_write_uhwi (ob, count);
5520 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5521 lsei_next_function_in_partition (&lsei))
5523 node = lsei_cgraph_node (lsei);
5524 if (node->has_gimple_body_p ())
5525 write_ipcp_transformation_info (ob, node);
5527 streamer_write_char_stream (ob->main_stream, 0);
5528 produce_asm (ob, NULL);
5529 destroy_output_block (ob);
5532 /* Read replacements section in file FILE_DATA of length LEN with data
5533 DATA. */
5535 static void
5536 read_replacements_section (struct lto_file_decl_data *file_data,
5537 const char *data,
5538 size_t len)
5540 const struct lto_function_header *header =
5541 (const struct lto_function_header *) data;
5542 const int cfg_offset = sizeof (struct lto_function_header);
5543 const int main_offset = cfg_offset + header->cfg_size;
5544 const int string_offset = main_offset + header->main_size;
5545 class data_in *data_in;
5546 unsigned int i;
5547 unsigned int count;
5549 lto_input_block ib_main ((const char *) data + main_offset,
5550 header->main_size, file_data->mode_table);
5552 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5553 header->string_size, vNULL);
5554 count = streamer_read_uhwi (&ib_main);
5556 for (i = 0; i < count; i++)
5558 unsigned int index;
5559 struct cgraph_node *node;
5560 lto_symtab_encoder_t encoder;
5562 index = streamer_read_uhwi (&ib_main);
5563 encoder = file_data->symtab_node_encoder;
5564 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5565 index));
5566 gcc_assert (node->definition);
5567 read_ipcp_transformation_info (&ib_main, node, data_in);
5569 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5570 len);
5571 lto_data_in_delete (data_in);
5574 /* Read IPA-CP aggregate replacements. */
5576 void
5577 ipcp_read_transformation_summaries (void)
5579 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5580 struct lto_file_decl_data *file_data;
5581 unsigned int j = 0;
5583 while ((file_data = file_data_vec[j++]))
5585 size_t len;
5586 const char *data
5587 = lto_get_summary_section_data (file_data, LTO_section_ipcp_transform,
5588 &len);
5589 if (data)
5590 read_replacements_section (file_data, data, len);
5594 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5595 NODE. Moreover, if any parameter was IPA-SRAed into a scalar, go ahead with
5596 substitution of the default definition of that new param with the
5597 appropriate constant.
5599 Return two bools. The first is true if at least one item in AGGVAL still
5600 exists and the function body walk should go ahead. The second is true if any
5601 values were already substituted for scalarized parameters and update_cfg
5602 should be run after replace_uses_by. */
5604 static std::pair<bool, bool>
5605 adjust_agg_replacement_values (cgraph_node *node,
5606 ipa_agg_replacement_value *aggval,
5607 const vec<ipa_param_descriptor, va_gc>
5608 &descriptors)
5610 struct ipa_agg_replacement_value *v;
5611 clone_info *cinfo = clone_info::get (node);
5612 if (!cinfo || !cinfo->param_adjustments)
5613 return std::pair<bool, bool> (true, false);
5615 bool anything_left = false;
5616 bool done_replacement = false;
5617 for (v = aggval; v; v = v->next)
5619 gcc_checking_assert (v->index >= 0);
5621 unsigned unit_offset = v->offset / BITS_PER_UNIT;
5622 tree cst_type = TREE_TYPE (v->value);
5623 int split_idx;
5624 int new_idx
5625 = cinfo->param_adjustments->get_updated_index_or_split (v->index,
5626 unit_offset,
5627 cst_type,
5628 &split_idx);
5629 v->index = new_idx;
5630 if (new_idx >= 0)
5631 anything_left = true;
5632 else if (split_idx >= 0)
5634 tree parm = ipa_get_param (descriptors, split_idx);
5635 tree ddef = ssa_default_def (cfun, parm);
5636 if (ddef)
5638 replace_uses_by (ddef, v->value);
5639 done_replacement = true;
5643 return std::pair<bool, bool> (anything_left, done_replacement);
5646 /* Dominator walker driving the ipcp modification phase. */
5648 class ipcp_modif_dom_walker : public dom_walker
5650 public:
5651 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
5652 vec<ipa_param_descriptor, va_gc> *descs,
5653 struct ipa_agg_replacement_value *av,
5654 bool *sc)
5655 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5656 m_aggval (av), m_something_changed (sc) {}
5658 virtual edge before_dom_children (basic_block);
5659 bool cleanup_eh ()
5660 { return gimple_purge_all_dead_eh_edges (m_need_eh_cleanup); }
5662 private:
5663 struct ipa_func_body_info *m_fbi;
5664 vec<ipa_param_descriptor, va_gc> *m_descriptors;
5665 struct ipa_agg_replacement_value *m_aggval;
5666 bool *m_something_changed;
5667 auto_bitmap m_need_eh_cleanup;
5670 edge
5671 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5673 gimple_stmt_iterator gsi;
5674 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5676 struct ipa_agg_replacement_value *v;
5677 gimple *stmt = gsi_stmt (gsi);
5678 tree rhs, val, t;
5679 HOST_WIDE_INT offset;
5680 poly_int64 size;
5681 int index;
5682 bool by_ref, vce;
5684 if (!gimple_assign_load_p (stmt))
5685 continue;
5686 rhs = gimple_assign_rhs1 (stmt);
5687 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5688 continue;
5690 vce = false;
5691 t = rhs;
5692 while (handled_component_p (t))
5694 /* V_C_E can do things like convert an array of integers to one
5695 bigger integer and similar transformations that we do not handle below. */
5696 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5698 vce = true;
5699 break;
5701 t = TREE_OPERAND (t, 0);
5703 if (vce)
5704 continue;
5706 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
5707 &offset, &size, &by_ref))
5708 continue;
5709 for (v = m_aggval; v; v = v->next)
5710 if (v->index == index
5711 && v->offset == offset)
5712 break;
5713 if (!v
5714 || v->by_ref != by_ref
5715 || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v->value))),
5716 size))
5717 continue;
5719 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5720 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5722 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5723 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5724 else if (TYPE_SIZE (TREE_TYPE (rhs))
5725 == TYPE_SIZE (TREE_TYPE (v->value)))
5726 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5727 else
5729 if (dump_file)
5731 fprintf (dump_file, " const ");
5732 print_generic_expr (dump_file, v->value);
5733 fprintf (dump_file, " can't be converted to type of ");
5734 print_generic_expr (dump_file, rhs);
5735 fprintf (dump_file, "\n");
5737 continue;
5740 else
5741 val = v->value;
5743 if (dump_file && (dump_flags & TDF_DETAILS))
5745 fprintf (dump_file, "Modifying stmt:\n ");
5746 print_gimple_stmt (dump_file, stmt, 0);
5748 gimple_assign_set_rhs_from_tree (&gsi, val);
5749 update_stmt (stmt);
5751 if (dump_file && (dump_flags & TDF_DETAILS))
5753 fprintf (dump_file, "into:\n ");
5754 print_gimple_stmt (dump_file, stmt, 0);
5755 fprintf (dump_file, "\n");
5758 *m_something_changed = true;
5759 if (maybe_clean_eh_stmt (stmt))
5760 bitmap_set_bit (m_need_eh_cleanup, bb->index);
5762 return NULL;
5765 /* Return true if we have recorded VALUE and MASK about PARM.
5766 Set VALUE and MASK accordingly. */
5768 bool
5769 ipcp_get_parm_bits (tree parm, tree *value, widest_int *mask)
5771 cgraph_node *cnode = cgraph_node::get (current_function_decl);
5772 ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
5773 if (!ts || vec_safe_length (ts->bits) == 0)
5774 return false;
5776 int i = 0;
5777 for (tree p = DECL_ARGUMENTS (current_function_decl);
5778 p != parm; p = DECL_CHAIN (p))
5780 i++;
5781 /* Ignore static chain. */
5782 if (!p)
5783 return false;
5786 clone_info *cinfo = clone_info::get (cnode);
5787 if (cinfo && cinfo->param_adjustments)
5789 i = cinfo->param_adjustments->get_original_index (i);
5790 if (i < 0)
5791 return false;
5794 vec<ipa_bits *, va_gc> &bits = *ts->bits;
5795 if (!bits[i])
5796 return false;
5797 *mask = bits[i]->mask;
5798 *value = wide_int_to_tree (TREE_TYPE (parm), bits[i]->value);
5799 return true;
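/* A hypothetical caller would use this along the following lines:

     widest_int mask;
     tree value;
     if (ipcp_get_parm_bits (parm, &value, &mask))
       ... bits of PARM at positions where MASK is zero are known to
       equal the corresponding bits of VALUE ...

   with a false result meaning nothing was recorded or PARM could not be
   mapped back to the original node's parameters.  */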
/* Update bits info of formal parameters as described in
   ipcp_transformation.  */

static void
ipcp_update_bits (struct cgraph_node *node)
{
  ipcp_transformation *ts = ipcp_get_transformation_summary (node);

  if (!ts || vec_safe_length (ts->bits) == 0)
    return;
  vec<ipa_bits *, va_gc> &bits = *ts->bits;
  unsigned count = bits.length ();
  if (!count)
    return;

  auto_vec<int, 16> new_indices;
  bool need_remapping = false;
  clone_info *cinfo = clone_info::get (node);
  if (cinfo && cinfo->param_adjustments)
    {
      cinfo->param_adjustments->get_updated_indices (&new_indices);
      need_remapping = true;
    }
  auto_vec <tree, 16> parm_decls;
  push_function_arg_decls (&parm_decls, node->decl);

  for (unsigned i = 0; i < count; ++i)
    {
      tree parm;
      if (need_remapping)
	{
	  if (i >= new_indices.length ())
	    continue;
	  int idx = new_indices[i];
	  if (idx < 0)
	    continue;
	  parm = parm_decls[idx];
	}
      else
	parm = parm_decls[i];
      gcc_checking_assert (parm);

      if (!bits[i]
	  || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
	       || POINTER_TYPE_P (TREE_TYPE (parm)))
	  || !is_gimple_reg (parm))
	continue;

      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
      if (!ddef)
	continue;

      if (dump_file)
	{
	  fprintf (dump_file, "Adjusting mask for param %u to ", i);
	  print_hex (bits[i]->mask, dump_file);
	  fprintf (dump_file, "\n");
	}

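      /* For integral parameters record a nonzero-bits mask on the default
	 definition: a bit may be nonzero if it is set in the propagated
	 VALUE or if it is not known at all, i.e. set in MASK.  */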
      if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	{
	  unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
	  signop sgn = TYPE_SIGN (TREE_TYPE (ddef));

	  wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
				  | wide_int::from (bits[i]->value, prec, sgn);
	  set_nonzero_bits (ddef, nonzero_bits);
	}
      else
	{
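	  /* For pointers, derive alignment from the propagated bits.  The
	     least significant bit set in MASK is the first bit whose value
	     is unknown, so TEM & -TEM extracts it and everything below it is
	     known from VALUE.  E.g. a mask with low byte 0xf0 yields an
	     alignment of 16, with the low four bits of VALUE giving the
	     misalignment.  */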
	  unsigned tem = bits[i]->mask.to_uhwi ();
	  unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
	  unsigned align = tem & -tem;
	  unsigned misalign = bitpos & (align - 1);

	  if (align > 1)
	    {
	      if (dump_file)
		fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);

	      unsigned old_align, old_misalign;
	      struct ptr_info_def *pi = get_ptr_info (ddef);
	      bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);

	      if (old_known
		  && old_align > align)
		{
		  if (dump_file)
		    {
		      fprintf (dump_file, "But alignment was already %u.\n", old_align);
		      if ((old_misalign & (align - 1)) != misalign)
			fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
				 old_misalign, misalign);
		    }
		  continue;
		}

	      if (old_known
		  && ((misalign & (old_align - 1)) != old_misalign)
		  && dump_file)
		fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
			 old_misalign, misalign);

	      set_ptr_info_alignment (pi, align, misalign);
	    }
	}
    }
}

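/* Return true if the recorded value range, interpreted in EXPR_TYPE, is
   known to exclude zero: either the anti-range ~[0, 0] or, for an unsigned
   type, the range [1, MAX].  */
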
bool
ipa_vr::nonzero_p (tree expr_type) const
{
  if (type == VR_ANTI_RANGE && wi::eq_p (min, 0) && wi::eq_p (max, 0))
    return true;

  unsigned prec = TYPE_PRECISION (expr_type);
  return (type == VR_RANGE
	  && TYPE_UNSIGNED (expr_type)
	  && wi::eq_p (min, wi::one (prec))
	  && wi::eq_p (max, wi::max_value (prec, TYPE_SIGN (expr_type))));
}

/* Update value range of formal parameters as described in
   ipcp_transformation.  */

static void
ipcp_update_vr (struct cgraph_node *node)
{
  ipcp_transformation *ts = ipcp_get_transformation_summary (node);
  if (!ts || vec_safe_length (ts->m_vr) == 0)
    return;
  const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
  unsigned count = vr.length ();
  if (!count)
    return;

  auto_vec<int, 16> new_indices;
  bool need_remapping = false;
  clone_info *cinfo = clone_info::get (node);
  if (cinfo && cinfo->param_adjustments)
    {
      cinfo->param_adjustments->get_updated_indices (&new_indices);
      need_remapping = true;
    }
  auto_vec <tree, 16> parm_decls;
  push_function_arg_decls (&parm_decls, node->decl);

  for (unsigned i = 0; i < count; ++i)
    {
      tree parm;
      int remapped_idx;
      if (need_remapping)
	{
	  if (i >= new_indices.length ())
	    continue;
	  remapped_idx = new_indices[i];
	  if (remapped_idx < 0)
	    continue;
	}
      else
	remapped_idx = i;

      parm = parm_decls[remapped_idx];

      gcc_checking_assert (parm);
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);

      if (!ddef || !is_gimple_reg (parm))
	continue;
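
      /* Apply the propagated range: integral parameters get the range set
	 on their default definition SSA name, pointer parameters are marked
	 non-null when the range excludes zero.  */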
      if (vr[i].known
	  && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
	{
	  tree type = TREE_TYPE (ddef);
	  unsigned prec = TYPE_PRECISION (type);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "Setting value range of param %u "
			   "(now %i) ", i, remapped_idx);
		  fprintf (dump_file, "%s[",
			   (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
		  print_decs (vr[i].min, dump_file);
		  fprintf (dump_file, ", ");
		  print_decs (vr[i].max, dump_file);
		  fprintf (dump_file, "]\n");
		}
	      value_range v (type,
			     wide_int_storage::from (vr[i].min, prec,
						     TYPE_SIGN (type)),
			     wide_int_storage::from (vr[i].max, prec,
						     TYPE_SIGN (type)),
			     vr[i].type);
	      set_range_info (ddef, v);
	    }
	  else if (POINTER_TYPE_P (TREE_TYPE (ddef))
		   && vr[i].nonzero_p (TREE_TYPE (ddef)))
	    {
	      if (dump_file)
		fprintf (dump_file, "Setting nonnull for %u\n", i);
	      set_ptr_nonnull (ddef);
	    }
	}
    }
}

/* IPCP transformation phase doing propagation of aggregate values.  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
  struct ipa_func_body_info fbi;
  struct ipa_agg_replacement_value *aggval;
  int param_count;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s\n",
	     node->dump_name ());

  ipcp_update_bits (node);
  ipcp_update_vr (node);
  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  vec_safe_grow_cleared (descriptors, param_count, true);
  ipa_populate_param_decls (node, *descriptors);
  std::pair<bool, bool> rr
    = adjust_agg_replacement_values (node, aggval, *descriptors);
  bool cfg_changed = rr.second;
  if (!rr.first)
    {
      vec_free (descriptors);
      if (dump_file)
	fprintf (dump_file, "  All affected aggregate parameters were either "
		 "removed or converted into scalars, phase done.\n");
      if (cfg_changed)
	delete_unreachable_blocks_update_callgraph (node, false);
      return 0;
    }
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);

  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
  fbi.param_count = param_count;
  fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);
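
  /* Walk the dominator tree and replace loads from the tracked aggregate
     parameters with the propagated constant values; the subsequent EH
     cleanup of the modified statements may change the CFG.  */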
  bool modified_mem_access = false;
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker walker (&fbi, descriptors, aggval, &modified_mem_access);
  walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  free_dominance_info (CDI_DOMINATORS);
  cfg_changed |= walker.cleanup_eh ();

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
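
  /* The propagated data has now been materialized in the body, so drop the
     per-node summaries so that they are not applied a second time.  */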
  ipcp_transformation *s = ipcp_transformation_sum->get (node);
  s->agg_values = NULL;
  s->bits = NULL;
  s->m_vr = NULL;

  vec_free (descriptors);
  if (cfg_changed)
    delete_unreachable_blocks_update_callgraph (node, false);

  return modified_mem_access ? TODO_update_ssa_only_virtuals : 0;
}

/* Return true if OTHER describes same agg value.  */
bool
ipa_agg_value::equal_to (const ipa_agg_value &other)
{
  return offset == other.offset
	 && operand_equal_p (value, other.value, 0);
}

/* Destructor also removing individual aggregate values.  */

ipa_auto_call_arg_values::~ipa_auto_call_arg_values ()
{
  ipa_release_agg_values (m_known_aggs, false);
}


#include "gt-ipa-prop.h"