gcc/ipa-prop.c
/* Interprocedural analyses.
   Copyright (C) 2005-2021 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "alloc-pool.h"
28 #include "tree-pass.h"
29 #include "ssa.h"
30 #include "tree-streamer.h"
31 #include "cgraph.h"
32 #include "diagnostic.h"
33 #include "fold-const.h"
34 #include "gimple-fold.h"
35 #include "tree-eh.h"
36 #include "calls.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "gimplify-me.h"
42 #include "gimple-walk.h"
43 #include "symbol-summary.h"
44 #include "ipa-prop.h"
45 #include "tree-cfg.h"
46 #include "tree-dfa.h"
47 #include "tree-inline.h"
48 #include "ipa-fnsummary.h"
49 #include "gimple-pretty-print.h"
50 #include "ipa-utils.h"
51 #include "dbgcnt.h"
52 #include "domwalk.h"
53 #include "builtins.h"
54 #include "tree-cfgcleanup.h"
55 #include "options.h"
56 #include "symtab-clones.h"
57 #include "attr-fnspec.h"
/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;

function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;

/* Edge summary for IPA-CP edge information.  */
ipa_edge_args_sum_t *ipa_edge_args_sum;

/* Traits for a hash table for reusing already existing ipa_bits.  */

struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
{
  typedef ipa_bits *value_type;
  typedef ipa_bits *compare_type;
  static hashval_t
  hash (const ipa_bits *p)
  {
    hashval_t t = (hashval_t) p->value.to_shwi ();
    return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
  }
  static bool
  equal (const ipa_bits *a, const ipa_bits *b)
  {
    return a->value == b->value && a->mask == b->mask;
  }
  static const bool empty_zero_p = true;
  static void
  mark_empty (ipa_bits *&p)
  {
    p = NULL;
  }
  static bool
  is_empty (const ipa_bits *p)
  {
    return p == NULL;
  }
  static bool
  is_deleted (const ipa_bits *p)
  {
    return p == reinterpret_cast<const ipa_bits *> (1);
  }
  static void
  mark_deleted (ipa_bits *&p)
  {
    p = reinterpret_cast<ipa_bits *> (1);
  }
};

/* Hash table to avoid repeated allocations of equal ipa_bits.  */
static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
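
/* As an illustration of the encoding hashed above (following the bit-CCP
   lattice convention): if the Xth bit of MASK is zero, the Xth bit of the
   run-time value is known to equal the Xth bit of VALUE; mask bits set to
   one are unknown.  For instance, roughly speaking, value 0x2 with mask
   ~0x3 says "the low two bits are known to be 10".  */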

/* Traits for a hash table for reusing value_ranges used for IPA.  Note that
   the equiv bitmap is not hashed and is expected to be NULL.  */

struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range *>
{
  typedef value_range *value_type;
  typedef value_range *compare_type;
  static hashval_t
  hash (const value_range *p)
  {
    inchash::hash hstate (p->kind ());
    inchash::add_expr (p->min (), hstate);
    inchash::add_expr (p->max (), hstate);
    return hstate.end ();
  }
  static bool
  equal (const value_range *a, const value_range *b)
  {
    return (a->equal_p (*b)
	    && types_compatible_p (a->type (), b->type ()));
  }
  static const bool empty_zero_p = true;
  static void
  mark_empty (value_range *&p)
  {
    p = NULL;
  }
  static bool
  is_empty (const value_range *p)
  {
    return p == NULL;
  }
  static bool
  is_deleted (const value_range *p)
  {
    return p == reinterpret_cast<const value_range *> (1);
  }
  static void
  mark_deleted (value_range *&p)
  {
    p = reinterpret_cast<value_range *> (1);
  }
};

/* Hash table to avoid repeated allocations of equal value_ranges.  */
static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;

/* Holders of ipa cgraph hooks: */
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");

/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize)
	 || !opt_for_fn (node->decl, flag_ipa_cp);
}

/* Return the index of the formal parameter whose tree is PTREE, searching in
   DESCRIPTORS; return -1 if it is not found.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
			    tree ptree)
{
  int i, count;

  count = vec_safe_length (descriptors);
  for (i = 0; i < count; i++)
    if ((*descriptors)[i].decl_or_type == ptree)
      return i;

  return -1;
}

/* Return the index of the formal parameter whose tree is PTREE in the
   function which corresponds to INFO.  */

int
ipa_get_param_decl_index (class ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}

/* Populate the param_decl fields in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor, va_gc> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl_or_type = parm;
      unsigned int cost = estimate_move_cost (TREE_TYPE (parm), true);
      descriptors[param_num].move_cost = cost;
      /* Watch for overflow, move_cost is a bitfield.  */
      gcc_checking_assert (cost == descriptors[param_num].move_cost);
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Dump the Ith formal parameter of the function corresponding to INFO to
   FILE.  Note there is no setter function as the descriptor array is built
   just once using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, class ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if ((*info->descriptors)[i].decl_or_type)
    {
      fprintf (file, " ");
      print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
    }
}

/* If necessary, allocate vector of parameter descriptors in info of NODE.
   Return true if they were allocated, false if not.  */

static bool
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  class ipa_node_params *info = IPA_NODE_REF_GET_CREATE (node);

  if (!info->descriptors && param_count)
    {
      vec_safe_grow_cleared (info->descriptors, param_count, true);
      return true;
    }
  else
    return false;
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  class ipa_node_params *info = IPA_NODE_REF_GET_CREATE (node);

  if (!info->descriptors
      && ipa_alloc_node_params (node, count_formal_params (node->decl)))
    ipa_populate_param_decls (node, *info->descriptors);
}

/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, " param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f, jump_func->value.pass_through.operand);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, " Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_ELT (*jump_func->agg.items, j, item)
	    {
	      fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      fprintf (f, "type: ");
	      print_generic_expr (f, item->type);
	      fprintf (f, ", ");
	      if (item->jftype == IPA_JF_PASS_THROUGH)
		fprintf (f, "PASS THROUGH: %d,",
			 item->value.pass_through.formal_id);
	      else if (item->jftype == IPA_JF_LOAD_AGG)
		{
		  fprintf (f, "LOAD AGG: %d",
			   item->value.pass_through.formal_id);
		  fprintf (f, " [offset: " HOST_WIDE_INT_PRINT_DEC ", by %s],",
			   item->value.load_agg.offset,
			   item->value.load_agg.by_ref ? "reference"
						       : "value");
		}

	      if (item->jftype == IPA_JF_PASS_THROUGH
		  || item->jftype == IPA_JF_LOAD_AGG)
		{
		  fprintf (f, " op %s",
			   get_tree_code_name (item->value.pass_through.operation));
		  if (item->value.pass_through.operation != NOP_EXPR)
		    {
		      fprintf (f, " ");
		      print_generic_expr (f, item->value.pass_through.operand);
		    }
		}
	      else if (item->jftype == IPA_JF_CONST)
		{
		  fprintf (f, "CONST: ");
		  print_generic_expr (f, item->value.constant);
		}
	      else if (item->jftype == IPA_JF_UNKNOWN)
		fprintf (f, "UNKNOWN: " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->type)));
	      fprintf (f, "\n");
	    }
	}

      class ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, " Context: ");
	  ctx->dump (f);
	}

      if (jump_func->bits)
	{
	  fprintf (f, " value: ");
	  print_hex (jump_func->bits->value, f);
	  fprintf (f, ", mask: ");
	  print_hex (jump_func->bits->mask, f);
	  fprintf (f, "\n");
	}
      else
	fprintf (f, " Unknown bits\n");

      if (jump_func->m_vr)
	{
	  fprintf (f, " VR ");
	  fprintf (f, "%s[",
		   (jump_func->m_vr->kind () == VR_ANTI_RANGE) ? "~" : "");
	  print_decs (wi::to_wide (jump_func->m_vr->min ()), f);
	  fprintf (f, ", ");
	  print_decs (wi::to_wide (jump_func->m_vr->max ()), f);
	  fprintf (f, "]\n");
	}
      else
	fprintf (f, " Unknown VR\n");
    }
}

/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, " Jump functions of caller %s:\n", node->dump_name ());
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      fprintf (f, " callsite %s -> %s : \n",
	       node->dump_name (),
	       cs->callee->dump_name ());
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	fprintf (f, " no arg info\n");
      else
	ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      class cgraph_indirect_call_info *ii;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, " indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by value");
      else
	fprintf (f, " indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	fprintf (f, " no arg info\n");
      else
	ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    ipa_print_node_jump_functions (f, node);
}

/* Set JFUNC to be a "we really know nothing" jump function.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
}

/* Set DST to be a copy of another jump function SRC (to be used by jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}

/* Set JFUNC to be a simple pass-through jump function.  */
static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}

/* Set JFUNC to be a unary pass through jump function.  */

static void
ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}

/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_ref_base_and_extent to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }

  return true;
}
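
/* For illustration, the constructor discipline assumed above corresponds to
   C++ such as:

     struct A { virtual void f (); };
     struct B : A { B (); virtual void f (); };

   where the compiled body of B::B() first invokes A::A() (section 1 in the
   comment above), then stores the address of B's vtable (mangled _ZTV1B)
   into the VMT pointer (section 2), and only then runs user-written code
   (section 3).  */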

/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}

/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return true if the dynamic type may change
   between the beginning of the function and the point where CALL is invoked.

   Generally functions are not allowed to change type of such instances,
   but they call destructors.  We assume that methods cannot destroy the THIS
   pointer.  Also as a special case, constructors and destructors may change
   type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that would require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inline cdtor is actually working on ARG, but we don't have
     an easy tie on this, so punt on all non-pure cdtors).
     We may also record the types of cdtors and, once we know the type
     of the instance, match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call);
	       block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it has, return true.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (ipa_func_body_info *fbi, tree arg,
				       tree base, tree comp_type, gcall *call,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  if (fbi->aa_walk_budget == 0)
    return false;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  int walked
    = walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
			  &tci, NULL, NULL, fbi->aa_walk_budget);
  if (walked >= 0)
    fbi->aa_walk_budget -= walked;
  else
    fbi->aa_walk_budget = 0;

  if (walked >= 0 && !tci.type_maybe_changed)
    return false;

  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it may have, return true.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (ipa_func_body_info *fbi, tree arg, tree base,
		    tree comp_type, gcall *call,
		    HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (fbi, arg, base, comp_type,
						call, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (ipa_func_body_info *fbi, tree arg, tree comp_type,
			gcall *call)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (fbi, arg, arg, comp_type,
						call, 0);
}

/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}

/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   the structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count, true);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}

/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have gathered
   so far but which does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  if (TREE_READONLY (base))
    return true;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  if (paa->parm_modified || fbi->aa_walk_budget == 0)
    return false;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget);
  if (walked < 0)
    {
      modified = true;
      fbi->aa_walk_budget = 0;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}

/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
			    vec<ipa_param_descriptor, va_gc> *descriptors,
			    gimple *stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
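
/* For example, in the illustrative GIMPLE

     a.0_2 = a;
     bar (a.0_2);

   where a is an otherwise unmodified PARM_DECL, calling the function above
   on the first statement returns the index of a in DESCRIPTORS.  */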

/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  if (paa->ref_modified || fbi->aa_walk_budget == 0)
    return false;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget);
  if (walked < 0)
    {
      modified = true;
      fbi->aa_walk_budget = 0;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (modified)
    paa->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm)))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  if (paa->pt_modified || fbi->aa_walk_budget == 0)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget);
  if (walked < 0)
    {
      fbi->aa_walk_budget = 0;
      modified = true;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading
   data from an aggregate passed as a parameter.

   The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it
   returns false if it cannot prove that the value has not been modified
   before the load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will
   return true even if it cannot prove the value has not been modified, in
   that case it will store false to *GUARANTEED_UNMODIFIED, otherwise it will
   store true there.

   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If the function returns
   true, *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter
   index, the offset within the aggregate and whether it is a load from a
   value passed by reference respectively.  */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor, va_gc> *descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, poly_int64 *size_p,
			bool *by_ref_p, bool *guaranteed_unmodified)
{
  int index;
  HOST_WIDE_INT size;
  bool reverse;
  tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);

  if (!base)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  if (guaranteed_unmodified)
	    *guaranteed_unmodified = true;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	   void (*<T2e4>) (struct S *) D.1867;
	   struct S * p.1;

	   <bb 2>:
	   p.1_1 = p;
	   D.1867_2 = p.1_1->f;
	   D.1867_2 ();
	   gdp = &p;
	 }  */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0)
    {
      bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
      if (!data_preserved && !guaranteed_unmodified)
	return false;

      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      if (guaranteed_unmodified)
	*guaranteed_unmodified = data_preserved;
      return true;
    }
  return false;
}

/* If STMT is an assignment that loads a value from a parameter declaration,
   or from an aggregate passed as the parameter either by value or reference,
   return the index of the parameter in ipa_node_params.  Otherwise return -1.

   FBI holds gathered information about the function.  INFO describes
   parameters of the function, STMT is the assignment statement.  If it is a
   memory load from an aggregate, *OFFSET_P is filled with offset within the
   aggregate, and *BY_REF_P specifies whether the aggregate is passed by
   reference.  */

static int
load_from_unmodified_param_or_agg (struct ipa_func_body_info *fbi,
				   class ipa_node_params *info,
				   gimple *stmt,
				   HOST_WIDE_INT *offset_p,
				   bool *by_ref_p)
{
  int index = load_from_unmodified_param (fbi, info->descriptors, stmt);
  poly_int64 size;

  /* Load value from a parameter declaration.  */
  if (index >= 0)
    {
      *offset_p = -1;
      return index;
    }

  if (!gimple_assign_load_p (stmt))
    return -1;

  tree rhs = gimple_assign_rhs1 (stmt);

  /* Skip memory reference containing VIEW_CONVERT_EXPR.  */
  for (tree t = rhs; handled_component_p (t); t = TREE_OPERAND (t, 0))
    if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
      return -1;

  /* Skip memory reference containing bit-field.  */
  if (TREE_CODE (rhs) == BIT_FIELD_REF
      || contains_bitfld_component_ref_p (rhs))
    return -1;

  if (!ipa_load_from_parm_agg (fbi, info->descriptors, stmt, rhs, &index,
			       offset_p, &size, by_ref_p))
    return -1;

  gcc_assert (!maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (rhs))),
			 size));
  if (!*by_ref_p)
    {
      tree param_type = ipa_get_type (info, index);

      if (!param_type || !AGGREGATE_TYPE_P (param_type))
	return -1;
    }
  else if (TREE_THIS_VOLATILE (rhs))
    return -1;

  return index;
}

/* Walk pointer adjustments from OP (such as POINTER_PLUS and ADDR_EXPR)
   to find the original pointer.  Initialize RET to the pointer which results
   from the walk.
   If the offset is known, return true and initialize OFFSET_RET.  */

bool
unadjusted_ptr_and_unit_offset (tree op, tree *ret, poly_int64 *offset_ret)
{
  poly_int64 offset = 0;
  bool offset_known = true;
  int i;

  for (i = 0; i < param_ipa_jump_function_lookups; i++)
    {
      if (TREE_CODE (op) == ADDR_EXPR)
	{
	  poly_int64 extra_offset = 0;
	  tree base = get_addr_base_and_unit_offset (TREE_OPERAND (op, 0),
						     &offset);
	  if (!base)
	    {
	      base = get_base_address (TREE_OPERAND (op, 0));
	      if (TREE_CODE (base) != MEM_REF)
		break;
	      offset_known = false;
	    }
	  else
	    {
	      if (TREE_CODE (base) != MEM_REF)
		break;
	      offset += extra_offset;
	    }
	  op = TREE_OPERAND (base, 0);
	  if (mem_ref_offset (base).to_shwi (&extra_offset))
	    offset += extra_offset;
	  else
	    offset_known = false;
	}
      else if (TREE_CODE (op) == SSA_NAME
	       && !SSA_NAME_IS_DEFAULT_DEF (op))
	{
	  gimple *pstmt = SSA_NAME_DEF_STMT (op);

	  if (gimple_assign_single_p (pstmt))
	    op = gimple_assign_rhs1 (pstmt);
	  else if (is_gimple_assign (pstmt)
		   && gimple_assign_rhs_code (pstmt) == POINTER_PLUS_EXPR)
	    {
	      poly_int64 extra_offset = 0;
	      if (ptrdiff_tree_p (gimple_assign_rhs2 (pstmt),
				  &extra_offset))
		offset += extra_offset;
	      else
		offset_known = false;
	      op = gimple_assign_rhs1 (pstmt);
	    }
	  else
	    break;
	}
      else
	break;
    }
  *ret = op;
  *offset_ret = offset;
  return offset_known;
}
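
/* For example, given the illustrative GIMPLE sequence

     p_3 = q_2(D) + 16;
     use (p_3);

   walking from p_3 stops at the default definition q_2(D), storing q_2(D)
   into *RET and 16 into *OFFSET_RET, and returning true.  */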

/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
	int a.0;

	a.0_2 = a;
	bar (a.0_2);
      }

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
	int D.2064;

	D.2064_4 = a.1(D) + 4;
	bar (D.2064_4);
      }

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
	int a.0;
	int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);
      }

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);
     }

   INFO is the structure describing individual parameters, used across
   different stages of IPA optimizations.  PARMS_AINFO contains the
   information that is only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  class ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      switch (gimple_assign_rhs_class (stmt))
	{
	case GIMPLE_BINARY_RHS:
	  {
	    tree op2 = gimple_assign_rhs2 (stmt);
	    if (!is_gimple_ip_invariant (op2)
		|| ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
		     != tcc_comparison)
		    && !useless_type_conversion_p (TREE_TYPE (name),
						   TREE_TYPE (op1))))
	      return;

	    ipa_set_jf_arith_pass_through (jfunc, index, op2,
					   gimple_assign_rhs_code (stmt));
	    break;
	  }
	case GIMPLE_SINGLE_RHS:
	  {
	    bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
						       tc_ssa);
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	    break;
	  }
	case GIMPLE_UNARY_RHS:
	  if (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
	    ipa_set_jf_unary_pass_through (jfunc, index,
					   gimple_assign_rhs_code (stmt));
	default:;
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
  offset_int mem_offset;
  if (!base
      || TREE_CODE (base) != MEM_REF
      || !mem_ref_offset (base).is_constant (&mem_offset))
    return;
  offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success,
   the whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The
   whole RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);

  offset_int mem_offset;
  if (!expr
      || TREE_CODE (expr) != MEM_REF
      || !mem_ref_offset (expr).is_constant (&mem_offset))
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}

/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

     <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

     <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    class ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}

/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
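
/* For illustration, under the common Itanium C++ ABI lowering a pointer to
   member function such as

     void (A::*pmf) ();

   is represented as a record roughly of the shape

     struct { void (A::*__pfn) (); ptrdiff_t __delta; };

   i.e. a method pointer followed by an integral adjustment, which is exactly
   the two-field structure the predicate above recognizes.  */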

/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement, and this statement is stored in
   *RHS_STMT.  Otherwise return RHS as it is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs, gimple **rhs_stmt)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
      *rhs_stmt = def_stmt;
    }
  return rhs;
}
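
/* For example, given the illustrative copy chain

     b_2 = a_1;
     c_3 = b_2;

   calling the function above on c_3 follows both copies and returns a_1,
   with *RHS_STMT left pointing at the statement b_2 = a_1.  */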

/* Simple linked list, describing known contents of an aggregate before
   a call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;

  /* Type of the described part of the aggregate.  */
  tree type;

  /* Known constant value or jump function data describing contents.  */
  struct ipa_load_agg_data value;

  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};

/* Add an aggregate content item into a linked list of
   ipa_known_agg_contents_list structures, in which all elements
   are sorted ascendingly by offset.  */

static inline void
add_to_agg_contents_list (struct ipa_known_agg_contents_list **plist,
			  struct ipa_known_agg_contents_list *item)
{
  struct ipa_known_agg_contents_list *list = *plist;

  for (; list; list = list->next)
    {
      if (list->offset >= item->offset)
	break;

      plist = &list->next;
    }

  item->next = list;
  *plist = item;
}

/* Check whether a given aggregate content is clobbered by a certain element
   in a linked list of ipa_known_agg_contents_list.  */

static inline bool
clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list *list,
				struct ipa_known_agg_contents_list *item)
{
  for (; list; list = list->next)
    {
      if (list->offset >= item->offset)
	return list->offset < item->offset + item->size;

      if (list->offset + list->size > item->offset)
	return true;
    }

  return false;
}
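
/* For example (using abstract offset/size ranges in the same units as the
   fields above): with a sorted list containing entries covering [0, 8) and
   [16, 24), an ITEM covering [8, 16) overlaps no element and the function
   above returns false, whereas an ITEM covering [4, 12) overlaps the first
   entry and it returns true.  */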

/* Build aggregate jump function from LIST, assuming there are exactly
   VALUE_COUNT entries there and that the offset of the passed argument
   is ARG_OFFSET, and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int value_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_safe_reserve (jfunc->agg.items, value_count, true);
  for (; list; list = list->next)
    {
      struct ipa_agg_jf_item item;
      tree operand = list->value.pass_through.operand;

      if (list->value.pass_through.formal_id >= 0)
	{
	  /* Content value is derived from some formal parameter.  */
	  if (list->value.offset >= 0)
	    item.jftype = IPA_JF_LOAD_AGG;
	  else
	    item.jftype = IPA_JF_PASS_THROUGH;

	  item.value.load_agg = list->value;
	  if (operand)
	    item.value.pass_through.operand
	      = unshare_expr_without_location (operand);
	}
      else if (operand)
	{
	  /* Content value is a known constant.  */
	  item.jftype = IPA_JF_CONST;
	  item.value.constant = unshare_expr_without_location (operand);
	}
      else
	continue;

      item.type = list->type;
      gcc_assert (tree_to_shwi (TYPE_SIZE (list->type)) == list->size);

      item.offset = list->offset - arg_offset;
      gcc_assert ((item.offset % BITS_PER_UNIT) == 0);

      jfunc->agg.items->quick_push (item);
    }
}

/* Given an assignment statement STMT, try to collect information into
   AGG_VALUE that will be used to construct a jump function for the RHS of the
   assignment, from which the content value of an aggregate part comes.

   Besides constant and simple pass-through jump functions, also try to
   identify whether it matches the following pattern that can be described by
   a load-value-from-aggregate jump function, which is a derivative of simple
   pass-through jump function.

     foo (int *p)
     {
       ...

       *(q_5 + 4) = *(p_3(D) + 28) op 1;
       bar (q_5);
     }

   Here the IPA_LOAD_AGG_DATA data structure is informative enough to describe
   constant, simple pass-through and load-value-from-aggregate.  If the value
   is constant, it will be kept in field OPERAND, and field FORMAL_ID is
   set to -1.  For simple pass-through and load-value-from-aggregate, field
   FORMAL_ID specifies the related formal parameter index, and field
   OFFSET can be used to distinguish them, -1 means simple pass-through,
   otherwise it means load-value-from-aggregate.  */

static void
analyze_agg_content_value (struct ipa_func_body_info *fbi,
			   struct ipa_load_agg_data *agg_value,
			   gimple *stmt)
{
  tree lhs = gimple_assign_lhs (stmt);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  enum tree_code code;
  int index = -1;

  /* Initialize jump function data for the aggregate part.  */
  memset (agg_value, 0, sizeof (*agg_value));
  agg_value->pass_through.operation = NOP_EXPR;
  agg_value->pass_through.formal_id = -1;
  agg_value->offset = -1;

  if (AGGREGATE_TYPE_P (TREE_TYPE (lhs)) /* TODO: Support aggregate type.  */
      || TREE_THIS_VOLATILE (lhs)
      || TREE_CODE (lhs) == BIT_FIELD_REF
      || contains_bitfld_component_ref_p (lhs))
    return;

  /* Skip SSA copies.  */
  while (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
    {
      if (TREE_CODE (rhs1) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (rhs1))
	break;

      stmt = SSA_NAME_DEF_STMT (rhs1);
      if (!is_gimple_assign (stmt))
	break;

      rhs1 = gimple_assign_rhs1 (stmt);
    }

  if (gphi *phi = dyn_cast<gphi *> (stmt))
    {
      /* Also handle a special case like the following (a is a formal
	 parameter):

	   _12 = *a_11(D).dim[0].stride;
	   ...
	   # iftmp.22_9 = PHI <_12(2), 1(3)>
	   ...
	   parm.6.dim[0].stride = iftmp.22_9;
	   ...
	   __x_MOD_foo (&parm.6, b_31(D));

	 The aggregate function describing parm.6.dim[0].stride is encoded as a
	 PASS-THROUGH jump function with ASSERT_EXPR operation with operand 1
	 (the constant from the PHI node).  */

      if (gimple_phi_num_args (phi) != 2)
	return;
      tree arg0 = gimple_phi_arg_def (phi, 0);
      tree arg1 = gimple_phi_arg_def (phi, 1);
      tree operand;

      if (is_gimple_ip_invariant (arg1))
	{
	  operand = arg1;
	  rhs1 = arg0;
	}
      else if (is_gimple_ip_invariant (arg0))
	{
	  operand = arg0;
	  rhs1 = arg1;
	}
      else
	return;

      rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
      if (!is_gimple_assign (stmt))
	return;

      code = ASSERT_EXPR;
      agg_value->pass_through.operand = operand;
    }
  else if (is_gimple_assign (stmt))
    {
      code = gimple_assign_rhs_code (stmt);
      switch (gimple_assign_rhs_class (stmt))
	{
	case GIMPLE_SINGLE_RHS:
	  if (is_gimple_ip_invariant (rhs1))
	    {
	      agg_value->pass_through.operand = rhs1;
	      return;
	    }
	  code = NOP_EXPR;
	  break;

	case GIMPLE_UNARY_RHS:
	  /* NOTE: A GIMPLE_UNARY_RHS operation might not be tcc_unary
	     (truth_not_expr is an example), GIMPLE_BINARY_RHS does not imply
	     tcc_binary, this subtleness is somewhat misleading.

	     Since tcc_unary is widely used in IPA-CP code to check an
	     operation with one operand, here we only allow tcc_unary
	     operations to avoid possible problems.  Then we can use
	     (opclass == tcc_unary) or not to distinguish unary and binary.  */
	  if (TREE_CODE_CLASS (code) != tcc_unary || CONVERT_EXPR_CODE_P (code))
	    return;

	  rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
	  break;

	case GIMPLE_BINARY_RHS:
	  {
	    gimple *rhs1_stmt = stmt;
	    gimple *rhs2_stmt = stmt;
	    tree rhs2 = gimple_assign_rhs2 (stmt);

	    rhs1 = get_ssa_def_if_simple_copy (rhs1, &rhs1_stmt);
	    rhs2 = get_ssa_def_if_simple_copy (rhs2, &rhs2_stmt);

	    if (is_gimple_ip_invariant (rhs2))
	      {
		agg_value->pass_through.operand = rhs2;
		stmt = rhs1_stmt;
	      }
	    else if (is_gimple_ip_invariant (rhs1))
	      {
		if (TREE_CODE_CLASS (code) == tcc_comparison)
		  code = swap_tree_comparison (code);
		else if (!commutative_tree_code (code))
		  return;

		agg_value->pass_through.operand = rhs1;
		stmt = rhs2_stmt;
		rhs1 = rhs2;
	      }
	    else
	      return;

	    if (TREE_CODE_CLASS (code) != tcc_comparison
		&& !useless_type_conversion_p (TREE_TYPE (lhs),
					       TREE_TYPE (rhs1)))
	      return;

	    break;
	  }

	default:
	  return;
	}
    }
  else
    return;

  if (TREE_CODE (rhs1) != SSA_NAME)
    index = load_from_unmodified_param_or_agg (fbi, fbi->info, stmt,
					       &agg_value->offset,
					       &agg_value->by_ref);
  else if (SSA_NAME_IS_DEFAULT_DEF (rhs1))
    index = ipa_get_param_decl_index (fbi->info, SSA_NAME_VAR (rhs1));

  if (index >= 0)
    {
      if (agg_value->offset >= 0)
	agg_value->type = TREE_TYPE (rhs1);
      agg_value->pass_through.formal_id = index;
      agg_value->pass_through.operation = code;
    }
  else
    agg_value->pass_through.operand = NULL_TREE;
}

1920 /* If STMT is a memory store to the object whose address is BASE, extract
1921 information (offset, size, and value) into CONTENT, and return true,
1922 otherwise we conservatively assume the whole object is modified with
1923 unknown content, and return false. CHECK_REF means that access to object
1924 is expected to be in form of MEM_REF expression. */
1926 static bool
1927 extract_mem_content (struct ipa_func_body_info *fbi,
1928 gimple *stmt, tree base, bool check_ref,
1929 struct ipa_known_agg_contents_list *content)
1931 HOST_WIDE_INT lhs_offset, lhs_size;
1932 bool reverse;
1934 if (!is_gimple_assign (stmt))
1935 return false;
1937 tree lhs = gimple_assign_lhs (stmt);
1938 tree lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset, &lhs_size,
1939 &reverse);
1940 if (!lhs_base)
1941 return false;
1943 if (check_ref)
1945 if (TREE_CODE (lhs_base) != MEM_REF
1946 || TREE_OPERAND (lhs_base, 0) != base
1947 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1948 return false;
1950 else if (lhs_base != base)
1951 return false;
1953 content->offset = lhs_offset;
1954 content->size = lhs_size;
1955 content->type = TREE_TYPE (lhs);
1956 content->next = NULL;
1958 analyze_agg_content_value (fbi, &content->value, stmt);
1959 return true;
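/* For example (an illustrative sketch, assuming a 32-bit int): given

     struct S { int a; int b; } s;
     s.b = 7;

   with BASE being s, CONTENT would receive offset 32, size 32, type int and
   a constant jump function value of 7.  */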
1962 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1963 in ARG is filled in constants or values that are derived from caller's
1964 formal parameter in the way described by some kinds of jump functions. FBI
1965 is the context of the caller function for interprocedural analysis. ARG can
1966 either be an aggregate expression or a pointer to an aggregate. ARG_TYPE is
1967 the type of the aggregate, JFUNC is the jump function for the aggregate. */
1969 static void
1970 determine_known_aggregate_parts (struct ipa_func_body_info *fbi,
1971 gcall *call, tree arg,
1972 tree arg_type,
1973 struct ipa_jump_func *jfunc)
1975 struct ipa_known_agg_contents_list *list = NULL, *all_list = NULL;
1976 bitmap visited = NULL;
1977 int item_count = 0, value_count = 0;
1978 HOST_WIDE_INT arg_offset, arg_size;
1979 tree arg_base;
1980 bool check_ref, by_ref;
1981 ao_ref r;
1982 int max_agg_items = opt_for_fn (fbi->node->decl, param_ipa_max_agg_items);
1984 if (max_agg_items == 0)
1985 return;
1987 /* The function operates in three stages. First, we prepare check_ref, r,
1988 arg_base and arg_offset based on the form in which the actual argument
1989 is passed. */
1991 if (POINTER_TYPE_P (arg_type))
1993 by_ref = true;
1994 if (TREE_CODE (arg) == SSA_NAME)
1996 tree type_size;
1997 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type)))
1998 || !POINTER_TYPE_P (TREE_TYPE (arg)))
1999 return;
2000 check_ref = true;
2001 arg_base = arg;
2002 arg_offset = 0;
2003 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
2004 arg_size = tree_to_uhwi (type_size);
2005 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
2007 else if (TREE_CODE (arg) == ADDR_EXPR)
2009 bool reverse;
2011 arg = TREE_OPERAND (arg, 0);
2012 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
2013 &arg_size, &reverse);
2014 if (!arg_base)
2015 return;
2016 if (DECL_P (arg_base))
2018 check_ref = false;
2019 ao_ref_init (&r, arg_base);
2021 else
2022 return;
2024 else
2025 return;
2027 else
2029 bool reverse;
2031 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
2033 by_ref = false;
2034 check_ref = false;
2035 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
2036 &arg_size, &reverse);
2037 if (!arg_base)
2038 return;
2040 ao_ref_init (&r, arg);
2043 /* The second stage traverses the virtual SSA web backwards starting from the
2044 call statement and only looks at individual dominating virtual operands
2045 (those whose definitions dominate the call). As long as it is confident
2046 that the content of the aggregate is affected by the definition of the
2047 virtual operand, it builds a sorted linked list of ipa_known_agg_contents_list entries describing that. */
2049 for (tree dom_vuse = gimple_vuse (call);
2050 dom_vuse && fbi->aa_walk_budget > 0;)
2052 gimple *stmt = SSA_NAME_DEF_STMT (dom_vuse);
2054 if (gimple_code (stmt) == GIMPLE_PHI)
2056 dom_vuse = get_continuation_for_phi (stmt, &r, true,
2057 fbi->aa_walk_budget,
2058 &visited, false, NULL, NULL);
2059 continue;
2062 fbi->aa_walk_budget--;
2063 if (stmt_may_clobber_ref_p_1 (stmt, &r))
2065 struct ipa_known_agg_contents_list *content
2066 = XALLOCA (struct ipa_known_agg_contents_list);
2068 if (!extract_mem_content (fbi, stmt, arg_base, check_ref, content))
2069 break;
2071 /* Now we get a dominating virtual operand, and need to check
2072 whether its value is clobbered by any other dominating one. */
2073 if ((content->value.pass_through.formal_id >= 0
2074 || content->value.pass_through.operand)
2075 && !clobber_by_agg_contents_list_p (all_list, content))
2077 struct ipa_known_agg_contents_list *copy
2078 = XALLOCA (struct ipa_known_agg_contents_list);
2080 /* Add to the list consisting of only dominating virtual
2081 operands, whose definitions can finally reach the call. */
2082 add_to_agg_contents_list (&list, (*copy = *content, copy));
2084 if (++value_count == max_agg_items)
2085 break;
2088 /* Add to the list consisting of all dominating virtual operands. */
2089 add_to_agg_contents_list (&all_list, content);
2091 if (++item_count == 2 * max_agg_items)
2092 break;
2094 dom_vuse = gimple_vuse (stmt);
2097 if (visited)
2098 BITMAP_FREE (visited);
2100 /* Third stage just goes over the list and creates an appropriate vector of
2101 ipa_agg_jf_item structures out of it, of course only if there are
2102 any meaningful items to begin with. */
2104 if (value_count)
2106 jfunc->agg.by_ref = by_ref;
2107 build_agg_jump_func_from_list (list, value_count, arg_offset, jfunc);
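/* A hypothetical example of the whole process (not from the sources):

     struct S { int a; int b; } s;
     s.a = 1;
     s.b = param_1;
     foo (&s);

   Walking backwards from the call finds the two dominating stores and,
   assuming a 32-bit int, JFUNC->agg ends up describing a constant 1 at
   offset 0 and a pass-through of the parameter at offset 32.  */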
2112 /* Return the Ith param type of callee associated with call graph
2113 edge E. */
2115 tree
2116 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
2118 int n;
2119 tree type = (e->callee
2120 ? TREE_TYPE (e->callee->decl)
2121 : gimple_call_fntype (e->call_stmt));
2122 tree t = TYPE_ARG_TYPES (type);
2124 for (n = 0; n < i; n++)
2126 if (!t)
2127 break;
2128 t = TREE_CHAIN (t);
2130 if (t)
2131 return TREE_VALUE (t);
2132 if (!e->callee)
2133 return NULL;
2134 t = DECL_ARGUMENTS (e->callee->decl);
2135 for (n = 0; n < i; n++)
2137 if (!t)
2138 return NULL;
2139 t = TREE_CHAIN (t);
2141 if (t)
2142 return TREE_TYPE (t);
2143 return NULL;
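/* For instance (a sketch): for a callee declared as int foo (int, double),
   I == 1 yields the double type node from TYPE_ARG_TYPES; for unprototyped
   (K&R) callees that list is absent and the fallback walk over
   DECL_ARGUMENTS is used instead.  */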
2146 /* Return ipa_bits with VALUE and MASK values, which can be either a newly
2147 allocated structure or a previously existing one shared with other jump
2148 functions and/or transformation summaries. */
2150 ipa_bits *
2151 ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
2153 ipa_bits tmp;
2154 tmp.value = value;
2155 tmp.mask = mask;
2157 ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
2158 if (*slot)
2159 return *slot;
2161 ipa_bits *res = ggc_alloc<ipa_bits> ();
2162 res->value = value;
2163 res->mask = mask;
2164 *slot = res;
2166 return res;
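/* A minimal usage sketch (hypothetical values): two consecutive calls like

     ipa_bits *a = ipa_get_ipa_bits_for_value (4, -8);
     ipa_bits *b = ipa_get_ipa_bits_for_value (4, -8);

   return the very same GC-allocated structure, which is how jump functions
   and transformation summaries end up sharing ipa_bits.  */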
2169 /* Assign to JF a pointer to an ipa_bits structure with VALUE and MASK. Use a
2170 hash table in order to avoid creating multiple identical ipa_bits structures. */
2172 static void
2173 ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
2174 const widest_int &mask)
2176 jf->bits = ipa_get_ipa_bits_for_value (value, mask);
2179 /* Return a pointer to a value_range just like *TMP, but either find it in
2180 ipa_vr_hash_table or allocate it in GC memory. TMP->equiv must be NULL. */
2182 static value_range *
2183 ipa_get_value_range (value_range *tmp)
2185 value_range **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
2186 if (*slot)
2187 return *slot;
2189 value_range *vr = new (ggc_alloc<value_range> ()) value_range;
2190 *vr = *tmp;
2191 *slot = vr;
2193 return vr;
2196 /* Return a pointer to a value range consisting of KIND, MIN and MAX and an
2197 empty equiv set. Use a hash table in order to avoid creating multiple
2198 identical copies of value_ranges. */
2200 static value_range *
2201 ipa_get_value_range (enum value_range_kind kind, tree min, tree max)
2203 value_range tmp (min, max, kind);
2204 return ipa_get_value_range (&tmp);
2207 /* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
2208 a NULL equiv bitmap. Use hash table in order to avoid creating multiple
2209 same value_range structures. */
2211 static void
2212 ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_kind type,
2213 tree min, tree max)
2215 jf->m_vr = ipa_get_value_range (type, min, max);
2218 /* Assign to JF a pointer to a value_range just like TMP but either fetch a
2219 copy from ipa_vr_hash_table or allocate a new one in GC memory. */
2221 static void
2222 ipa_set_jfunc_vr (ipa_jump_func *jf, value_range *tmp)
2224 jf->m_vr = ipa_get_value_range (tmp);
2227 /* Compute jump function for all arguments of callsite CS and insert the
2228 information in the jump_functions array in the ipa_edge_args corresponding
2229 to this callsite. */
2231 static void
2232 ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
2233 struct cgraph_edge *cs)
2235 class ipa_node_params *info = IPA_NODE_REF (cs->caller);
2236 class ipa_edge_args *args = IPA_EDGE_REF_GET_CREATE (cs);
2237 gcall *call = cs->call_stmt;
2238 int n, arg_num = gimple_call_num_args (call);
2239 bool useful_context = false;
2241 if (arg_num == 0 || args->jump_functions)
2242 return;
2243 vec_safe_grow_cleared (args->jump_functions, arg_num, true);
2244 if (flag_devirtualize)
2245 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num, true);
2247 if (gimple_call_internal_p (call))
2248 return;
2249 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
2250 return;
2252 for (n = 0; n < arg_num; n++)
2254 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
2255 tree arg = gimple_call_arg (call, n);
2256 tree param_type = ipa_get_callee_param_type (cs, n);
2257 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
2259 tree instance;
2260 class ipa_polymorphic_call_context context (cs->caller->decl,
2261 arg, cs->call_stmt,
2262 &instance);
2263 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt,
2264 &fbi->aa_walk_budget);
2265 *ipa_get_ith_polymorhic_call_context (args, n) = context;
2266 if (!context.useless_p ())
2267 useful_context = true;
2270 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2272 bool addr_nonzero = false;
2273 bool strict_overflow = false;
2275 if (TREE_CODE (arg) == SSA_NAME
2276 && param_type
2277 && get_ptr_nonnull (arg))
2278 addr_nonzero = true;
2279 else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
2280 addr_nonzero = true;
2282 if (addr_nonzero)
2284 tree z = build_int_cst (TREE_TYPE (arg), 0);
2285 ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
2287 else
2288 gcc_assert (!jfunc->m_vr);
2290 else
2292 wide_int min, max;
2293 value_range_kind kind;
2294 if (TREE_CODE (arg) == SSA_NAME
2295 && param_type
2296 && (kind = get_range_info (arg, &min, &max))
2297 && (kind == VR_RANGE || kind == VR_ANTI_RANGE))
2299 value_range resvr;
2300 value_range tmpvr (wide_int_to_tree (TREE_TYPE (arg), min),
2301 wide_int_to_tree (TREE_TYPE (arg), max),
2302 kind);
2303 range_fold_unary_expr (&resvr, NOP_EXPR, param_type,
2304 &tmpvr, TREE_TYPE (arg));
2305 if (!resvr.undefined_p () && !resvr.varying_p ())
2306 ipa_set_jfunc_vr (jfunc, &resvr);
2307 else
2308 gcc_assert (!jfunc->m_vr);
2310 else
2311 gcc_assert (!jfunc->m_vr);
2314 if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
2315 && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
2317 if (TREE_CODE (arg) == SSA_NAME)
2318 ipa_set_jfunc_bits (jfunc, 0,
2319 widest_int::from (get_nonzero_bits (arg),
2320 TYPE_SIGN (TREE_TYPE (arg))));
2321 else
2322 ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
2324 else if (POINTER_TYPE_P (TREE_TYPE (arg)))
2326 unsigned HOST_WIDE_INT bitpos;
2327 unsigned align;
2329 get_pointer_alignment_1 (arg, &align, &bitpos);
2330 widest_int mask = wi::bit_and_not
2331 (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
2332 align / BITS_PER_UNIT - 1);
2333 widest_int value = bitpos / BITS_PER_UNIT;
2334 ipa_set_jfunc_bits (jfunc, value, mask);
2336 else
2337 gcc_assert (!jfunc->bits);
2339 if (is_gimple_ip_invariant (arg)
2340 || (VAR_P (arg)
2341 && is_global_var (arg)
2342 && TREE_READONLY (arg)))
2343 ipa_set_jf_constant (jfunc, arg, cs);
2344 else if (!is_gimple_reg_type (TREE_TYPE (arg))
2345 && TREE_CODE (arg) == PARM_DECL)
2347 int index = ipa_get_param_decl_index (info, arg);
2349 gcc_assert (index >= 0);
2350 /* Aggregate passed by value, check for pass-through, otherwise we
2351 will attempt to fill in aggregate contents later in this
2352 for loop. */
2353 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
2355 ipa_set_jf_simple_pass_through (jfunc, index, false);
2356 continue;
2359 else if (TREE_CODE (arg) == SSA_NAME)
2361 if (SSA_NAME_IS_DEFAULT_DEF (arg))
2363 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
2364 if (index >= 0)
2366 bool agg_p;
2367 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
2368 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
2371 else
2373 gimple *stmt = SSA_NAME_DEF_STMT (arg);
2374 if (is_gimple_assign (stmt))
2375 compute_complex_assign_jump_func (fbi, info, jfunc,
2376 call, stmt, arg, param_type);
2377 else if (gimple_code (stmt) == GIMPLE_PHI)
2378 compute_complex_ancestor_jump_func (fbi, info, jfunc,
2379 call,
2380 as_a <gphi *> (stmt));
2384 /* If ARG is a pointer, we cannot use its type to determine the type of the
2385 aggregate passed (because type conversions are ignored in gimple). Usually
2386 we can safely get the type from the function declaration, but in case of
2387 K&R prototypes or variadic functions we can try our luck with the type of
2388 the pointer passed. TODO: Since we look for actual initialization of the
2389 memory object, we might do better to derive the type from the stores we find. */
2390 if (!param_type)
2391 param_type = TREE_TYPE (arg);
2393 if ((jfunc->type != IPA_JF_PASS_THROUGH
2394 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
2395 && (jfunc->type != IPA_JF_ANCESTOR
2396 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
2397 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
2398 || POINTER_TYPE_P (param_type)))
2399 determine_known_aggregate_parts (fbi, call, arg, param_type, jfunc);
2401 if (!useful_context)
2402 vec_free (args->polymorphic_call_contexts);
2405 /* Compute jump functions for all edges - both direct and indirect - outgoing
2406 from BB. */
2408 static void
2409 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
2411 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2412 int i;
2413 struct cgraph_edge *cs;
2415 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
2417 struct cgraph_node *callee = cs->callee;
2419 if (callee)
2421 callee = callee->ultimate_alias_target ();
2422 /* We do not need to bother analyzing calls to unknown functions
2423 unless they may become known during lto/whopr. */
2424 if (!callee->definition && !flag_lto
2425 && !gimple_call_fnspec (cs->call_stmt).known_p ())
2426 continue;
2428 ipa_compute_jump_functions_for_edge (fbi, cs);
2432 /* If STMT looks like a statement loading a value from a member pointer formal
2433 parameter, return that parameter and store the offset of the field to
2434 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2435 might be clobbered). If USE_DELTA, then we look for a use of the delta
2436 field rather than the pfn. */
2438 static tree
2439 ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
2440 HOST_WIDE_INT *offset_p)
2442 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2444 if (!gimple_assign_single_p (stmt))
2445 return NULL_TREE;
2447 rhs = gimple_assign_rhs1 (stmt);
2448 if (TREE_CODE (rhs) == COMPONENT_REF)
2450 ref_field = TREE_OPERAND (rhs, 1);
2451 rhs = TREE_OPERAND (rhs, 0);
2453 else
2454 ref_field = NULL_TREE;
2455 if (TREE_CODE (rhs) != MEM_REF)
2456 return NULL_TREE;
2457 rec = TREE_OPERAND (rhs, 0);
2458 if (TREE_CODE (rec) != ADDR_EXPR)
2459 return NULL_TREE;
2460 rec = TREE_OPERAND (rec, 0);
2461 if (TREE_CODE (rec) != PARM_DECL
2462 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2463 return NULL_TREE;
2464 ref_offset = TREE_OPERAND (rhs, 1);
2466 if (use_delta)
2467 fld = delta_field;
2468 else
2469 fld = ptr_field;
2470 if (offset_p)
2471 *offset_p = int_bit_position (fld);
2473 if (ref_field)
2475 if (integer_nonzerop (ref_offset))
2476 return NULL_TREE;
2477 return ref_field == fld ? rec : NULL_TREE;
2479 else
2480 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2481 : NULL_TREE;
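/* As a sketch: for a member pointer parameter F, a load such as

     f$__pfn_24 = MEM[(struct *)&f];

   (see also the larger example below) returns F and stores the bit position
   of the __pfn field to *OFFSET_P; with USE_DELTA the __delta field would
   have to be matched instead.  */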
2484 /* Returns true iff T is an SSA_NAME defined by a statement. */
2486 static bool
2487 ipa_is_ssa_with_stmt_def (tree t)
2489 if (TREE_CODE (t) == SSA_NAME
2490 && !SSA_NAME_IS_DEFAULT_DEF (t))
2491 return true;
2492 else
2493 return false;
2496 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2497 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2498 indirect call graph edge.
2499 If POLYMORPHIC is true, record it as a destination of a polymorphic call. */
2501 static struct cgraph_edge *
2502 ipa_note_param_call (struct cgraph_node *node, int param_index,
2503 gcall *stmt, bool polymorphic)
2505 struct cgraph_edge *cs;
2507 cs = node->get_edge (stmt);
2508 cs->indirect_info->param_index = param_index;
2509 cs->indirect_info->agg_contents = 0;
2510 cs->indirect_info->member_ptr = 0;
2511 cs->indirect_info->guaranteed_unmodified = 0;
2512 ipa_set_param_used_by_indirect_call (IPA_NODE_REF (node),
2513 param_index, true);
2514 if (cs->indirect_info->polymorphic || polymorphic)
2515 ipa_set_param_used_by_polymorphic_call
2516 (IPA_NODE_REF (node), param_index, true);
2517 return cs;
2520 /* Analyze the indirect CALL with called value TARGET and examine uses of
2521 formal parameters of the caller FBI->node (described by FBI->info).
2522 Currently it checks
2523 whether the call calls a pointer that is a formal parameter and if so, the
2524 parameter is marked with the called flag and an indirect call graph edge
2525 describing the call is created. This is very simple for ordinary pointers
2526 represented in SSA but not-so-nice when it comes to member pointers. The
2527 ugly part of this function does nothing more than trying to match the
2528 pattern of such a call. An example of such a pattern is the gimple dump
2529 below, the call is on the last line:
2531 <bb 2>:
2532 f$__delta_5 = f.__delta;
2533 f$__pfn_24 = f.__pfn;
2536 <bb 2>:
2537 f$__delta_5 = MEM[(struct *)&f];
2538 f$__pfn_24 = MEM[(struct *)&f + 4B];
2540 and a few lines below:
2542 <bb 5>
2543 D.2496_3 = (int) f$__pfn_24;
2544 D.2497_4 = D.2496_3 & 1;
2545 if (D.2497_4 != 0)
2546 goto <bb 3>;
2547 else
2548 goto <bb 4>;
2550 <bb 6>:
2551 D.2500_7 = (unsigned int) f$__delta_5;
2552 D.2501_8 = &S + D.2500_7;
2553 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2554 D.2503_10 = *D.2502_9;
2555 D.2504_12 = f$__pfn_24 + -1;
2556 D.2505_13 = (unsigned int) D.2504_12;
2557 D.2506_14 = D.2503_10 + D.2505_13;
2558 D.2507_15 = *D.2506_14;
2559 iftmp.11_16 = (String:: *) D.2507_15;
2561 <bb 7>:
2562 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2563 D.2500_19 = (unsigned int) f$__delta_5;
2564 D.2508_20 = &S + D.2500_19;
2565 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2567 Such patterns are results of simple calls to a member pointer:
2569 int doprinting (int (MyString::* f)(int) const)
2571 MyString S ("somestring");
2573 return (S.*f)(4);
2576 Moreover, the function also looks for called pointers loaded from aggregates
2577 passed by value or reference. */
2579 static void
2580 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
2581 tree target)
2583 class ipa_node_params *info = fbi->info;
2584 HOST_WIDE_INT offset;
2585 bool by_ref;
2587 if (SSA_NAME_IS_DEFAULT_DEF (target))
2589 tree var = SSA_NAME_VAR (target);
2590 int index = ipa_get_param_decl_index (info, var);
2591 if (index >= 0)
2592 ipa_note_param_call (fbi->node, index, call, false);
2593 return;
2596 int index;
2597 gimple *def = SSA_NAME_DEF_STMT (target);
2598 bool guaranteed_unmodified;
2599 if (gimple_assign_single_p (def)
2600 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2601 gimple_assign_rhs1 (def), &index, &offset,
2602 NULL, &by_ref, &guaranteed_unmodified))
2604 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2605 call, false);
2606 cs->indirect_info->offset = offset;
2607 cs->indirect_info->agg_contents = 1;
2608 cs->indirect_info->by_ref = by_ref;
2609 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
2610 return;
2613 /* Now we need to try to match the complex pattern of calling a member
2614 pointer. */
2615 if (gimple_code (def) != GIMPLE_PHI
2616 || gimple_phi_num_args (def) != 2
2617 || !POINTER_TYPE_P (TREE_TYPE (target))
2618 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2619 return;
2621 /* First, we need to check whether one of these is a load from a member
2622 pointer that is a parameter to this function. */
2623 tree n1 = PHI_ARG_DEF (def, 0);
2624 tree n2 = PHI_ARG_DEF (def, 1);
2625 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2626 return;
2627 gimple *d1 = SSA_NAME_DEF_STMT (n1);
2628 gimple *d2 = SSA_NAME_DEF_STMT (n2);
2630 tree rec;
2631 basic_block bb, virt_bb;
2632 basic_block join = gimple_bb (def);
2633 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2635 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2636 return;
2638 bb = EDGE_PRED (join, 0)->src;
2639 virt_bb = gimple_bb (d2);
2641 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2643 bb = EDGE_PRED (join, 1)->src;
2644 virt_bb = gimple_bb (d1);
2646 else
2647 return;
2649 /* Second, we need to check that the basic blocks are laid out in the way
2650 corresponding to the pattern. */
2652 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2653 || single_pred (virt_bb) != bb
2654 || single_succ (virt_bb) != join)
2655 return;
2657 /* Third, let's see that the branching is done depending on the least
2658 significant bit of the pfn. */
2660 gimple *branch = last_stmt (bb);
2661 if (!branch || gimple_code (branch) != GIMPLE_COND)
2662 return;
2664 if ((gimple_cond_code (branch) != NE_EXPR
2665 && gimple_cond_code (branch) != EQ_EXPR)
2666 || !integer_zerop (gimple_cond_rhs (branch)))
2667 return;
2669 tree cond = gimple_cond_lhs (branch);
2670 if (!ipa_is_ssa_with_stmt_def (cond))
2671 return;
2673 def = SSA_NAME_DEF_STMT (cond);
2674 if (!is_gimple_assign (def)
2675 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2676 || !integer_onep (gimple_assign_rhs2 (def)))
2677 return;
2679 cond = gimple_assign_rhs1 (def);
2680 if (!ipa_is_ssa_with_stmt_def (cond))
2681 return;
2683 def = SSA_NAME_DEF_STMT (cond);
2685 if (is_gimple_assign (def)
2686 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2688 cond = gimple_assign_rhs1 (def);
2689 if (!ipa_is_ssa_with_stmt_def (cond))
2690 return;
2691 def = SSA_NAME_DEF_STMT (cond);
2694 tree rec2;
2695 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2696 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2697 == ptrmemfunc_vbit_in_delta),
2698 NULL);
2699 if (rec != rec2)
2700 return;
2702 index = ipa_get_param_decl_index (info, rec);
2703 if (index >= 0
2704 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2706 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2707 call, false);
2708 cs->indirect_info->offset = offset;
2709 cs->indirect_info->agg_contents = 1;
2710 cs->indirect_info->member_ptr = 1;
2711 cs->indirect_info->guaranteed_unmodified = 1;
2714 return;
2717 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2718 object referenced in the expression is a formal parameter of the caller
2719 FBI->node (described by FBI->info), create a call note for the
2720 statement. */
2722 static void
2723 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2724 gcall *call, tree target)
2726 tree obj = OBJ_TYPE_REF_OBJECT (target);
2727 int index;
2728 HOST_WIDE_INT anc_offset;
2730 if (!flag_devirtualize)
2731 return;
2733 if (TREE_CODE (obj) != SSA_NAME)
2734 return;
2736 class ipa_node_params *info = fbi->info;
2737 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2739 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2740 return;
2742 anc_offset = 0;
2743 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2744 gcc_assert (index >= 0);
2745 if (detect_type_change_ssa (fbi, obj, obj_type_ref_class (target),
2746 call))
2747 return;
2749 else
2751 gimple *stmt = SSA_NAME_DEF_STMT (obj);
2752 tree expr;
2754 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2755 if (!expr)
2756 return;
2757 index = ipa_get_param_decl_index (info,
2758 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2759 gcc_assert (index >= 0);
2760 if (detect_type_change (fbi, obj, expr, obj_type_ref_class (target),
2761 call, anc_offset))
2762 return;
2765 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2766 call, true);
2767 class cgraph_indirect_call_info *ii = cs->indirect_info;
2768 ii->offset = anc_offset;
2769 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2770 ii->otr_type = obj_type_ref_class (target);
2771 ii->polymorphic = 1;
2774 /* Analyze the call statement CALL for whether and how it utilizes formal
2775 parameters of the caller FBI->node (described by FBI->info). */
2778 static void
2779 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2781 tree target = gimple_call_fn (call);
2783 if (!target
2784 || (TREE_CODE (target) != SSA_NAME
2785 && !virtual_method_call_p (target)))
2786 return;
2788 struct cgraph_edge *cs = fbi->node->get_edge (call);
2789 /* If we previously turned the call into a direct call, there is
2790 no need to analyze. */
2791 if (cs && !cs->indirect_unknown_callee)
2792 return;
2794 if (cs->indirect_info->polymorphic && flag_devirtualize)
2796 tree instance;
2797 tree target = gimple_call_fn (call);
2798 ipa_polymorphic_call_context context (current_function_decl,
2799 target, call, &instance);
2801 gcc_checking_assert (cs->indirect_info->otr_type
2802 == obj_type_ref_class (target));
2803 gcc_checking_assert (cs->indirect_info->otr_token
2804 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2806 cs->indirect_info->vptr_changed
2807 = !context.get_dynamic_type (instance,
2808 OBJ_TYPE_REF_OBJECT (target),
2809 obj_type_ref_class (target), call,
2810 &fbi->aa_walk_budget);
2811 cs->indirect_info->context = context;
2814 if (TREE_CODE (target) == SSA_NAME)
2815 ipa_analyze_indirect_call_uses (fbi, call, target);
2816 else if (virtual_method_call_p (target))
2817 ipa_analyze_virtual_call_uses (fbi, call, target);
2821 /* Analyze the call statement STMT with respect to formal parameters
2822 (described in FBI->info) of the caller given by FBI->node. Currently it
2823 only checks whether formal parameters are called. */
2825 static void
2826 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2828 if (is_gimple_call (stmt))
2829 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2832 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2833 If OP is a parameter declaration, mark it as used in the info structure
2834 passed in DATA. */
2836 static bool
2837 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2839 class ipa_node_params *info = (class ipa_node_params *) data;
2841 op = get_base_address (op);
2842 if (op
2843 && TREE_CODE (op) == PARM_DECL)
2845 int index = ipa_get_param_decl_index (info, op);
2846 gcc_assert (index >= 0);
2847 ipa_set_param_used (info, index, true);
2850 return false;
2853 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2854 the findings in various structures of the associated ipa_node_params
2855 structure, such as parameter flags, notes etc. FBI holds various data about
2856 the function being analyzed. */
2858 static void
2859 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2861 gimple_stmt_iterator gsi;
2862 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2864 gimple *stmt = gsi_stmt (gsi);
2866 if (is_gimple_debug (stmt))
2867 continue;
2869 ipa_analyze_stmt_uses (fbi, stmt);
2870 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2871 visit_ref_for_mod_analysis,
2872 visit_ref_for_mod_analysis,
2873 visit_ref_for_mod_analysis);
2875 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2876 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2877 visit_ref_for_mod_analysis,
2878 visit_ref_for_mod_analysis,
2879 visit_ref_for_mod_analysis);
2882 /* Calculate controlled uses of parameters of NODE. */
2884 static void
2885 ipa_analyze_controlled_uses (struct cgraph_node *node)
2887 class ipa_node_params *info = IPA_NODE_REF (node);
2889 for (int i = 0; i < ipa_get_param_count (info); i++)
2891 tree parm = ipa_get_param (info, i);
2892 int controlled_uses = 0;
2894 /* For SSA regs see if parameter is used. For non-SSA we compute
2895 the flag during modification analysis. */
2896 if (is_gimple_reg (parm))
2898 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2899 parm);
2900 if (ddef && !has_zero_uses (ddef))
2902 imm_use_iterator imm_iter;
2903 use_operand_p use_p;
2905 ipa_set_param_used (info, i, true);
2906 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2907 if (!is_gimple_call (USE_STMT (use_p)))
2909 if (!is_gimple_debug (USE_STMT (use_p)))
2911 controlled_uses = IPA_UNDESCRIBED_USE;
2912 break;
2915 else
2916 controlled_uses++;
2918 else
2919 controlled_uses = 0;
2921 else
2922 controlled_uses = IPA_UNDESCRIBED_USE;
2923 ipa_set_controlled_uses (info, i, controlled_uses);
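/* A hypothetical illustration of the above: in

     void wrapper (void (*cb) (void)) { cb (); cb (); }

   the default definition of CB is used only in call statements, so its
   controlled-uses count is 2; any non-call, non-debug use (e.g. storing CB
   into memory) would force IPA_UNDESCRIBED_USE instead.  */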
2927 /* Free stuff in BI. */
2929 static void
2930 free_ipa_bb_info (struct ipa_bb_info *bi)
2932 bi->cg_edges.release ();
2933 bi->param_aa_statuses.release ();
2936 /* Dominator walker driving the analysis. */
2938 class analysis_dom_walker : public dom_walker
2940 public:
2941 analysis_dom_walker (struct ipa_func_body_info *fbi)
2942 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2944 virtual edge before_dom_children (basic_block);
2946 private:
2947 struct ipa_func_body_info *m_fbi;
2950 edge
2951 analysis_dom_walker::before_dom_children (basic_block bb)
2953 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2954 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2955 return NULL;
2958 /* Release body info FBI. */
2960 void
2961 ipa_release_body_info (struct ipa_func_body_info *fbi)
2963 int i;
2964 struct ipa_bb_info *bi;
2966 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2967 free_ipa_bb_info (bi);
2968 fbi->bb_infos.release ();
2971 /* Initialize the array describing properties of formal parameters
2972 of NODE, analyze their uses and compute jump functions associated
2973 with actual arguments of calls from within NODE. */
2975 void
2976 ipa_analyze_node (struct cgraph_node *node)
2978 struct ipa_func_body_info fbi;
2979 class ipa_node_params *info;
2981 ipa_check_create_node_params ();
2982 ipa_check_create_edge_args ();
2983 info = IPA_NODE_REF_GET_CREATE (node);
2985 if (info->analysis_done)
2986 return;
2987 info->analysis_done = 1;
2989 if (ipa_func_spec_opts_forbid_analysis_p (node))
2991 for (int i = 0; i < ipa_get_param_count (info); i++)
2993 ipa_set_param_used (info, i, true);
2994 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2996 return;
2999 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
3000 push_cfun (func);
3001 calculate_dominance_info (CDI_DOMINATORS);
3002 ipa_initialize_node_params (node);
3003 ipa_analyze_controlled_uses (node);
3005 fbi.node = node;
3006 fbi.info = IPA_NODE_REF (node);
3007 fbi.bb_infos = vNULL;
3008 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
3009 fbi.param_count = ipa_get_param_count (info);
3010 fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);
3012 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
3014 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
3015 bi->cg_edges.safe_push (cs);
3018 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
3020 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
3021 bi->cg_edges.safe_push (cs);
3024 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
3026 ipa_release_body_info (&fbi);
3027 free_dominance_info (CDI_DOMINATORS);
3028 pop_cfun ();
3031 /* Update the jump functions associated with call graph edge E when the call
3032 graph edge CS is being inlined, assuming that E->caller is already (possibly
3033 indirectly) inlined into CS->callee and that E has not been inlined. */
3035 static void
3036 update_jump_functions_after_inlining (struct cgraph_edge *cs,
3037 struct cgraph_edge *e)
3039 class ipa_edge_args *top = IPA_EDGE_REF (cs);
3040 class ipa_edge_args *args = IPA_EDGE_REF (e);
3041 if (!args)
3042 return;
3043 int count = ipa_get_cs_argument_count (args);
3044 int i;
3046 for (i = 0; i < count; i++)
3048 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
3049 class ipa_polymorphic_call_context *dst_ctx
3050 = ipa_get_ith_polymorhic_call_context (args, i);
3052 if (dst->agg.items)
3054 struct ipa_agg_jf_item *item;
3055 int j;
3057 FOR_EACH_VEC_ELT (*dst->agg.items, j, item)
3059 int dst_fid;
3060 struct ipa_jump_func *src;
3062 if (item->jftype != IPA_JF_PASS_THROUGH
3063 && item->jftype != IPA_JF_LOAD_AGG)
3064 continue;
3066 dst_fid = item->value.pass_through.formal_id;
3067 if (!top || dst_fid >= ipa_get_cs_argument_count (top))
3069 item->jftype = IPA_JF_UNKNOWN;
3070 continue;
3073 item->value.pass_through.formal_id = -1;
3074 src = ipa_get_ith_jump_func (top, dst_fid);
3075 if (src->type == IPA_JF_CONST)
3077 if (item->jftype == IPA_JF_PASS_THROUGH
3078 && item->value.pass_through.operation == NOP_EXPR)
3080 item->jftype = IPA_JF_CONST;
3081 item->value.constant = src->value.constant.value;
3082 continue;
3085 else if (src->type == IPA_JF_PASS_THROUGH
3086 && src->value.pass_through.operation == NOP_EXPR)
3088 if (item->jftype == IPA_JF_PASS_THROUGH
3089 || !item->value.load_agg.by_ref
3090 || src->value.pass_through.agg_preserved)
3091 item->value.pass_through.formal_id
3092 = src->value.pass_through.formal_id;
3094 else if (src->type == IPA_JF_ANCESTOR)
3096 if (item->jftype == IPA_JF_PASS_THROUGH)
3098 if (!src->value.ancestor.offset)
3099 item->value.pass_through.formal_id
3100 = src->value.ancestor.formal_id;
3102 else if (src->value.ancestor.agg_preserved)
3104 gcc_checking_assert (item->value.load_agg.by_ref);
3106 item->value.pass_through.formal_id
3107 = src->value.ancestor.formal_id;
3108 item->value.load_agg.offset
3109 += src->value.ancestor.offset;
3113 if (item->value.pass_through.formal_id < 0)
3114 item->jftype = IPA_JF_UNKNOWN;
3118 if (!top)
3120 ipa_set_jf_unknown (dst);
3121 continue;
3124 if (dst->type == IPA_JF_ANCESTOR)
3126 struct ipa_jump_func *src;
3127 int dst_fid = dst->value.ancestor.formal_id;
3128 class ipa_polymorphic_call_context *src_ctx
3129 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
3131 /* Variable number of arguments can cause havoc if we try to access
3132 one that does not exist in the inlined edge. So make sure we
3133 don't. */
3134 if (dst_fid >= ipa_get_cs_argument_count (top))
3136 ipa_set_jf_unknown (dst);
3137 continue;
3140 src = ipa_get_ith_jump_func (top, dst_fid);
3142 if (src_ctx && !src_ctx->useless_p ())
3144 class ipa_polymorphic_call_context ctx = *src_ctx;
3146 /* TODO: Make type preserved safe WRT contexts. */
3147 if (!ipa_get_jf_ancestor_type_preserved (dst))
3148 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
3149 ctx.offset_by (dst->value.ancestor.offset);
3150 if (!ctx.useless_p ())
3152 if (!dst_ctx)
3154 vec_safe_grow_cleared (args->polymorphic_call_contexts,
3155 count, true);
3156 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3159 dst_ctx->combine_with (ctx);
3163 /* The parameter and argument in an ancestor jump function must be of
3164 pointer type, which means access to the aggregate must be by reference. */
3165 gcc_assert (!src->agg.items || src->agg.by_ref);
3167 if (src->agg.items && dst->value.ancestor.agg_preserved)
3169 struct ipa_agg_jf_item *item;
3170 int j;
3172 /* Currently we do not produce clobber aggregate jump functions,
3173 replace with merging when we do. */
3174 gcc_assert (!dst->agg.items);
3176 dst->agg.items = vec_safe_copy (src->agg.items);
3177 dst->agg.by_ref = src->agg.by_ref;
3178 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
3179 item->offset -= dst->value.ancestor.offset;
3182 if (src->type == IPA_JF_PASS_THROUGH
3183 && src->value.pass_through.operation == NOP_EXPR)
3185 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
3186 dst->value.ancestor.agg_preserved &=
3187 src->value.pass_through.agg_preserved;
3189 else if (src->type == IPA_JF_ANCESTOR)
3191 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
3192 dst->value.ancestor.offset += src->value.ancestor.offset;
3193 dst->value.ancestor.agg_preserved &=
3194 src->value.ancestor.agg_preserved;
3196 else
3197 ipa_set_jf_unknown (dst);
3199 else if (dst->type == IPA_JF_PASS_THROUGH)
3201 struct ipa_jump_func *src;
3202 /* We must check the range due to calls with a variable number of arguments,
3203 and we cannot combine jump functions with operations. */
3204 if (dst->value.pass_through.operation == NOP_EXPR
3205 && (top && dst->value.pass_through.formal_id
3206 < ipa_get_cs_argument_count (top)))
3208 int dst_fid = dst->value.pass_through.formal_id;
3209 src = ipa_get_ith_jump_func (top, dst_fid);
3210 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
3211 class ipa_polymorphic_call_context *src_ctx
3212 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
3214 if (src_ctx && !src_ctx->useless_p ())
3216 class ipa_polymorphic_call_context ctx = *src_ctx;
3218 /* TODO: Make type preserved safe WRT contexts. */
3219 if (!ipa_get_jf_pass_through_type_preserved (dst))
3220 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
3221 if (!ctx.useless_p ())
3223 if (!dst_ctx)
3225 vec_safe_grow_cleared (args->polymorphic_call_contexts,
3226 count, true);
3227 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3229 dst_ctx->combine_with (ctx);
3232 switch (src->type)
3234 case IPA_JF_UNKNOWN:
3235 ipa_set_jf_unknown (dst);
3236 break;
3237 case IPA_JF_CONST:
3238 ipa_set_jf_cst_copy (dst, src);
3239 break;
3241 case IPA_JF_PASS_THROUGH:
3243 int formal_id = ipa_get_jf_pass_through_formal_id (src);
3244 enum tree_code operation;
3245 operation = ipa_get_jf_pass_through_operation (src);
3247 if (operation == NOP_EXPR)
3249 bool agg_p;
3250 agg_p = dst_agg_p
3251 && ipa_get_jf_pass_through_agg_preserved (src);
3252 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
3254 else if (TREE_CODE_CLASS (operation) == tcc_unary)
3255 ipa_set_jf_unary_pass_through (dst, formal_id, operation);
3256 else
3258 tree operand = ipa_get_jf_pass_through_operand (src);
3259 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
3260 operation);
3262 break;
3264 case IPA_JF_ANCESTOR:
3266 bool agg_p;
3267 agg_p = dst_agg_p
3268 && ipa_get_jf_ancestor_agg_preserved (src);
3269 ipa_set_ancestor_jf (dst,
3270 ipa_get_jf_ancestor_offset (src),
3271 ipa_get_jf_ancestor_formal_id (src),
3272 agg_p);
3273 break;
3275 default:
3276 gcc_unreachable ();
3279 if (src->agg.items
3280 && (dst_agg_p || !src->agg.by_ref))
3282 /* Currently we do not produce clobber aggregate jump
3283 functions, replace with merging when we do. */
3284 gcc_assert (!dst->agg.items);
3286 dst->agg.by_ref = src->agg.by_ref;
3287 dst->agg.items = vec_safe_copy (src->agg.items);
3290 else
3291 ipa_set_jf_unknown (dst);
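/* An illustrative composition (hypothetical): if E's jump function for an
   argument is a simple pass-through of E->caller's parameter 2 and CS passes
   the constant 5 in that position, the updated jump function becomes the
   constant 5; when TOP carries no usable information, it degrades to
   IPA_JF_UNKNOWN.  */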
3296 /* If TARGET is an addr_expr of a function declaration, make it the
3297 (possibly SPECULATIVE) destination of an indirect edge IE and return the
3298 edge. Otherwise, return NULL. */
3300 struct cgraph_edge *
3301 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
3302 bool speculative)
3304 struct cgraph_node *callee;
3305 bool unreachable = false;
3307 if (TREE_CODE (target) == ADDR_EXPR)
3308 target = TREE_OPERAND (target, 0);
3309 if (TREE_CODE (target) != FUNCTION_DECL)
3311 target = canonicalize_constructor_val (target, NULL);
3312 if (!target || TREE_CODE (target) != FUNCTION_DECL)
3314 /* Member pointer call that goes through a VMT lookup. */
3315 if (ie->indirect_info->member_ptr
3316 /* Or if target is not an invariant expression and we do not
3317 know if it will evaluate to a function at runtime.
3318 This can happen when folding through &VAR, where &VAR
3319 is IP invariant, but VAR itself is not.
3321 TODO: Revisit this when GCC 5 is branched. It seems that
3322 member_ptr check is not needed and that we may try to fold
3323 the expression and see if VAR is readonly. */
3324 || !is_gimple_ip_invariant (target))
3326 if (dump_enabled_p ())
3328 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3329 "discovered direct call non-invariant %s\n",
3330 ie->caller->dump_name ());
3332 return NULL;
3336 if (dump_enabled_p ())
3338 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3339 "discovered direct call to non-function in %s, "
3340 "making it __builtin_unreachable\n",
3341 ie->caller->dump_name ());
3344 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3345 callee = cgraph_node::get_create (target);
3346 unreachable = true;
3348 else
3349 callee = cgraph_node::get (target);
3351 else
3352 callee = cgraph_node::get (target);
3354 /* Because may-edges are not explicitly represented and the vtable may be
3355 external, we may create the first reference to the object in the unit. */
3356 if (!callee || callee->inlined_to)
3359 /* We had better ensure we can refer to it.
3360 In the case of static functions we are out of luck, since we have already
3361 removed the body. In the case of public functions we may or may
3362 not introduce the reference. */
3363 if (!canonicalize_constructor_val (target, NULL)
3364 || !TREE_PUBLIC (target))
3366 if (dump_file)
3367 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
3368 "(%s -> %s) but cannot refer to it. Giving up.\n",
3369 ie->caller->dump_name (),
3370 ie->callee->dump_name ());
3371 return NULL;
3373 callee = cgraph_node::get_create (target);
3376 /* If the edge is already speculated. */
3377 if (speculative && ie->speculative)
3379 if (dump_file)
3381 cgraph_edge *e2 = ie->speculative_call_for_target (callee);
3382 if (!e2)
3384 if (dump_file)
3385 fprintf (dump_file, "ipa-prop: Discovered call to a "
3386 "speculative target (%s -> %s) but the call is "
3387 "already speculated to different target. "
3388 "Giving up.\n",
3389 ie->caller->dump_name (), callee->dump_name ());
3391 else
3393 if (dump_file)
3394 fprintf (dump_file,
3395 "ipa-prop: Discovered call to a speculative target "
3396 "(%s -> %s) this agree with previous speculation.\n",
3397 ie->caller->dump_name (), callee->dump_name ());
3400 return NULL;
3403 if (!dbg_cnt (devirt))
3404 return NULL;
3406 ipa_check_create_node_params ();
3408 /* We cannot make edges to inline clones. It is a bug if someone removed
3409 the cgraph node too early. */
3410 gcc_assert (!callee->inlined_to);
3412 if (dump_file && !unreachable)
3414 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
3415 "(%s -> %s), for stmt ",
3416 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
3417 speculative ? "speculative" : "known",
3418 ie->caller->dump_name (),
3419 callee->dump_name ());
3420 if (ie->call_stmt)
3421 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
3422 else
3423 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
3425 if (dump_enabled_p ())
3427 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3428 "converting indirect call in %s to direct call to %s\n",
3429 ie->caller->dump_name (), callee->dump_name ());
3431 if (!speculative)
3433 struct cgraph_edge *orig = ie;
3434 ie = cgraph_edge::make_direct (ie, callee);
3435 /* If we resolved a speculative edge, the cost is already up to date
3436 for the direct call (adjusted by inline_edge_duplication_hook). */
3437 if (ie == orig)
3439 ipa_call_summary *es = ipa_call_summaries->get (ie);
3440 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
3441 - eni_size_weights.call_cost);
3442 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
3443 - eni_time_weights.call_cost);
3446 else
3448 if (!callee->can_be_discarded_p ())
3450 cgraph_node *alias;
3451 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
3452 if (alias)
3453 callee = alias;
3455 /* make_speculative will update ie's cost to direct call cost. */
3456 ie = ie->make_speculative
3457 (callee, ie->count.apply_scale (8, 10));
3460 return ie;
3463 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
3464 CONSTRUCTOR and return it. Return NULL if the search fails for some
3465 reason. */
3467 static tree
3468 find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
3470 tree type = TREE_TYPE (constructor);
3471 if (TREE_CODE (type) != ARRAY_TYPE
3472 && TREE_CODE (type) != RECORD_TYPE)
3473 return NULL;
3475 unsigned ix;
3476 tree index, val;
3477 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
3479 HOST_WIDE_INT elt_offset;
3480 if (TREE_CODE (type) == ARRAY_TYPE)
3482 offset_int off;
3483 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
3484 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3486 if (index)
3488 if (TREE_CODE (index) == RANGE_EXPR)
3489 off = wi::to_offset (TREE_OPERAND (index, 0));
3490 else
3491 off = wi::to_offset (index);
3492 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
3494 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
3495 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3496 off = wi::sext (off - wi::to_offset (low_bound),
3497 TYPE_PRECISION (TREE_TYPE (index)));
3499 off *= wi::to_offset (unit_size);
3500 /* ??? Handle more than just the first index of a
3501 RANGE_EXPR. */
3503 else
3504 off = wi::to_offset (unit_size) * ix;
3506 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
3507 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
3508 continue;
3509 elt_offset = off.to_shwi ();
3511 else if (TREE_CODE (type) == RECORD_TYPE)
3513 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
3514 if (DECL_BIT_FIELD (index))
3515 continue;
3516 elt_offset = int_bit_position (index);
3518 else
3519 gcc_unreachable ();
3521 if (elt_offset > req_offset)
3522 return NULL;
3524 if (TREE_CODE (val) == CONSTRUCTOR)
3525 return find_constructor_constant_at_offset (val,
3526 req_offset - elt_offset);
3528 if (elt_offset == req_offset
3529 && is_gimple_reg_type (TREE_TYPE (val))
3530 && is_gimple_ip_invariant (val))
3531 return val;
3533 return NULL;
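/* Example (assuming a 32-bit int): for the initializer of

     static const int a[3] = { 5, 6, 7 };

   a REQ_OFFSET of 64 matches the element at index 2 and returns 7.  */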
3536 /* Check whether SCALAR could be used to look up an aggregate interprocedural
3537 invariant from a static constructor and if so, return it. Otherwise return
3538 NULL. */
3540 static tree
3541 ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
3543 if (by_ref)
3545 if (TREE_CODE (scalar) != ADDR_EXPR)
3546 return NULL;
3547 scalar = TREE_OPERAND (scalar, 0);
3550 if (!VAR_P (scalar)
3551 || !is_global_var (scalar)
3552 || !TREE_READONLY (scalar)
3553 || !DECL_INITIAL (scalar)
3554 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
3555 return NULL;
3557 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
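/* Continuing the sketch above: with BY_REF, a SCALAR of &a and an OFFSET of
   64 would find 7 in DECL_INITIAL of the read-only global A.  */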
3560 /* Retrieve a value from AGG, a set of known offset/value pairs for an
3561 aggregate, or from the static initializer of SCALAR (which can be NULL),
3562 for the given OFFSET, or return NULL if there is none. BY_REF specifies
3563 whether the value has to be passed by reference or by value. If
3564 FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points to is set to
3565 true if the value comes from an initializer of a constant. */
3567 tree
3568 ipa_find_agg_cst_for_param (struct ipa_agg_value_set *agg, tree scalar,
3569 HOST_WIDE_INT offset, bool by_ref,
3570 bool *from_global_constant)
3572 struct ipa_agg_value *item;
3573 int i;
3575 if (scalar)
3577 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
3578 if (res)
3580 if (from_global_constant)
3581 *from_global_constant = true;
3582 return res;
3586 if (!agg
3587 || by_ref != agg->by_ref)
3588 return NULL;
3590 FOR_EACH_VEC_ELT (agg->items, i, item)
3591 if (item->offset == offset)
3593 /* Currently we do not have clobber values, return NULL for them once
3594 we do. */
3595 gcc_checking_assert (is_gimple_ip_invariant (item->value));
3596 if (from_global_constant)
3597 *from_global_constant = false;
3598 return item->value;
3600 return NULL;
3603 /* Remove a reference to SYMBOL from the list of references of a node given by
3604 reference description RDESC. Return true if the reference has been
3605 successfully found and removed. */
3607 static bool
3608 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
3610 struct ipa_ref *to_del;
3611 struct cgraph_edge *origin;
3613 origin = rdesc->cs;
3614 if (!origin)
3615 return false;
3616 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3617 origin->lto_stmt_uid);
3618 if (!to_del)
3619 return false;
3621 to_del->remove_reference ();
3622 if (dump_file)
3623 fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
3624 origin->caller->dump_name (), symbol->dump_name ());
3625 return true;
3628 /* If JFUNC has a reference description with refcount different from
3629 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3630 NULL. JFUNC must be a constant jump function. */
3632 static struct ipa_cst_ref_desc *
3633 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3635 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3636 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3637 return rdesc;
3638 else
3639 return NULL;
3642 /* If the value of constant jump function JFUNC is an address of a function
3643 declaration, return the associated call graph node. Otherwise return
3644 NULL. */
3646 static cgraph_node *
3647 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3649 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3650 tree cst = ipa_get_jf_constant (jfunc);
3651 if (TREE_CODE (cst) != ADDR_EXPR
3652 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3653 return NULL;
3655 return cgraph_node::get (TREE_OPERAND (cst, 0));
3659 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3660 refcount and, if it hits zero, remove the reference to the associated symbol
3661 from the caller of the edge specified in the rdesc. Return false if either
3662 the symbol or the reference could not be found, otherwise return true. */
3664 static bool
3665 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3667 struct ipa_cst_ref_desc *rdesc;
3668 if (jfunc->type == IPA_JF_CONST
3669 && (rdesc = jfunc_rdesc_usable (jfunc))
3670 && --rdesc->refcount == 0)
3672 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3673 if (!symbol)
3674 return false;
3676 return remove_described_reference (symbol, rdesc);
3678 return true;
3681 /* Try to find a destination for indirect edge IE that corresponds to a simple
3682 call or a call of a member function pointer and where the destination is a
3683 pointer formal parameter described by jump function JFUNC. TARGET_TYPE is
3684 the type of the parameter to which the result of JFUNC is passed. If it can
3685 be determined, return the newly direct edge, otherwise return NULL.
3686 NEW_ROOT and NEW_ROOT_INFO are the node and its info that JFUNC lattices
3687 are relative to. */
3689 static struct cgraph_edge *
3690 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3691 struct ipa_jump_func *jfunc, tree target_type,
3692 struct cgraph_node *new_root,
3693 class ipa_node_params *new_root_info)
3695 struct cgraph_edge *cs;
3696 tree target;
3697 bool agg_contents = ie->indirect_info->agg_contents;
3698 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
3699 if (agg_contents)
3701 bool from_global_constant;
3702 ipa_agg_value_set agg = ipa_agg_value_set_from_jfunc (new_root_info,
3703 new_root,
3704 &jfunc->agg);
3705 target = ipa_find_agg_cst_for_param (&agg, scalar,
3706 ie->indirect_info->offset,
3707 ie->indirect_info->by_ref,
3708 &from_global_constant);
3709 agg.release ();
3710 if (target
3711 && !from_global_constant
3712 && !ie->indirect_info->guaranteed_unmodified)
3713 return NULL;
3715 else
3716 target = scalar;
3717 if (!target)
3718 return NULL;
3719 cs = ipa_make_edge_direct_to_target (ie, target);
3721 if (cs && !agg_contents)
3723 bool ok;
3724 gcc_checking_assert (cs->callee
3725 && (cs != ie
3726 || jfunc->type != IPA_JF_CONST
3727 || !cgraph_node_for_jfunc (jfunc)
3728 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3729 ok = try_decrement_rdesc_refcount (jfunc);
3730 gcc_checking_assert (ok);
3733 return cs;
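/* A typical scenario (sketch): when propagation determines that a
   function-pointer parameter is in fact &foo, SCALAR is that ADDR_EXPR and
   the indirect edge IE is redirected to the cgraph node of foo.  */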
3736 /* Return the target to be used in cases of impossible devirtualization. IE
3737 and target (the latter can be NULL) are dumped when dumping is enabled. */
3739 tree
3740 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3742 if (dump_file)
3744 if (target)
3745 fprintf (dump_file,
3746 "Type inconsistent devirtualization: %s->%s\n",
3747 ie->caller->dump_name (),
3748 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3749 else
3750 fprintf (dump_file,
3751 "No devirtualization target in %s\n",
3752 ie->caller->dump_name ());
3754 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3755 cgraph_node::get_create (new_target);
3756 return new_target;
3759 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3760 call based on a formal parameter which is described by jump function JFUNC
3761 and if it can be determined, make it direct and return the direct edge.
3762 Otherwise, return NULL. CTX describes the polymorphic context that the
3763 parameter the call is based on brings along with it. NEW_ROOT and
3764 NEW_ROOT_INFO are the node and its info that JFUNC lattices are relative
3765 to. */
3767 static struct cgraph_edge *
3768 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3769 struct ipa_jump_func *jfunc,
3770 class ipa_polymorphic_call_context ctx,
3771 struct cgraph_node *new_root,
3772 class ipa_node_params *new_root_info)
3774 tree target = NULL;
3775 bool speculative = false;
3777 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
3778 return NULL;
3780 gcc_assert (!ie->indirect_info->by_ref);
3782 /* Try to do lookup via known virtual table pointer value. */
3783 if (!ie->indirect_info->vptr_changed
3784 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
3786 tree vtable;
3787 unsigned HOST_WIDE_INT offset;
3788 tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3789 : NULL;
3790 ipa_agg_value_set agg = ipa_agg_value_set_from_jfunc (new_root_info,
3791 new_root,
3792 &jfunc->agg);
3793 tree t = ipa_find_agg_cst_for_param (&agg, scalar,
3794 ie->indirect_info->offset,
3795 true);
3796 agg.release ();
3797 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3799 bool can_refer;
3800 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3801 vtable, offset, &can_refer);
3802 if (can_refer)
3804 if (!t
3805 || fndecl_built_in_p (t, BUILT_IN_UNREACHABLE)
3806 || !possible_polymorphic_call_target_p
3807 (ie, cgraph_node::get (t)))
3809 /* Do not speculate builtin_unreachable, it is stupid! */
3810 if (!ie->indirect_info->vptr_changed)
3811 target = ipa_impossible_devirt_target (ie, target);
3812 else
3813 target = NULL;
3815 else
3817 target = t;
3818 speculative = ie->indirect_info->vptr_changed;
3824 ipa_polymorphic_call_context ie_context (ie);
3825 vec <cgraph_node *>targets;
3826 bool final;
3828 ctx.offset_by (ie->indirect_info->offset);
3829 if (ie->indirect_info->vptr_changed)
3830 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3831 ie->indirect_info->otr_type);
3832 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3833 targets = possible_polymorphic_call_targets
3834 (ie->indirect_info->otr_type,
3835 ie->indirect_info->otr_token,
3836 ctx, &final);
3837 if (final && targets.length () <= 1)
3839 speculative = false;
3840 if (targets.length () == 1)
3841 target = targets[0]->decl;
3842 else
3843 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3845 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3846 && !ie->speculative && ie->maybe_hot_p ())
3848 cgraph_node *n;
3849 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3850 ie->indirect_info->otr_token,
3851 ie->indirect_info->context);
3852 if (n)
3854 target = n->decl;
3855 speculative = true;
3859 if (target)
3861 if (!possible_polymorphic_call_target_p
3862 (ie, cgraph_node::get_create (target)))
3864 if (speculative)
3865 return NULL;
3866 target = ipa_impossible_devirt_target (ie, target);
3868 return ipa_make_edge_direct_to_target (ie, target, speculative);
3870 else
3871 return NULL;
3874 /* Update the param called notes associated with NODE when CS is being inlined,
3875 assuming NODE is (potentially indirectly) inlined into CS->callee.
3876 Moreover, if the callee is discovered to be constant, create a new cgraph
3877 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3878 unless NEW_EDGES is NULL. Return true iff any new edges were created. */
3880 static bool
3881 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3882 struct cgraph_node *node,
3883 vec<cgraph_edge *> *new_edges)
3885 class ipa_edge_args *top;
3886 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3887 struct cgraph_node *new_root;
3888 class ipa_node_params *new_root_info, *inlined_node_info;
3889 bool res = false;
3891 ipa_check_create_edge_args ();
3892 top = IPA_EDGE_REF (cs);
3893 new_root = cs->caller->inlined_to
3894 ? cs->caller->inlined_to : cs->caller;
3895 new_root_info = IPA_NODE_REF (new_root);
3896 inlined_node_info = IPA_NODE_REF (cs->callee->function_symbol ());
3898 for (ie = node->indirect_calls; ie; ie = next_ie)
3900 class cgraph_indirect_call_info *ici = ie->indirect_info;
3901 struct ipa_jump_func *jfunc;
3902 int param_index;
3904 next_ie = ie->next_callee;
3906 if (ici->param_index == -1)
3907 continue;
3909 /* We must check the range because of calls with a variable number of arguments: */
3910 if (!top || ici->param_index >= ipa_get_cs_argument_count (top))
3912 ici->param_index = -1;
3913 continue;
3916 param_index = ici->param_index;
3917 jfunc = ipa_get_ith_jump_func (top, param_index);
3919 auto_vec<cgraph_node *, 4> spec_targets;
3920 if (ie->speculative)
3921 for (cgraph_edge *direct = ie->first_speculative_call_target ();
3922 direct;
3923 direct = direct->next_speculative_call_target ())
3924 spec_targets.safe_push (direct->callee);
3926 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3927 new_direct_edge = NULL;
3928 else if (ici->polymorphic)
3930 ipa_polymorphic_call_context ctx;
3931 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3932 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx,
3933 new_root,
3934 new_root_info);
3936 else
3938 tree target_type = ipa_get_type (inlined_node_info, param_index);
3939 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3940 target_type,
3941 new_root,
3942 new_root_info);
3945 /* If speculation was removed, then we need to do nothing. */
3946 if (new_direct_edge && new_direct_edge != ie
3947 && spec_targets.contains (new_direct_edge->callee))
3949 new_direct_edge->indirect_inlining_edge = 1;
3950 top = IPA_EDGE_REF (cs);
3951 res = true;
3952 if (!new_direct_edge->speculative)
3953 continue;
3955 else if (new_direct_edge)
3957 new_direct_edge->indirect_inlining_edge = 1;
3958 if (new_edges)
3960 new_edges->safe_push (new_direct_edge);
3961 res = true;
3963 top = IPA_EDGE_REF (cs);
3964 /* If a speculative edge was introduced, we still need to update the
3965 call info of the indirect edge. */
3966 if (!new_direct_edge->speculative)
3967 continue;
3969 if (jfunc->type == IPA_JF_PASS_THROUGH
3970 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3972 if (ici->agg_contents
3973 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3974 && !ici->polymorphic)
3975 ici->param_index = -1;
3976 else
3978 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3979 if (ici->polymorphic
3980 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3981 ici->vptr_changed = true;
3982 ipa_set_param_used_by_indirect_call (new_root_info,
3983 ici->param_index, true);
3984 if (ici->polymorphic)
3985 ipa_set_param_used_by_polymorphic_call (new_root_info,
3986 ici->param_index, true);
3989 else if (jfunc->type == IPA_JF_ANCESTOR)
3991 if (ici->agg_contents
3992 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3993 && !ici->polymorphic)
3994 ici->param_index = -1;
3995 else
3997 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3998 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3999 if (ici->polymorphic
4000 && !ipa_get_jf_ancestor_type_preserved (jfunc))
4001 ici->vptr_changed = true;
4002 ipa_set_param_used_by_indirect_call (new_root_info,
4003 ici->param_index, true);
4004 if (ici->polymorphic)
4005 ipa_set_param_used_by_polymorphic_call (new_root_info,
4006 ici->param_index, true);
4009 else
4010 /* Either we can find a destination for this edge now or never. */
4011 ici->param_index = -1;
4014 return res;
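/* For illustration (hypothetical call chain): suppose f simply forwards its
   parameter X to g, and g calls through its own first parameter.  When g is
   inlined into f, the f->g jump function for that parameter is a NOP_EXPR
   pass-through of f's X, so the surviving indirect call is re-described
   above by rewriting ici->param_index to the pass-through's formal id, now
   relative to f.  */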
4017 /* Recursively traverse subtree of NODE (including node) made of inlined
4018 cgraph_edges when CS has been inlined and invoke
4019 update_indirect_edges_after_inlining on all nodes and
4020 update_jump_functions_after_inlining on all non-inlined edges that lead out
4021 of this subtree. Newly discovered indirect edges will be added to
4022 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges
4023 were created. */
4025 static bool
4026 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
4027 struct cgraph_node *node,
4028 vec<cgraph_edge *> *new_edges)
4030 struct cgraph_edge *e;
4031 bool res;
4033 res = update_indirect_edges_after_inlining (cs, node, new_edges);
4035 for (e = node->callees; e; e = e->next_callee)
4036 if (!e->inline_failed)
4037 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
4038 else
4039 update_jump_functions_after_inlining (cs, e);
4040 for (e = node->indirect_calls; e; e = e->next_callee)
4041 update_jump_functions_after_inlining (cs, e);
4043 return res;
4046 /* Combine two controlled uses counts as done during inlining. */
4048 static int
4049 combine_controlled_uses_counters (int c, int d)
4051 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
4052 return IPA_UNDESCRIBED_USE;
4053 else
4054 return c + d - 1;
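/* For example (illustrative): if the new root has C == 2 described uses of a
   value, one of which is the argument of the call site being inlined, and
   the callee tracked D == 3 uses of the corresponding parameter, the
   combined count is 2 + 3 - 1 == 4, because the use as the call argument
   itself disappears with inlining.  An IPA_UNDESCRIBED_USE on either side
   poisons the result.  */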
4057 /* Propagate the number of controlled uses from CS->callee to the new root of
4058 the tree of inlined nodes. */
4060 static void
4061 propagate_controlled_uses (struct cgraph_edge *cs)
4063 class ipa_edge_args *args = IPA_EDGE_REF (cs);
4064 if (!args)
4065 return;
4066 struct cgraph_node *new_root = cs->caller->inlined_to
4067 ? cs->caller->inlined_to : cs->caller;
4068 class ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
4069 class ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
4070 int count, i;
4072 if (!old_root_info)
4073 return;
4075 count = MIN (ipa_get_cs_argument_count (args),
4076 ipa_get_param_count (old_root_info));
4077 for (i = 0; i < count; i++)
4079 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
4080 struct ipa_cst_ref_desc *rdesc;
4082 if (jf->type == IPA_JF_PASS_THROUGH)
4084 int src_idx, c, d;
4085 src_idx = ipa_get_jf_pass_through_formal_id (jf);
4086 c = ipa_get_controlled_uses (new_root_info, src_idx);
4087 d = ipa_get_controlled_uses (old_root_info, i);
4089 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
4090 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
4091 c = combine_controlled_uses_counters (c, d);
4092 ipa_set_controlled_uses (new_root_info, src_idx, c);
4093 if (c == 0 && new_root_info->ipcp_orig_node)
4095 struct cgraph_node *n;
4096 struct ipa_ref *ref;
4097 tree t = new_root_info->known_csts[src_idx];
4099 if (t && TREE_CODE (t) == ADDR_EXPR
4100 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
4101 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
4102 && (ref = new_root->find_reference (n, NULL, 0)))
4104 if (dump_file)
4105 fprintf (dump_file, "ipa-prop: Removing cloning-created "
4106 "reference from %s to %s.\n",
4107 new_root->dump_name (),
4108 n->dump_name ());
4109 ref->remove_reference ();
4113 else if (jf->type == IPA_JF_CONST
4114 && (rdesc = jfunc_rdesc_usable (jf)))
4116 int d = ipa_get_controlled_uses (old_root_info, i);
4117 int c = rdesc->refcount;
4118 rdesc->refcount = combine_controlled_uses_counters (c, d);
4119 if (rdesc->refcount == 0)
4121 tree cst = ipa_get_jf_constant (jf);
4122 struct cgraph_node *n;
4123 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
4124 && TREE_CODE (TREE_OPERAND (cst, 0))
4125 == FUNCTION_DECL);
4126 n = cgraph_node::get (TREE_OPERAND (cst, 0));
4127 if (n)
4129 struct cgraph_node *clone;
4130 bool ok;
4131 ok = remove_described_reference (n, rdesc);
4132 gcc_checking_assert (ok);
4134 clone = cs->caller;
4135 while (clone->inlined_to
4136 && clone->ipcp_clone
4137 && clone != rdesc->cs->caller)
4139 struct ipa_ref *ref;
4140 ref = clone->find_reference (n, NULL, 0);
4141 if (ref)
4143 if (dump_file)
4144 fprintf (dump_file, "ipa-prop: Removing "
4145 "cloning-created reference "
4146 "from %s to %s.\n",
4147 clone->dump_name (),
4148 n->dump_name ());
4149 ref->remove_reference ();
4151 clone = clone->callers->caller;
4158 for (i = ipa_get_param_count (old_root_info);
4159 i < ipa_get_cs_argument_count (args);
4160 i++)
4162 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
4164 if (jf->type == IPA_JF_CONST)
4166 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
4167 if (rdesc)
4168 rdesc->refcount = IPA_UNDESCRIBED_USE;
4170 else if (jf->type == IPA_JF_PASS_THROUGH)
4171 ipa_set_controlled_uses (new_root_info,
4172 jf->value.pass_through.formal_id,
4173 IPA_UNDESCRIBED_USE);
4177 /* Update jump functions and call note functions on inlining the call site CS.
4178 CS is expected to lead to a node already cloned by
4179 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
4180 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges
4181 were created. */
4183 bool
4184 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
4185 vec<cgraph_edge *> *new_edges)
4187 bool changed;
4188 /* Do nothing if the preparation phase has not been carried out yet
4189 (i.e. during early inlining). */
4190 if (!ipa_node_params_sum)
4191 return false;
4192 gcc_assert (ipa_edge_args_sum);
4194 propagate_controlled_uses (cs);
4195 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
4196 ipa_node_params_sum->remove (cs->callee);
4198 class ipa_edge_args *args = IPA_EDGE_REF (cs);
4199 if (args)
4201 bool ok = true;
4202 if (args->jump_functions)
4204 struct ipa_jump_func *jf;
4205 int i;
4206 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
4207 if (jf->type == IPA_JF_CONST
4208 && ipa_get_jf_constant_rdesc (jf))
4210 ok = false;
4211 break;
4214 if (ok)
4215 ipa_edge_args_sum->remove (cs);
4217 if (ipcp_transformation_sum)
4218 ipcp_transformation_sum->remove (cs->callee);
4220 return changed;
4223 /* Ensure that the array of edge argument infos is big enough to accommodate a
4224 structure for all edges, reallocating it if necessary. Also allocate the
4225 associated hash tables if they do not already exist. */
4227 void
4228 ipa_check_create_edge_args (void)
4230 if (!ipa_edge_args_sum)
4231 ipa_edge_args_sum
4232 = (new (ggc_alloc_no_dtor<ipa_edge_args_sum_t> ())
4233 ipa_edge_args_sum_t (symtab, true));
4234 if (!ipa_bits_hash_table)
4235 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
4236 if (!ipa_vr_hash_table)
4237 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
4240 /* Free all ipa_edge_args structures. */
4242 void
4243 ipa_free_all_edge_args (void)
4245 if (!ipa_edge_args_sum)
4246 return;
4248 ggc_delete (ipa_edge_args_sum);
4249 ipa_edge_args_sum = NULL;
4252 /* Free all ipa_node_params structures. */
4254 void
4255 ipa_free_all_node_params (void)
4257 if (ipa_node_params_sum)
4258 ggc_delete (ipa_node_params_sum);
4259 ipa_node_params_sum = NULL;
4262 /* Initialize IPA CP transformation summary and also allocate any necessary hash
4263 tables if they do not already exist. */
4265 void
4266 ipcp_transformation_initialize (void)
4268 if (!ipa_bits_hash_table)
4269 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
4270 if (!ipa_vr_hash_table)
4271 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
4272 if (ipcp_transformation_sum == NULL)
4274 ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
4275 ipcp_transformation_sum->disable_insertion_hook ();
4279 /* Release the IPA CP transformation summary. */
4281 void
4282 ipcp_free_transformation_sum (void)
4284 if (!ipcp_transformation_sum)
4285 return;
4287 ipcp_transformation_sum->~function_summary<ipcp_transformation *> ();
4288 ggc_free (ipcp_transformation_sum);
4289 ipcp_transformation_sum = NULL;
4292 /* Set the aggregate replacements of NODE to be AGGVALS. */
4294 void
4295 ipa_set_node_agg_value_chain (struct cgraph_node *node,
4296 struct ipa_agg_replacement_value *aggvals)
4298 ipcp_transformation_initialize ();
4299 ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
4300 s->agg_values = aggvals;
4303 /* Hook that is called by cgraph.c when an edge is removed. Adjust reference
4304 count data structures accordingly. */
4306 void
4307 ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
4309 if (args->jump_functions)
4311 struct ipa_jump_func *jf;
4312 int i;
4313 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
4315 struct ipa_cst_ref_desc *rdesc;
4316 try_decrement_rdesc_refcount (jf);
4317 if (jf->type == IPA_JF_CONST
4318 && (rdesc = ipa_get_jf_constant_rdesc (jf))
4319 && rdesc->cs == cs)
4320 rdesc->cs = NULL;
4325 /* Method invoked when an edge is duplicated. Copy ipa_edge_args and adjust
4326 reference count data structures accordingly. */
4328 void
4329 ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
4330 ipa_edge_args *old_args, ipa_edge_args *new_args)
4332 unsigned int i;
4334 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
4335 if (old_args->polymorphic_call_contexts)
4336 new_args->polymorphic_call_contexts
4337 = vec_safe_copy (old_args->polymorphic_call_contexts);
4339 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
4341 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
4342 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
4344 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
4346 if (src_jf->type == IPA_JF_CONST)
4348 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
4350 if (!src_rdesc)
4351 dst_jf->value.constant.rdesc = NULL;
4352 else if (src->caller == dst->caller)
4354 struct ipa_ref *ref;
4355 symtab_node *n = cgraph_node_for_jfunc (src_jf);
4356 gcc_checking_assert (n);
4357 ref = src->caller->find_reference (n, src->call_stmt,
4358 src->lto_stmt_uid);
4359 gcc_checking_assert (ref);
4360 dst->caller->clone_reference (ref, ref->stmt);
4362 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
4363 dst_rdesc->cs = dst;
4364 dst_rdesc->refcount = src_rdesc->refcount;
4365 dst_rdesc->next_duplicate = NULL;
4366 dst_jf->value.constant.rdesc = dst_rdesc;
4368 else if (src_rdesc->cs == src)
4370 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
4371 dst_rdesc->cs = dst;
4372 dst_rdesc->refcount = src_rdesc->refcount;
4373 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
4374 src_rdesc->next_duplicate = dst_rdesc;
4375 dst_jf->value.constant.rdesc = dst_rdesc;
4377 else
4379 struct ipa_cst_ref_desc *dst_rdesc;
4380 /* This can happen during inlining, when a JFUNC can refer to a
4381 reference taken in a function up in the tree of inline clones.
4382 We need to find the duplicate that refers to our tree of
4383 inline clones. */
4385 gcc_assert (dst->caller->inlined_to);
4386 for (dst_rdesc = src_rdesc->next_duplicate;
4387 dst_rdesc;
4388 dst_rdesc = dst_rdesc->next_duplicate)
4390 struct cgraph_node *top;
4391 top = dst_rdesc->cs->caller->inlined_to
4392 ? dst_rdesc->cs->caller->inlined_to
4393 : dst_rdesc->cs->caller;
4394 if (dst->caller->inlined_to == top)
4395 break;
4397 gcc_assert (dst_rdesc);
4398 dst_jf->value.constant.rdesc = dst_rdesc;
4401 else if (dst_jf->type == IPA_JF_PASS_THROUGH
4402 && src->caller == dst->caller)
4404 struct cgraph_node *inline_root = dst->caller->inlined_to
4405 ? dst->caller->inlined_to : dst->caller;
4406 class ipa_node_params *root_info = IPA_NODE_REF (inline_root);
4407 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
4409 int c = ipa_get_controlled_uses (root_info, idx);
4410 if (c != IPA_UNDESCRIBED_USE)
4412 c++;
4413 ipa_set_controlled_uses (root_info, idx, c);
4419 /* Analyze a function newly added into the callgraph. */
4421 static void
4422 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
4424 if (node->has_gimple_body_p ())
4425 ipa_analyze_node (node);
4428 /* Hook that is called by summary when a node is duplicated. */
4430 void
4431 ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
4432 ipa_node_params *old_info,
4433 ipa_node_params *new_info)
4435 ipa_agg_replacement_value *old_av, *new_av;
4437 new_info->descriptors = vec_safe_copy (old_info->descriptors);
4438 new_info->lattices = NULL;
4439 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
4440 new_info->known_csts = old_info->known_csts.copy ();
4441 new_info->known_contexts = old_info->known_contexts.copy ();
4443 new_info->analysis_done = old_info->analysis_done;
4444 new_info->node_enqueued = old_info->node_enqueued;
4445 new_info->versionable = old_info->versionable;
4447 old_av = ipa_get_agg_replacements_for_node (src);
4448 if (old_av)
4450 new_av = NULL;
4451 while (old_av)
4453 struct ipa_agg_replacement_value *v;
4455 v = ggc_alloc<ipa_agg_replacement_value> ();
4456 memcpy (v, old_av, sizeof (*v));
4457 v->next = new_av;
4458 new_av = v;
4459 old_av = old_av->next;
4461 ipa_set_node_agg_value_chain (dst, new_av);
4465 /* Duplication of ipcp transformation summaries. */
4467 void
4468 ipcp_transformation_t::duplicate(cgraph_node *, cgraph_node *dst,
4469 ipcp_transformation *src_trans,
4470 ipcp_transformation *dst_trans)
4472 /* Avoid redundant work of duplicating vectors we will never use. */
4473 if (dst->inlined_to)
4474 return;
4475 dst_trans->bits = vec_safe_copy (src_trans->bits);
4476 dst_trans->m_vr = vec_safe_copy (src_trans->m_vr);
4477 ipa_agg_replacement_value *agg = src_trans->agg_values,
4478 **aggptr = &dst_trans->agg_values;
4479 while (agg)
4481 *aggptr = ggc_alloc<ipa_agg_replacement_value> ();
4482 **aggptr = *agg;
4483 agg = agg->next;
4484 aggptr = &(*aggptr)->next;
4488 /* Register our cgraph hooks if they are not already there. */
4490 void
4491 ipa_register_cgraph_hooks (void)
4493 ipa_check_create_node_params ();
4494 ipa_check_create_edge_args ();
4496 function_insertion_hook_holder =
4497 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
4500 /* Unregister our cgraph hooks if they are registered. */
4502 static void
4503 ipa_unregister_cgraph_hooks (void)
4505 if (function_insertion_hook_holder)
4506 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
4507 function_insertion_hook_holder = NULL;
4510 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4511 longer needed after ipa-cp. */
4513 void
4514 ipa_free_all_structures_after_ipa_cp (void)
4516 if (!optimize && !in_lto_p)
4518 ipa_free_all_edge_args ();
4519 ipa_free_all_node_params ();
4520 ipcp_sources_pool.release ();
4521 ipcp_cst_values_pool.release ();
4522 ipcp_poly_ctx_values_pool.release ();
4523 ipcp_agg_lattice_pool.release ();
4524 ipa_unregister_cgraph_hooks ();
4525 ipa_refdesc_pool.release ();
4529 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4530 longer needed after indirect inlining. */
4532 void
4533 ipa_free_all_structures_after_iinln (void)
4535 ipa_free_all_edge_args ();
4536 ipa_free_all_node_params ();
4537 ipa_unregister_cgraph_hooks ();
4538 ipcp_sources_pool.release ();
4539 ipcp_cst_values_pool.release ();
4540 ipcp_poly_ctx_values_pool.release ();
4541 ipcp_agg_lattice_pool.release ();
4542 ipa_refdesc_pool.release ();
4545 /* Print the ipa_tree_map data structures (parameter descriptors) of
4546 NODE to F. */
4548 void
4549 ipa_print_node_params (FILE *f, struct cgraph_node *node)
4551 int i, count;
4552 class ipa_node_params *info;
4554 if (!node->definition)
4555 return;
4556 info = IPA_NODE_REF (node);
4557 fprintf (f, " function %s parameter descriptors:\n", node->dump_name ());
4558 if (!info)
4560 fprintf (f, " no params return\n");
4561 return;
4563 count = ipa_get_param_count (info);
4564 for (i = 0; i < count; i++)
4566 int c;
4568 fprintf (f, " ");
4569 ipa_dump_param (f, info, i);
4570 if (ipa_is_param_used (info, i))
4571 fprintf (f, " used");
4572 if (ipa_is_param_used_by_ipa_predicates (info, i))
4573 fprintf (f, " used_by_ipa_predicates");
4574 if (ipa_is_param_used_by_indirect_call (info, i))
4575 fprintf (f, " used_by_indirect_call");
4576 if (ipa_is_param_used_by_polymorphic_call (info, i))
4577 fprintf (f, " used_by_polymorphic_call");
4578 c = ipa_get_controlled_uses (info, i);
4579 if (c == IPA_UNDESCRIBED_USE)
4580 fprintf (f, " undescribed_use");
4581 else
4582 fprintf (f, " controlled_uses=%i", c);
4583 fprintf (f, "\n");
4587 /* Print ipa_tree_map data structures of all functions in the
4588 callgraph to F. */
4590 void
4591 ipa_print_all_params (FILE * f)
4593 struct cgraph_node *node;
4595 fprintf (f, "\nFunction parameters:\n");
4596 FOR_EACH_FUNCTION (node)
4597 ipa_print_node_params (f, node);
4600 /* Dump the AV linked list. */
4602 void
4603 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4605 bool comma = false;
4606 fprintf (f, " Aggregate replacements:");
4607 for (; av; av = av->next)
4609 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4610 av->index, av->offset);
4611 print_generic_expr (f, av->value);
4612 comma = true;
4614 fprintf (f, "\n");
4617 /* Stream out jump function JUMP_FUNC to OB. */
4619 static void
4620 ipa_write_jump_function (struct output_block *ob,
4621 struct ipa_jump_func *jump_func)
4623 struct ipa_agg_jf_item *item;
4624 struct bitpack_d bp;
4625 int i, count;
4626 int flag = 0;
4628 /* ADDR_EXPRs are very common IP invariants; save some streamer data
4629 as well as WPA memory by handling them specially. */
4630 if (jump_func->type == IPA_JF_CONST
4631 && TREE_CODE (jump_func->value.constant.value) == ADDR_EXPR)
4632 flag = 1;
4634 streamer_write_uhwi (ob, jump_func->type * 2 + flag);
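  /* The low bit of the streamed word carries FLAG; the reader in
     ipa_read_jump_function below recovers both sides as

       bool flag = val & 1;
       jftype = (enum jump_func_type) (val / 2);  */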
4635 switch (jump_func->type)
4637 case IPA_JF_UNKNOWN:
4638 break;
4639 case IPA_JF_CONST:
4640 gcc_assert (
4641 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4642 stream_write_tree (ob,
4643 flag
4644 ? TREE_OPERAND (jump_func->value.constant.value, 0)
4645 : jump_func->value.constant.value, true);
4646 break;
4647 case IPA_JF_PASS_THROUGH:
4648 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4649 if (jump_func->value.pass_through.operation == NOP_EXPR)
4651 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4652 bp = bitpack_create (ob->main_stream);
4653 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4654 streamer_write_bitpack (&bp);
4656 else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
4657 == tcc_unary)
4658 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4659 else
4661 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4662 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4664 break;
4665 case IPA_JF_ANCESTOR:
4666 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4667 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4668 bp = bitpack_create (ob->main_stream);
4669 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4670 streamer_write_bitpack (&bp);
4671 break;
4672 default:
4673 fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
4676 count = vec_safe_length (jump_func->agg.items);
4677 streamer_write_uhwi (ob, count);
4678 if (count)
4680 bp = bitpack_create (ob->main_stream);
4681 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4682 streamer_write_bitpack (&bp);
4685 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4687 stream_write_tree (ob, item->type, true);
4688 streamer_write_uhwi (ob, item->offset);
4689 streamer_write_uhwi (ob, item->jftype);
4690 switch (item->jftype)
4692 case IPA_JF_UNKNOWN:
4693 break;
4694 case IPA_JF_CONST:
4695 stream_write_tree (ob, item->value.constant, true);
4696 break;
4697 case IPA_JF_PASS_THROUGH:
4698 case IPA_JF_LOAD_AGG:
4699 streamer_write_uhwi (ob, item->value.pass_through.operation);
4700 streamer_write_uhwi (ob, item->value.pass_through.formal_id);
4701 if (TREE_CODE_CLASS (item->value.pass_through.operation)
4702 != tcc_unary)
4703 stream_write_tree (ob, item->value.pass_through.operand, true);
4704 if (item->jftype == IPA_JF_LOAD_AGG)
4706 stream_write_tree (ob, item->value.load_agg.type, true);
4707 streamer_write_uhwi (ob, item->value.load_agg.offset);
4708 bp = bitpack_create (ob->main_stream);
4709 bp_pack_value (&bp, item->value.load_agg.by_ref, 1);
4710 streamer_write_bitpack (&bp);
4712 break;
4713 default:
4714 fatal_error (UNKNOWN_LOCATION,
4715 "invalid jump function in LTO stream");
4719 bp = bitpack_create (ob->main_stream);
4720 bp_pack_value (&bp, !!jump_func->bits, 1);
4721 streamer_write_bitpack (&bp);
4722 if (jump_func->bits)
4724 streamer_write_widest_int (ob, jump_func->bits->value);
4725 streamer_write_widest_int (ob, jump_func->bits->mask);
4727 bp_pack_value (&bp, !!jump_func->m_vr, 1);
4728 streamer_write_bitpack (&bp);
4729 if (jump_func->m_vr)
4731 streamer_write_enum (ob->main_stream, value_rang_type,
4732 VR_LAST, jump_func->m_vr->kind ());
4733 stream_write_tree (ob, jump_func->m_vr->min (), true);
4734 stream_write_tree (ob, jump_func->m_vr->max (), true);
4738 /* Read in jump function JUMP_FUNC from IB. */
4740 static void
4741 ipa_read_jump_function (class lto_input_block *ib,
4742 struct ipa_jump_func *jump_func,
4743 struct cgraph_edge *cs,
4744 class data_in *data_in,
4745 bool prevails)
4747 enum jump_func_type jftype;
4748 enum tree_code operation;
4749 int i, count;
4750 int val = streamer_read_uhwi (ib);
4751 bool flag = val & 1;
4753 jftype = (enum jump_func_type) (val / 2);
4754 switch (jftype)
4756 case IPA_JF_UNKNOWN:
4757 ipa_set_jf_unknown (jump_func);
4758 break;
4759 case IPA_JF_CONST:
4761 tree t = stream_read_tree (ib, data_in);
4762 if (flag && prevails)
4763 t = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (t)), t);
4764 ipa_set_jf_constant (jump_func, t, cs);
4766 break;
4767 case IPA_JF_PASS_THROUGH:
4768 operation = (enum tree_code) streamer_read_uhwi (ib);
4769 if (operation == NOP_EXPR)
4771 int formal_id = streamer_read_uhwi (ib);
4772 struct bitpack_d bp = streamer_read_bitpack (ib);
4773 bool agg_preserved = bp_unpack_value (&bp, 1);
4774 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4776 else if (TREE_CODE_CLASS (operation) == tcc_unary)
4778 int formal_id = streamer_read_uhwi (ib);
4779 ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4781 else
4783 tree operand = stream_read_tree (ib, data_in);
4784 int formal_id = streamer_read_uhwi (ib);
4785 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4786 operation);
4788 break;
4789 case IPA_JF_ANCESTOR:
4791 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4792 int formal_id = streamer_read_uhwi (ib);
4793 struct bitpack_d bp = streamer_read_bitpack (ib);
4794 bool agg_preserved = bp_unpack_value (&bp, 1);
4795 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4796 break;
4798 default:
4799 fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
4802 count = streamer_read_uhwi (ib);
4803 if (prevails)
4805 jump_func->agg.items = NULL;
4806 vec_safe_reserve (jump_func->agg.items, count, true);
4808 if (count)
4810 struct bitpack_d bp = streamer_read_bitpack (ib);
4811 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4813 for (i = 0; i < count; i++)
4815 struct ipa_agg_jf_item item;
4816 item.type = stream_read_tree (ib, data_in);
4817 item.offset = streamer_read_uhwi (ib);
4818 item.jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4820 switch (item.jftype)
4822 case IPA_JF_UNKNOWN:
4823 break;
4824 case IPA_JF_CONST:
4825 item.value.constant = stream_read_tree (ib, data_in);
4826 break;
4827 case IPA_JF_PASS_THROUGH:
4828 case IPA_JF_LOAD_AGG:
4829 operation = (enum tree_code) streamer_read_uhwi (ib);
4830 item.value.pass_through.operation = operation;
4831 item.value.pass_through.formal_id = streamer_read_uhwi (ib);
4832 if (TREE_CODE_CLASS (operation) == tcc_unary)
4833 item.value.pass_through.operand = NULL_TREE;
4834 else
4835 item.value.pass_through.operand = stream_read_tree (ib, data_in);
4836 if (item.jftype == IPA_JF_LOAD_AGG)
4838 struct bitpack_d bp;
4839 item.value.load_agg.type = stream_read_tree (ib, data_in);
4840 item.value.load_agg.offset = streamer_read_uhwi (ib);
4841 bp = streamer_read_bitpack (ib);
4842 item.value.load_agg.by_ref = bp_unpack_value (&bp, 1);
4844 break;
4845 default:
4846 fatal_error (UNKNOWN_LOCATION,
4847 "invalid jump function in LTO stream");
4849 if (prevails)
4850 jump_func->agg.items->quick_push (item);
4853 struct bitpack_d bp = streamer_read_bitpack (ib);
4854 bool bits_known = bp_unpack_value (&bp, 1);
4855 if (bits_known)
4857 widest_int value = streamer_read_widest_int (ib);
4858 widest_int mask = streamer_read_widest_int (ib);
4859 if (prevails)
4860 ipa_set_jfunc_bits (jump_func, value, mask);
4862 else
4863 jump_func->bits = NULL;
4865 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4866 bool vr_known = bp_unpack_value (&vr_bp, 1);
4867 if (vr_known)
4869 enum value_range_kind type = streamer_read_enum (ib, value_range_kind,
4870 VR_LAST);
4871 tree min = stream_read_tree (ib, data_in);
4872 tree max = stream_read_tree (ib, data_in);
4873 if (prevails)
4874 ipa_set_jfunc_vr (jump_func, type, min, max);
4876 else
4877 jump_func->m_vr = NULL;
4880 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4881 relevant to indirect inlining to OB. */
4883 static void
4884 ipa_write_indirect_edge_info (struct output_block *ob,
4885 struct cgraph_edge *cs)
4887 class cgraph_indirect_call_info *ii = cs->indirect_info;
4888 struct bitpack_d bp;
4890 streamer_write_hwi (ob, ii->param_index);
4891 bp = bitpack_create (ob->main_stream);
4892 bp_pack_value (&bp, ii->polymorphic, 1);
4893 bp_pack_value (&bp, ii->agg_contents, 1);
4894 bp_pack_value (&bp, ii->member_ptr, 1);
4895 bp_pack_value (&bp, ii->by_ref, 1);
4896 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
4897 bp_pack_value (&bp, ii->vptr_changed, 1);
4898 streamer_write_bitpack (&bp);
4899 if (ii->agg_contents || ii->polymorphic)
4900 streamer_write_hwi (ob, ii->offset);
4901 else
4902 gcc_assert (ii->offset == 0);
4904 if (ii->polymorphic)
4906 streamer_write_hwi (ob, ii->otr_token);
4907 stream_write_tree (ob, ii->otr_type, true);
4908 ii->context.stream_out (ob);
4912 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4913 relevant to indirect inlining from IB. */
4915 static void
4916 ipa_read_indirect_edge_info (class lto_input_block *ib,
4917 class data_in *data_in,
4918 struct cgraph_edge *cs,
4919 class ipa_node_params *info)
4921 class cgraph_indirect_call_info *ii = cs->indirect_info;
4922 struct bitpack_d bp;
4924 ii->param_index = (int) streamer_read_hwi (ib);
4925 bp = streamer_read_bitpack (ib);
4926 ii->polymorphic = bp_unpack_value (&bp, 1);
4927 ii->agg_contents = bp_unpack_value (&bp, 1);
4928 ii->member_ptr = bp_unpack_value (&bp, 1);
4929 ii->by_ref = bp_unpack_value (&bp, 1);
4930 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
4931 ii->vptr_changed = bp_unpack_value (&bp, 1);
4932 if (ii->agg_contents || ii->polymorphic)
4933 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4934 else
4935 ii->offset = 0;
4936 if (ii->polymorphic)
4938 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4939 ii->otr_type = stream_read_tree (ib, data_in);
4940 ii->context.stream_in (ib, data_in);
4942 if (info && ii->param_index >= 0)
4944 if (ii->polymorphic)
4945 ipa_set_param_used_by_polymorphic_call (info,
4946 ii->param_index , true);
4947 ipa_set_param_used_by_indirect_call (info,
4948 ii->param_index, true);
4952 /* Stream out NODE info to OB. */
4954 static void
4955 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4957 int node_ref;
4958 lto_symtab_encoder_t encoder;
4959 class ipa_node_params *info = IPA_NODE_REF (node);
4960 int j;
4961 struct cgraph_edge *e;
4962 struct bitpack_d bp;
4964 encoder = ob->decl_state->symtab_node_encoder;
4965 node_ref = lto_symtab_encoder_encode (encoder, node);
4966 streamer_write_uhwi (ob, node_ref);
4968 streamer_write_uhwi (ob, ipa_get_param_count (info));
4969 for (j = 0; j < ipa_get_param_count (info); j++)
4970 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4971 bp = bitpack_create (ob->main_stream);
4972 gcc_assert (info->analysis_done
4973 || ipa_get_param_count (info) == 0);
4974 gcc_assert (!info->node_enqueued);
4975 gcc_assert (!info->ipcp_orig_node);
4976 for (j = 0; j < ipa_get_param_count (info); j++)
4977 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4978 streamer_write_bitpack (&bp);
4979 for (j = 0; j < ipa_get_param_count (info); j++)
4981 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4982 stream_write_tree (ob, ipa_get_type (info, j), true);
4984 for (e = node->callees; e; e = e->next_callee)
4986 class ipa_edge_args *args = IPA_EDGE_REF (e);
4988 if (!args)
4990 streamer_write_uhwi (ob, 0);
4991 continue;
4994 streamer_write_uhwi (ob,
4995 ipa_get_cs_argument_count (args) * 2
4996 + (args->polymorphic_call_contexts != NULL));
4997 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4999 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
5000 if (args->polymorphic_call_contexts != NULL)
5001 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
5004 for (e = node->indirect_calls; e; e = e->next_callee)
5006 class ipa_edge_args *args = IPA_EDGE_REF (e);
5007 if (!args)
5008 streamer_write_uhwi (ob, 0);
5009 else
5011 streamer_write_uhwi (ob,
5012 ipa_get_cs_argument_count (args) * 2
5013 + (args->polymorphic_call_contexts != NULL));
5014 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
5016 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
5017 if (args->polymorphic_call_contexts != NULL)
5018 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
5021 ipa_write_indirect_edge_info (ob, e);
5025 /* Stream in edge E from IB. */
5027 static void
5028 ipa_read_edge_info (class lto_input_block *ib,
5029 class data_in *data_in,
5030 struct cgraph_edge *e, bool prevails)
5032 int count = streamer_read_uhwi (ib);
5033 bool contexts_computed = count & 1;
5035 count /= 2;
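  /* The writer (ipa_write_node_info) streamed
     argument-count * 2 + (polymorphic contexts present), so the low bit
     recovers the context flag and the remaining bits the number of jump
     functions; e.g. a streamed value of 7 decodes to three jump functions
     accompanied by contexts.  */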
5036 if (!count)
5037 return;
5038 if (prevails
5039 && (e->possibly_call_in_translation_unit_p ()
5040 /* Also stream in jump functions to builtins in the hope that they
5041 will get fnspecs. */
5042 || fndecl_built_in_p (e->callee->decl, BUILT_IN_NORMAL)))
5044 class ipa_edge_args *args = IPA_EDGE_REF_GET_CREATE (e);
5045 vec_safe_grow_cleared (args->jump_functions, count, true);
5046 if (contexts_computed)
5047 vec_safe_grow_cleared (args->polymorphic_call_contexts, count, true);
5048 for (int k = 0; k < count; k++)
5050 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5051 data_in, prevails);
5052 if (contexts_computed)
5053 ipa_get_ith_polymorhic_call_context (args, k)->stream_in
5054 (ib, data_in);
5057 else
5059 for (int k = 0; k < count; k++)
5061 struct ipa_jump_func dummy;
5062 ipa_read_jump_function (ib, &dummy, e,
5063 data_in, prevails);
5064 if (contexts_computed)
5066 class ipa_polymorphic_call_context ctx;
5067 ctx.stream_in (ib, data_in);
5073 /* Stream in NODE info from IB. */
5075 static void
5076 ipa_read_node_info (class lto_input_block *ib, struct cgraph_node *node,
5077 class data_in *data_in)
5079 int k;
5080 struct cgraph_edge *e;
5081 struct bitpack_d bp;
5082 bool prevails = node->prevailing_p ();
5083 class ipa_node_params *info = prevails
5084 ? IPA_NODE_REF_GET_CREATE (node) : NULL;
5086 int param_count = streamer_read_uhwi (ib);
5087 if (prevails)
5089 ipa_alloc_node_params (node, param_count);
5090 for (k = 0; k < param_count; k++)
5091 (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
5092 if (ipa_get_param_count (info) != 0)
5093 info->analysis_done = true;
5094 info->node_enqueued = false;
5096 else
5097 for (k = 0; k < param_count; k++)
5098 streamer_read_uhwi (ib);
5100 bp = streamer_read_bitpack (ib);
5101 for (k = 0; k < param_count; k++)
5103 bool used = bp_unpack_value (&bp, 1);
5105 if (prevails)
5106 ipa_set_param_used (info, k, used);
5108 for (k = 0; k < param_count; k++)
5110 int nuses = streamer_read_hwi (ib);
5111 tree type = stream_read_tree (ib, data_in);
5113 if (prevails)
5115 ipa_set_controlled_uses (info, k, nuses);
5116 (*info->descriptors)[k].decl_or_type = type;
5119 for (e = node->callees; e; e = e->next_callee)
5120 ipa_read_edge_info (ib, data_in, e, prevails);
5121 for (e = node->indirect_calls; e; e = e->next_callee)
5123 ipa_read_edge_info (ib, data_in, e, prevails);
5124 ipa_read_indirect_edge_info (ib, data_in, e, info);
5128 /* Write jump functions for all functions in the current partition. */
5130 void
5131 ipa_prop_write_jump_functions (void)
5133 struct cgraph_node *node;
5134 struct output_block *ob;
5135 unsigned int count = 0;
5136 lto_symtab_encoder_iterator lsei;
5137 lto_symtab_encoder_t encoder;
5139 if (!ipa_node_params_sum || !ipa_edge_args_sum)
5140 return;
5142 ob = create_output_block (LTO_section_jump_functions);
5143 encoder = ob->decl_state->symtab_node_encoder;
5144 ob->symbol = NULL;
5145 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5146 lsei_next_function_in_partition (&lsei))
5148 node = lsei_cgraph_node (lsei);
5149 if (node->has_gimple_body_p ()
5150 && IPA_NODE_REF (node) != NULL)
5151 count++;
5154 streamer_write_uhwi (ob, count);
5156 /* Process all of the functions. */
5157 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5158 lsei_next_function_in_partition (&lsei))
5160 node = lsei_cgraph_node (lsei);
5161 if (node->has_gimple_body_p ()
5162 && IPA_NODE_REF (node) != NULL)
5163 ipa_write_node_info (ob, node);
5165 streamer_write_char_stream (ob->main_stream, 0);
5166 produce_asm (ob, NULL);
5167 destroy_output_block (ob);
5170 /* Read section in file FILE_DATA of length LEN with data DATA. */
5172 static void
5173 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
5174 size_t len)
5176 const struct lto_function_header *header =
5177 (const struct lto_function_header *) data;
5178 const int cfg_offset = sizeof (struct lto_function_header);
5179 const int main_offset = cfg_offset + header->cfg_size;
5180 const int string_offset = main_offset + header->main_size;
5181 class data_in *data_in;
5182 unsigned int i;
5183 unsigned int count;
5185 lto_input_block ib_main ((const char *) data + main_offset,
5186 header->main_size, file_data->mode_table);
5188 data_in =
5189 lto_data_in_create (file_data, (const char *) data + string_offset,
5190 header->string_size, vNULL);
5191 count = streamer_read_uhwi (&ib_main);
5193 for (i = 0; i < count; i++)
5195 unsigned int index;
5196 struct cgraph_node *node;
5197 lto_symtab_encoder_t encoder;
5199 index = streamer_read_uhwi (&ib_main);
5200 encoder = file_data->symtab_node_encoder;
5201 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5202 index));
5203 gcc_assert (node->definition);
5204 ipa_read_node_info (&ib_main, node, data_in);
5206 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5207 len);
5208 lto_data_in_delete (data_in);
5211 /* Read ipcp jump functions. */
5213 void
5214 ipa_prop_read_jump_functions (void)
5216 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5217 struct lto_file_decl_data *file_data;
5218 unsigned int j = 0;
5220 ipa_check_create_node_params ();
5221 ipa_check_create_edge_args ();
5222 ipa_register_cgraph_hooks ();
5224 while ((file_data = file_data_vec[j++]))
5226 size_t len;
5227 const char *data
5228 = lto_get_summary_section_data (file_data, LTO_section_jump_functions,
5229 &len);
5230 if (data)
5231 ipa_prop_read_section (file_data, data, len);
5235 void
5236 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
5238 int node_ref;
5239 unsigned int count = 0;
5240 lto_symtab_encoder_t encoder;
5241 struct ipa_agg_replacement_value *aggvals, *av;
5243 aggvals = ipa_get_agg_replacements_for_node (node);
5244 encoder = ob->decl_state->symtab_node_encoder;
5245 node_ref = lto_symtab_encoder_encode (encoder, node);
5246 streamer_write_uhwi (ob, node_ref);
5248 for (av = aggvals; av; av = av->next)
5249 count++;
5250 streamer_write_uhwi (ob, count);
5252 for (av = aggvals; av; av = av->next)
5254 struct bitpack_d bp;
5256 streamer_write_uhwi (ob, av->offset);
5257 streamer_write_uhwi (ob, av->index);
5258 stream_write_tree (ob, av->value, true);
5260 bp = bitpack_create (ob->main_stream);
5261 bp_pack_value (&bp, av->by_ref, 1);
5262 streamer_write_bitpack (&bp);
5265 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5266 if (ts && vec_safe_length (ts->m_vr) > 0)
5268 count = ts->m_vr->length ();
5269 streamer_write_uhwi (ob, count);
5270 for (unsigned i = 0; i < count; ++i)
5272 struct bitpack_d bp;
5273 ipa_vr *parm_vr = &(*ts->m_vr)[i];
5274 bp = bitpack_create (ob->main_stream);
5275 bp_pack_value (&bp, parm_vr->known, 1);
5276 streamer_write_bitpack (&bp);
5277 if (parm_vr->known)
5279 streamer_write_enum (ob->main_stream, value_rang_type,
5280 VR_LAST, parm_vr->type);
5281 streamer_write_wide_int (ob, parm_vr->min);
5282 streamer_write_wide_int (ob, parm_vr->max);
5286 else
5287 streamer_write_uhwi (ob, 0);
5289 if (ts && vec_safe_length (ts->bits) > 0)
5291 count = ts->bits->length ();
5292 streamer_write_uhwi (ob, count);
5294 for (unsigned i = 0; i < count; ++i)
5296 const ipa_bits *bits_jfunc = (*ts->bits)[i];
5297 struct bitpack_d bp = bitpack_create (ob->main_stream);
5298 bp_pack_value (&bp, !!bits_jfunc, 1);
5299 streamer_write_bitpack (&bp);
5300 if (bits_jfunc)
5302 streamer_write_widest_int (ob, bits_jfunc->value);
5303 streamer_write_widest_int (ob, bits_jfunc->mask);
5307 else
5308 streamer_write_uhwi (ob, 0);
5311 /* Stream in the aggregate value replacement chain and other ipcp transformation summaries for NODE from IB. */
5313 static void
5314 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
5315 data_in *data_in)
5317 struct ipa_agg_replacement_value *aggvals = NULL;
5318 unsigned int count, i;
5320 count = streamer_read_uhwi (ib);
5321 for (i = 0; i <count; i++)
5323 struct ipa_agg_replacement_value *av;
5324 struct bitpack_d bp;
5326 av = ggc_alloc<ipa_agg_replacement_value> ();
5327 av->offset = streamer_read_uhwi (ib);
5328 av->index = streamer_read_uhwi (ib);
5329 av->value = stream_read_tree (ib, data_in);
5330 bp = streamer_read_bitpack (ib);
5331 av->by_ref = bp_unpack_value (&bp, 1);
5332 av->next = aggvals;
5333 aggvals = av;
5335 ipa_set_node_agg_value_chain (node, aggvals);
5337 count = streamer_read_uhwi (ib);
5338 if (count > 0)
5340 ipcp_transformation_initialize ();
5341 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
5342 vec_safe_grow_cleared (ts->m_vr, count, true);
5343 for (i = 0; i < count; i++)
5345 ipa_vr *parm_vr;
5346 parm_vr = &(*ts->m_vr)[i];
5347 struct bitpack_d bp;
5348 bp = streamer_read_bitpack (ib);
5349 parm_vr->known = bp_unpack_value (&bp, 1);
5350 if (parm_vr->known)
5352 parm_vr->type = streamer_read_enum (ib, value_range_kind,
5353 VR_LAST);
5354 parm_vr->min = streamer_read_wide_int (ib);
5355 parm_vr->max = streamer_read_wide_int (ib);
5359 count = streamer_read_uhwi (ib);
5360 if (count > 0)
5362 ipcp_transformation_initialize ();
5363 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
5364 vec_safe_grow_cleared (ts->bits, count, true);
5366 for (i = 0; i < count; i++)
5368 struct bitpack_d bp = streamer_read_bitpack (ib);
5369 bool known = bp_unpack_value (&bp, 1);
5370 if (known)
5372 const widest_int value = streamer_read_widest_int (ib);
5373 const widest_int mask = streamer_read_widest_int (ib);
5374 ipa_bits *bits
5375 = ipa_get_ipa_bits_for_value (value, mask);
5376 (*ts->bits)[i] = bits;
5382 /* Write all aggregate replacements for nodes in the set. */
5384 void
5385 ipcp_write_transformation_summaries (void)
5387 struct cgraph_node *node;
5388 struct output_block *ob;
5389 unsigned int count = 0;
5390 lto_symtab_encoder_iterator lsei;
5391 lto_symtab_encoder_t encoder;
5393 ob = create_output_block (LTO_section_ipcp_transform);
5394 encoder = ob->decl_state->symtab_node_encoder;
5395 ob->symbol = NULL;
5396 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5397 lsei_next_function_in_partition (&lsei))
5399 node = lsei_cgraph_node (lsei);
5400 if (node->has_gimple_body_p ())
5401 count++;
5404 streamer_write_uhwi (ob, count);
5406 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5407 lsei_next_function_in_partition (&lsei))
5409 node = lsei_cgraph_node (lsei);
5410 if (node->has_gimple_body_p ())
5411 write_ipcp_transformation_info (ob, node);
5413 streamer_write_char_stream (ob->main_stream, 0);
5414 produce_asm (ob, NULL);
5415 destroy_output_block (ob);
5418 /* Read replacements section in file FILE_DATA of length LEN with data
5419 DATA. */
5421 static void
5422 read_replacements_section (struct lto_file_decl_data *file_data,
5423 const char *data,
5424 size_t len)
5426 const struct lto_function_header *header =
5427 (const struct lto_function_header *) data;
5428 const int cfg_offset = sizeof (struct lto_function_header);
5429 const int main_offset = cfg_offset + header->cfg_size;
5430 const int string_offset = main_offset + header->main_size;
5431 class data_in *data_in;
5432 unsigned int i;
5433 unsigned int count;
5435 lto_input_block ib_main ((const char *) data + main_offset,
5436 header->main_size, file_data->mode_table);
5438 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5439 header->string_size, vNULL);
5440 count = streamer_read_uhwi (&ib_main);
5442 for (i = 0; i < count; i++)
5444 unsigned int index;
5445 struct cgraph_node *node;
5446 lto_symtab_encoder_t encoder;
5448 index = streamer_read_uhwi (&ib_main);
5449 encoder = file_data->symtab_node_encoder;
5450 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5451 index));
5452 gcc_assert (node->definition);
5453 read_ipcp_transformation_info (&ib_main, node, data_in);
5455 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5456 len);
5457 lto_data_in_delete (data_in);
5460 /* Read IPA-CP aggregate replacements. */
5462 void
5463 ipcp_read_transformation_summaries (void)
5465 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5466 struct lto_file_decl_data *file_data;
5467 unsigned int j = 0;
5469 while ((file_data = file_data_vec[j++]))
5471 size_t len;
5472 const char *data
5473 = lto_get_summary_section_data (file_data, LTO_section_ipcp_transform,
5474 &len);
5475 if (data)
5476 read_replacements_section (file_data, data, len);
5480 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5481 NODE. */
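/* For illustration (hypothetical indices): if NODE is a clone whose second
   of three original parameters was removed, get_updated_indices may yield
   {0, -1, 1}; a replacement recorded against original index 2 is then
   remapped to 1, while one against index 1 is marked dead with -1.  */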
5483 static void
5484 adjust_agg_replacement_values (struct cgraph_node *node,
5485 struct ipa_agg_replacement_value *aggval)
5487 struct ipa_agg_replacement_value *v;
5488 clone_info *cinfo = clone_info::get (node);
5490 if (!cinfo || !cinfo->param_adjustments)
5491 return;
5493 auto_vec<int, 16> new_indices;
5494 cinfo->param_adjustments->get_updated_indices (&new_indices);
5495 for (v = aggval; v; v = v->next)
5497 gcc_checking_assert (v->index >= 0);
5499 if ((unsigned) v->index < new_indices.length ())
5500 v->index = new_indices[v->index];
5501 else
5502 /* This can happen if we know about a constant passed by reference by
5503 an argument which is never actually used for anything, let alone
5504 loading that constant. */
5505 v->index = -1;
5509 /* Dominator walker driving the ipcp modification phase. */
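/* For illustration (hypothetical user code): if IPA-CP has proved that in
   this clone the aggregate reachable from a parameter always holds the
   constant 42 at a given offset, the walker rewrites a matching load such as

     x = p->field;

   into

     x = 42;

   provided the load's index, offset, size and by-reference-ness agree with
   the recorded replacement value.  */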
5511 class ipcp_modif_dom_walker : public dom_walker
5513 public:
5514 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
5515 vec<ipa_param_descriptor, va_gc> *descs,
5516 struct ipa_agg_replacement_value *av,
5517 bool *sc, bool *cc)
5518 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5519 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5521 virtual edge before_dom_children (basic_block);
5523 private:
5524 struct ipa_func_body_info *m_fbi;
5525 vec<ipa_param_descriptor, va_gc> *m_descriptors;
5526 struct ipa_agg_replacement_value *m_aggval;
5527 bool *m_something_changed, *m_cfg_changed;
5530 edge
5531 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5533 gimple_stmt_iterator gsi;
5534 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5536 struct ipa_agg_replacement_value *v;
5537 gimple *stmt = gsi_stmt (gsi);
5538 tree rhs, val, t;
5539 HOST_WIDE_INT offset;
5540 poly_int64 size;
5541 int index;
5542 bool by_ref, vce;
5544 if (!gimple_assign_load_p (stmt))
5545 continue;
5546 rhs = gimple_assign_rhs1 (stmt);
5547 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5548 continue;
5550 vce = false;
5551 t = rhs;
5552 while (handled_component_p (t))
5554 /* V_C_E can do things like convert an array of integers into one
5555 bigger integer, and similar things that we do not handle below. */
5556 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5558 vce = true;
5559 break;
5561 t = TREE_OPERAND (t, 0);
5563 if (vce)
5564 continue;
5566 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
5567 &offset, &size, &by_ref))
5568 continue;
5569 for (v = m_aggval; v; v = v->next)
5570 if (v->index == index
5571 && v->offset == offset)
5572 break;
5573 if (!v
5574 || v->by_ref != by_ref
5575 || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v->value))),
5576 size))
5577 continue;
5579 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5580 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5582 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5583 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5584 else if (TYPE_SIZE (TREE_TYPE (rhs))
5585 == TYPE_SIZE (TREE_TYPE (v->value)))
5586 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5587 else
5589 if (dump_file)
5591 fprintf (dump_file, " const ");
5592 print_generic_expr (dump_file, v->value);
5593 fprintf (dump_file, " can't be converted to type of ");
5594 print_generic_expr (dump_file, rhs);
5595 fprintf (dump_file, "\n");
5597 continue;
5600 else
5601 val = v->value;
5603 if (dump_file && (dump_flags & TDF_DETAILS))
5605 fprintf (dump_file, "Modifying stmt:\n ");
5606 print_gimple_stmt (dump_file, stmt, 0);
5608 gimple_assign_set_rhs_from_tree (&gsi, val);
5609 update_stmt (stmt);
5611 if (dump_file && (dump_flags & TDF_DETAILS))
5613 fprintf (dump_file, "into:\n ");
5614 print_gimple_stmt (dump_file, stmt, 0);
5615 fprintf (dump_file, "\n");
5618 *m_something_changed = true;
5619 if (maybe_clean_eh_stmt (stmt)
5620 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5621 *m_cfg_changed = true;
5623 return NULL;
5626 /* Return true if we have recorded VALUE and MASK about PARM.
5627 Set VALUE and MASK accordingly. */
5629 bool
5630 ipcp_get_parm_bits (tree parm, tree *value, widest_int *mask)
5632 cgraph_node *cnode = cgraph_node::get (current_function_decl);
5633 ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
5634 if (!ts || vec_safe_length (ts->bits) == 0)
5635 return false;
5637 int i = 0;
5638 for (tree p = DECL_ARGUMENTS (current_function_decl);
5639 p != parm; p = DECL_CHAIN (p))
5641 i++;
5642 /* Ignore static chain. */
5643 if (!p)
5644 return false;
5647 clone_info *cinfo = clone_info::get (cnode);
5648 if (cinfo && cinfo->param_adjustments)
5650 i = cinfo->param_adjustments->get_original_index (i);
5651 if (i < 0)
5652 return false;
5655 vec<ipa_bits *, va_gc> &bits = *ts->bits;
5656 if (!bits[i])
5657 return false;
5658 *mask = bits[i]->mask;
5659 *value = wide_int_to_tree (TREE_TYPE (parm), bits[i]->value);
5660 return true;
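/* A minimal usage sketch (hypothetical caller; assumes PARM is a PARM_DECL
   of the current function):

     widest_int mask;
     tree value;
     if (ipcp_get_parm_bits (parm, &value, &mask))
       {
	 // Bits set in MASK are unknown; the remaining bits of PARM are
	 // known to equal the corresponding bits of VALUE.
       }
*/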
5664 /* Update bits info of formal parameters as described in
5665 ipcp_transformation. */
5667 static void
5668 ipcp_update_bits (struct cgraph_node *node)
5670 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5672 if (!ts || vec_safe_length (ts->bits) == 0)
5673 return;
5674 vec<ipa_bits *, va_gc> &bits = *ts->bits;
5675 unsigned count = bits.length ();
5676 if (!count)
5677 return;
5679 auto_vec<int, 16> new_indices;
5680 bool need_remapping = false;
5681 clone_info *cinfo = clone_info::get (node);
5682 if (cinfo && cinfo->param_adjustments)
5684 cinfo->param_adjustments->get_updated_indices (&new_indices);
5685 need_remapping = true;
5687 auto_vec <tree, 16> parm_decls;
5688 push_function_arg_decls (&parm_decls, node->decl);
5690 for (unsigned i = 0; i < count; ++i)
5692 tree parm;
5693 if (need_remapping)
5695 if (i >= new_indices.length ())
5696 continue;
5697 int idx = new_indices[i];
5698 if (idx < 0)
5699 continue;
5700 parm = parm_decls[idx];
5702 else
5703 parm = parm_decls[i];
5704 gcc_checking_assert (parm);
5707 if (!bits[i]
5708 || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
5709 || POINTER_TYPE_P (TREE_TYPE (parm)))
5710 || !is_gimple_reg (parm))
5711 continue;
5713 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5714 if (!ddef)
5715 continue;
5717 if (dump_file)
5719 fprintf (dump_file, "Adjusting mask for param %u to ", i);
5720 print_hex (bits[i]->mask, dump_file);
5721 fprintf (dump_file, "\n");
5724 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5726 unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
5727 signop sgn = TYPE_SIGN (TREE_TYPE (ddef));
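      /* Bits set in the stored mask are unknown and therefore possibly
	 nonzero; bits known to be one are supplied by the stored value;
	 their union is the conservative set of possibly-nonzero bits
	 (illustratively, mask 0b1100 with value 0b0001 yields nonzero
	 bits 0b1101).  */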
5729 wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
5730 | wide_int::from (bits[i]->value, prec, sgn);
5731 set_nonzero_bits (ddef, nonzero_bits);
5733 else
5735 unsigned tem = bits[i]->mask.to_uhwi ();
5736 unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
5737 unsigned align = tem & -tem;
5738 unsigned misalign = bitpos & (align - 1);
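      /* Worked example (illustrative): a mask whose low three bits are
	 clear, e.g. 0x...f8, means those bits are known, so tem & -tem
	 isolates the lowest unknown bit and yields align == 8; with a
	 known value of 0x4, misalign == 4, i.e. the pointer is known to
	 be 4 modulo 8.  */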
5740 if (align > 1)
5742 if (dump_file)
5743 fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);
5745 unsigned old_align, old_misalign;
5746 struct ptr_info_def *pi = get_ptr_info (ddef);
5747 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5749 if (old_known
5750 && old_align > align)
5752 if (dump_file)
5754 fprintf (dump_file, "But alignment was already %u.\n", old_align);
5755 if ((old_misalign & (align - 1)) != misalign)
5756 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5757 old_misalign, misalign);
5759 continue;
5762 if (old_known
5763 && ((misalign & (old_align - 1)) != old_misalign)
5764 && dump_file)
5765 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5766 old_misalign, misalign);
5768 set_ptr_info_alignment (pi, align, misalign);
5774 bool
5775 ipa_vr::nonzero_p (tree expr_type) const
5777 if (type == VR_ANTI_RANGE && wi::eq_p (min, 0) && wi::eq_p (max, 0))
5778 return true;
5780 unsigned prec = TYPE_PRECISION (expr_type);
5781 return (type == VR_RANGE
5782 && TYPE_UNSIGNED (expr_type)
5783 && wi::eq_p (min, wi::one (prec))
5784 && wi::eq_p (max, wi::max_value (prec, TYPE_SIGN (expr_type))));
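/* For example (illustrative): for a 32-bit unsigned type, the range
   [1, 0xffffffff] is recognized as nonzero, as is the anti-range ~[0, 0]
   for any type.  */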
5787 /* Update value range of formal parameters as described in
5788 ipcp_transformation. */
5790 static void
5791 ipcp_update_vr (struct cgraph_node *node)
5793 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5794 if (!ts || vec_safe_length (ts->m_vr) == 0)
5795 return;
5796 const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
5797 unsigned count = vr.length ();
5798 if (!count)
5799 return;
5801 auto_vec<int, 16> new_indices;
5802 bool need_remapping = false;
5803 clone_info *cinfo = clone_info::get (node);
5804 if (cinfo && cinfo->param_adjustments)
5806 cinfo->param_adjustments->get_updated_indices (&new_indices);
5807 need_remapping = true;
5809 auto_vec <tree, 16> parm_decls;
5810 push_function_arg_decls (&parm_decls, node->decl);
5812 for (unsigned i = 0; i < count; ++i)
5814 tree parm;
5815 int remapped_idx;
5816 if (need_remapping)
5818 if (i >= new_indices.length ())
5819 continue;
5820 remapped_idx = new_indices[i];
5821 if (remapped_idx < 0)
5822 continue;
5824 else
5825 remapped_idx = i;
5827 parm = parm_decls[remapped_idx];
5829 gcc_checking_assert (parm);
5830 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5832 if (!ddef || !is_gimple_reg (parm))
5833 continue;
5835 if (vr[i].known
5836 && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
5838 tree type = TREE_TYPE (ddef);
5839 unsigned prec = TYPE_PRECISION (type);
5840 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5842 if (dump_file)
5844 fprintf (dump_file, "Setting value range of param %u "
5845 "(now %i) ", i, remapped_idx);
5846 fprintf (dump_file, "%s[",
5847 (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
5848 print_decs (vr[i].min, dump_file);
5849 fprintf (dump_file, ", ");
5850 print_decs (vr[i].max, dump_file);
5851 fprintf (dump_file, "]\n");
5853 set_range_info (ddef, vr[i].type,
5854 wide_int_storage::from (vr[i].min, prec,
5855 TYPE_SIGN (type)),
5856 wide_int_storage::from (vr[i].max, prec,
5857 TYPE_SIGN (type)));
5859 else if (POINTER_TYPE_P (TREE_TYPE (ddef))
5860 && vr[i].nonzero_p (TREE_TYPE (ddef)))
5862 if (dump_file)
5863 fprintf (dump_file, "Setting nonnull for %u\n", i);
5864 set_ptr_nonnull (ddef);
5870 /* IPCP transformation phase doing propagation of aggregate values. */
5872 unsigned int
5873 ipcp_transform_function (struct cgraph_node *node)
5875 vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
5876 struct ipa_func_body_info fbi;
5877 struct ipa_agg_replacement_value *aggval;
5878 int param_count;
5879 bool cfg_changed = false, something_changed = false;
5881 gcc_checking_assert (cfun);
5882 gcc_checking_assert (current_function_decl);
5884 if (dump_file)
5885 fprintf (dump_file, "Modification phase of node %s\n",
5886 node->dump_name ());
5888 ipcp_update_bits (node);
5889 ipcp_update_vr (node);
5890 aggval = ipa_get_agg_replacements_for_node (node);
5891 if (!aggval)
5892 return 0;
5893 param_count = count_formal_params (node->decl);
5894 if (param_count == 0)
5895 return 0;
5896 adjust_agg_replacement_values (node, aggval);
5897 if (dump_file)
5898 ipa_dump_agg_replacement_values (dump_file, aggval);
5900 fbi.node = node;
5901 fbi.info = NULL;
5902 fbi.bb_infos = vNULL;
5903 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
5904 fbi.param_count = param_count;
5905 fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);
5907 vec_safe_grow_cleared (descriptors, param_count, true);
5908 ipa_populate_param_decls (node, *descriptors);
5909 calculate_dominance_info (CDI_DOMINATORS);
5910 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5911 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5913 int i;
5914 struct ipa_bb_info *bi;
5915 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5916 free_ipa_bb_info (bi);
5917 fbi.bb_infos.release ();
5918 free_dominance_info (CDI_DOMINATORS);
5920 ipcp_transformation *s = ipcp_transformation_sum->get (node);
5921 s->agg_values = NULL;
5922 s->bits = NULL;
5923 s->m_vr = NULL;
5925 vec_free (descriptors);
5927 if (!something_changed)
5928 return 0;
5930 if (cfg_changed)
5931 delete_unreachable_blocks_update_callgraph (node, false);
5933 return TODO_update_ssa_only_virtuals;
5937 /* Return true if OTHER describes the same aggregate value. */
5938 bool
5939 ipa_agg_value::equal_to (const ipa_agg_value &other)
5941 return offset == other.offset
5942 && operand_equal_p (value, other.value, 0);
5945 /* Destructor that also releases the individual aggregate values. */
5947 ipa_auto_call_arg_values::~ipa_auto_call_arg_values ()
5949 ipa_release_agg_values (m_known_aggs, false);
5954 #include "gt-ipa-prop.h"