/* Interprocedural analyses.
   Copyright (C) 2005-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "calls.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "gimple-pretty-print.h"
#include "params.h"
#include "ipa-utils.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"
/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;
/* Vector of IPA-CP transformation data for each clone.  */
vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
/* Vector where the edge argument infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
}
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl_or_type == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
/* Populate the param_decl fields in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
                          vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl_or_type = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
                                                             true);
      param_num++;
    }
}
/* Return how many formal parameters FNDECL has.  */

static int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}
/* Dump the Ith formal parameter of the function corresponding to INFO to
   FILE.  Note there is no setter function as the descriptors array is built
   just once using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl_or_type)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl_or_type, 0);
    }
}
/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val, 0);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
				  0);
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name(jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f,
				  jump_func->value.pass_through.operand, 0);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value, 0);
		}
	      fprintf (f, "\n");
	    }
	}

      struct ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, "         Context: ");
	  ctx->dump (dump_file);
	}

      if (jump_func->alignment.known)
	{
	  fprintf (f, "         Alignment: %u, misalignment: %u\n",
		   jump_func->alignment.align,
		   jump_func->alignment.misalign);
	}
      else
	fprintf (f, "         Unknown alignment\n");

      if (jump_func->bits.known)
	{
	  fprintf (f, "         value: "); print_hex (jump_func->bits.value, f);
	  fprintf (f, ", mask: "); print_hex (jump_func->bits.mask, f);
	  fprintf (f, "\n");
	}
      else
	fprintf (f, "         Unknown bits\n");

      if (jump_func->vr_known)
	{
	  fprintf (f, "         VR  ");
	  fprintf (f, "%s[",
		   (jump_func->m_vr.type == VR_ANTI_RANGE) ? "~" : "");
	  print_decs (jump_func->m_vr.min, f);
	  fprintf (f, ", ");
	  print_decs (jump_func->m_vr.max, f);
	  fprintf (f, "]\n");
	}
      else
	fprintf (f, "         Unknown VR\n");
    }
}
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s/%i:\n", node->name (),
	   node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
	       xstrdup_for_dump (node->name ()), node->order,
	       xstrdup_for_dump (cs->callee->name ()),
	       cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}
/* Set JFUNC to be an "unknown" jump function that carries no information.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
  jfunc->alignment.known = false;
  jfunc->bits.known = false;
  jfunc->vr_known = false;
}
/* Set DST to be a copy of another jump function SRC (to be used by jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}
/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}

/* Set JFUNC to be an arithmetic pass-through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}
/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}
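/* For illustration only (a hypothetical example, not part of the original
   sources): given

     struct A { ... };
     struct B { int x; struct A a; };

     void caller (struct B *b) { callee (&b->a); }

   the argument passed to callee is described by an ancestor jump function
   whose formal_id refers to parameter 'b' of caller and whose offset is the
   bit offset of field 'a' within struct B.  */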
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type of the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
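/* For illustration only (a hypothetical C++ example, not part of the original
   sources), a constructor such as

     struct B : A
     {
       B () : A ()	 // 1) ancestor constructors run first
       {		 // 2) by now the VMT pointers hold B's vtables
	 m = make_m (); // 3) only then member ctors and user code run
       }
       M *m;
     };

   follows the three sections described above, which is why the backward walk
   over the function body may safely skip call statements while looking for
   the section 2 stores.  */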
/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* See if ARG is a PARAM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return true if the dynamic type of the instance
   may change between the beginning of the function and the point where CALL
   is invoked, and false when it provably cannot.

   Generally functions are not allowed to change the type of such instances,
   but they may call destructors.  We assume that methods cannot destroy the
   THIS pointer.  Also, as a special case, constructors and destructors may
   change the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot make any changes to the dynamic type; that
     requires writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inline cdtor is actually working on ARG, but we don't have
     an easy way to tie the two together, so punt on all non-pure cdtors.
     We could also record the types of cdtors and, once we know the type
     of the instance, match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}
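/* For illustration only (a hypothetical C++ example, not part of the original
   sources):

     void A::method ()	// *this keeps its dynamic type here, so the
     {			// function returns false for the THIS argument
       use (this);
     }

     A::A ()		// constructor: the dynamic type of *this does
     {			// change, so the function returns true
       use (this);
     }

   The walk over the inline stack above additionally catches the case where
   such a cdtor has been inlined into the function containing CALL.  */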
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gcall *call, struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
						call, jfunc, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
						call, jfunc, 0);
}
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct ipa_func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered that does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  if (TREE_READONLY (base))
    return true;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
			    vec<ipa_param_descriptor> descriptors,
			    gimple *stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
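/* For illustration only (a hypothetical gimple fragment, not part of the
   original sources): in

     <bb 2>:
     a.0_2 = a;
     bar (a.0_2);

   where 'a' is an addressable PARM_DECL, the statement a.0_2 = a is
   recognized by this function and the index of 'a' is returned, provided no
   aliased store may have modified 'a' before the load.  */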
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
/* Return true if we can prove that OP is a memory reference loading
   data from an aggregate passed as a parameter.

   The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it
   returns false if it cannot prove that the value has not been modified
   before the load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will
   return true even if it cannot prove the value has not been modified; in
   that case it will store false to *GUARANTEED_UNMODIFIED, otherwise it will
   store true there.

   FBI and DESCRIPTORS describe parameters of the current function (but FBI
   can be NULL), STMT is the load statement.  If the function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index,
   offset within the aggregate and whether it is a load from a value passed
   by reference respectively.  */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor> descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
			bool *by_ref_p, bool *guaranteed_unmodified)
{
  int index;
  HOST_WIDE_INT size, max_size;
  bool reverse;
  tree base
    = get_ref_base_and_extent (op, offset_p, &size, &max_size, &reverse);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  if (guaranteed_unmodified)
	    *guaranteed_unmodified = true;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	   void (*<T2e4>) (struct S *) D.1867;
	   struct S * p.1;

	   <bb 2>:
	   p.1_1 = p;
	   D.1867_2 = p.1_1->f;
	   D.1867_2 ();
	   gdp = &p;
	 }
      */
      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0)
    {
      bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
      if (!data_preserved && !guaranteed_unmodified)
	return false;

      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      if (guaranteed_unmodified)
	*guaranteed_unmodified = data_preserved;
      return true;
    }
  return false;
}
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
	int a.0;

	a.0_2 = a;
	bar (a.0_2);
      }

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
	int D.2064;

	D.2064_4 = a.1(D) + 4;
	bar (D.2064_4);
      }

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
	int a.0;
	int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);
      }

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);
     }

   INFO is the structure describing individual parameters that is accessed in
   different stages of IPA optimizations.  FBI contains the information that
   is only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
	{
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  ipa_set_jf_arith_pass_through (jfunc, index, op2,
					 gimple_assign_rhs_code (stmt));
	}
      else if (gimple_assign_single_p (stmt))
	{
	  bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
	  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size, &reverse);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size, &reverse);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

     <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

     <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
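/* For reference only (an illustrative sketch, not part of the original
   sources): under the Itanium C++ ABI a pointer to member function is laid
   out roughly as

     struct
     {
       fnptr_or_vtable_index __pfn;   // the method pointer field checked above
       ptrdiff_t __delta;	      // the this-pointer adjustment field
     };

   which is the two-field shape that type_like_member_ptr_p looks for.  */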
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}
/* Simple linked list, describing known contents of an aggregate before a
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents are known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
				HOST_WIDE_INT lhs_offset,
				HOST_WIDE_INT lhs_size,
				bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
	return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	/* We already know this value is subsequently overwritten with
	   something else.  */
	*already_there = true;
      else
	/* Otherwise this is a partial overlap which we cannot
	   represent.  */
	return NULL;
    }
  return p;
}
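/* For illustration only (hypothetical numbers, not from the sources): with
   existing entries covering bits [0, 32) and [64, 96), a new entry with
   lhs_offset 64 and lhs_size 32 sets *ALREADY_THERE, a new entry with
   lhs_offset 32 and lhs_size 32 yields the insertion point between the two,
   and one with lhs_offset 48 overlaps the second entry only partially, so
   NULL is returned.  */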
/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int const_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
	{
	  struct ipa_agg_jf_item item;
	  item.offset = list->offset - arg_offset;
	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	  item.value = unshare_expr_without_location (list->constant);
	  jfunc->agg.items->quick_push (item);
	}
      list = list->next;
    }
}
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */

static void
determine_locally_known_aggregate_parts (gcall *call, tree arg,
					 tree arg_type,
					 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  if (PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS) == 0)
    return;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  HOST_WIDE_INT arg_max_size;
	  bool reverse;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					      &arg_max_size, &reverse);
	  if (arg_max_size == -1
	      || arg_max_size != arg_size
	      || arg_offset < 0)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;
      bool reverse;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					  &arg_max_size, &reverse);
      if (arg_max_size == -1
	  || arg_max_size != arg_size
	  || arg_offset < 0)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
     describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple *stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;
      bool reverse;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
	continue;
      if (!gimple_assign_single_p (stmt))
	break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
	  || TREE_CODE (lhs) == BIT_FIELD_REF
	  || contains_bitfld_component_ref_p (lhs))
	break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
					  &lhs_max_size, &reverse);
      if (lhs_max_size == -1
	  || lhs_max_size != lhs_size)
	break;

      if (check_ref)
	{
	  if (TREE_CODE (lhs_base) != MEM_REF
	      || TREE_OPERAND (lhs_base, 0) != arg_base
	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	    break;
	}
      else if (lhs_base != arg_base)
	{
	  if (DECL_P (lhs_base))
	    continue;
	  else
	    break;
	}

      bool already_there = false;
      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
					  &already_there);
      if (!p)
	break;
      if (already_there)
	continue;

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
	{
	  n->constant = rhs;
	  const_count++;
	}
      else
	n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}
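/* For illustration only (a hypothetical input, not part of the original
   sources): given a caller containing

     struct S s;
     s.a = 1;
     s.b = 2;
     foo (&s);

   the backward walk from the call to foo records the constants 1 and 2
   together with their offsets within S, and the resulting aggregate jump
   function lets IPA-CP know the contents of the aggregate that foo
   receives.  */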
/* Return the type of the Ith formal parameter of the function represented
   by call graph edge E, or NULL if it cannot be determined.  */

static tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
/* Compute jump functions for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  struct ipa_polymorphic_call_context context (cs->caller->decl,
						       arg, cs->call_stmt,
						       &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  unsigned HOST_WIDE_INT hwi_bitpos;
	  unsigned align;

	  get_pointer_alignment_1 (arg, &align, &hwi_bitpos);
	  if (align > BITS_PER_UNIT
	      && align % BITS_PER_UNIT == 0
	      && hwi_bitpos % BITS_PER_UNIT == 0)
	    {
	      jfunc->alignment.known = true;
	      jfunc->alignment.align = align / BITS_PER_UNIT;
	      jfunc->alignment.misalign = hwi_bitpos / BITS_PER_UNIT;
	    }
	  else
	    gcc_assert (!jfunc->alignment.known);
	  gcc_assert (!jfunc->vr_known);
	}
      else
	{
	  wide_int min, max;
	  value_range_type type;
	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && (type = get_range_info (arg, &min, &max))
	      && (type == VR_RANGE || type == VR_ANTI_RANGE)
	      && (min.get_precision () <= TYPE_PRECISION (param_type)))
	    {
	      jfunc->vr_known = true;
	      jfunc->m_vr.type = type;
	      jfunc->m_vr.min = wide_int_to_tree (param_type, min);
	      jfunc->m_vr.max = wide_int_to_tree (param_type, max);
	    }
	  else
	    gcc_assert (!jfunc->vr_known);
	  gcc_assert (!jfunc->alignment.known);
	}

      if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
	  && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
	{
	  jfunc->bits.known = true;

	  if (TREE_CODE (arg) == SSA_NAME)
	    {
	      jfunc->bits.value = 0;
	      jfunc->bits.mask = widest_int::from (get_nonzero_bits (arg),
						   TYPE_SIGN (TREE_TYPE (arg)));
	    }
	  else
	    {
	      jfunc->bits.value = wi::to_widest (arg);
	      jfunc->bits.mask = 0;
	    }
	}
      else
	gcc_assert (!jfunc->bits.known);

      if (is_gimple_ip_invariant (arg)
	  || (TREE_CODE (arg) == VAR_DECL
	      && is_global_var (arg)
	      && TREE_READONLY (arg)))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >= 0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
		}
	    }
	  else
	    {
	      gimple *stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call,
						    as_a <gphi *> (stmt));
	    }
	}

      /* If ARG is a pointer, we cannot use its type to determine the type of
	 the aggregate passed (because type conversions are ignored in
	 gimple).  Usually we can safely get the type from the function
	 declaration, but in case of K&R prototypes or variadic functions we
	 can try our luck with the type of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object,
	 we may better work out the type based on the memory stores we
	 find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
    }
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}
/* Compute jump functions for all edges - both direct and indirect - outgoing
   from BB.  */

static void
ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
{
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  int i;
  struct cgraph_edge *cs;

  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
    {
      struct cgraph_node *callee = cs->callee;

      if (callee)
	{
	  callee->ultimate_alias_target ();
	  /* We do not need to bother analyzing calls to unknown functions
	     unless they may become known during lto/whopr.  */
	  if (!callee->definition && !flag_lto)
	    continue;
	}
      ipa_compute_jump_functions_for_edge (fbi, cs);
    }
}
/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}
/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}
/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index,
		     gcall *stmt)
{
  struct cgraph_edge *cs;

  cs = node->get_edge (stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  cs->indirect_info->guaranteed_unmodified = 0;
  return cs;
}
1913 /* Analyze the CALL and examine uses of formal parameters of the caller
1914 FBI->node (described by FBI->info). Currently it checks
1916 whether the call calls a pointer that is a formal parameter and if so, the
1917 parameter is marked with the called flag and an indirect call graph edge
1918 describing the call is created. This is very simple for ordinary pointers
1919 represented in SSA but not-so-nice when it comes to member pointers. The
1920 ugly part of this function does nothing more than try to match the
1921 pattern of such a call. An example of such a pattern is the gimple dump
1922 below; the call is on the last line:
1924 <bb 2>:
1925 f$__delta_5 = f.__delta;
1926 f$__pfn_24 = f.__pfn;
1929 <bb 2>:
1930 f$__delta_5 = MEM[(struct *)&f];
1931 f$__pfn_24 = MEM[(struct *)&f + 4B];
1933 and a few lines below:
1935 <bb 5>
1936 D.2496_3 = (int) f$__pfn_24;
1937 D.2497_4 = D.2496_3 & 1;
1938 if (D.2497_4 != 0)
1939 goto <bb 3>;
1940 else
1941 goto <bb 4>;
1943 <bb 6>:
1944 D.2500_7 = (unsigned int) f$__delta_5;
1945 D.2501_8 = &S + D.2500_7;
1946 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1947 D.2503_10 = *D.2502_9;
1948 D.2504_12 = f$__pfn_24 + -1;
1949 D.2505_13 = (unsigned int) D.2504_12;
1950 D.2506_14 = D.2503_10 + D.2505_13;
1951 D.2507_15 = *D.2506_14;
1952 iftmp.11_16 = (String:: *) D.2507_15;
1954 <bb 7>:
1955 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1956 D.2500_19 = (unsigned int) f$__delta_5;
1957 D.2508_20 = &S + D.2500_19;
1958 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1960 Such patterns are results of simple calls to a member pointer:
1962 int doprinting (int (MyString::* f)(int) const)
1964 MyString S ("somestring");
1966 return (S.*f)(4);
1969 The function also looks for called pointers loaded from aggregates
1970 passed by value or reference. */
1972 static void
1973 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
1974 tree target)
1976 struct ipa_node_params *info = fbi->info;
1977 HOST_WIDE_INT offset;
1978 bool by_ref;
1980 if (SSA_NAME_IS_DEFAULT_DEF (target))
1982 tree var = SSA_NAME_VAR (target);
1983 int index = ipa_get_param_decl_index (info, var);
1984 if (index >= 0)
1985 ipa_note_param_call (fbi->node, index, call);
1986 return;
1989 int index;
1990 gimple *def = SSA_NAME_DEF_STMT (target);
1991 bool guaranteed_unmodified;
1992 if (gimple_assign_single_p (def)
1993 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
1994 gimple_assign_rhs1 (def), &index, &offset,
1995 NULL, &by_ref, &guaranteed_unmodified))
1997 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
1998 cs->indirect_info->offset = offset;
1999 cs->indirect_info->agg_contents = 1;
2000 cs->indirect_info->by_ref = by_ref;
2001 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
2002 return;
2005 /* Now we need to try to match the complex pattern of calling a member
2006 pointer. */
2007 if (gimple_code (def) != GIMPLE_PHI
2008 || gimple_phi_num_args (def) != 2
2009 || !POINTER_TYPE_P (TREE_TYPE (target))
2010 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2011 return;
2013 /* First, we need to check whether one of these is a load from a member
2014 pointer that is a parameter to this function. */
2015 tree n1 = PHI_ARG_DEF (def, 0);
2016 tree n2 = PHI_ARG_DEF (def, 1);
2017 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2018 return;
2019 gimple *d1 = SSA_NAME_DEF_STMT (n1);
2020 gimple *d2 = SSA_NAME_DEF_STMT (n2);
2022 tree rec;
2023 basic_block bb, virt_bb;
2024 basic_block join = gimple_bb (def);
2025 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2027 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2028 return;
2030 bb = EDGE_PRED (join, 0)->src;
2031 virt_bb = gimple_bb (d2);
2033 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2035 bb = EDGE_PRED (join, 1)->src;
2036 virt_bb = gimple_bb (d1);
2038 else
2039 return;
2041 /* Second, we need to check that the basic blocks are laid out in a way
2042 that corresponds to the pattern. */
2044 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2045 || single_pred (virt_bb) != bb
2046 || single_succ (virt_bb) != join)
2047 return;
2049 /* Third, let's see that the branching is done depending on the least
2050 significant bit of the pfn. */
2052 gimple *branch = last_stmt (bb);
2053 if (!branch || gimple_code (branch) != GIMPLE_COND)
2054 return;
2056 if ((gimple_cond_code (branch) != NE_EXPR
2057 && gimple_cond_code (branch) != EQ_EXPR)
2058 || !integer_zerop (gimple_cond_rhs (branch)))
2059 return;
2061 tree cond = gimple_cond_lhs (branch);
2062 if (!ipa_is_ssa_with_stmt_def (cond))
2063 return;
2065 def = SSA_NAME_DEF_STMT (cond);
2066 if (!is_gimple_assign (def)
2067 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2068 || !integer_onep (gimple_assign_rhs2 (def)))
2069 return;
2071 cond = gimple_assign_rhs1 (def);
2072 if (!ipa_is_ssa_with_stmt_def (cond))
2073 return;
2075 def = SSA_NAME_DEF_STMT (cond);
2077 if (is_gimple_assign (def)
2078 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2080 cond = gimple_assign_rhs1 (def);
2081 if (!ipa_is_ssa_with_stmt_def (cond))
2082 return;
2083 def = SSA_NAME_DEF_STMT (cond);
2086 tree rec2;
2087 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2088 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2089 == ptrmemfunc_vbit_in_delta),
2090 NULL);
2091 if (rec != rec2)
2092 return;
2094 index = ipa_get_param_decl_index (info, rec);
2095 if (index >= 0
2096 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2098 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2099 cs->indirect_info->offset = offset;
2100 cs->indirect_info->agg_contents = 1;
2101 cs->indirect_info->member_ptr = 1;
2102 cs->indirect_info->guaranteed_unmodified = 1;
2105 return;
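/* For illustration (hypothetical C source, not part of GCC), the two
   simple cases handled above correspond to code such as:

     int call_direct (int (*fn) (int))
     {
       return fn (7);      <- target is a default-def SSA name
     }

     struct cbs { int (*cb) (int); };
     int call_aggregate (struct cbs *c)
     {
       return c->cb (7);   <- target loaded from an aggregate passed
                              by reference
     }

   The member-pointer pattern matching handles the third, ugly case
   documented above. */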
2108 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2109 object referenced in the expression is a formal parameter of the caller
2110 FBI->node (described by FBI->info), create a call note for the
2111 statement. */
2113 static void
2114 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2115 gcall *call, tree target)
2117 tree obj = OBJ_TYPE_REF_OBJECT (target);
2118 int index;
2119 HOST_WIDE_INT anc_offset;
2121 if (!flag_devirtualize)
2122 return;
2124 if (TREE_CODE (obj) != SSA_NAME)
2125 return;
2127 struct ipa_node_params *info = fbi->info;
2128 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2130 struct ipa_jump_func jfunc;
2131 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2132 return;
2134 anc_offset = 0;
2135 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2136 gcc_assert (index >= 0);
2137 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2138 call, &jfunc))
2139 return;
2141 else
2143 struct ipa_jump_func jfunc;
2144 gimple *stmt = SSA_NAME_DEF_STMT (obj);
2145 tree expr;
2147 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2148 if (!expr)
2149 return;
2150 index = ipa_get_param_decl_index (info,
2151 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2152 gcc_assert (index >= 0);
2153 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2154 call, &jfunc, anc_offset))
2155 return;
2158 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2159 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2160 ii->offset = anc_offset;
2161 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2162 ii->otr_type = obj_type_ref_class (target);
2163 ii->polymorphic = 1;
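/* For illustration (hypothetical C++ source, not part of GCC), a call for
   which this function creates a note looks like:

     struct A { virtual int foo (int); };
     int call_virt (A *a) { return a->foo (3); }

   In gimple the call target is an OBJ_TYPE_REF whose object is the SSA
   name of the parameter A, which is what is examined above. */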
2166 /* Analyze the call statement CALL to see whether and how it utilizes formal
2167 parameters of the caller FBI->node (described by FBI->info). */
2170 static void
2171 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2173 tree target = gimple_call_fn (call);
2175 if (!target
2176 || (TREE_CODE (target) != SSA_NAME
2177 && !virtual_method_call_p (target)))
2178 return;
2180 struct cgraph_edge *cs = fbi->node->get_edge (call);
2181 /* If we previously turned the call into a direct call, there is
2182 no need to analyze it. */
2183 if (cs && !cs->indirect_unknown_callee)
2184 return;
2186 if (cs->indirect_info->polymorphic && flag_devirtualize)
2188 tree instance;
2189 tree target = gimple_call_fn (call);
2190 ipa_polymorphic_call_context context (current_function_decl,
2191 target, call, &instance);
2193 gcc_checking_assert (cs->indirect_info->otr_type
2194 == obj_type_ref_class (target));
2195 gcc_checking_assert (cs->indirect_info->otr_token
2196 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2198 cs->indirect_info->vptr_changed
2199 = !context.get_dynamic_type (instance,
2200 OBJ_TYPE_REF_OBJECT (target),
2201 obj_type_ref_class (target), call);
2202 cs->indirect_info->context = context;
2205 if (TREE_CODE (target) == SSA_NAME)
2206 ipa_analyze_indirect_call_uses (fbi, call, target);
2207 else if (virtual_method_call_p (target))
2208 ipa_analyze_virtual_call_uses (fbi, call, target);
2212 /* Analyze the statement STMT with respect to formal parameters (described
2213 in FBI->info) of the caller given by FBI->node. Currently it only checks
2214 whether formal parameters are called. */
2216 static void
2217 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2219 if (is_gimple_call (stmt))
2220 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2223 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2224 If OP is a parameter declaration, mark it as used in the info structure
2225 passed in DATA. */
2227 static bool
2228 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2230 struct ipa_node_params *info = (struct ipa_node_params *) data;
2232 op = get_base_address (op);
2233 if (op
2234 && TREE_CODE (op) == PARM_DECL)
2236 int index = ipa_get_param_decl_index (info, op);
2237 gcc_assert (index >= 0);
2238 ipa_set_param_used (info, index, true);
2241 return false;
2244 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2245 the findings in various structures of the associated ipa_node_params
2246 structure, such as parameter flags, notes etc. FBI holds various data about
2247 the function being analyzed. */
2249 static void
2250 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2252 gimple_stmt_iterator gsi;
2253 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2255 gimple *stmt = gsi_stmt (gsi);
2257 if (is_gimple_debug (stmt))
2258 continue;
2260 ipa_analyze_stmt_uses (fbi, stmt);
2261 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2262 visit_ref_for_mod_analysis,
2263 visit_ref_for_mod_analysis,
2264 visit_ref_for_mod_analysis);
2266 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2267 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2268 visit_ref_for_mod_analysis,
2269 visit_ref_for_mod_analysis,
2270 visit_ref_for_mod_analysis);
2273 /* Calculate controlled uses of parameters of NODE. */
2275 static void
2276 ipa_analyze_controlled_uses (struct cgraph_node *node)
2278 struct ipa_node_params *info = IPA_NODE_REF (node);
2280 for (int i = 0; i < ipa_get_param_count (info); i++)
2282 tree parm = ipa_get_param (info, i);
2283 int controlled_uses = 0;
2285 /* For SSA regs see if parameter is used. For non-SSA we compute
2286 the flag during modification analysis. */
2287 if (is_gimple_reg (parm))
2289 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2290 parm);
2291 if (ddef && !has_zero_uses (ddef))
2293 imm_use_iterator imm_iter;
2294 use_operand_p use_p;
2296 ipa_set_param_used (info, i, true);
2297 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2298 if (!is_gimple_call (USE_STMT (use_p)))
2300 if (!is_gimple_debug (USE_STMT (use_p)))
2302 controlled_uses = IPA_UNDESCRIBED_USE;
2303 break;
2306 else
2307 controlled_uses++;
2309 else
2310 controlled_uses = 0;
2312 else
2313 controlled_uses = IPA_UNDESCRIBED_USE;
2314 ipa_set_controlled_uses (info, i, controlled_uses);
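/* For intuition (hypothetical C source, not part of GCC): given

     extern int g (int *);
     static int described (int *p) { return g (p) + g (p); }
     static int escaped (int *p) { int *q = p + 1; return g (q); }

   the parameter P of DESCRIBED ends up with two controlled uses (every
   use is directly an argument of a call), whereas in ESCAPED the pointer
   arithmetic is a non-call use and P gets IPA_UNDESCRIBED_USE. */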
2318 /* Free stuff in BI. */
2320 static void
2321 free_ipa_bb_info (struct ipa_bb_info *bi)
2323 bi->cg_edges.release ();
2324 bi->param_aa_statuses.release ();
2327 /* Dominator walker driving the analysis. */
2329 class analysis_dom_walker : public dom_walker
2331 public:
2332 analysis_dom_walker (struct ipa_func_body_info *fbi)
2333 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2335 virtual edge before_dom_children (basic_block);
2337 private:
2338 struct ipa_func_body_info *m_fbi;
2341 edge
2342 analysis_dom_walker::before_dom_children (basic_block bb)
2344 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2345 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2346 return NULL;
2349 /* Release body info FBI. */
2351 void
2352 ipa_release_body_info (struct ipa_func_body_info *fbi)
2354 int i;
2355 struct ipa_bb_info *bi;
2357 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2358 free_ipa_bb_info (bi);
2359 fbi->bb_infos.release ();
2362 /* Initialize the array describing properties of formal parameters
2363 of NODE, analyze their uses and compute jump functions associated
2364 with actual arguments of calls from within NODE. */
2366 void
2367 ipa_analyze_node (struct cgraph_node *node)
2369 struct ipa_func_body_info fbi;
2370 struct ipa_node_params *info;
2372 ipa_check_create_node_params ();
2373 ipa_check_create_edge_args ();
2374 info = IPA_NODE_REF (node);
2376 if (info->analysis_done)
2377 return;
2378 info->analysis_done = 1;
2380 if (ipa_func_spec_opts_forbid_analysis_p (node))
2382 for (int i = 0; i < ipa_get_param_count (info); i++)
2384 ipa_set_param_used (info, i, true);
2385 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2387 return;
2390 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2391 push_cfun (func);
2392 calculate_dominance_info (CDI_DOMINATORS);
2393 ipa_initialize_node_params (node);
2394 ipa_analyze_controlled_uses (node);
2396 fbi.node = node;
2397 fbi.info = IPA_NODE_REF (node);
2398 fbi.bb_infos = vNULL;
2399 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2400 fbi.param_count = ipa_get_param_count (info);
2401 fbi.aa_walked = 0;
2403 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2405 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2406 bi->cg_edges.safe_push (cs);
2409 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2411 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2412 bi->cg_edges.safe_push (cs);
2415 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2417 ipa_release_body_info (&fbi);
2418 free_dominance_info (CDI_DOMINATORS);
2419 pop_cfun ();
2422 /* Update the jump functions associated with call graph edge E when the call
2423 graph edge CS is being inlined, assuming that E->caller is already (possibly
2424 indirectly) inlined into CS->callee and that E has not been inlined. */
2426 static void
2427 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2428 struct cgraph_edge *e)
2430 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2431 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2432 int count = ipa_get_cs_argument_count (args);
2433 int i;
2435 for (i = 0; i < count; i++)
2437 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2438 struct ipa_polymorphic_call_context *dst_ctx
2439 = ipa_get_ith_polymorhic_call_context (args, i);
2441 if (dst->type == IPA_JF_ANCESTOR)
2443 struct ipa_jump_func *src;
2444 int dst_fid = dst->value.ancestor.formal_id;
2445 struct ipa_polymorphic_call_context *src_ctx
2446 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2448 /* A variable number of arguments can cause havoc if we try to access
2449 an argument that does not exist on the inlined edge. So make sure
2450 we don't. */
2451 if (dst_fid >= ipa_get_cs_argument_count (top))
2453 ipa_set_jf_unknown (dst);
2454 continue;
2457 src = ipa_get_ith_jump_func (top, dst_fid);
2459 if (src_ctx && !src_ctx->useless_p ())
2461 struct ipa_polymorphic_call_context ctx = *src_ctx;
2463 /* TODO: Make type preserved safe WRT contexts. */
2464 if (!ipa_get_jf_ancestor_type_preserved (dst))
2465 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2466 ctx.offset_by (dst->value.ancestor.offset);
2467 if (!ctx.useless_p ())
2469 if (!dst_ctx)
2471 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2472 count);
2473 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2476 dst_ctx->combine_with (ctx);
2480 if (src->agg.items
2481 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2483 struct ipa_agg_jf_item *item;
2484 int j;
2486 /* Currently we do not produce clobber aggregate jump functions;
2487 replace this with merging when we do. */
2488 gcc_assert (!dst->agg.items);
2490 dst->agg.items = vec_safe_copy (src->agg.items);
2491 dst->agg.by_ref = src->agg.by_ref;
2492 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2493 item->offset -= dst->value.ancestor.offset;
2496 if (src->type == IPA_JF_PASS_THROUGH
2497 && src->value.pass_through.operation == NOP_EXPR)
2499 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2500 dst->value.ancestor.agg_preserved &=
2501 src->value.pass_through.agg_preserved;
2503 else if (src->type == IPA_JF_ANCESTOR)
2505 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2506 dst->value.ancestor.offset += src->value.ancestor.offset;
2507 dst->value.ancestor.agg_preserved &=
2508 src->value.ancestor.agg_preserved;
2510 else
2511 ipa_set_jf_unknown (dst);
2513 else if (dst->type == IPA_JF_PASS_THROUGH)
2515 struct ipa_jump_func *src;
2516 /* We must check the range because of calls with a variable number of
2517 arguments, and we cannot combine jump functions with operations. */
2518 if (dst->value.pass_through.operation == NOP_EXPR
2519 && (dst->value.pass_through.formal_id
2520 < ipa_get_cs_argument_count (top)))
2522 int dst_fid = dst->value.pass_through.formal_id;
2523 src = ipa_get_ith_jump_func (top, dst_fid);
2524 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2525 struct ipa_polymorphic_call_context *src_ctx
2526 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2528 if (src_ctx && !src_ctx->useless_p ())
2530 struct ipa_polymorphic_call_context ctx = *src_ctx;
2532 /* TODO: Make type preserved safe WRT contexts. */
2533 if (!ipa_get_jf_pass_through_type_preserved (dst))
2534 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2535 if (!ctx.useless_p ())
2537 if (!dst_ctx)
2539 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2540 count);
2541 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2543 dst_ctx->combine_with (ctx);
2546 switch (src->type)
2548 case IPA_JF_UNKNOWN:
2549 ipa_set_jf_unknown (dst);
2550 break;
2551 case IPA_JF_CONST:
2552 ipa_set_jf_cst_copy (dst, src);
2553 break;
2555 case IPA_JF_PASS_THROUGH:
2557 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2558 enum tree_code operation;
2559 operation = ipa_get_jf_pass_through_operation (src);
2561 if (operation == NOP_EXPR)
2563 bool agg_p;
2564 agg_p = dst_agg_p
2565 && ipa_get_jf_pass_through_agg_preserved (src);
2566 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2568 else
2570 tree operand = ipa_get_jf_pass_through_operand (src);
2571 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2572 operation);
2574 break;
2576 case IPA_JF_ANCESTOR:
2578 bool agg_p;
2579 agg_p = dst_agg_p
2580 && ipa_get_jf_ancestor_agg_preserved (src);
2581 ipa_set_ancestor_jf (dst,
2582 ipa_get_jf_ancestor_offset (src),
2583 ipa_get_jf_ancestor_formal_id (src),
2584 agg_p);
2585 break;
2587 default:
2588 gcc_unreachable ();
2591 if (src->agg.items
2592 && (dst_agg_p || !src->agg.by_ref))
2594 /* Currently we do not produce clobber aggregate jump
2595 functions; replace this with merging when we do. */
2596 gcc_assert (!dst->agg.items);
2598 dst->agg.by_ref = src->agg.by_ref;
2599 dst->agg.items = vec_safe_copy (src->agg.items);
2602 else
2603 ipa_set_jf_unknown (dst);
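/* A small worked example (hypothetical source, not part of GCC) of the
   pass-through composition performed above:

     static int b (int y) { return use_it (y); }
     static int a (int x) { return b (x); }

   The edge from B to USE_IT carries a simple pass-through of B's
   parameter 0, and the edge CS from A to B passes A's parameter 0 through
   unchanged, so after inlining CS the outgoing jump function becomes a
   simple pass-through of A's parameter 0. */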
2608 /* If TARGET is an addr_expr of a function declaration, make it the
2609 (speculative) destination of an indirect edge IE and return the edge.
2610 Otherwise, return NULL. */
2612 struct cgraph_edge *
2613 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2614 bool speculative)
2616 struct cgraph_node *callee;
2617 struct inline_edge_summary *es = inline_edge_summary (ie);
2618 bool unreachable = false;
2620 if (TREE_CODE (target) == ADDR_EXPR)
2621 target = TREE_OPERAND (target, 0);
2622 if (TREE_CODE (target) != FUNCTION_DECL)
2624 target = canonicalize_constructor_val (target, NULL);
2625 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2627 /* Member pointer call that goes through a VMT lookup. */
2628 if (ie->indirect_info->member_ptr
2629 /* Or if target is not an invariant expression and we do not
2630 know whether it will evaluate to a function at runtime.
2631 This can happen when folding through &VAR, where &VAR
2632 is IP invariant, but VAR itself is not.
2634 TODO: Revisit this when GCC 5 is branched. It seems that
2635 the member_ptr check is not needed and that we could try to fold
2636 the expression and see if VAR is readonly. */
2637 || !is_gimple_ip_invariant (target))
2639 if (dump_enabled_p ())
2641 location_t loc = gimple_location_safe (ie->call_stmt);
2642 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2643 "discovered direct call non-invariant "
2644 "%s/%i\n",
2645 ie->caller->name (), ie->caller->order);
2647 return NULL;
2651 if (dump_enabled_p ())
2653 location_t loc = gimple_location_safe (ie->call_stmt);
2654 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2655 "discovered direct call to non-function in %s/%i, "
2656 "making it __builtin_unreachable\n",
2657 ie->caller->name (), ie->caller->order);
2660 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2661 callee = cgraph_node::get_create (target);
2662 unreachable = true;
2664 else
2665 callee = cgraph_node::get (target);
2667 else
2668 callee = cgraph_node::get (target);
2670 /* Because may-edges are not explicitly represented and the vtable may be
2671 external, we may create the first reference to the object in the unit. */
2672 if (!callee || callee->global.inlined_to)
2675 /* We had better make sure we can refer to it.
2676 In the case of static functions we are out of luck, since we already
2677 removed its body. In the case of public functions we may or may
2678 not introduce the reference. */
2679 if (!canonicalize_constructor_val (target, NULL)
2680 || !TREE_PUBLIC (target))
2682 if (dump_file)
2683 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2684 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2685 xstrdup_for_dump (ie->caller->name ()),
2686 ie->caller->order,
2687 xstrdup_for_dump (ie->callee->name ()),
2688 ie->callee->order);
2689 return NULL;
2691 callee = cgraph_node::get_create (target);
2694 /* If the edge is already speculative, check whether it agrees with the new target. */
2695 if (speculative && ie->speculative)
2697 struct cgraph_edge *e2;
2698 struct ipa_ref *ref;
2699 ie->speculative_call_info (e2, ie, ref);
2700 if (e2->callee->ultimate_alias_target ()
2701 != callee->ultimate_alias_target ())
2703 if (dump_file)
2704 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2705 "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
2706 xstrdup_for_dump (ie->caller->name ()),
2707 ie->caller->order,
2708 xstrdup_for_dump (callee->name ()),
2709 callee->order,
2710 xstrdup_for_dump (e2->callee->name ()),
2711 e2->callee->order);
2713 else
2715 if (dump_file)
2716 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2717 "(%s/%i -> %s/%i) this agree with previous speculation.\n",
2718 xstrdup_for_dump (ie->caller->name ()),
2719 ie->caller->order,
2720 xstrdup_for_dump (callee->name ()),
2721 callee->order);
2723 return NULL;
2726 if (!dbg_cnt (devirt))
2727 return NULL;
2729 ipa_check_create_node_params ();
2731 /* We cannot make edges to inline clones. It is a bug if someone removed
2732 the cgraph node too early. */
2733 gcc_assert (!callee->global.inlined_to);
2735 if (dump_file && !unreachable)
2737 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2738 "(%s/%i -> %s/%i), for stmt ",
2739 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2740 speculative ? "speculative" : "known",
2741 xstrdup_for_dump (ie->caller->name ()),
2742 ie->caller->order,
2743 xstrdup_for_dump (callee->name ()),
2744 callee->order);
2745 if (ie->call_stmt)
2746 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2747 else
2748 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2750 if (dump_enabled_p ())
2752 location_t loc = gimple_location_safe (ie->call_stmt);
2754 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2755 "converting indirect call in %s to direct call to %s\n",
2756 ie->caller->name (), callee->name ());
2758 if (!speculative)
2760 struct cgraph_edge *orig = ie;
2761 ie = ie->make_direct (callee);
2762 /* If we resolved a speculative edge, the cost is already up to date
2763 for the direct call (adjusted by inline_edge_duplication_hook). */
2764 if (ie == orig)
2766 es = inline_edge_summary (ie);
2767 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2768 - eni_size_weights.call_cost);
2769 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2770 - eni_time_weights.call_cost);
2773 else
2775 if (!callee->can_be_discarded_p ())
2777 cgraph_node *alias;
2778 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2779 if (alias)
2780 callee = alias;
2782 /* make_speculative will update IE's cost to that of a direct call. */
2783 ie = ie->make_speculative
2784 (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
2787 return ie;
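/* For intuition (illustrative numbers only): with ie->count == 1000 and
   ie->frequency == 100, the speculative branch above creates the direct
   edge with count 800 and frequency 80, i.e. the 8/10 scaling assumes the
   speculated target is taken roughly 80% of the time. */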
2790 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
2791 CONSTRUCTOR and return it. Return NULL if the search is unsuccessful. */
2794 static tree
2795 find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
2797 tree type = TREE_TYPE (constructor);
2798 if (TREE_CODE (type) != ARRAY_TYPE
2799 && TREE_CODE (type) != RECORD_TYPE)
2800 return NULL;
2802 unsigned ix;
2803 tree index, val;
2804 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
2806 HOST_WIDE_INT elt_offset;
2807 if (TREE_CODE (type) == ARRAY_TYPE)
2809 offset_int off;
2810 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
2811 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
2813 if (index)
2815 off = wi::to_offset (index);
2816 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
2818 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
2819 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
2820 off = wi::sext (off - wi::to_offset (low_bound),
2821 TYPE_PRECISION (TREE_TYPE (index)));
2823 off *= wi::to_offset (unit_size);
2825 else
2826 off = wi::to_offset (unit_size) * ix;
2828 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
2829 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
2830 continue;
2831 elt_offset = off.to_shwi ();
2833 else if (TREE_CODE (type) == RECORD_TYPE)
2835 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
2836 if (DECL_BIT_FIELD (index))
2837 continue;
2838 elt_offset = int_bit_position (index);
2840 else
2841 gcc_unreachable ();
2843 if (elt_offset > req_offset)
2844 return NULL;
2846 if (TREE_CODE (val) == CONSTRUCTOR)
2847 return find_constructor_constant_at_offset (val,
2848 req_offset - elt_offset);
2850 if (elt_offset == req_offset
2851 && is_gimple_reg_type (TREE_TYPE (val))
2852 && is_gimple_ip_invariant (val))
2853 return val;
2855 return NULL;
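/* A worked example (hypothetical initializer, not part of GCC; bit
   offsets assume a 32-bit int):

     struct S { int a; int b[2]; };
     static const struct S s = { 1, { 2, 3 } };

   Looking up REQ_OFFSET 0 in the constructor of S yields 1; REQ_OFFSET 32
   recurses into the array constructor with remaining offset 0 and yields
   2; REQ_OFFSET 64 yields 3. */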
2858 /* Check whether SCALAR could be used to look up an aggregate interprocedural
2859 invariant from a static constructor and if so, return it. Otherwise return
2860 NULL. */
2862 static tree
2863 ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
2865 if (by_ref)
2867 if (TREE_CODE (scalar) != ADDR_EXPR)
2868 return NULL;
2869 scalar = TREE_OPERAND (scalar, 0);
2872 if (TREE_CODE (scalar) != VAR_DECL
2873 || !is_global_var (scalar)
2874 || !TREE_READONLY (scalar)
2875 || !DECL_INITIAL (scalar)
2876 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
2877 return NULL;
2879 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
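/* Sketch (hypothetical declarations, not part of GCC; bit offsets assume
   a 32-bit int): given

     static const int tab[2] = { 10, 20 };

   a lookup with BY_REF set expects SCALAR to be &tab and returns 20 for
   OFFSET 32, whereas with BY_REF clear SCALAR must be the VAR_DECL
   itself. */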
2882 /* Retrieve value from aggregate jump function AGG or static initializer of
2883 SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
2884 none. BY_REF specifies whether the value has to be passed by reference or
2885 by value. If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
2886 to is set to true if the value comes from an initializer of a constant. */
2888 tree
2889 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar,
2890 HOST_WIDE_INT offset, bool by_ref,
2891 bool *from_global_constant)
2893 struct ipa_agg_jf_item *item;
2894 int i;
2896 if (scalar)
2898 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
2899 if (res)
2901 if (from_global_constant)
2902 *from_global_constant = true;
2903 return res;
2907 if (!agg
2908 || by_ref != agg->by_ref)
2909 return NULL;
2911 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2912 if (item->offset == offset)
2914 /* Currently we do not have clobber values; return NULL for them once
2915 we do. */
2916 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2917 if (from_global_constant)
2918 *from_global_constant = false;
2919 return item->value;
2921 return NULL;
2924 /* Remove a reference to SYMBOL from the list of references of a node given by
2925 reference description RDESC. Return true if the reference has been
2926 successfully found and removed. */
2928 static bool
2929 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2931 struct ipa_ref *to_del;
2932 struct cgraph_edge *origin;
2934 origin = rdesc->cs;
2935 if (!origin)
2936 return false;
2937 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
2938 origin->lto_stmt_uid);
2939 if (!to_del)
2940 return false;
2942 to_del->remove_reference ();
2943 if (dump_file)
2944 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2945 xstrdup_for_dump (origin->caller->name ()),
2946 origin->caller->order, xstrdup_for_dump (symbol->name ()));
2947 return true;
2950 /* If JFUNC has a reference description with refcount different from
2951 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2952 NULL. JFUNC must be a constant jump function. */
2954 static struct ipa_cst_ref_desc *
2955 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2957 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2958 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2959 return rdesc;
2960 else
2961 return NULL;
2964 /* If the value of constant jump function JFUNC is an address of a function
2965 declaration, return the associated call graph node. Otherwise return
2966 NULL. */
2968 static cgraph_node *
2969 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2971 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2972 tree cst = ipa_get_jf_constant (jfunc);
2973 if (TREE_CODE (cst) != ADDR_EXPR
2974 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2975 return NULL;
2977 return cgraph_node::get (TREE_OPERAND (cst, 0));
2981 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2982 refcount and, if it hits zero, remove the reference to the symbol it
2983 describes from the caller of the edge specified in the rdesc. Return
2984 false if either the symbol or the reference could not be found; otherwise return true. */
2986 static bool
2987 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2989 struct ipa_cst_ref_desc *rdesc;
2990 if (jfunc->type == IPA_JF_CONST
2991 && (rdesc = jfunc_rdesc_usable (jfunc))
2992 && --rdesc->refcount == 0)
2994 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
2995 if (!symbol)
2996 return false;
2998 return remove_described_reference (symbol, rdesc);
3000 return true;
3003 /* Try to find a destination for indirect edge IE that corresponds to a simple
3004 call or a call of a member function pointer and where the destination is a
3005 pointer formal parameter described by jump function JFUNC. If it can be
3006 determined, return the newly direct edge, otherwise return NULL.
3007 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
3009 static struct cgraph_edge *
3010 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3011 struct ipa_jump_func *jfunc,
3012 struct ipa_node_params *new_root_info)
3014 struct cgraph_edge *cs;
3015 tree target;
3016 bool agg_contents = ie->indirect_info->agg_contents;
3017 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc);
3018 if (agg_contents)
3020 bool from_global_constant;
3021 target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3022 ie->indirect_info->offset,
3023 ie->indirect_info->by_ref,
3024 &from_global_constant);
3025 if (target
3026 && !from_global_constant
3027 && !ie->indirect_info->guaranteed_unmodified)
3028 return NULL;
3030 else
3031 target = scalar;
3032 if (!target)
3033 return NULL;
3034 cs = ipa_make_edge_direct_to_target (ie, target);
3036 if (cs && !agg_contents)
3038 bool ok;
3039 gcc_checking_assert (cs->callee
3040 && (cs != ie
3041 || jfunc->type != IPA_JF_CONST
3042 || !cgraph_node_for_jfunc (jfunc)
3043 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3044 ok = try_decrement_rdesc_refcount (jfunc);
3045 gcc_checking_assert (ok);
3048 return cs;
3051 /* Return the target to be used in cases of impossible devirtualization. IE
3052 and target (the latter can be NULL) are dumped when dumping is enabled. */
3054 tree
3055 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3057 if (dump_file)
3059 if (target)
3060 fprintf (dump_file,
3061 "Type inconsistent devirtualization: %s/%i->%s\n",
3062 ie->caller->name (), ie->caller->order,
3063 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3064 else
3065 fprintf (dump_file,
3066 "No devirtualization target in %s/%i\n",
3067 ie->caller->name (), ie->caller->order);
3069 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3070 cgraph_node::get_create (new_target);
3071 return new_target;
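/* For illustration (hypothetical C++ source, not part of GCC): if
   whole-program analysis proves that the set of possible targets of

     struct B { virtual void f (); };
     void g (B *b) { b->f (); }

   is final and empty, the call can never be executed with a valid object,
   so callers of this helper redirect it to __builtin_unreachable (see the
   final && targets.length () == 0 case below). */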
3074 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3075 call based on a formal parameter which is described by jump function JFUNC
3076 and if it can be determined, make it direct and return the direct edge.
3077 Otherwise, return NULL. CTX describes the polymorphic context that the
3078 parameter the call is based on brings along with it. */
3080 static struct cgraph_edge *
3081 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3082 struct ipa_jump_func *jfunc,
3083 struct ipa_polymorphic_call_context ctx)
3085 tree target = NULL;
3086 bool speculative = false;
3088 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
3089 return NULL;
3091 gcc_assert (!ie->indirect_info->by_ref);
3093 /* Try to do lookup via known virtual table pointer value. */
3094 if (!ie->indirect_info->vptr_changed
3095 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
3097 tree vtable;
3098 unsigned HOST_WIDE_INT offset;
3099 tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3100 : NULL;
3101 tree t = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3102 ie->indirect_info->offset,
3103 true);
3104 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3106 bool can_refer;
3107 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3108 vtable, offset, &can_refer);
3109 if (can_refer)
3111 if (!t
3112 || (TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
3113 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
3114 || !possible_polymorphic_call_target_p
3115 (ie, cgraph_node::get (t)))
3117 /* Do not speculate to builtin_unreachable; it is pointless. */
3118 if (!ie->indirect_info->vptr_changed)
3119 target = ipa_impossible_devirt_target (ie, target);
3120 else
3121 target = NULL;
3123 else
3125 target = t;
3126 speculative = ie->indirect_info->vptr_changed;
3132 ipa_polymorphic_call_context ie_context (ie);
3133 vec <cgraph_node *>targets;
3134 bool final;
3136 ctx.offset_by (ie->indirect_info->offset);
3137 if (ie->indirect_info->vptr_changed)
3138 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3139 ie->indirect_info->otr_type);
3140 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3141 targets = possible_polymorphic_call_targets
3142 (ie->indirect_info->otr_type,
3143 ie->indirect_info->otr_token,
3144 ctx, &final);
3145 if (final && targets.length () <= 1)
3147 speculative = false;
3148 if (targets.length () == 1)
3149 target = targets[0]->decl;
3150 else
3151 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3153 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3154 && !ie->speculative && ie->maybe_hot_p ())
3156 cgraph_node *n;
3157 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3158 ie->indirect_info->otr_token,
3159 ie->indirect_info->context);
3160 if (n)
3162 target = n->decl;
3163 speculative = true;
3167 if (target)
3169 if (!possible_polymorphic_call_target_p
3170 (ie, cgraph_node::get_create (target)))
3172 if (speculative)
3173 return NULL;
3174 target = ipa_impossible_devirt_target (ie, target);
3176 return ipa_make_edge_direct_to_target (ie, target, speculative);
3178 else
3179 return NULL;
3182 /* Update the param called notes associated with NODE when CS is being inlined,
3183 assuming NODE is (potentially indirectly) inlined into CS->callee.
3184 Moreover, if the callee is discovered to be constant, create a new cgraph
3185 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3186 unless NEW_EDGES is NULL. Return true iff any new edges were created. */
3188 static bool
3189 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3190 struct cgraph_node *node,
3191 vec<cgraph_edge *> *new_edges)
3193 struct ipa_edge_args *top;
3194 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3195 struct ipa_node_params *new_root_info;
3196 bool res = false;
3198 ipa_check_create_edge_args ();
3199 top = IPA_EDGE_REF (cs);
3200 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3201 ? cs->caller->global.inlined_to
3202 : cs->caller);
3204 for (ie = node->indirect_calls; ie; ie = next_ie)
3206 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3207 struct ipa_jump_func *jfunc;
3208 int param_index;
3209 cgraph_node *spec_target = NULL;
3211 next_ie = ie->next_callee;
3213 if (ici->param_index == -1)
3214 continue;
3216 /* We must check the range because of calls with a variable number of arguments: */
3217 if (ici->param_index >= ipa_get_cs_argument_count (top))
3219 ici->param_index = -1;
3220 continue;
3223 param_index = ici->param_index;
3224 jfunc = ipa_get_ith_jump_func (top, param_index);
3226 if (ie->speculative)
3228 struct cgraph_edge *de;
3229 struct ipa_ref *ref;
3230 ie->speculative_call_info (de, ie, ref);
3231 spec_target = de->callee;
3234 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3235 new_direct_edge = NULL;
3236 else if (ici->polymorphic)
3238 ipa_polymorphic_call_context ctx;
3239 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3240 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3242 else
3243 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3244 new_root_info);
3245 /* If speculation was removed, then we need to do nothing. */
3246 if (new_direct_edge && new_direct_edge != ie
3247 && new_direct_edge->callee == spec_target)
3249 new_direct_edge->indirect_inlining_edge = 1;
3250 top = IPA_EDGE_REF (cs);
3251 res = true;
3252 if (!new_direct_edge->speculative)
3253 continue;
3255 else if (new_direct_edge)
3257 new_direct_edge->indirect_inlining_edge = 1;
3258 if (new_direct_edge->call_stmt)
3259 new_direct_edge->call_stmt_cannot_inline_p
3260 = !gimple_check_call_matching_types (
3261 new_direct_edge->call_stmt,
3262 new_direct_edge->callee->decl, false);
3263 if (new_edges)
3265 new_edges->safe_push (new_direct_edge);
3266 res = true;
3268 top = IPA_EDGE_REF (cs);
3269 /* If a speculative edge was introduced, we still need to update
3270 the call info of the indirect edge. */
3271 if (!new_direct_edge->speculative)
3272 continue;
3274 if (jfunc->type == IPA_JF_PASS_THROUGH
3275 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3277 if (ici->agg_contents
3278 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3279 && !ici->polymorphic)
3280 ici->param_index = -1;
3281 else
3283 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3284 if (ici->polymorphic
3285 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3286 ici->vptr_changed = true;
3289 else if (jfunc->type == IPA_JF_ANCESTOR)
3291 if (ici->agg_contents
3292 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3293 && !ici->polymorphic)
3294 ici->param_index = -1;
3295 else
3297 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3298 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3299 if (ici->polymorphic
3300 && !ipa_get_jf_ancestor_type_preserved (jfunc))
3301 ici->vptr_changed = true;
3304 else
3305 /* Either we can find a destination for this edge now, or we never will. */
3306 ici->param_index = -1;
3309 return res;
3312 /* Recursively traverse subtree of NODE (including node) made of inlined
3313 cgraph_edges when CS has been inlined and invoke
3314 update_indirect_edges_after_inlining on all nodes and
3315 update_jump_functions_after_inlining on all non-inlined edges that lead out
3316 of this subtree. Newly discovered indirect edges will be added to
3317 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
3318 created. */
3320 static bool
3321 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3322 struct cgraph_node *node,
3323 vec<cgraph_edge *> *new_edges)
3325 struct cgraph_edge *e;
3326 bool res;
3328 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3330 for (e = node->callees; e; e = e->next_callee)
3331 if (!e->inline_failed)
3332 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3333 else
3334 update_jump_functions_after_inlining (cs, e);
3335 for (e = node->indirect_calls; e; e = e->next_callee)
3336 update_jump_functions_after_inlining (cs, e);
3338 return res;
3341 /* Combine two controlled uses counts as done during inlining. */
3343 static int
3344 combine_controlled_uses_counters (int c, int d)
3346 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3347 return IPA_UNDESCRIBED_USE;
3348 else
3349 return c + d - 1;
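/* A worked example: if the new root had c == 3 controlled uses of a
   parameter, one of which was the call site just inlined, and the inlined
   callee had d == 2 controlled uses of the value it received, then after
   inlining the call-site use disappears and the callee's uses take its
   place, giving 3 + 2 - 1 == 4 controlled uses in the combined body. */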
3352 /* Propagate the number of controlled uses from CS->callee to the new root
3353 of the tree of inlined nodes. */
3355 static void
3356 propagate_controlled_uses (struct cgraph_edge *cs)
3358 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3359 struct cgraph_node *new_root = cs->caller->global.inlined_to
3360 ? cs->caller->global.inlined_to : cs->caller;
3361 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3362 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3363 int count, i;
3365 count = MIN (ipa_get_cs_argument_count (args),
3366 ipa_get_param_count (old_root_info));
3367 for (i = 0; i < count; i++)
3369 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3370 struct ipa_cst_ref_desc *rdesc;
3372 if (jf->type == IPA_JF_PASS_THROUGH)
3374 int src_idx, c, d;
3375 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3376 c = ipa_get_controlled_uses (new_root_info, src_idx);
3377 d = ipa_get_controlled_uses (old_root_info, i);
3379 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3380 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3381 c = combine_controlled_uses_counters (c, d);
3382 ipa_set_controlled_uses (new_root_info, src_idx, c);
3383 if (c == 0 && new_root_info->ipcp_orig_node)
3385 struct cgraph_node *n;
3386 struct ipa_ref *ref;
3387 tree t = new_root_info->known_csts[src_idx];
3389 if (t && TREE_CODE (t) == ADDR_EXPR
3390 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3391 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3392 && (ref = new_root->find_reference (n, NULL, 0)))
3394 if (dump_file)
3395 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3396 "reference from %s/%i to %s/%i.\n",
3397 xstrdup_for_dump (new_root->name ()),
3398 new_root->order,
3399 xstrdup_for_dump (n->name ()), n->order);
3400 ref->remove_reference ();
3404 else if (jf->type == IPA_JF_CONST
3405 && (rdesc = jfunc_rdesc_usable (jf)))
3407 int d = ipa_get_controlled_uses (old_root_info, i);
3408 int c = rdesc->refcount;
3409 rdesc->refcount = combine_controlled_uses_counters (c, d);
3410 if (rdesc->refcount == 0)
3412 tree cst = ipa_get_jf_constant (jf);
3413 struct cgraph_node *n;
3414 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3415 && TREE_CODE (TREE_OPERAND (cst, 0))
3416 == FUNCTION_DECL);
3417 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3418 if (n)
3420 struct cgraph_node *clone;
3421 bool ok;
3422 ok = remove_described_reference (n, rdesc);
3423 gcc_checking_assert (ok);
3425 clone = cs->caller;
3426 while (clone->global.inlined_to
3427 && clone != rdesc->cs->caller
3428 && IPA_NODE_REF (clone)->ipcp_orig_node)
3430 struct ipa_ref *ref;
3431 ref = clone->find_reference (n, NULL, 0);
3432 if (ref)
3434 if (dump_file)
3435 fprintf (dump_file, "ipa-prop: Removing "
3436 "cloning-created reference "
3437 "from %s/%i to %s/%i.\n",
3438 xstrdup_for_dump (clone->name ()),
3439 clone->order,
3440 xstrdup_for_dump (n->name ()),
3441 n->order);
3442 ref->remove_reference ();
3444 clone = clone->callers->caller;
3451 for (i = ipa_get_param_count (old_root_info);
3452 i < ipa_get_cs_argument_count (args);
3453 i++)
3455 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3457 if (jf->type == IPA_JF_CONST)
3459 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3460 if (rdesc)
3461 rdesc->refcount = IPA_UNDESCRIBED_USE;
3463 else if (jf->type == IPA_JF_PASS_THROUGH)
3464 ipa_set_controlled_uses (new_root_info,
3465 jf->value.pass_through.formal_id,
3466 IPA_UNDESCRIBED_USE);
3470 /* Update jump functions and call note functions on inlining the call site CS.
3471 CS is expected to lead to a node already cloned by
3472 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3473 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
3474 created. */
3476 bool
3477 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3478 vec<cgraph_edge *> *new_edges)
3480 bool changed;
3481 /* Do nothing if the preparation phase has not been carried out yet
3482 (i.e. during early inlining). */
3483 if (!ipa_node_params_sum)
3484 return false;
3485 gcc_assert (ipa_edge_args_vector);
3487 propagate_controlled_uses (cs);
3488 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3490 return changed;
3493 /* Frees all dynamically allocated structures that the argument info points
3494 to. */
3496 void
3497 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3499 vec_free (args->jump_functions);
3500 memset (args, 0, sizeof (*args));
3503 /* Free all ipa_edge structures. */
3505 void
3506 ipa_free_all_edge_args (void)
3508 int i;
3509 struct ipa_edge_args *args;
3511 if (!ipa_edge_args_vector)
3512 return;
3514 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3515 ipa_free_edge_args_substructures (args);
3517 vec_free (ipa_edge_args_vector);
3520 /* Frees all dynamically allocated structures that the param info points
3521 to. */
3523 ipa_node_params::~ipa_node_params ()
3525 descriptors.release ();
3526 free (lattices);
3527 /* Lattice values and their sources are deallocated with their allocation
3528 pool. */
3529 known_csts.release ();
3530 known_contexts.release ();
3532 lattices = NULL;
3533 ipcp_orig_node = NULL;
3534 analysis_done = 0;
3535 node_enqueued = 0;
3536 do_clone_for_all_contexts = 0;
3537 is_all_contexts_clone = 0;
3538 node_dead = 0;
3541 /* Free all ipa_node_params structures. */
3543 void
3544 ipa_free_all_node_params (void)
3546 delete ipa_node_params_sum;
3547 ipa_node_params_sum = NULL;
3550 /* Grow ipcp_transformations if necessary. */
3552 void
3553 ipcp_grow_transformations_if_necessary (void)
3555 if (vec_safe_length (ipcp_transformations)
3556 <= (unsigned) symtab->cgraph_max_uid)
3557 vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
3560 /* Set the aggregate replacements of NODE to be AGGVALS. */
3562 void
3563 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3564 struct ipa_agg_replacement_value *aggvals)
3566 ipcp_grow_transformations_if_necessary ();
3567 (*ipcp_transformations)[node->uid].agg_values = aggvals;
3570 /* Hook that is called by cgraph.c when an edge is removed. */
3572 static void
3573 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3575 struct ipa_edge_args *args;
3577 /* During IPA-CP updating we can be called on not-yet-analyzed clones. */
3578 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3579 return;
3581 args = IPA_EDGE_REF (cs);
3582 if (args->jump_functions)
3584 struct ipa_jump_func *jf;
3585 int i;
3586 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3588 struct ipa_cst_ref_desc *rdesc;
3589 try_decrement_rdesc_refcount (jf);
3590 if (jf->type == IPA_JF_CONST
3591 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3592 && rdesc->cs == cs)
3593 rdesc->cs = NULL;
3597 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3600 /* Hook that is called by cgraph.c when an edge is duplicated. */
3602 static void
3603 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3604 void *)
3606 struct ipa_edge_args *old_args, *new_args;
3607 unsigned int i;
3609 ipa_check_create_edge_args ();
3611 old_args = IPA_EDGE_REF (src);
3612 new_args = IPA_EDGE_REF (dst);
3614 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3615 if (old_args->polymorphic_call_contexts)
3616 new_args->polymorphic_call_contexts
3617 = vec_safe_copy (old_args->polymorphic_call_contexts);
3619 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3621 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3622 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3624 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3626 if (src_jf->type == IPA_JF_CONST)
3628 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3630 if (!src_rdesc)
3631 dst_jf->value.constant.rdesc = NULL;
3632 else if (src->caller == dst->caller)
3634 struct ipa_ref *ref;
3635 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3636 gcc_checking_assert (n);
3637 ref = src->caller->find_reference (n, src->call_stmt,
3638 src->lto_stmt_uid);
3639 gcc_checking_assert (ref);
3640 dst->caller->clone_reference (ref, ref->stmt);
3642 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3643 dst_rdesc->cs = dst;
3644 dst_rdesc->refcount = src_rdesc->refcount;
3645 dst_rdesc->next_duplicate = NULL;
3646 dst_jf->value.constant.rdesc = dst_rdesc;
3648 else if (src_rdesc->cs == src)
3650 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3651 dst_rdesc->cs = dst;
3652 dst_rdesc->refcount = src_rdesc->refcount;
3653 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3654 src_rdesc->next_duplicate = dst_rdesc;
3655 dst_jf->value.constant.rdesc = dst_rdesc;
3657 else
3659 struct ipa_cst_ref_desc *dst_rdesc;
3660 /* This can happen during inlining, when a JFUNC can refer to a
3661 reference taken in a function up in the tree of inline clones.
3662 We need to find the duplicate that refers to our tree of
3663 inline clones. */
3665 gcc_assert (dst->caller->global.inlined_to);
3666 for (dst_rdesc = src_rdesc->next_duplicate;
3667 dst_rdesc;
3668 dst_rdesc = dst_rdesc->next_duplicate)
3670 struct cgraph_node *top;
3671 top = dst_rdesc->cs->caller->global.inlined_to
3672 ? dst_rdesc->cs->caller->global.inlined_to
3673 : dst_rdesc->cs->caller;
3674 if (dst->caller->global.inlined_to == top)
3675 break;
3677 gcc_assert (dst_rdesc);
3678 dst_jf->value.constant.rdesc = dst_rdesc;
3681 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3682 && src->caller == dst->caller)
3684 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3685 ? dst->caller->global.inlined_to : dst->caller;
3686 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3687 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3689 int c = ipa_get_controlled_uses (root_info, idx);
3690 if (c != IPA_UNDESCRIBED_USE)
3692 c++;
3693 ipa_set_controlled_uses (root_info, idx, c);
3699 /* Analyze a function newly added to the call graph. */
3701 static void
3702 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3704 if (node->has_gimple_body_p ())
3705 ipa_analyze_node (node);
3708 /* Hook that is called by summary when a node is duplicated. */
3710 void
3711 ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
3712 ipa_node_params *old_info,
3713 ipa_node_params *new_info)
3715 ipa_agg_replacement_value *old_av, *new_av;
3717 new_info->descriptors = old_info->descriptors.copy ();
3718 new_info->lattices = NULL;
3719 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3721 new_info->analysis_done = old_info->analysis_done;
3722 new_info->node_enqueued = old_info->node_enqueued;
3723 new_info->versionable = old_info->versionable;
3725 old_av = ipa_get_agg_replacements_for_node (src);
3726 if (old_av)
3728 new_av = NULL;
3729 while (old_av)
3731 struct ipa_agg_replacement_value *v;
3733 v = ggc_alloc<ipa_agg_replacement_value> ();
3734 memcpy (v, old_av, sizeof (*v));
3735 v->next = new_av;
3736 new_av = v;
3737 old_av = old_av->next;
3739 ipa_set_node_agg_value_chain (dst, new_av);
3742 ipcp_transformation_summary *src_trans = ipcp_get_transformation_summary (src);
3744 if (src_trans)
3746 ipcp_grow_transformations_if_necessary ();
3747 src_trans = ipcp_get_transformation_summary (src);
3748 const vec<ipa_alignment, va_gc> *src_alignments = src_trans->alignments;
3749 const vec<ipa_vr, va_gc> *src_vr = src_trans->m_vr;
3750 vec<ipa_alignment, va_gc> *&dst_alignments
3751 = ipcp_get_transformation_summary (dst)->alignments;
3752 vec<ipa_vr, va_gc> *&dst_vr
3753 = ipcp_get_transformation_summary (dst)->m_vr;
3754 if (vec_safe_length (src_trans->alignments) > 0)
3756 vec_safe_reserve_exact (dst_alignments, src_alignments->length ());
3757 for (unsigned i = 0; i < src_alignments->length (); ++i)
3758 dst_alignments->quick_push ((*src_alignments)[i]);
3760 if (vec_safe_length (src_trans->m_vr) > 0)
3762 vec_safe_reserve_exact (dst_vr, src_vr->length ());
3763 for (unsigned i = 0; i < src_vr->length (); ++i)
3764 dst_vr->quick_push ((*src_vr)[i]);
3768 if (src_trans && vec_safe_length (src_trans->bits) > 0)
3770 ipcp_grow_transformations_if_necessary ();
3771 src_trans = ipcp_get_transformation_summary (src);
3772 const vec<ipa_bits, va_gc> *src_bits = src_trans->bits;
3773 vec<ipa_bits, va_gc> *&dst_bits
3774 = ipcp_get_transformation_summary (dst)->bits;
3775 vec_safe_reserve_exact (dst_bits, src_bits->length ());
3776 for (unsigned i = 0; i < src_bits->length (); ++i)
3777 dst_bits->quick_push ((*src_bits)[i]);
3781 /* Register our cgraph hooks if they are not already there. */
3783 void
3784 ipa_register_cgraph_hooks (void)
3786 ipa_check_create_node_params ();
3788 if (!edge_removal_hook_holder)
3789 edge_removal_hook_holder =
3790 symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3791 if (!edge_duplication_hook_holder)
3792 edge_duplication_hook_holder =
3793 symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3794 function_insertion_hook_holder =
3795 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3798 /* Unregister our cgraph hooks. */
3800 static void
3801 ipa_unregister_cgraph_hooks (void)
3803 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
3804 edge_removal_hook_holder = NULL;
3805 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
3806 edge_duplication_hook_holder = NULL;
3807 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3808 function_insertion_hook_holder = NULL;
3811 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3812 longer needed after ipa-cp. */
3814 void
3815 ipa_free_all_structures_after_ipa_cp (void)
3817 if (!optimize && !in_lto_p)
3819 ipa_free_all_edge_args ();
3820 ipa_free_all_node_params ();
3821 ipcp_sources_pool.release ();
3822 ipcp_cst_values_pool.release ();
3823 ipcp_poly_ctx_values_pool.release ();
3824 ipcp_agg_lattice_pool.release ();
3825 ipa_unregister_cgraph_hooks ();
3826 ipa_refdesc_pool.release ();
3830 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3831 longer needed after indirect inlining. */
3833 void
3834 ipa_free_all_structures_after_iinln (void)
3836 ipa_free_all_edge_args ();
3837 ipa_free_all_node_params ();
3838 ipa_unregister_cgraph_hooks ();
3839 ipcp_sources_pool.release ();
3840 ipcp_cst_values_pool.release ();
3841 ipcp_poly_ctx_values_pool.release ();
3842 ipcp_agg_lattice_pool.release ();
3843 ipa_refdesc_pool.release ();
3846 /* Print ipa_tree_map data structures of the single function NODE
3847 to F. */
3849 void
3850 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3852 int i, count;
3853 struct ipa_node_params *info;
3855 if (!node->definition)
3856 return;
3857 info = IPA_NODE_REF (node);
3858 fprintf (f, " function %s/%i parameter descriptors:\n",
3859 node->name (), node->order);
3860 count = ipa_get_param_count (info);
3861 for (i = 0; i < count; i++)
3863 int c;
3865 fprintf (f, " ");
3866 ipa_dump_param (f, info, i);
3867 if (ipa_is_param_used (info, i))
3868 fprintf (f, " used");
3869 c = ipa_get_controlled_uses (info, i);
3870 if (c == IPA_UNDESCRIBED_USE)
3871 fprintf (f, " undescribed_use");
3872 else
3873 fprintf (f, " controlled_uses=%i", c);
3874 fprintf (f, "\n");
3878 /* Print ipa_tree_map data structures of all functions in the
3879 callgraph to F. */
3881 void
3882 ipa_print_all_params (FILE * f)
3884 struct cgraph_node *node;
3886 fprintf (f, "\nFunction parameters:\n");
3887 FOR_EACH_FUNCTION (node)
3888 ipa_print_node_params (f, node);
3891 /* Return a heap-allocated vector containing formal parameters of FNDECL. */
3893 vec<tree>
3894 ipa_get_vector_of_formal_parms (tree fndecl)
3896 vec<tree> args;
3897 int count;
3898 tree parm;
3900 gcc_assert (!flag_wpa);
3901 count = count_formal_params (fndecl);
3902 args.create (count);
3903 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3904 args.quick_push (parm);
3906 return args;
3909 /* Return a heap-allocated vector containing types of formal parameters of
3910 function type FNTYPE. */
3912 vec<tree>
3913 ipa_get_vector_of_formal_parm_types (tree fntype)
3915 vec<tree> types;
3916 int count = 0;
3917 tree t;
3919 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3920 count++;
3922 types.create (count);
3923 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3924 types.quick_push (TREE_VALUE (t));
3926 return types;
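/* TYPE_ARG_TYPES is a singly-linked TREE_LIST, hence the two passes in the
   function above: one to count the elements and one to fill a vector
   created with the exact size. A standalone sketch of the same two-pass
   pattern over a hypothetical cons list in plain C (kept out of the build
   with #if 0). */

#if 0
#include <stdlib.h>

struct cons { void *value; struct cons *next; };

/* Store LIST's values into a freshly allocated array in *OUT and return
   the number of elements. */
static size_t
cons_to_array (const struct cons *list, void ***out)
{
  size_t n = 0;
  for (const struct cons *c = list; c; c = c->next)
    n++;			/* First pass: count. */
  *out = (void **) malloc (n * sizeof (void *));
  size_t i = 0;
  for (const struct cons *c = list; c; c = c->next)
    (*out)[i++] = c->value;	/* Second pass: fill. */
  return n;
}
#endif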
3929 /* Modify the function declaration FNDECL and its type according to the plan in
3930 ADJUSTMENTS. It also sets base fields of individual adjustments structures
3931 to reflect the actual parameters being modified which are determined by the
3932 base_index field. */
3934 void
3935 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3937 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3938 tree orig_type = TREE_TYPE (fndecl);
3939 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3941 /* The following test is an ugly hack; some functions (e.g. unprototyped
3942 declarations) simply don't have any arguments in their type. This is probably a bug but well... */
3943 bool care_for_types = (old_arg_types != NULL_TREE);
3944 bool last_parm_void;
3945 vec<tree> otypes;
3946 if (care_for_types)
3948 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3949 == void_type_node);
3950 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3951 if (last_parm_void)
3952 gcc_assert (oparms.length () + 1 == otypes.length ());
3953 else
3954 gcc_assert (oparms.length () == otypes.length ());
3956 else
3958 last_parm_void = false;
3959 otypes.create (0);
3962 int len = adjustments.length ();
3963 tree *link = &DECL_ARGUMENTS (fndecl);
3964 tree new_arg_types = NULL;
3965 for (int i = 0; i < len; i++)
3967 struct ipa_parm_adjustment *adj;
3968 gcc_assert (link);
3970 adj = &adjustments[i];
3971 tree parm;
3972 if (adj->op == IPA_PARM_OP_NEW)
3973 parm = NULL;
3974 else
3975 parm = oparms[adj->base_index];
3976 adj->base = parm;
3978 if (adj->op == IPA_PARM_OP_COPY)
3980 if (care_for_types)
3981 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3982 new_arg_types);
3983 *link = parm;
3984 link = &DECL_CHAIN (parm);
3986 else if (adj->op != IPA_PARM_OP_REMOVE)
3988 tree new_parm;
3989 tree ptype;
3991 if (adj->by_ref)
3992 ptype = build_pointer_type (adj->type);
3993 else
3995 ptype = adj->type;
3996 if (is_gimple_reg_type (ptype))
3998 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3999 if (TYPE_ALIGN (ptype) != malign)
4000 ptype = build_aligned_type (ptype, malign);
4004 if (care_for_types)
4005 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
4007 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
4008 ptype);
4009 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
4010 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
4011 DECL_ARTIFICIAL (new_parm) = 1;
4012 DECL_ARG_TYPE (new_parm) = ptype;
4013 DECL_CONTEXT (new_parm) = fndecl;
4014 TREE_USED (new_parm) = 1;
4015 DECL_IGNORED_P (new_parm) = 1;
4016 layout_decl (new_parm, 0);
4018 if (adj->op == IPA_PARM_OP_NEW)
4019 adj->base = NULL;
4020 else
4021 adj->base = parm;
4022 adj->new_decl = new_parm;
4024 *link = new_parm;
4025 link = &DECL_CHAIN (new_parm);
4029 *link = NULL_TREE;
4031 tree new_reversed = NULL;
4032 if (care_for_types)
4034 new_reversed = nreverse (new_arg_types);
4035 if (last_parm_void)
4037 if (new_reversed)
4038 TREE_CHAIN (new_arg_types) = void_list_node;
4039 else
4040 new_reversed = void_list_node;
4044 /* Use copy_node to preserve as much as possible from the original type
4045 (debug info, attribute lists etc.). The exception is that METHOD_TYPEs
4046 must have a THIS argument; when we are asked to remove it, we need to
4047 build a new FUNCTION_TYPE instead. */
4049 tree new_type = NULL;
4050 if (TREE_CODE (orig_type) != METHOD_TYPE
4051 || (adjustments[0].op == IPA_PARM_OP_COPY
4052 && adjustments[0].base_index == 0))
4054 new_type = build_distinct_type_copy (orig_type);
4055 TYPE_ARG_TYPES (new_type) = new_reversed;
4057 else
4059 new_type
4060 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
4061 new_reversed));
4062 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
4063 DECL_VINDEX (fndecl) = NULL_TREE;
4066 /* When signature changes, we need to clear builtin info. */
4067 if (DECL_BUILT_IN (fndecl))
4069 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
4070 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
4073 TREE_TYPE (fndecl) = new_type;
4074 DECL_VIRTUAL_P (fndecl) = 0;
4075 DECL_LANG_SPECIFIC (fndecl) = NULL;
4076 otypes.release ();
4077 oparms.release ();
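/* As a source-level illustration (hypothetical, not taken from a real
   testcase): an adjustment vector that copies parameter 0, removes an
   unused parameter 1 and replaces parameter 2 by a single scalar field
   passed by value would rewrite

     int foo (int a, int unused, struct s *p);

   into something shaped like

     int foo (int a, int SYNTH.3);

   where the synthetic parameter receives what used to be loaded as
   p->field. The matching rewrite of call sites is performed by
   ipa_modify_call_arguments below. */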
4080 /* Modify actual arguments of the call statement STMT as indicated in
4081 ADJUSTMENTS. If this is a directly recursive call, CS must be NULL;
4082 otherwise it must contain the corresponding call graph edge. */
4084 void
4085 ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
4086 ipa_parm_adjustment_vec adjustments)
4088 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
4089 vec<tree> vargs;
4090 vec<tree, va_gc> **debug_args = NULL;
4091 gcall *new_stmt;
4092 gimple_stmt_iterator gsi, prev_gsi;
4093 tree callee_decl;
4094 int i, len;
4096 len = adjustments.length ();
4097 vargs.create (len);
4098 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
4099 current_node->remove_stmt_references (stmt);
4101 gsi = gsi_for_stmt (stmt);
4102 prev_gsi = gsi;
4103 gsi_prev (&prev_gsi);
4104 for (i = 0; i < len; i++)
4106 struct ipa_parm_adjustment *adj;
4108 adj = &adjustments[i];
4110 if (adj->op == IPA_PARM_OP_COPY)
4112 tree arg = gimple_call_arg (stmt, adj->base_index);
4114 vargs.quick_push (arg);
4116 else if (adj->op != IPA_PARM_OP_REMOVE)
4118 tree expr, base, off;
4119 location_t loc;
4120 unsigned int deref_align = 0;
4121 bool deref_base = false;
4123 /* We create a new parameter out of the value of the old one; we can
4124 do the following kinds of transformations:
4126 - A scalar passed by reference is converted to a scalar passed by
4127 value. (adj->by_ref is false and the type of the original
4128 actual argument is a pointer to a scalar).
4130 - A part of an aggregate is passed instead of the whole aggregate.
4131 The part can be passed either by value or by reference, this is
4132 determined by value of adj->by_ref. Moreover, the code below
4133 handles both situations when the original aggregate is passed by
4134 value (its type is not a pointer) and when it is passed by
4135 reference (it is a pointer to an aggregate).
4137 When the new argument is passed by reference (adj->by_ref is true)
4138 it must be a part of an aggregate and therefore we form it by
4139 simply taking the address of a reference inside the original
4140 aggregate. */
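/* Concretely (hypothetical values): with adj->by_ref false, adj->offset
   32 and a pointer argument P, the code below builds the equivalent of
   loading *(P + 4 bytes), the bit offset being converted to bytes via
   BITS_PER_UNIT; with adj->by_ref true it would instead pass the
   address of that location. */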
4142 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
4143 base = gimple_call_arg (stmt, adj->base_index);
4144 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
4145 : EXPR_LOCATION (base);
4147 if (TREE_CODE (base) != ADDR_EXPR
4148 && POINTER_TYPE_P (TREE_TYPE (base)))
4149 off = build_int_cst (adj->alias_ptr_type,
4150 adj->offset / BITS_PER_UNIT);
4151 else
4153 HOST_WIDE_INT base_offset;
4154 tree prev_base;
4155 bool addrof;
4157 if (TREE_CODE (base) == ADDR_EXPR)
4159 base = TREE_OPERAND (base, 0);
4160 addrof = true;
4162 else
4163 addrof = false;
4164 prev_base = base;
4165 base = get_addr_base_and_unit_offset (base, &base_offset);
4166 /* Aggregate arguments can have non-invariant addresses. */
4167 if (!base)
4169 base = build_fold_addr_expr (prev_base);
4170 off = build_int_cst (adj->alias_ptr_type,
4171 adj->offset / BITS_PER_UNIT);
4173 else if (TREE_CODE (base) == MEM_REF)
4175 if (!addrof)
4177 deref_base = true;
4178 deref_align = TYPE_ALIGN (TREE_TYPE (base));
4180 off = build_int_cst (adj->alias_ptr_type,
4181 base_offset
4182 + adj->offset / BITS_PER_UNIT);
4183 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
4184 off);
4185 base = TREE_OPERAND (base, 0);
4187 else
4189 off = build_int_cst (adj->alias_ptr_type,
4190 base_offset
4191 + adj->offset / BITS_PER_UNIT);
4192 base = build_fold_addr_expr (base);
4196 if (!adj->by_ref)
4198 tree type = adj->type;
4199 unsigned int align;
4200 unsigned HOST_WIDE_INT misalign;
4202 if (deref_base)
4204 align = deref_align;
4205 misalign = 0;
4207 else
4209 get_pointer_alignment_1 (base, &align, &misalign);
4210 if (TYPE_ALIGN (type) > align)
4211 align = TYPE_ALIGN (type);
4213 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
4214 * BITS_PER_UNIT);
4215 misalign = misalign & (align - 1);
4216 if (misalign != 0)
4217 align = least_bit_hwi (misalign);
4218 if (align < TYPE_ALIGN (type))
4219 type = build_aligned_type (type, align);
4220 base = force_gimple_operand_gsi (&gsi, base,
4221 true, NULL, true, GSI_SAME_STMT);
4222 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
4223 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
4224 /* If expr is not a valid gimple call argument, emit
4225 a load into a temporary. */
4226 if (is_gimple_reg_type (TREE_TYPE (expr)))
4228 gimple *tem = gimple_build_assign (NULL_TREE, expr);
4229 if (gimple_in_ssa_p (cfun))
4231 gimple_set_vuse (tem, gimple_vuse (stmt));
4232 expr = make_ssa_name (TREE_TYPE (expr), tem);
4234 else
4235 expr = create_tmp_reg (TREE_TYPE (expr));
4236 gimple_assign_set_lhs (tem, expr);
4237 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4240 else
4242 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4243 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
4244 expr = build_fold_addr_expr (expr);
4245 expr = force_gimple_operand_gsi (&gsi, expr,
4246 true, NULL, true, GSI_SAME_STMT);
4248 vargs.quick_push (expr);
4250 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4252 unsigned int ix;
4253 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4254 gimple *def_temp;
4256 arg = gimple_call_arg (stmt, adj->base_index);
4257 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4259 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4260 continue;
4261 arg = fold_convert_loc (gimple_location (stmt),
4262 TREE_TYPE (origin), arg);
4264 if (debug_args == NULL)
4265 debug_args = decl_debug_args_insert (callee_decl);
4266 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4267 if (ddecl == origin)
4269 ddecl = (**debug_args)[ix + 1];
4270 break;
4272 if (ddecl == NULL)
4274 ddecl = make_node (DEBUG_EXPR_DECL);
4275 DECL_ARTIFICIAL (ddecl) = 1;
4276 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4277 DECL_MODE (ddecl) = DECL_MODE (origin);
4279 vec_safe_push (*debug_args, origin);
4280 vec_safe_push (*debug_args, ddecl);
4282 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4283 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4287 if (dump_file && (dump_flags & TDF_DETAILS))
4289 fprintf (dump_file, "replacing stmt:");
4290 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4293 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4294 vargs.release ();
4295 if (gimple_call_lhs (stmt))
4296 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4298 gimple_set_block (new_stmt, gimple_block (stmt));
4299 if (gimple_has_location (stmt))
4300 gimple_set_location (new_stmt, gimple_location (stmt));
4301 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4302 gimple_call_copy_flags (new_stmt, stmt);
4303 if (gimple_in_ssa_p (cfun))
4305 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4306 if (gimple_vdef (stmt))
4308 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4309 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4313 if (dump_file && (dump_flags & TDF_DETAILS))
4315 fprintf (dump_file, "with stmt:");
4316 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4317 fprintf (dump_file, "\n");
4319 gsi_replace (&gsi, new_stmt, true);
4320 if (cs)
4321 cs->set_call_stmt (new_stmt);
4324 current_node->record_stmt_references (gsi_stmt (gsi));
4325 gsi_prev (&gsi);
4327 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4330 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4331 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4332 specifies whether the function should care about type incompatibility between
4333 the current and new expressions. If it is false, the function will leave
4334 incompatibility issues to the caller. Return true iff the expression
4335 was modified. */
4337 bool
4338 ipa_modify_expr (tree *expr, bool convert,
4339 ipa_parm_adjustment_vec adjustments)
4341 struct ipa_parm_adjustment *cand
4342 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4343 if (!cand)
4344 return false;
4346 tree src;
4347 if (cand->by_ref)
4349 src = build_simple_mem_ref (cand->new_decl);
4350 REF_REVERSE_STORAGE_ORDER (src) = cand->reverse;
4352 else
4353 src = cand->new_decl;
4355 if (dump_file && (dump_flags & TDF_DETAILS))
4357 fprintf (dump_file, "About to replace expr ");
4358 print_generic_expr (dump_file, *expr, 0);
4359 fprintf (dump_file, " with ");
4360 print_generic_expr (dump_file, src, 0);
4361 fprintf (dump_file, "\n");
4364 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4366 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4367 *expr = vce;
4369 else
4370 *expr = src;
4371 return true;
4374 /* If T is an SSA_NAME, return NULL if it is not a default def, or
4375 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4376 the base variable is always returned, regardless of whether it is a
4377 default def. Return T if it is not an SSA_NAME. */
4379 static tree
4380 get_ssa_base_param (tree t, bool ignore_default_def)
4382 if (TREE_CODE (t) == SSA_NAME)
4384 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4385 return SSA_NAME_VAR (t);
4386 else
4387 return NULL_TREE;
4389 return t;
4392 /* Given an expression, return an adjustment entry specifying the
4393 transformation to be done on EXPR. If no suitable adjustment entry
4394 was found, returns NULL.
4396 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4397 default def; otherwise bail on them.
4399 If CONVERT is non-NULL, this function will set *CONVERT if the
4400 expression provided is a component reference. ADJUSTMENTS is the
4401 adjustments vector. */
4403 ipa_parm_adjustment *
4404 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4405 ipa_parm_adjustment_vec adjustments,
4406 bool ignore_default_def)
4408 if (TREE_CODE (**expr) == BIT_FIELD_REF
4409 || TREE_CODE (**expr) == IMAGPART_EXPR
4410 || TREE_CODE (**expr) == REALPART_EXPR)
4412 *expr = &TREE_OPERAND (**expr, 0);
4413 if (convert)
4414 *convert = true;
4417 HOST_WIDE_INT offset, size, max_size;
4418 bool reverse;
4419 tree base
4420 = get_ref_base_and_extent (**expr, &offset, &size, &max_size, &reverse);
4421 if (!base || size == -1 || max_size == -1)
4422 return NULL;
4424 if (TREE_CODE (base) == MEM_REF)
4426 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4427 base = TREE_OPERAND (base, 0);
4430 base = get_ssa_base_param (base, ignore_default_def);
4431 if (!base || TREE_CODE (base) != PARM_DECL)
4432 return NULL;
4434 struct ipa_parm_adjustment *cand = NULL;
4435 unsigned int len = adjustments.length ();
4436 for (unsigned i = 0; i < len; i++)
4438 struct ipa_parm_adjustment *adj = &adjustments[i];
4440 if (adj->base == base
4441 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4443 cand = adj;
4444 break;
4448 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4449 return NULL;
4450 return cand;
4453 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4455 static bool
4456 index_in_adjustments_multiple_times_p (int base_index,
4457 ipa_parm_adjustment_vec adjustments)
4459 int i, len = adjustments.length ();
4460 bool one = false;
4462 for (i = 0; i < len; i++)
4464 struct ipa_parm_adjustment *adj;
4465 adj = &adjustments[i];
4467 if (adj->base_index == base_index)
4469 if (one)
4470 return true;
4471 else
4472 one = true;
4475 return false;
4479 /* Return adjustments that should have the same effect on function parameters
4480 and call arguments as if they were first changed according to adjustments in
4481 INNER and then by adjustments in OUTER. */
4483 ipa_parm_adjustment_vec
4484 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4485 ipa_parm_adjustment_vec outer)
4487 int i, outlen = outer.length ();
4488 int inlen = inner.length ();
4489 int removals = 0;
4490 ipa_parm_adjustment_vec adjustments, tmp;
4492 tmp.create (inlen);
4493 for (i = 0; i < inlen; i++)
4495 struct ipa_parm_adjustment *n;
4496 n = &inner[i];
4498 if (n->op == IPA_PARM_OP_REMOVE)
4499 removals++;
4500 else
4502 /* FIXME: Handling of new arguments is not implemented yet. */
4503 gcc_assert (n->op != IPA_PARM_OP_NEW);
4504 tmp.quick_push (*n);
4508 adjustments.create (outlen + removals);
4509 for (i = 0; i < outlen; i++)
4511 struct ipa_parm_adjustment r;
4512 struct ipa_parm_adjustment *out = &outer[i];
4513 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4515 memset (&r, 0, sizeof (r));
4516 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4517 if (out->op == IPA_PARM_OP_REMOVE)
4519 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4521 r.op = IPA_PARM_OP_REMOVE;
4522 adjustments.quick_push (r);
4524 continue;
4526 else
4528 /* FIXME: Handling of new arguments is not implemented yet. */
4529 gcc_assert (out->op != IPA_PARM_OP_NEW);
4532 r.base_index = in->base_index;
4533 r.type = out->type;
4535 /* FIXME: Create nonlocal value too. */
4537 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4538 r.op = IPA_PARM_OP_COPY;
4539 else if (in->op == IPA_PARM_OP_COPY)
4540 r.offset = out->offset;
4541 else if (out->op == IPA_PARM_OP_COPY)
4542 r.offset = in->offset;
4543 else
4544 r.offset = in->offset + out->offset;
4545 adjustments.quick_push (r);
4548 for (i = 0; i < inlen; i++)
4550 struct ipa_parm_adjustment *n = &inner[i];
4552 if (n->op == IPA_PARM_OP_REMOVE)
4553 adjustments.quick_push (*n);
4556 tmp.release ();
4557 return adjustments;
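/* A worked example with hypothetical indices: let INNER turn (p0, p1, p2)
   into (p0, p2) by removing p1, and let OUTER then remove its parameter 0
   and copy its parameter 1. Expressed against the original signature, the
   combined vector removes p0, copies p2 (OUTER's base_index 1 is
   translated through INNER's surviving entries to original index 2), and
   finally carries over INNER's removal of p1. */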
4560 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a
4561 human-friendly way, assuming they are meant to be applied to FNDECL. */
4563 void
4564 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4565 tree fndecl)
4567 int i, len = adjustments.length ();
4568 bool first = true;
4569 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4571 fprintf (file, "IPA param adjustments: ");
4572 for (i = 0; i < len; i++)
4574 struct ipa_parm_adjustment *adj;
4575 adj = &adjustments[i];
4577 if (!first)
4578 fprintf (file, " ");
4579 else
4580 first = false;
4582 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4583 print_generic_expr (file, parms[adj->base_index], 0);
4584 if (adj->base)
4586 fprintf (file, ", base: ");
4587 print_generic_expr (file, adj->base, 0);
4589 if (adj->new_decl)
4591 fprintf (file, ", new_decl: ");
4592 print_generic_expr (file, adj->new_decl, 0);
4594 if (adj->new_ssa_base)
4596 fprintf (file, ", new_ssa_base: ");
4597 print_generic_expr (file, adj->new_ssa_base, 0);
4600 if (adj->op == IPA_PARM_OP_COPY)
4601 fprintf (file, ", copy_param");
4602 else if (adj->op == IPA_PARM_OP_REMOVE)
4603 fprintf (file, ", remove_param");
4604 else
4605 fprintf (file, ", offset %li", (long) adj->offset);
4606 if (adj->by_ref)
4607 fprintf (file, ", by_ref");
4608 print_node_brief (file, ", type: ", adj->type, 0);
4609 fprintf (file, "\n");
4611 parms.release ();
4614 /* Dump the AV linked list. */
4616 void
4617 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4619 bool comma = false;
4620 fprintf (f, " Aggregate replacements:");
4621 for (; av; av = av->next)
4623 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4624 av->index, av->offset);
4625 print_generic_expr (f, av->value, 0);
4626 comma = true;
4628 fprintf (f, "\n");
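/* The output produced above is shaped like (hypothetical values)

     Aggregate replacements: 0[0]=4, 1[16]=&some_var

   i.e. the parameter index, the byte offset within the aggregate in
   brackets, and the replacement value after the equals sign. */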
4631 /* Stream out jump function JUMP_FUNC to OB. */
4633 static void
4634 ipa_write_jump_function (struct output_block *ob,
4635 struct ipa_jump_func *jump_func)
4637 struct ipa_agg_jf_item *item;
4638 struct bitpack_d bp;
4639 int i, count;
4641 streamer_write_uhwi (ob, jump_func->type);
4642 switch (jump_func->type)
4644 case IPA_JF_UNKNOWN:
4645 break;
4646 case IPA_JF_CONST:
4647 gcc_assert (
4648 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4649 stream_write_tree (ob, jump_func->value.constant.value, true);
4650 break;
4651 case IPA_JF_PASS_THROUGH:
4652 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4653 if (jump_func->value.pass_through.operation == NOP_EXPR)
4655 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4656 bp = bitpack_create (ob->main_stream);
4657 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4658 streamer_write_bitpack (&bp);
4660 else
4662 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4663 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4665 break;
4666 case IPA_JF_ANCESTOR:
4667 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4668 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4669 bp = bitpack_create (ob->main_stream);
4670 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4671 streamer_write_bitpack (&bp);
4672 break;
4675 count = vec_safe_length (jump_func->agg.items);
4676 streamer_write_uhwi (ob, count);
4677 if (count)
4679 bp = bitpack_create (ob->main_stream);
4680 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4681 streamer_write_bitpack (&bp);
4684 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4686 streamer_write_uhwi (ob, item->offset);
4687 stream_write_tree (ob, item->value, true);
4690 bp = bitpack_create (ob->main_stream);
4691 bp_pack_value (&bp, jump_func->alignment.known, 1);
4692 streamer_write_bitpack (&bp);
4693 if (jump_func->alignment.known)
4695 streamer_write_uhwi (ob, jump_func->alignment.align);
4696 streamer_write_uhwi (ob, jump_func->alignment.misalign);
4699 bp = bitpack_create (ob->main_stream);
4700 bp_pack_value (&bp, jump_func->bits.known, 1);
4701 streamer_write_bitpack (&bp);
4702 if (jump_func->bits.known)
4704 streamer_write_widest_int (ob, jump_func->bits.value);
4705 streamer_write_widest_int (ob, jump_func->bits.mask);
4707 bp_pack_value (&bp, jump_func->vr_known, 1);
4708 streamer_write_bitpack (&bp);
4709 if (jump_func->vr_known)
4711 streamer_write_enum (ob->main_stream, value_range_type,
4712 VR_LAST, jump_func->m_vr.type);
4713 stream_write_tree (ob, jump_func->m_vr.min, true);
4714 stream_write_tree (ob, jump_func->m_vr.max, true);
4718 /* Read in jump function JUMP_FUNC from IB. */
4720 static void
4721 ipa_read_jump_function (struct lto_input_block *ib,
4722 struct ipa_jump_func *jump_func,
4723 struct cgraph_edge *cs,
4724 struct data_in *data_in)
4726 enum jump_func_type jftype;
4727 enum tree_code operation;
4728 int i, count;
4730 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4731 switch (jftype)
4733 case IPA_JF_UNKNOWN:
4734 ipa_set_jf_unknown (jump_func);
4735 break;
4736 case IPA_JF_CONST:
4737 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4738 break;
4739 case IPA_JF_PASS_THROUGH:
4740 operation = (enum tree_code) streamer_read_uhwi (ib);
4741 if (operation == NOP_EXPR)
4743 int formal_id = streamer_read_uhwi (ib);
4744 struct bitpack_d bp = streamer_read_bitpack (ib);
4745 bool agg_preserved = bp_unpack_value (&bp, 1);
4746 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4748 else
4750 tree operand = stream_read_tree (ib, data_in);
4751 int formal_id = streamer_read_uhwi (ib);
4752 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4753 operation);
4755 break;
4756 case IPA_JF_ANCESTOR:
4758 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4759 int formal_id = streamer_read_uhwi (ib);
4760 struct bitpack_d bp = streamer_read_bitpack (ib);
4761 bool agg_preserved = bp_unpack_value (&bp, 1);
4762 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4763 break;
4767 count = streamer_read_uhwi (ib);
4768 vec_alloc (jump_func->agg.items, count);
4769 if (count)
4771 struct bitpack_d bp = streamer_read_bitpack (ib);
4772 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4774 for (i = 0; i < count; i++)
4776 struct ipa_agg_jf_item item;
4777 item.offset = streamer_read_uhwi (ib);
4778 item.value = stream_read_tree (ib, data_in);
4779 jump_func->agg.items->quick_push (item);
4782 struct bitpack_d bp = streamer_read_bitpack (ib);
4783 bool alignment_known = bp_unpack_value (&bp, 1);
4784 if (alignment_known)
4786 jump_func->alignment.known = true;
4787 jump_func->alignment.align = streamer_read_uhwi (ib);
4788 jump_func->alignment.misalign = streamer_read_uhwi (ib);
4790 else
4791 jump_func->alignment.known = false;
4793 bp = streamer_read_bitpack (ib);
4794 bool bits_known = bp_unpack_value (&bp, 1);
4795 if (bits_known)
4797 jump_func->bits.known = true;
4798 jump_func->bits.value = streamer_read_widest_int (ib);
4799 jump_func->bits.mask = streamer_read_widest_int (ib);
4801 else
4802 jump_func->bits.known = false;
4804 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4805 bool vr_known = bp_unpack_value (&vr_bp, 1);
4806 if (vr_known)
4808 jump_func->vr_known = true;
4809 jump_func->m_vr.type = streamer_read_enum (ib,
4810 value_range_type,
4811 VR_LAST);
4812 jump_func->m_vr.min = stream_read_tree (ib, data_in);
4813 jump_func->m_vr.max = stream_read_tree (ib, data_in);
4815 else
4816 jump_func->vr_known = false;
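/* The writer and reader above must pack and unpack the individual flag
   bits in exactly the same order, or the two streams go out of sync. A
   minimal standalone sketch of such a bit packer (hypothetical and much
   simpler than GCC's bitpack_d, which also flushes full words to a
   stream; kept out of the build with #if 0). */

#if 0
#include <stdint.h>

struct bitpack { uint64_t word; unsigned pos; };

/* Append the low NBITS bits of VAL (NBITS < 64) to BP. */
static void
bp_pack (struct bitpack *bp, uint64_t val, unsigned nbits)
{
  bp->word |= (val & ((UINT64_C (1) << nbits) - 1)) << bp->pos;
  bp->pos += nbits;
}

/* Extract the next NBITS bits (NBITS < 64) from BP. */
static uint64_t
bp_unpack (struct bitpack *bp, unsigned nbits)
{
  uint64_t val = (bp->word >> bp->pos) & ((UINT64_C (1) << nbits) - 1);
  bp->pos += nbits;
  return val;
}
#endif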
4819 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4820 relevant to indirect inlining to OB. */
4822 static void
4823 ipa_write_indirect_edge_info (struct output_block *ob,
4824 struct cgraph_edge *cs)
4826 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4827 struct bitpack_d bp;
4829 streamer_write_hwi (ob, ii->param_index);
4830 bp = bitpack_create (ob->main_stream);
4831 bp_pack_value (&bp, ii->polymorphic, 1);
4832 bp_pack_value (&bp, ii->agg_contents, 1);
4833 bp_pack_value (&bp, ii->member_ptr, 1);
4834 bp_pack_value (&bp, ii->by_ref, 1);
4835 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
4836 bp_pack_value (&bp, ii->vptr_changed, 1);
4837 streamer_write_bitpack (&bp);
4838 if (ii->agg_contents || ii->polymorphic)
4839 streamer_write_hwi (ob, ii->offset);
4840 else
4841 gcc_assert (ii->offset == 0);
4843 if (ii->polymorphic)
4845 streamer_write_hwi (ob, ii->otr_token);
4846 stream_write_tree (ob, ii->otr_type, true);
4847 ii->context.stream_out (ob);
4851 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4852 relevant to indirect inlining from IB. */
4854 static void
4855 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4856 struct data_in *data_in,
4857 struct cgraph_edge *cs)
4859 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4860 struct bitpack_d bp;
4862 ii->param_index = (int) streamer_read_hwi (ib);
4863 bp = streamer_read_bitpack (ib);
4864 ii->polymorphic = bp_unpack_value (&bp, 1);
4865 ii->agg_contents = bp_unpack_value (&bp, 1);
4866 ii->member_ptr = bp_unpack_value (&bp, 1);
4867 ii->by_ref = bp_unpack_value (&bp, 1);
4868 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
4869 ii->vptr_changed = bp_unpack_value (&bp, 1);
4870 if (ii->agg_contents || ii->polymorphic)
4871 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4872 else
4873 ii->offset = 0;
4874 if (ii->polymorphic)
4876 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4877 ii->otr_type = stream_read_tree (ib, data_in);
4878 ii->context.stream_in (ib, data_in);
4882 /* Stream out NODE info to OB. */
4884 static void
4885 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4887 int node_ref;
4888 lto_symtab_encoder_t encoder;
4889 struct ipa_node_params *info = IPA_NODE_REF (node);
4890 int j;
4891 struct cgraph_edge *e;
4892 struct bitpack_d bp;
4894 encoder = ob->decl_state->symtab_node_encoder;
4895 node_ref = lto_symtab_encoder_encode (encoder, node);
4896 streamer_write_uhwi (ob, node_ref);
4898 streamer_write_uhwi (ob, ipa_get_param_count (info));
4899 for (j = 0; j < ipa_get_param_count (info); j++)
4900 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4901 bp = bitpack_create (ob->main_stream);
4902 gcc_assert (info->analysis_done
4903 || ipa_get_param_count (info) == 0);
4904 gcc_assert (!info->node_enqueued);
4905 gcc_assert (!info->ipcp_orig_node);
4906 for (j = 0; j < ipa_get_param_count (info); j++)
4907 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4908 streamer_write_bitpack (&bp);
4909 for (j = 0; j < ipa_get_param_count (info); j++)
4910 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4911 for (e = node->callees; e; e = e->next_callee)
4913 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4915 streamer_write_uhwi (ob,
4916 ipa_get_cs_argument_count (args) * 2
4917 + (args->polymorphic_call_contexts != NULL));
4918 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4920 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4921 if (args->polymorphic_call_contexts != NULL)
4922 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4925 for (e = node->indirect_calls; e; e = e->next_callee)
4927 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4929 streamer_write_uhwi (ob,
4930 ipa_get_cs_argument_count (args) * 2
4931 + (args->polymorphic_call_contexts != NULL));
4932 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4934 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4935 if (args->polymorphic_call_contexts != NULL)
4936 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4938 ipa_write_indirect_edge_info (ob, e);
4942 /* Stream in NODE info from IB. */
4944 static void
4945 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4946 struct data_in *data_in)
4948 struct ipa_node_params *info = IPA_NODE_REF (node);
4949 int k;
4950 struct cgraph_edge *e;
4951 struct bitpack_d bp;
4953 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4955 for (k = 0; k < ipa_get_param_count (info); k++)
4956 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4958 bp = streamer_read_bitpack (ib);
4959 if (ipa_get_param_count (info) != 0)
4960 info->analysis_done = true;
4961 info->node_enqueued = false;
4962 for (k = 0; k < ipa_get_param_count (info); k++)
4963 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4964 for (k = 0; k < ipa_get_param_count (info); k++)
4965 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4966 for (e = node->callees; e; e = e->next_callee)
4968 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4969 int count = streamer_read_uhwi (ib);
4970 bool contexts_computed = count & 1;
4971 count /= 2;
4973 if (!count)
4974 continue;
4975 vec_safe_grow_cleared (args->jump_functions, count);
4976 if (contexts_computed)
4977 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4979 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4981 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4982 data_in);
4983 if (contexts_computed)
4984 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4987 for (e = node->indirect_calls; e; e = e->next_callee)
4989 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4990 int count = streamer_read_uhwi (ib);
4991 bool contexts_computed = count & 1;
4992 count /= 2;
4994 if (count)
4996 vec_safe_grow_cleared (args->jump_functions, count);
4997 if (contexts_computed)
4998 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4999 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
5001 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5002 data_in);
5003 if (contexts_computed)
5004 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
5007 ipa_read_indirect_edge_info (ib, data_in, e);
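/* The per-edge argument count streamed above multiplexes a flag into one
   integer as COUNT * 2 + HAS_CONTEXTS. For example (hypothetical values),
   three arguments with polymorphic call contexts are streamed as 7, from
   which the reader recovers contexts_computed = 7 & 1 = 1 and
   count = 7 / 2 = 3. */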
5011 /* Write jump functions for nodes in the current LTO partition. */
5013 void
5014 ipa_prop_write_jump_functions (void)
5016 struct cgraph_node *node;
5017 struct output_block *ob;
5018 unsigned int count = 0;
5019 lto_symtab_encoder_iterator lsei;
5020 lto_symtab_encoder_t encoder;
5022 if (!ipa_node_params_sum)
5023 return;
5025 ob = create_output_block (LTO_section_jump_functions);
5026 encoder = ob->decl_state->symtab_node_encoder;
5027 ob->symbol = NULL;
5028 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5029 lsei_next_function_in_partition (&lsei))
5031 node = lsei_cgraph_node (lsei);
5032 if (node->has_gimple_body_p ()
5033 && IPA_NODE_REF (node) != NULL)
5034 count++;
5037 streamer_write_uhwi (ob, count);
5039 /* Process all of the functions. */
5040 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5041 lsei_next_function_in_partition (&lsei))
5043 node = lsei_cgraph_node (lsei);
5044 if (node->has_gimple_body_p ()
5045 && IPA_NODE_REF (node) != NULL)
5046 ipa_write_node_info (ob, node);
5048 streamer_write_char_stream (ob->main_stream, 0);
5049 produce_asm (ob, NULL);
5050 destroy_output_block (ob);
5053 /* Read the jump functions section in file FILE_DATA of length LEN with data DATA. */
5055 static void
5056 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
5057 size_t len)
5059 const struct lto_function_header *header =
5060 (const struct lto_function_header *) data;
5061 const int cfg_offset = sizeof (struct lto_function_header);
5062 const int main_offset = cfg_offset + header->cfg_size;
5063 const int string_offset = main_offset + header->main_size;
5064 struct data_in *data_in;
5065 unsigned int i;
5066 unsigned int count;
5068 lto_input_block ib_main ((const char *) data + main_offset,
5069 header->main_size, file_data->mode_table);
5071 data_in =
5072 lto_data_in_create (file_data, (const char *) data + string_offset,
5073 header->string_size, vNULL);
5074 count = streamer_read_uhwi (&ib_main);
5076 for (i = 0; i < count; i++)
5078 unsigned int index;
5079 struct cgraph_node *node;
5080 lto_symtab_encoder_t encoder;
5082 index = streamer_read_uhwi (&ib_main);
5083 encoder = file_data->symtab_node_encoder;
5084 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5085 index));
5086 gcc_assert (node->definition);
5087 ipa_read_node_info (&ib_main, node, data_in);
5089 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5090 len);
5091 lto_data_in_delete (data_in);
5094 /* Read ipcp jump functions. */
5096 void
5097 ipa_prop_read_jump_functions (void)
5099 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5100 struct lto_file_decl_data *file_data;
5101 unsigned int j = 0;
5103 ipa_check_create_node_params ();
5104 ipa_check_create_edge_args ();
5105 ipa_register_cgraph_hooks ();
5107 while ((file_data = file_data_vec[j++]))
5109 size_t len;
5110 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
5112 if (data)
5113 ipa_prop_read_section (file_data, data, len);
5117 /* After merging units, we can get a mismatch in argument counts.
5118 Decl merging might also have rendered parameter lists obsolete.
5119 Also compute called_with_variable_arg info. */
5121 void
5122 ipa_update_after_lto_read (void)
5124 ipa_check_create_node_params ();
5125 ipa_check_create_edge_args ();
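/* Stream out the IPA-CP transformation data for NODE to OB: the chain of
   aggregate replacement values followed by the known alignments, value
   ranges and bits of its parameters. */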
5128 void
5129 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
5131 int node_ref;
5132 unsigned int count = 0;
5133 lto_symtab_encoder_t encoder;
5134 struct ipa_agg_replacement_value *aggvals, *av;
5136 aggvals = ipa_get_agg_replacements_for_node (node);
5137 encoder = ob->decl_state->symtab_node_encoder;
5138 node_ref = lto_symtab_encoder_encode (encoder, node);
5139 streamer_write_uhwi (ob, node_ref);
5141 for (av = aggvals; av; av = av->next)
5142 count++;
5143 streamer_write_uhwi (ob, count);
5145 for (av = aggvals; av; av = av->next)
5147 struct bitpack_d bp;
5149 streamer_write_uhwi (ob, av->offset);
5150 streamer_write_uhwi (ob, av->index);
5151 stream_write_tree (ob, av->value, true);
5153 bp = bitpack_create (ob->main_stream);
5154 bp_pack_value (&bp, av->by_ref, 1);
5155 streamer_write_bitpack (&bp);
5158 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5159 if (ts && vec_safe_length (ts->alignments) > 0)
5161 count = ts->alignments->length ();
5163 streamer_write_uhwi (ob, count);
5164 for (unsigned i = 0; i < count; ++i)
5166 ipa_alignment *parm_al = &(*ts->alignments)[i];
5168 struct bitpack_d bp;
5169 bp = bitpack_create (ob->main_stream);
5170 bp_pack_value (&bp, parm_al->known, 1);
5171 streamer_write_bitpack (&bp);
5172 if (parm_al->known)
5174 streamer_write_uhwi (ob, parm_al->align);
5175 streamer_write_hwi_in_range (ob->main_stream, 0, parm_al->align,
5176 parm_al->misalign);
5180 else
5181 streamer_write_uhwi (ob, 0);
5183 if (ts && vec_safe_length (ts->m_vr) > 0)
5185 count = ts->m_vr->length ();
5186 streamer_write_uhwi (ob, count);
5187 for (unsigned i = 0; i < count; ++i)
5189 struct bitpack_d bp;
5190 ipa_vr *parm_vr = &(*ts->m_vr)[i];
5191 bp = bitpack_create (ob->main_stream);
5192 bp_pack_value (&bp, parm_vr->known, 1);
5193 streamer_write_bitpack (&bp);
5194 if (parm_vr->known)
5196 streamer_write_enum (ob->main_stream, value_range_type,
5197 VR_LAST, parm_vr->type);
5198 streamer_write_wide_int (ob, parm_vr->min);
5199 streamer_write_wide_int (ob, parm_vr->max);
5203 else
5204 streamer_write_uhwi (ob, 0);
5206 if (ts && vec_safe_length (ts->bits) > 0)
5208 count = ts->bits->length ();
5209 streamer_write_uhwi (ob, count);
5211 for (unsigned i = 0; i < count; ++i)
5213 const ipa_bits& bits_jfunc = (*ts->bits)[i];
5214 struct bitpack_d bp = bitpack_create (ob->main_stream);
5215 bp_pack_value (&bp, bits_jfunc.known, 1);
5216 streamer_write_bitpack (&bp);
5217 if (bits_jfunc.known)
5219 streamer_write_widest_int (ob, bits_jfunc.value);
5220 streamer_write_widest_int (ob, bits_jfunc.mask);
5224 else
5225 streamer_write_uhwi (ob, 0);
5228 /* Stream in the aggregate value replacement chain and the other IPA-CP transformation data (alignments, value ranges, known bits) for NODE from IB. */
5230 static void
5231 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
5232 data_in *data_in)
5234 struct ipa_agg_replacement_value *aggvals = NULL;
5235 unsigned int count, i;
5237 count = streamer_read_uhwi (ib);
5238 for (i = 0; i < count; i++)
5240 struct ipa_agg_replacement_value *av;
5241 struct bitpack_d bp;
5243 av = ggc_alloc<ipa_agg_replacement_value> ();
5244 av->offset = streamer_read_uhwi (ib);
5245 av->index = streamer_read_uhwi (ib);
5246 av->value = stream_read_tree (ib, data_in);
5247 bp = streamer_read_bitpack (ib);
5248 av->by_ref = bp_unpack_value (&bp, 1);
5249 av->next = aggvals;
5250 aggvals = av;
5252 ipa_set_node_agg_value_chain (node, aggvals);
5254 count = streamer_read_uhwi (ib);
5255 if (count > 0)
5257 ipcp_grow_transformations_if_necessary ();
5259 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5260 vec_safe_grow_cleared (ts->alignments, count);
5262 for (i = 0; i < count; i++)
5264 ipa_alignment *parm_al;
5265 parm_al = &(*ts->alignments)[i];
5266 struct bitpack_d bp;
5267 bp = streamer_read_bitpack (ib);
5268 parm_al->known = bp_unpack_value (&bp, 1);
5269 if (parm_al->known)
5271 parm_al->align = streamer_read_uhwi (ib);
5272 parm_al->misalign
5273 = streamer_read_hwi_in_range (ib, "ipa-prop misalign",
5274 0, parm_al->align);
5279 count = streamer_read_uhwi (ib);
5280 if (count > 0)
5282 ipcp_grow_transformations_if_necessary ();
5284 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5285 vec_safe_grow_cleared (ts->m_vr, count);
5286 for (i = 0; i < count; i++)
5288 ipa_vr *parm_vr;
5289 parm_vr = &(*ts->m_vr)[i];
5290 struct bitpack_d bp;
5291 bp = streamer_read_bitpack (ib);
5292 parm_vr->known = bp_unpack_value (&bp, 1);
5293 if (parm_vr->known)
5295 parm_vr->type = streamer_read_enum (ib, value_range_type,
5296 VR_LAST);
5297 parm_vr->min = streamer_read_wide_int (ib);
5298 parm_vr->max = streamer_read_wide_int (ib);
5302 count = streamer_read_uhwi (ib);
5303 if (count > 0)
5305 ipcp_grow_transformations_if_necessary ();
5307 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5308 vec_safe_grow_cleared (ts->bits, count);
5310 for (i = 0; i < count; i++)
5312 ipa_bits& bits_jfunc = (*ts->bits)[i];
5313 struct bitpack_d bp = streamer_read_bitpack (ib);
5314 bits_jfunc.known = bp_unpack_value (&bp, 1);
5315 if (bits_jfunc.known)
5317 bits_jfunc.value = streamer_read_widest_int (ib);
5318 bits_jfunc.mask = streamer_read_widest_int (ib);
5324 /* Write all aggregate replacements for nodes in the set. */
5326 void
5327 ipcp_write_transformation_summaries (void)
5329 struct cgraph_node *node;
5330 struct output_block *ob;
5331 unsigned int count = 0;
5332 lto_symtab_encoder_iterator lsei;
5333 lto_symtab_encoder_t encoder;
5335 ob = create_output_block (LTO_section_ipcp_transform);
5336 encoder = ob->decl_state->symtab_node_encoder;
5337 ob->symbol = NULL;
5338 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5339 lsei_next_function_in_partition (&lsei))
5341 node = lsei_cgraph_node (lsei);
5342 if (node->has_gimple_body_p ())
5343 count++;
5346 streamer_write_uhwi (ob, count);
5348 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5349 lsei_next_function_in_partition (&lsei))
5351 node = lsei_cgraph_node (lsei);
5352 if (node->has_gimple_body_p ())
5353 write_ipcp_transformation_info (ob, node);
5355 streamer_write_char_stream (ob->main_stream, 0);
5356 produce_asm (ob, NULL);
5357 destroy_output_block (ob);
5360 /* Read replacements section in file FILE_DATA of length LEN with data
5361 DATA. */
5363 static void
5364 read_replacements_section (struct lto_file_decl_data *file_data,
5365 const char *data,
5366 size_t len)
5368 const struct lto_function_header *header =
5369 (const struct lto_function_header *) data;
5370 const int cfg_offset = sizeof (struct lto_function_header);
5371 const int main_offset = cfg_offset + header->cfg_size;
5372 const int string_offset = main_offset + header->main_size;
5373 struct data_in *data_in;
5374 unsigned int i;
5375 unsigned int count;
5377 lto_input_block ib_main ((const char *) data + main_offset,
5378 header->main_size, file_data->mode_table);
5380 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5381 header->string_size, vNULL);
5382 count = streamer_read_uhwi (&ib_main);
5384 for (i = 0; i < count; i++)
5386 unsigned int index;
5387 struct cgraph_node *node;
5388 lto_symtab_encoder_t encoder;
5390 index = streamer_read_uhwi (&ib_main);
5391 encoder = file_data->symtab_node_encoder;
5392 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5393 index));
5394 gcc_assert (node->definition);
5395 read_ipcp_transformation_info (&ib_main, node, data_in);
5397 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
5398 len);
5399 lto_data_in_delete (data_in);
5402 /* Read IPA-CP aggregate replacements. */
5404 void
5405 ipcp_read_transformation_summaries (void)
5407 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5408 struct lto_file_decl_data *file_data;
5409 unsigned int j = 0;
5411 while ((file_data = file_data_vec[j++]))
5413 size_t len;
5414 const char *data = lto_get_section_data (file_data,
5415 LTO_section_ipcp_transform,
5416 NULL, &len);
5417 if (data)
5418 read_replacements_section (file_data, data, len);
5422 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5423 NODE. */
5425 static void
5426 adjust_agg_replacement_values (struct cgraph_node *node,
5427 struct ipa_agg_replacement_value *aggval)
5429 struct ipa_agg_replacement_value *v;
5430 int i, c = 0, d = 0, *adj;
5432 if (!node->clone.combined_args_to_skip)
5433 return;
5435 for (v = aggval; v; v = v->next)
5437 gcc_assert (v->index >= 0);
5438 if (c < v->index)
5439 c = v->index;
5441 c++;
5443 adj = XALLOCAVEC (int, c);
5444 for (i = 0; i < c; i++)
5445 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5447 adj[i] = -1;
5448 d++;
5450 else
5451 adj[i] = i - d;
5453 for (v = aggval; v; v = v->next)
5454 v->index = adj[v->index];
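/* A worked example with hypothetical values: if NODE's clone skips
   argument 1 of an original signature (a0, a1, a2), the table built above
   is adj = {0, -1, 1}: replacements for a0 keep index 0, replacements for
   a2 are renumbered to 1, and one referring to the skipped a1 would end
   up with index -1. */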
5457 /* Dominator walker driving the ipcp modification phase. */
5459 class ipcp_modif_dom_walker : public dom_walker
5461 public:
5462 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
5463 vec<ipa_param_descriptor> descs,
5464 struct ipa_agg_replacement_value *av,
5465 bool *sc, bool *cc)
5466 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5467 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5469 virtual edge before_dom_children (basic_block);
5471 private:
5472 struct ipa_func_body_info *m_fbi;
5473 vec<ipa_param_descriptor> m_descriptors;
5474 struct ipa_agg_replacement_value *m_aggval;
5475 bool *m_something_changed, *m_cfg_changed;
5478 edge
5479 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5481 gimple_stmt_iterator gsi;
5482 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5484 struct ipa_agg_replacement_value *v;
5485 gimple *stmt = gsi_stmt (gsi);
5486 tree rhs, val, t;
5487 HOST_WIDE_INT offset, size;
5488 int index;
5489 bool by_ref, vce;
5491 if (!gimple_assign_load_p (stmt))
5492 continue;
5493 rhs = gimple_assign_rhs1 (stmt);
5494 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5495 continue;
5497 vce = false;
5498 t = rhs;
5499 while (handled_component_p (t))
5501 /* V_C_E can do things like convert an array of integers to one
5502 bigger integer and similar things we do not handle below. */
5503 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5505 vce = true;
5506 break;
5508 t = TREE_OPERAND (t, 0);
5510 if (vce)
5511 continue;
5513 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
5514 &offset, &size, &by_ref))
5515 continue;
5516 for (v = m_aggval; v; v = v->next)
5517 if (v->index == index
5518 && v->offset == offset)
5519 break;
5520 if (!v
5521 || v->by_ref != by_ref
5522 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5523 continue;
5525 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5526 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5528 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5529 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5530 else if (TYPE_SIZE (TREE_TYPE (rhs))
5531 == TYPE_SIZE (TREE_TYPE (v->value)))
5532 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5533 else
5535 if (dump_file)
5537 fprintf (dump_file, " const ");
5538 print_generic_expr (dump_file, v->value, 0);
5539 fprintf (dump_file, " can't be converted to type of ");
5540 print_generic_expr (dump_file, rhs, 0);
5541 fprintf (dump_file, "\n");
5543 continue;
5546 else
5547 val = v->value;
5549 if (dump_file && (dump_flags & TDF_DETAILS))
5551 fprintf (dump_file, "Modifying stmt:\n ");
5552 print_gimple_stmt (dump_file, stmt, 0, 0);
5554 gimple_assign_set_rhs_from_tree (&gsi, val);
5555 update_stmt (stmt);
5557 if (dump_file && (dump_flags & TDF_DETAILS))
5559 fprintf (dump_file, "into:\n ");
5560 print_gimple_stmt (dump_file, stmt, 0, 0);
5561 fprintf (dump_file, "\n");
5564 *m_something_changed = true;
5565 if (maybe_clean_eh_stmt (stmt)
5566 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5567 *m_cfg_changed = true;
5569 return NULL;
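/* At the GIMPLE level, the replacement performed above turns a load such
   as (hypothetical statement)

     x_1 = MEM[(struct s *)p_2(D) + 16B].f;

   whose parameter index and offset match an aggregate replacement with
   value 4, into

     x_1 = 4;

   purging EH edges afterwards if removing the memory access made them
   dead. */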
5572 /* Update alignment of formal parameters as described in
5573 ipcp_transformation_summary. */
5575 static void
5576 ipcp_update_alignments (struct cgraph_node *node)
5578 tree fndecl = node->decl;
5579 tree parm = DECL_ARGUMENTS (fndecl);
5580 tree next_parm = parm;
5581 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5582 if (!ts || vec_safe_length (ts->alignments) == 0)
5583 return;
5584 const vec<ipa_alignment, va_gc> &alignments = *ts->alignments;
5585 unsigned count = alignments.length ();
5587 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5589 if (node->clone.combined_args_to_skip
5590 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5591 continue;
5592 gcc_checking_assert (parm);
5593 next_parm = DECL_CHAIN (parm);
5595 if (!alignments[i].known || !is_gimple_reg (parm))
5596 continue;
5597 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5598 if (!ddef)
5599 continue;
5601 if (dump_file)
5602 fprintf (dump_file, " Adjusting alignment of param %u to %u, "
5603 "misalignment to %u\n", i, alignments[i].align,
5604 alignments[i].misalign);
5606 struct ptr_info_def *pi = get_ptr_info (ddef);
5607 gcc_checking_assert (pi);
5608 unsigned old_align;
5609 unsigned old_misalign;
5610 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5612 if (old_known
5613 && old_align >= alignments[i].align)
5615 if (dump_file)
5616 fprintf (dump_file, " But the alignment was already %u.\n",
5617 old_align);
5618 continue;
5620 set_ptr_info_alignment (pi, alignments[i].align, alignments[i].misalign);
5624 /* Update bits info of formal parameters as described in
5625 ipcp_transformation_summary. */
5627 static void
5628 ipcp_update_bits (struct cgraph_node *node)
5630 tree parm = DECL_ARGUMENTS (node->decl);
5631 tree next_parm = parm;
5632 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5634 if (!ts || vec_safe_length (ts->bits) == 0)
5635 return;
5637 vec<ipa_bits, va_gc> &bits = *ts->bits;
5638 unsigned count = bits.length ();
5640 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5642 if (node->clone.combined_args_to_skip
5643 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5644 continue;
5646 gcc_checking_assert (parm);
5647 next_parm = DECL_CHAIN (parm);
5649 if (!bits[i].known
5650 || !INTEGRAL_TYPE_P (TREE_TYPE (parm))
5651 || !is_gimple_reg (parm))
5652 continue;
5654 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5655 if (!ddef)
5656 continue;
5658 if (dump_file)
5660 fprintf (dump_file, "Adjusting mask for param %u to ", i);
5661 print_hex (bits[i].mask, dump_file);
5662 fprintf (dump_file, "\n");
5665 unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
5666 signop sgn = TYPE_SIGN (TREE_TYPE (ddef));
5668 wide_int nonzero_bits = wide_int::from (bits[i].mask, prec, UNSIGNED)
5669 | wide_int::from (bits[i].value, prec, sgn);
5670 set_nonzero_bits (ddef, nonzero_bits);
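/* A worked example with hypothetical numbers: if mask is 0xf0 (the high
   nibble is unknown) and value is 0x01 (bit 0 is known to be one), then
   nonzero_bits = 0xf0 | 0x01 = 0xf1, which tells later passes that bits
   1 to 3 of the parameter are known to be zero. */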
5674 /* Update value range of formal parameters as described in
5675 ipcp_transformation_summary. */
5677 static void
5678 ipcp_update_vr (struct cgraph_node *node)
5680 tree fndecl = node->decl;
5681 tree parm = DECL_ARGUMENTS (fndecl);
5682 tree next_parm = parm;
5683 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5684 if (!ts || vec_safe_length (ts->m_vr) == 0)
5685 return;
5686 const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
5687 unsigned count = vr.length ();
5689 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5691 if (node->clone.combined_args_to_skip
5692 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5693 continue;
5694 gcc_checking_assert (parm);
5695 next_parm = DECL_CHAIN (parm);
5696 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5698 if (!ddef || !is_gimple_reg (parm))
5699 continue;
5701 if (vr[i].known
5702 && INTEGRAL_TYPE_P (TREE_TYPE (ddef))
5703 && !POINTER_TYPE_P (TREE_TYPE (ddef))
5704 && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
5706 tree type = TREE_TYPE (ddef);
5707 unsigned prec = TYPE_PRECISION (type);
5708 if (dump_file)
5710 fprintf (dump_file, "Setting value range of param %u ", i);
5711 fprintf (dump_file, "%s[",
5712 (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
5713 print_decs (vr[i].min, dump_file);
5714 fprintf (dump_file, ", ");
5715 print_decs (vr[i].max, dump_file);
5716 fprintf (dump_file, "]\n");
5718 set_range_info (ddef, vr[i].type,
5719 wide_int_storage::from (vr[i].min, prec,
5720 TYPE_SIGN (type)),
5721 wide_int_storage::from (vr[i].max, prec,
5722 TYPE_SIGN (type)));
5727 /* IPCP transformation phase applying the propagated aggregate values, alignments, known bits and value ranges. */
5729 unsigned int
5730 ipcp_transform_function (struct cgraph_node *node)
5732 vec<ipa_param_descriptor> descriptors = vNULL;
5733 struct ipa_func_body_info fbi;
5734 struct ipa_agg_replacement_value *aggval;
5735 int param_count;
5736 bool cfg_changed = false, something_changed = false;
5738 gcc_checking_assert (cfun);
5739 gcc_checking_assert (current_function_decl);
5741 if (dump_file)
5742 fprintf (dump_file, "Modification phase of node %s/%i\n",
5743 node->name (), node->order);
5745 ipcp_update_alignments (node);
5746 ipcp_update_bits (node);
5747 ipcp_update_vr (node);
5748 aggval = ipa_get_agg_replacements_for_node (node);
5749 if (!aggval)
5750 return 0;
5751 param_count = count_formal_params (node->decl);
5752 if (param_count == 0)
5753 return 0;
5754 adjust_agg_replacement_values (node, aggval);
5755 if (dump_file)
5756 ipa_dump_agg_replacement_values (dump_file, aggval);
5758 fbi.node = node;
5759 fbi.info = NULL;
5760 fbi.bb_infos = vNULL;
5761 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5762 fbi.param_count = param_count;
5763 fbi.aa_walked = 0;
5765 descriptors.safe_grow_cleared (param_count);
5766 ipa_populate_param_decls (node, descriptors);
5767 calculate_dominance_info (CDI_DOMINATORS);
5768 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5769 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5771 int i;
5772 struct ipa_bb_info *bi;
5773 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5774 free_ipa_bb_info (bi);
5775 fbi.bb_infos.release ();
5776 free_dominance_info (CDI_DOMINATORS);
5777 (*ipcp_transformations)[node->uid].agg_values = NULL;
5778 (*ipcp_transformations)[node->uid].alignments = NULL;
5779 descriptors.release ();
5781 if (!something_changed)
5782 return 0;
5783 else if (cfg_changed)
5784 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5785 else
5786 return TODO_update_ssa_only_virtuals;