/* Interprocedural analyses.
   Copyright (C) 2005-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "calls.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "gimple-pretty-print.h"
#include "params.h"
#include "ipa-utils.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"

/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;
/* Vector of IPA-CP transformation data for each clone.  */
vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
/* Vector where the parameter infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize)
	 || !opt_for_fn (node->decl, flag_ipa_cp);
}
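/* For illustration (a hypothetical example, not part of this file's API), a
   function carrying per-function options such as

     __attribute__ ((optimize ("O0")))
     int keep_me_simple (int x)
     {
       return x + 1;
     }

   has DECL_FUNCTION_SPECIFIC_OPTIMIZATION with optimize == 0, so the
   predicate above returns true and IPA-CP skips analyzing it.  */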
/* Return index of the formal whose tree is PTREE in the function whose
   parameters are described by DESCRIPTORS.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl_or_type == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl_or_type = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
							     true);
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}
/* Dump the I-th formal parameter of the function corresponding to INFO to
   FILE.  Note there is no setter function as the descriptors array is built
   just once using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl_or_type)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl_or_type, 0);
    }
}
/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val, 0);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)), 0);
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f, jump_func->value.pass_through.operand, 0);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value, 0);
		}
	      fprintf (f, "\n");
	    }
	}

      struct ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, "         Context: ");
	  ctx->dump (dump_file);
	}

      if (jump_func->alignment.known)
	fprintf (f, "         Alignment: %u, misalignment: %u\n",
		 jump_func->alignment.align,
		 jump_func->alignment.misalign);
      else
	fprintf (f, "         Unknown alignment\n");

      if (jump_func->bits.known)
	{
	  fprintf (f, "         value: ");
	  print_hex (jump_func->bits.value, f);
	  fprintf (f, ", mask: ");
	  print_hex (jump_func->bits.mask, f);
	  fprintf (f, "\n");
	}
      else
	fprintf (f, "         Unknown bits\n");

      if (jump_func->vr_known)
	{
	  fprintf (f, "         VR  ");
	  fprintf (f, "%s[",
		   (jump_func->m_vr.type == VR_ANTI_RANGE) ? "~" : "");
	  print_decs (jump_func->m_vr.min, f);
	  fprintf (f, ", ");
	  print_decs (jump_func->m_vr.max, f);
	  fprintf (f, "]\n");
	}
      else
	fprintf (f, "         Unknown VR\n");
    }
}
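/* As a rough illustration (exact formatting may differ between versions),
   the dump produced above for a call passing one constant and one
   pass-through argument resembles:

       param 0: CONST: 4
	 Unknown alignment
	 Unknown bits
	 Unknown VR
       param 1: PASS THROUGH: 0, op nop_expr, agg_preserved
	 ...  */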
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s/%i:\n", node->name (),
	   node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
	       xstrdup_for_dump (node->name ()), node->order,
	       xstrdup_for_dump (cs->callee->name ()),
	       cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by_value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}
/* Set JFUNC to be a jump function that knows really nothing.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
  jfunc->alignment.known = false;
  jfunc->bits.known = false;
  jfunc->vr_known = false;
}

/* Set DST to be a copy of another jump function SRC (to be used by jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)

{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
/* Set JFUNC to be a simple pass-through jump function.  */
static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}

/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type of the constructor.

   3) Only afterwards, other stuff such as constructor of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */
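/* For illustration (a hypothetical, roughly lowered sketch; names invented),
   a constructor following the scheme above is conceptually:

     B::B (struct B *this)
     {
       A::A (this);		// 1) construct ancestor sub-objects first
       this->_vptr = &_ZTV1B;	// 2) then store the new VMT pointer(s)
       user_code (this);	// 3) only then run user code, which may
				//    perform virtual calls
     }
*/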
static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return true if the dynamic type may change
   between the beginning of the function and the point where CALL is invoked.

   Generally functions are not allowed to change type of such instances,
   but they call destructors.  We assume that methods cannot destroy the THIS
   pointer.  Also as a special case, constructors and destructors may change
   type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that would require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inline cdtor is actually working on ARG, but we don't have
     an easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know the type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call);
	       block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */
static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gcall *call, struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
						call, jfunc, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
						call, jfunc, 0);
}
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct ipa_func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}
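/* The limit above corresponds to the ipa-max-aa-steps --param; e.g. (an
   illustrative invocation) it can be raised for a translation unit with

     gcc -O2 --param ipa-max-aa-steps=100000 file.c

   at the cost of longer analysis time.  */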
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}

/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have gathered
   so far that does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  if (TREE_READONLY (base))
    return true;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params whose value has not
   been modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
			    vec<ipa_param_descriptor> descriptors,
			    gimple *stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
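/* For illustration (hypothetical gimple, names invented), given a parameter P
   that is not a gimple register, the function above maps the load

     p.0_1 = p;

   back to the index of P, provided alias analysis can show that nothing
   modified P between the start of the function and this statement.  */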
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
/* Return true if we can prove that OP is a memory reference loading
   data from an aggregate passed as a parameter.

   The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it
   returns false if it cannot prove that the value has not been modified before
   the load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will return true
   even if it cannot prove the value has not been modified, in that case it
   will store false to *GUARANTEED_UNMODIFIED, otherwise it will store true
   there.

   FBI and DESCRIPTORS describe parameters of the current function (but FBI
   can be NULL), STMT is the load statement.  If the function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index,
   offset within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor> descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
			bool *by_ref_p, bool *guaranteed_unmodified)
{
  int index;
  HOST_WIDE_INT size, max_size;
  bool reverse;
  tree base
    = get_ref_base_and_extent (op, offset_p, &size, &max_size, &reverse);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  if (guaranteed_unmodified)
	    *guaranteed_unmodified = true;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	 void (*<T2e4>) (struct S *) D.1867;
	 struct S * p.1;

	 <bb 2>:
	 p.1_1 = p;
	 D.1867_2 = p.1_1->f;
	 D.1867_2 ();
	 gdp = &p;
	 */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0)
    {
      bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
      if (!data_preserved && !guaranteed_unmodified)
	return false;

      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      if (guaranteed_unmodified)
	*guaranteed_unmodified = data_preserved;
      return true;
    }
  return false;
}
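/* For illustration (hypothetical source, names invented), with

     struct S { int x; };
     void callee (struct S *p) { use (p->x); }

   the load of p->x matches the MEM_REF case above: *INDEX_P becomes the index
   of P, *OFFSET_P the bit offset of field x, and *BY_REF_P is true.  */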
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
	int a.0;

	a.0_2 = a;
	bar (a.0_2);
      }

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
	int D.2064;

	D.2064_4 = a.1(D) + 4;
	bar (D.2064_4);
      }

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
	int a.0;
	int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);
      }

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);
     }

   INFO is the structure describing individual parameters as accessed in
   different stages of IPA optimizations.  PARMS_AINFO contains the information
   that is only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
	{
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  ipa_set_jf_arith_pass_through (jfunc, index, op2,
					 gimple_assign_rhs_code (stmt));
	}
      else if (gimple_assign_single_p (stmt))
	{
	  bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
	  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size, &reverse);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size, &reverse);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
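/* For reference (a sketch of the Itanium C++ ABI lowering; exact field names
   and types vary between ABIs), a pointer to member function is typically a
   record such as

     struct
     {
       int (*__pfn) ();	     // method pointer or vtable index
       ptrdiff_t __delta;    // adjustment of the this pointer
     };

   which is the shape the predicate above tests for.  */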
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}
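/* E.g. (an invented chain of copies), for

     b_2 = a_1;
     c_3 = b_2;

   get_ssa_def_if_simple_copy (c_3) walks up through b_2 and yields a_1.  */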
/* Simple linked list, describing known contents of an aggregate before a
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};

/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
				HOST_WIDE_INT lhs_offset,
				HOST_WIDE_INT lhs_size,
				bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
	return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	/* We already know this value is subsequently overwritten with
	   something else.  */
	*already_there = true;
      else
	/* Otherwise this is a partial overlap which we cannot
	   represent.  */
	return NULL;
    }
  return p;
}

/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int const_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
	{
	  struct ipa_agg_jf_item item;
	  item.offset = list->offset - arg_offset;
	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	  item.value = unshare_expr_without_location (list->constant);
	  jfunc->agg.items->quick_push (item);
	}
      list = list->next;
    }
}
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */

static void
determine_locally_known_aggregate_parts (gcall *call, tree arg,
					 tree arg_type,
					 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  if (PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS) == 0)
    return;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  HOST_WIDE_INT arg_max_size;
	  bool reverse;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					      &arg_max_size, &reverse);
	  if (arg_max_size == -1
	      || arg_max_size != arg_size
	      || arg_offset < 0)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;
      bool reverse;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					  &arg_max_size, &reverse);
      if (arg_max_size == -1
	  || arg_max_size != arg_size
	  || arg_offset < 0)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
     describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple *stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;
      bool reverse;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
	continue;
      if (!gimple_assign_single_p (stmt))
	break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
	  || TREE_CODE (lhs) == BIT_FIELD_REF
	  || contains_bitfld_component_ref_p (lhs))
	break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
					  &lhs_max_size, &reverse);
      if (lhs_max_size == -1
	  || lhs_max_size != lhs_size)
	break;

      if (check_ref)
	{
	  if (TREE_CODE (lhs_base) != MEM_REF
	      || TREE_OPERAND (lhs_base, 0) != arg_base
	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	    break;
	}
      else if (lhs_base != arg_base)
	{
	  if (DECL_P (lhs_base))
	    continue;
	  else
	    break;
	}

      bool already_there = false;
      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
					  &already_there);
      if (!p)
	break;
      if (already_there)
	continue;

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
	{
	  n->constant = rhs;
	  const_count++;
	}
      else
	n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}
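/* For illustration (a made-up caller), the backward walk above recognizes
   patterns such as

     struct S s;
     s.a = 1;
     s.b = 16;
     callee (&s);

   and records that the aggregate passed to CALLEE contains the constants 1
   and 16 at the respective field offsets.  */

/* Return the type of the Ith formal parameter of the callee of call graph
   edge E, or NULL if it cannot be determined from the callee declaration or
   the call statement's function type.  */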
static tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  struct ipa_polymorphic_call_context context (cs->caller->decl,
						       arg, cs->call_stmt,
						       &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  unsigned HOST_WIDE_INT hwi_bitpos;
	  unsigned align;

	  get_pointer_alignment_1 (arg, &align, &hwi_bitpos);
	  if (align > BITS_PER_UNIT
	      && align % BITS_PER_UNIT == 0
	      && hwi_bitpos % BITS_PER_UNIT == 0)
	    {
	      jfunc->alignment.known = true;
	      jfunc->alignment.align = align / BITS_PER_UNIT;
	      jfunc->alignment.misalign = hwi_bitpos / BITS_PER_UNIT;
	    }
	  else
	    gcc_assert (!jfunc->alignment.known);
	  gcc_assert (!jfunc->vr_known);
	}
      else
	{
	  wide_int min, max;
	  value_range_type type;
	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && (type = get_range_info (arg, &min, &max))
	      && (type == VR_RANGE || type == VR_ANTI_RANGE))
	    {
	      value_range vr;

	      vr.type = type;
	      vr.min = wide_int_to_tree (TREE_TYPE (arg), min);
	      vr.max = wide_int_to_tree (TREE_TYPE (arg), max);
	      vr.equiv = NULL;
	      extract_range_from_unary_expr (&jfunc->m_vr,
					     NOP_EXPR,
					     param_type,
					     &vr, TREE_TYPE (arg));
	      if (jfunc->m_vr.type == VR_RANGE
		  || jfunc->m_vr.type == VR_ANTI_RANGE)
		jfunc->vr_known = true;
	      else
		jfunc->vr_known = false;
	    }
	  else
	    gcc_assert (!jfunc->vr_known);
	  gcc_assert (!jfunc->alignment.known);
	}

      if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
	  && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
	{
	  jfunc->bits.known = true;

	  if (TREE_CODE (arg) == SSA_NAME)
	    {
	      jfunc->bits.value = 0;
	      jfunc->bits.mask = widest_int::from (get_nonzero_bits (arg),
						   TYPE_SIGN (TREE_TYPE (arg)));
	    }
	  else
	    {
	      jfunc->bits.value = wi::to_widest (arg);
	      jfunc->bits.mask = 0;
	    }
	}
      else
	gcc_assert (!jfunc->bits.known);

      if (is_gimple_ip_invariant (arg)
	  || (TREE_CODE (arg) == VAR_DECL
	      && is_global_var (arg)
	      && TREE_READONLY (arg)))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >= 0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
		}
	    }
	  else
	    {
	      gimple *stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call,
						    as_a <gphi *> (stmt));
	    }
	}

      /* If ARG is a pointer, we cannot use its type to determine the type of
	 aggregate passed (because type conversions are ignored in gimple).
	 Usually we can safely get the type from the function declaration, but
	 in case of K&R prototypes or variadic functions we can try our luck
	 with the type of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object, we
	 may better work out the type based on the memory stores we find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
    }
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}
/* Compute jump functions for all edges - both direct and indirect - outgoing
   from BB.  */

static void
ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi,
				   basic_block bb)
{
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  int i;
  struct cgraph_edge *cs;

  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
    {
      struct cgraph_node *callee = cs->callee;

      if (callee)
	{
	  callee->ultimate_alias_target ();
	  /* We do not need to bother analyzing calls to unknown functions
	     unless they may become known during lto/whopr.  */
	  if (!callee->definition && !flag_lto)
	    continue;
	}
      ipa_compute_jump_functions_for_edge (fbi, cs);
    }
}

/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}

/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}

/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index,
		     gcall *stmt)
{
  struct cgraph_edge *cs;

  cs = node->get_edge (stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  cs->indirect_info->guaranteed_unmodified = 0;
  return cs;
}
1923 /* Analyze the CALL and examine uses of formal parameters of the caller
1924 FBI->node (described by FBI->info). Currently it checks
1926 whether the call calls a pointer that is a formal parameter and if so, the
1927 parameter is marked with the called flag and an indirect call graph edge
1928 describing the call is created. This is very simple for ordinary pointers
1929 represented in SSA but not-so-nice when it comes to member pointers. The
1930 ugly part of this function does nothing more than trying to match the
1931 pattern of such a call. An example of such a pattern is the gimple dump
1932 below; the call is on the last line:
1934 <bb 2>:
1935 f$__delta_5 = f.__delta;
1936 f$__pfn_24 = f.__pfn;
1938 or
1939 <bb 2>:
1940 f$__delta_5 = MEM[(struct *)&f];
1941 f$__pfn_24 = MEM[(struct *)&f + 4B];
1943 and a few lines below:
1945 <bb 5>
1946 D.2496_3 = (int) f$__pfn_24;
1947 D.2497_4 = D.2496_3 & 1;
1948 if (D.2497_4 != 0)
1949 goto <bb 3>;
1950 else
1951 goto <bb 4>;
1953 <bb 6>:
1954 D.2500_7 = (unsigned int) f$__delta_5;
1955 D.2501_8 = &S + D.2500_7;
1956 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1957 D.2503_10 = *D.2502_9;
1958 D.2504_12 = f$__pfn_24 + -1;
1959 D.2505_13 = (unsigned int) D.2504_12;
1960 D.2506_14 = D.2503_10 + D.2505_13;
1961 D.2507_15 = *D.2506_14;
1962 iftmp.11_16 = (String:: *) D.2507_15;
1964 <bb 7>:
1965 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1966 D.2500_19 = (unsigned int) f$__delta_5;
1967 D.2508_20 = &S + D.2500_19;
1968 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1970 Such patterns are results of simple calls to a member pointer:
1972 int doprinting (int (MyString::* f)(int) const)
1974 MyString S ("somestring");
1976 return (S.*f)(4);
1979 Moreover, the function also looks for called pointers loaded from aggregates
1980 passed by value or reference. */
1982 static void
1983 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
1984 tree target)
1986 struct ipa_node_params *info = fbi->info;
1987 HOST_WIDE_INT offset;
1988 bool by_ref;
1990 if (SSA_NAME_IS_DEFAULT_DEF (target))
1992 tree var = SSA_NAME_VAR (target);
1993 int index = ipa_get_param_decl_index (info, var);
1994 if (index >= 0)
1995 ipa_note_param_call (fbi->node, index, call);
1996 return;
1999 int index;
2000 gimple *def = SSA_NAME_DEF_STMT (target);
2001 bool guaranteed_unmodified;
2002 if (gimple_assign_single_p (def)
2003 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2004 gimple_assign_rhs1 (def), &index, &offset,
2005 NULL, &by_ref, &guaranteed_unmodified))
2007 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2008 cs->indirect_info->offset = offset;
2009 cs->indirect_info->agg_contents = 1;
2010 cs->indirect_info->by_ref = by_ref;
2011 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
2012 return;
2015 /* Now we need to try to match the complex pattern of calling a member
2016 pointer. */
2017 if (gimple_code (def) != GIMPLE_PHI
2018 || gimple_phi_num_args (def) != 2
2019 || !POINTER_TYPE_P (TREE_TYPE (target))
2020 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2021 return;
2023 /* First, we need to check whether one of these is a load from a member
2024 pointer that is a parameter to this function. */
2025 tree n1 = PHI_ARG_DEF (def, 0);
2026 tree n2 = PHI_ARG_DEF (def, 1);
2027 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2028 return;
2029 gimple *d1 = SSA_NAME_DEF_STMT (n1);
2030 gimple *d2 = SSA_NAME_DEF_STMT (n2);
2032 tree rec;
2033 basic_block bb, virt_bb;
2034 basic_block join = gimple_bb (def);
2035 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2037 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2038 return;
2040 bb = EDGE_PRED (join, 0)->src;
2041 virt_bb = gimple_bb (d2);
2043 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2045 bb = EDGE_PRED (join, 1)->src;
2046 virt_bb = gimple_bb (d1);
2048 else
2049 return;
2051 /* Second, we need to check that the basic blocks are laid out in the way
2052 corresponding to the pattern. */
2054 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2055 || single_pred (virt_bb) != bb
2056 || single_succ (virt_bb) != join)
2057 return;
2059 /* Third, let's see that the branching is done depending on the least
2060 significant bit of the pfn. */
2062 gimple *branch = last_stmt (bb);
2063 if (!branch || gimple_code (branch) != GIMPLE_COND)
2064 return;
2066 if ((gimple_cond_code (branch) != NE_EXPR
2067 && gimple_cond_code (branch) != EQ_EXPR)
2068 || !integer_zerop (gimple_cond_rhs (branch)))
2069 return;
2071 tree cond = gimple_cond_lhs (branch);
2072 if (!ipa_is_ssa_with_stmt_def (cond))
2073 return;
2075 def = SSA_NAME_DEF_STMT (cond);
2076 if (!is_gimple_assign (def)
2077 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2078 || !integer_onep (gimple_assign_rhs2 (def)))
2079 return;
2081 cond = gimple_assign_rhs1 (def);
2082 if (!ipa_is_ssa_with_stmt_def (cond))
2083 return;
2085 def = SSA_NAME_DEF_STMT (cond);
2087 if (is_gimple_assign (def)
2088 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2090 cond = gimple_assign_rhs1 (def);
2091 if (!ipa_is_ssa_with_stmt_def (cond))
2092 return;
2093 def = SSA_NAME_DEF_STMT (cond);
2096 tree rec2;
2097 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2098 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2099 == ptrmemfunc_vbit_in_delta),
2100 NULL);
2101 if (rec != rec2)
2102 return;
2104 index = ipa_get_param_decl_index (info, rec);
2105 if (index >= 0
2106 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2108 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2109 cs->indirect_info->offset = offset;
2110 cs->indirect_info->agg_contents = 1;
2111 cs->indirect_info->member_ptr = 1;
2112 cs->indirect_info->guaranteed_unmodified = 1;
2115 return;
2118 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2119 object referenced in the expression is a formal parameter of the caller
2120 FBI->node (described by FBI->info), create a call note for the
2121 statement. */
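/* A minimal illustrative source pattern for this case:

     struct A { virtual void foo (); };
     void bar (A *a) { a->foo (); }

   Here the OBJ_TYPE_REF object is the default definition of parameter a,
   so a polymorphic indirect call note is created for parameter 0 with a
   zero ancestor offset.  */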
2123 static void
2124 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2125 gcall *call, tree target)
2127 tree obj = OBJ_TYPE_REF_OBJECT (target);
2128 int index;
2129 HOST_WIDE_INT anc_offset;
2131 if (!flag_devirtualize)
2132 return;
2134 if (TREE_CODE (obj) != SSA_NAME)
2135 return;
2137 struct ipa_node_params *info = fbi->info;
2138 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2140 struct ipa_jump_func jfunc;
2141 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2142 return;
2144 anc_offset = 0;
2145 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2146 gcc_assert (index >= 0);
2147 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2148 call, &jfunc))
2149 return;
2151 else
2153 struct ipa_jump_func jfunc;
2154 gimple *stmt = SSA_NAME_DEF_STMT (obj);
2155 tree expr;
2157 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2158 if (!expr)
2159 return;
2160 index = ipa_get_param_decl_index (info,
2161 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2162 gcc_assert (index >= 0);
2163 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2164 call, &jfunc, anc_offset))
2165 return;
2168 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2169 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2170 ii->offset = anc_offset;
2171 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2172 ii->otr_type = obj_type_ref_class (target);
2173 ii->polymorphic = 1;
2176 /* Analyze a call statement CALL to determine whether and how it utilizes
2177 formal parameters of the caller (described by FBI->info). FBI also holds
2178 intermediate information about each formal parameter. */
2180 static void
2181 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2183 tree target = gimple_call_fn (call);
2185 if (!target
2186 || (TREE_CODE (target) != SSA_NAME
2187 && !virtual_method_call_p (target)))
2188 return;
2190 struct cgraph_edge *cs = fbi->node->get_edge (call);
2191 /* If we previously turned the call into a direct call, there is
2192 no need to analyze. */
2193 if (cs && !cs->indirect_unknown_callee)
2194 return;
2196 if (cs->indirect_info->polymorphic && flag_devirtualize)
2198 tree instance;
2199 tree target = gimple_call_fn (call);
2200 ipa_polymorphic_call_context context (current_function_decl,
2201 target, call, &instance);
2203 gcc_checking_assert (cs->indirect_info->otr_type
2204 == obj_type_ref_class (target));
2205 gcc_checking_assert (cs->indirect_info->otr_token
2206 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2208 cs->indirect_info->vptr_changed
2209 = !context.get_dynamic_type (instance,
2210 OBJ_TYPE_REF_OBJECT (target),
2211 obj_type_ref_class (target), call);
2212 cs->indirect_info->context = context;
2215 if (TREE_CODE (target) == SSA_NAME)
2216 ipa_analyze_indirect_call_uses (fbi, call, target);
2217 else if (virtual_method_call_p (target))
2218 ipa_analyze_virtual_call_uses (fbi, call, target);
2222 /* Analyze the call statement STMT with respect to formal parameters (described
2223 in FBI->info) of the caller given by FBI->node. Currently it only checks whether
2224 formal parameters are called. */
2226 static void
2227 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2229 if (is_gimple_call (stmt))
2230 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2233 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2234 If OP is a parameter declaration, mark it as used in the info structure
2235 passed in DATA. */
2237 static bool
2238 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2240 struct ipa_node_params *info = (struct ipa_node_params *) data;
2242 op = get_base_address (op);
2243 if (op
2244 && TREE_CODE (op) == PARM_DECL)
2246 int index = ipa_get_param_decl_index (info, op);
2247 gcc_assert (index >= 0);
2248 ipa_set_param_used (info, index, true);
2251 return false;
2254 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2255 the findings in various structures of the associated ipa_node_params
2256 structure, such as parameter flags, notes etc. FBI holds various data about
2257 the function being analyzed. */
2259 static void
2260 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2262 gimple_stmt_iterator gsi;
2263 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2265 gimple *stmt = gsi_stmt (gsi);
2267 if (is_gimple_debug (stmt))
2268 continue;
2270 ipa_analyze_stmt_uses (fbi, stmt);
2271 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2272 visit_ref_for_mod_analysis,
2273 visit_ref_for_mod_analysis,
2274 visit_ref_for_mod_analysis);
2276 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2277 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2278 visit_ref_for_mod_analysis,
2279 visit_ref_for_mod_analysis,
2280 visit_ref_for_mod_analysis);
2283 /* Calculate controlled uses of parameters of NODE. */
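/* An illustrative example of what is computed below: in

     extern void helper (void (*) (void));
     void wrap (void (*cb) (void)) { helper (cb); }

   the only use of cb is as an operand of a call statement, so cb gets one
   controlled use, whereas storing cb to memory or using it in arithmetic
   would make the count IPA_UNDESCRIBED_USE.  */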
2285 static void
2286 ipa_analyze_controlled_uses (struct cgraph_node *node)
2288 struct ipa_node_params *info = IPA_NODE_REF (node);
2290 for (int i = 0; i < ipa_get_param_count (info); i++)
2292 tree parm = ipa_get_param (info, i);
2293 int controlled_uses = 0;
2295 /* For SSA regs see if parameter is used. For non-SSA we compute
2296 the flag during modification analysis. */
2297 if (is_gimple_reg (parm))
2299 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2300 parm);
2301 if (ddef && !has_zero_uses (ddef))
2303 imm_use_iterator imm_iter;
2304 use_operand_p use_p;
2306 ipa_set_param_used (info, i, true);
2307 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2308 if (!is_gimple_call (USE_STMT (use_p)))
2310 if (!is_gimple_debug (USE_STMT (use_p)))
2312 controlled_uses = IPA_UNDESCRIBED_USE;
2313 break;
2316 else
2317 controlled_uses++;
2319 else
2320 controlled_uses = 0;
2322 else
2323 controlled_uses = IPA_UNDESCRIBED_USE;
2324 ipa_set_controlled_uses (info, i, controlled_uses);
2328 /* Free stuff in BI. */
2330 static void
2331 free_ipa_bb_info (struct ipa_bb_info *bi)
2333 bi->cg_edges.release ();
2334 bi->param_aa_statuses.release ();
2337 /* Dominator walker driving the analysis. */
2339 class analysis_dom_walker : public dom_walker
2341 public:
2342 analysis_dom_walker (struct ipa_func_body_info *fbi)
2343 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2345 virtual edge before_dom_children (basic_block);
2347 private:
2348 struct ipa_func_body_info *m_fbi;
2351 edge
2352 analysis_dom_walker::before_dom_children (basic_block bb)
2354 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2355 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2356 return NULL;
2359 /* Release body info FBI. */
2361 void
2362 ipa_release_body_info (struct ipa_func_body_info *fbi)
2364 int i;
2365 struct ipa_bb_info *bi;
2367 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2368 free_ipa_bb_info (bi);
2369 fbi->bb_infos.release ();
2372 /* Initialize the array describing properties of formal parameters
2373 of NODE, analyze their uses and compute jump functions associated
2374 with actual arguments of calls from within NODE. */
2376 void
2377 ipa_analyze_node (struct cgraph_node *node)
2379 struct ipa_func_body_info fbi;
2380 struct ipa_node_params *info;
2382 ipa_check_create_node_params ();
2383 ipa_check_create_edge_args ();
2384 info = IPA_NODE_REF (node);
2386 if (info->analysis_done)
2387 return;
2388 info->analysis_done = 1;
2390 if (ipa_func_spec_opts_forbid_analysis_p (node))
2392 for (int i = 0; i < ipa_get_param_count (info); i++)
2394 ipa_set_param_used (info, i, true);
2395 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2397 return;
2400 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2401 push_cfun (func);
2402 calculate_dominance_info (CDI_DOMINATORS);
2403 ipa_initialize_node_params (node);
2404 ipa_analyze_controlled_uses (node);
2406 fbi.node = node;
2407 fbi.info = IPA_NODE_REF (node);
2408 fbi.bb_infos = vNULL;
2409 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2410 fbi.param_count = ipa_get_param_count (info);
2411 fbi.aa_walked = 0;
2413 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2415 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2416 bi->cg_edges.safe_push (cs);
2419 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2421 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2422 bi->cg_edges.safe_push (cs);
2425 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2427 ipa_release_body_info (&fbi);
2428 free_dominance_info (CDI_DOMINATORS);
2429 pop_cfun ();
2432 /* Update the jump functions associated with call graph edge E when the call
2433 graph edge CS is being inlined, assuming that E->caller is already (possibly
2434 indirectly) inlined into CS->callee and that E has not been inlined. */
2436 static void
2437 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2438 struct cgraph_edge *e)
2440 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2441 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2442 int count = ipa_get_cs_argument_count (args);
2443 int i;
2445 for (i = 0; i < count; i++)
2447 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2448 struct ipa_polymorphic_call_context *dst_ctx
2449 = ipa_get_ith_polymorhic_call_context (args, i);
2451 if (dst->type == IPA_JF_ANCESTOR)
2453 struct ipa_jump_func *src;
2454 int dst_fid = dst->value.ancestor.formal_id;
2455 struct ipa_polymorphic_call_context *src_ctx
2456 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2458 /* Variable number of arguments can cause havoc if we try to access
2459 one that does not exist in the inlined edge. So make sure we
2460 don't. */
2461 if (dst_fid >= ipa_get_cs_argument_count (top))
2463 ipa_set_jf_unknown (dst);
2464 continue;
2467 src = ipa_get_ith_jump_func (top, dst_fid);
2469 if (src_ctx && !src_ctx->useless_p ())
2471 struct ipa_polymorphic_call_context ctx = *src_ctx;
2473 /* TODO: Make type preserved safe WRT contexts. */
2474 if (!ipa_get_jf_ancestor_type_preserved (dst))
2475 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2476 ctx.offset_by (dst->value.ancestor.offset);
2477 if (!ctx.useless_p ())
2479 if (!dst_ctx)
2481 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2482 count);
2483 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2486 dst_ctx->combine_with (ctx);
2490 if (src->agg.items
2491 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2493 struct ipa_agg_jf_item *item;
2494 int j;
2496 /* Currently we do not produce clobber aggregate jump functions,
2497 replace with merging when we do. */
2498 gcc_assert (!dst->agg.items);
2500 dst->agg.items = vec_safe_copy (src->agg.items);
2501 dst->agg.by_ref = src->agg.by_ref;
2502 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2503 item->offset -= dst->value.ancestor.offset;
2506 if (src->type == IPA_JF_PASS_THROUGH
2507 && src->value.pass_through.operation == NOP_EXPR)
2509 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2510 dst->value.ancestor.agg_preserved &=
2511 src->value.pass_through.agg_preserved;
2513 else if (src->type == IPA_JF_ANCESTOR)
2515 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2516 dst->value.ancestor.offset += src->value.ancestor.offset;
2517 dst->value.ancestor.agg_preserved &=
2518 src->value.ancestor.agg_preserved;
2520 else
2521 ipa_set_jf_unknown (dst);
2523 else if (dst->type == IPA_JF_PASS_THROUGH)
2525 struct ipa_jump_func *src;
2526 /* We must check range due to calls with variable number of arguments
2527 and we cannot combine jump functions with operations. */
2528 if (dst->value.pass_through.operation == NOP_EXPR
2529 && (dst->value.pass_through.formal_id
2530 < ipa_get_cs_argument_count (top)))
2532 int dst_fid = dst->value.pass_through.formal_id;
2533 src = ipa_get_ith_jump_func (top, dst_fid);
2534 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2535 struct ipa_polymorphic_call_context *src_ctx
2536 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2538 if (src_ctx && !src_ctx->useless_p ())
2540 struct ipa_polymorphic_call_context ctx = *src_ctx;
2542 /* TODO: Make type preserved safe WRT contexts. */
2543 if (!ipa_get_jf_pass_through_type_preserved (dst))
2544 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2545 if (!ctx.useless_p ())
2547 if (!dst_ctx)
2549 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2550 count);
2551 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2553 dst_ctx->combine_with (ctx);
2556 switch (src->type)
2558 case IPA_JF_UNKNOWN:
2559 ipa_set_jf_unknown (dst);
2560 break;
2561 case IPA_JF_CONST:
2562 ipa_set_jf_cst_copy (dst, src);
2563 break;
2565 case IPA_JF_PASS_THROUGH:
2567 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2568 enum tree_code operation;
2569 operation = ipa_get_jf_pass_through_operation (src);
2571 if (operation == NOP_EXPR)
2573 bool agg_p;
2574 agg_p = dst_agg_p
2575 && ipa_get_jf_pass_through_agg_preserved (src);
2576 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2578 else
2580 tree operand = ipa_get_jf_pass_through_operand (src);
2581 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2582 operation);
2584 break;
2586 case IPA_JF_ANCESTOR:
2588 bool agg_p;
2589 agg_p = dst_agg_p
2590 && ipa_get_jf_ancestor_agg_preserved (src);
2591 ipa_set_ancestor_jf (dst,
2592 ipa_get_jf_ancestor_offset (src),
2593 ipa_get_jf_ancestor_formal_id (src),
2594 agg_p);
2595 break;
2597 default:
2598 gcc_unreachable ();
2601 if (src->agg.items
2602 && (dst_agg_p || !src->agg.by_ref))
2604 /* Currently we do not produce clobber aggregate jump
2605 functions, replace with merging when we do. */
2606 gcc_assert (!dst->agg.items);
2608 dst->agg.by_ref = src->agg.by_ref;
2609 dst->agg.items = vec_safe_copy (src->agg.items);
2612 else
2613 ipa_set_jf_unknown (dst);
2618 /* If TARGET is an addr_expr of a function declaration, make it the
2619 (SPECULATIVE) destination of an indirect edge IE and return the edge.
2620 Otherwise, return NULL. */
2622 struct cgraph_edge *
2623 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2624 bool speculative)
2626 struct cgraph_node *callee;
2627 struct inline_edge_summary *es = inline_edge_summary (ie);
2628 bool unreachable = false;
2630 if (TREE_CODE (target) == ADDR_EXPR)
2631 target = TREE_OPERAND (target, 0);
2632 if (TREE_CODE (target) != FUNCTION_DECL)
2634 target = canonicalize_constructor_val (target, NULL);
2635 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2637 /* Member pointer call that goes through a VMT lookup. */
2638 if (ie->indirect_info->member_ptr
2639 /* Or if target is not an invariant expression and we do not
2640 know if it will evaluate to a function at runtime.
2641 This can happen when folding through &VAR, where &VAR
2642 is IP invariant, but VAR itself is not.
2644 TODO: Revisit this when GCC 5 is branched. It seems that
2645 member_ptr check is not needed and that we may try to fold
2646 the expression and see if VAR is readonly. */
2647 || !is_gimple_ip_invariant (target))
2649 if (dump_enabled_p ())
2651 location_t loc = gimple_location_safe (ie->call_stmt);
2652 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2653 "discovered direct call non-invariant "
2654 "%s/%i\n",
2655 ie->caller->name (), ie->caller->order);
2657 return NULL;
2661 if (dump_enabled_p ())
2663 location_t loc = gimple_location_safe (ie->call_stmt);
2664 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2665 "discovered direct call to non-function in %s/%i, "
2666 "making it __builtin_unreachable\n",
2667 ie->caller->name (), ie->caller->order);
2670 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2671 callee = cgraph_node::get_create (target);
2672 unreachable = true;
2674 else
2675 callee = cgraph_node::get (target);
2677 else
2678 callee = cgraph_node::get (target);
2680 /* Because may-edges are not explicitly represented and the vtable may be
2681 external, we may create the first reference to the object in the unit. */
2682 if (!callee || callee->global.inlined_to)
2685 /* We had better make sure we can refer to it.
2686 In the case of static functions we are out of luck, since we already
2687 removed their bodies. In the case of public functions we may or may
2688 not introduce the reference. */
2689 if (!canonicalize_constructor_val (target, NULL)
2690 || !TREE_PUBLIC (target))
2692 if (dump_file)
2693 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2694 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2695 xstrdup_for_dump (ie->caller->name ()),
2696 ie->caller->order,
2697 xstrdup_for_dump (ie->callee->name ()),
2698 ie->callee->order);
2699 return NULL;
2701 callee = cgraph_node::get_create (target);
2704 /* If the edge is already speculated. */
2705 if (speculative && ie->speculative)
2707 struct cgraph_edge *e2;
2708 struct ipa_ref *ref;
2709 ie->speculative_call_info (e2, ie, ref);
2710 if (e2->callee->ultimate_alias_target ()
2711 != callee->ultimate_alias_target ())
2713 if (dump_file)
2714 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2715 "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
2716 xstrdup_for_dump (ie->caller->name ()),
2717 ie->caller->order,
2718 xstrdup_for_dump (callee->name ()),
2719 callee->order,
2720 xstrdup_for_dump (e2->callee->name ()),
2721 e2->callee->order);
2723 else
2725 if (dump_file)
2726 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2727 "(%s/%i -> %s/%i) this agree with previous speculation.\n",
2728 xstrdup_for_dump (ie->caller->name ()),
2729 ie->caller->order,
2730 xstrdup_for_dump (callee->name ()),
2731 callee->order);
2733 return NULL;
2736 if (!dbg_cnt (devirt))
2737 return NULL;
2739 ipa_check_create_node_params ();
2741 /* We cannot make edges to inline clones. It is a bug that someone removed
2742 the cgraph node too early. */
2743 gcc_assert (!callee->global.inlined_to);
2745 if (dump_file && !unreachable)
2747 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2748 "(%s/%i -> %s/%i), for stmt ",
2749 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2750 speculative ? "speculative" : "known",
2751 xstrdup_for_dump (ie->caller->name ()),
2752 ie->caller->order,
2753 xstrdup_for_dump (callee->name ()),
2754 callee->order);
2755 if (ie->call_stmt)
2756 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2757 else
2758 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2760 if (dump_enabled_p ())
2762 location_t loc = gimple_location_safe (ie->call_stmt);
2764 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2765 "converting indirect call in %s to direct call to %s\n",
2766 ie->caller->name (), callee->name ());
2768 if (!speculative)
2770 struct cgraph_edge *orig = ie;
2771 ie = ie->make_direct (callee);
2772 /* If we resolved speculative edge the cost is already up to date
2773 for direct call (adjusted by inline_edge_duplication_hook). */
2774 if (ie == orig)
2776 es = inline_edge_summary (ie);
2777 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2778 - eni_size_weights.call_cost);
2779 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2780 - eni_time_weights.call_cost);
2783 else
2785 if (!callee->can_be_discarded_p ())
2787 cgraph_node *alias;
2788 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2789 if (alias)
2790 callee = alias;
2792 /* make_speculative will update ie's cost to direct call cost. */
2793 ie = ie->make_speculative
2794 (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
2797 return ie;
2800 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
2801 CONSTRUCTOR and return it. Return NULL if the search fails for some
2802 reason. */
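/* A purely illustrative example: for

     static const int table[3] = { 10, 20, 30 };

   a query with REQ_OFFSET 64 into DECL_INITIAL (table) returns the
   element 30 on a target with 32-bit int, since index 2 starts at bit
   offset 2 * 32.  */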
2804 static tree
2805 find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
2807 tree type = TREE_TYPE (constructor);
2808 if (TREE_CODE (type) != ARRAY_TYPE
2809 && TREE_CODE (type) != RECORD_TYPE)
2810 return NULL;
2812 unsigned ix;
2813 tree index, val;
2814 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
2816 HOST_WIDE_INT elt_offset;
2817 if (TREE_CODE (type) == ARRAY_TYPE)
2819 offset_int off;
2820 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
2821 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
2823 if (index)
2825 off = wi::to_offset (index);
2826 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
2828 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
2829 gcc_assert (TREE_CODE (low_bound) == INTEGER_CST);
2830 off = wi::sext (off - wi::to_offset (low_bound),
2831 TYPE_PRECISION (TREE_TYPE (index)));
2833 off *= wi::to_offset (unit_size);
2835 else
2836 off = wi::to_offset (unit_size) * ix;
2838 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
2839 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
2840 continue;
2841 elt_offset = off.to_shwi ();
2843 else if (TREE_CODE (type) == RECORD_TYPE)
2845 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
2846 if (DECL_BIT_FIELD (index))
2847 continue;
2848 elt_offset = int_bit_position (index);
2850 else
2851 gcc_unreachable ();
2853 if (elt_offset > req_offset)
2854 return NULL;
2856 if (TREE_CODE (val) == CONSTRUCTOR)
2857 return find_constructor_constant_at_offset (val,
2858 req_offset - elt_offset);
2860 if (elt_offset == req_offset
2861 && is_gimple_reg_type (TREE_TYPE (val))
2862 && is_gimple_ip_invariant (val))
2863 return val;
2865 return NULL;
2868 /* Check whether SCALAR could be used to look up an aggregate interprocedural
2869 invariant from a static constructor and if so, return it. Otherwise return
2870 NULL. */
2872 static tree
2873 ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
2875 if (by_ref)
2877 if (TREE_CODE (scalar) != ADDR_EXPR)
2878 return NULL;
2879 scalar = TREE_OPERAND (scalar, 0);
2882 if (TREE_CODE (scalar) != VAR_DECL
2883 || !is_global_var (scalar)
2884 || !TREE_READONLY (scalar)
2885 || !DECL_INITIAL (scalar)
2886 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
2887 return NULL;
2889 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
2892 /* Retrieve value from aggregate jump function AGG or static initializer of
2893 SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
2894 none. BY_REF specifies whether the value has to be passed by reference or
2895 by value. If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
2896 to is set to true if the value comes from an initializer of a constant. */
2898 tree
2899 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar,
2900 HOST_WIDE_INT offset, bool by_ref,
2901 bool *from_global_constant)
2903 struct ipa_agg_jf_item *item;
2904 int i;
2906 if (scalar)
2908 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
2909 if (res)
2911 if (from_global_constant)
2912 *from_global_constant = true;
2913 return res;
2917 if (!agg
2918 || by_ref != agg->by_ref)
2919 return NULL;
2921 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2922 if (item->offset == offset)
2924 /* Currently we do not have clobber values, return NULL for them once
2925 we do. */
2926 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2927 if (from_global_constant)
2928 *from_global_constant = false;
2929 return item->value;
2931 return NULL;
2934 /* Remove a reference to SYMBOL from the list of references of a node given by
2935 reference description RDESC. Return true if the reference has been
2936 successfully found and removed. */
2938 static bool
2939 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2941 struct ipa_ref *to_del;
2942 struct cgraph_edge *origin;
2944 origin = rdesc->cs;
2945 if (!origin)
2946 return false;
2947 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
2948 origin->lto_stmt_uid);
2949 if (!to_del)
2950 return false;
2952 to_del->remove_reference ();
2953 if (dump_file)
2954 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2955 xstrdup_for_dump (origin->caller->name ()),
2956 origin->caller->order, xstrdup_for_dump (symbol->name ()));
2957 return true;
2960 /* If JFUNC has a reference description with refcount different from
2961 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2962 NULL. JFUNC must be a constant jump function. */
2964 static struct ipa_cst_ref_desc *
2965 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2967 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2968 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2969 return rdesc;
2970 else
2971 return NULL;
2974 /* If the value of constant jump function JFUNC is an address of a function
2975 declaration, return the associated call graph node. Otherwise return
2976 NULL. */
2978 static cgraph_node *
2979 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2981 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2982 tree cst = ipa_get_jf_constant (jfunc);
2983 if (TREE_CODE (cst) != ADDR_EXPR
2984 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2985 return NULL;
2987 return cgraph_node::get (TREE_OPERAND (cst, 0));
2991 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2992 refcount and, if it hits zero, remove the reference to the constant's symbol
2993 from the caller of the edge specified in the rdesc. Return false if either
2994 the symbol or the reference could not be found, otherwise return true. */
2996 static bool
2997 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2999 struct ipa_cst_ref_desc *rdesc;
3000 if (jfunc->type == IPA_JF_CONST
3001 && (rdesc = jfunc_rdesc_usable (jfunc))
3002 && --rdesc->refcount == 0)
3004 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3005 if (!symbol)
3006 return false;
3008 return remove_described_reference (symbol, rdesc);
3010 return true;
3013 /* Try to find a destination for indirect edge IE that corresponds to a simple
3014 call or a call of a member function pointer and where the destination is a
3015 pointer formal parameter described by jump function JFUNC. If it can be
3016 determined, return the newly direct edge, otherwise return NULL.
3017 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
3019 static struct cgraph_edge *
3020 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3021 struct ipa_jump_func *jfunc,
3022 struct ipa_node_params *new_root_info)
3024 struct cgraph_edge *cs;
3025 tree target;
3026 bool agg_contents = ie->indirect_info->agg_contents;
3027 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc);
3028 if (agg_contents)
3030 bool from_global_constant;
3031 target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3032 ie->indirect_info->offset,
3033 ie->indirect_info->by_ref,
3034 &from_global_constant);
3035 if (target
3036 && !from_global_constant
3037 && !ie->indirect_info->guaranteed_unmodified)
3038 return NULL;
3040 else
3041 target = scalar;
3042 if (!target)
3043 return NULL;
3044 cs = ipa_make_edge_direct_to_target (ie, target);
3046 if (cs && !agg_contents)
3048 bool ok;
3049 gcc_checking_assert (cs->callee
3050 && (cs != ie
3051 || jfunc->type != IPA_JF_CONST
3052 || !cgraph_node_for_jfunc (jfunc)
3053 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3054 ok = try_decrement_rdesc_refcount (jfunc);
3055 gcc_checking_assert (ok);
3058 return cs;
3061 /* Return the target to be used in cases of impossible devirtualization. IE
3062 and target (the latter can be NULL) are dumped when dumping is enabled. */
3064 tree
3065 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3067 if (dump_file)
3069 if (target)
3070 fprintf (dump_file,
3071 "Type inconsistent devirtualization: %s/%i->%s\n",
3072 ie->caller->name (), ie->caller->order,
3073 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3074 else
3075 fprintf (dump_file,
3076 "No devirtualization target in %s/%i\n",
3077 ie->caller->name (), ie->caller->order);
3079 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3080 cgraph_node::get_create (new_target);
3081 return new_target;
3084 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3085 call based on a formal parameter which is described by jump function JFUNC
3086 and if it can be determined, make it direct and return the direct edge.
3087 Otherwise, return NULL. CTX describes the polymorphic context that the
3088 parameter the call is based on brings along with it. */
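/* An illustrative scenario: if CTX pins the dynamic type of the object to
   B in

     struct A { virtual int f (); };
     struct B : A { int f (); };

   then possible_polymorphic_call_targets returns the single target B::f
   and the edge is made direct; when only a speculated vtable pointer
   value is available, the new edge is made speculative instead.  */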
3090 static struct cgraph_edge *
3091 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3092 struct ipa_jump_func *jfunc,
3093 struct ipa_polymorphic_call_context ctx)
3095 tree target = NULL;
3096 bool speculative = false;
3098 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
3099 return NULL;
3101 gcc_assert (!ie->indirect_info->by_ref);
3103 /* Try to do lookup via known virtual table pointer value. */
3104 if (!ie->indirect_info->vptr_changed
3105 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
3107 tree vtable;
3108 unsigned HOST_WIDE_INT offset;
3109 tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3110 : NULL;
3111 tree t = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3112 ie->indirect_info->offset,
3113 true);
3114 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3116 bool can_refer;
3117 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3118 vtable, offset, &can_refer);
3119 if (can_refer)
3121 if (!t
3122 || (TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
3123 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
3124 || !possible_polymorphic_call_target_p
3125 (ie, cgraph_node::get (t)))
3127 /* Do not speculate builtin_unreachable, it is stupid! */
3128 if (!ie->indirect_info->vptr_changed)
3129 target = ipa_impossible_devirt_target (ie, target);
3130 else
3131 target = NULL;
3133 else
3135 target = t;
3136 speculative = ie->indirect_info->vptr_changed;
3142 ipa_polymorphic_call_context ie_context (ie);
3143 vec <cgraph_node *>targets;
3144 bool final;
3146 ctx.offset_by (ie->indirect_info->offset);
3147 if (ie->indirect_info->vptr_changed)
3148 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3149 ie->indirect_info->otr_type);
3150 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3151 targets = possible_polymorphic_call_targets
3152 (ie->indirect_info->otr_type,
3153 ie->indirect_info->otr_token,
3154 ctx, &final);
3155 if (final && targets.length () <= 1)
3157 speculative = false;
3158 if (targets.length () == 1)
3159 target = targets[0]->decl;
3160 else
3161 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3163 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3164 && !ie->speculative && ie->maybe_hot_p ())
3166 cgraph_node *n;
3167 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3168 ie->indirect_info->otr_token,
3169 ie->indirect_info->context);
3170 if (n)
3172 target = n->decl;
3173 speculative = true;
3177 if (target)
3179 if (!possible_polymorphic_call_target_p
3180 (ie, cgraph_node::get_create (target)))
3182 if (speculative)
3183 return NULL;
3184 target = ipa_impossible_devirt_target (ie, target);
3186 return ipa_make_edge_direct_to_target (ie, target, speculative);
3188 else
3189 return NULL;
3192 /* Update the param called notes associated with NODE when CS is being inlined,
3193 assuming NODE is (potentially indirectly) inlined into CS->callee.
3194 Moreover, if the callee is discovered to be constant, create a new cgraph
3195 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3196 unless NEW_EDGES is NULL. Return true iff new edges were created. */
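/* An illustrative case: after inlining the call wrap (f) in

     static void wrap (void (*cb) (void)) { cb (); }
     extern void f (void);
     void g (void) { wrap (f); }

   the jump function of the inlined call site describes cb as the constant
   &f, so the formerly indirect call through cb becomes a direct edge to f
   and, if NEW_EDGES is non-NULL, is pushed onto it.  */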
3198 static bool
3199 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3200 struct cgraph_node *node,
3201 vec<cgraph_edge *> *new_edges)
3203 struct ipa_edge_args *top;
3204 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3205 struct ipa_node_params *new_root_info;
3206 bool res = false;
3208 ipa_check_create_edge_args ();
3209 top = IPA_EDGE_REF (cs);
3210 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3211 ? cs->caller->global.inlined_to
3212 : cs->caller);
3214 for (ie = node->indirect_calls; ie; ie = next_ie)
3216 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3217 struct ipa_jump_func *jfunc;
3218 int param_index;
3219 cgraph_node *spec_target = NULL;
3221 next_ie = ie->next_callee;
3223 if (ici->param_index == -1)
3224 continue;
3226 /* We must check range due to calls with variable number of arguments: */
3227 if (ici->param_index >= ipa_get_cs_argument_count (top))
3229 ici->param_index = -1;
3230 continue;
3233 param_index = ici->param_index;
3234 jfunc = ipa_get_ith_jump_func (top, param_index);
3236 if (ie->speculative)
3238 struct cgraph_edge *de;
3239 struct ipa_ref *ref;
3240 ie->speculative_call_info (de, ie, ref);
3241 spec_target = de->callee;
3244 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3245 new_direct_edge = NULL;
3246 else if (ici->polymorphic)
3248 ipa_polymorphic_call_context ctx;
3249 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3250 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3252 else
3253 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3254 new_root_info);
3255 /* If speculation was removed, then we need to do nothing. */
3256 if (new_direct_edge && new_direct_edge != ie
3257 && new_direct_edge->callee == spec_target)
3259 new_direct_edge->indirect_inlining_edge = 1;
3260 top = IPA_EDGE_REF (cs);
3261 res = true;
3262 if (!new_direct_edge->speculative)
3263 continue;
3265 else if (new_direct_edge)
3267 new_direct_edge->indirect_inlining_edge = 1;
3268 if (new_direct_edge->call_stmt)
3269 new_direct_edge->call_stmt_cannot_inline_p
3270 = !gimple_check_call_matching_types (
3271 new_direct_edge->call_stmt,
3272 new_direct_edge->callee->decl, false);
3273 if (new_edges)
3275 new_edges->safe_push (new_direct_edge);
3276 res = true;
3278 top = IPA_EDGE_REF (cs);
3279 /* If a speculative edge was introduced, we still need to update
3280 the call info of the indirect edge. */
3281 if (!new_direct_edge->speculative)
3282 continue;
3284 if (jfunc->type == IPA_JF_PASS_THROUGH
3285 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3287 if (ici->agg_contents
3288 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3289 && !ici->polymorphic)
3290 ici->param_index = -1;
3291 else
3293 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3294 if (ici->polymorphic
3295 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3296 ici->vptr_changed = true;
3299 else if (jfunc->type == IPA_JF_ANCESTOR)
3301 if (ici->agg_contents
3302 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3303 && !ici->polymorphic)
3304 ici->param_index = -1;
3305 else
3307 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3308 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3309 if (ici->polymorphic
3310 && !ipa_get_jf_ancestor_type_preserved (jfunc))
3311 ici->vptr_changed = true;
3314 else
3315 /* Either we can find a destination for this edge now or never. */
3316 ici->param_index = -1;
3319 return res;
3322 /* Recursively traverse subtree of NODE (including node) made of inlined
3323 cgraph_edges when CS has been inlined and invoke
3324 update_indirect_edges_after_inlining on all nodes and
3325 update_jump_functions_after_inlining on all non-inlined edges that lead out
3326 of this subtree. Newly discovered indirect edges will be added to
3327 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3328 created. */
3330 static bool
3331 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3332 struct cgraph_node *node,
3333 vec<cgraph_edge *> *new_edges)
3335 struct cgraph_edge *e;
3336 bool res;
3338 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3340 for (e = node->callees; e; e = e->next_callee)
3341 if (!e->inline_failed)
3342 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3343 else
3344 update_jump_functions_after_inlining (cs, e);
3345 for (e = node->indirect_calls; e; e = e->next_callee)
3346 update_jump_functions_after_inlining (cs, e);
3348 return res;
3351 /* Combine two controlled uses counts as done during inlining. */
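/* For example, if a constant has c = 3 described uses in the caller and
   the parameter it is passed to has d = 2 controlled uses in the callee,
   the act of passing it no longer counts as a use after inlining, so the
   combined count is 3 + 2 - 1 = 4.  If either count is
   IPA_UNDESCRIBED_USE, so is the result.  */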
3353 static int
3354 combine_controlled_uses_counters (int c, int d)
3356 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3357 return IPA_UNDESCRIBED_USE;
3358 else
3359 return c + d - 1;
3362 /* Propagate the number of controlled users from CS->callee to the new root of the
3363 tree of inlined nodes. */
3365 static void
3366 propagate_controlled_uses (struct cgraph_edge *cs)
3368 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3369 struct cgraph_node *new_root = cs->caller->global.inlined_to
3370 ? cs->caller->global.inlined_to : cs->caller;
3371 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3372 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3373 int count, i;
3375 count = MIN (ipa_get_cs_argument_count (args),
3376 ipa_get_param_count (old_root_info));
3377 for (i = 0; i < count; i++)
3379 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3380 struct ipa_cst_ref_desc *rdesc;
3382 if (jf->type == IPA_JF_PASS_THROUGH)
3384 int src_idx, c, d;
3385 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3386 c = ipa_get_controlled_uses (new_root_info, src_idx);
3387 d = ipa_get_controlled_uses (old_root_info, i);
3389 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3390 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3391 c = combine_controlled_uses_counters (c, d);
3392 ipa_set_controlled_uses (new_root_info, src_idx, c);
3393 if (c == 0 && new_root_info->ipcp_orig_node)
3395 struct cgraph_node *n;
3396 struct ipa_ref *ref;
3397 tree t = new_root_info->known_csts[src_idx];
3399 if (t && TREE_CODE (t) == ADDR_EXPR
3400 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3401 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3402 && (ref = new_root->find_reference (n, NULL, 0)))
3404 if (dump_file)
3405 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3406 "reference from %s/%i to %s/%i.\n",
3407 xstrdup_for_dump (new_root->name ()),
3408 new_root->order,
3409 xstrdup_for_dump (n->name ()), n->order);
3410 ref->remove_reference ();
3414 else if (jf->type == IPA_JF_CONST
3415 && (rdesc = jfunc_rdesc_usable (jf)))
3417 int d = ipa_get_controlled_uses (old_root_info, i);
3418 int c = rdesc->refcount;
3419 rdesc->refcount = combine_controlled_uses_counters (c, d);
3420 if (rdesc->refcount == 0)
3422 tree cst = ipa_get_jf_constant (jf);
3423 struct cgraph_node *n;
3424 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3425 && TREE_CODE (TREE_OPERAND (cst, 0))
3426 == FUNCTION_DECL);
3427 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3428 if (n)
3430 struct cgraph_node *clone;
3431 bool ok;
3432 ok = remove_described_reference (n, rdesc);
3433 gcc_checking_assert (ok);
3435 clone = cs->caller;
3436 while (clone->global.inlined_to
3437 && clone != rdesc->cs->caller
3438 && IPA_NODE_REF (clone)->ipcp_orig_node)
3440 struct ipa_ref *ref;
3441 ref = clone->find_reference (n, NULL, 0);
3442 if (ref)
3444 if (dump_file)
3445 fprintf (dump_file, "ipa-prop: Removing "
3446 "cloning-created reference "
3447 "from %s/%i to %s/%i.\n",
3448 xstrdup_for_dump (clone->name ()),
3449 clone->order,
3450 xstrdup_for_dump (n->name ()),
3451 n->order);
3452 ref->remove_reference ();
3454 clone = clone->callers->caller;
3461 for (i = ipa_get_param_count (old_root_info);
3462 i < ipa_get_cs_argument_count (args);
3463 i++)
3465 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3467 if (jf->type == IPA_JF_CONST)
3469 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3470 if (rdesc)
3471 rdesc->refcount = IPA_UNDESCRIBED_USE;
3473 else if (jf->type == IPA_JF_PASS_THROUGH)
3474 ipa_set_controlled_uses (new_root_info,
3475 jf->value.pass_through.formal_id,
3476 IPA_UNDESCRIBED_USE);
3480 /* Update jump functions and call note functions on inlining the call site CS.
3481 CS is expected to lead to a node already cloned by
3482 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3483 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3484 created. */
3486 bool
3487 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3488 vec<cgraph_edge *> *new_edges)
3490 bool changed;
3491 /* Do nothing if the preparation phase has not been carried out yet
3492 (i.e. during early inlining). */
3493 if (!ipa_node_params_sum)
3494 return false;
3495 gcc_assert (ipa_edge_args_vector);
3497 propagate_controlled_uses (cs);
3498 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3500 return changed;
3503 /* Frees all dynamically allocated structures that the argument info points
3504 to. */
3506 void
3507 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3509 vec_free (args->jump_functions);
3510 memset (args, 0, sizeof (*args));
3513 /* Free all ipa_edge structures. */
3515 void
3516 ipa_free_all_edge_args (void)
3518 int i;
3519 struct ipa_edge_args *args;
3521 if (!ipa_edge_args_vector)
3522 return;
3524 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3525 ipa_free_edge_args_substructures (args);
3527 vec_free (ipa_edge_args_vector);
3530 /* Frees all dynamically allocated structures that the param info points
3531 to. */
3533 ipa_node_params::~ipa_node_params ()
3535 descriptors.release ();
3536 free (lattices);
3537 /* Lattice values and their sources are deallocated with their allocation
3538 pool. */
3539 known_csts.release ();
3540 known_contexts.release ();
3542 lattices = NULL;
3543 ipcp_orig_node = NULL;
3544 analysis_done = 0;
3545 node_enqueued = 0;
3546 do_clone_for_all_contexts = 0;
3547 is_all_contexts_clone = 0;
3548 node_dead = 0;
3551 /* Free all ipa_node_params structures. */
3553 void
3554 ipa_free_all_node_params (void)
3556 delete ipa_node_params_sum;
3557 ipa_node_params_sum = NULL;
3560 /* Grow ipcp_transformations if necessary. */
3562 void
3563 ipcp_grow_transformations_if_necessary (void)
3565 if (vec_safe_length (ipcp_transformations)
3566 <= (unsigned) symtab->cgraph_max_uid)
3567 vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
3570 /* Set the aggregate replacements of NODE to be AGGVALS. */
3572 void
3573 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3574 struct ipa_agg_replacement_value *aggvals)
3576 ipcp_grow_transformations_if_necessary ();
3577 (*ipcp_transformations)[node->uid].agg_values = aggvals;
3580 /* Hook that is called by cgraph.c when an edge is removed. */
3582 static void
3583 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3585 struct ipa_edge_args *args;
3587 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3588 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3589 return;
3591 args = IPA_EDGE_REF (cs);
3592 if (args->jump_functions)
3594 struct ipa_jump_func *jf;
3595 int i;
3596 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3598 struct ipa_cst_ref_desc *rdesc;
3599 try_decrement_rdesc_refcount (jf);
3600 if (jf->type == IPA_JF_CONST
3601 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3602 && rdesc->cs == cs)
3603 rdesc->cs = NULL;
3607 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3610 /* Hook that is called by cgraph.c when an edge is duplicated. */
3612 static void
3613 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3614 void *)
3616 struct ipa_edge_args *old_args, *new_args;
3617 unsigned int i;
3619 ipa_check_create_edge_args ();
3621 old_args = IPA_EDGE_REF (src);
3622 new_args = IPA_EDGE_REF (dst);
3624 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3625 if (old_args->polymorphic_call_contexts)
3626 new_args->polymorphic_call_contexts
3627 = vec_safe_copy (old_args->polymorphic_call_contexts);
3629 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3631 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3632 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3634 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3636 if (src_jf->type == IPA_JF_CONST)
3638 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3640 if (!src_rdesc)
3641 dst_jf->value.constant.rdesc = NULL;
3642 else if (src->caller == dst->caller)
3644 struct ipa_ref *ref;
3645 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3646 gcc_checking_assert (n);
3647 ref = src->caller->find_reference (n, src->call_stmt,
3648 src->lto_stmt_uid);
3649 gcc_checking_assert (ref);
3650 dst->caller->clone_reference (ref, ref->stmt);
3652 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3653 dst_rdesc->cs = dst;
3654 dst_rdesc->refcount = src_rdesc->refcount;
3655 dst_rdesc->next_duplicate = NULL;
3656 dst_jf->value.constant.rdesc = dst_rdesc;
3658 else if (src_rdesc->cs == src)
3660 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3661 dst_rdesc->cs = dst;
3662 dst_rdesc->refcount = src_rdesc->refcount;
3663 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3664 src_rdesc->next_duplicate = dst_rdesc;
3665 dst_jf->value.constant.rdesc = dst_rdesc;
3667 else
3669 struct ipa_cst_ref_desc *dst_rdesc;
3670 /* This can happen during inlining, when a JFUNC can refer to a
3671 reference taken in a function up in the tree of inline clones.
3672 We need to find the duplicate that refers to our tree of
3673 inline clones. */
3675 gcc_assert (dst->caller->global.inlined_to);
3676 for (dst_rdesc = src_rdesc->next_duplicate;
3677 dst_rdesc;
3678 dst_rdesc = dst_rdesc->next_duplicate)
3680 struct cgraph_node *top;
3681 top = dst_rdesc->cs->caller->global.inlined_to
3682 ? dst_rdesc->cs->caller->global.inlined_to
3683 : dst_rdesc->cs->caller;
3684 if (dst->caller->global.inlined_to == top)
3685 break;
3687 gcc_assert (dst_rdesc);
3688 dst_jf->value.constant.rdesc = dst_rdesc;
3691 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3692 && src->caller == dst->caller)
3694 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3695 ? dst->caller->global.inlined_to : dst->caller;
3696 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3697 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3699 int c = ipa_get_controlled_uses (root_info, idx);
3700 if (c != IPA_UNDESCRIBED_USE)
3702 c++;
3703 ipa_set_controlled_uses (root_info, idx, c);
3709 /* Analyze a function newly added to the callgraph. */
3711 static void
3712 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3714 if (node->has_gimple_body_p ())
3715 ipa_analyze_node (node);
3718 /* Hook that is called by summary when a node is duplicated. */
3720 void
3721 ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
3722 ipa_node_params *old_info,
3723 ipa_node_params *new_info)
3725 ipa_agg_replacement_value *old_av, *new_av;
3727 new_info->descriptors = old_info->descriptors.copy ();
3728 new_info->lattices = NULL;
3729 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3731 new_info->analysis_done = old_info->analysis_done;
3732 new_info->node_enqueued = old_info->node_enqueued;
3733 new_info->versionable = old_info->versionable;
3735 old_av = ipa_get_agg_replacements_for_node (src);
3736 if (old_av)
3738 new_av = NULL;
3739 while (old_av)
3741 struct ipa_agg_replacement_value *v;
3743 v = ggc_alloc<ipa_agg_replacement_value> ();
3744 memcpy (v, old_av, sizeof (*v));
3745 v->next = new_av;
3746 new_av = v;
3747 old_av = old_av->next;
3749 ipa_set_node_agg_value_chain (dst, new_av);
3752 ipcp_transformation_summary *src_trans = ipcp_get_transformation_summary (src);
3754 if (src_trans)
3756 ipcp_grow_transformations_if_necessary ();
3757 src_trans = ipcp_get_transformation_summary (src);
3758 const vec<ipa_alignment, va_gc> *src_alignments = src_trans->alignments;
3759 const vec<ipa_vr, va_gc> *src_vr = src_trans->m_vr;
3760 vec<ipa_alignment, va_gc> *&dst_alignments
3761 = ipcp_get_transformation_summary (dst)->alignments;
3762 vec<ipa_vr, va_gc> *&dst_vr
3763 = ipcp_get_transformation_summary (dst)->m_vr;
3764 if (vec_safe_length (src_trans->alignments) > 0)
3766 vec_safe_reserve_exact (dst_alignments, src_alignments->length ());
3767 for (unsigned i = 0; i < src_alignments->length (); ++i)
3768 dst_alignments->quick_push ((*src_alignments)[i]);
3770 if (vec_safe_length (src_trans->m_vr) > 0)
3772 vec_safe_reserve_exact (dst_vr, src_vr->length ());
3773 for (unsigned i = 0; i < src_vr->length (); ++i)
3774 dst_vr->quick_push ((*src_vr)[i]);
3778 if (src_trans && vec_safe_length (src_trans->bits) > 0)
3780 ipcp_grow_transformations_if_necessary ();
3781 src_trans = ipcp_get_transformation_summary (src);
3782 const vec<ipa_bits, va_gc> *src_bits = src_trans->bits;
3783 vec<ipa_bits, va_gc> *&dst_bits
3784 = ipcp_get_transformation_summary (dst)->bits;
3785 vec_safe_reserve_exact (dst_bits, src_bits->length ());
3786 for (unsigned i = 0; i < src_bits->length (); ++i)
3787 dst_bits->quick_push ((*src_bits)[i]);
3791 /* Register our cgraph hooks if they are not already there. */
3793 void
3794 ipa_register_cgraph_hooks (void)
3796 ipa_check_create_node_params ();
3798 if (!edge_removal_hook_holder)
3799 edge_removal_hook_holder =
3800 symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3801 if (!edge_duplication_hook_holder)
3802 edge_duplication_hook_holder =
3803 symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3804 function_insertion_hook_holder =
3805 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3808 /* Unregister our cgraph hooks if they are registered. */
3810 static void
3811 ipa_unregister_cgraph_hooks (void)
3813 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
3814 edge_removal_hook_holder = NULL;
3815 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
3816 edge_duplication_hook_holder = NULL;
3817 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3818 function_insertion_hook_holder = NULL;
3821 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3822 longer needed after ipa-cp. */
3824 void
3825 ipa_free_all_structures_after_ipa_cp (void)
3827 if (!optimize && !in_lto_p)
3829 ipa_free_all_edge_args ();
3830 ipa_free_all_node_params ();
3831 ipcp_sources_pool.release ();
3832 ipcp_cst_values_pool.release ();
3833 ipcp_poly_ctx_values_pool.release ();
3834 ipcp_agg_lattice_pool.release ();
3835 ipa_unregister_cgraph_hooks ();
3836 ipa_refdesc_pool.release ();
3840 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3841 longer needed after indirect inlining. */
3843 void
3844 ipa_free_all_structures_after_iinln (void)
3846 ipa_free_all_edge_args ();
3847 ipa_free_all_node_params ();
3848 ipa_unregister_cgraph_hooks ();
3849 ipcp_sources_pool.release ();
3850 ipcp_cst_values_pool.release ();
3851 ipcp_poly_ctx_values_pool.release ();
3852 ipcp_agg_lattice_pool.release ();
3853 ipa_refdesc_pool.release ();
3856 /* Print the parameter descriptors of function NODE, as recorded in its
3857 ipa_node_params, to F. */
3859 void
3860 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3862 int i, count;
3863 struct ipa_node_params *info;
3865 if (!node->definition)
3866 return;
3867 info = IPA_NODE_REF (node);
3868 fprintf (f, " function %s/%i parameter descriptors:\n",
3869 node->name (), node->order);
3870 count = ipa_get_param_count (info);
3871 for (i = 0; i < count; i++)
3873 int c;
3875 fprintf (f, " ");
3876 ipa_dump_param (f, info, i);
3877 if (ipa_is_param_used (info, i))
3878 fprintf (f, " used");
3879 c = ipa_get_controlled_uses (info, i);
3880 if (c == IPA_UNDESCRIBED_USE)
3881 fprintf (f, " undescribed_use");
3882 else
3883 fprintf (f, " controlled_uses=%i", c);
3884 fprintf (f, "\n");
3888 /* Print ipa_tree_map data structures of all functions in the
3889 callgraph to F. */
3891 void
3892 ipa_print_all_params (FILE * f)
3894 struct cgraph_node *node;
3896 fprintf (f, "\nFunction parameters:\n");
3897 FOR_EACH_FUNCTION (node)
3898 ipa_print_node_params (f, node);
3903 /* Return a heap-allocated vector containing formal parameters of FNDECL. */
3903 vec<tree>
3904 ipa_get_vector_of_formal_parms (tree fndecl)
3906 vec<tree> args;
3907 int count;
3908 tree parm;
3910 gcc_assert (!flag_wpa);
3911 count = count_formal_params (fndecl);
3912 args.create (count);
3913 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3914 args.quick_push (parm);
3916 return args;
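/* A typical use of the above, mirroring what ipa_dump_param_adjustments
   does later in this file, is roughly (illustrative sketch only):

     vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
     tree first = parms.length () > 0 ? parms[0] : NULL_TREE;
     parms.release ();

   The vector is heap allocated, so forgetting the release call leaks. */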
3919 /* Return a heap-allocated vector containing types of formal parameters of
3920 function type FNTYPE. */
3922 vec<tree>
3923 ipa_get_vector_of_formal_parm_types (tree fntype)
3925 vec<tree> types;
3926 int count = 0;
3927 tree t;
3929 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3930 count++;
3932 types.create (count);
3933 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3934 types.quick_push (TREE_VALUE (t));
3936 return types;
3939 /* Modify the function declaration FNDECL and its type according to the plan in
3940 ADJUSTMENTS. It also sets the base fields of individual adjustment
3941 structures to reflect the actual parameters being modified, which are
3942 determined by the base_index field. */
3944 void
3945 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3947 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3948 tree orig_type = TREE_TYPE (fndecl);
3949 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3951 /* The following test is an ugly hack: some functions simply don't have any
3952 arguments in their type. This is probably a bug but well... */
3953 bool care_for_types = (old_arg_types != NULL_TREE);
3954 bool last_parm_void;
3955 vec<tree> otypes;
3956 if (care_for_types)
3958 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3959 == void_type_node);
3960 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3961 if (last_parm_void)
3962 gcc_assert (oparms.length () + 1 == otypes.length ());
3963 else
3964 gcc_assert (oparms.length () == otypes.length ());
3966 else
3968 last_parm_void = false;
3969 otypes.create (0);
3972 int len = adjustments.length ();
3973 tree *link = &DECL_ARGUMENTS (fndecl);
3974 tree new_arg_types = NULL;
3975 for (int i = 0; i < len; i++)
3977 struct ipa_parm_adjustment *adj;
3978 gcc_assert (link);
3980 adj = &adjustments[i];
3981 tree parm;
3982 if (adj->op == IPA_PARM_OP_NEW)
3983 parm = NULL;
3984 else
3985 parm = oparms[adj->base_index];
3986 adj->base = parm;
3988 if (adj->op == IPA_PARM_OP_COPY)
3990 if (care_for_types)
3991 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3992 new_arg_types);
3993 *link = parm;
3994 link = &DECL_CHAIN (parm);
3996 else if (adj->op != IPA_PARM_OP_REMOVE)
3998 tree new_parm;
3999 tree ptype;
4001 if (adj->by_ref)
4002 ptype = build_pointer_type (adj->type);
4003 else
4005 ptype = adj->type;
4006 if (is_gimple_reg_type (ptype))
4008 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
4009 if (TYPE_ALIGN (ptype) != malign)
4010 ptype = build_aligned_type (ptype, malign);
4014 if (care_for_types)
4015 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
4017 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
4018 ptype);
4019 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
4020 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
4021 DECL_ARTIFICIAL (new_parm) = 1;
4022 DECL_ARG_TYPE (new_parm) = ptype;
4023 DECL_CONTEXT (new_parm) = fndecl;
4024 TREE_USED (new_parm) = 1;
4025 DECL_IGNORED_P (new_parm) = 1;
4026 layout_decl (new_parm, 0);
4028 if (adj->op == IPA_PARM_OP_NEW)
4029 adj->base = NULL;
4030 else
4031 adj->base = parm;
4032 adj->new_decl = new_parm;
4034 *link = new_parm;
4035 link = &DECL_CHAIN (new_parm);
4039 *link = NULL_TREE;
4041 tree new_reversed = NULL;
4042 if (care_for_types)
4044 new_reversed = nreverse (new_arg_types);
4045 if (last_parm_void)
4047 if (new_reversed)
4048 TREE_CHAIN (new_arg_types) = void_list_node;
4049 else
4050 new_reversed = void_list_node;
4054 /* Use build_distinct_type_copy to preserve as much as possible from the
4055 original type (debug info, attribute lists etc.).
4056 The exception is that METHOD_TYPEs must have a THIS argument; when we
4057 are asked to remove it, we need to build a new FUNCTION_TYPE instead. */
4059 tree new_type = NULL;
4060 if (TREE_CODE (orig_type) != METHOD_TYPE
4061 || (adjustments[0].op == IPA_PARM_OP_COPY
4062 && adjustments[0].base_index == 0))
4064 new_type = build_distinct_type_copy (orig_type);
4065 TYPE_ARG_TYPES (new_type) = new_reversed;
4067 else
4069 new_type
4070 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
4071 new_reversed));
4072 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
4073 DECL_VINDEX (fndecl) = NULL_TREE;
4076 /* When the signature changes, we need to clear the builtin info. */
4077 if (DECL_BUILT_IN (fndecl))
4079 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
4080 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
4083 TREE_TYPE (fndecl) = new_type;
4084 DECL_VIRTUAL_P (fndecl) = 0;
4085 DECL_LANG_SPECIFIC (fndecl) = NULL;
4086 otypes.release ();
4087 oparms.release ();
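/* To illustrate the effect of ipa_modify_formal_parameters on a made-up
   example: given

     void foo (struct S *s, int unused);

   an adjustment vector whose first entry describes the int field of *S
   (op != IPA_PARM_OP_COPY, by_ref false, type int) and whose second
   entry is an IPA_PARM_OP_REMOVE for the second parameter rewrites the
   declaration roughly into

     void foo (int SYNTH.3);

   where the synthesized PARM_DECL is named from adj->arg_prefix
   ("SYNTH" by default) by create_tmp_var_name above. */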
4090 /* Modify actual arguments of the function call STMT as indicated in
4091 ADJUSTMENTS. If this is a directly recursive call, CS must be NULL;
4092 otherwise it must contain the corresponding call graph edge. */
4094 void
4095 ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
4096 ipa_parm_adjustment_vec adjustments)
4098 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
4099 vec<tree> vargs;
4100 vec<tree, va_gc> **debug_args = NULL;
4101 gcall *new_stmt;
4102 gimple_stmt_iterator gsi, prev_gsi;
4103 tree callee_decl;
4104 int i, len;
4106 len = adjustments.length ();
4107 vargs.create (len);
4108 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
4109 current_node->remove_stmt_references (stmt);
4111 gsi = gsi_for_stmt (stmt);
4112 prev_gsi = gsi;
4113 gsi_prev (&prev_gsi);
4114 for (i = 0; i < len; i++)
4116 struct ipa_parm_adjustment *adj;
4118 adj = &adjustments[i];
4120 if (adj->op == IPA_PARM_OP_COPY)
4122 tree arg = gimple_call_arg (stmt, adj->base_index);
4124 vargs.quick_push (arg);
4126 else if (adj->op != IPA_PARM_OP_REMOVE)
4128 tree expr, base, off;
4129 location_t loc;
4130 unsigned int deref_align = 0;
4131 bool deref_base = false;
4133 /* We create a new parameter out of the value of the old one; we can
4134 do the following kinds of transformations:
4136 - A scalar passed by reference is converted to a scalar passed by
4137 value. (adj->by_ref is false and the type of the original
4138 actual argument is a pointer to a scalar).
4140 - A part of an aggregate is passed instead of the whole aggregate.
4141 The part can be passed either by value or by reference, this is
4142 determined by value of adj->by_ref. Moreover, the code below
4143 handles both situations when the original aggregate is passed by
4144 value (its type is not a pointer) and when it is passed by
4145 reference (it is a pointer to an aggregate).
4147 When the new argument is passed by reference (adj->by_ref is true)
4148 it must be a part of an aggregate and therefore we form it by
4149 simply taking the address of a reference inside the original
4150 aggregate. */
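/* For instance (hypothetical caller, sketch only): if the callee used
   to take a struct S *s and now takes the int member at byte offset 4
   by value, a call

     bar (&s);

   is rewritten below into roughly

     tmp_1 = MEM[(int *)&s + 4B];
     bar (tmp_1);

   where the load into a temporary is needed because a memory reference
   is not a valid gimple call argument for a register type. */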
4152 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
4153 base = gimple_call_arg (stmt, adj->base_index);
4154 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
4155 : EXPR_LOCATION (base);
4157 if (TREE_CODE (base) != ADDR_EXPR
4158 && POINTER_TYPE_P (TREE_TYPE (base)))
4159 off = build_int_cst (adj->alias_ptr_type,
4160 adj->offset / BITS_PER_UNIT);
4161 else
4163 HOST_WIDE_INT base_offset;
4164 tree prev_base;
4165 bool addrof;
4167 if (TREE_CODE (base) == ADDR_EXPR)
4169 base = TREE_OPERAND (base, 0);
4170 addrof = true;
4172 else
4173 addrof = false;
4174 prev_base = base;
4175 base = get_addr_base_and_unit_offset (base, &base_offset);
4176 /* Aggregate arguments can have non-invariant addresses. */
4177 if (!base)
4179 base = build_fold_addr_expr (prev_base);
4180 off = build_int_cst (adj->alias_ptr_type,
4181 adj->offset / BITS_PER_UNIT);
4183 else if (TREE_CODE (base) == MEM_REF)
4185 if (!addrof)
4187 deref_base = true;
4188 deref_align = TYPE_ALIGN (TREE_TYPE (base));
4190 off = build_int_cst (adj->alias_ptr_type,
4191 base_offset
4192 + adj->offset / BITS_PER_UNIT);
4193 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
4194 off);
4195 base = TREE_OPERAND (base, 0);
4197 else
4199 off = build_int_cst (adj->alias_ptr_type,
4200 base_offset
4201 + adj->offset / BITS_PER_UNIT);
4202 base = build_fold_addr_expr (base);
4206 if (!adj->by_ref)
4208 tree type = adj->type;
4209 unsigned int align;
4210 unsigned HOST_WIDE_INT misalign;
4212 if (deref_base)
4214 align = deref_align;
4215 misalign = 0;
4217 else
4219 get_pointer_alignment_1 (base, &align, &misalign);
4220 if (TYPE_ALIGN (type) > align)
4221 align = TYPE_ALIGN (type);
4223 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
4224 * BITS_PER_UNIT);
4225 misalign = misalign & (align - 1);
4226 if (misalign != 0)
4227 align = least_bit_hwi (misalign);
4228 if (align < TYPE_ALIGN (type))
4229 type = build_aligned_type (type, align);
4230 base = force_gimple_operand_gsi (&gsi, base,
4231 true, NULL, true, GSI_SAME_STMT);
4232 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
4233 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
4234 /* If expr is not a valid gimple call argument, emit
4235 a load into a temporary. */
4236 if (is_gimple_reg_type (TREE_TYPE (expr)))
4238 gimple *tem = gimple_build_assign (NULL_TREE, expr);
4239 if (gimple_in_ssa_p (cfun))
4241 gimple_set_vuse (tem, gimple_vuse (stmt));
4242 expr = make_ssa_name (TREE_TYPE (expr), tem);
4244 else
4245 expr = create_tmp_reg (TREE_TYPE (expr));
4246 gimple_assign_set_lhs (tem, expr);
4247 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4250 else
4252 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4253 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
4254 expr = build_fold_addr_expr (expr);
4255 expr = force_gimple_operand_gsi (&gsi, expr,
4256 true, NULL, true, GSI_SAME_STMT);
4258 vargs.quick_push (expr);
4260 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4262 unsigned int ix;
4263 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4264 gimple *def_temp;
4266 arg = gimple_call_arg (stmt, adj->base_index);
4267 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4269 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4270 continue;
4271 arg = fold_convert_loc (gimple_location (stmt),
4272 TREE_TYPE (origin), arg);
4274 if (debug_args == NULL)
4275 debug_args = decl_debug_args_insert (callee_decl);
4276 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4277 if (ddecl == origin)
4279 ddecl = (**debug_args)[ix + 1];
4280 break;
4282 if (ddecl == NULL)
4284 ddecl = make_node (DEBUG_EXPR_DECL);
4285 DECL_ARTIFICIAL (ddecl) = 1;
4286 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4287 DECL_MODE (ddecl) = DECL_MODE (origin);
4289 vec_safe_push (*debug_args, origin);
4290 vec_safe_push (*debug_args, ddecl);
4292 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4293 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4297 if (dump_file && (dump_flags & TDF_DETAILS))
4299 fprintf (dump_file, "replacing stmt:");
4300 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4303 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4304 vargs.release ();
4305 if (gimple_call_lhs (stmt))
4306 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4308 gimple_set_block (new_stmt, gimple_block (stmt));
4309 if (gimple_has_location (stmt))
4310 gimple_set_location (new_stmt, gimple_location (stmt));
4311 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4312 gimple_call_copy_flags (new_stmt, stmt);
4313 if (gimple_in_ssa_p (cfun))
4315 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4316 if (gimple_vdef (stmt))
4318 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4319 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4323 if (dump_file && (dump_flags & TDF_DETAILS))
4325 fprintf (dump_file, "with stmt:");
4326 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4327 fprintf (dump_file, "\n");
4329 gsi_replace (&gsi, new_stmt, true);
4330 if (cs)
4331 cs->set_call_stmt (new_stmt);
4334 current_node->record_stmt_references (gsi_stmt (gsi));
4335 gsi_prev (&gsi);
4337 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4340 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4341 so. ADJUSTMENTS is a vector of adjustments. CONVERT specifies
4342 whether the function should care about type incompatibility between the
4343 current and new expressions. If it is false, the function will leave
4344 incompatibility issues to the caller. Return true iff the expression
4345 was modified. */
4347 bool
4348 ipa_modify_expr (tree *expr, bool convert,
4349 ipa_parm_adjustment_vec adjustments)
4351 struct ipa_parm_adjustment *cand
4352 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4353 if (!cand)
4354 return false;
4356 tree src;
4357 if (cand->by_ref)
4359 src = build_simple_mem_ref (cand->new_decl);
4360 REF_REVERSE_STORAGE_ORDER (src) = cand->reverse;
4362 else
4363 src = cand->new_decl;
4365 if (dump_file && (dump_flags & TDF_DETAILS))
4367 fprintf (dump_file, "About to replace expr ");
4368 print_generic_expr (dump_file, *expr, 0);
4369 fprintf (dump_file, " with ");
4370 print_generic_expr (dump_file, src, 0);
4371 fprintf (dump_file, "\n");
4374 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4376 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4377 *expr = vce;
4379 else
4380 *expr = src;
4381 return true;
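/* To illustrate (made-up decls): if a parameter "struct S *s" was
   reduced to a by-value copy of the int field s->f, an occurrence of
   s->f in the body matches the adjustment's new_decl and is replaced
   by it directly; had the recorded type differed from the type of the
   original expression, the replacement would be wrapped in a
   VIEW_CONVERT_EXPR as done above. */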
4384 /* If T is an SSA_NAME, return NULL if it is not a default def, or
4385 its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4386 the base variable is always returned, regardless of whether it is a
4387 default def. Return T if it is not an SSA_NAME. */
4389 static tree
4390 get_ssa_base_param (tree t, bool ignore_default_def)
4392 if (TREE_CODE (t) == SSA_NAME)
4394 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4395 return SSA_NAME_VAR (t);
4396 else
4397 return NULL_TREE;
4399 return t;
4402 /* Given an expression, return an adjustment entry specifying the
4403 transformation to be done on EXPR. If no suitable adjustment entry
4404 was found, returns NULL.
4406 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4407 default def as well; otherwise bail out on them.
4409 If CONVERT is non-NULL, this function will set *CONVERT if the
4410 expression provided is a component reference. ADJUSTMENTS is the
4411 adjustments vector. */
4413 ipa_parm_adjustment *
4414 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4415 ipa_parm_adjustment_vec adjustments,
4416 bool ignore_default_def)
4418 if (TREE_CODE (**expr) == BIT_FIELD_REF
4419 || TREE_CODE (**expr) == IMAGPART_EXPR
4420 || TREE_CODE (**expr) == REALPART_EXPR)
4422 *expr = &TREE_OPERAND (**expr, 0);
4423 if (convert)
4424 *convert = true;
4427 HOST_WIDE_INT offset, size, max_size;
4428 bool reverse;
4429 tree base
4430 = get_ref_base_and_extent (**expr, &offset, &size, &max_size, &reverse);
4431 if (!base || size == -1 || max_size == -1)
4432 return NULL;
4434 if (TREE_CODE (base) == MEM_REF)
4436 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4437 base = TREE_OPERAND (base, 0);
4440 base = get_ssa_base_param (base, ignore_default_def);
4441 if (!base || TREE_CODE (base) != PARM_DECL)
4442 return NULL;
4444 struct ipa_parm_adjustment *cand = NULL;
4445 unsigned int len = adjustments.length ();
4446 for (unsigned i = 0; i < len; i++)
4448 struct ipa_parm_adjustment *adj = &adjustments[i];
4450 if (adj->base == base
4451 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4453 cand = adj;
4454 break;
4458 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4459 return NULL;
4460 return cand;
4463 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4465 static bool
4466 index_in_adjustments_multiple_times_p (int base_index,
4467 ipa_parm_adjustment_vec adjustments)
4469 int i, len = adjustments.length ();
4470 bool one = false;
4472 for (i = 0; i < len; i++)
4474 struct ipa_parm_adjustment *adj;
4475 adj = &adjustments[i];
4477 if (adj->base_index == base_index)
4479 if (one)
4480 return true;
4481 else
4482 one = true;
4485 return false;
4489 /* Return adjustments that should have the same effect on function parameters
4490 and call arguments as if they were first changed according to adjustments in
4491 INNER and then by adjustments in OUTER. */
4493 ipa_parm_adjustment_vec
4494 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4495 ipa_parm_adjustment_vec outer)
4497 int i, outlen = outer.length ();
4498 int inlen = inner.length ();
4499 int removals = 0;
4500 ipa_parm_adjustment_vec adjustments, tmp;
4502 tmp.create (inlen);
4503 for (i = 0; i < inlen; i++)
4505 struct ipa_parm_adjustment *n;
4506 n = &inner[i];
4508 if (n->op == IPA_PARM_OP_REMOVE)
4509 removals++;
4510 else
4512 /* FIXME: Handling of new arguments is not implemented yet. */
4513 gcc_assert (n->op != IPA_PARM_OP_NEW);
4514 tmp.quick_push (*n);
4518 adjustments.create (outlen + removals);
4519 for (i = 0; i < outlen; i++)
4521 struct ipa_parm_adjustment r;
4522 struct ipa_parm_adjustment *out = &outer[i];
4523 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4525 memset (&r, 0, sizeof (r));
4526 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4527 if (out->op == IPA_PARM_OP_REMOVE)
4529 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4531 r.op = IPA_PARM_OP_REMOVE;
4532 adjustments.quick_push (r);
4534 continue;
4536 else
4538 /* FIXME: Handling of new arguments is not implemented yet. */
4539 gcc_assert (out->op != IPA_PARM_OP_NEW);
4542 r.base_index = in->base_index;
4543 r.type = out->type;
4545 /* FIXME: Create nonlocal value too. */
4547 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4548 r.op = IPA_PARM_OP_COPY;
4549 else if (in->op == IPA_PARM_OP_COPY)
4550 r.offset = out->offset;
4551 else if (out->op == IPA_PARM_OP_COPY)
4552 r.offset = in->offset;
4553 else
4554 r.offset = in->offset + out->offset;
4555 adjustments.quick_push (r);
4558 for (i = 0; i < inlen; i++)
4560 struct ipa_parm_adjustment *n = &inner[i];
4562 if (n->op == IPA_PARM_OP_REMOVE)
4563 adjustments.quick_push (*n);
4566 tmp.release ();
4567 return adjustments;
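/* A small worked example of the combination above (all indices made
   up): let INNER be { copy of param 0, removal of param 1 } and let
   OUTER, which operates on the single surviving parameter, be
   { removal of param 0 }. TMP then holds just INNER's copy, OUTER's
   removal maps onto it and yields one IPA_PARM_OP_REMOVE entry, and
   the final loop re-appends INNER's own removal, so the result removes
   both original parameters. */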
4570 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a
4571 human-friendly way, assuming they are meant to be applied to FNDECL. */
4573 void
4574 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4575 tree fndecl)
4577 int i, len = adjustments.length ();
4578 bool first = true;
4579 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4581 fprintf (file, "IPA param adjustments: ");
4582 for (i = 0; i < len; i++)
4584 struct ipa_parm_adjustment *adj;
4585 adj = &adjustments[i];
4587 if (!first)
4588 fprintf (file, " ");
4589 else
4590 first = false;
4592 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4593 print_generic_expr (file, parms[adj->base_index], 0);
4594 if (adj->base)
4596 fprintf (file, ", base: ");
4597 print_generic_expr (file, adj->base, 0);
4599 if (adj->new_decl)
4601 fprintf (file, ", new_decl: ");
4602 print_generic_expr (file, adj->new_decl, 0);
4604 if (adj->new_ssa_base)
4606 fprintf (file, ", new_ssa_base: ");
4607 print_generic_expr (file, adj->new_ssa_base, 0);
4610 if (adj->op == IPA_PARM_OP_COPY)
4611 fprintf (file, ", copy_param");
4612 else if (adj->op == IPA_PARM_OP_REMOVE)
4613 fprintf (file, ", remove_param");
4614 else
4615 fprintf (file, ", offset %li", (long) adj->offset);
4616 if (adj->by_ref)
4617 fprintf (file, ", by_ref");
4618 print_node_brief (file, ", type: ", adj->type, 0);
4619 fprintf (file, "\n");
4621 parms.release ();
4624 /* Dump the linked list of aggregate replacement values AV to F. */
4626 void
4627 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4629 bool comma = false;
4630 fprintf (f, " Aggregate replacements:");
4631 for (; av; av = av->next)
4633 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4634 av->index, av->offset);
4635 print_generic_expr (f, av->value, 0);
4636 comma = true;
4638 fprintf (f, "\n");
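/* The output produced above has the form (values made up):

     Aggregate replacements: 1[0]=8, 1[32]=0

   i.e. a comma separated list of INDEX[OFFSET]=VALUE entries. */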
4641 /* Stream out jump function JUMP_FUNC to OB. */
4643 static void
4644 ipa_write_jump_function (struct output_block *ob,
4645 struct ipa_jump_func *jump_func)
4647 struct ipa_agg_jf_item *item;
4648 struct bitpack_d bp;
4649 int i, count;
4651 streamer_write_uhwi (ob, jump_func->type);
4652 switch (jump_func->type)
4654 case IPA_JF_UNKNOWN:
4655 break;
4656 case IPA_JF_CONST:
4657 gcc_assert (
4658 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4659 stream_write_tree (ob, jump_func->value.constant.value, true);
4660 break;
4661 case IPA_JF_PASS_THROUGH:
4662 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4663 if (jump_func->value.pass_through.operation == NOP_EXPR)
4665 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4666 bp = bitpack_create (ob->main_stream);
4667 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4668 streamer_write_bitpack (&bp);
4670 else
4672 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4673 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4675 break;
4676 case IPA_JF_ANCESTOR:
4677 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4678 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4679 bp = bitpack_create (ob->main_stream);
4680 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4681 streamer_write_bitpack (&bp);
4682 break;
4685 count = vec_safe_length (jump_func->agg.items);
4686 streamer_write_uhwi (ob, count);
4687 if (count)
4689 bp = bitpack_create (ob->main_stream);
4690 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4691 streamer_write_bitpack (&bp);
4694 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4696 streamer_write_uhwi (ob, item->offset);
4697 stream_write_tree (ob, item->value, true);
4700 bp = bitpack_create (ob->main_stream);
4701 bp_pack_value (&bp, jump_func->alignment.known, 1);
4702 streamer_write_bitpack (&bp);
4703 if (jump_func->alignment.known)
4705 streamer_write_uhwi (ob, jump_func->alignment.align);
4706 streamer_write_uhwi (ob, jump_func->alignment.misalign);
4709 bp = bitpack_create (ob->main_stream);
4710 bp_pack_value (&bp, jump_func->bits.known, 1);
4711 streamer_write_bitpack (&bp);
4712 if (jump_func->bits.known)
4714 streamer_write_widest_int (ob, jump_func->bits.value);
4715 streamer_write_widest_int (ob, jump_func->bits.mask);
4717 bp_pack_value (&bp, jump_func->vr_known, 1);
4718 streamer_write_bitpack (&bp);
4719 if (jump_func->vr_known)
4721 streamer_write_enum (ob->main_stream, value_range_type,
4722 VR_LAST, jump_func->m_vr.type);
4723 stream_write_tree (ob, jump_func->m_vr.min, true);
4724 stream_write_tree (ob, jump_func->m_vr.max, true);
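/* Schematically, the record emitted above for one jump function is:

     uhwi     jump function type
     ...      type-specific payload (see the switch above)
     uhwi     number of aggregate items, then a by_ref bit if nonzero,
              then each item as an offset plus a value tree
     bitpack  alignment known bit [+ align and misalign as uhwi]
     bitpack  bits known bit [+ value and mask as widest_int]
     bitpack  VR known bit [+ range type enum, min and max trees]

   ipa_read_jump_function below has to consume these fields in exactly
   the same order. */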
4728 /* Read in jump function JUMP_FUNC from IB. */
4730 static void
4731 ipa_read_jump_function (struct lto_input_block *ib,
4732 struct ipa_jump_func *jump_func,
4733 struct cgraph_edge *cs,
4734 struct data_in *data_in)
4736 enum jump_func_type jftype;
4737 enum tree_code operation;
4738 int i, count;
4740 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4741 switch (jftype)
4743 case IPA_JF_UNKNOWN:
4744 ipa_set_jf_unknown (jump_func);
4745 break;
4746 case IPA_JF_CONST:
4747 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4748 break;
4749 case IPA_JF_PASS_THROUGH:
4750 operation = (enum tree_code) streamer_read_uhwi (ib);
4751 if (operation == NOP_EXPR)
4753 int formal_id = streamer_read_uhwi (ib);
4754 struct bitpack_d bp = streamer_read_bitpack (ib);
4755 bool agg_preserved = bp_unpack_value (&bp, 1);
4756 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4758 else
4760 tree operand = stream_read_tree (ib, data_in);
4761 int formal_id = streamer_read_uhwi (ib);
4762 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4763 operation);
4765 break;
4766 case IPA_JF_ANCESTOR:
4768 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4769 int formal_id = streamer_read_uhwi (ib);
4770 struct bitpack_d bp = streamer_read_bitpack (ib);
4771 bool agg_preserved = bp_unpack_value (&bp, 1);
4772 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4773 break;
4777 count = streamer_read_uhwi (ib);
4778 vec_alloc (jump_func->agg.items, count);
4779 if (count)
4781 struct bitpack_d bp = streamer_read_bitpack (ib);
4782 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4784 for (i = 0; i < count; i++)
4786 struct ipa_agg_jf_item item;
4787 item.offset = streamer_read_uhwi (ib);
4788 item.value = stream_read_tree (ib, data_in);
4789 jump_func->agg.items->quick_push (item);
4792 struct bitpack_d bp = streamer_read_bitpack (ib);
4793 bool alignment_known = bp_unpack_value (&bp, 1);
4794 if (alignment_known)
4796 jump_func->alignment.known = true;
4797 jump_func->alignment.align = streamer_read_uhwi (ib);
4798 jump_func->alignment.misalign = streamer_read_uhwi (ib);
4800 else
4801 jump_func->alignment.known = false;
4803 bp = streamer_read_bitpack (ib);
4804 bool bits_known = bp_unpack_value (&bp, 1);
4805 if (bits_known)
4807 jump_func->bits.known = true;
4808 jump_func->bits.value = streamer_read_widest_int (ib);
4809 jump_func->bits.mask = streamer_read_widest_int (ib);
4811 else
4812 jump_func->bits.known = false;
4814 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4815 bool vr_known = bp_unpack_value (&vr_bp, 1);
4816 if (vr_known)
4818 jump_func->vr_known = true;
4819 jump_func->m_vr.type = streamer_read_enum (ib,
4820 value_range_type,
4821 VR_LAST);
4822 jump_func->m_vr.min = stream_read_tree (ib, data_in);
4823 jump_func->m_vr.max = stream_read_tree (ib, data_in);
4825 else
4826 jump_func->vr_known = false;
4829 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4830 relevant to indirect inlining to OB. */
4832 static void
4833 ipa_write_indirect_edge_info (struct output_block *ob,
4834 struct cgraph_edge *cs)
4836 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4837 struct bitpack_d bp;
4839 streamer_write_hwi (ob, ii->param_index);
4840 bp = bitpack_create (ob->main_stream);
4841 bp_pack_value (&bp, ii->polymorphic, 1);
4842 bp_pack_value (&bp, ii->agg_contents, 1);
4843 bp_pack_value (&bp, ii->member_ptr, 1);
4844 bp_pack_value (&bp, ii->by_ref, 1);
4845 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
4846 bp_pack_value (&bp, ii->vptr_changed, 1);
4847 streamer_write_bitpack (&bp);
4848 if (ii->agg_contents || ii->polymorphic)
4849 streamer_write_hwi (ob, ii->offset);
4850 else
4851 gcc_assert (ii->offset == 0);
4853 if (ii->polymorphic)
4855 streamer_write_hwi (ob, ii->otr_token);
4856 stream_write_tree (ob, ii->otr_type, true);
4857 ii->context.stream_out (ob);
4861 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4862 relevant to indirect inlining from IB. */
4864 static void
4865 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4866 struct data_in *data_in,
4867 struct cgraph_edge *cs)
4869 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4870 struct bitpack_d bp;
4872 ii->param_index = (int) streamer_read_hwi (ib);
4873 bp = streamer_read_bitpack (ib);
4874 ii->polymorphic = bp_unpack_value (&bp, 1);
4875 ii->agg_contents = bp_unpack_value (&bp, 1);
4876 ii->member_ptr = bp_unpack_value (&bp, 1);
4877 ii->by_ref = bp_unpack_value (&bp, 1);
4878 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
4879 ii->vptr_changed = bp_unpack_value (&bp, 1);
4880 if (ii->agg_contents || ii->polymorphic)
4881 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4882 else
4883 ii->offset = 0;
4884 if (ii->polymorphic)
4886 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4887 ii->otr_type = stream_read_tree (ib, data_in);
4888 ii->context.stream_in (ib, data_in);
4892 /* Stream out NODE info to OB. */
4894 static void
4895 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4897 int node_ref;
4898 lto_symtab_encoder_t encoder;
4899 struct ipa_node_params *info = IPA_NODE_REF (node);
4900 int j;
4901 struct cgraph_edge *e;
4902 struct bitpack_d bp;
4904 encoder = ob->decl_state->symtab_node_encoder;
4905 node_ref = lto_symtab_encoder_encode (encoder, node);
4906 streamer_write_uhwi (ob, node_ref);
4908 streamer_write_uhwi (ob, ipa_get_param_count (info));
4909 for (j = 0; j < ipa_get_param_count (info); j++)
4910 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4911 bp = bitpack_create (ob->main_stream);
4912 gcc_assert (info->analysis_done
4913 || ipa_get_param_count (info) == 0);
4914 gcc_assert (!info->node_enqueued);
4915 gcc_assert (!info->ipcp_orig_node);
4916 for (j = 0; j < ipa_get_param_count (info); j++)
4917 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4918 streamer_write_bitpack (&bp);
4919 for (j = 0; j < ipa_get_param_count (info); j++)
4920 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4921 for (e = node->callees; e; e = e->next_callee)
4923 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4925 streamer_write_uhwi (ob,
4926 ipa_get_cs_argument_count (args) * 2
4927 + (args->polymorphic_call_contexts != NULL));
4928 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4930 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4931 if (args->polymorphic_call_contexts != NULL)
4932 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4935 for (e = node->indirect_calls; e; e = e->next_callee)
4937 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4939 streamer_write_uhwi (ob,
4940 ipa_get_cs_argument_count (args) * 2
4941 + (args->polymorphic_call_contexts != NULL));
4942 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4944 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4945 if (args->polymorphic_call_contexts != NULL)
4946 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4948 ipa_write_indirect_edge_info (ob, e);
4952 /* Stream in NODE info from IB. */
4954 static void
4955 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4956 struct data_in *data_in)
4958 struct ipa_node_params *info = IPA_NODE_REF (node);
4959 int k;
4960 struct cgraph_edge *e;
4961 struct bitpack_d bp;
4963 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4965 for (k = 0; k < ipa_get_param_count (info); k++)
4966 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4968 bp = streamer_read_bitpack (ib);
4969 if (ipa_get_param_count (info) != 0)
4970 info->analysis_done = true;
4971 info->node_enqueued = false;
4972 for (k = 0; k < ipa_get_param_count (info); k++)
4973 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4974 for (k = 0; k < ipa_get_param_count (info); k++)
4975 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4976 for (e = node->callees; e; e = e->next_callee)
4978 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4979 int count = streamer_read_uhwi (ib);
4980 bool contexts_computed = count & 1;
4981 count /= 2;
4983 if (!count)
4984 continue;
4985 vec_safe_grow_cleared (args->jump_functions, count);
4986 if (contexts_computed)
4987 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4989 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4991 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4992 data_in);
4993 if (contexts_computed)
4994 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4997 for (e = node->indirect_calls; e; e = e->next_callee)
4999 struct ipa_edge_args *args = IPA_EDGE_REF (e);
5000 int count = streamer_read_uhwi (ib);
5001 bool contexts_computed = count & 1;
5002 count /= 2;
5004 if (count)
5006 vec_safe_grow_cleared (args->jump_functions, count);
5007 if (contexts_computed)
5008 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
5009 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
5011 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5012 data_in);
5013 if (contexts_computed)
5014 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
5017 ipa_read_indirect_edge_info (ib, data_in, e);
5021 /* Write jump functions for all functions in the current partition. */
5023 void
5024 ipa_prop_write_jump_functions (void)
5026 struct cgraph_node *node;
5027 struct output_block *ob;
5028 unsigned int count = 0;
5029 lto_symtab_encoder_iterator lsei;
5030 lto_symtab_encoder_t encoder;
5032 if (!ipa_node_params_sum)
5033 return;
5035 ob = create_output_block (LTO_section_jump_functions);
5036 encoder = ob->decl_state->symtab_node_encoder;
5037 ob->symbol = NULL;
5038 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5039 lsei_next_function_in_partition (&lsei))
5041 node = lsei_cgraph_node (lsei);
5042 if (node->has_gimple_body_p ()
5043 && IPA_NODE_REF (node) != NULL)
5044 count++;
5047 streamer_write_uhwi (ob, count);
5049 /* Process all of the functions. */
5050 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5051 lsei_next_function_in_partition (&lsei))
5053 node = lsei_cgraph_node (lsei);
5054 if (node->has_gimple_body_p ()
5055 && IPA_NODE_REF (node) != NULL)
5056 ipa_write_node_info (ob, node);
5058 streamer_write_char_stream (ob->main_stream, 0);
5059 produce_asm (ob, NULL);
5060 destroy_output_block (ob);
5063 /* Read section in file FILE_DATA of length LEN with data DATA. */
5065 static void
5066 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
5067 size_t len)
5069 const struct lto_function_header *header =
5070 (const struct lto_function_header *) data;
5071 const int cfg_offset = sizeof (struct lto_function_header);
5072 const int main_offset = cfg_offset + header->cfg_size;
5073 const int string_offset = main_offset + header->main_size;
5074 struct data_in *data_in;
5075 unsigned int i;
5076 unsigned int count;
5078 lto_input_block ib_main ((const char *) data + main_offset,
5079 header->main_size, file_data->mode_table);
5081 data_in =
5082 lto_data_in_create (file_data, (const char *) data + string_offset,
5083 header->string_size, vNULL);
5084 count = streamer_read_uhwi (&ib_main);
5086 for (i = 0; i < count; i++)
5088 unsigned int index;
5089 struct cgraph_node *node;
5090 lto_symtab_encoder_t encoder;
5092 index = streamer_read_uhwi (&ib_main);
5093 encoder = file_data->symtab_node_encoder;
5094 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5095 index));
5096 gcc_assert (node->definition);
5097 ipa_read_node_info (&ib_main, node, data_in);
5099 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5100 len);
5101 lto_data_in_delete (data_in);
5104 /* Read ipcp jump functions. */
5106 void
5107 ipa_prop_read_jump_functions (void)
5109 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5110 struct lto_file_decl_data *file_data;
5111 unsigned int j = 0;
5113 ipa_check_create_node_params ();
5114 ipa_check_create_edge_args ();
5115 ipa_register_cgraph_hooks ();
5117 while ((file_data = file_data_vec[j++]))
5119 size_t len;
5120 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
5122 if (data)
5123 ipa_prop_read_section (file_data, data, len);
5127 /* After merging units, we can get a mismatch in argument counts.
5128 Decl merging might also have rendered parameter lists obsolete.
5129 Also compute called_with_variable_arg info. */
5131 void
5132 ipa_update_after_lto_read (void)
5134 ipa_check_create_node_params ();
5135 ipa_check_create_edge_args ();
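/* Stream out the IPA-CP transformation summary of NODE (its aggregate
   replacement chain, alignments, value ranges and known bits) to OB. */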
5138 void
5139 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
5141 int node_ref;
5142 unsigned int count = 0;
5143 lto_symtab_encoder_t encoder;
5144 struct ipa_agg_replacement_value *aggvals, *av;
5146 aggvals = ipa_get_agg_replacements_for_node (node);
5147 encoder = ob->decl_state->symtab_node_encoder;
5148 node_ref = lto_symtab_encoder_encode (encoder, node);
5149 streamer_write_uhwi (ob, node_ref);
5151 for (av = aggvals; av; av = av->next)
5152 count++;
5153 streamer_write_uhwi (ob, count);
5155 for (av = aggvals; av; av = av->next)
5157 struct bitpack_d bp;
5159 streamer_write_uhwi (ob, av->offset);
5160 streamer_write_uhwi (ob, av->index);
5161 stream_write_tree (ob, av->value, true);
5163 bp = bitpack_create (ob->main_stream);
5164 bp_pack_value (&bp, av->by_ref, 1);
5165 streamer_write_bitpack (&bp);
5168 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5169 if (ts && vec_safe_length (ts->alignments) > 0)
5171 count = ts->alignments->length ();
5173 streamer_write_uhwi (ob, count);
5174 for (unsigned i = 0; i < count; ++i)
5176 ipa_alignment *parm_al = &(*ts->alignments)[i];
5178 struct bitpack_d bp;
5179 bp = bitpack_create (ob->main_stream);
5180 bp_pack_value (&bp, parm_al->known, 1);
5181 streamer_write_bitpack (&bp);
5182 if (parm_al->known)
5184 streamer_write_uhwi (ob, parm_al->align);
5185 streamer_write_hwi_in_range (ob->main_stream, 0, parm_al->align,
5186 parm_al->misalign);
5190 else
5191 streamer_write_uhwi (ob, 0);
5193 if (ts && vec_safe_length (ts->m_vr) > 0)
5195 count = ts->m_vr->length ();
5196 streamer_write_uhwi (ob, count);
5197 for (unsigned i = 0; i < count; ++i)
5199 struct bitpack_d bp;
5200 ipa_vr *parm_vr = &(*ts->m_vr)[i];
5201 bp = bitpack_create (ob->main_stream);
5202 bp_pack_value (&bp, parm_vr->known, 1);
5203 streamer_write_bitpack (&bp);
5204 if (parm_vr->known)
5206 streamer_write_enum (ob->main_stream, value_range_type,
5207 VR_LAST, parm_vr->type);
5208 streamer_write_wide_int (ob, parm_vr->min);
5209 streamer_write_wide_int (ob, parm_vr->max);
5213 else
5214 streamer_write_uhwi (ob, 0);
5216 if (ts && vec_safe_length (ts->bits) > 0)
5218 count = ts->bits->length ();
5219 streamer_write_uhwi (ob, count);
5221 for (unsigned i = 0; i < count; ++i)
5223 const ipa_bits& bits_jfunc = (*ts->bits)[i];
5224 struct bitpack_d bp = bitpack_create (ob->main_stream);
5225 bp_pack_value (&bp, bits_jfunc.known, 1);
5226 streamer_write_bitpack (&bp);
5227 if (bits_jfunc.known)
5229 streamer_write_widest_int (ob, bits_jfunc.value);
5230 streamer_write_widest_int (ob, bits_jfunc.mask);
5234 else
5235 streamer_write_uhwi (ob, 0);
5238 /* Stream in the IPA-CP transformation summary of NODE from IB. */
5240 static void
5241 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
5242 data_in *data_in)
5244 struct ipa_agg_replacement_value *aggvals = NULL;
5245 unsigned int count, i;
5247 count = streamer_read_uhwi (ib);
5248 for (i = 0; i < count; i++)
5250 struct ipa_agg_replacement_value *av;
5251 struct bitpack_d bp;
5253 av = ggc_alloc<ipa_agg_replacement_value> ();
5254 av->offset = streamer_read_uhwi (ib);
5255 av->index = streamer_read_uhwi (ib);
5256 av->value = stream_read_tree (ib, data_in);
5257 bp = streamer_read_bitpack (ib);
5258 av->by_ref = bp_unpack_value (&bp, 1);
5259 av->next = aggvals;
5260 aggvals = av;
5262 ipa_set_node_agg_value_chain (node, aggvals);
5264 count = streamer_read_uhwi (ib);
5265 if (count > 0)
5267 ipcp_grow_transformations_if_necessary ();
5269 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5270 vec_safe_grow_cleared (ts->alignments, count);
5272 for (i = 0; i < count; i++)
5274 ipa_alignment *parm_al;
5275 parm_al = &(*ts->alignments)[i];
5276 struct bitpack_d bp;
5277 bp = streamer_read_bitpack (ib);
5278 parm_al->known = bp_unpack_value (&bp, 1);
5279 if (parm_al->known)
5281 parm_al->align = streamer_read_uhwi (ib);
5282 parm_al->misalign
5283 = streamer_read_hwi_in_range (ib, "ipa-prop misalign",
5284 0, parm_al->align);
5289 count = streamer_read_uhwi (ib);
5290 if (count > 0)
5292 ipcp_grow_transformations_if_necessary ();
5294 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5295 vec_safe_grow_cleared (ts->m_vr, count);
5296 for (i = 0; i < count; i++)
5298 ipa_vr *parm_vr;
5299 parm_vr = &(*ts->m_vr)[i];
5300 struct bitpack_d bp;
5301 bp = streamer_read_bitpack (ib);
5302 parm_vr->known = bp_unpack_value (&bp, 1);
5303 if (parm_vr->known)
5305 parm_vr->type = streamer_read_enum (ib, value_range_type,
5306 VR_LAST);
5307 parm_vr->min = streamer_read_wide_int (ib);
5308 parm_vr->max = streamer_read_wide_int (ib);
5312 count = streamer_read_uhwi (ib);
5313 if (count > 0)
5315 ipcp_grow_transformations_if_necessary ();
5317 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5318 vec_safe_grow_cleared (ts->bits, count);
5320 for (i = 0; i < count; i++)
5322 ipa_bits& bits_jfunc = (*ts->bits)[i];
5323 struct bitpack_d bp = streamer_read_bitpack (ib);
5324 bits_jfunc.known = bp_unpack_value (&bp, 1);
5325 if (bits_jfunc.known)
5327 bits_jfunc.value = streamer_read_widest_int (ib);
5328 bits_jfunc.mask = streamer_read_widest_int (ib);
5334 /* Write the IPA-CP transformation summaries of all nodes in the current partition. */
5336 void
5337 ipcp_write_transformation_summaries (void)
5339 struct cgraph_node *node;
5340 struct output_block *ob;
5341 unsigned int count = 0;
5342 lto_symtab_encoder_iterator lsei;
5343 lto_symtab_encoder_t encoder;
5345 ob = create_output_block (LTO_section_ipcp_transform);
5346 encoder = ob->decl_state->symtab_node_encoder;
5347 ob->symbol = NULL;
5348 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5349 lsei_next_function_in_partition (&lsei))
5351 node = lsei_cgraph_node (lsei);
5352 if (node->has_gimple_body_p ())
5353 count++;
5356 streamer_write_uhwi (ob, count);
5358 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5359 lsei_next_function_in_partition (&lsei))
5361 node = lsei_cgraph_node (lsei);
5362 if (node->has_gimple_body_p ())
5363 write_ipcp_transformation_info (ob, node);
5365 streamer_write_char_stream (ob->main_stream, 0);
5366 produce_asm (ob, NULL);
5367 destroy_output_block (ob);
5370 /* Read replacements section in file FILE_DATA of length LEN with data
5371 DATA. */
5373 static void
5374 read_replacements_section (struct lto_file_decl_data *file_data,
5375 const char *data,
5376 size_t len)
5378 const struct lto_function_header *header =
5379 (const struct lto_function_header *) data;
5380 const int cfg_offset = sizeof (struct lto_function_header);
5381 const int main_offset = cfg_offset + header->cfg_size;
5382 const int string_offset = main_offset + header->main_size;
5383 struct data_in *data_in;
5384 unsigned int i;
5385 unsigned int count;
5387 lto_input_block ib_main ((const char *) data + main_offset,
5388 header->main_size, file_data->mode_table);
5390 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5391 header->string_size, vNULL);
5392 count = streamer_read_uhwi (&ib_main);
5394 for (i = 0; i < count; i++)
5396 unsigned int index;
5397 struct cgraph_node *node;
5398 lto_symtab_encoder_t encoder;
5400 index = streamer_read_uhwi (&ib_main);
5401 encoder = file_data->symtab_node_encoder;
5402 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5403 index));
5404 gcc_assert (node->definition);
5405 read_ipcp_transformation_info (&ib_main, node, data_in);
5407 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
5408 len);
5409 lto_data_in_delete (data_in);
5412 /* Read IPA-CP transformation summaries, including aggregate replacements. */
5414 void
5415 ipcp_read_transformation_summaries (void)
5417 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5418 struct lto_file_decl_data *file_data;
5419 unsigned int j = 0;
5421 while ((file_data = file_data_vec[j++]))
5423 size_t len;
5424 const char *data = lto_get_section_data (file_data,
5425 LTO_section_ipcp_transform,
5426 NULL, &len);
5427 if (data)
5428 read_replacements_section (file_data, data, len);
5432 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5433 NODE. */
5435 static void
5436 adjust_agg_replacement_values (struct cgraph_node *node,
5437 struct ipa_agg_replacement_value *aggval)
5439 struct ipa_agg_replacement_value *v;
5440 int i, c = 0, d = 0, *adj;
5442 if (!node->clone.combined_args_to_skip)
5443 return;
5445 for (v = aggval; v; v = v->next)
5447 gcc_assert (v->index >= 0);
5448 if (c < v->index)
5449 c = v->index;
5451 c++;
5453 adj = XALLOCAVEC (int, c);
5454 for (i = 0; i < c; i++)
5455 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5457 adj[i] = -1;
5458 d++;
5460 else
5461 adj[i] = i - d;
5463 for (v = aggval; v; v = v->next)
5464 v->index = adj[v->index];
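/* For instance (hypothetical clone): if NODE was cloned with
   combined_args_to_skip = { 1 } and AGGVAL carries values for indices
   0 and 2, the mapping computed above is adj = { 0, -1, 1 }, so the
   value formerly at index 2 is renumbered to 1 while index 0 is left
   alone. */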
5467 /* Dominator walker driving the ipcp modification phase. */
5469 class ipcp_modif_dom_walker : public dom_walker
5471 public:
5472 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
5473 vec<ipa_param_descriptor> descs,
5474 struct ipa_agg_replacement_value *av,
5475 bool *sc, bool *cc)
5476 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5477 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5479 virtual edge before_dom_children (basic_block);
5481 private:
5482 struct ipa_func_body_info *m_fbi;
5483 vec<ipa_param_descriptor> m_descriptors;
5484 struct ipa_agg_replacement_value *m_aggval;
5485 bool *m_something_changed, *m_cfg_changed;
5488 edge
5489 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5491 gimple_stmt_iterator gsi;
5492 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5494 struct ipa_agg_replacement_value *v;
5495 gimple *stmt = gsi_stmt (gsi);
5496 tree rhs, val, t;
5497 HOST_WIDE_INT offset, size;
5498 int index;
5499 bool by_ref, vce;
5501 if (!gimple_assign_load_p (stmt))
5502 continue;
5503 rhs = gimple_assign_rhs1 (stmt);
5504 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5505 continue;
5507 vce = false;
5508 t = rhs;
5509 while (handled_component_p (t))
5511 /* V_C_E can do things like convert an array of integers to one
5512 bigger integer and similar things we do not handle below. */
5513 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5515 vce = true;
5516 break;
5518 t = TREE_OPERAND (t, 0);
5520 if (vce)
5521 continue;
5523 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
5524 &offset, &size, &by_ref))
5525 continue;
5526 for (v = m_aggval; v; v = v->next)
5527 if (v->index == index
5528 && v->offset == offset)
5529 break;
5530 if (!v
5531 || v->by_ref != by_ref
5532 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5533 continue;
5535 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5536 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5538 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5539 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5540 else if (TYPE_SIZE (TREE_TYPE (rhs))
5541 == TYPE_SIZE (TREE_TYPE (v->value)))
5542 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5543 else
5545 if (dump_file)
5547 fprintf (dump_file, " const ");
5548 print_generic_expr (dump_file, v->value, 0);
5549 fprintf (dump_file, " can't be converted to type of ");
5550 print_generic_expr (dump_file, rhs, 0);
5551 fprintf (dump_file, "\n");
5553 continue;
5556 else
5557 val = v->value;
5559 if (dump_file && (dump_flags & TDF_DETAILS))
5561 fprintf (dump_file, "Modifying stmt:\n ");
5562 print_gimple_stmt (dump_file, stmt, 0, 0);
5564 gimple_assign_set_rhs_from_tree (&gsi, val);
5565 update_stmt (stmt);
5567 if (dump_file && (dump_flags & TDF_DETAILS))
5569 fprintf (dump_file, "into:\n ");
5570 print_gimple_stmt (dump_file, stmt, 0, 0);
5571 fprintf (dump_file, "\n");
5574 *m_something_changed = true;
5575 if (maybe_clean_eh_stmt (stmt)
5576 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5577 *m_cfg_changed = true;
5579 return NULL;
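/* The net effect of the walker above, sketched on a made-up example:
   in a clone where the aggregate contents of parameter S at offset 0
   are known to be the constant 7, a load

     x_1 = s_2(D)->f;

   is rewritten in place into

     x_1 = 7;

   possibly through a NOP_EXPR or VIEW_CONVERT_EXPR when the recorded
   value has a different but convertible type. */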
5582 /* Update alignment of formal parameters as described in
5583 ipcp_transformation_summary. */
5585 static void
5586 ipcp_update_alignments (struct cgraph_node *node)
5588 tree fndecl = node->decl;
5589 tree parm = DECL_ARGUMENTS (fndecl);
5590 tree next_parm = parm;
5591 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5592 if (!ts || vec_safe_length (ts->alignments) == 0)
5593 return;
5594 const vec<ipa_alignment, va_gc> &alignments = *ts->alignments;
5595 unsigned count = alignments.length ();
5597 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5599 if (node->clone.combined_args_to_skip
5600 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5601 continue;
5602 gcc_checking_assert (parm);
5603 next_parm = DECL_CHAIN (parm);
5605 if (!alignments[i].known || !is_gimple_reg (parm))
5606 continue;
5607 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5608 if (!ddef)
5609 continue;
5611 if (dump_file)
5612 fprintf (dump_file, " Adjusting alignment of param %u to %u, "
5613 "misalignment to %u\n", i, alignments[i].align,
5614 alignments[i].misalign);
5616 struct ptr_info_def *pi = get_ptr_info (ddef);
5617 gcc_checking_assert (pi);
5618 unsigned old_align;
5619 unsigned old_misalign;
5620 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5622 if (old_known
5623 && old_align >= alignments[i].align)
5625 if (dump_file)
5626 fprintf (dump_file, " But the alignment was already %u.\n",
5627 old_align);
5628 continue;
5630 set_ptr_info_alignment (pi, alignments[i].align, alignments[i].misalign);
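/* As an example of what the loop above records (numbers made up): an
   entry with align 16 and misalign 4 for a pointer parameter P makes
   the ptr_info of P's default def say that P is congruent to 4 modulo
   16, information later passes can rely on. */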
5634 /* Update bits info of formal parameters as described in
5635 ipcp_transformation_summary. */
5637 static void
5638 ipcp_update_bits (struct cgraph_node *node)
5640 tree parm = DECL_ARGUMENTS (node->decl);
5641 tree next_parm = parm;
5642 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5644 if (!ts || vec_safe_length (ts->bits) == 0)
5645 return;
5647 vec<ipa_bits, va_gc> &bits = *ts->bits;
5648 unsigned count = bits.length ();
5650 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5652 if (node->clone.combined_args_to_skip
5653 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5654 continue;
5656 gcc_checking_assert (parm);
5657 next_parm = DECL_CHAIN (parm);
5659 if (!bits[i].known
5660 || !INTEGRAL_TYPE_P (TREE_TYPE (parm))
5661 || !is_gimple_reg (parm))
5662 continue;
5664 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5665 if (!ddef)
5666 continue;
5668 if (dump_file)
5670 fprintf (dump_file, "Adjusting mask for param %u to ", i);
5671 print_hex (bits[i].mask, dump_file);
5672 fprintf (dump_file, "\n");
5675 unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
5676 signop sgn = TYPE_SIGN (TREE_TYPE (ddef));
5678 wide_int nonzero_bits = wide_int::from (bits[i].mask, prec, UNSIGNED)
5679 | wide_int::from (bits[i].value, prec, sgn);
5680 set_nonzero_bits (ddef, nonzero_bits);
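/* Worked example of the mask computation above (numbers made up): with
   bits[i].mask == 0xf0 and bits[i].value == 0x2, the nonzero bits
   recorded for the default def are 0xf0 | 0x2 == 0xf2, i.e. every bit
   that is either not known (set in the mask) or known to be one. */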
5684 /* Update value range of formal parameters as described in
5685 ipcp_transformation_summary. */
5687 static void
5688 ipcp_update_vr (struct cgraph_node *node)
5690 tree fndecl = node->decl;
5691 tree parm = DECL_ARGUMENTS (fndecl);
5692 tree next_parm = parm;
5693 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5694 if (!ts || vec_safe_length (ts->m_vr) == 0)
5695 return;
5696 const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
5697 unsigned count = vr.length ();
5699 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5701 if (node->clone.combined_args_to_skip
5702 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5703 continue;
5704 gcc_checking_assert (parm);
5705 next_parm = DECL_CHAIN (parm);
5706 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5708 if (!ddef || !is_gimple_reg (parm))
5709 continue;
5711 if (vr[i].known
5712 && INTEGRAL_TYPE_P (TREE_TYPE (ddef))
5713 && !POINTER_TYPE_P (TREE_TYPE (ddef))
5714 && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
5716 tree type = TREE_TYPE (ddef);
5717 unsigned prec = TYPE_PRECISION (type);
5718 if (dump_file)
5720 fprintf (dump_file, "Setting value range of param %u ", i);
5721 fprintf (dump_file, "%s[",
5722 (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
5723 print_decs (vr[i].min, dump_file);
5724 fprintf (dump_file, ", ");
5725 print_decs (vr[i].max, dump_file);
5726 fprintf (dump_file, "]\n");
5728 set_range_info (ddef, vr[i].type,
5729 wide_int_storage::from (vr[i].min, prec,
5730 TYPE_SIGN (type)),
5731 wide_int_storage::from (vr[i].max, prec,
5732 TYPE_SIGN (type)));
5737 /* IPA-CP transformation phase: apply recorded alignments, known bits, value ranges and aggregate value replacements to the body of NODE. */
5739 unsigned int
5740 ipcp_transform_function (struct cgraph_node *node)
5742 vec<ipa_param_descriptor> descriptors = vNULL;
5743 struct ipa_func_body_info fbi;
5744 struct ipa_agg_replacement_value *aggval;
5745 int param_count;
5746 bool cfg_changed = false, something_changed = false;
5748 gcc_checking_assert (cfun);
5749 gcc_checking_assert (current_function_decl);
5751 if (dump_file)
5752 fprintf (dump_file, "Modification phase of node %s/%i\n",
5753 node->name (), node->order);
5755 ipcp_update_alignments (node);
5756 ipcp_update_bits (node);
5757 ipcp_update_vr (node);
5758 aggval = ipa_get_agg_replacements_for_node (node);
5759 if (!aggval)
5760 return 0;
5761 param_count = count_formal_params (node->decl);
5762 if (param_count == 0)
5763 return 0;
5764 adjust_agg_replacement_values (node, aggval);
5765 if (dump_file)
5766 ipa_dump_agg_replacement_values (dump_file, aggval);
5768 fbi.node = node;
5769 fbi.info = NULL;
5770 fbi.bb_infos = vNULL;
5771 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5772 fbi.param_count = param_count;
5773 fbi.aa_walked = 0;
5775 descriptors.safe_grow_cleared (param_count);
5776 ipa_populate_param_decls (node, descriptors);
5777 calculate_dominance_info (CDI_DOMINATORS);
5778 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5779 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5781 int i;
5782 struct ipa_bb_info *bi;
5783 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5784 free_ipa_bb_info (bi);
5785 fbi.bb_infos.release ();
5786 free_dominance_info (CDI_DOMINATORS);
5787 (*ipcp_transformations)[node->uid].agg_values = NULL;
5788 (*ipcp_transformations)[node->uid].alignments = NULL;
5789 descriptors.release ();
5791 if (!something_changed)
5792 return 0;
5793 else if (cfg_changed)
5794 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5795 else
5796 return TODO_update_ssa_only_virtuals;