/* Interprocedural analyses.
   Copyright (C) 2005-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "calls.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "gimple-pretty-print.h"
#include "params.h"
#include "ipa-utils.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"

/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;
/* Vector of IPA-CP transformation data for each clone.  */
vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
/* Vector where the argument information for each call graph edge is actually
   stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");

/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
}

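/* In effect this rejects bodies whose per-function options (e.g. those coming
   from __attribute__ ((optimize (...)))) disable optimization entirely or
   disable IPA-CP in particular.  */
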
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl_or_type == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}

/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl_or_type = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
							     true);
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

static int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

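/* For example, given

     int foo (int a, struct S s, double *p);

   count_formal_params (foo) returns 3 and the descriptors filled in by
   ipa_populate_param_decls above are indexed 0 to 2 in declaration order.  */
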
/* Dump the param #I of the function corresponding to INFO to FILE.  Note
   there is no setter function as the descriptor array is built just once
   using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl_or_type)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl_or_type, 0);
    }
}

/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}

/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val, 0);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
				  0);
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f,
				  jump_func->value.pass_through.operand, 0);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value, 0);
		}
	      fprintf (f, "\n");
	    }
	}

      struct ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, "         Context: ");
	  ctx->dump (f);
	}

      if (jump_func->alignment.known)
	{
	  fprintf (f, "         Alignment: %u, misalignment: %u\n",
		   jump_func->alignment.align,
		   jump_func->alignment.misalign);
	}
      else
	fprintf (f, "         Unknown alignment\n");

      if (jump_func->bits.known)
	{
	  fprintf (f, "         value: "); print_hex (jump_func->bits.value, f);
	  fprintf (f, ", mask: "); print_hex (jump_func->bits.mask, f);
	  fprintf (f, "\n");
	}
      else
	fprintf (f, "         Unknown bits\n");
    }
}

/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s/%i:\n", node->name (),
	   node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
	       xstrdup_for_dump (node->name ()), node->order,
	       xstrdup_for_dump (cs->callee->name ()),
	       cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

/* Set JFUNC to be a jump function carrying no information.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
  jfunc->alignment.known = false;
  jfunc->bits.known = false;
}

/* Set DST to be a copy of another jump function SRC (to be used by jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}

/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}

/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}

/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}

/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}

/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return true if the dynamic type may change
   between the beginning of the function and the invocation of CALL.

   Generally functions are not allowed to change the type of such instances,
   but they call destructors.  We assume that methods cannot destroy the THIS
   pointer.  Also, as a special case, constructors and destructors may change
   the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that would require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inline cdtor is actually working on ARG, but we don't have an
     easy tie on this, so we punt on all non-pure cdtors).
     We may also record the types of cdtors and, once we know the type
     of the instance, match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gcall *call, struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
						call, jfunc, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
						call, jfunc, 0);
}

/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct ipa_func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}

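/* The limit corresponds to the --param ipa-max-aa-steps knob.  Once the
   budget is exhausted, the predicates below conservatively behave as if the
   memory in question may have been modified.  */
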
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}

/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}

/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have gathered
   so far, which does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  if (TREE_READONLY (base))
    return true;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}

/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
			    vec<ipa_param_descriptor> descriptors,
			    gimple *stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}

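/* E.g. given

     int a.0;
     a.0_2 = a;

   where A is a formal parameter that is not a gimple register, the statement
   a.0_2 = a yields the index of A, provided the alias walk shows that A could
   not have been written to before the load.  */
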
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading
   data from an aggregate passed as a parameter.

   The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it
   returns false if it cannot prove that the value has not been modified before
   the load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will return true
   even if it cannot prove the value has not been modified, in that case it
   will store false to *GUARANTEED_UNMODIFIED, otherwise it will store true
   there.

   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If the function returns
   true, *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index,
   offset within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor> descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
			bool *by_ref_p, bool *guaranteed_unmodified)
{
  int index;
  HOST_WIDE_INT size, max_size;
  bool reverse;
  tree base
    = get_ref_base_and_extent (op, offset_p, &size, &max_size, &reverse);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  if (guaranteed_unmodified)
	    *guaranteed_unmodified = true;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	   void (*<T2e4>) (struct S *) D.1867;
	   struct S * p.1;

	   <bb 2>:
	   p.1_1 = p;
	   D.1867_2 = p.1_1->f;
	   D.1867_2 ();
	   gdp = &p;
      */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0)
    {
      bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
      if (!data_preserved && !guaranteed_unmodified)
	return false;

      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      if (guaranteed_unmodified)
	*guaranteed_unmodified = data_preserved;
      return true;
    }
  return false;
}

/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
	int a.0;

	a.0_2 = a;
	bar (a.0_2);
      }

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
	int D.2064;

	D.2064_4 = a.1(D) + 4;
	bar (D.2064_4);
      }

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
	int a.0;
	int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);
      }

   3) The passed value is an address of an object within another one (which is
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);
     }

   INFO is the structure describing individual parameters as accessed in
   different stages of IPA optimizations.  PARMS_AINFO contains the information
   that is only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
	{
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  ipa_set_jf_arith_pass_through (jfunc, index, op2,
					 gimple_assign_rhs_code (stmt));
	}
      else if (gimple_assign_single_p (stmt))
	{
	  bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
	  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size, &reverse);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size, &reverse);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}

/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

   if (obj_2(D) != 0B)
     goto <bb 3>;
   else
     goto <bb 4>;

   <bb 3>:
   iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
   # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
   D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
   return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}

/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}

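/* In the common (Itanium) C++ ABI a pointer to member function is exactly
   such a record: a __pfn field of method pointer type followed by a __delta
   integer; METHOD_PTR then receives the former field and DELTA the
   latter.  */
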
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}

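/* For instance, given

     b_2 = a_1(D);
     c_3 = b_2;

   get_ssa_def_if_simple_copy (c_3) walks the copies backwards and returns
   a_1(D).  */
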
/* Simple linked list, describing known contents of an aggregate before a
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};

/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
				HOST_WIDE_INT lhs_offset,
				HOST_WIDE_INT lhs_size,
				bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
	return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	/* We already know this value is subsequently overwritten with
	   something else.  */
	*already_there = true;
      else
	/* Otherwise this is a partial overlap which we cannot
	   represent.  */
	return NULL;
    }
  return p;
}

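/* For example, in a list already describing bits 0-31 and 64-95, a store to
   bits 32-63 is inserted between the two entries, a second store to bits 0-31
   sets *ALREADY_THERE (the caller scans backwards from the call, so the value
   seen first is the one that survives), and a store to bits 16-47 overlaps
   only partially and yields NULL.  */
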
/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET, and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int const_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
	{
	  struct ipa_agg_jf_item item;
	  item.offset = list->offset - arg_offset;
	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	  item.value = unshare_expr_without_location (list->constant);
	  jfunc->agg.items->quick_push (item);
	}
      list = list->next;
    }
}

/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */

static void
determine_locally_known_aggregate_parts (gcall *call, tree arg,
					 tree arg_type,
					 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  if (PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS) == 0)
    return;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  HOST_WIDE_INT arg_max_size;
	  bool reverse;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					      &arg_max_size, &reverse);
	  if (arg_max_size == -1
	      || arg_max_size != arg_size
	      || arg_offset < 0)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;
      bool reverse;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					  &arg_max_size, &reverse);
      if (arg_max_size == -1
	  || arg_max_size != arg_size
	  || arg_offset < 0)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
     describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple *stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;
      bool reverse;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
	continue;
      if (!gimple_assign_single_p (stmt))
	break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
	  || TREE_CODE (lhs) == BIT_FIELD_REF
	  || contains_bitfld_component_ref_p (lhs))
	break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
					  &lhs_max_size, &reverse);
      if (lhs_max_size == -1
	  || lhs_max_size != lhs_size)
	break;

      if (check_ref)
	{
	  if (TREE_CODE (lhs_base) != MEM_REF
	      || TREE_OPERAND (lhs_base, 0) != arg_base
	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	    break;
	}
      else if (lhs_base != arg_base)
	{
	  if (DECL_P (lhs_base))
	    continue;
	  else
	    break;
	}

      bool already_there = false;
      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
					  &already_there);
      if (!p)
	break;
      if (already_there)
	continue;

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
	{
	  n->constant = rhs;
	  const_count++;
	}
      else
	n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}

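/* Return the type of formal parameter number I of the callee of call graph
   edge E, or NULL if it cannot be determined.  The declared argument types
   are consulted first, the callee's DECL_ARGUMENTS second.  */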
static tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}

/* Compute jump functions for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  struct ipa_polymorphic_call_context context (cs->caller->decl,
						       arg, cs->call_stmt,
						       &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  unsigned HOST_WIDE_INT hwi_bitpos;
	  unsigned align;

	  get_pointer_alignment_1 (arg, &align, &hwi_bitpos);
	  if (align > BITS_PER_UNIT
	      && align % BITS_PER_UNIT == 0
	      && hwi_bitpos % BITS_PER_UNIT == 0)
	    {
	      jfunc->alignment.known = true;
	      jfunc->alignment.align = align / BITS_PER_UNIT;
	      jfunc->alignment.misalign = hwi_bitpos / BITS_PER_UNIT;
	    }
	  else
	    gcc_assert (!jfunc->alignment.known);
	}
      else
	gcc_assert (!jfunc->alignment.known);

      if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
	  && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
	{
	  jfunc->bits.known = true;

	  if (TREE_CODE (arg) == SSA_NAME)
	    {
	      jfunc->bits.value = 0;
	      jfunc->bits.mask = widest_int::from (get_nonzero_bits (arg),
						   TYPE_SIGN (TREE_TYPE (arg)));
	    }
	  else
	    {
	      jfunc->bits.value = wi::to_widest (arg);
	      jfunc->bits.mask = 0;
	    }
	}
      else
	gcc_assert (!jfunc->bits.known);

      if (is_gimple_ip_invariant (arg)
	  || (TREE_CODE (arg) == VAR_DECL
	      && is_global_var (arg)
	      && TREE_READONLY (arg)))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >= 0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
		}
	    }
	  else
	    {
	      gimple *stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call,
						    as_a <gphi *> (stmt));
	    }
	}

      /* If ARG is a pointer, we cannot use its type to determine the type of
	 aggregate passed (because type conversions are ignored in gimple).
	 Usually we can safely get the type from the function declaration, but
	 in case of K&R prototypes or variadic functions we can try our luck
	 with the type of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object, we
	 may better work out the type based on the memory stores we find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
    }
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}

/* Compute jump functions for all edges - both direct and indirect - outgoing
   from BB.  */

static void
ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
{
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  int i;
  struct cgraph_edge *cs;

  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
    {
      struct cgraph_node *callee = cs->callee;

      if (callee)
	{
	  callee->ultimate_alias_target ();
	  /* We do not need to bother analyzing calls to unknown functions
	     unless they may become known during lto/whopr.  */
	  if (!callee->definition && !flag_lto)
	    continue;
	}
      ipa_compute_jump_functions_for_edge (fbi, cs);
    }
}

/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}

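/* With the member pointer representation checked by type_like_member_ptr_p,
   this matches loads such as f$__pfn_24 = f.__pfn or the equivalent MEM_REF
   form (and the corresponding __delta load when USE_DELTA is set), as in the
   pattern documented before ipa_analyze_indirect_call_uses below.  */
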
/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}

/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index,
		     gcall *stmt)
{
  struct cgraph_edge *cs;

  cs = node->get_edge (stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  cs->indirect_info->guaranteed_unmodified = 0;
  return cs;
}

1881 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1882 (described by INFO). PARMS_AINFO is a pointer to a vector containing
1883 intermediate information about each formal parameter. Currently it checks
1884 whether the call calls a pointer that is a formal parameter and if so, the
1885 parameter is marked with the called flag and an indirect call graph edge
1886 describing the call is created. This is very simple for ordinary pointers
1887 represented in SSA but not-so-nice when it comes to member pointers. The
1888 ugly part of this function does nothing more than trying to match the
1889 pattern of such a call. An example of such a pattern is the gimple dump
1890 below, the call is on the last line:
1892 <bb 2>:
1893 f$__delta_5 = f.__delta;
1894 f$__pfn_24 = f.__pfn;
1897 <bb 2>:
1898 f$__delta_5 = MEM[(struct *)&f];
1899 f$__pfn_24 = MEM[(struct *)&f + 4B];
1901 and a few lines below:
1903 <bb 5>
1904 D.2496_3 = (int) f$__pfn_24;
1905 D.2497_4 = D.2496_3 & 1;
1906 if (D.2497_4 != 0)
1907 goto <bb 3>;
1908 else
1909 goto <bb 4>;
1911 <bb 6>:
1912 D.2500_7 = (unsigned int) f$__delta_5;
1913 D.2501_8 = &S + D.2500_7;
1914 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1915 D.2503_10 = *D.2502_9;
1916 D.2504_12 = f$__pfn_24 + -1;
1917 D.2505_13 = (unsigned int) D.2504_12;
1918 D.2506_14 = D.2503_10 + D.2505_13;
1919 D.2507_15 = *D.2506_14;
1920 iftmp.11_16 = (String:: *) D.2507_15;
1922 <bb 7>:
1923 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1924 D.2500_19 = (unsigned int) f$__delta_5;
1925 D.2508_20 = &S + D.2500_19;
1926 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1928 Such patterns are results of simple calls to a member pointer:
1930 int doprinting (int (MyString::* f)(int) const)
1932 MyString S ("somestring");
1934 return (S.*f)(4);
1937 The function also looks for called pointers loaded from aggregates
1938 passed by value or reference. */
1940 static void
1941 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
1942 tree target)
1944 struct ipa_node_params *info = fbi->info;
1945 HOST_WIDE_INT offset;
1946 bool by_ref;
1948 if (SSA_NAME_IS_DEFAULT_DEF (target))
1950 tree var = SSA_NAME_VAR (target);
1951 int index = ipa_get_param_decl_index (info, var);
1952 if (index >= 0)
1953 ipa_note_param_call (fbi->node, index, call);
1954 return;
1957 int index;
1958 gimple *def = SSA_NAME_DEF_STMT (target);
1959 bool guaranteed_unmodified;
1960 if (gimple_assign_single_p (def)
1961 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
1962 gimple_assign_rhs1 (def), &index, &offset,
1963 NULL, &by_ref, &guaranteed_unmodified))
1965 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
1966 cs->indirect_info->offset = offset;
1967 cs->indirect_info->agg_contents = 1;
1968 cs->indirect_info->by_ref = by_ref;
1969 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
1970 return;
1973 /* Now we need to try to match the complex pattern of calling a member
1974 pointer. */
1975 if (gimple_code (def) != GIMPLE_PHI
1976 || gimple_phi_num_args (def) != 2
1977 || !POINTER_TYPE_P (TREE_TYPE (target))
1978 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1979 return;
1981 /* First, we need to check whether one of these is a load from a member
1982 pointer that is a parameter to this function. */
1983 tree n1 = PHI_ARG_DEF (def, 0);
1984 tree n2 = PHI_ARG_DEF (def, 1);
1985 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1986 return;
1987 gimple *d1 = SSA_NAME_DEF_STMT (n1);
1988 gimple *d2 = SSA_NAME_DEF_STMT (n2);
1990 tree rec;
1991 basic_block bb, virt_bb;
1992 basic_block join = gimple_bb (def);
1993 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
1995 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
1996 return;
1998 bb = EDGE_PRED (join, 0)->src;
1999 virt_bb = gimple_bb (d2);
2001 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2003 bb = EDGE_PRED (join, 1)->src;
2004 virt_bb = gimple_bb (d1);
2006 else
2007 return;
2009 /* Second, we need to check that the basic blocks are laid out in the way
2010 corresponding to the pattern. */
2012 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2013 || single_pred (virt_bb) != bb
2014 || single_succ (virt_bb) != join)
2015 return;
2017 /* Third, let's see that the branching is done depending on the least
2018 significant bit of the pfn. */
2020 gimple *branch = last_stmt (bb);
2021 if (!branch || gimple_code (branch) != GIMPLE_COND)
2022 return;
2024 if ((gimple_cond_code (branch) != NE_EXPR
2025 && gimple_cond_code (branch) != EQ_EXPR)
2026 || !integer_zerop (gimple_cond_rhs (branch)))
2027 return;
2029 tree cond = gimple_cond_lhs (branch);
2030 if (!ipa_is_ssa_with_stmt_def (cond))
2031 return;
2033 def = SSA_NAME_DEF_STMT (cond);
2034 if (!is_gimple_assign (def)
2035 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2036 || !integer_onep (gimple_assign_rhs2 (def)))
2037 return;
2039 cond = gimple_assign_rhs1 (def);
2040 if (!ipa_is_ssa_with_stmt_def (cond))
2041 return;
2043 def = SSA_NAME_DEF_STMT (cond);
2045 if (is_gimple_assign (def)
2046 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2048 cond = gimple_assign_rhs1 (def);
2049 if (!ipa_is_ssa_with_stmt_def (cond))
2050 return;
2051 def = SSA_NAME_DEF_STMT (cond);
2054 tree rec2;
2055 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2056 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2057 == ptrmemfunc_vbit_in_delta),
2058 NULL);
2059 if (rec != rec2)
2060 return;
2062 index = ipa_get_param_decl_index (info, rec);
2063 if (index >= 0
2064 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2066 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2067 cs->indirect_info->offset = offset;
2068 cs->indirect_info->agg_contents = 1;
2069 cs->indirect_info->member_ptr = 1;
2070 cs->indirect_info->guaranteed_unmodified = 1;
2073 return;
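/* Illustrative sketch (not part of the original source): the CFG shape the
   matcher above insists on.  BB holds the member-pointer loads and the test
   of the least significant bit of the pfn, VIRT_BB performs the vtable
   lookup on the virtual path only, and both fall into JOIN, whose PHI
   selects the function pointer that is finally called:

        bb  (loads, pfn & 1 test)
       /  \
      |  virt_bb  (vtable lookup)
       \  /
       join  (PHI of the two pointers, the call itself)

   Any other layout makes the function give up.  */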
2076 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2077 object referenced in the expression is a formal parameter of the caller
2078 FBI->node (described by FBI->info), create a call note for the
2079 statement. */
2081 static void
2082 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2083 gcall *call, tree target)
2085 tree obj = OBJ_TYPE_REF_OBJECT (target);
2086 int index;
2087 HOST_WIDE_INT anc_offset;
2089 if (!flag_devirtualize)
2090 return;
2092 if (TREE_CODE (obj) != SSA_NAME)
2093 return;
2095 struct ipa_node_params *info = fbi->info;
2096 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2098 struct ipa_jump_func jfunc;
2099 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2100 return;
2102 anc_offset = 0;
2103 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2104 gcc_assert (index >= 0);
2105 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2106 call, &jfunc))
2107 return;
2109 else
2111 struct ipa_jump_func jfunc;
2112 gimple *stmt = SSA_NAME_DEF_STMT (obj);
2113 tree expr;
2115 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2116 if (!expr)
2117 return;
2118 index = ipa_get_param_decl_index (info,
2119 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2120 gcc_assert (index >= 0);
2121 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2122 call, &jfunc, anc_offset))
2123 return;
2126 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2127 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2128 ii->offset = anc_offset;
2129 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2130 ii->otr_type = obj_type_ref_class (target);
2131 ii->polymorphic = 1;
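/* Illustrative sketch (not part of the original source): the kind of call
   the function above analyzes.  For

     struct A { virtual int foo (); };
     int call_foo (A *a) { return a->foo (); }

   the call target is an OBJ_TYPE_REF whose OBJ_TYPE_REF_OBJECT is the
   default-definition SSA name of parameter a, so a call note with ancestor
   offset 0 and the OTR token of foo is created.  */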
2134 /* Analyze call statement CALL to determine whether and how it utilizes formal
2135 parameters of the caller FBI->node (described by FBI->info). */
2138 static void
2139 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2141 tree target = gimple_call_fn (call);
2143 if (!target
2144 || (TREE_CODE (target) != SSA_NAME
2145 && !virtual_method_call_p (target)))
2146 return;
2148 struct cgraph_edge *cs = fbi->node->get_edge (call);
2149 /* If we previously turned the call into a direct call, there is
2150 no need to analyze. */
2151 if (cs && !cs->indirect_unknown_callee)
2152 return;
2154 if (cs->indirect_info->polymorphic && flag_devirtualize)
2156 tree instance;
2157 tree target = gimple_call_fn (call);
2158 ipa_polymorphic_call_context context (current_function_decl,
2159 target, call, &instance);
2161 gcc_checking_assert (cs->indirect_info->otr_type
2162 == obj_type_ref_class (target));
2163 gcc_checking_assert (cs->indirect_info->otr_token
2164 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2166 cs->indirect_info->vptr_changed
2167 = !context.get_dynamic_type (instance,
2168 OBJ_TYPE_REF_OBJECT (target),
2169 obj_type_ref_class (target), call);
2170 cs->indirect_info->context = context;
2173 if (TREE_CODE (target) == SSA_NAME)
2174 ipa_analyze_indirect_call_uses (fbi, call, target);
2175 else if (virtual_method_call_p (target))
2176 ipa_analyze_virtual_call_uses (fbi, call, target);
2180 /* Analyze the call statement STMT with respect to formal parameters (described
2181 in FBI->info) of the caller given by FBI->node. Currently it only checks
2182 whether formal parameters are called. */
2184 static void
2185 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2187 if (is_gimple_call (stmt))
2188 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2191 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2192 If OP is a parameter declaration, mark it as used in the info structure
2193 passed in DATA. */
2195 static bool
2196 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2198 struct ipa_node_params *info = (struct ipa_node_params *) data;
2200 op = get_base_address (op);
2201 if (op
2202 && TREE_CODE (op) == PARM_DECL)
2204 int index = ipa_get_param_decl_index (info, op);
2205 gcc_assert (index >= 0);
2206 ipa_set_param_used (info, index, true);
2209 return false;
2212 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2213 the findings in various structures of the associated ipa_node_params
2214 structure, such as parameter flags, notes etc. FBI holds various data about
2215 the function being analyzed. */
2217 static void
2218 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2220 gimple_stmt_iterator gsi;
2221 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2223 gimple *stmt = gsi_stmt (gsi);
2225 if (is_gimple_debug (stmt))
2226 continue;
2228 ipa_analyze_stmt_uses (fbi, stmt);
2229 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2230 visit_ref_for_mod_analysis,
2231 visit_ref_for_mod_analysis,
2232 visit_ref_for_mod_analysis);
2234 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2235 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2236 visit_ref_for_mod_analysis,
2237 visit_ref_for_mod_analysis,
2238 visit_ref_for_mod_analysis);
2241 /* Calculate controlled uses of parameters of NODE. */
2243 static void
2244 ipa_analyze_controlled_uses (struct cgraph_node *node)
2246 struct ipa_node_params *info = IPA_NODE_REF (node);
2248 for (int i = 0; i < ipa_get_param_count (info); i++)
2250 tree parm = ipa_get_param (info, i);
2251 int controlled_uses = 0;
2253 /* For SSA regs see if parameter is used. For non-SSA we compute
2254 the flag during modification analysis. */
2255 if (is_gimple_reg (parm))
2257 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2258 parm);
2259 if (ddef && !has_zero_uses (ddef))
2261 imm_use_iterator imm_iter;
2262 use_operand_p use_p;
2264 ipa_set_param_used (info, i, true);
2265 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2266 if (!is_gimple_call (USE_STMT (use_p)))
2268 if (!is_gimple_debug (USE_STMT (use_p)))
2270 controlled_uses = IPA_UNDESCRIBED_USE;
2271 break;
2274 else
2275 controlled_uses++;
2277 else
2278 controlled_uses = 0;
2280 else
2281 controlled_uses = IPA_UNDESCRIBED_USE;
2282 ipa_set_controlled_uses (info, i, controlled_uses);
2286 /* Free stuff in BI. */
2288 static void
2289 free_ipa_bb_info (struct ipa_bb_info *bi)
2291 bi->cg_edges.release ();
2292 bi->param_aa_statuses.release ();
2295 /* Dominator walker driving the analysis. */
2297 class analysis_dom_walker : public dom_walker
2299 public:
2300 analysis_dom_walker (struct ipa_func_body_info *fbi)
2301 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2303 virtual edge before_dom_children (basic_block);
2305 private:
2306 struct ipa_func_body_info *m_fbi;
2309 edge
2310 analysis_dom_walker::before_dom_children (basic_block bb)
2312 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2313 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2314 return NULL;
2317 /* Release body info FBI. */
2319 void
2320 ipa_release_body_info (struct ipa_func_body_info *fbi)
2322 int i;
2323 struct ipa_bb_info *bi;
2325 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2326 free_ipa_bb_info (bi);
2327 fbi->bb_infos.release ();
2330 /* Initialize the array describing properties of formal parameters
2331 of NODE, analyze their uses and compute jump functions associated
2332 with actual arguments of calls from within NODE. */
2334 void
2335 ipa_analyze_node (struct cgraph_node *node)
2337 struct ipa_func_body_info fbi;
2338 struct ipa_node_params *info;
2340 ipa_check_create_node_params ();
2341 ipa_check_create_edge_args ();
2342 info = IPA_NODE_REF (node);
2344 if (info->analysis_done)
2345 return;
2346 info->analysis_done = 1;
2348 if (ipa_func_spec_opts_forbid_analysis_p (node))
2350 for (int i = 0; i < ipa_get_param_count (info); i++)
2352 ipa_set_param_used (info, i, true);
2353 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2355 return;
2358 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2359 push_cfun (func);
2360 calculate_dominance_info (CDI_DOMINATORS);
2361 ipa_initialize_node_params (node);
2362 ipa_analyze_controlled_uses (node);
2364 fbi.node = node;
2365 fbi.info = IPA_NODE_REF (node);
2366 fbi.bb_infos = vNULL;
2367 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2368 fbi.param_count = ipa_get_param_count (info);
2369 fbi.aa_walked = 0;
2371 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2373 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2374 bi->cg_edges.safe_push (cs);
2377 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2379 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2380 bi->cg_edges.safe_push (cs);
2383 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2385 ipa_release_body_info (&fbi);
2386 free_dominance_info (CDI_DOMINATORS);
2387 pop_cfun ();
2390 /* Update the jump functions associated with call graph edge E when the call
2391 graph edge CS is being inlined, assuming that E->caller is already (possibly
2392 indirectly) inlined into CS->callee and that E has not been inlined. */
2394 static void
2395 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2396 struct cgraph_edge *e)
2398 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2399 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2400 int count = ipa_get_cs_argument_count (args);
2401 int i;
2403 for (i = 0; i < count; i++)
2405 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2406 struct ipa_polymorphic_call_context *dst_ctx
2407 = ipa_get_ith_polymorhic_call_context (args, i);
2409 if (dst->type == IPA_JF_ANCESTOR)
2411 struct ipa_jump_func *src;
2412 int dst_fid = dst->value.ancestor.formal_id;
2413 struct ipa_polymorphic_call_context *src_ctx
2414 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2416 /* Variable number of arguments can cause havoc if we try to access
2417 one that does not exist in the inlined edge. So make sure we
2418 don't. */
2419 if (dst_fid >= ipa_get_cs_argument_count (top))
2421 ipa_set_jf_unknown (dst);
2422 continue;
2425 src = ipa_get_ith_jump_func (top, dst_fid);
2427 if (src_ctx && !src_ctx->useless_p ())
2429 struct ipa_polymorphic_call_context ctx = *src_ctx;
2431 /* TODO: Make type preserved safe WRT contexts. */
2432 if (!ipa_get_jf_ancestor_type_preserved (dst))
2433 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2434 ctx.offset_by (dst->value.ancestor.offset);
2435 if (!ctx.useless_p ())
2437 if (!dst_ctx)
2439 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2440 count);
2441 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2444 dst_ctx->combine_with (ctx);
2448 if (src->agg.items
2449 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2451 struct ipa_agg_jf_item *item;
2452 int j;
2454 /* Currently we do not produce clobber aggregate jump functions,
2455 replace with merging when we do. */
2456 gcc_assert (!dst->agg.items);
2458 dst->agg.items = vec_safe_copy (src->agg.items);
2459 dst->agg.by_ref = src->agg.by_ref;
2460 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2461 item->offset -= dst->value.ancestor.offset;
2464 if (src->type == IPA_JF_PASS_THROUGH
2465 && src->value.pass_through.operation == NOP_EXPR)
2467 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2468 dst->value.ancestor.agg_preserved &=
2469 src->value.pass_through.agg_preserved;
2471 else if (src->type == IPA_JF_ANCESTOR)
2473 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2474 dst->value.ancestor.offset += src->value.ancestor.offset;
2475 dst->value.ancestor.agg_preserved &=
2476 src->value.ancestor.agg_preserved;
2478 else
2479 ipa_set_jf_unknown (dst);
2481 else if (dst->type == IPA_JF_PASS_THROUGH)
2483 struct ipa_jump_func *src;
2484 /* We must check range due to calls with variable number of arguments
2485 and we cannot combine jump functions with operations. */
2486 if (dst->value.pass_through.operation == NOP_EXPR
2487 && (dst->value.pass_through.formal_id
2488 < ipa_get_cs_argument_count (top)))
2490 int dst_fid = dst->value.pass_through.formal_id;
2491 src = ipa_get_ith_jump_func (top, dst_fid);
2492 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2493 struct ipa_polymorphic_call_context *src_ctx
2494 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2496 if (src_ctx && !src_ctx->useless_p ())
2498 struct ipa_polymorphic_call_context ctx = *src_ctx;
2500 /* TODO: Make type preserved safe WRT contexts. */
2501 if (!ipa_get_jf_pass_through_type_preserved (dst))
2502 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2503 if (!ctx.useless_p ())
2505 if (!dst_ctx)
2507 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2508 count);
2509 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2511 dst_ctx->combine_with (ctx);
2514 switch (src->type)
2516 case IPA_JF_UNKNOWN:
2517 ipa_set_jf_unknown (dst);
2518 break;
2519 case IPA_JF_CONST:
2520 ipa_set_jf_cst_copy (dst, src);
2521 break;
2523 case IPA_JF_PASS_THROUGH:
2525 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2526 enum tree_code operation;
2527 operation = ipa_get_jf_pass_through_operation (src);
2529 if (operation == NOP_EXPR)
2531 bool agg_p;
2532 agg_p = dst_agg_p
2533 && ipa_get_jf_pass_through_agg_preserved (src);
2534 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2536 else
2538 tree operand = ipa_get_jf_pass_through_operand (src);
2539 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2540 operation);
2542 break;
2544 case IPA_JF_ANCESTOR:
2546 bool agg_p;
2547 agg_p = dst_agg_p
2548 && ipa_get_jf_ancestor_agg_preserved (src);
2549 ipa_set_ancestor_jf (dst,
2550 ipa_get_jf_ancestor_offset (src),
2551 ipa_get_jf_ancestor_formal_id (src),
2552 agg_p);
2553 break;
2555 default:
2556 gcc_unreachable ();
2559 if (src->agg.items
2560 && (dst_agg_p || !src->agg.by_ref))
2562 /* Currently we do not produce clobber aggregate jump
2563 functions, replace with merging when we do. */
2564 gcc_assert (!dst->agg.items);
2566 dst->agg.by_ref = src->agg.by_ref;
2567 dst->agg.items = vec_safe_copy (src->agg.items);
2570 else
2571 ipa_set_jf_unknown (dst);
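/* Illustrative sketch (not part of the original source): a worked
   composition of jump functions.  Suppose the inlined edge CS passed its
   caller's parameter 2 through unchanged, i.e. TOP describes argument 0 as
   a NOP_EXPR pass-through of formal 2, and the surviving edge E had an
   ancestor jump function <formal 0, offset 64>.  After inlining, that
   argument is still the same object at offset 64 within the original
   caller's parameter 2, so the combined jump function becomes the ancestor
   <formal 2, offset 64>.  Had TOP instead held an ancestor
   <formal 2, offset 32>, the offsets would add up and the result would be
   <formal 2, offset 96>.  */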
2576 /* If TARGET is an ADDR_EXPR of a function declaration, make it the
2577 (speculative, if SPECULATIVE is set) destination of indirect edge IE and return the edge.
2578 Otherwise, return NULL. */
2580 struct cgraph_edge *
2581 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2582 bool speculative)
2584 struct cgraph_node *callee;
2585 struct inline_edge_summary *es = inline_edge_summary (ie);
2586 bool unreachable = false;
2588 if (TREE_CODE (target) == ADDR_EXPR)
2589 target = TREE_OPERAND (target, 0);
2590 if (TREE_CODE (target) != FUNCTION_DECL)
2592 target = canonicalize_constructor_val (target, NULL);
2593 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2595 /* Member pointer call that goes through a VMT lookup. */
2596 if (ie->indirect_info->member_ptr
2597 /* Or if target is not an invariant expression and we do not
2598 know if it will evaluate to a function at runtime.
2599 This can happen when folding through &VAR, where &VAR
2600 is IP invariant, but VAR itself is not.
2602 TODO: Revisit this when GCC 5 is branched. It seems that
2603 member_ptr check is not needed and that we may try to fold
2604 the expression and see if VAR is readonly. */
2605 || !is_gimple_ip_invariant (target))
2607 if (dump_enabled_p ())
2609 location_t loc = gimple_location_safe (ie->call_stmt);
2610 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2611 "discovered direct call non-invariant "
2612 "%s/%i\n",
2613 ie->caller->name (), ie->caller->order);
2615 return NULL;
2619 if (dump_enabled_p ())
2621 location_t loc = gimple_location_safe (ie->call_stmt);
2622 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2623 "discovered direct call to non-function in %s/%i, "
2624 "making it __builtin_unreachable\n",
2625 ie->caller->name (), ie->caller->order);
2628 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2629 callee = cgraph_node::get_create (target);
2630 unreachable = true;
2632 else
2633 callee = cgraph_node::get (target);
2635 else
2636 callee = cgraph_node::get (target);
2638 /* Because may-edges are not explicitly represented and the vtable may be external,
2639 we may create the first reference to the object in the unit. */
2640 if (!callee || callee->global.inlined_to)
2643 /* We had better make sure we can refer to it.
2644 In the case of static functions we are out of luck, since we have
2645 already removed their bodies. In the case of public functions we may or may
2646 not introduce the reference. */
2647 if (!canonicalize_constructor_val (target, NULL)
2648 || !TREE_PUBLIC (target))
2650 if (dump_file)
2651 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2652 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2653 xstrdup_for_dump (ie->caller->name ()),
2654 ie->caller->order,
2655 xstrdup_for_dump (ie->callee->name ()),
2656 ie->callee->order);
2657 return NULL;
2659 callee = cgraph_node::get_create (target);
2662 /* If the edge is already speculated. */
2663 if (speculative && ie->speculative)
2665 struct cgraph_edge *e2;
2666 struct ipa_ref *ref;
2667 ie->speculative_call_info (e2, ie, ref);
2668 if (e2->callee->ultimate_alias_target ()
2669 != callee->ultimate_alias_target ())
2671 if (dump_file)
2672 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2673 "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
2674 xstrdup_for_dump (ie->caller->name ()),
2675 ie->caller->order,
2676 xstrdup_for_dump (callee->name ()),
2677 callee->order,
2678 xstrdup_for_dump (e2->callee->name ()),
2679 e2->callee->order);
2681 else
2683 if (dump_file)
2684 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2685 "(%s/%i -> %s/%i) this agree with previous speculation.\n",
2686 xstrdup_for_dump (ie->caller->name ()),
2687 ie->caller->order,
2688 xstrdup_for_dump (callee->name ()),
2689 callee->order);
2691 return NULL;
2694 if (!dbg_cnt (devirt))
2695 return NULL;
2697 ipa_check_create_node_params ();
2699 /* We cannot make edges to inline clones. It is a bug if someone removed
2700 the cgraph node too early. */
2701 gcc_assert (!callee->global.inlined_to);
2703 if (dump_file && !unreachable)
2705 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2706 "(%s/%i -> %s/%i), for stmt ",
2707 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2708 speculative ? "speculative" : "known",
2709 xstrdup_for_dump (ie->caller->name ()),
2710 ie->caller->order,
2711 xstrdup_for_dump (callee->name ()),
2712 callee->order);
2713 if (ie->call_stmt)
2714 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2715 else
2716 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2718 if (dump_enabled_p ())
2720 location_t loc = gimple_location_safe (ie->call_stmt);
2722 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2723 "converting indirect call in %s to direct call to %s\n",
2724 ie->caller->name (), callee->name ());
2726 if (!speculative)
2728 struct cgraph_edge *orig = ie;
2729 ie = ie->make_direct (callee);
2730 /* If we resolved a speculative edge, the cost is already up to date
2731 for the direct call (adjusted by inline_edge_duplication_hook). */
2732 if (ie == orig)
2734 es = inline_edge_summary (ie);
2735 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2736 - eni_size_weights.call_cost);
2737 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2738 - eni_time_weights.call_cost);
2741 else
2743 if (!callee->can_be_discarded_p ())
2745 cgraph_node *alias;
2746 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2747 if (alias)
2748 callee = alias;
2750 /* make_speculative will update ie's cost to direct call cost. */
2751 ie = ie->make_speculative
2752 (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
2755 return ie;
2758 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
2759 CONSTRUCTOR and return it. Return NULL if the search fails for some
2760 reason. */
2762 static tree
2763 find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
2765 tree type = TREE_TYPE (constructor);
2766 if (TREE_CODE (type) != ARRAY_TYPE
2767 && TREE_CODE (type) != RECORD_TYPE)
2768 return NULL;
2770 unsigned ix;
2771 tree index, val;
2772 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
2774 HOST_WIDE_INT elt_offset;
2775 if (TREE_CODE (type) == ARRAY_TYPE)
2777 offset_int off;
2778 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
2779 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
2781 if (index)
2783 off = wi::to_offset (index);
2784 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
2786 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
2787 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
2788 off = wi::sext (off - wi::to_offset (low_bound),
2789 TYPE_PRECISION (TREE_TYPE (index)));
2791 off *= wi::to_offset (unit_size);
2793 else
2794 off = wi::to_offset (unit_size) * ix;
2796 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
2797 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
2798 continue;
2799 elt_offset = off.to_shwi ();
2801 else if (TREE_CODE (type) == RECORD_TYPE)
2803 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
2804 if (DECL_BIT_FIELD (index))
2805 continue;
2806 elt_offset = int_bit_position (index);
2808 else
2809 gcc_unreachable ();
2811 if (elt_offset > req_offset)
2812 return NULL;
2814 if (TREE_CODE (val) == CONSTRUCTOR)
2815 return find_constructor_constant_at_offset (val,
2816 req_offset - elt_offset);
2818 if (elt_offset == req_offset
2819 && is_gimple_reg_type (TREE_TYPE (val))
2820 && is_gimple_ip_invariant (val))
2821 return val;
2823 return NULL;
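/* Illustrative sketch (not part of the original source): the bit-offset
   arithmetic used above.  For

     struct S { int a; int b; };
     static const struct S cs = { 1, 2 };

   looking up REQ_OFFSET == 32 on a target with 32-bit int matches field b,
   whose int_bit_position is 32, and returns the constant 2.  For an array,
   element IX of unit size 4 sits at IX * 4 bytes, i.e. at bit offset
   (IX * 4) << LOG2_BITS_PER_UNIT.  */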
2826 /* Check whether SCALAR could be used to look up an aggregate interprocedural
2827 invariant from a static constructor and if so, return it. Otherwise return
2828 NULL. */
2830 static tree
2831 ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
2833 if (by_ref)
2835 if (TREE_CODE (scalar) != ADDR_EXPR)
2836 return NULL;
2837 scalar = TREE_OPERAND (scalar, 0);
2840 if (TREE_CODE (scalar) != VAR_DECL
2841 || !is_global_var (scalar)
2842 || !TREE_READONLY (scalar)
2843 || !DECL_INITIAL (scalar)
2844 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
2845 return NULL;
2847 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
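/* Illustrative sketch (not part of the original source, with hypothetical
   names): a source pattern this lookup serves.  Given

     static const struct ops { int (*hook) (int); } global_ops = { worker };
     void run (const struct ops *o) { o->hook (5); }
     ... run (&global_ops); ...

   SCALAR is &global_ops (an ADDR_EXPR, so BY_REF must be true), the decl
   is a read-only global with a CONSTRUCTOR initializer, and the search at
   the bit offset of the hook field yields the address of worker.  */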
2850 /* Retrieve value from aggregate jump function AGG or static initializer of
2851 SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
2852 none. BY_REF specifies whether the value has to be passed by reference or
2853 by value. If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
2854 to is set to true if the value comes from an initializer of a constant. */
2856 tree
2857 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar,
2858 HOST_WIDE_INT offset, bool by_ref,
2859 bool *from_global_constant)
2861 struct ipa_agg_jf_item *item;
2862 int i;
2864 if (scalar)
2866 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
2867 if (res)
2869 if (from_global_constant)
2870 *from_global_constant = true;
2871 return res;
2875 if (!agg
2876 || by_ref != agg->by_ref)
2877 return NULL;
2879 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2880 if (item->offset == offset)
2882 /* Currently we do not have clobber values, return NULL for them once
2883 we do. */
2884 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2885 if (from_global_constant)
2886 *from_global_constant = false;
2887 return item->value;
2889 return NULL;
2892 /* Remove a reference to SYMBOL from the list of references of a node given by
2893 reference description RDESC. Return true if the reference has been
2894 successfully found and removed. */
2896 static bool
2897 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2899 struct ipa_ref *to_del;
2900 struct cgraph_edge *origin;
2902 origin = rdesc->cs;
2903 if (!origin)
2904 return false;
2905 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
2906 origin->lto_stmt_uid);
2907 if (!to_del)
2908 return false;
2910 to_del->remove_reference ();
2911 if (dump_file)
2912 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2913 xstrdup_for_dump (origin->caller->name ()),
2914 origin->caller->order, xstrdup_for_dump (symbol->name ()));
2915 return true;
2918 /* If JFUNC has a reference description with refcount different from
2919 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2920 NULL. JFUNC must be a constant jump function. */
2922 static struct ipa_cst_ref_desc *
2923 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2925 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2926 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2927 return rdesc;
2928 else
2929 return NULL;
2932 /* If the value of constant jump function JFUNC is an address of a function
2933 declaration, return the associated call graph node. Otherwise return
2934 NULL. */
2936 static cgraph_node *
2937 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2939 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2940 tree cst = ipa_get_jf_constant (jfunc);
2941 if (TREE_CODE (cst) != ADDR_EXPR
2942 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2943 return NULL;
2945 return cgraph_node::get (TREE_OPERAND (cst, 0));
2949 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2950 refcount and if it hits zero, remove reference to SYMBOL from the caller of
2951 the edge specified in the rdesc. Return false if either the symbol or the
2952 reference could not be found, otherwise return true. */
2954 static bool
2955 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2957 struct ipa_cst_ref_desc *rdesc;
2958 if (jfunc->type == IPA_JF_CONST
2959 && (rdesc = jfunc_rdesc_usable (jfunc))
2960 && --rdesc->refcount == 0)
2962 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
2963 if (!symbol)
2964 return false;
2966 return remove_described_reference (symbol, rdesc);
2968 return true;
2971 /* Try to find a destination for indirect edge IE that corresponds to a simple
2972 call or a call of a member function pointer and where the destination is a
2973 pointer formal parameter described by jump function JFUNC. If it can be
2974 determined, return the newly direct edge, otherwise return NULL.
2975 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2977 static struct cgraph_edge *
2978 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2979 struct ipa_jump_func *jfunc,
2980 struct ipa_node_params *new_root_info)
2982 struct cgraph_edge *cs;
2983 tree target;
2984 bool agg_contents = ie->indirect_info->agg_contents;
2985 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc);
2986 if (agg_contents)
2988 bool from_global_constant;
2989 target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
2990 ie->indirect_info->offset,
2991 ie->indirect_info->by_ref,
2992 &from_global_constant);
2993 if (target
2994 && !from_global_constant
2995 && !ie->indirect_info->guaranteed_unmodified)
2996 return NULL;
2998 else
2999 target = scalar;
3000 if (!target)
3001 return NULL;
3002 cs = ipa_make_edge_direct_to_target (ie, target);
3004 if (cs && !agg_contents)
3006 bool ok;
3007 gcc_checking_assert (cs->callee
3008 && (cs != ie
3009 || jfunc->type != IPA_JF_CONST
3010 || !cgraph_node_for_jfunc (jfunc)
3011 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3012 ok = try_decrement_rdesc_refcount (jfunc);
3013 gcc_checking_assert (ok);
3016 return cs;
3019 /* Return the target to be used in cases of impossible devirtualization. IE
3020 and TARGET (the latter can be NULL) are dumped when dumping is enabled. */
3022 tree
3023 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3025 if (dump_file)
3027 if (target)
3028 fprintf (dump_file,
3029 "Type inconsistent devirtualization: %s/%i->%s\n",
3030 ie->caller->name (), ie->caller->order,
3031 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3032 else
3033 fprintf (dump_file,
3034 "No devirtualization target in %s/%i\n",
3035 ie->caller->name (), ie->caller->order);
3037 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3038 cgraph_node::get_create (new_target);
3039 return new_target;
3042 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3043 call based on a formal parameter which is described by jump function JFUNC
3044 and if it can be determined, make it direct and return the direct edge.
3045 Otherwise, return NULL. CTX describes the polymorphic context that the
3046 parameter the call is based on brings along with it. */
3048 static struct cgraph_edge *
3049 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3050 struct ipa_jump_func *jfunc,
3051 struct ipa_polymorphic_call_context ctx)
3053 tree target = NULL;
3054 bool speculative = false;
3056 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
3057 return NULL;
3059 gcc_assert (!ie->indirect_info->by_ref);
3061 /* Try to do lookup via known virtual table pointer value. */
3062 if (!ie->indirect_info->vptr_changed
3063 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
3065 tree vtable;
3066 unsigned HOST_WIDE_INT offset;
3067 tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3068 : NULL;
3069 tree t = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3070 ie->indirect_info->offset,
3071 true);
3072 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3074 bool can_refer;
3075 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3076 vtable, offset, &can_refer);
3077 if (can_refer)
3079 if (!t
3080 || (TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
3081 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
3082 || !possible_polymorphic_call_target_p
3083 (ie, cgraph_node::get (t)))
3085 /* Do not speculate builtin_unreachable, it is stupid! */
3086 if (!ie->indirect_info->vptr_changed)
3087 target = ipa_impossible_devirt_target (ie, target);
3088 else
3089 target = NULL;
3091 else
3093 target = t;
3094 speculative = ie->indirect_info->vptr_changed;
3100 ipa_polymorphic_call_context ie_context (ie);
3101 vec <cgraph_node *>targets;
3102 bool final;
3104 ctx.offset_by (ie->indirect_info->offset);
3105 if (ie->indirect_info->vptr_changed)
3106 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3107 ie->indirect_info->otr_type);
3108 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3109 targets = possible_polymorphic_call_targets
3110 (ie->indirect_info->otr_type,
3111 ie->indirect_info->otr_token,
3112 ctx, &final);
3113 if (final && targets.length () <= 1)
3115 speculative = false;
3116 if (targets.length () == 1)
3117 target = targets[0]->decl;
3118 else
3119 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3121 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3122 && !ie->speculative && ie->maybe_hot_p ())
3124 cgraph_node *n;
3125 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3126 ie->indirect_info->otr_token,
3127 ie->indirect_info->context);
3128 if (n)
3130 target = n->decl;
3131 speculative = true;
3135 if (target)
3137 if (!possible_polymorphic_call_target_p
3138 (ie, cgraph_node::get_create (target)))
3140 if (speculative)
3141 return NULL;
3142 target = ipa_impossible_devirt_target (ie, target);
3144 return ipa_make_edge_direct_to_target (ie, target, speculative);
3146 else
3147 return NULL;
3150 /* Update the param called notes associated with NODE when CS is being inlined,
3151 assuming NODE is (potentially indirectly) inlined into CS->callee.
3152 Moreover, if the callee is discovered to be constant, create a new cgraph
3153 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3154 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
3156 static bool
3157 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3158 struct cgraph_node *node,
3159 vec<cgraph_edge *> *new_edges)
3161 struct ipa_edge_args *top;
3162 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3163 struct ipa_node_params *new_root_info;
3164 bool res = false;
3166 ipa_check_create_edge_args ();
3167 top = IPA_EDGE_REF (cs);
3168 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3169 ? cs->caller->global.inlined_to
3170 : cs->caller);
3172 for (ie = node->indirect_calls; ie; ie = next_ie)
3174 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3175 struct ipa_jump_func *jfunc;
3176 int param_index;
3177 cgraph_node *spec_target = NULL;
3179 next_ie = ie->next_callee;
3181 if (ici->param_index == -1)
3182 continue;
3184 /* We must check range due to calls with variable number of arguments: */
3185 if (ici->param_index >= ipa_get_cs_argument_count (top))
3187 ici->param_index = -1;
3188 continue;
3191 param_index = ici->param_index;
3192 jfunc = ipa_get_ith_jump_func (top, param_index);
3194 if (ie->speculative)
3196 struct cgraph_edge *de;
3197 struct ipa_ref *ref;
3198 ie->speculative_call_info (de, ie, ref);
3199 spec_target = de->callee;
3202 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3203 new_direct_edge = NULL;
3204 else if (ici->polymorphic)
3206 ipa_polymorphic_call_context ctx;
3207 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3208 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3210 else
3211 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3212 new_root_info);
3213 /* If speculation was removed, then we need to do nothing. */
3214 if (new_direct_edge && new_direct_edge != ie
3215 && new_direct_edge->callee == spec_target)
3217 new_direct_edge->indirect_inlining_edge = 1;
3218 top = IPA_EDGE_REF (cs);
3219 res = true;
3220 if (!new_direct_edge->speculative)
3221 continue;
3223 else if (new_direct_edge)
3225 new_direct_edge->indirect_inlining_edge = 1;
3226 if (new_direct_edge->call_stmt)
3227 new_direct_edge->call_stmt_cannot_inline_p
3228 = !gimple_check_call_matching_types (
3229 new_direct_edge->call_stmt,
3230 new_direct_edge->callee->decl, false);
3231 if (new_edges)
3233 new_edges->safe_push (new_direct_edge);
3234 res = true;
3236 top = IPA_EDGE_REF (cs);
3237 /* If a speculative edge was introduced, we still need to update
3238 the call info of the indirect edge. */
3239 if (!new_direct_edge->speculative)
3240 continue;
3242 if (jfunc->type == IPA_JF_PASS_THROUGH
3243 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3245 if (ici->agg_contents
3246 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3247 && !ici->polymorphic)
3248 ici->param_index = -1;
3249 else
3251 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3252 if (ici->polymorphic
3253 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3254 ici->vptr_changed = true;
3257 else if (jfunc->type == IPA_JF_ANCESTOR)
3259 if (ici->agg_contents
3260 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3261 && !ici->polymorphic)
3262 ici->param_index = -1;
3263 else
3265 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3266 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3267 if (ici->polymorphic
3268 && !ipa_get_jf_ancestor_type_preserved (jfunc))
3269 ici->vptr_changed = true;
3272 else
3273 /* Either we can find a destination for this edge now or never. */
3274 ici->param_index = -1;
3277 return res;
3280 /* Recursively traverse subtree of NODE (including node) made of inlined
3281 cgraph_edges when CS has been inlined and invoke
3282 update_indirect_edges_after_inlining on all nodes and
3283 update_jump_functions_after_inlining on all non-inlined edges that lead out
3284 of this subtree. Newly discovered indirect edges will be added to
3285 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
3286 created. */
3288 static bool
3289 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3290 struct cgraph_node *node,
3291 vec<cgraph_edge *> *new_edges)
3293 struct cgraph_edge *e;
3294 bool res;
3296 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3298 for (e = node->callees; e; e = e->next_callee)
3299 if (!e->inline_failed)
3300 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3301 else
3302 update_jump_functions_after_inlining (cs, e);
3303 for (e = node->indirect_calls; e; e = e->next_callee)
3304 update_jump_functions_after_inlining (cs, e);
3306 return res;
3309 /* Combine two controlled uses counts as done during inlining. */
3311 static int
3312 combine_controlled_uses_counters (int c, int d)
3314 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3315 return IPA_UNDESCRIBED_USE;
3316 else
3317 return c + d - 1;
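/* Illustrative sketch (not part of the original source): why the sum is
   decremented by one.  If the caller had C == 2 statements controlling a
   constant (one of them the call site being inlined) and the callee's
   corresponding parameter had D == 3 controlled uses, then after inlining
   the call site itself disappears, leaving 2 + 3 - 1 == 4 controlled uses
   in the combined body.  */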
3320 /* Propagate the number of controlled users from CS->callee to the new root
3321 of the tree of inlined nodes. */
3323 static void
3324 propagate_controlled_uses (struct cgraph_edge *cs)
3326 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3327 struct cgraph_node *new_root = cs->caller->global.inlined_to
3328 ? cs->caller->global.inlined_to : cs->caller;
3329 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3330 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3331 int count, i;
3333 count = MIN (ipa_get_cs_argument_count (args),
3334 ipa_get_param_count (old_root_info));
3335 for (i = 0; i < count; i++)
3337 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3338 struct ipa_cst_ref_desc *rdesc;
3340 if (jf->type == IPA_JF_PASS_THROUGH)
3342 int src_idx, c, d;
3343 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3344 c = ipa_get_controlled_uses (new_root_info, src_idx);
3345 d = ipa_get_controlled_uses (old_root_info, i);
3347 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3348 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3349 c = combine_controlled_uses_counters (c, d);
3350 ipa_set_controlled_uses (new_root_info, src_idx, c);
3351 if (c == 0 && new_root_info->ipcp_orig_node)
3353 struct cgraph_node *n;
3354 struct ipa_ref *ref;
3355 tree t = new_root_info->known_csts[src_idx];
3357 if (t && TREE_CODE (t) == ADDR_EXPR
3358 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3359 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3360 && (ref = new_root->find_reference (n, NULL, 0)))
3362 if (dump_file)
3363 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3364 "reference from %s/%i to %s/%i.\n",
3365 xstrdup_for_dump (new_root->name ()),
3366 new_root->order,
3367 xstrdup_for_dump (n->name ()), n->order);
3368 ref->remove_reference ();
3372 else if (jf->type == IPA_JF_CONST
3373 && (rdesc = jfunc_rdesc_usable (jf)))
3375 int d = ipa_get_controlled_uses (old_root_info, i);
3376 int c = rdesc->refcount;
3377 rdesc->refcount = combine_controlled_uses_counters (c, d);
3378 if (rdesc->refcount == 0)
3380 tree cst = ipa_get_jf_constant (jf);
3381 struct cgraph_node *n;
3382 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3383 && TREE_CODE (TREE_OPERAND (cst, 0))
3384 == FUNCTION_DECL);
3385 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3386 if (n)
3388 struct cgraph_node *clone;
3389 bool ok;
3390 ok = remove_described_reference (n, rdesc);
3391 gcc_checking_assert (ok);
3393 clone = cs->caller;
3394 while (clone->global.inlined_to
3395 && clone != rdesc->cs->caller
3396 && IPA_NODE_REF (clone)->ipcp_orig_node)
3398 struct ipa_ref *ref;
3399 ref = clone->find_reference (n, NULL, 0);
3400 if (ref)
3402 if (dump_file)
3403 fprintf (dump_file, "ipa-prop: Removing "
3404 "cloning-created reference "
3405 "from %s/%i to %s/%i.\n",
3406 xstrdup_for_dump (clone->name ()),
3407 clone->order,
3408 xstrdup_for_dump (n->name ()),
3409 n->order);
3410 ref->remove_reference ();
3412 clone = clone->callers->caller;
3419 for (i = ipa_get_param_count (old_root_info);
3420 i < ipa_get_cs_argument_count (args);
3421 i++)
3423 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3425 if (jf->type == IPA_JF_CONST)
3427 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3428 if (rdesc)
3429 rdesc->refcount = IPA_UNDESCRIBED_USE;
3431 else if (jf->type == IPA_JF_PASS_THROUGH)
3432 ipa_set_controlled_uses (new_root_info,
3433 jf->value.pass_through.formal_id,
3434 IPA_UNDESCRIBED_USE);
3438 /* Update jump functions and call note functions on inlining the call site CS.
3439 CS is expected to lead to a node already cloned by
3440 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3441 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3442 created. */
3444 bool
3445 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3446 vec<cgraph_edge *> *new_edges)
3448 bool changed;
3449 /* Do nothing if the preparation phase has not been carried out yet
3450 (i.e. during early inlining). */
3451 if (!ipa_node_params_sum)
3452 return false;
3453 gcc_assert (ipa_edge_args_vector);
3455 propagate_controlled_uses (cs);
3456 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3458 return changed;
3461 /* Frees all dynamically allocated structures that the argument info points
3462 to. */
3464 void
3465 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3467 vec_free (args->jump_functions);
3468 memset (args, 0, sizeof (*args));
3471 /* Free all ipa_edge structures. */
3473 void
3474 ipa_free_all_edge_args (void)
3476 int i;
3477 struct ipa_edge_args *args;
3479 if (!ipa_edge_args_vector)
3480 return;
3482 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3483 ipa_free_edge_args_substructures (args);
3485 vec_free (ipa_edge_args_vector);
3488 /* Frees all dynamically allocated structures that the param info points
3489 to. */
3491 ipa_node_params::~ipa_node_params ()
3493 descriptors.release ();
3494 free (lattices);
3495 /* Lattice values and their sources are deallocated with their allocation
3496 pool. */
3497 known_csts.release ();
3498 known_contexts.release ();
3500 lattices = NULL;
3501 ipcp_orig_node = NULL;
3502 analysis_done = 0;
3503 node_enqueued = 0;
3504 do_clone_for_all_contexts = 0;
3505 is_all_contexts_clone = 0;
3506 node_dead = 0;
3509 /* Free all ipa_node_params structures. */
3511 void
3512 ipa_free_all_node_params (void)
3514 delete ipa_node_params_sum;
3515 ipa_node_params_sum = NULL;
3518 /* Grow ipcp_transformations if necessary. */
3520 void
3521 ipcp_grow_transformations_if_necessary (void)
3523 if (vec_safe_length (ipcp_transformations)
3524 <= (unsigned) symtab->cgraph_max_uid)
3525 vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
3528 /* Set the aggregate replacements of NODE to be AGGVALS. */
3530 void
3531 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3532 struct ipa_agg_replacement_value *aggvals)
3534 ipcp_grow_transformations_if_necessary ();
3535 (*ipcp_transformations)[node->uid].agg_values = aggvals;
3538 /* Hook that is called by cgraph.c when an edge is removed. */
3540 static void
3541 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3543 struct ipa_edge_args *args;
3545 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3546 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3547 return;
3549 args = IPA_EDGE_REF (cs);
3550 if (args->jump_functions)
3552 struct ipa_jump_func *jf;
3553 int i;
3554 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3556 struct ipa_cst_ref_desc *rdesc;
3557 try_decrement_rdesc_refcount (jf);
3558 if (jf->type == IPA_JF_CONST
3559 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3560 && rdesc->cs == cs)
3561 rdesc->cs = NULL;
3565 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3568 /* Hook that is called by cgraph.c when an edge is duplicated. */
3570 static void
3571 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3572 void *)
3574 struct ipa_edge_args *old_args, *new_args;
3575 unsigned int i;
3577 ipa_check_create_edge_args ();
3579 old_args = IPA_EDGE_REF (src);
3580 new_args = IPA_EDGE_REF (dst);
3582 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3583 if (old_args->polymorphic_call_contexts)
3584 new_args->polymorphic_call_contexts
3585 = vec_safe_copy (old_args->polymorphic_call_contexts);
3587 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3589 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3590 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3592 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3594 if (src_jf->type == IPA_JF_CONST)
3596 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3598 if (!src_rdesc)
3599 dst_jf->value.constant.rdesc = NULL;
3600 else if (src->caller == dst->caller)
3602 struct ipa_ref *ref;
3603 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3604 gcc_checking_assert (n);
3605 ref = src->caller->find_reference (n, src->call_stmt,
3606 src->lto_stmt_uid);
3607 gcc_checking_assert (ref);
3608 dst->caller->clone_reference (ref, ref->stmt);
3610 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3611 dst_rdesc->cs = dst;
3612 dst_rdesc->refcount = src_rdesc->refcount;
3613 dst_rdesc->next_duplicate = NULL;
3614 dst_jf->value.constant.rdesc = dst_rdesc;
3616 else if (src_rdesc->cs == src)
3618 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3619 dst_rdesc->cs = dst;
3620 dst_rdesc->refcount = src_rdesc->refcount;
3621 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3622 src_rdesc->next_duplicate = dst_rdesc;
3623 dst_jf->value.constant.rdesc = dst_rdesc;
3625 else
3627 struct ipa_cst_ref_desc *dst_rdesc;
3628 /* This can happen during inlining, when a JFUNC can refer to a
3629 reference taken in a function up in the tree of inline clones.
3630 We need to find the duplicate that refers to our tree of
3631 inline clones. */
3633 gcc_assert (dst->caller->global.inlined_to);
3634 for (dst_rdesc = src_rdesc->next_duplicate;
3635 dst_rdesc;
3636 dst_rdesc = dst_rdesc->next_duplicate)
3638 struct cgraph_node *top;
3639 top = dst_rdesc->cs->caller->global.inlined_to
3640 ? dst_rdesc->cs->caller->global.inlined_to
3641 : dst_rdesc->cs->caller;
3642 if (dst->caller->global.inlined_to == top)
3643 break;
3645 gcc_assert (dst_rdesc);
3646 dst_jf->value.constant.rdesc = dst_rdesc;
3649 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3650 && src->caller == dst->caller)
3652 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3653 ? dst->caller->global.inlined_to : dst->caller;
3654 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3655 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3657 int c = ipa_get_controlled_uses (root_info, idx);
3658 if (c != IPA_UNDESCRIBED_USE)
3660 c++;
3661 ipa_set_controlled_uses (root_info, idx, c);
3667 /* Analyze newly added function into callgraph. */
3669 static void
3670 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3672 if (node->has_gimple_body_p ())
3673 ipa_analyze_node (node);
3676 /* Hook that is called by summary when a node is duplicated. */
3678 void
3679 ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
3680 ipa_node_params *old_info,
3681 ipa_node_params *new_info)
3683 ipa_agg_replacement_value *old_av, *new_av;
3685 new_info->descriptors = old_info->descriptors.copy ();
3686 new_info->lattices = NULL;
3687 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3689 new_info->analysis_done = old_info->analysis_done;
3690 new_info->node_enqueued = old_info->node_enqueued;
3691 new_info->versionable = old_info->versionable;
3693 old_av = ipa_get_agg_replacements_for_node (src);
3694 if (old_av)
3696 new_av = NULL;
3697 while (old_av)
3699 struct ipa_agg_replacement_value *v;
3701 v = ggc_alloc<ipa_agg_replacement_value> ();
3702 memcpy (v, old_av, sizeof (*v));
3703 v->next = new_av;
3704 new_av = v;
3705 old_av = old_av->next;
3707 ipa_set_node_agg_value_chain (dst, new_av);
3710 ipcp_transformation_summary *src_trans = ipcp_get_transformation_summary (src);
3712 if (src_trans && vec_safe_length (src_trans->alignments) > 0)
3714 ipcp_grow_transformations_if_necessary ();
3715 src_trans = ipcp_get_transformation_summary (src);
3716 const vec<ipa_alignment, va_gc> *src_alignments = src_trans->alignments;
3717 vec<ipa_alignment, va_gc> *&dst_alignments
3718 = ipcp_get_transformation_summary (dst)->alignments;
3719 vec_safe_reserve_exact (dst_alignments, src_alignments->length ());
3720 for (unsigned i = 0; i < src_alignments->length (); ++i)
3721 dst_alignments->quick_push ((*src_alignments)[i]);
3724 if (src_trans && vec_safe_length (src_trans->bits) > 0)
3726 ipcp_grow_transformations_if_necessary ();
3727 src_trans = ipcp_get_transformation_summary (src);
3728 const vec<ipa_bits, va_gc> *src_bits = src_trans->bits;
3729 vec<ipa_bits, va_gc> *&dst_bits
3730 = ipcp_get_transformation_summary (dst)->bits;
3731 vec_safe_reserve_exact (dst_bits, src_bits->length ());
3732 for (unsigned i = 0; i < src_bits->length (); ++i)
3733 dst_bits->quick_push ((*src_bits)[i]);
3737 /* Register our cgraph hooks if they are not already there. */
3739 void
3740 ipa_register_cgraph_hooks (void)
3742 ipa_check_create_node_params ();
3744 if (!edge_removal_hook_holder)
3745 edge_removal_hook_holder =
3746 symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3747 if (!edge_duplication_hook_holder)
3748 edge_duplication_hook_holder =
3749 symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3750 function_insertion_hook_holder =
3751 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3754 /* Unregister our cgraph hooks. */
3756 static void
3757 ipa_unregister_cgraph_hooks (void)
3759 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
3760 edge_removal_hook_holder = NULL;
3761 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
3762 edge_duplication_hook_holder = NULL;
3763 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3764 function_insertion_hook_holder = NULL;
3767 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3768 longer needed after ipa-cp. */
3770 void
3771 ipa_free_all_structures_after_ipa_cp (void)
3773 if (!optimize && !in_lto_p)
3775 ipa_free_all_edge_args ();
3776 ipa_free_all_node_params ();
3777 ipcp_sources_pool.release ();
3778 ipcp_cst_values_pool.release ();
3779 ipcp_poly_ctx_values_pool.release ();
3780 ipcp_agg_lattice_pool.release ();
3781 ipa_unregister_cgraph_hooks ();
3782 ipa_refdesc_pool.release ();
3786 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3787 longer needed after indirect inlining. */
3789 void
3790 ipa_free_all_structures_after_iinln (void)
3792 ipa_free_all_edge_args ();
3793 ipa_free_all_node_params ();
3794 ipa_unregister_cgraph_hooks ();
3795 ipcp_sources_pool.release ();
3796 ipcp_cst_values_pool.release ();
3797 ipcp_poly_ctx_values_pool.release ();
3798 ipcp_agg_lattice_pool.release ();
3799 ipa_refdesc_pool.release ();
3802 /* Print ipa_tree_map data structure of function NODE to F. */
3805 void
3806 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3808 int i, count;
3809 struct ipa_node_params *info;
3811 if (!node->definition)
3812 return;
3813 info = IPA_NODE_REF (node);
3814 fprintf (f, " function %s/%i parameter descriptors:\n",
3815 node->name (), node->order);
3816 count = ipa_get_param_count (info);
3817 for (i = 0; i < count; i++)
3819 int c;
3821 fprintf (f, " ");
3822 ipa_dump_param (f, info, i);
3823 if (ipa_is_param_used (info, i))
3824 fprintf (f, " used");
3825 c = ipa_get_controlled_uses (info, i);
3826 if (c == IPA_UNDESCRIBED_USE)
3827 fprintf (f, " undescribed_use");
3828 else
3829 fprintf (f, " controlled_uses=%i", c);
3830 fprintf (f, "\n");
3834 /* Print ipa_tree_map data structures of all functions in the
3835 callgraph to F. */
3837 void
3838 ipa_print_all_params (FILE * f)
3840 struct cgraph_node *node;
3842 fprintf (f, "\nFunction parameters:\n");
3843 FOR_EACH_FUNCTION (node)
3844 ipa_print_node_params (f, node);
3847 /* Return a heap-allocated vector containing the formal parameters of FNDECL. */
3849 vec<tree>
3850 ipa_get_vector_of_formal_parms (tree fndecl)
3852 vec<tree> args;
3853 int count;
3854 tree parm;
3856 gcc_assert (!flag_wpa);
3857 count = count_formal_params (fndecl);
3858 args.create (count);
3859 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3860 args.quick_push (parm);
3862 return args;
3865 /* Return a heap-allocated vector containing the types of formal parameters
3866 of function type FNTYPE. */
3868 vec<tree>
3869 ipa_get_vector_of_formal_parm_types (tree fntype)
3871 vec<tree> types;
3872 int count = 0;
3873 tree t;
3875 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3876 count++;
3878 types.create (count);
3879 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3880 types.quick_push (TREE_VALUE (t));
3882 return types;
3885 /* Modify the function declaration FNDECL and its type according to the plan in
3886 ADJUSTMENTS. It also sets base fields of individual adjustment structures
3887 to reflect the actual parameters being modified, which are determined by
3888 their base_index fields. */
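/* A hypothetical illustration (declarations invented for this comment):
for void foo (int a, struct S s) and an adjustment vector
{ { op = IPA_PARM_OP_COPY, base_index = 0 },
{ op = IPA_PARM_OP_NEW, type = float } },
the first parameter is kept as-is while the aggregate is replaced by a
synthesized float parameter named with the "SYNTH" prefix used below. */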
3890 void
3891 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3893 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3894 tree orig_type = TREE_TYPE (fndecl);
3895 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3897 /* The following test is an ugly hack; some functions simply don't have any
3898 arguments in their type. This is probably a bug elsewhere, but well... */
3899 bool care_for_types = (old_arg_types != NULL_TREE);
3900 bool last_parm_void;
3901 vec<tree> otypes;
3902 if (care_for_types)
3904 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3905 == void_type_node);
3906 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3907 if (last_parm_void)
3908 gcc_assert (oparms.length () + 1 == otypes.length ());
3909 else
3910 gcc_assert (oparms.length () == otypes.length ());
3912 else
3914 last_parm_void = false;
3915 otypes.create (0);
3918 int len = adjustments.length ();
3919 tree *link = &DECL_ARGUMENTS (fndecl);
3920 tree new_arg_types = NULL;
3921 for (int i = 0; i < len; i++)
3923 struct ipa_parm_adjustment *adj;
3924 gcc_assert (link);
3926 adj = &adjustments[i];
3927 tree parm;
3928 if (adj->op == IPA_PARM_OP_NEW)
3929 parm = NULL;
3930 else
3931 parm = oparms[adj->base_index];
3932 adj->base = parm;
3934 if (adj->op == IPA_PARM_OP_COPY)
3936 if (care_for_types)
3937 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3938 new_arg_types);
3939 *link = parm;
3940 link = &DECL_CHAIN (parm);
3942 else if (adj->op != IPA_PARM_OP_REMOVE)
3944 tree new_parm;
3945 tree ptype;
3947 if (adj->by_ref)
3948 ptype = build_pointer_type (adj->type);
3949 else
3951 ptype = adj->type;
3952 if (is_gimple_reg_type (ptype))
3954 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3955 if (TYPE_ALIGN (ptype) != malign)
3956 ptype = build_aligned_type (ptype, malign);
3960 if (care_for_types)
3961 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3963 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3964 ptype);
3965 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
3966 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3967 DECL_ARTIFICIAL (new_parm) = 1;
3968 DECL_ARG_TYPE (new_parm) = ptype;
3969 DECL_CONTEXT (new_parm) = fndecl;
3970 TREE_USED (new_parm) = 1;
3971 DECL_IGNORED_P (new_parm) = 1;
3972 layout_decl (new_parm, 0);
3974 if (adj->op == IPA_PARM_OP_NEW)
3975 adj->base = NULL;
3976 else
3977 adj->base = parm;
3978 adj->new_decl = new_parm;
3980 *link = new_parm;
3981 link = &DECL_CHAIN (new_parm);
3985 *link = NULL_TREE;
3987 tree new_reversed = NULL;
3988 if (care_for_types)
3990 new_reversed = nreverse (new_arg_types);
3991 if (last_parm_void)
3993 if (new_reversed)
3994 TREE_CHAIN (new_arg_types) = void_list_node;
3995 else
3996 new_reversed = void_list_node;
4000 /* Use a distinct copy of the original type to preserve as much as
4001 possible from it (debug info, attribute lists etc.).
4002 The exception is that a METHOD_TYPE must have a THIS argument; when we
4003 are asked to remove it, we need to build a new FUNCTION_TYPE
4004 instead. */
4005 tree new_type = NULL;
4006 if (TREE_CODE (orig_type) != METHOD_TYPE
4007 || (adjustments[0].op == IPA_PARM_OP_COPY
4008 && adjustments[0].base_index == 0))
4010 new_type = build_distinct_type_copy (orig_type);
4011 TYPE_ARG_TYPES (new_type) = new_reversed;
4013 else
4015 new_type
4016 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
4017 new_reversed));
4018 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
4019 DECL_VINDEX (fndecl) = NULL_TREE;
4022 /* When signature changes, we need to clear builtin info. */
4023 if (DECL_BUILT_IN (fndecl))
4025 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
4026 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
4029 TREE_TYPE (fndecl) = new_type;
4030 DECL_VIRTUAL_P (fndecl) = 0;
4031 DECL_LANG_SPECIFIC (fndecl) = NULL;
4032 otypes.release ();
4033 oparms.release ();
4036 /* Modify the actual arguments of the function call STMT as indicated in
4037 ADJUSTMENTS. If this is a directly recursive call, CS must be NULL.
4038 Otherwise it must contain the corresponding call graph edge. */
4040 void
4041 ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
4042 ipa_parm_adjustment_vec adjustments)
4044 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
4045 vec<tree> vargs;
4046 vec<tree, va_gc> **debug_args = NULL;
4047 gcall *new_stmt;
4048 gimple_stmt_iterator gsi, prev_gsi;
4049 tree callee_decl;
4050 int i, len;
4052 len = adjustments.length ();
4053 vargs.create (len);
4054 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
4055 current_node->remove_stmt_references (stmt);
4057 gsi = gsi_for_stmt (stmt);
4058 prev_gsi = gsi;
4059 gsi_prev (&prev_gsi);
4060 for (i = 0; i < len; i++)
4062 struct ipa_parm_adjustment *adj;
4064 adj = &adjustments[i];
4066 if (adj->op == IPA_PARM_OP_COPY)
4068 tree arg = gimple_call_arg (stmt, adj->base_index);
4070 vargs.quick_push (arg);
4072 else if (adj->op != IPA_PARM_OP_REMOVE)
4074 tree expr, base, off;
4075 location_t loc;
4076 unsigned int deref_align = 0;
4077 bool deref_base = false;
4079 /* We create a new parameter out of the value of the old one; we can
4080 do the following kinds of transformations:
4082 - A scalar passed by reference is converted to a scalar passed by
4083 value. (adj->by_ref is false and the type of the original
4084 actual argument is a pointer to a scalar).
4086 - A part of an aggregate is passed instead of the whole aggregate.
4087 The part can be passed either by value or by reference, this is
4088 determined by value of adj->by_ref. Moreover, the code below
4089 handles both situations when the original aggregate is passed by
4090 value (its type is not a pointer) and when it is passed by
4091 reference (it is a pointer to an aggregate).
4093 When the new argument is passed by reference (adj->by_ref is true)
4094 it must be a part of an aggregate and therefore we form it by
4095 simply taking the address of a reference inside the original
4096 aggregate. */
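/* A hedged sketch of what the code below produces: for a call foo (&s)
where only the member at ADJ->offset is needed by value, BASE becomes &s,
OFF the byte offset, and the pushed argument is a load from the resulting
MEM_REF; with ADJ->by_ref set, the address of the MEM_REF is passed
instead. */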
4098 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
4099 base = gimple_call_arg (stmt, adj->base_index);
4100 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
4101 : EXPR_LOCATION (base);
4103 if (TREE_CODE (base) != ADDR_EXPR
4104 && POINTER_TYPE_P (TREE_TYPE (base)))
4105 off = build_int_cst (adj->alias_ptr_type,
4106 adj->offset / BITS_PER_UNIT);
4107 else
4109 HOST_WIDE_INT base_offset;
4110 tree prev_base;
4111 bool addrof;
4113 if (TREE_CODE (base) == ADDR_EXPR)
4115 base = TREE_OPERAND (base, 0);
4116 addrof = true;
4118 else
4119 addrof = false;
4120 prev_base = base;
4121 base = get_addr_base_and_unit_offset (base, &base_offset);
4122 /* Aggregate arguments can have non-invariant addresses. */
4123 if (!base)
4125 base = build_fold_addr_expr (prev_base);
4126 off = build_int_cst (adj->alias_ptr_type,
4127 adj->offset / BITS_PER_UNIT);
4129 else if (TREE_CODE (base) == MEM_REF)
4131 if (!addrof)
4133 deref_base = true;
4134 deref_align = TYPE_ALIGN (TREE_TYPE (base));
4136 off = build_int_cst (adj->alias_ptr_type,
4137 base_offset
4138 + adj->offset / BITS_PER_UNIT);
4139 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
4140 off);
4141 base = TREE_OPERAND (base, 0);
4143 else
4145 off = build_int_cst (adj->alias_ptr_type,
4146 base_offset
4147 + adj->offset / BITS_PER_UNIT);
4148 base = build_fold_addr_expr (base);
4152 if (!adj->by_ref)
4154 tree type = adj->type;
4155 unsigned int align;
4156 unsigned HOST_WIDE_INT misalign;
4158 if (deref_base)
4160 align = deref_align;
4161 misalign = 0;
4163 else
4165 get_pointer_alignment_1 (base, &align, &misalign);
4166 if (TYPE_ALIGN (type) > align)
4167 align = TYPE_ALIGN (type);
4169 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
4170 * BITS_PER_UNIT);
4171 misalign = misalign & (align - 1);
4172 if (misalign != 0)
4173 align = (misalign & -misalign);
4174 if (align < TYPE_ALIGN (type))
4175 type = build_aligned_type (type, align);
4176 base = force_gimple_operand_gsi (&gsi, base,
4177 true, NULL, true, GSI_SAME_STMT);
4178 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
4179 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
4180 /* If expr is not a valid gimple call argument, emit
4181 a load into a temporary. */
4182 if (is_gimple_reg_type (TREE_TYPE (expr)))
4184 gimple *tem = gimple_build_assign (NULL_TREE, expr);
4185 if (gimple_in_ssa_p (cfun))
4187 gimple_set_vuse (tem, gimple_vuse (stmt));
4188 expr = make_ssa_name (TREE_TYPE (expr), tem);
4190 else
4191 expr = create_tmp_reg (TREE_TYPE (expr));
4192 gimple_assign_set_lhs (tem, expr);
4193 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4196 else
4198 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4199 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
4200 expr = build_fold_addr_expr (expr);
4201 expr = force_gimple_operand_gsi (&gsi, expr,
4202 true, NULL, true, GSI_SAME_STMT);
4204 vargs.quick_push (expr);
4206 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4208 unsigned int ix;
4209 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4210 gimple *def_temp;
4212 arg = gimple_call_arg (stmt, adj->base_index);
4213 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4215 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4216 continue;
4217 arg = fold_convert_loc (gimple_location (stmt),
4218 TREE_TYPE (origin), arg);
4220 if (debug_args == NULL)
4221 debug_args = decl_debug_args_insert (callee_decl);
4222 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4223 if (ddecl == origin)
4225 ddecl = (**debug_args)[ix + 1];
4226 break;
4228 if (ddecl == NULL)
4230 ddecl = make_node (DEBUG_EXPR_DECL);
4231 DECL_ARTIFICIAL (ddecl) = 1;
4232 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4233 DECL_MODE (ddecl) = DECL_MODE (origin);
4235 vec_safe_push (*debug_args, origin);
4236 vec_safe_push (*debug_args, ddecl);
4238 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4239 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4243 if (dump_file && (dump_flags & TDF_DETAILS))
4245 fprintf (dump_file, "replacing stmt:");
4246 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4249 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4250 vargs.release ();
4251 if (gimple_call_lhs (stmt))
4252 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4254 gimple_set_block (new_stmt, gimple_block (stmt));
4255 if (gimple_has_location (stmt))
4256 gimple_set_location (new_stmt, gimple_location (stmt));
4257 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4258 gimple_call_copy_flags (new_stmt, stmt);
4259 if (gimple_in_ssa_p (cfun))
4261 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4262 if (gimple_vdef (stmt))
4264 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4265 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4269 if (dump_file && (dump_flags & TDF_DETAILS))
4271 fprintf (dump_file, "with stmt:");
4272 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4273 fprintf (dump_file, "\n");
4275 gsi_replace (&gsi, new_stmt, true);
4276 if (cs)
4277 cs->set_call_stmt (new_stmt);
4280 current_node->record_stmt_references (gsi_stmt (gsi));
4281 gsi_prev (&gsi);
4283 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4286 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4287 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4288 specifies whether the function should care about type incompatibility between
4289 the current and the new expressions. If it is false, the function will leave
4290 incompatibility issues to the caller. Return true iff the expression
4291 was modified. */
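/* For example (informal), if a parameter was reduced to one of its fields,
an occurrence of that field in *EXPR is replaced by the corresponding
new_decl (dereferenced through a MEM_REF when by_ref is set) and wrapped in
a VIEW_CONVERT_EXPR when CONVERT is set and the types differ. */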
4293 bool
4294 ipa_modify_expr (tree *expr, bool convert,
4295 ipa_parm_adjustment_vec adjustments)
4297 struct ipa_parm_adjustment *cand
4298 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4299 if (!cand)
4300 return false;
4302 tree src;
4303 if (cand->by_ref)
4305 src = build_simple_mem_ref (cand->new_decl);
4306 REF_REVERSE_STORAGE_ORDER (src) = cand->reverse;
4308 else
4309 src = cand->new_decl;
4311 if (dump_file && (dump_flags & TDF_DETAILS))
4313 fprintf (dump_file, "About to replace expr ");
4314 print_generic_expr (dump_file, *expr, 0);
4315 fprintf (dump_file, " with ");
4316 print_generic_expr (dump_file, src, 0);
4317 fprintf (dump_file, "\n");
4320 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4322 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4323 *expr = vce;
4325 else
4326 *expr = src;
4327 return true;
4330 /* If T is an SSA_NAME, return its base variable if T is a default
4331 definition and NULL otherwise. If IGNORE_DEFAULT_DEF is true,
4332 the base variable is always returned, regardless of whether it is a
4333 default def. Return T if it is not an SSA_NAME. */
4335 static tree
4336 get_ssa_base_param (tree t, bool ignore_default_def)
4338 if (TREE_CODE (t) == SSA_NAME)
4340 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4341 return SSA_NAME_VAR (t);
4342 else
4343 return NULL_TREE;
4345 return t;
4348 /* Given an expression, return an adjustment entry specifying the
4349 transformation to be done on EXPR. If no suitable adjustment entry
4350 is found, return NULL.
4352 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs that are not default
4353 definitions; otherwise bail out on them.
4355 If CONVERT is non-NULL, this function will set *CONVERT if the
4356 expression provided is a component reference. ADJUSTMENTS is the
4357 adjustments vector. */
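/* For instance, given an expression such as __imag__ d.f with D a PARM_DECL,
the code below first steps down to d.f (setting *CONVERT), computes the base
and bit offset of the reference, and then searches the vector for a matching
entry that is neither a plain copy nor a removal. */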
4359 ipa_parm_adjustment *
4360 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4361 ipa_parm_adjustment_vec adjustments,
4362 bool ignore_default_def)
4364 if (TREE_CODE (**expr) == BIT_FIELD_REF
4365 || TREE_CODE (**expr) == IMAGPART_EXPR
4366 || TREE_CODE (**expr) == REALPART_EXPR)
4368 *expr = &TREE_OPERAND (**expr, 0);
4369 if (convert)
4370 *convert = true;
4373 HOST_WIDE_INT offset, size, max_size;
4374 bool reverse;
4375 tree base
4376 = get_ref_base_and_extent (**expr, &offset, &size, &max_size, &reverse);
4377 if (!base || size == -1 || max_size == -1)
4378 return NULL;
4380 if (TREE_CODE (base) == MEM_REF)
4382 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4383 base = TREE_OPERAND (base, 0);
4386 base = get_ssa_base_param (base, ignore_default_def);
4387 if (!base || TREE_CODE (base) != PARM_DECL)
4388 return NULL;
4390 struct ipa_parm_adjustment *cand = NULL;
4391 unsigned int len = adjustments.length ();
4392 for (unsigned i = 0; i < len; i++)
4394 struct ipa_parm_adjustment *adj = &adjustments[i];
4396 if (adj->base == base
4397 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4399 cand = adj;
4400 break;
4404 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4405 return NULL;
4406 return cand;
4409 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4411 static bool
4412 index_in_adjustments_multiple_times_p (int base_index,
4413 ipa_parm_adjustment_vec adjustments)
4415 int i, len = adjustments.length ();
4416 bool one = false;
4418 for (i = 0; i < len; i++)
4420 struct ipa_parm_adjustment *adj;
4421 adj = &adjustments[i];
4423 if (adj->base_index == base_index)
4425 if (one)
4426 return true;
4427 else
4428 one = true;
4431 return false;
4435 /* Return adjustments that should have the same effect on function parameters
4436 and call arguments as if they were first changed according to adjustments in
4437 INNER and then by adjustments in OUTER. */
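/* A hypothetical worked example: let the original parameters be (a, b, c).
If INNER removes b, i.e. { COPY a, REMOVE b, COPY c }, and OUTER then
removes the second of the two surviving parameters, i.e. { COPY 0,
REMOVE 1 }, the main loop below emits a copy of a and a removal of c, and
the final loop re-appends INNER's removal of b, so only a survives. */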
4439 ipa_parm_adjustment_vec
4440 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4441 ipa_parm_adjustment_vec outer)
4443 int i, outlen = outer.length ();
4444 int inlen = inner.length ();
4445 int removals = 0;
4446 ipa_parm_adjustment_vec adjustments, tmp;
4448 tmp.create (inlen);
4449 for (i = 0; i < inlen; i++)
4451 struct ipa_parm_adjustment *n;
4452 n = &inner[i];
4454 if (n->op == IPA_PARM_OP_REMOVE)
4455 removals++;
4456 else
4458 /* FIXME: Handling of new arguments is not implemented yet. */
4459 gcc_assert (n->op != IPA_PARM_OP_NEW);
4460 tmp.quick_push (*n);
4464 adjustments.create (outlen + removals);
4465 for (i = 0; i < outlen; i++)
4467 struct ipa_parm_adjustment r;
4468 struct ipa_parm_adjustment *out = &outer[i];
4469 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4471 memset (&r, 0, sizeof (r));
4472 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4473 if (out->op == IPA_PARM_OP_REMOVE)
4475 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4477 r.op = IPA_PARM_OP_REMOVE;
4478 adjustments.quick_push (r);
4480 continue;
4482 else
4484 /* FIXME: Handling of new arguments is not implemented yet. */
4485 gcc_assert (out->op != IPA_PARM_OP_NEW);
4488 r.base_index = in->base_index;
4489 r.type = out->type;
4491 /* FIXME: Create nonlocal value too. */
4493 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4494 r.op = IPA_PARM_OP_COPY;
4495 else if (in->op == IPA_PARM_OP_COPY)
4496 r.offset = out->offset;
4497 else if (out->op == IPA_PARM_OP_COPY)
4498 r.offset = in->offset;
4499 else
4500 r.offset = in->offset + out->offset;
4501 adjustments.quick_push (r);
4504 for (i = 0; i < inlen; i++)
4506 struct ipa_parm_adjustment *n = &inner[i];
4508 if (n->op == IPA_PARM_OP_REMOVE)
4509 adjustments.quick_push (*n);
4512 tmp.release ();
4513 return adjustments;
4516 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a
4517 human-friendly way, assuming they are meant to be applied to FNDECL. */
4519 void
4520 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4521 tree fndecl)
4523 int i, len = adjustments.length ();
4524 bool first = true;
4525 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4527 fprintf (file, "IPA param adjustments: ");
4528 for (i = 0; i < len; i++)
4530 struct ipa_parm_adjustment *adj;
4531 adj = &adjustments[i];
4533 if (!first)
4534 fprintf (file, " ");
4535 else
4536 first = false;
4538 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4539 print_generic_expr (file, parms[adj->base_index], 0);
4540 if (adj->base)
4542 fprintf (file, ", base: ");
4543 print_generic_expr (file, adj->base, 0);
4545 if (adj->new_decl)
4547 fprintf (file, ", new_decl: ");
4548 print_generic_expr (file, adj->new_decl, 0);
4550 if (adj->new_ssa_base)
4552 fprintf (file, ", new_ssa_base: ");
4553 print_generic_expr (file, adj->new_ssa_base, 0);
4556 if (adj->op == IPA_PARM_OP_COPY)
4557 fprintf (file, ", copy_param");
4558 else if (adj->op == IPA_PARM_OP_REMOVE)
4559 fprintf (file, ", remove_param");
4560 else
4561 fprintf (file, ", offset %li", (long) adj->offset);
4562 if (adj->by_ref)
4563 fprintf (file, ", by_ref");
4564 print_node_brief (file, ", type: ", adj->type, 0);
4565 fprintf (file, "\n");
4567 parms.release ();
4570 /* Dump the linked list of aggregate replacement values AV to F. */
4572 void
4573 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4575 bool comma = false;
4576 fprintf (f, " Aggregate replacements:");
4577 for (; av; av = av->next)
4579 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4580 av->index, av->offset);
4581 print_generic_expr (f, av->value, 0);
4582 comma = true;
4584 fprintf (f, "\n");
4587 /* Stream out jump function JUMP_FUNC to OB. */
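/* Informally, the layout written below (and mirrored exactly by
ipa_read_jump_function) is: uhwi type, a type-specific payload, a uhwi
count of aggregate items (followed by a one-bit by_ref bitpack when the
count is nonzero), the items as (offset, value) pairs, and finally the
alignment and known-bits data, each guarded by a one-bit "known" flag. */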
4589 static void
4590 ipa_write_jump_function (struct output_block *ob,
4591 struct ipa_jump_func *jump_func)
4593 struct ipa_agg_jf_item *item;
4594 struct bitpack_d bp;
4595 int i, count;
4597 streamer_write_uhwi (ob, jump_func->type);
4598 switch (jump_func->type)
4600 case IPA_JF_UNKNOWN:
4601 break;
4602 case IPA_JF_CONST:
4603 gcc_assert (
4604 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4605 stream_write_tree (ob, jump_func->value.constant.value, true);
4606 break;
4607 case IPA_JF_PASS_THROUGH:
4608 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4609 if (jump_func->value.pass_through.operation == NOP_EXPR)
4611 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4612 bp = bitpack_create (ob->main_stream);
4613 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4614 streamer_write_bitpack (&bp);
4616 else
4618 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4619 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4621 break;
4622 case IPA_JF_ANCESTOR:
4623 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4624 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4625 bp = bitpack_create (ob->main_stream);
4626 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4627 streamer_write_bitpack (&bp);
4628 break;
4631 count = vec_safe_length (jump_func->agg.items);
4632 streamer_write_uhwi (ob, count);
4633 if (count)
4635 bp = bitpack_create (ob->main_stream);
4636 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4637 streamer_write_bitpack (&bp);
4640 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4642 streamer_write_uhwi (ob, item->offset);
4643 stream_write_tree (ob, item->value, true);
4646 bp = bitpack_create (ob->main_stream);
4647 bp_pack_value (&bp, jump_func->alignment.known, 1);
4648 streamer_write_bitpack (&bp);
4649 if (jump_func->alignment.known)
4651 streamer_write_uhwi (ob, jump_func->alignment.align);
4652 streamer_write_uhwi (ob, jump_func->alignment.misalign);
4655 bp = bitpack_create (ob->main_stream);
4656 bp_pack_value (&bp, jump_func->bits.known, 1);
4657 streamer_write_bitpack (&bp);
4658 if (jump_func->bits.known)
4660 streamer_write_widest_int (ob, jump_func->bits.value);
4661 streamer_write_widest_int (ob, jump_func->bits.mask);
4665 /* Read in jump function JUMP_FUNC from IB. */
4667 static void
4668 ipa_read_jump_function (struct lto_input_block *ib,
4669 struct ipa_jump_func *jump_func,
4670 struct cgraph_edge *cs,
4671 struct data_in *data_in)
4673 enum jump_func_type jftype;
4674 enum tree_code operation;
4675 int i, count;
4677 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4678 switch (jftype)
4680 case IPA_JF_UNKNOWN:
4681 ipa_set_jf_unknown (jump_func);
4682 break;
4683 case IPA_JF_CONST:
4684 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4685 break;
4686 case IPA_JF_PASS_THROUGH:
4687 operation = (enum tree_code) streamer_read_uhwi (ib);
4688 if (operation == NOP_EXPR)
4690 int formal_id = streamer_read_uhwi (ib);
4691 struct bitpack_d bp = streamer_read_bitpack (ib);
4692 bool agg_preserved = bp_unpack_value (&bp, 1);
4693 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4695 else
4697 tree operand = stream_read_tree (ib, data_in);
4698 int formal_id = streamer_read_uhwi (ib);
4699 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4700 operation);
4702 break;
4703 case IPA_JF_ANCESTOR:
4705 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4706 int formal_id = streamer_read_uhwi (ib);
4707 struct bitpack_d bp = streamer_read_bitpack (ib);
4708 bool agg_preserved = bp_unpack_value (&bp, 1);
4709 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4710 break;
4714 count = streamer_read_uhwi (ib);
4715 vec_alloc (jump_func->agg.items, count);
4716 if (count)
4718 struct bitpack_d bp = streamer_read_bitpack (ib);
4719 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4721 for (i = 0; i < count; i++)
4723 struct ipa_agg_jf_item item;
4724 item.offset = streamer_read_uhwi (ib);
4725 item.value = stream_read_tree (ib, data_in);
4726 jump_func->agg.items->quick_push (item);
4729 struct bitpack_d bp = streamer_read_bitpack (ib);
4730 bool alignment_known = bp_unpack_value (&bp, 1);
4731 if (alignment_known)
4733 jump_func->alignment.known = true;
4734 jump_func->alignment.align = streamer_read_uhwi (ib);
4735 jump_func->alignment.misalign = streamer_read_uhwi (ib);
4737 else
4738 jump_func->alignment.known = false;
4740 bp = streamer_read_bitpack (ib);
4741 bool bits_known = bp_unpack_value (&bp, 1);
4742 if (bits_known)
4744 jump_func->bits.known = true;
4745 jump_func->bits.value = streamer_read_widest_int (ib);
4746 jump_func->bits.mask = streamer_read_widest_int (ib);
4748 else
4749 jump_func->bits.known = false;
4752 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4753 relevant to indirect inlining to OB. */
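/* The record streamed below is: hwi param_index, one bitpack of six flags,
an optional hwi offset (present only when agg_contents or polymorphic is
set), and, for polymorphic calls, the OTR token, the OTR type and the
polymorphic call context. */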
4755 static void
4756 ipa_write_indirect_edge_info (struct output_block *ob,
4757 struct cgraph_edge *cs)
4759 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4760 struct bitpack_d bp;
4762 streamer_write_hwi (ob, ii->param_index);
4763 bp = bitpack_create (ob->main_stream);
4764 bp_pack_value (&bp, ii->polymorphic, 1);
4765 bp_pack_value (&bp, ii->agg_contents, 1);
4766 bp_pack_value (&bp, ii->member_ptr, 1);
4767 bp_pack_value (&bp, ii->by_ref, 1);
4768 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
4769 bp_pack_value (&bp, ii->vptr_changed, 1);
4770 streamer_write_bitpack (&bp);
4771 if (ii->agg_contents || ii->polymorphic)
4772 streamer_write_hwi (ob, ii->offset);
4773 else
4774 gcc_assert (ii->offset == 0);
4776 if (ii->polymorphic)
4778 streamer_write_hwi (ob, ii->otr_token);
4779 stream_write_tree (ob, ii->otr_type, true);
4780 ii->context.stream_out (ob);
4784 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4785 relevant to indirect inlining from IB. */
4787 static void
4788 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4789 struct data_in *data_in,
4790 struct cgraph_edge *cs)
4792 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4793 struct bitpack_d bp;
4795 ii->param_index = (int) streamer_read_hwi (ib);
4796 bp = streamer_read_bitpack (ib);
4797 ii->polymorphic = bp_unpack_value (&bp, 1);
4798 ii->agg_contents = bp_unpack_value (&bp, 1);
4799 ii->member_ptr = bp_unpack_value (&bp, 1);
4800 ii->by_ref = bp_unpack_value (&bp, 1);
4801 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
4802 ii->vptr_changed = bp_unpack_value (&bp, 1);
4803 if (ii->agg_contents || ii->polymorphic)
4804 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4805 else
4806 ii->offset = 0;
4807 if (ii->polymorphic)
4809 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4810 ii->otr_type = stream_read_tree (ib, data_in);
4811 ii->context.stream_in (ib, data_in);
4815 /* Stream out NODE info to OB. */
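/* Informally, the per-node record is: the symtab encoder reference, a uhwi
parameter count, per-parameter move costs, a bitpack of per-parameter "used"
flags, per-parameter controlled-use counts, and then, for each direct and
indirect callee edge, its argument count doubled (the low bit flagging the
presence of polymorphic contexts) followed by the jump functions; indirect
edges additionally carry their indirect call info. */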
4817 static void
4818 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4820 int node_ref;
4821 lto_symtab_encoder_t encoder;
4822 struct ipa_node_params *info = IPA_NODE_REF (node);
4823 int j;
4824 struct cgraph_edge *e;
4825 struct bitpack_d bp;
4827 encoder = ob->decl_state->symtab_node_encoder;
4828 node_ref = lto_symtab_encoder_encode (encoder, node);
4829 streamer_write_uhwi (ob, node_ref);
4831 streamer_write_uhwi (ob, ipa_get_param_count (info));
4832 for (j = 0; j < ipa_get_param_count (info); j++)
4833 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4834 bp = bitpack_create (ob->main_stream);
4835 gcc_assert (info->analysis_done
4836 || ipa_get_param_count (info) == 0);
4837 gcc_assert (!info->node_enqueued);
4838 gcc_assert (!info->ipcp_orig_node);
4839 for (j = 0; j < ipa_get_param_count (info); j++)
4840 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4841 streamer_write_bitpack (&bp);
4842 for (j = 0; j < ipa_get_param_count (info); j++)
4843 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4844 for (e = node->callees; e; e = e->next_callee)
4846 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4848 streamer_write_uhwi (ob,
4849 ipa_get_cs_argument_count (args) * 2
4850 + (args->polymorphic_call_contexts != NULL));
4851 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4853 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4854 if (args->polymorphic_call_contexts != NULL)
4855 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4858 for (e = node->indirect_calls; e; e = e->next_callee)
4860 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4862 streamer_write_uhwi (ob,
4863 ipa_get_cs_argument_count (args) * 2
4864 + (args->polymorphic_call_contexts != NULL));
4865 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4867 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4868 if (args->polymorphic_call_contexts != NULL)
4869 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4871 ipa_write_indirect_edge_info (ob, e);
4875 /* Stream in NODE info from IB. */
4877 static void
4878 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4879 struct data_in *data_in)
4881 struct ipa_node_params *info = IPA_NODE_REF (node);
4882 int k;
4883 struct cgraph_edge *e;
4884 struct bitpack_d bp;
4886 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4888 for (k = 0; k < ipa_get_param_count (info); k++)
4889 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4891 bp = streamer_read_bitpack (ib);
4892 if (ipa_get_param_count (info) != 0)
4893 info->analysis_done = true;
4894 info->node_enqueued = false;
4895 for (k = 0; k < ipa_get_param_count (info); k++)
4896 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4897 for (k = 0; k < ipa_get_param_count (info); k++)
4898 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4899 for (e = node->callees; e; e = e->next_callee)
4901 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4902 int count = streamer_read_uhwi (ib);
4903 bool contexts_computed = count & 1;
4904 count /= 2;
4906 if (!count)
4907 continue;
4908 vec_safe_grow_cleared (args->jump_functions, count);
4909 if (contexts_computed)
4910 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4912 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4914 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4915 data_in);
4916 if (contexts_computed)
4917 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4920 for (e = node->indirect_calls; e; e = e->next_callee)
4922 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4923 int count = streamer_read_uhwi (ib);
4924 bool contexts_computed = count & 1;
4925 count /= 2;
4927 if (count)
4929 vec_safe_grow_cleared (args->jump_functions, count);
4930 if (contexts_computed)
4931 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4932 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4934 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4935 data_in);
4936 if (contexts_computed)
4937 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4940 ipa_read_indirect_edge_info (ib, data_in, e);
4944 /* Write jump functions for all analyzed functions in the current partition. */
4946 void
4947 ipa_prop_write_jump_functions (void)
4949 struct cgraph_node *node;
4950 struct output_block *ob;
4951 unsigned int count = 0;
4952 lto_symtab_encoder_iterator lsei;
4953 lto_symtab_encoder_t encoder;
4955 if (!ipa_node_params_sum)
4956 return;
4958 ob = create_output_block (LTO_section_jump_functions);
4959 encoder = ob->decl_state->symtab_node_encoder;
4960 ob->symbol = NULL;
4961 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4962 lsei_next_function_in_partition (&lsei))
4964 node = lsei_cgraph_node (lsei);
4965 if (node->has_gimple_body_p ()
4966 && IPA_NODE_REF (node) != NULL)
4967 count++;
4970 streamer_write_uhwi (ob, count);
4972 /* Process all of the functions. */
4973 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4974 lsei_next_function_in_partition (&lsei))
4976 node = lsei_cgraph_node (lsei);
4977 if (node->has_gimple_body_p ()
4978 && IPA_NODE_REF (node) != NULL)
4979 ipa_write_node_info (ob, node);
4981 streamer_write_char_stream (ob->main_stream, 0);
4982 produce_asm (ob, NULL);
4983 destroy_output_block (ob);
4986 /* Read section in file FILE_DATA of length LEN with data DATA. */
4988 static void
4989 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4990 size_t len)
4992 const struct lto_function_header *header =
4993 (const struct lto_function_header *) data;
4994 const int cfg_offset = sizeof (struct lto_function_header);
4995 const int main_offset = cfg_offset + header->cfg_size;
4996 const int string_offset = main_offset + header->main_size;
4997 struct data_in *data_in;
4998 unsigned int i;
4999 unsigned int count;
5001 lto_input_block ib_main ((const char *) data + main_offset,
5002 header->main_size, file_data->mode_table);
5004 data_in =
5005 lto_data_in_create (file_data, (const char *) data + string_offset,
5006 header->string_size, vNULL);
5007 count = streamer_read_uhwi (&ib_main);
5009 for (i = 0; i < count; i++)
5011 unsigned int index;
5012 struct cgraph_node *node;
5013 lto_symtab_encoder_t encoder;
5015 index = streamer_read_uhwi (&ib_main);
5016 encoder = file_data->symtab_node_encoder;
5017 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5018 index));
5019 gcc_assert (node->definition);
5020 ipa_read_node_info (&ib_main, node, data_in);
5022 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5023 len);
5024 lto_data_in_delete (data_in);
5027 /* Read ipcp jump functions. */
5029 void
5030 ipa_prop_read_jump_functions (void)
5032 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5033 struct lto_file_decl_data *file_data;
5034 unsigned int j = 0;
5036 ipa_check_create_node_params ();
5037 ipa_check_create_edge_args ();
5038 ipa_register_cgraph_hooks ();
5040 while ((file_data = file_data_vec[j++]))
5042 size_t len;
5043 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
5045 if (data)
5046 ipa_prop_read_section (file_data, data, len);
5050 /* After merging units, we can get mismatches in argument counts.
5051 Decl merging might also have rendered parameter lists obsolete.
5052 Make sure the parameter and edge argument summaries exist. */
5054 void
5055 ipa_update_after_lto_read (void)
5057 ipa_check_create_node_params ();
5058 ipa_check_create_edge_args ();
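/* Stream out the IPA-CP transformation summary of NODE -- its aggregate
value replacement chain followed by the parameter alignment and known-bits
vectors -- to OB. */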
5061 void
5062 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
5064 int node_ref;
5065 unsigned int count = 0;
5066 lto_symtab_encoder_t encoder;
5067 struct ipa_agg_replacement_value *aggvals, *av;
5069 aggvals = ipa_get_agg_replacements_for_node (node);
5070 encoder = ob->decl_state->symtab_node_encoder;
5071 node_ref = lto_symtab_encoder_encode (encoder, node);
5072 streamer_write_uhwi (ob, node_ref);
5074 for (av = aggvals; av; av = av->next)
5075 count++;
5076 streamer_write_uhwi (ob, count);
5078 for (av = aggvals; av; av = av->next)
5080 struct bitpack_d bp;
5082 streamer_write_uhwi (ob, av->offset);
5083 streamer_write_uhwi (ob, av->index);
5084 stream_write_tree (ob, av->value, true);
5086 bp = bitpack_create (ob->main_stream);
5087 bp_pack_value (&bp, av->by_ref, 1);
5088 streamer_write_bitpack (&bp);
5091 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5092 if (ts && vec_safe_length (ts->alignments) > 0)
5094 count = ts->alignments->length ();
5096 streamer_write_uhwi (ob, count);
5097 for (unsigned i = 0; i < count; ++i)
5099 ipa_alignment *parm_al = &(*ts->alignments)[i];
5101 struct bitpack_d bp;
5102 bp = bitpack_create (ob->main_stream);
5103 bp_pack_value (&bp, parm_al->known, 1);
5104 streamer_write_bitpack (&bp);
5105 if (parm_al->known)
5107 streamer_write_uhwi (ob, parm_al->align);
5108 streamer_write_hwi_in_range (ob->main_stream, 0, parm_al->align,
5109 parm_al->misalign);
5113 else
5114 streamer_write_uhwi (ob, 0);
5116 ts = ipcp_get_transformation_summary (node);
5117 if (ts && vec_safe_length (ts->bits) > 0)
5119 count = ts->bits->length ();
5120 streamer_write_uhwi (ob, count);
5122 for (unsigned i = 0; i < count; ++i)
5124 const ipa_bits& bits_jfunc = (*ts->bits)[i];
5125 struct bitpack_d bp = bitpack_create (ob->main_stream);
5126 bp_pack_value (&bp, bits_jfunc.known, 1);
5127 streamer_write_bitpack (&bp);
5128 if (bits_jfunc.known)
5130 streamer_write_widest_int (ob, bits_jfunc.value);
5131 streamer_write_widest_int (ob, bits_jfunc.mask);
5135 else
5136 streamer_write_uhwi (ob, 0);
5139 /* Stream in the aggregate value replacement chain for NODE from IB. */
5141 static void
5142 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
5143 data_in *data_in)
5145 struct ipa_agg_replacement_value *aggvals = NULL;
5146 unsigned int count, i;
5148 count = streamer_read_uhwi (ib);
5149 for (i = 0; i < count; i++)
5151 struct ipa_agg_replacement_value *av;
5152 struct bitpack_d bp;
5154 av = ggc_alloc<ipa_agg_replacement_value> ();
5155 av->offset = streamer_read_uhwi (ib);
5156 av->index = streamer_read_uhwi (ib);
5157 av->value = stream_read_tree (ib, data_in);
5158 bp = streamer_read_bitpack (ib);
5159 av->by_ref = bp_unpack_value (&bp, 1);
5160 av->next = aggvals;
5161 aggvals = av;
5163 ipa_set_node_agg_value_chain (node, aggvals);
5165 count = streamer_read_uhwi (ib);
5166 if (count > 0)
5168 ipcp_grow_transformations_if_necessary ();
5170 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5171 vec_safe_grow_cleared (ts->alignments, count);
5173 for (i = 0; i < count; i++)
5175 ipa_alignment *parm_al;
5176 parm_al = &(*ts->alignments)[i];
5177 struct bitpack_d bp;
5178 bp = streamer_read_bitpack (ib);
5179 parm_al->known = bp_unpack_value (&bp, 1);
5180 if (parm_al->known)
5182 parm_al->align = streamer_read_uhwi (ib);
5183 parm_al->misalign
5184 = streamer_read_hwi_in_range (ib, "ipa-prop misalign",
5185 0, parm_al->align);
5190 count = streamer_read_uhwi (ib);
5191 if (count > 0)
5193 ipcp_grow_transformations_if_necessary ();
5194 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5195 vec_safe_grow_cleared (ts->bits, count);
5197 for (i = 0; i < count; i++)
5199 ipa_bits& bits_jfunc = (*ts->bits)[i];
5200 struct bitpack_d bp = streamer_read_bitpack (ib);
5201 bits_jfunc.known = bp_unpack_value (&bp, 1);
5202 if (bits_jfunc.known)
5204 bits_jfunc.value = streamer_read_widest_int (ib);
5205 bits_jfunc.mask = streamer_read_widest_int (ib);
5211 /* Write IPA-CP transformation summaries for all nodes in the current partition. */
5213 void
5214 ipcp_write_transformation_summaries (void)
5216 struct cgraph_node *node;
5217 struct output_block *ob;
5218 unsigned int count = 0;
5219 lto_symtab_encoder_iterator lsei;
5220 lto_symtab_encoder_t encoder;
5222 ob = create_output_block (LTO_section_ipcp_transform);
5223 encoder = ob->decl_state->symtab_node_encoder;
5224 ob->symbol = NULL;
5225 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5226 lsei_next_function_in_partition (&lsei))
5228 node = lsei_cgraph_node (lsei);
5229 if (node->has_gimple_body_p ())
5230 count++;
5233 streamer_write_uhwi (ob, count);
5235 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5236 lsei_next_function_in_partition (&lsei))
5238 node = lsei_cgraph_node (lsei);
5239 if (node->has_gimple_body_p ())
5240 write_ipcp_transformation_info (ob, node);
5242 streamer_write_char_stream (ob->main_stream, 0);
5243 produce_asm (ob, NULL);
5244 destroy_output_block (ob);
5247 /* Read replacements section in file FILE_DATA of length LEN with data
5248 DATA. */
5250 static void
5251 read_replacements_section (struct lto_file_decl_data *file_data,
5252 const char *data,
5253 size_t len)
5255 const struct lto_function_header *header =
5256 (const struct lto_function_header *) data;
5257 const int cfg_offset = sizeof (struct lto_function_header);
5258 const int main_offset = cfg_offset + header->cfg_size;
5259 const int string_offset = main_offset + header->main_size;
5260 struct data_in *data_in;
5261 unsigned int i;
5262 unsigned int count;
5264 lto_input_block ib_main ((const char *) data + main_offset,
5265 header->main_size, file_data->mode_table);
5267 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5268 header->string_size, vNULL);
5269 count = streamer_read_uhwi (&ib_main);
5271 for (i = 0; i < count; i++)
5273 unsigned int index;
5274 struct cgraph_node *node;
5275 lto_symtab_encoder_t encoder;
5277 index = streamer_read_uhwi (&ib_main);
5278 encoder = file_data->symtab_node_encoder;
5279 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5280 index));
5281 gcc_assert (node->definition);
5282 read_ipcp_transformation_info (&ib_main, node, data_in);
5284 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
5285 len);
5286 lto_data_in_delete (data_in);
5289 /* Read IPA-CP aggregate replacements and other transformation summaries. */
5291 void
5292 ipcp_read_transformation_summaries (void)
5294 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5295 struct lto_file_decl_data *file_data;
5296 unsigned int j = 0;
5298 while ((file_data = file_data_vec[j++]))
5300 size_t len;
5301 const char *data = lto_get_section_data (file_data,
5302 LTO_section_ipcp_transform,
5303 NULL, &len);
5304 if (data)
5305 read_replacements_section (file_data, data, len);
5309 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5310 NODE. */
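/* E.g. if NODE skips the original second parameter (bit 1 set in
combined_args_to_skip), the mapping computed below is 0 -> 0, 1 -> -1 and
2 -> 1, so an AGGVAL entry for index 2 is renumbered to 1. */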
5312 static void
5313 adjust_agg_replacement_values (struct cgraph_node *node,
5314 struct ipa_agg_replacement_value *aggval)
5316 struct ipa_agg_replacement_value *v;
5317 int i, c = 0, d = 0, *adj;
5319 if (!node->clone.combined_args_to_skip)
5320 return;
5322 for (v = aggval; v; v = v->next)
5324 gcc_assert (v->index >= 0);
5325 if (c < v->index)
5326 c = v->index;
5328 c++;
5330 adj = XALLOCAVEC (int, c);
5331 for (i = 0; i < c; i++)
5332 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5334 adj[i] = -1;
5335 d++;
5337 else
5338 adj[i] = i - d;
5340 for (v = aggval; v; v = v->next)
5341 v->index = adj[v->index];
5344 /* Dominator walker driving the ipcp modification phase. */
5346 class ipcp_modif_dom_walker : public dom_walker
5348 public:
5349 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
5350 vec<ipa_param_descriptor> descs,
5351 struct ipa_agg_replacement_value *av,
5352 bool *sc, bool *cc)
5353 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5354 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5356 virtual edge before_dom_children (basic_block);
5358 private:
5359 struct ipa_func_body_info *m_fbi;
5360 vec<ipa_param_descriptor> m_descriptors;
5361 struct ipa_agg_replacement_value *m_aggval;
5362 bool *m_something_changed, *m_cfg_changed;
5365 edge
5366 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5368 gimple_stmt_iterator gsi;
5369 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5371 struct ipa_agg_replacement_value *v;
5372 gimple *stmt = gsi_stmt (gsi);
5373 tree rhs, val, t;
5374 HOST_WIDE_INT offset, size;
5375 int index;
5376 bool by_ref, vce;
5378 if (!gimple_assign_load_p (stmt))
5379 continue;
5380 rhs = gimple_assign_rhs1 (stmt);
5381 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5382 continue;
5384 vce = false;
5385 t = rhs;
5386 while (handled_component_p (t))
5388 /* V_C_E can do things like convert an array of integers to one
5389 bigger integer and similar things that we do not handle below. */
5390 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5392 vce = true;
5393 break;
5395 t = TREE_OPERAND (t, 0);
5397 if (vce)
5398 continue;
5400 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
5401 &offset, &size, &by_ref))
5402 continue;
5403 for (v = m_aggval; v; v = v->next)
5404 if (v->index == index
5405 && v->offset == offset)
5406 break;
5407 if (!v
5408 || v->by_ref != by_ref
5409 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5410 continue;
5412 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5413 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5415 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5416 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5417 else if (TYPE_SIZE (TREE_TYPE (rhs))
5418 == TYPE_SIZE (TREE_TYPE (v->value)))
5419 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5420 else
5422 if (dump_file)
5424 fprintf (dump_file, " const ");
5425 print_generic_expr (dump_file, v->value, 0);
5426 fprintf (dump_file, " can't be converted to type of ");
5427 print_generic_expr (dump_file, rhs, 0);
5428 fprintf (dump_file, "\n");
5430 continue;
5433 else
5434 val = v->value;
5436 if (dump_file && (dump_flags & TDF_DETAILS))
5438 fprintf (dump_file, "Modifying stmt:\n ");
5439 print_gimple_stmt (dump_file, stmt, 0, 0);
5441 gimple_assign_set_rhs_from_tree (&gsi, val);
5442 update_stmt (stmt);
5444 if (dump_file && (dump_flags & TDF_DETAILS))
5446 fprintf (dump_file, "into:\n ");
5447 print_gimple_stmt (dump_file, stmt, 0, 0);
5448 fprintf (dump_file, "\n");
5451 *m_something_changed = true;
5452 if (maybe_clean_eh_stmt (stmt)
5453 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5454 *m_cfg_changed = true;
5456 return NULL;
5459 /* Update alignment of formal parameters as described in
5460 ipcp_transformation_summary. */
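/* For instance, a pointer parameter whose summary records alignment 16 with
misalignment 4 has that information attached to its SSA default definition
via set_ptr_info_alignment, unless points-to analysis already knew an
alignment at least as strong. */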
5462 static void
5463 ipcp_update_alignments (struct cgraph_node *node)
5465 tree fndecl = node->decl;
5466 tree parm = DECL_ARGUMENTS (fndecl);
5467 tree next_parm = parm;
5468 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5469 if (!ts || vec_safe_length (ts->alignments) == 0)
5470 return;
5471 const vec<ipa_alignment, va_gc> &alignments = *ts->alignments;
5472 unsigned count = alignments.length ();
5474 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5476 if (node->clone.combined_args_to_skip
5477 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5478 continue;
5479 gcc_checking_assert (parm);
5480 next_parm = DECL_CHAIN (parm);
5482 if (!alignments[i].known || !is_gimple_reg (parm))
5483 continue;
5484 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5485 if (!ddef)
5486 continue;
5488 if (dump_file)
5489 fprintf (dump_file, " Adjusting alignment of param %u to %u, "
5490 "misalignment to %u\n", i, alignments[i].align,
5491 alignments[i].misalign);
5493 struct ptr_info_def *pi = get_ptr_info (ddef);
5494 gcc_checking_assert (pi);
5495 unsigned old_align;
5496 unsigned old_misalign;
5497 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5499 if (old_known
5500 && old_align >= alignments[i].align)
5502 if (dump_file)
5503 fprintf (dump_file, " But the alignment was already %u.\n",
5504 old_align);
5505 continue;
5507 set_ptr_info_alignment (pi, alignments[i].align, alignments[i].misalign);
5511 /* Update bits info of formal parameters as described in
5512 ipcp_transformation_summary. */
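/* Informally, mask bits set to 1 mark positions whose value is unknown, so
ORing the mask with the known value yields a conservative set of
possibly-nonzero bits; e.g. value 0x4 with mask 0x3 gives nonzero bits
0x7. */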
5514 static void
5515 ipcp_update_bits (struct cgraph_node *node)
5517 tree parm = DECL_ARGUMENTS (node->decl);
5518 tree next_parm = parm;
5519 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5521 if (!ts || vec_safe_length (ts->bits) == 0)
5522 return;
5524 vec<ipa_bits, va_gc> &bits = *ts->bits;
5525 unsigned count = bits.length ();
5527 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5529 if (node->clone.combined_args_to_skip
5530 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5531 continue;
5533 gcc_checking_assert (parm);
5534 next_parm = DECL_CHAIN (parm);
5536 if (!bits[i].known
5537 || !INTEGRAL_TYPE_P (TREE_TYPE (parm))
5538 || !is_gimple_reg (parm))
5539 continue;
5541 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5542 if (!ddef)
5543 continue;
5545 if (dump_file)
5547 fprintf (dump_file, "Adjusting mask for param %u to ", i);
5548 print_hex (bits[i].mask, dump_file);
5549 fprintf (dump_file, "\n");
5552 unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
5553 signop sgn = TYPE_SIGN (TREE_TYPE (ddef));
5555 wide_int nonzero_bits = wide_int::from (bits[i].mask, prec, UNSIGNED)
5556 | wide_int::from (bits[i].value, prec, sgn);
5557 set_nonzero_bits (ddef, nonzero_bits);
5561 /* IPCP transformation phase applying the propagated aggregate values, alignments and known bits to the body of NODE. */
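/* E.g. (informal), when the summary of NODE records that the aggregate
pointed to by a parameter always holds the constant 42 at some offset, a
load of that slot, such as x_1 = p_2(D)->f, is rewritten to x_1 = 42 by the
dominator walk below. */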
5563 unsigned int
5564 ipcp_transform_function (struct cgraph_node *node)
5566 vec<ipa_param_descriptor> descriptors = vNULL;
5567 struct ipa_func_body_info fbi;
5568 struct ipa_agg_replacement_value *aggval;
5569 int param_count;
5570 bool cfg_changed = false, something_changed = false;
5572 gcc_checking_assert (cfun);
5573 gcc_checking_assert (current_function_decl);
5575 if (dump_file)
5576 fprintf (dump_file, "Modification phase of node %s/%i\n",
5577 node->name (), node->order);
5579 ipcp_update_alignments (node);
5580 ipcp_update_bits (node);
5581 aggval = ipa_get_agg_replacements_for_node (node);
5582 if (!aggval)
5583 return 0;
5584 param_count = count_formal_params (node->decl);
5585 if (param_count == 0)
5586 return 0;
5587 adjust_agg_replacement_values (node, aggval);
5588 if (dump_file)
5589 ipa_dump_agg_replacement_values (dump_file, aggval);
5591 fbi.node = node;
5592 fbi.info = NULL;
5593 fbi.bb_infos = vNULL;
5594 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5595 fbi.param_count = param_count;
5596 fbi.aa_walked = 0;
5598 descriptors.safe_grow_cleared (param_count);
5599 ipa_populate_param_decls (node, descriptors);
5600 calculate_dominance_info (CDI_DOMINATORS);
5601 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5602 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5604 int i;
5605 struct ipa_bb_info *bi;
5606 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5607 free_ipa_bb_info (bi);
5608 fbi.bb_infos.release ();
5609 free_dominance_info (CDI_DOMINATORS);
5610 (*ipcp_transformations)[node->uid].agg_values = NULL;
5611 (*ipcp_transformations)[node->uid].alignments = NULL;
5612 descriptors.release ();
5614 if (!something_changed)
5615 return 0;
5616 else if (cfg_changed)
5617 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5618 else
5619 return TODO_update_ssa_only_virtuals;