/* Interprocedural analyses.
   Copyright (C) 2005-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "alloc-pool.h"
28 #include "tree-pass.h"
29 #include "ssa.h"
30 #include "tree-streamer.h"
31 #include "cgraph.h"
32 #include "diagnostic.h"
33 #include "fold-const.h"
34 #include "gimple-fold.h"
35 #include "tree-eh.h"
36 #include "calls.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "gimplify-me.h"
42 #include "gimple-walk.h"
43 #include "symbol-summary.h"
44 #include "ipa-prop.h"
45 #include "tree-cfg.h"
46 #include "tree-dfa.h"
47 #include "tree-inline.h"
48 #include "ipa-inline.h"
49 #include "gimple-pretty-print.h"
50 #include "params.h"
51 #include "ipa-utils.h"
52 #include "dbgcnt.h"
53 #include "domwalk.h"
54 #include "builtins.h"
/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;
/* Vector of IPA-CP transformation data for each clone.  */
vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
/* Vector where the parameter infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");

/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize)
	 || !opt_for_fn (node->decl, flag_ipa_cp);
}

/* Return index of the formal whose tree is PTREE in the parameter vector
   DESCRIPTORS, or -1 if it is not there.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl_or_type == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}

/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl_or_type = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
							     true);
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

static int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;

  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Dump the textual representation of the Ith formal parameter of the function
   corresponding to INFO into FILE.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl_or_type)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl_or_type, 0);
    }
}

/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}

/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val, 0);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)), 0);
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f, jump_func->value.pass_through.operand, 0);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value, 0);
		}
	      fprintf (f, "\n");
	    }
	}

      struct ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, "         Context: ");
	  ctx->dump (f);
	}

      if (jump_func->bits.known)
	{
	  fprintf (f, "         value: ");
	  print_hex (jump_func->bits.value, f);
	  fprintf (f, ", mask: ");
	  print_hex (jump_func->bits.mask, f);
	  fprintf (f, "\n");
	}
      else
	fprintf (f, "         Unknown bits\n");

      if (jump_func->vr_known)
	{
	  fprintf (f, "         VR  ");
	  fprintf (f, "%s[",
		   (jump_func->m_vr.type == VR_ANTI_RANGE) ? "~" : "");
	  print_decs (jump_func->m_vr.min, f);
	  fprintf (f, ", ");
	  print_decs (jump_func->m_vr.max, f);
	  fprintf (f, "]\n");
	}
      else
	fprintf (f, "         Unknown VR\n");
    }
}

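/* For orientation, the output of the function above for one callsite might
   look like this (an illustrative sample assembled from the format strings
   above, not a real dump):

       param 0: PASS THROUGH: 0, op nop_expr, agg_preserved
       param 1: CONST: 42
         Aggregate passed by reference:
           offset: 0, cst: 7

   i.e. argument 0 forwards the caller's parameter 0 unchanged, argument 1 is
   a known constant, and one constant is known at offset 0 of an aggregate
   passed by reference.  */
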
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s/%i:\n", node->name (),
	   node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
	       xstrdup_for_dump (node->name ()), node->order,
	       xstrdup_for_dump (cs->callee->name ()),
	       cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

/* Set JFUNC to be a know-nothing jump function.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
  jfunc->bits.known = false;
  jfunc->vr_known = false;
}

/* Set DST to be a copy of another constant jump function SRC (to be used by
   jump function combination code).  The two functions will share their
   rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}

/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}

/* Set JFUNC to be an arithmetic pass-through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}

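/* The four setters above cover all jump function kinds that the analysis
   below produces.  As a quick illustration (hypothetical source code, not
   from this file), in a method whose formal parameters are A and THIS, a call
   such as

     bar (a, a + 4, &this->D.1748);

   would typically yield a simple pass-through jump function for the first
   argument, an arithmetic pass-through (operation PLUS_EXPR, operand 4) for
   the second, and an ancestor jump function with the offset of the D.1748
   field for the third.  */
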
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can
   try to derive the new type.  That is enough and we can stop, we will never
   see the calls into constructors of sub-objects in this code.  Therefore we
   can safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}

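/* As an illustration of the constructor assumptions above, a constructor of
   a class B derived from A conceptually expands to (hypothetical C++, not
   from the sources):

     B::B () : A ()      // 1) constructors of ancestor sub-objects run first,
     {
       // 2) the VMT pointers of this object and all its ancestors are
       //    stored here, and only then
       do_user_code ();  // 3) member constructors and user code run.
     }

   Hence, when walking statements backwards from section 3, any call
   encountered before the VMT stores of section 2 can be safely ignored.  */
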
/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}

/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return true if the dynamic type may change
   between the beginning of the function and the point where CALL is invoked.

   Generally functions are not allowed to change the type of such instances,
   but they can call destructors.  We assume that methods cannot destroy the
   THIS pointer.  Also as special cases, constructors and destructors may
   change the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that would require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inline cdtor is actually working on ARG, but we don't have
     an easy tie on this, so punt on all non-pure cdtors).
     We may also record the types of cdtors and once we know the type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call);
	       block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gcall *call, struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
						call, jfunc, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
						call, jfunc, 0);
}

/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct ipa_func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}

/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}

/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}

/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but that does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  if (TREE_READONLY (base))
    return true;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}

/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params, provided the value is
   known not to have been modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
			    vec<ipa_param_descriptor> descriptors,
			    gimple *stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}

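/* For instance (an illustrative sketch, not from this file), in a function
   with a formal parameter A that is not a gimple register, the load

     a.0_2 = a;

   is recognized by load_from_unmodified_param and yields the index of A,
   provided no aliased store may modify A before the statement.  */
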
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading
   data from an aggregate passed as a parameter.

   The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it
   returns false if it cannot prove that the value has not been modified
   before the load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will
   return true even if it cannot prove the value has not been modified, in
   that case it will store false to *GUARANTEED_UNMODIFIED, otherwise it will
   store true there.

   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index,
   offset within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor> descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
			bool *by_ref_p, bool *guaranteed_unmodified)
{
  int index;
  HOST_WIDE_INT size, max_size;
  bool reverse;
  tree base
    = get_ref_base_and_extent (op, offset_p, &size, &max_size, &reverse);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  if (guaranteed_unmodified)
	    *guaranteed_unmodified = true;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	 void (*<T2e4>) (struct S *) D.1867;
	 struct S * p.1;

	 <bb 2>:
	 p.1_1 = p;
	 D.1867_2 = p.1_1->f;
	 D.1867_2 ();
	 gdp = &p;
      */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0)
    {
      bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
      if (!data_preserved && !guaranteed_unmodified)
	return false;

      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      if (guaranteed_unmodified)
	*guaranteed_unmodified = data_preserved;
      return true;
    }
  return false;
}

/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
	int a.0;

	a.0_2 = a;
	bar (a.0_2);
      }

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
	int D.2064;

	D.2064_4 = a.1(D) + 4;
	bar (D.2064_4);
      }

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
	int a.0;
	int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);
      }

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);
     }

   INFO is the structure describing individual parameters as they are accessed
   in different stages of IPA optimizations.  PARMS_AINFO contains the
   information that is only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
	{
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  ipa_set_jf_arith_pass_through (jfunc, index, op2,
					 gimple_assign_rhs_code (stmt));
	}
      else if (gimple_assign_single_p (stmt))
	{
	  bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
	  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size, &reverse);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size, &reverse);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}

/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

     <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

     <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}

/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}

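/* The record layout matched by the predicate above corresponds to what the
   C++ front end builds for a pointer to member function, roughly (an
   illustrative sketch, not a declaration from the sources):

     struct
     {
       void (T::* __pfn) ();  // pointer to METHOD_TYPE
       long __delta;          // integral adjustment of the THIS pointer
     };

   The __pfn and __delta names also appear in the member pointer call pattern
   documented before ipa_analyze_indirect_call_uses below.  */
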
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}

/* Simple linked list, describing known contents of an aggregate before a
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents are known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};

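/* For example (illustrative, not from the sources), if the statements

     a.f1 = 16;
     a.f2 = x_3;

   precede a call taking A as an argument, the list will contain two elements
   ordered by offset: one describing f1 with constant 16 and one describing f2
   with a NULL constant, since the value of x_3 is not an IP invariant.  */
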
/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
				HOST_WIDE_INT lhs_offset,
				HOST_WIDE_INT lhs_size,
				bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
	return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	/* We already know this value is subsequently overwritten with
	   something else.  */
	*already_there = true;
      else
	/* Otherwise this is a partial overlap which we cannot
	   represent.  */
	return NULL;
    }
  return p;
}

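/* For instance (illustrative), if the list already contains an entry with
   offset 0 and size 32: a new entry with offset 0 and size 32 only sets
   *ALREADY_THERE; a new entry with offset 16 partially overlaps it and makes
   the function return NULL; and a new entry with offset 32 is chained in
   right after the existing one.  */
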
/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int const_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
	{
	  struct ipa_agg_jf_item item;
	  item.offset = list->offset - arg_offset;
	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	  item.value = unshare_expr_without_location (list->constant);
	  jfunc->agg.items->quick_push (item);
	}
      list = list->next;
    }
}

/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */

static void
determine_locally_known_aggregate_parts (gcall *call, tree arg,
					 tree arg_type,
					 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  if (PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS) == 0)
    return;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  HOST_WIDE_INT arg_max_size;
	  bool reverse;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					      &arg_max_size, &reverse);
	  if (arg_max_size == -1
	      || arg_max_size != arg_size
	      || arg_offset < 0)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;
      bool reverse;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					  &arg_max_size, &reverse);
      if (arg_max_size == -1
	  || arg_max_size != arg_size
	  || arg_offset < 0)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
     structures describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple *stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;
      bool reverse;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
	continue;
      if (!gimple_assign_single_p (stmt))
	break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
	  || TREE_CODE (lhs) == BIT_FIELD_REF
	  || contains_bitfld_component_ref_p (lhs))
	break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
					  &lhs_max_size, &reverse);
      if (lhs_max_size == -1
	  || lhs_max_size != lhs_size)
	break;

      if (check_ref)
	{
	  if (TREE_CODE (lhs_base) != MEM_REF
	      || TREE_OPERAND (lhs_base, 0) != arg_base
	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	    break;
	}
      else if (lhs_base != arg_base)
	{
	  if (DECL_P (lhs_base))
	    continue;
	  else
	    break;
	}

      bool already_there = false;
      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
					  &already_there);
      if (!p)
	break;
      if (already_there)
	continue;

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
	{
	  n->constant = rhs;
	  const_count++;
	}
      else
	n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}

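/* Return the type of the Ith formal parameter of the callee of edge E, or
   NULL if it cannot be determined from either the callee declaration or the
   function type of the call statement.  */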
static tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}

/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  struct ipa_polymorphic_call_context context (cs->caller->decl,
						       arg, cs->call_stmt,
						       &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  bool addr_nonzero = false;
	  bool strict_overflow = false;

	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && get_ptr_nonnull (arg))
	    addr_nonzero = true;
	  else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
	    addr_nonzero = true;

	  if (addr_nonzero)
	    {
	      jfunc->vr_known = true;
	      jfunc->m_vr.type = VR_ANTI_RANGE;
	      jfunc->m_vr.min = build_int_cst (TREE_TYPE (arg), 0);
	      jfunc->m_vr.max = build_int_cst (TREE_TYPE (arg), 0);
	      jfunc->m_vr.equiv = NULL;
	    }
	  else
	    gcc_assert (!jfunc->vr_known);
	}
      else
	{
	  wide_int min, max;
	  value_range_type type;
	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && (type = get_range_info (arg, &min, &max))
	      && (type == VR_RANGE || type == VR_ANTI_RANGE))
	    {
	      value_range vr;

	      vr.type = type;
	      vr.min = wide_int_to_tree (TREE_TYPE (arg), min);
	      vr.max = wide_int_to_tree (TREE_TYPE (arg), max);
	      vr.equiv = NULL;
	      extract_range_from_unary_expr (&jfunc->m_vr,
					     NOP_EXPR,
					     param_type,
					     &vr, TREE_TYPE (arg));
	      if (jfunc->m_vr.type == VR_RANGE
		  || jfunc->m_vr.type == VR_ANTI_RANGE)
		jfunc->vr_known = true;
	      else
		jfunc->vr_known = false;
	    }
	  else
	    gcc_assert (!jfunc->vr_known);
	}

      if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
	  && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
	{
	  jfunc->bits.known = true;

	  if (TREE_CODE (arg) == SSA_NAME)
	    {
	      jfunc->bits.value = 0;
	      jfunc->bits.mask = widest_int::from (get_nonzero_bits (arg),
						   TYPE_SIGN (TREE_TYPE (arg)));
	    }
	  else
	    {
	      jfunc->bits.value = wi::to_widest (arg);
	      jfunc->bits.mask = 0;
	    }
	}
      else if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  unsigned HOST_WIDE_INT bitpos;
	  unsigned align;

	  jfunc->bits.known = true;
	  get_pointer_alignment_1 (arg, &align, &bitpos);
	  jfunc->bits.mask
	    = wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false)
	      .and_not (align / BITS_PER_UNIT - 1);
	  jfunc->bits.value = bitpos / BITS_PER_UNIT;
	}
      else
	gcc_assert (!jfunc->bits.known);

      if (is_gimple_ip_invariant (arg)
	  || (VAR_P (arg)
	      && is_global_var (arg)
	      && TREE_READONLY (arg)))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >= 0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
		}
	    }
	  else
	    {
	      gimple *stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call,
						    as_a <gphi *> (stmt));
	    }
	}

      /* If ARG is a pointer, we cannot use its type to determine the type of
	 aggregate passed (because type conversions are ignored in gimple).
	 Usually we can safely get the type from the function declaration, but
	 in case of K&R prototypes or variadic functions we can try our luck
	 with the type of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object,
	 we may better work out the type based on the memory stores we
	 find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
    }
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}

/* Compute jump functions for all edges - both direct and indirect - outgoing
   from BB.  */

static void
ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi,
				   basic_block bb)
{
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  int i;
  struct cgraph_edge *cs;

  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
    {
      struct cgraph_node *callee = cs->callee;

      if (callee)
	{
	  callee = callee->ultimate_alias_target ();
	  /* We do not need to bother analyzing calls to unknown functions
	     unless they may become known during lto/whopr.  */
	  if (!callee->definition && !flag_lto)
	    continue;
	}
      ipa_compute_jump_functions_for_edge (fbi, cs);
    }
}

/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}

/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}

1910 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1911 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1912 indirect call graph edge. */
1914 static struct cgraph_edge *
1915 ipa_note_param_call (struct cgraph_node *node, int param_index,
1916 gcall *stmt)
1918 struct cgraph_edge *cs;
1920 cs = node->get_edge (stmt);
1921 cs->indirect_info->param_index = param_index;
1922 cs->indirect_info->agg_contents = 0;
1923 cs->indirect_info->member_ptr = 0;
1924 cs->indirect_info->guaranteed_unmodified = 0;
1925 return cs;
1928 /* Analyze the CALL and examine uses of formal parameters of the caller
1929 FBI->node (described by FBI->info). Currently it checks whether the
1930 call is made through a pointer that is a formal parameter; if so, the
1931 parameter is marked with the called flag and an indirect call graph edge
1932 describing the call is created. This is very simple for ordinary pointers
1933 represented in SSA but not so nice when it comes to member pointers. The
1934 ugly part of this function does nothing more than try to match the
1935 pattern of such a call. An example of such a pattern is the gimple dump
1936 pattern of such a call. An example of such a pattern is the gimple dump
1937 below, the call is on the last line:
1939 <bb 2>:
1940 f$__delta_5 = f.__delta;
1941 f$__pfn_24 = f.__pfn;
1943 or
1944 <bb 2>:
1945 f$__delta_5 = MEM[(struct *)&f];
1946 f$__pfn_24 = MEM[(struct *)&f + 4B];
1948 and a few lines below:
1950 <bb 5>
1951 D.2496_3 = (int) f$__pfn_24;
1952 D.2497_4 = D.2496_3 & 1;
1953 if (D.2497_4 != 0)
1954 goto <bb 3>;
1955 else
1956 goto <bb 4>;
1958 <bb 6>:
1959 D.2500_7 = (unsigned int) f$__delta_5;
1960 D.2501_8 = &S + D.2500_7;
1961 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1962 D.2503_10 = *D.2502_9;
1963 D.2504_12 = f$__pfn_24 + -1;
1964 D.2505_13 = (unsigned int) D.2504_12;
1965 D.2506_14 = D.2503_10 + D.2505_13;
1966 D.2507_15 = *D.2506_14;
1967 iftmp.11_16 = (String:: *) D.2507_15;
1969 <bb 7>:
1970 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1971 D.2500_19 = (unsigned int) f$__delta_5;
1972 D.2508_20 = &S + D.2500_19;
1973 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1975 Such patterns are the results of simple calls through a member pointer:
1977 int doprinting (int (MyString::* f)(int) const)
1979 MyString S ("somestring");
1981 return (S.*f)(4);
1984 Moreover, the function also looks for called pointers loaded from aggregates
1985 passed by value or reference. */
1987 static void
1988 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
1989 tree target)
1991 struct ipa_node_params *info = fbi->info;
1992 HOST_WIDE_INT offset;
1993 bool by_ref;
1995 if (SSA_NAME_IS_DEFAULT_DEF (target))
1997 tree var = SSA_NAME_VAR (target);
1998 int index = ipa_get_param_decl_index (info, var);
1999 if (index >= 0)
2000 ipa_note_param_call (fbi->node, index, call);
2001 return;
2004 int index;
2005 gimple *def = SSA_NAME_DEF_STMT (target);
2006 bool guaranteed_unmodified;
2007 if (gimple_assign_single_p (def)
2008 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2009 gimple_assign_rhs1 (def), &index, &offset,
2010 NULL, &by_ref, &guaranteed_unmodified))
2012 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2013 cs->indirect_info->offset = offset;
2014 cs->indirect_info->agg_contents = 1;
2015 cs->indirect_info->by_ref = by_ref;
2016 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
2017 return;
2020 /* Now we need to try to match the complex pattern of calling a member
2021 pointer. */
2022 if (gimple_code (def) != GIMPLE_PHI
2023 || gimple_phi_num_args (def) != 2
2024 || !POINTER_TYPE_P (TREE_TYPE (target))
2025 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2026 return;
2028 /* First, we need to check whether one of these is a load from a member
2029 pointer that is a parameter to this function. */
2030 tree n1 = PHI_ARG_DEF (def, 0);
2031 tree n2 = PHI_ARG_DEF (def, 1);
2032 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2033 return;
2034 gimple *d1 = SSA_NAME_DEF_STMT (n1);
2035 gimple *d2 = SSA_NAME_DEF_STMT (n2);
2037 tree rec;
2038 basic_block bb, virt_bb;
2039 basic_block join = gimple_bb (def);
2040 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2042 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2043 return;
2045 bb = EDGE_PRED (join, 0)->src;
2046 virt_bb = gimple_bb (d2);
2048 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2050 bb = EDGE_PRED (join, 1)->src;
2051 virt_bb = gimple_bb (d1);
2053 else
2054 return;
2056 /* Second, we need to check that the basic blocks are laid out in the way
2057 corresponding to the pattern. */
2059 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2060 || single_pred (virt_bb) != bb
2061 || single_succ (virt_bb) != join)
2062 return;
2064 /* Third, let's see that the branching is done depending on the least
2065 significant bit of the pfn. */
2067 gimple *branch = last_stmt (bb);
2068 if (!branch || gimple_code (branch) != GIMPLE_COND)
2069 return;
2071 if ((gimple_cond_code (branch) != NE_EXPR
2072 && gimple_cond_code (branch) != EQ_EXPR)
2073 || !integer_zerop (gimple_cond_rhs (branch)))
2074 return;
2076 tree cond = gimple_cond_lhs (branch);
2077 if (!ipa_is_ssa_with_stmt_def (cond))
2078 return;
2080 def = SSA_NAME_DEF_STMT (cond);
2081 if (!is_gimple_assign (def)
2082 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2083 || !integer_onep (gimple_assign_rhs2 (def)))
2084 return;
2086 cond = gimple_assign_rhs1 (def);
2087 if (!ipa_is_ssa_with_stmt_def (cond))
2088 return;
2090 def = SSA_NAME_DEF_STMT (cond);
2092 if (is_gimple_assign (def)
2093 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2095 cond = gimple_assign_rhs1 (def);
2096 if (!ipa_is_ssa_with_stmt_def (cond))
2097 return;
2098 def = SSA_NAME_DEF_STMT (cond);
2101 tree rec2;
2102 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2103 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2104 == ptrmemfunc_vbit_in_delta),
2105 NULL);
2106 if (rec != rec2)
2107 return;
2109 index = ipa_get_param_decl_index (info, rec);
2110 if (index >= 0
2111 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2113 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2114 cs->indirect_info->offset = offset;
2115 cs->indirect_info->agg_contents = 1;
2116 cs->indirect_info->member_ptr = 1;
2117 cs->indirect_info->guaranteed_unmodified = 1;
2120 return;
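/* A minimal example of the simple SSA case handled at the top of this
   function:

     void
     call_it (void (*cb) (int))
     {
       cb (5);
     }

   Here the call target is the default definition of the SSA name of the
   parameter CB, so the indirect edge is simply noted as a call to formal
   parameter number 0.  */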
2123 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2124 object referenced in the expression is a formal parameter of the caller
2125 FBI->node (described by FBI->info), create a call note for the
2126 statement. */
2128 static void
2129 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2130 gcall *call, tree target)
2132 tree obj = OBJ_TYPE_REF_OBJECT (target);
2133 int index;
2134 HOST_WIDE_INT anc_offset;
2136 if (!flag_devirtualize)
2137 return;
2139 if (TREE_CODE (obj) != SSA_NAME)
2140 return;
2142 struct ipa_node_params *info = fbi->info;
2143 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2145 struct ipa_jump_func jfunc;
2146 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2147 return;
2149 anc_offset = 0;
2150 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2151 gcc_assert (index >= 0);
2152 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2153 call, &jfunc))
2154 return;
2156 else
2158 struct ipa_jump_func jfunc;
2159 gimple *stmt = SSA_NAME_DEF_STMT (obj);
2160 tree expr;
2162 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2163 if (!expr)
2164 return;
2165 index = ipa_get_param_decl_index (info,
2166 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2167 gcc_assert (index >= 0);
2168 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2169 call, &jfunc, anc_offset))
2170 return;
2173 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2174 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2175 ii->offset = anc_offset;
2176 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2177 ii->otr_type = obj_type_ref_class (target);
2178 ii->polymorphic = 1;
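/* For illustration (the exact gimple varies and this source form is of
   course C++):

     int
     dispatch (struct A *a)
     {
       return a->foo ();	// foo is virtual
     }

   yields an OBJ_TYPE_REF whose object is the default definition of the
   parameter, so the code above creates a polymorphic indirect edge on
   formal parameter 0, recording the OTR token and class in it.  */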
2181 /* Analyze the call statement CALL, determining whether and how it utilizes
2182 formal parameters of the caller FBI->node (described by FBI->info). */
2185 static void
2186 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2188 tree target = gimple_call_fn (call);
2190 if (!target
2191 || (TREE_CODE (target) != SSA_NAME
2192 && !virtual_method_call_p (target)))
2193 return;
2195 struct cgraph_edge *cs = fbi->node->get_edge (call);
2196 /* If we previously turned the call into a direct call, there is
2197 no need to analyze. */
2198 if (cs && !cs->indirect_unknown_callee)
2199 return;
2201 if (cs->indirect_info->polymorphic && flag_devirtualize)
2203 tree instance;
2204 tree target = gimple_call_fn (call);
2205 ipa_polymorphic_call_context context (current_function_decl,
2206 target, call, &instance);
2208 gcc_checking_assert (cs->indirect_info->otr_type
2209 == obj_type_ref_class (target));
2210 gcc_checking_assert (cs->indirect_info->otr_token
2211 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2213 cs->indirect_info->vptr_changed
2214 = !context.get_dynamic_type (instance,
2215 OBJ_TYPE_REF_OBJECT (target),
2216 obj_type_ref_class (target), call);
2217 cs->indirect_info->context = context;
2220 if (TREE_CODE (target) == SSA_NAME)
2221 ipa_analyze_indirect_call_uses (fbi, call, target);
2222 else if (virtual_method_call_p (target))
2223 ipa_analyze_virtual_call_uses (fbi, call, target);
2227 /* Analyze the statement STMT with respect to formal parameters (described
2228 in FBI->info) of the caller given by FBI->node. Currently it only checks
2229 whether formal parameters are called. */
2231 static void
2232 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2234 if (is_gimple_call (stmt))
2235 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2238 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2239 If OP is a parameter declaration, mark it as used in the info structure
2240 passed in DATA. */
2242 static bool
2243 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2245 struct ipa_node_params *info = (struct ipa_node_params *) data;
2247 op = get_base_address (op);
2248 if (op
2249 && TREE_CODE (op) == PARM_DECL)
2251 int index = ipa_get_param_decl_index (info, op);
2252 gcc_assert (index >= 0);
2253 ipa_set_param_used (info, index, true);
2256 return false;
2259 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2260 the findings in various structures of the associated ipa_node_params
2261 structure, such as parameter flags, notes etc. FBI holds various data about
2262 the function being analyzed. */
2264 static void
2265 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2267 gimple_stmt_iterator gsi;
2268 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2270 gimple *stmt = gsi_stmt (gsi);
2272 if (is_gimple_debug (stmt))
2273 continue;
2275 ipa_analyze_stmt_uses (fbi, stmt);
2276 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2277 visit_ref_for_mod_analysis,
2278 visit_ref_for_mod_analysis,
2279 visit_ref_for_mod_analysis);
2281 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2282 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2283 visit_ref_for_mod_analysis,
2284 visit_ref_for_mod_analysis,
2285 visit_ref_for_mod_analysis);
2288 /* Calculate controlled uses of parameters of NODE. */
2290 static void
2291 ipa_analyze_controlled_uses (struct cgraph_node *node)
2293 struct ipa_node_params *info = IPA_NODE_REF (node);
2295 for (int i = 0; i < ipa_get_param_count (info); i++)
2297 tree parm = ipa_get_param (info, i);
2298 int controlled_uses = 0;
2300 /* For SSA regs see if parameter is used. For non-SSA we compute
2301 the flag during modification analysis. */
2302 if (is_gimple_reg (parm))
2304 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2305 parm);
2306 if (ddef && !has_zero_uses (ddef))
2308 imm_use_iterator imm_iter;
2309 use_operand_p use_p;
2311 ipa_set_param_used (info, i, true);
2312 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2313 if (!is_gimple_call (USE_STMT (use_p)))
2315 if (!is_gimple_debug (USE_STMT (use_p)))
2317 controlled_uses = IPA_UNDESCRIBED_USE;
2318 break;
2321 else
2322 controlled_uses++;
2324 else
2325 controlled_uses = 0;
2327 else
2328 controlled_uses = IPA_UNDESCRIBED_USE;
2329 ipa_set_controlled_uses (info, i, controlled_uses);
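/* An example of the loop above (SINK is just an illustrative extern):

     extern void sink (void *);

     void
     f (void *p)
     {
       sink (p);
       sink (p);
     }

   Every use of the default definition of P is an argument of a call, so
   P ends up with controlled_uses == 2. A single non-call, non-debug use
   (say, an addition) would instead force IPA_UNDESCRIBED_USE.  */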
2333 /* Free the vectors held in BI. */
2335 static void
2336 free_ipa_bb_info (struct ipa_bb_info *bi)
2338 bi->cg_edges.release ();
2339 bi->param_aa_statuses.release ();
2342 /* Dominator walker driving the analysis. */
2344 class analysis_dom_walker : public dom_walker
2346 public:
2347 analysis_dom_walker (struct ipa_func_body_info *fbi)
2348 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2350 virtual edge before_dom_children (basic_block);
2352 private:
2353 struct ipa_func_body_info *m_fbi;
2356 edge
2357 analysis_dom_walker::before_dom_children (basic_block bb)
2359 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2360 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2361 return NULL;
2364 /* Release body info FBI. */
2366 void
2367 ipa_release_body_info (struct ipa_func_body_info *fbi)
2369 int i;
2370 struct ipa_bb_info *bi;
2372 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2373 free_ipa_bb_info (bi);
2374 fbi->bb_infos.release ();
2377 /* Initialize the array describing properties of formal parameters
2378 of NODE, analyze their uses and compute jump functions associated
2379 with actual arguments of calls from within NODE. */
2381 void
2382 ipa_analyze_node (struct cgraph_node *node)
2384 struct ipa_func_body_info fbi;
2385 struct ipa_node_params *info;
2387 ipa_check_create_node_params ();
2388 ipa_check_create_edge_args ();
2389 info = IPA_NODE_REF (node);
2391 if (info->analysis_done)
2392 return;
2393 info->analysis_done = 1;
2395 if (ipa_func_spec_opts_forbid_analysis_p (node))
2397 for (int i = 0; i < ipa_get_param_count (info); i++)
2399 ipa_set_param_used (info, i, true);
2400 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2402 return;
2405 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2406 push_cfun (func);
2407 calculate_dominance_info (CDI_DOMINATORS);
2408 ipa_initialize_node_params (node);
2409 ipa_analyze_controlled_uses (node);
2411 fbi.node = node;
2412 fbi.info = IPA_NODE_REF (node);
2413 fbi.bb_infos = vNULL;
2414 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2415 fbi.param_count = ipa_get_param_count (info);
2416 fbi.aa_walked = 0;
2418 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2420 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2421 bi->cg_edges.safe_push (cs);
2424 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2426 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2427 bi->cg_edges.safe_push (cs);
2430 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2432 ipa_release_body_info (&fbi);
2433 free_dominance_info (CDI_DOMINATORS);
2434 pop_cfun ();
2437 /* Update the jump functions associated with call graph edge E when the call
2438 graph edge CS is being inlined, assuming that E->caller is already (possibly
2439 indirectly) inlined into CS->callee and that E has not been inlined. */
2441 static void
2442 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2443 struct cgraph_edge *e)
2445 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2446 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2447 int count = ipa_get_cs_argument_count (args);
2448 int i;
2450 for (i = 0; i < count; i++)
2452 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2453 struct ipa_polymorphic_call_context *dst_ctx
2454 = ipa_get_ith_polymorhic_call_context (args, i);
2456 if (dst->type == IPA_JF_ANCESTOR)
2458 struct ipa_jump_func *src;
2459 int dst_fid = dst->value.ancestor.formal_id;
2460 struct ipa_polymorphic_call_context *src_ctx
2461 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2463 /* Variable number of arguments can cause havoc if we try to access
2464 one that does not exist in the inlined edge. So make sure we
2465 don't. */
2466 if (dst_fid >= ipa_get_cs_argument_count (top))
2468 ipa_set_jf_unknown (dst);
2469 continue;
2472 src = ipa_get_ith_jump_func (top, dst_fid);
2474 if (src_ctx && !src_ctx->useless_p ())
2476 struct ipa_polymorphic_call_context ctx = *src_ctx;
2478 /* TODO: Make type preserved safe WRT contexts. */
2479 if (!ipa_get_jf_ancestor_type_preserved (dst))
2480 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2481 ctx.offset_by (dst->value.ancestor.offset);
2482 if (!ctx.useless_p ())
2484 if (!dst_ctx)
2486 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2487 count);
2488 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2491 dst_ctx->combine_with (ctx);
2495 if (src->agg.items
2496 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2498 struct ipa_agg_jf_item *item;
2499 int j;
2501 /* Currently we do not produce clobber aggregate jump functions,
2502 replace with merging when we do. */
2503 gcc_assert (!dst->agg.items);
2505 dst->agg.items = vec_safe_copy (src->agg.items);
2506 dst->agg.by_ref = src->agg.by_ref;
2507 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2508 item->offset -= dst->value.ancestor.offset;
2511 if (src->type == IPA_JF_PASS_THROUGH
2512 && src->value.pass_through.operation == NOP_EXPR)
2514 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2515 dst->value.ancestor.agg_preserved &=
2516 src->value.pass_through.agg_preserved;
2518 else if (src->type == IPA_JF_ANCESTOR)
2520 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2521 dst->value.ancestor.offset += src->value.ancestor.offset;
2522 dst->value.ancestor.agg_preserved &=
2523 src->value.ancestor.agg_preserved;
2525 else
2526 ipa_set_jf_unknown (dst);
2528 else if (dst->type == IPA_JF_PASS_THROUGH)
2530 struct ipa_jump_func *src;
2531 /* We must check the range due to calls with a variable number of arguments,
2532 and we cannot combine jump functions with operations. */
2533 if (dst->value.pass_through.operation == NOP_EXPR
2534 && (dst->value.pass_through.formal_id
2535 < ipa_get_cs_argument_count (top)))
2537 int dst_fid = dst->value.pass_through.formal_id;
2538 src = ipa_get_ith_jump_func (top, dst_fid);
2539 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2540 struct ipa_polymorphic_call_context *src_ctx
2541 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2543 if (src_ctx && !src_ctx->useless_p ())
2545 struct ipa_polymorphic_call_context ctx = *src_ctx;
2547 /* TODO: Make type preserved safe WRT contexts. */
2548 if (!ipa_get_jf_pass_through_type_preserved (dst))
2549 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2550 if (!ctx.useless_p ())
2552 if (!dst_ctx)
2554 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2555 count);
2556 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2558 dst_ctx->combine_with (ctx);
2561 switch (src->type)
2563 case IPA_JF_UNKNOWN:
2564 ipa_set_jf_unknown (dst);
2565 break;
2566 case IPA_JF_CONST:
2567 ipa_set_jf_cst_copy (dst, src);
2568 break;
2570 case IPA_JF_PASS_THROUGH:
2572 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2573 enum tree_code operation;
2574 operation = ipa_get_jf_pass_through_operation (src);
2576 if (operation == NOP_EXPR)
2578 bool agg_p;
2579 agg_p = dst_agg_p
2580 && ipa_get_jf_pass_through_agg_preserved (src);
2581 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2583 else
2585 tree operand = ipa_get_jf_pass_through_operand (src);
2586 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2587 operation);
2589 break;
2591 case IPA_JF_ANCESTOR:
2593 bool agg_p;
2594 agg_p = dst_agg_p
2595 && ipa_get_jf_ancestor_agg_preserved (src);
2596 ipa_set_ancestor_jf (dst,
2597 ipa_get_jf_ancestor_offset (src),
2598 ipa_get_jf_ancestor_formal_id (src),
2599 agg_p);
2600 break;
2602 default:
2603 gcc_unreachable ();
2606 if (src->agg.items
2607 && (dst_agg_p || !src->agg.by_ref))
2609 /* Currently we do not produce clobber aggregate jump
2610 functions, replace with merging when we do. */
2611 gcc_assert (!dst->agg.items);
2613 dst->agg.by_ref = src->agg.by_ref;
2614 dst->agg.items = vec_safe_copy (src->agg.items);
2617 else
2618 ipa_set_jf_unknown (dst);
2623 /* If TARGET is an addr_expr of a function declaration, make it the
2624 (speculative, when SPECULATIVE is true) destination of an indirect edge IE
2625 and return the edge. Otherwise, return NULL. */
2627 struct cgraph_edge *
2628 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2629 bool speculative)
2631 struct cgraph_node *callee;
2632 struct inline_edge_summary *es = inline_edge_summary (ie);
2633 bool unreachable = false;
2635 if (TREE_CODE (target) == ADDR_EXPR)
2636 target = TREE_OPERAND (target, 0);
2637 if (TREE_CODE (target) != FUNCTION_DECL)
2639 target = canonicalize_constructor_val (target, NULL);
2640 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2642 /* Member pointer call that goes through a VMT lookup. */
2643 if (ie->indirect_info->member_ptr
2644 /* Or if the target is not an invariant expression and we do not
2645 know whether it will evaluate to a function at runtime.
2646 This can happen when folding through &VAR, where &VAR
2647 is IP invariant, but VAR itself is not.
2649 TODO: Revisit this when GCC 5 is branched. It seems that
2650 member_ptr check is not needed and that we may try to fold
2651 the expression and see if VAR is readonly. */
2652 || !is_gimple_ip_invariant (target))
2654 if (dump_enabled_p ())
2656 location_t loc = gimple_location_safe (ie->call_stmt);
2657 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2658 "discovered direct call non-invariant "
2659 "%s/%i\n",
2660 ie->caller->name (), ie->caller->order);
2662 return NULL;
2666 if (dump_enabled_p ())
2668 location_t loc = gimple_location_safe (ie->call_stmt);
2669 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2670 "discovered direct call to non-function in %s/%i, "
2671 "making it __builtin_unreachable\n",
2672 ie->caller->name (), ie->caller->order);
2675 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2676 callee = cgraph_node::get_create (target);
2677 unreachable = true;
2679 else
2680 callee = cgraph_node::get (target);
2682 else
2683 callee = cgraph_node::get (target);
2685 /* Because may-edges are not explicitly represented and the vtable may be
2686 external, we may create the first reference to the object in the unit. */
2687 if (!callee || callee->global.inlined_to)
2690 /* We had better ensure we can refer to it.
2691 In the case of static functions we are out of luck, since we have already
2692 removed the body. In the case of public functions we may or may
2693 not introduce the reference. */
2694 if (!canonicalize_constructor_val (target, NULL)
2695 || !TREE_PUBLIC (target))
2697 if (dump_file)
2698 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2699 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2700 xstrdup_for_dump (ie->caller->name ()),
2701 ie->caller->order,
2702 xstrdup_for_dump (ie->callee->name ()),
2703 ie->callee->order);
2704 return NULL;
2706 callee = cgraph_node::get_create (target);
2709 /* If the edge is already speculative, check that the target agrees. */
2710 if (speculative && ie->speculative)
2712 struct cgraph_edge *e2;
2713 struct ipa_ref *ref;
2714 ie->speculative_call_info (e2, ie, ref);
2715 if (e2->callee->ultimate_alias_target ()
2716 != callee->ultimate_alias_target ())
2718 if (dump_file)
2719 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2720 "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
2721 xstrdup_for_dump (ie->caller->name ()),
2722 ie->caller->order,
2723 xstrdup_for_dump (callee->name ()),
2724 callee->order,
2725 xstrdup_for_dump (e2->callee->name ()),
2726 e2->callee->order);
2728 else
2730 if (dump_file)
2731 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2732 "(%s/%i -> %s/%i) this agree with previous speculation.\n",
2733 xstrdup_for_dump (ie->caller->name ()),
2734 ie->caller->order,
2735 xstrdup_for_dump (callee->name ()),
2736 callee->order);
2738 return NULL;
2741 if (!dbg_cnt (devirt))
2742 return NULL;
2744 ipa_check_create_node_params ();
2746 /* We cannot make edges to inline clones. It is a bug if someone removed
2747 the cgraph node too early. */
2748 gcc_assert (!callee->global.inlined_to);
2750 if (dump_file && !unreachable)
2752 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2753 "(%s/%i -> %s/%i), for stmt ",
2754 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2755 speculative ? "speculative" : "known",
2756 xstrdup_for_dump (ie->caller->name ()),
2757 ie->caller->order,
2758 xstrdup_for_dump (callee->name ()),
2759 callee->order);
2760 if (ie->call_stmt)
2761 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2762 else
2763 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2765 if (dump_enabled_p ())
2767 location_t loc = gimple_location_safe (ie->call_stmt);
2769 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2770 "converting indirect call in %s to direct call to %s\n",
2771 ie->caller->name (), callee->name ());
2773 if (!speculative)
2775 struct cgraph_edge *orig = ie;
2776 ie = ie->make_direct (callee);
2777 /* If we resolved a speculative edge, the cost is already up to date
2778 for the direct call (adjusted by inline_edge_duplication_hook). */
2779 if (ie == orig)
2781 es = inline_edge_summary (ie);
2782 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2783 - eni_size_weights.call_cost);
2784 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2785 - eni_time_weights.call_cost);
2788 else
2790 if (!callee->can_be_discarded_p ())
2792 cgraph_node *alias;
2793 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2794 if (alias)
2795 callee = alias;
2797 /* make_speculative will update ie's cost to the direct call cost. */
2798 ie = ie->make_speculative
2799 (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
2802 return ie;
2805 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
2806 CONSTRUCTOR and return it. Return NULL if the search fails for some
2807 reason. */
2809 static tree
2810 find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
2812 tree type = TREE_TYPE (constructor);
2813 if (TREE_CODE (type) != ARRAY_TYPE
2814 && TREE_CODE (type) != RECORD_TYPE)
2815 return NULL;
2817 unsigned ix;
2818 tree index, val;
2819 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
2821 HOST_WIDE_INT elt_offset;
2822 if (TREE_CODE (type) == ARRAY_TYPE)
2824 offset_int off;
2825 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
2826 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
2828 if (index)
2830 off = wi::to_offset (index);
2831 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
2833 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
2834 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
2835 off = wi::sext (off - wi::to_offset (low_bound),
2836 TYPE_PRECISION (TREE_TYPE (index)));
2838 off *= wi::to_offset (unit_size);
2840 else
2841 off = wi::to_offset (unit_size) * ix;
2843 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
2844 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
2845 continue;
2846 elt_offset = off.to_shwi ();
2848 else if (TREE_CODE (type) == RECORD_TYPE)
2850 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
2851 if (DECL_BIT_FIELD (index))
2852 continue;
2853 elt_offset = int_bit_position (index);
2855 else
2856 gcc_unreachable ();
2858 if (elt_offset > req_offset)
2859 return NULL;
2861 if (TREE_CODE (val) == CONSTRUCTOR)
2862 return find_constructor_constant_at_offset (val,
2863 req_offset - elt_offset);
2865 if (elt_offset == req_offset
2866 && is_gimple_reg_type (TREE_TYPE (val))
2867 && is_gimple_ip_invariant (val))
2868 return val;
2870 return NULL;
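/* A worked instance of the offset arithmetic above, assuming 32-bit int
   and 8-bit units:

     static const int table[4] = { 0, 10, 20, 30 };

   A lookup with REQ_OFFSET 64 reaches the element with index 2, where
   off = 2 * 4 bytes shifted left by LOG2_BITS_PER_UNIT is 64 bits; the
   offsets match and the invariant 20 is returned.  */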
2873 /* Check whether SCALAR could be used to look up an aggregate interprocedural
2874 invariant from a static constructor and if so, return it. Otherwise return
2875 NULL. */
2877 static tree
2878 ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
2880 if (by_ref)
2882 if (TREE_CODE (scalar) != ADDR_EXPR)
2883 return NULL;
2884 scalar = TREE_OPERAND (scalar, 0);
2887 if (!VAR_P (scalar)
2888 || !is_global_var (scalar)
2889 || !TREE_READONLY (scalar)
2890 || !DECL_INITIAL (scalar)
2891 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
2892 return NULL;
2894 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
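/* Continuing the example above: if the argument at a call site is the
   IP-invariant &table, then SCALAR is that ADDR_EXPR, BY_REF is true,
   and a lookup with OFFSET 64 walks the DECL_INITIAL of table and
   yields 20.  */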
2897 /* Retrieve value from aggregate jump function AGG or static initializer of
2898 SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
2899 none. BY_REF specifies whether the value has to be passed by reference or
2900 by value. If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
2901 to is set to true if the value comes from an initializer of a constant. */
2903 tree
2904 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar,
2905 HOST_WIDE_INT offset, bool by_ref,
2906 bool *from_global_constant)
2908 struct ipa_agg_jf_item *item;
2909 int i;
2911 if (scalar)
2913 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
2914 if (res)
2916 if (from_global_constant)
2917 *from_global_constant = true;
2918 return res;
2922 if (!agg
2923 || by_ref != agg->by_ref)
2924 return NULL;
2926 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2927 if (item->offset == offset)
2929 /* Currently we do not have clobber values, return NULL for them once
2930 we do. */
2931 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2932 if (from_global_constant)
2933 *from_global_constant = false;
2934 return item->value;
2936 return NULL;
2939 /* Remove a reference to SYMBOL from the list of references of a node given by
2940 reference description RDESC. Return true if the reference has been
2941 successfully found and removed. */
2943 static bool
2944 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2946 struct ipa_ref *to_del;
2947 struct cgraph_edge *origin;
2949 origin = rdesc->cs;
2950 if (!origin)
2951 return false;
2952 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
2953 origin->lto_stmt_uid);
2954 if (!to_del)
2955 return false;
2957 to_del->remove_reference ();
2958 if (dump_file)
2959 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2960 xstrdup_for_dump (origin->caller->name ()),
2961 origin->caller->order, xstrdup_for_dump (symbol->name ()));
2962 return true;
2965 /* If JFUNC has a reference description with refcount different from
2966 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2967 NULL. JFUNC must be a constant jump function. */
2969 static struct ipa_cst_ref_desc *
2970 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2972 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2973 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2974 return rdesc;
2975 else
2976 return NULL;
2979 /* If the value of constant jump function JFUNC is an address of a function
2980 declaration, return the associated call graph node. Otherwise return
2981 NULL. */
2983 static cgraph_node *
2984 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2986 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2987 tree cst = ipa_get_jf_constant (jfunc);
2988 if (TREE_CODE (cst) != ADDR_EXPR
2989 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2990 return NULL;
2992 return cgraph_node::get (TREE_OPERAND (cst, 0));
2996 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2997 refcount and if it hits zero, remove the reference to the symbol it describes
2998 from the caller of the edge specified in the rdesc. Return false if either
2999 the symbol or the reference could not be found, otherwise return true. */
3001 static bool
3002 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3004 struct ipa_cst_ref_desc *rdesc;
3005 if (jfunc->type == IPA_JF_CONST
3006 && (rdesc = jfunc_rdesc_usable (jfunc))
3007 && --rdesc->refcount == 0)
3009 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3010 if (!symbol)
3011 return false;
3013 return remove_described_reference (symbol, rdesc);
3015 return true;
3018 /* Try to find a destination for indirect edge IE that corresponds to a simple
3019 call or a call of a member function pointer and where the destination is a
3020 pointer formal parameter described by jump function JFUNC. If it can be
3021 determined, return the newly direct edge, otherwise return NULL.
3022 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
3024 static struct cgraph_edge *
3025 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3026 struct ipa_jump_func *jfunc,
3027 struct ipa_node_params *new_root_info)
3029 struct cgraph_edge *cs;
3030 tree target;
3031 bool agg_contents = ie->indirect_info->agg_contents;
3032 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc);
3033 if (agg_contents)
3035 bool from_global_constant;
3036 target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3037 ie->indirect_info->offset,
3038 ie->indirect_info->by_ref,
3039 &from_global_constant);
3040 if (target
3041 && !from_global_constant
3042 && !ie->indirect_info->guaranteed_unmodified)
3043 return NULL;
3045 else
3046 target = scalar;
3047 if (!target)
3048 return NULL;
3049 cs = ipa_make_edge_direct_to_target (ie, target);
3051 if (cs && !agg_contents)
3053 bool ok;
3054 gcc_checking_assert (cs->callee
3055 && (cs != ie
3056 || jfunc->type != IPA_JF_CONST
3057 || !cgraph_node_for_jfunc (jfunc)
3058 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3059 ok = try_decrement_rdesc_refcount (jfunc);
3060 gcc_checking_assert (ok);
3063 return cs;
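/* A typical scenario in which the above succeeds (names are purely
   illustrative):

     static void run (void (*cb) (void)) { cb (); }
     void entry (void) { run (hook); }

   After RUN is inlined into ENTRY, the jump function of the inlined call
   site says CB is &hook, so the indirect edge for "cb ()" is turned into
   a direct edge to HOOK here.  */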
3066 /* Return the target to be used in cases of impossible devirtualization. IE
3067 and target (the latter can be NULL) are dumped when dumping is enabled. */
3069 tree
3070 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3072 if (dump_file)
3074 if (target)
3075 fprintf (dump_file,
3076 "Type inconsistent devirtualization: %s/%i->%s\n",
3077 ie->caller->name (), ie->caller->order,
3078 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3079 else
3080 fprintf (dump_file,
3081 "No devirtualization target in %s/%i\n",
3082 ie->caller->name (), ie->caller->order);
3084 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3085 cgraph_node::get_create (new_target);
3086 return new_target;
3089 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3090 call based on a formal parameter which is described by jump function JFUNC
3091 and if it can be determined, make it direct and return the direct edge.
3092 Otherwise, return NULL. CTX describes the polymorphic context that the
3093 parameter the call is based on brings along with it. */
3095 static struct cgraph_edge *
3096 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3097 struct ipa_jump_func *jfunc,
3098 struct ipa_polymorphic_call_context ctx)
3100 tree target = NULL;
3101 bool speculative = false;
3103 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
3104 return NULL;
3106 gcc_assert (!ie->indirect_info->by_ref);
3108 /* Try to do lookup via known virtual table pointer value. */
3109 if (!ie->indirect_info->vptr_changed
3110 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
3112 tree vtable;
3113 unsigned HOST_WIDE_INT offset;
3114 tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3115 : NULL;
3116 tree t = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3117 ie->indirect_info->offset,
3118 true);
3119 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3121 bool can_refer;
3122 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3123 vtable, offset, &can_refer);
3124 if (can_refer)
3126 if (!t
3127 || (TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
3128 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
3129 || !possible_polymorphic_call_target_p
3130 (ie, cgraph_node::get (t)))
3132 /* Do not speculate builtin_unreachable, it is stupid! */
3133 if (!ie->indirect_info->vptr_changed)
3134 target = ipa_impossible_devirt_target (ie, target);
3135 else
3136 target = NULL;
3138 else
3140 target = t;
3141 speculative = ie->indirect_info->vptr_changed;
3147 ipa_polymorphic_call_context ie_context (ie);
3148 vec <cgraph_node *>targets;
3149 bool final;
3151 ctx.offset_by (ie->indirect_info->offset);
3152 if (ie->indirect_info->vptr_changed)
3153 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3154 ie->indirect_info->otr_type);
3155 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3156 targets = possible_polymorphic_call_targets
3157 (ie->indirect_info->otr_type,
3158 ie->indirect_info->otr_token,
3159 ctx, &final);
3160 if (final && targets.length () <= 1)
3162 speculative = false;
3163 if (targets.length () == 1)
3164 target = targets[0]->decl;
3165 else
3166 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3168 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3169 && !ie->speculative && ie->maybe_hot_p ())
3171 cgraph_node *n;
3172 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3173 ie->indirect_info->otr_token,
3174 ie->indirect_info->context);
3175 if (n)
3177 target = n->decl;
3178 speculative = true;
3182 if (target)
3184 if (!possible_polymorphic_call_target_p
3185 (ie, cgraph_node::get_create (target)))
3187 if (speculative)
3188 return NULL;
3189 target = ipa_impossible_devirt_target (ie, target);
3191 return ipa_make_edge_direct_to_target (ie, target, speculative);
3193 else
3194 return NULL;
3197 /* Update the param called notes associated with NODE when CS is being inlined,
3198 assuming NODE is (potentially indirectly) inlined into CS->callee.
3199 Moreover, if the callee is discovered to be constant, create a new cgraph
3200 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3201 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
3203 static bool
3204 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3205 struct cgraph_node *node,
3206 vec<cgraph_edge *> *new_edges)
3208 struct ipa_edge_args *top;
3209 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3210 struct ipa_node_params *new_root_info;
3211 bool res = false;
3213 ipa_check_create_edge_args ();
3214 top = IPA_EDGE_REF (cs);
3215 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3216 ? cs->caller->global.inlined_to
3217 : cs->caller);
3219 for (ie = node->indirect_calls; ie; ie = next_ie)
3221 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3222 struct ipa_jump_func *jfunc;
3223 int param_index;
3224 cgraph_node *spec_target = NULL;
3226 next_ie = ie->next_callee;
3228 if (ici->param_index == -1)
3229 continue;
3231 /* We must check the range due to calls with a variable number of arguments. */
3232 if (ici->param_index >= ipa_get_cs_argument_count (top))
3234 ici->param_index = -1;
3235 continue;
3238 param_index = ici->param_index;
3239 jfunc = ipa_get_ith_jump_func (top, param_index);
3241 if (ie->speculative)
3243 struct cgraph_edge *de;
3244 struct ipa_ref *ref;
3245 ie->speculative_call_info (de, ie, ref);
3246 spec_target = de->callee;
3249 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3250 new_direct_edge = NULL;
3251 else if (ici->polymorphic)
3253 ipa_polymorphic_call_context ctx;
3254 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3255 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3257 else
3258 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3259 new_root_info);
3260 /* If speculation was removed, then we need to do nothing. */
3261 if (new_direct_edge && new_direct_edge != ie
3262 && new_direct_edge->callee == spec_target)
3264 new_direct_edge->indirect_inlining_edge = 1;
3265 top = IPA_EDGE_REF (cs);
3266 res = true;
3267 if (!new_direct_edge->speculative)
3268 continue;
3270 else if (new_direct_edge)
3272 new_direct_edge->indirect_inlining_edge = 1;
3273 if (new_direct_edge->call_stmt)
3274 new_direct_edge->call_stmt_cannot_inline_p
3275 = !gimple_check_call_matching_types (
3276 new_direct_edge->call_stmt,
3277 new_direct_edge->callee->decl, false);
3278 if (new_edges)
3280 new_edges->safe_push (new_direct_edge);
3281 res = true;
3283 top = IPA_EDGE_REF (cs);
3284 /* If a speculative edge was introduced, we still need to update
3285 the call info of the indirect edge. */
3286 if (!new_direct_edge->speculative)
3287 continue;
3289 if (jfunc->type == IPA_JF_PASS_THROUGH
3290 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3292 if (ici->agg_contents
3293 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3294 && !ici->polymorphic)
3295 ici->param_index = -1;
3296 else
3298 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3299 if (ici->polymorphic
3300 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3301 ici->vptr_changed = true;
3304 else if (jfunc->type == IPA_JF_ANCESTOR)
3306 if (ici->agg_contents
3307 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3308 && !ici->polymorphic)
3309 ici->param_index = -1;
3310 else
3312 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3313 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3314 if (ici->polymorphic
3315 && !ipa_get_jf_ancestor_type_preserved (jfunc))
3316 ici->vptr_changed = true;
3319 else
3320 /* Either we can find a destination for this edge now or never. */
3321 ici->param_index = -1;
3324 return res;
3327 /* Recursively traverse subtree of NODE (including node) made of inlined
3328 cgraph_edges when CS has been inlined and invoke
3329 update_indirect_edges_after_inlining on all nodes and
3330 update_jump_functions_after_inlining on all non-inlined edges that lead out
3331 of this subtree. Newly discovered indirect edges will be added to
3332 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
3333 created. */
3335 static bool
3336 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3337 struct cgraph_node *node,
3338 vec<cgraph_edge *> *new_edges)
3340 struct cgraph_edge *e;
3341 bool res;
3343 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3345 for (e = node->callees; e; e = e->next_callee)
3346 if (!e->inline_failed)
3347 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3348 else
3349 update_jump_functions_after_inlining (cs, e);
3350 for (e = node->indirect_calls; e; e = e->next_callee)
3351 update_jump_functions_after_inlining (cs, e);
3353 return res;
3356 /* Combine two controlled uses counts as done during inlining. */
3358 static int
3359 combine_controlled_uses_counters (int c, int d)
3361 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3362 return IPA_UNDESCRIBED_USE;
3363 else
3364 return c + d - 1;
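/* For example, if the new root had C = 2 controlled uses of a constant
   (one of them being the inlined call site itself) and the old root had
   D = 3 controlled uses of the corresponding parameter, the call site
   disappears with inlining and the combined count is 2 + 3 - 1 = 4.  */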
3367 /* Propagate the number of controlled users from CS->callee to the new root
3368 of the tree of inlined nodes. */
3370 static void
3371 propagate_controlled_uses (struct cgraph_edge *cs)
3373 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3374 struct cgraph_node *new_root = cs->caller->global.inlined_to
3375 ? cs->caller->global.inlined_to : cs->caller;
3376 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3377 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3378 int count, i;
3380 count = MIN (ipa_get_cs_argument_count (args),
3381 ipa_get_param_count (old_root_info));
3382 for (i = 0; i < count; i++)
3384 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3385 struct ipa_cst_ref_desc *rdesc;
3387 if (jf->type == IPA_JF_PASS_THROUGH)
3389 int src_idx, c, d;
3390 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3391 c = ipa_get_controlled_uses (new_root_info, src_idx);
3392 d = ipa_get_controlled_uses (old_root_info, i);
3394 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3395 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3396 c = combine_controlled_uses_counters (c, d);
3397 ipa_set_controlled_uses (new_root_info, src_idx, c);
3398 if (c == 0 && new_root_info->ipcp_orig_node)
3400 struct cgraph_node *n;
3401 struct ipa_ref *ref;
3402 tree t = new_root_info->known_csts[src_idx];
3404 if (t && TREE_CODE (t) == ADDR_EXPR
3405 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3406 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3407 && (ref = new_root->find_reference (n, NULL, 0)))
3409 if (dump_file)
3410 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3411 "reference from %s/%i to %s/%i.\n",
3412 xstrdup_for_dump (new_root->name ()),
3413 new_root->order,
3414 xstrdup_for_dump (n->name ()), n->order);
3415 ref->remove_reference ();
3419 else if (jf->type == IPA_JF_CONST
3420 && (rdesc = jfunc_rdesc_usable (jf)))
3422 int d = ipa_get_controlled_uses (old_root_info, i);
3423 int c = rdesc->refcount;
3424 rdesc->refcount = combine_controlled_uses_counters (c, d);
3425 if (rdesc->refcount == 0)
3427 tree cst = ipa_get_jf_constant (jf);
3428 struct cgraph_node *n;
3429 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3430 && TREE_CODE (TREE_OPERAND (cst, 0))
3431 == FUNCTION_DECL);
3432 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3433 if (n)
3435 struct cgraph_node *clone;
3436 bool ok;
3437 ok = remove_described_reference (n, rdesc);
3438 gcc_checking_assert (ok);
3440 clone = cs->caller;
3441 while (clone->global.inlined_to
3442 && clone != rdesc->cs->caller
3443 && IPA_NODE_REF (clone)->ipcp_orig_node)
3445 struct ipa_ref *ref;
3446 ref = clone->find_reference (n, NULL, 0);
3447 if (ref)
3449 if (dump_file)
3450 fprintf (dump_file, "ipa-prop: Removing "
3451 "cloning-created reference "
3452 "from %s/%i to %s/%i.\n",
3453 xstrdup_for_dump (clone->name ()),
3454 clone->order,
3455 xstrdup_for_dump (n->name ()),
3456 n->order);
3457 ref->remove_reference ();
3459 clone = clone->callers->caller;
3466 for (i = ipa_get_param_count (old_root_info);
3467 i < ipa_get_cs_argument_count (args);
3468 i++)
3470 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3472 if (jf->type == IPA_JF_CONST)
3474 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3475 if (rdesc)
3476 rdesc->refcount = IPA_UNDESCRIBED_USE;
3478 else if (jf->type == IPA_JF_PASS_THROUGH)
3479 ipa_set_controlled_uses (new_root_info,
3480 jf->value.pass_through.formal_id,
3481 IPA_UNDESCRIBED_USE);
3485 /* Update jump functions and call note functions on inlining the call site CS.
3486 CS is expected to lead to a node already cloned by
3487 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3488 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edge(s) were
3489 created. */
3491 bool
3492 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3493 vec<cgraph_edge *> *new_edges)
3495 bool changed;
3496 /* Do nothing if the preparation phase has not been carried out yet
3497 (i.e. during early inlining). */
3498 if (!ipa_node_params_sum)
3499 return false;
3500 gcc_assert (ipa_edge_args_vector);
3502 propagate_controlled_uses (cs);
3503 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3505 return changed;
3508 /* Frees all dynamically allocated structures that the argument info points
3509 to. */
3511 void
3512 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3514 vec_free (args->jump_functions);
3515 memset (args, 0, sizeof (*args));
3518 /* Free all ipa_edge_args structures. */
3520 void
3521 ipa_free_all_edge_args (void)
3523 int i;
3524 struct ipa_edge_args *args;
3526 if (!ipa_edge_args_vector)
3527 return;
3529 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3530 ipa_free_edge_args_substructures (args);
3532 vec_free (ipa_edge_args_vector);
3535 /* Frees all dynamically allocated structures that the param info points
3536 to. */
3538 ipa_node_params::~ipa_node_params ()
3540 descriptors.release ();
3541 free (lattices);
3542 /* Lattice values and their sources are deallocated with their allocation
3543 pool. */
3544 known_csts.release ();
3545 known_contexts.release ();
3547 lattices = NULL;
3548 ipcp_orig_node = NULL;
3549 analysis_done = 0;
3550 node_enqueued = 0;
3551 do_clone_for_all_contexts = 0;
3552 is_all_contexts_clone = 0;
3553 node_dead = 0;
3556 /* Free all ipa_node_params structures. */
3558 void
3559 ipa_free_all_node_params (void)
3561 delete ipa_node_params_sum;
3562 ipa_node_params_sum = NULL;
3565 /* Grow ipcp_transformations if necessary. */
3567 void
3568 ipcp_grow_transformations_if_necessary (void)
3570 if (vec_safe_length (ipcp_transformations)
3571 <= (unsigned) symtab->cgraph_max_uid)
3572 vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
3575 /* Set the aggregate replacements of NODE to be AGGVALS. */
3577 void
3578 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3579 struct ipa_agg_replacement_value *aggvals)
3581 ipcp_grow_transformations_if_necessary ();
3582 (*ipcp_transformations)[node->uid].agg_values = aggvals;
3585 /* Hook that is called by cgraph.c when an edge is removed. */
3587 static void
3588 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3590 struct ipa_edge_args *args;
3592 /* During IPA-CP updating we can be called on not-yet-analyzed clones. */
3593 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3594 return;
3596 args = IPA_EDGE_REF (cs);
3597 if (args->jump_functions)
3599 struct ipa_jump_func *jf;
3600 int i;
3601 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3603 struct ipa_cst_ref_desc *rdesc;
3604 try_decrement_rdesc_refcount (jf);
3605 if (jf->type == IPA_JF_CONST
3606 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3607 && rdesc->cs == cs)
3608 rdesc->cs = NULL;
3612 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3615 /* Hook that is called by cgraph.c when an edge is duplicated. */
3617 static void
3618 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3619 void *)
3621 struct ipa_edge_args *old_args, *new_args;
3622 unsigned int i;
3624 ipa_check_create_edge_args ();
3626 old_args = IPA_EDGE_REF (src);
3627 new_args = IPA_EDGE_REF (dst);
3629 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3630 if (old_args->polymorphic_call_contexts)
3631 new_args->polymorphic_call_contexts
3632 = vec_safe_copy (old_args->polymorphic_call_contexts);
3634 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3636 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3637 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3639 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3641 if (src_jf->type == IPA_JF_CONST)
3643 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3645 if (!src_rdesc)
3646 dst_jf->value.constant.rdesc = NULL;
3647 else if (src->caller == dst->caller)
3649 struct ipa_ref *ref;
3650 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3651 gcc_checking_assert (n);
3652 ref = src->caller->find_reference (n, src->call_stmt,
3653 src->lto_stmt_uid);
3654 gcc_checking_assert (ref);
3655 dst->caller->clone_reference (ref, ref->stmt);
3657 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3658 dst_rdesc->cs = dst;
3659 dst_rdesc->refcount = src_rdesc->refcount;
3660 dst_rdesc->next_duplicate = NULL;
3661 dst_jf->value.constant.rdesc = dst_rdesc;
3663 else if (src_rdesc->cs == src)
3665 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3666 dst_rdesc->cs = dst;
3667 dst_rdesc->refcount = src_rdesc->refcount;
3668 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3669 src_rdesc->next_duplicate = dst_rdesc;
3670 dst_jf->value.constant.rdesc = dst_rdesc;
3672 else
3674 struct ipa_cst_ref_desc *dst_rdesc;
3675 /* This can happen during inlining, when a JFUNC can refer to a
3676 reference taken in a function up in the tree of inline clones.
3677 We need to find the duplicate that refers to our tree of
3678 inline clones. */
3680 gcc_assert (dst->caller->global.inlined_to);
3681 for (dst_rdesc = src_rdesc->next_duplicate;
3682 dst_rdesc;
3683 dst_rdesc = dst_rdesc->next_duplicate)
3685 struct cgraph_node *top;
3686 top = dst_rdesc->cs->caller->global.inlined_to
3687 ? dst_rdesc->cs->caller->global.inlined_to
3688 : dst_rdesc->cs->caller;
3689 if (dst->caller->global.inlined_to == top)
3690 break;
3692 gcc_assert (dst_rdesc);
3693 dst_jf->value.constant.rdesc = dst_rdesc;
3696 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3697 && src->caller == dst->caller)
3699 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3700 ? dst->caller->global.inlined_to : dst->caller;
3701 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3702 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3704 int c = ipa_get_controlled_uses (root_info, idx);
3705 if (c != IPA_UNDESCRIBED_USE)
3707 c++;
3708 ipa_set_controlled_uses (root_info, idx, c);
3714 /* Analyze a function newly added to the call graph. */
3716 static void
3717 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3719 if (node->has_gimple_body_p ())
3720 ipa_analyze_node (node);
3723 /* Hook that is called by summary when a node is duplicated. */
3725 void
3726 ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
3727 ipa_node_params *old_info,
3728 ipa_node_params *new_info)
3730 ipa_agg_replacement_value *old_av, *new_av;
3732 new_info->descriptors = old_info->descriptors.copy ();
3733 new_info->lattices = NULL;
3734 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3736 new_info->analysis_done = old_info->analysis_done;
3737 new_info->node_enqueued = old_info->node_enqueued;
3738 new_info->versionable = old_info->versionable;
3740 old_av = ipa_get_agg_replacements_for_node (src);
3741 if (old_av)
3743 new_av = NULL;
3744 while (old_av)
3746 struct ipa_agg_replacement_value *v;
3748 v = ggc_alloc<ipa_agg_replacement_value> ();
3749 memcpy (v, old_av, sizeof (*v));
3750 v->next = new_av;
3751 new_av = v;
3752 old_av = old_av->next;
3754 ipa_set_node_agg_value_chain (dst, new_av);
3757 ipcp_transformation_summary *src_trans = ipcp_get_transformation_summary (src);
3759 if (src_trans)
3761 ipcp_grow_transformations_if_necessary ();
3762 src_trans = ipcp_get_transformation_summary (src);
3763 const vec<ipa_vr, va_gc> *src_vr = src_trans->m_vr;
3764 vec<ipa_vr, va_gc> *&dst_vr
3765 = ipcp_get_transformation_summary (dst)->m_vr;
3766 if (vec_safe_length (src_trans->m_vr) > 0)
3768 vec_safe_reserve_exact (dst_vr, src_vr->length ());
3769 for (unsigned i = 0; i < src_vr->length (); ++i)
3770 dst_vr->quick_push ((*src_vr)[i]);
3774 if (src_trans && vec_safe_length (src_trans->bits) > 0)
3776 ipcp_grow_transformations_if_necessary ();
3777 src_trans = ipcp_get_transformation_summary (src);
3778 const vec<ipa_bits, va_gc> *src_bits = src_trans->bits;
3779 vec<ipa_bits, va_gc> *&dst_bits
3780 = ipcp_get_transformation_summary (dst)->bits;
3781 vec_safe_reserve_exact (dst_bits, src_bits->length ());
3782 for (unsigned i = 0; i < src_bits->length (); ++i)
3783 dst_bits->quick_push ((*src_bits)[i]);
3787 /* Register our cgraph hooks if they are not already there. */
3789 void
3790 ipa_register_cgraph_hooks (void)
3792 ipa_check_create_node_params ();
3794 if (!edge_removal_hook_holder)
3795 edge_removal_hook_holder =
3796 symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3797 if (!edge_duplication_hook_holder)
3798 edge_duplication_hook_holder =
3799 symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3800 function_insertion_hook_holder =
3801 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3804 /* Unregister our cgraph hooks. */
3806 static void
3807 ipa_unregister_cgraph_hooks (void)
3809 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
3810 edge_removal_hook_holder = NULL;
3811 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
3812 edge_duplication_hook_holder = NULL;
3813 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3814 function_insertion_hook_holder = NULL;
3817 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3818 longer needed after ipa-cp. */
3820 void
3821 ipa_free_all_structures_after_ipa_cp (void)
3823 if (!optimize && !in_lto_p)
3825 ipa_free_all_edge_args ();
3826 ipa_free_all_node_params ();
3827 ipcp_sources_pool.release ();
3828 ipcp_cst_values_pool.release ();
3829 ipcp_poly_ctx_values_pool.release ();
3830 ipcp_agg_lattice_pool.release ();
3831 ipa_unregister_cgraph_hooks ();
3832 ipa_refdesc_pool.release ();
3836 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3837 longer needed after indirect inlining. */
3839 void
3840 ipa_free_all_structures_after_iinln (void)
3842 ipa_free_all_edge_args ();
3843 ipa_free_all_node_params ();
3844 ipa_unregister_cgraph_hooks ();
3845 ipcp_sources_pool.release ();
3846 ipcp_cst_values_pool.release ();
3847 ipcp_poly_ctx_values_pool.release ();
3848 ipcp_agg_lattice_pool.release ();
3849 ipa_refdesc_pool.release ();
3852 /* Print ipa_tree_map data structures of function NODE to F. */
3855 void
3856 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3858 int i, count;
3859 struct ipa_node_params *info;
3861 if (!node->definition)
3862 return;
3863 info = IPA_NODE_REF (node);
3864 fprintf (f, " function %s/%i parameter descriptors:\n",
3865 node->name (), node->order);
3866 count = ipa_get_param_count (info);
3867 for (i = 0; i < count; i++)
3869 int c;
3871 fprintf (f, " ");
3872 ipa_dump_param (f, info, i);
3873 if (ipa_is_param_used (info, i))
3874 fprintf (f, " used");
3875 c = ipa_get_controlled_uses (info, i);
3876 if (c == IPA_UNDESCRIBED_USE)
3877 fprintf (f, " undescribed_use");
3878 else
3879 fprintf (f, " controlled_uses=%i", c);
3880 fprintf (f, "\n");
3884 /* Print ipa_tree_map data structures of all functions in the
3885 callgraph to F. */
3887 void
3888 ipa_print_all_params (FILE * f)
3890 struct cgraph_node *node;
3892 fprintf (f, "\nFunction parameters:\n");
3893 FOR_EACH_FUNCTION (node)
3894 ipa_print_node_params (f, node);
3897 /* Return a heap-allocated vector containing the formal parameters of FNDECL. */
3899 vec<tree>
3900 ipa_get_vector_of_formal_parms (tree fndecl)
3902 vec<tree> args;
3903 int count;
3904 tree parm;
3906 gcc_assert (!flag_wpa);
3907 count = count_formal_params (fndecl);
3908 args.create (count);
3909 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3910 args.quick_push (parm);
3912 return args;
3915 /* Return a heap-allocated vector containing the types of formal parameters of
3916 function type FNTYPE. */
3918 vec<tree>
3919 ipa_get_vector_of_formal_parm_types (tree fntype)
3921 vec<tree> types;
3922 int count = 0;
3923 tree t;
3925 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3926 count++;
3928 types.create (count);
3929 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3930 types.quick_push (TREE_VALUE (t));
3932 return types;
3935 /* Modify the function declaration FNDECL and its type according to the plan in
3936 ADJUSTMENTS. It also sets the base fields of individual adjustment structures
3937 to reflect the actual parameters being modified, which are determined by the
3938 base_index field. */
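/* An illustrative sketch (the decls and field values below are hypothetical,
   not taken from any real pass): given

     int foo (int a, struct S *p);

   an ADJUSTMENTS vector along the lines of

     { { .op = IPA_PARM_OP_COPY, .base_index = 0 },
       { .op = IPA_PARM_OP_NEW,  .base_index = 1, .type = <int> } }

   keeps A as it is and replaces P with a synthesized scalar parameter of
   type int, named with the pass-supplied arg_prefix (or "SYNTH" by
   default), yielding a signature like int foo (int a, int SYNTH.1).  */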
3940 void
3941 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3943 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3944 tree orig_type = TREE_TYPE (fndecl);
3945 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3947 /* The following test is an ugly hack: some functions simply don't have any
3948 arguments in their type. This is probably a bug, but we cope with it here. */
3949 bool care_for_types = (old_arg_types != NULL_TREE);
3950 bool last_parm_void;
3951 vec<tree> otypes;
3952 if (care_for_types)
3954 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3955 == void_type_node);
3956 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3957 if (last_parm_void)
3958 gcc_assert (oparms.length () + 1 == otypes.length ());
3959 else
3960 gcc_assert (oparms.length () == otypes.length ());
3962 else
3964 last_parm_void = false;
3965 otypes.create (0);
3968 int len = adjustments.length ();
3969 tree *link = &DECL_ARGUMENTS (fndecl);
3970 tree new_arg_types = NULL;
3971 for (int i = 0; i < len; i++)
3973 struct ipa_parm_adjustment *adj;
3974 gcc_assert (link);
3976 adj = &adjustments[i];
3977 tree parm;
3978 if (adj->op == IPA_PARM_OP_NEW)
3979 parm = NULL;
3980 else
3981 parm = oparms[adj->base_index];
3982 adj->base = parm;
3984 if (adj->op == IPA_PARM_OP_COPY)
3986 if (care_for_types)
3987 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3988 new_arg_types);
3989 *link = parm;
3990 link = &DECL_CHAIN (parm);
3992 else if (adj->op != IPA_PARM_OP_REMOVE)
3994 tree new_parm;
3995 tree ptype;
3997 if (adj->by_ref)
3998 ptype = build_pointer_type (adj->type);
3999 else
4001 ptype = adj->type;
4002 if (is_gimple_reg_type (ptype))
4004 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
4005 if (TYPE_ALIGN (ptype) != malign)
4006 ptype = build_aligned_type (ptype, malign);
4010 if (care_for_types)
4011 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
4013 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
4014 ptype);
4015 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
4016 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
4017 DECL_ARTIFICIAL (new_parm) = 1;
4018 DECL_ARG_TYPE (new_parm) = ptype;
4019 DECL_CONTEXT (new_parm) = fndecl;
4020 TREE_USED (new_parm) = 1;
4021 DECL_IGNORED_P (new_parm) = 1;
4022 layout_decl (new_parm, 0);
4024 if (adj->op == IPA_PARM_OP_NEW)
4025 adj->base = NULL;
4026 else
4027 adj->base = parm;
4028 adj->new_decl = new_parm;
4030 *link = new_parm;
4031 link = &DECL_CHAIN (new_parm);
4035 *link = NULL_TREE;
4037 tree new_reversed = NULL;
4038 if (care_for_types)
4040 new_reversed = nreverse (new_arg_types);
4041 if (last_parm_void)
4043 if (new_reversed)
4044 TREE_CHAIN (new_arg_types) = void_list_node;
4045 else
4046 new_reversed = void_list_node;
4050 /* Use copy_node to preserve as much as possible from the original type
4051 (debug info, attribute lists etc.).
4052 The exception is METHOD_TYPEs, which must have the THIS argument;
4053 when we are asked to remove it, we need to build a new FUNCTION_TYPE
4054 instead. */
4055 tree new_type = NULL;
4056 if (TREE_CODE (orig_type) != METHOD_TYPE
4057 || (adjustments[0].op == IPA_PARM_OP_COPY
4058 && adjustments[0].base_index == 0))
4060 new_type = build_distinct_type_copy (orig_type);
4061 TYPE_ARG_TYPES (new_type) = new_reversed;
4063 else
4065 new_type
4066 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
4067 new_reversed));
4068 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
4069 DECL_VINDEX (fndecl) = NULL_TREE;
4072 /* When the signature changes, we need to clear builtin info. */
4073 if (DECL_BUILT_IN (fndecl))
4075 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
4076 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
4079 TREE_TYPE (fndecl) = new_type;
4080 DECL_VIRTUAL_P (fndecl) = 0;
4081 DECL_LANG_SPECIFIC (fndecl) = NULL;
4082 otypes.release ();
4083 oparms.release ();
4086 /* Modify the actual arguments of function call STMT as indicated in
4087 ADJUSTMENTS. If this is a directly recursive call, CS must be NULL.
4088 Otherwise it must contain the corresponding call graph edge. */
4090 void
4091 ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
4092 ipa_parm_adjustment_vec adjustments)
4094 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
4095 vec<tree> vargs;
4096 vec<tree, va_gc> **debug_args = NULL;
4097 gcall *new_stmt;
4098 gimple_stmt_iterator gsi, prev_gsi;
4099 tree callee_decl;
4100 int i, len;
4102 len = adjustments.length ();
4103 vargs.create (len);
4104 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
4105 current_node->remove_stmt_references (stmt);
4107 gsi = gsi_for_stmt (stmt);
4108 prev_gsi = gsi;
4109 gsi_prev (&prev_gsi);
4110 for (i = 0; i < len; i++)
4112 struct ipa_parm_adjustment *adj;
4114 adj = &adjustments[i];
4116 if (adj->op == IPA_PARM_OP_COPY)
4118 tree arg = gimple_call_arg (stmt, adj->base_index);
4120 vargs.quick_push (arg);
4122 else if (adj->op != IPA_PARM_OP_REMOVE)
4124 tree expr, base, off;
4125 location_t loc;
4126 unsigned int deref_align = 0;
4127 bool deref_base = false;
4129 /* Since we create a new parameter out of the value of the old one, we can
4130 do the following kinds of transformations:
4132 - A scalar passed by reference is converted to a scalar passed by
4133 value. (adj->by_ref is false and the type of the original
4134 actual argument is a pointer to a scalar).
4136 - A part of an aggregate is passed instead of the whole aggregate.
4137 The part can be passed either by value or by reference, this is
4138 determined by value of adj->by_ref. Moreover, the code below
4139 handles both situations when the original aggregate is passed by
4140 value (its type is not a pointer) and when it is passed by
4141 reference (it is a pointer to an aggregate).
4143 When the new argument is passed by reference (adj->by_ref is true)
4144 it must be a part of an aggregate and therefore we form it by
4145 simply taking the address of a reference inside the original
4146 aggregate. */
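/* An illustrative example (hypothetical names): if the original actual
   argument is &s and the adjustment selects the scalar at bit offset 32
   (byte offset 4) passed by value, the code below builds the new
   argument as MEM_REF <&s, 4>; were that part passed by reference, it
   would build &MEM_REF <&s, 4> instead.  */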
4148 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
4149 base = gimple_call_arg (stmt, adj->base_index);
4150 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
4151 : EXPR_LOCATION (base);
4153 if (TREE_CODE (base) != ADDR_EXPR
4154 && POINTER_TYPE_P (TREE_TYPE (base)))
4155 off = build_int_cst (adj->alias_ptr_type,
4156 adj->offset / BITS_PER_UNIT);
4157 else
4159 HOST_WIDE_INT base_offset;
4160 tree prev_base;
4161 bool addrof;
4163 if (TREE_CODE (base) == ADDR_EXPR)
4165 base = TREE_OPERAND (base, 0);
4166 addrof = true;
4168 else
4169 addrof = false;
4170 prev_base = base;
4171 base = get_addr_base_and_unit_offset (base, &base_offset);
4172 /* Aggregate arguments can have non-invariant addresses. */
4173 if (!base)
4175 base = build_fold_addr_expr (prev_base);
4176 off = build_int_cst (adj->alias_ptr_type,
4177 adj->offset / BITS_PER_UNIT);
4179 else if (TREE_CODE (base) == MEM_REF)
4181 if (!addrof)
4183 deref_base = true;
4184 deref_align = TYPE_ALIGN (TREE_TYPE (base));
4186 off = build_int_cst (adj->alias_ptr_type,
4187 base_offset
4188 + adj->offset / BITS_PER_UNIT);
4189 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
4190 off);
4191 base = TREE_OPERAND (base, 0);
4193 else
4195 off = build_int_cst (adj->alias_ptr_type,
4196 base_offset
4197 + adj->offset / BITS_PER_UNIT);
4198 base = build_fold_addr_expr (base);
4202 if (!adj->by_ref)
4204 tree type = adj->type;
4205 unsigned int align;
4206 unsigned HOST_WIDE_INT misalign;
4208 if (deref_base)
4210 align = deref_align;
4211 misalign = 0;
4213 else
4215 get_pointer_alignment_1 (base, &align, &misalign);
4216 if (TYPE_ALIGN (type) > align)
4217 align = TYPE_ALIGN (type);
4219 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
4220 * BITS_PER_UNIT);
4221 misalign = misalign & (align - 1);
4222 if (misalign != 0)
4223 align = least_bit_hwi (misalign);
4224 if (align < TYPE_ALIGN (type))
4225 type = build_aligned_type (type, align);
4226 base = force_gimple_operand_gsi (&gsi, base,
4227 true, NULL, true, GSI_SAME_STMT);
4228 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
4229 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
4230 /* If expr is not a valid gimple call argument, emit
4231 a load into a temporary. */
4232 if (is_gimple_reg_type (TREE_TYPE (expr)))
4234 gimple *tem = gimple_build_assign (NULL_TREE, expr);
4235 if (gimple_in_ssa_p (cfun))
4237 gimple_set_vuse (tem, gimple_vuse (stmt));
4238 expr = make_ssa_name (TREE_TYPE (expr), tem);
4240 else
4241 expr = create_tmp_reg (TREE_TYPE (expr));
4242 gimple_assign_set_lhs (tem, expr);
4243 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4246 else
4248 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4249 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
4250 expr = build_fold_addr_expr (expr);
4251 expr = force_gimple_operand_gsi (&gsi, expr,
4252 true, NULL, true, GSI_SAME_STMT);
4254 vargs.quick_push (expr);
4256 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4258 unsigned int ix;
4259 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4260 gimple *def_temp;
4262 arg = gimple_call_arg (stmt, adj->base_index);
4263 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4265 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4266 continue;
4267 arg = fold_convert_loc (gimple_location (stmt),
4268 TREE_TYPE (origin), arg);
4270 if (debug_args == NULL)
4271 debug_args = decl_debug_args_insert (callee_decl);
4272 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4273 if (ddecl == origin)
4275 ddecl = (**debug_args)[ix + 1];
4276 break;
4278 if (ddecl == NULL)
4280 ddecl = make_node (DEBUG_EXPR_DECL);
4281 DECL_ARTIFICIAL (ddecl) = 1;
4282 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4283 DECL_MODE (ddecl) = DECL_MODE (origin);
4285 vec_safe_push (*debug_args, origin);
4286 vec_safe_push (*debug_args, ddecl);
4288 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4289 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4293 if (dump_file && (dump_flags & TDF_DETAILS))
4295 fprintf (dump_file, "replacing stmt:");
4296 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4299 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4300 vargs.release ();
4301 if (gimple_call_lhs (stmt))
4302 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4304 gimple_set_block (new_stmt, gimple_block (stmt));
4305 if (gimple_has_location (stmt))
4306 gimple_set_location (new_stmt, gimple_location (stmt));
4307 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4308 gimple_call_copy_flags (new_stmt, stmt);
4309 if (gimple_in_ssa_p (cfun))
4311 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4312 if (gimple_vdef (stmt))
4314 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4315 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4319 if (dump_file && (dump_flags & TDF_DETAILS))
4321 fprintf (dump_file, "with stmt:");
4322 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4323 fprintf (dump_file, "\n");
4325 gsi_replace (&gsi, new_stmt, true);
4326 if (cs)
4327 cs->set_call_stmt (new_stmt);
4330 current_node->record_stmt_references (gsi_stmt (gsi));
4331 gsi_prev (&gsi);
4333 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4336 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4337 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4338 specifies whether the function should care about type incompatibility
4339 between the current and new expressions. If it is false, the function
4340 will leave incompatibility issues to the caller. Return true iff the
4341 expression was modified. */
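/* An illustrative example (hypothetical names): with an adjustment that
   replaced the scalar at offset 0 of parameter p by a new by-reference
   parameter ISRA.1, an occurrence of p->x in the body is rewritten to
   *ISRA.1, wrapped in a VIEW_CONVERT_EXPR when CONVERT is set and the
   types do not match exactly.  */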
4343 bool
4344 ipa_modify_expr (tree *expr, bool convert,
4345 ipa_parm_adjustment_vec adjustments)
4347 struct ipa_parm_adjustment *cand
4348 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4349 if (!cand)
4350 return false;
4352 tree src;
4353 if (cand->by_ref)
4355 src = build_simple_mem_ref (cand->new_decl);
4356 REF_REVERSE_STORAGE_ORDER (src) = cand->reverse;
4358 else
4359 src = cand->new_decl;
4361 if (dump_file && (dump_flags & TDF_DETAILS))
4363 fprintf (dump_file, "About to replace expr ");
4364 print_generic_expr (dump_file, *expr, 0);
4365 fprintf (dump_file, " with ");
4366 print_generic_expr (dump_file, src, 0);
4367 fprintf (dump_file, "\n");
4370 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4372 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4373 *expr = vce;
4375 else
4376 *expr = src;
4377 return true;
4380 /* If T is an SSA_NAME, return NULL if it is not a default def or
4381 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4382 the base variable is always returned, regardless if it is a default
4383 def. Return T if it is not an SSA_NAME. */
4385 static tree
4386 get_ssa_base_param (tree t, bool ignore_default_def)
4388 if (TREE_CODE (t) == SSA_NAME)
4390 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4391 return SSA_NAME_VAR (t);
4392 else
4393 return NULL_TREE;
4395 return t;
4398 /* Given an expression, return an adjustment entry specifying the
4399 transformation to be done on EXPR. If no suitable adjustment entry
4400 was found, returns NULL.
4402 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4403 default def, otherwise bail on them.
4405 If CONVERT is non-NULL, this function will set *CONVERT if the
4406 expression provided is a BIT_FIELD_REF, IMAGPART_EXPR or
4407 REALPART_EXPR. ADJUSTMENTS is the adjustments vector. */
4409 ipa_parm_adjustment *
4410 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4411 ipa_parm_adjustment_vec adjustments,
4412 bool ignore_default_def)
4414 if (TREE_CODE (**expr) == BIT_FIELD_REF
4415 || TREE_CODE (**expr) == IMAGPART_EXPR
4416 || TREE_CODE (**expr) == REALPART_EXPR)
4418 *expr = &TREE_OPERAND (**expr, 0);
4419 if (convert)
4420 *convert = true;
4423 HOST_WIDE_INT offset, size, max_size;
4424 bool reverse;
4425 tree base
4426 = get_ref_base_and_extent (**expr, &offset, &size, &max_size, &reverse);
4427 if (!base || size == -1 || max_size == -1)
4428 return NULL;
4430 if (TREE_CODE (base) == MEM_REF)
4432 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4433 base = TREE_OPERAND (base, 0);
4436 base = get_ssa_base_param (base, ignore_default_def);
4437 if (!base || TREE_CODE (base) != PARM_DECL)
4438 return NULL;
4440 struct ipa_parm_adjustment *cand = NULL;
4441 unsigned int len = adjustments.length ();
4442 for (unsigned i = 0; i < len; i++)
4444 struct ipa_parm_adjustment *adj = &adjustments[i];
4446 if (adj->base == base
4447 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4449 cand = adj;
4450 break;
4454 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4455 return NULL;
4456 return cand;
4459 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4461 static bool
4462 index_in_adjustments_multiple_times_p (int base_index,
4463 ipa_parm_adjustment_vec adjustments)
4465 int i, len = adjustments.length ();
4466 bool one = false;
4468 for (i = 0; i < len; i++)
4470 struct ipa_parm_adjustment *adj;
4471 adj = &adjustments[i];
4473 if (adj->base_index == base_index)
4475 if (one)
4476 return true;
4477 else
4478 one = true;
4481 return false;
4485 /* Return adjustments that should have the same effect on function parameters
4486 and call arguments as if they were first changed according to adjustments in
4487 INNER and then by adjustments in OUTER. */
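/* A worked example (illustrative): let the original parameters be (a, b).
   If INNER removes a and copies b, the intermediate clone has the single
   parameter (b); if OUTER then copies that parameter, the combined vector
   is

     { { .op = IPA_PARM_OP_COPY,   .base_index = 1 },    // keep b
       { .op = IPA_PARM_OP_REMOVE, .base_index = 0 } }   // drop a

   i.e. base indices in the result always refer to the original parameter
   list, and the removals recorded in INNER are appended last.  */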
4489 ipa_parm_adjustment_vec
4490 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4491 ipa_parm_adjustment_vec outer)
4493 int i, outlen = outer.length ();
4494 int inlen = inner.length ();
4495 int removals = 0;
4496 ipa_parm_adjustment_vec adjustments, tmp;
4498 tmp.create (inlen);
4499 for (i = 0; i < inlen; i++)
4501 struct ipa_parm_adjustment *n;
4502 n = &inner[i];
4504 if (n->op == IPA_PARM_OP_REMOVE)
4505 removals++;
4506 else
4508 /* FIXME: Handling of new arguments is not implemented yet. */
4509 gcc_assert (n->op != IPA_PARM_OP_NEW);
4510 tmp.quick_push (*n);
4514 adjustments.create (outlen + removals);
4515 for (i = 0; i < outlen; i++)
4517 struct ipa_parm_adjustment r;
4518 struct ipa_parm_adjustment *out = &outer[i];
4519 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4521 memset (&r, 0, sizeof (r));
4522 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4523 if (out->op == IPA_PARM_OP_REMOVE)
4525 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4527 r.op = IPA_PARM_OP_REMOVE;
4528 adjustments.quick_push (r);
4530 continue;
4532 else
4534 /* FIXME: Handling of new arguments is not implemented yet. */
4535 gcc_assert (out->op != IPA_PARM_OP_NEW);
4538 r.base_index = in->base_index;
4539 r.type = out->type;
4541 /* FIXME: Create nonlocal value too. */
4543 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4544 r.op = IPA_PARM_OP_COPY;
4545 else if (in->op == IPA_PARM_OP_COPY)
4546 r.offset = out->offset;
4547 else if (out->op == IPA_PARM_OP_COPY)
4548 r.offset = in->offset;
4549 else
4550 r.offset = in->offset + out->offset;
4551 adjustments.quick_push (r);
4554 for (i = 0; i < inlen; i++)
4556 struct ipa_parm_adjustment *n = &inner[i];
4558 if (n->op == IPA_PARM_OP_REMOVE)
4559 adjustments.quick_push (*n);
4562 tmp.release ();
4563 return adjustments;
4566 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a
4567 human-friendly way, assuming they are meant to be applied to FNDECL. */
4569 void
4570 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4571 tree fndecl)
4573 int i, len = adjustments.length ();
4574 bool first = true;
4575 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4577 fprintf (file, "IPA param adjustments: ");
4578 for (i = 0; i < len; i++)
4580 struct ipa_parm_adjustment *adj;
4581 adj = &adjustments[i];
4583 if (!first)
4584 fprintf (file, " ");
4585 else
4586 first = false;
4588 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4589 print_generic_expr (file, parms[adj->base_index], 0);
4590 if (adj->base)
4592 fprintf (file, ", base: ");
4593 print_generic_expr (file, adj->base, 0);
4595 if (adj->new_decl)
4597 fprintf (file, ", new_decl: ");
4598 print_generic_expr (file, adj->new_decl, 0);
4600 if (adj->new_ssa_base)
4602 fprintf (file, ", new_ssa_base: ");
4603 print_generic_expr (file, adj->new_ssa_base, 0);
4606 if (adj->op == IPA_PARM_OP_COPY)
4607 fprintf (file, ", copy_param");
4608 else if (adj->op == IPA_PARM_OP_REMOVE)
4609 fprintf (file, ", remove_param");
4610 else
4611 fprintf (file, ", offset %li", (long) adj->offset);
4612 if (adj->by_ref)
4613 fprintf (file, ", by_ref");
4614 print_node_brief (file, ", type: ", adj->type, 0);
4615 fprintf (file, "\n");
4617 parms.release ();
4620 /* Dump the linked list of aggregate replacement values AV to F. */
4622 void
4623 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4625 bool comma = false;
4626 fprintf (f, " Aggregate replacements:");
4627 for (; av; av = av->next)
4629 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4630 av->index, av->offset);
4631 print_generic_expr (f, av->value, 0);
4632 comma = true;
4634 fprintf (f, "\n");
4637 /* Stream out jump function JUMP_FUNC to OB. */
4639 static void
4640 ipa_write_jump_function (struct output_block *ob,
4641 struct ipa_jump_func *jump_func)
4643 struct ipa_agg_jf_item *item;
4644 struct bitpack_d bp;
4645 int i, count;
4647 streamer_write_uhwi (ob, jump_func->type);
4648 switch (jump_func->type)
4650 case IPA_JF_UNKNOWN:
4651 break;
4652 case IPA_JF_CONST:
4653 gcc_assert (
4654 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4655 stream_write_tree (ob, jump_func->value.constant.value, true);
4656 break;
4657 case IPA_JF_PASS_THROUGH:
4658 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4659 if (jump_func->value.pass_through.operation == NOP_EXPR)
4661 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4662 bp = bitpack_create (ob->main_stream);
4663 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4664 streamer_write_bitpack (&bp);
4666 else
4668 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4669 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4671 break;
4672 case IPA_JF_ANCESTOR:
4673 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4674 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4675 bp = bitpack_create (ob->main_stream);
4676 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4677 streamer_write_bitpack (&bp);
4678 break;
4681 count = vec_safe_length (jump_func->agg.items);
4682 streamer_write_uhwi (ob, count);
4683 if (count)
4685 bp = bitpack_create (ob->main_stream);
4686 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4687 streamer_write_bitpack (&bp);
4690 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4692 streamer_write_uhwi (ob, item->offset);
4693 stream_write_tree (ob, item->value, true);
4696 bp = bitpack_create (ob->main_stream);
4697 bp_pack_value (&bp, jump_func->bits.known, 1);
4698 streamer_write_bitpack (&bp);
4699 if (jump_func->bits.known)
4701 streamer_write_widest_int (ob, jump_func->bits.value);
4702 streamer_write_widest_int (ob, jump_func->bits.mask);
4704 bp_pack_value (&bp, jump_func->vr_known, 1);
4705 streamer_write_bitpack (&bp);
4706 if (jump_func->vr_known)
4708 streamer_write_enum (ob->main_stream, value_range_type,
4709 VR_LAST, jump_func->m_vr.type);
4710 stream_write_tree (ob, jump_func->m_vr.min, true);
4711 stream_write_tree (ob, jump_func->m_vr.max, true);
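/* To summarize, the stream record written above and consumed by
   ipa_read_jump_function below is: the jump function type as a uhwi, a
   type-specific payload, the number of aggregate items as a uhwi, a
   bitpacked by_ref flag when there are items, the items themselves as
   (offset, value tree) pairs, a bitpacked known-bits flag followed by the
   VALUE and MASK widest_ints when set, and a bitpacked value-range flag
   followed by the range type and MIN/MAX trees when set.  */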
4715 /* Read in jump function JUMP_FUNC from IB. */
4717 static void
4718 ipa_read_jump_function (struct lto_input_block *ib,
4719 struct ipa_jump_func *jump_func,
4720 struct cgraph_edge *cs,
4721 struct data_in *data_in)
4723 enum jump_func_type jftype;
4724 enum tree_code operation;
4725 int i, count;
4727 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4728 switch (jftype)
4730 case IPA_JF_UNKNOWN:
4731 ipa_set_jf_unknown (jump_func);
4732 break;
4733 case IPA_JF_CONST:
4734 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4735 break;
4736 case IPA_JF_PASS_THROUGH:
4737 operation = (enum tree_code) streamer_read_uhwi (ib);
4738 if (operation == NOP_EXPR)
4740 int formal_id = streamer_read_uhwi (ib);
4741 struct bitpack_d bp = streamer_read_bitpack (ib);
4742 bool agg_preserved = bp_unpack_value (&bp, 1);
4743 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4745 else
4747 tree operand = stream_read_tree (ib, data_in);
4748 int formal_id = streamer_read_uhwi (ib);
4749 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4750 operation);
4752 break;
4753 case IPA_JF_ANCESTOR:
4755 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4756 int formal_id = streamer_read_uhwi (ib);
4757 struct bitpack_d bp = streamer_read_bitpack (ib);
4758 bool agg_preserved = bp_unpack_value (&bp, 1);
4759 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4760 break;
4764 count = streamer_read_uhwi (ib);
4765 vec_alloc (jump_func->agg.items, count);
4766 if (count)
4768 struct bitpack_d bp = streamer_read_bitpack (ib);
4769 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4771 for (i = 0; i < count; i++)
4773 struct ipa_agg_jf_item item;
4774 item.offset = streamer_read_uhwi (ib);
4775 item.value = stream_read_tree (ib, data_in);
4776 jump_func->agg.items->quick_push (item);
4779 struct bitpack_d bp = streamer_read_bitpack (ib);
4780 bool bits_known = bp_unpack_value (&bp, 1);
4781 if (bits_known)
4783 jump_func->bits.known = true;
4784 jump_func->bits.value = streamer_read_widest_int (ib);
4785 jump_func->bits.mask = streamer_read_widest_int (ib);
4787 else
4788 jump_func->bits.known = false;
4790 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4791 bool vr_known = bp_unpack_value (&vr_bp, 1);
4792 if (vr_known)
4794 jump_func->vr_known = true;
4795 jump_func->m_vr.type = streamer_read_enum (ib,
4796 value_range_type,
4797 VR_LAST);
4798 jump_func->m_vr.min = stream_read_tree (ib, data_in);
4799 jump_func->m_vr.max = stream_read_tree (ib, data_in);
4801 else
4802 jump_func->vr_known = false;
4805 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4806 relevant to indirect inlining to OB. */
4808 static void
4809 ipa_write_indirect_edge_info (struct output_block *ob,
4810 struct cgraph_edge *cs)
4812 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4813 struct bitpack_d bp;
4815 streamer_write_hwi (ob, ii->param_index);
4816 bp = bitpack_create (ob->main_stream);
4817 bp_pack_value (&bp, ii->polymorphic, 1);
4818 bp_pack_value (&bp, ii->agg_contents, 1);
4819 bp_pack_value (&bp, ii->member_ptr, 1);
4820 bp_pack_value (&bp, ii->by_ref, 1);
4821 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
4822 bp_pack_value (&bp, ii->vptr_changed, 1);
4823 streamer_write_bitpack (&bp);
4824 if (ii->agg_contents || ii->polymorphic)
4825 streamer_write_hwi (ob, ii->offset);
4826 else
4827 gcc_assert (ii->offset == 0);
4829 if (ii->polymorphic)
4831 streamer_write_hwi (ob, ii->otr_token);
4832 stream_write_tree (ob, ii->otr_type, true);
4833 ii->context.stream_out (ob);
4837 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4838 relevant to indirect inlining from IB. */
4840 static void
4841 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4842 struct data_in *data_in,
4843 struct cgraph_edge *cs)
4845 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4846 struct bitpack_d bp;
4848 ii->param_index = (int) streamer_read_hwi (ib);
4849 bp = streamer_read_bitpack (ib);
4850 ii->polymorphic = bp_unpack_value (&bp, 1);
4851 ii->agg_contents = bp_unpack_value (&bp, 1);
4852 ii->member_ptr = bp_unpack_value (&bp, 1);
4853 ii->by_ref = bp_unpack_value (&bp, 1);
4854 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
4855 ii->vptr_changed = bp_unpack_value (&bp, 1);
4856 if (ii->agg_contents || ii->polymorphic)
4857 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4858 else
4859 ii->offset = 0;
4860 if (ii->polymorphic)
4862 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4863 ii->otr_type = stream_read_tree (ib, data_in);
4864 ii->context.stream_in (ib, data_in);
4868 /* Stream out NODE info to OB. */
4870 static void
4871 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4873 int node_ref;
4874 lto_symtab_encoder_t encoder;
4875 struct ipa_node_params *info = IPA_NODE_REF (node);
4876 int j;
4877 struct cgraph_edge *e;
4878 struct bitpack_d bp;
4880 encoder = ob->decl_state->symtab_node_encoder;
4881 node_ref = lto_symtab_encoder_encode (encoder, node);
4882 streamer_write_uhwi (ob, node_ref);
4884 streamer_write_uhwi (ob, ipa_get_param_count (info));
4885 for (j = 0; j < ipa_get_param_count (info); j++)
4886 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4887 bp = bitpack_create (ob->main_stream);
4888 gcc_assert (info->analysis_done
4889 || ipa_get_param_count (info) == 0);
4890 gcc_assert (!info->node_enqueued);
4891 gcc_assert (!info->ipcp_orig_node);
4892 for (j = 0; j < ipa_get_param_count (info); j++)
4893 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4894 streamer_write_bitpack (&bp);
4895 for (j = 0; j < ipa_get_param_count (info); j++)
4896 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4897 for (e = node->callees; e; e = e->next_callee)
4899 struct ipa_edge_args *args = IPA_EDGE_REF (e);
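/* Encode the argument count and whether polymorphic call contexts
   follow each jump function in a single uhwi: the low bit is the
   context flag, the remaining bits are twice the argument count
   (decoded again in ipa_read_node_info).  */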
4901 streamer_write_uhwi (ob,
4902 ipa_get_cs_argument_count (args) * 2
4903 + (args->polymorphic_call_contexts != NULL));
4904 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4906 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4907 if (args->polymorphic_call_contexts != NULL)
4908 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4911 for (e = node->indirect_calls; e; e = e->next_callee)
4913 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4915 streamer_write_uhwi (ob,
4916 ipa_get_cs_argument_count (args) * 2
4917 + (args->polymorphic_call_contexts != NULL));
4918 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4920 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4921 if (args->polymorphic_call_contexts != NULL)
4922 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4924 ipa_write_indirect_edge_info (ob, e);
4928 /* Stream in NODE info from IB. */
4930 static void
4931 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4932 struct data_in *data_in)
4934 struct ipa_node_params *info = IPA_NODE_REF (node);
4935 int k;
4936 struct cgraph_edge *e;
4937 struct bitpack_d bp;
4939 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4941 for (k = 0; k < ipa_get_param_count (info); k++)
4942 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4944 bp = streamer_read_bitpack (ib);
4945 if (ipa_get_param_count (info) != 0)
4946 info->analysis_done = true;
4947 info->node_enqueued = false;
4948 for (k = 0; k < ipa_get_param_count (info); k++)
4949 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4950 for (k = 0; k < ipa_get_param_count (info); k++)
4951 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4952 for (e = node->callees; e; e = e->next_callee)
4954 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4955 int count = streamer_read_uhwi (ib);
4956 bool contexts_computed = count & 1;
4957 count /= 2;
4959 if (!count)
4960 continue;
4961 vec_safe_grow_cleared (args->jump_functions, count);
4962 if (contexts_computed)
4963 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4965 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4967 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4968 data_in);
4969 if (contexts_computed)
4970 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4973 for (e = node->indirect_calls; e; e = e->next_callee)
4975 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4976 int count = streamer_read_uhwi (ib);
4977 bool contexts_computed = count & 1;
4978 count /= 2;
4980 if (count)
4982 vec_safe_grow_cleared (args->jump_functions, count);
4983 if (contexts_computed)
4984 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4985 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4987 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4988 data_in);
4989 if (contexts_computed)
4990 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4993 ipa_read_indirect_edge_info (ib, data_in, e);
4997 /* Write jump functions for nodes in the current partition. */
4999 void
5000 ipa_prop_write_jump_functions (void)
5002 struct cgraph_node *node;
5003 struct output_block *ob;
5004 unsigned int count = 0;
5005 lto_symtab_encoder_iterator lsei;
5006 lto_symtab_encoder_t encoder;
5008 if (!ipa_node_params_sum)
5009 return;
5011 ob = create_output_block (LTO_section_jump_functions);
5012 encoder = ob->decl_state->symtab_node_encoder;
5013 ob->symbol = NULL;
5014 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5015 lsei_next_function_in_partition (&lsei))
5017 node = lsei_cgraph_node (lsei);
5018 if (node->has_gimple_body_p ()
5019 && IPA_NODE_REF (node) != NULL)
5020 count++;
5023 streamer_write_uhwi (ob, count);
5025 /* Process all of the functions. */
5026 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5027 lsei_next_function_in_partition (&lsei))
5029 node = lsei_cgraph_node (lsei);
5030 if (node->has_gimple_body_p ()
5031 && IPA_NODE_REF (node) != NULL)
5032 ipa_write_node_info (ob, node);
5034 streamer_write_char_stream (ob->main_stream, 0);
5035 produce_asm (ob, NULL);
5036 destroy_output_block (ob);
5039 /* Read a jump-functions section in file FILE_DATA of length LEN with data DATA. */
5041 static void
5042 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
5043 size_t len)
5045 const struct lto_function_header *header =
5046 (const struct lto_function_header *) data;
5047 const int cfg_offset = sizeof (struct lto_function_header);
5048 const int main_offset = cfg_offset + header->cfg_size;
5049 const int string_offset = main_offset + header->main_size;
5050 struct data_in *data_in;
5051 unsigned int i;
5052 unsigned int count;
5054 lto_input_block ib_main ((const char *) data + main_offset,
5055 header->main_size, file_data->mode_table);
5057 data_in =
5058 lto_data_in_create (file_data, (const char *) data + string_offset,
5059 header->string_size, vNULL);
5060 count = streamer_read_uhwi (&ib_main);
5062 for (i = 0; i < count; i++)
5064 unsigned int index;
5065 struct cgraph_node *node;
5066 lto_symtab_encoder_t encoder;
5068 index = streamer_read_uhwi (&ib_main);
5069 encoder = file_data->symtab_node_encoder;
5070 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5071 index));
5072 gcc_assert (node->definition);
5073 ipa_read_node_info (&ib_main, node, data_in);
5075 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5076 len);
5077 lto_data_in_delete (data_in);
5080 /* Read IPA-CP jump functions. */
5082 void
5083 ipa_prop_read_jump_functions (void)
5085 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5086 struct lto_file_decl_data *file_data;
5087 unsigned int j = 0;
5089 ipa_check_create_node_params ();
5090 ipa_check_create_edge_args ();
5091 ipa_register_cgraph_hooks ();
5093 while ((file_data = file_data_vec[j++]))
5095 size_t len;
5096 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
5098 if (data)
5099 ipa_prop_read_section (file_data, data, len);
5103 /* After merging units, we can get a mismatch in argument counts, and decl
5104 merging might have rendered parameter lists obsolete.
5105 Also compute called_with_variable_arg info. */
5107 void
5108 ipa_update_after_lto_read (void)
5110 ipa_check_create_node_params ();
5111 ipa_check_create_edge_args ();
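/* Stream out the IPA-CP transformation summary of NODE -- its aggregate
   value replacement chain, known value ranges and known bits -- to OB.  */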
5114 void
5115 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
5117 int node_ref;
5118 unsigned int count = 0;
5119 lto_symtab_encoder_t encoder;
5120 struct ipa_agg_replacement_value *aggvals, *av;
5122 aggvals = ipa_get_agg_replacements_for_node (node);
5123 encoder = ob->decl_state->symtab_node_encoder;
5124 node_ref = lto_symtab_encoder_encode (encoder, node);
5125 streamer_write_uhwi (ob, node_ref);
5127 for (av = aggvals; av; av = av->next)
5128 count++;
5129 streamer_write_uhwi (ob, count);
5131 for (av = aggvals; av; av = av->next)
5133 struct bitpack_d bp;
5135 streamer_write_uhwi (ob, av->offset);
5136 streamer_write_uhwi (ob, av->index);
5137 stream_write_tree (ob, av->value, true);
5139 bp = bitpack_create (ob->main_stream);
5140 bp_pack_value (&bp, av->by_ref, 1);
5141 streamer_write_bitpack (&bp);
5144 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5145 if (ts && vec_safe_length (ts->m_vr) > 0)
5147 count = ts->m_vr->length ();
5148 streamer_write_uhwi (ob, count);
5149 for (unsigned i = 0; i < count; ++i)
5151 struct bitpack_d bp;
5152 ipa_vr *parm_vr = &(*ts->m_vr)[i];
5153 bp = bitpack_create (ob->main_stream);
5154 bp_pack_value (&bp, parm_vr->known, 1);
5155 streamer_write_bitpack (&bp);
5156 if (parm_vr->known)
5158 streamer_write_enum (ob->main_stream, value_range_type,
5159 VR_LAST, parm_vr->type);
5160 streamer_write_wide_int (ob, parm_vr->min);
5161 streamer_write_wide_int (ob, parm_vr->max);
5165 else
5166 streamer_write_uhwi (ob, 0);
5168 if (ts && vec_safe_length (ts->bits) > 0)
5170 count = ts->bits->length ();
5171 streamer_write_uhwi (ob, count);
5173 for (unsigned i = 0; i < count; ++i)
5175 const ipa_bits& bits_jfunc = (*ts->bits)[i];
5176 struct bitpack_d bp = bitpack_create (ob->main_stream);
5177 bp_pack_value (&bp, bits_jfunc.known, 1);
5178 streamer_write_bitpack (&bp);
5179 if (bits_jfunc.known)
5181 streamer_write_widest_int (ob, bits_jfunc.value);
5182 streamer_write_widest_int (ob, bits_jfunc.mask);
5186 else
5187 streamer_write_uhwi (ob, 0);
5190 /* Stream in the aggregate value replacement chain and other transformation info for NODE from IB. */
5192 static void
5193 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
5194 data_in *data_in)
5196 struct ipa_agg_replacement_value *aggvals = NULL;
5197 unsigned int count, i;
5199 count = streamer_read_uhwi (ib);
5200 for (i = 0; i < count; i++)
5202 struct ipa_agg_replacement_value *av;
5203 struct bitpack_d bp;
5205 av = ggc_alloc<ipa_agg_replacement_value> ();
5206 av->offset = streamer_read_uhwi (ib);
5207 av->index = streamer_read_uhwi (ib);
5208 av->value = stream_read_tree (ib, data_in);
5209 bp = streamer_read_bitpack (ib);
5210 av->by_ref = bp_unpack_value (&bp, 1);
5211 av->next = aggvals;
5212 aggvals = av;
5214 ipa_set_node_agg_value_chain (node, aggvals);
5216 count = streamer_read_uhwi (ib);
5217 if (count > 0)
5219 ipcp_grow_transformations_if_necessary ();
5221 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5222 vec_safe_grow_cleared (ts->m_vr, count);
5223 for (i = 0; i < count; i++)
5225 ipa_vr *parm_vr;
5226 parm_vr = &(*ts->m_vr)[i];
5227 struct bitpack_d bp;
5228 bp = streamer_read_bitpack (ib);
5229 parm_vr->known = bp_unpack_value (&bp, 1);
5230 if (parm_vr->known)
5232 parm_vr->type = streamer_read_enum (ib, value_range_type,
5233 VR_LAST);
5234 parm_vr->min = streamer_read_wide_int (ib);
5235 parm_vr->max = streamer_read_wide_int (ib);
5239 count = streamer_read_uhwi (ib);
5240 if (count > 0)
5242 ipcp_grow_transformations_if_necessary ();
5244 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5245 vec_safe_grow_cleared (ts->bits, count);
5247 for (i = 0; i < count; i++)
5249 ipa_bits& bits_jfunc = (*ts->bits)[i];
5250 struct bitpack_d bp = streamer_read_bitpack (ib);
5251 bits_jfunc.known = bp_unpack_value (&bp, 1);
5252 if (bits_jfunc.known)
5254 bits_jfunc.value = streamer_read_widest_int (ib);
5255 bits_jfunc.mask = streamer_read_widest_int (ib);
5261 /* Write all aggregate replacements and other transformation summaries for nodes in the current partition. */
5263 void
5264 ipcp_write_transformation_summaries (void)
5266 struct cgraph_node *node;
5267 struct output_block *ob;
5268 unsigned int count = 0;
5269 lto_symtab_encoder_iterator lsei;
5270 lto_symtab_encoder_t encoder;
5272 ob = create_output_block (LTO_section_ipcp_transform);
5273 encoder = ob->decl_state->symtab_node_encoder;
5274 ob->symbol = NULL;
5275 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5276 lsei_next_function_in_partition (&lsei))
5278 node = lsei_cgraph_node (lsei);
5279 if (node->has_gimple_body_p ())
5280 count++;
5283 streamer_write_uhwi (ob, count);
5285 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5286 lsei_next_function_in_partition (&lsei))
5288 node = lsei_cgraph_node (lsei);
5289 if (node->has_gimple_body_p ())
5290 write_ipcp_transformation_info (ob, node);
5292 streamer_write_char_stream (ob->main_stream, 0);
5293 produce_asm (ob, NULL);
5294 destroy_output_block (ob);
5297 /* Read a replacements section in file FILE_DATA of length LEN with data
5298 DATA. */
5300 static void
5301 read_replacements_section (struct lto_file_decl_data *file_data,
5302 const char *data,
5303 size_t len)
5305 const struct lto_function_header *header =
5306 (const struct lto_function_header *) data;
5307 const int cfg_offset = sizeof (struct lto_function_header);
5308 const int main_offset = cfg_offset + header->cfg_size;
5309 const int string_offset = main_offset + header->main_size;
5310 struct data_in *data_in;
5311 unsigned int i;
5312 unsigned int count;
5314 lto_input_block ib_main ((const char *) data + main_offset,
5315 header->main_size, file_data->mode_table);
5317 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5318 header->string_size, vNULL);
5319 count = streamer_read_uhwi (&ib_main);
5321 for (i = 0; i < count; i++)
5323 unsigned int index;
5324 struct cgraph_node *node;
5325 lto_symtab_encoder_t encoder;
5327 index = streamer_read_uhwi (&ib_main);
5328 encoder = file_data->symtab_node_encoder;
5329 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5330 index));
5331 gcc_assert (node->definition);
5332 read_ipcp_transformation_info (&ib_main, node, data_in);
5334 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
5335 len);
5336 lto_data_in_delete (data_in);
5339 /* Read IPA-CP transformation summaries (aggregate replacements etc.). */
5341 void
5342 ipcp_read_transformation_summaries (void)
5344 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5345 struct lto_file_decl_data *file_data;
5346 unsigned int j = 0;
5348 while ((file_data = file_data_vec[j++]))
5350 size_t len;
5351 const char *data = lto_get_section_data (file_data,
5352 LTO_section_ipcp_transform,
5353 NULL, &len);
5354 if (data)
5355 read_replacements_section (file_data, data, len);
5359 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5360 NODE. */
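/* For example (illustrative): with parameters 0..2 and bit 1 set in
   combined_args_to_skip, the mapping computed below is
   adj = { 0, -1, 1 }, so a replacement recorded for index 2 is
   re-indexed to 1, and index 1 is assumed not to occur in AGGVAL.  */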
5362 static void
5363 adjust_agg_replacement_values (struct cgraph_node *node,
5364 struct ipa_agg_replacement_value *aggval)
5366 struct ipa_agg_replacement_value *v;
5367 int i, c = 0, d = 0, *adj;
5369 if (!node->clone.combined_args_to_skip)
5370 return;
5372 for (v = aggval; v; v = v->next)
5374 gcc_assert (v->index >= 0);
5375 if (c < v->index)
5376 c = v->index;
5378 c++;
5380 adj = XALLOCAVEC (int, c);
5381 for (i = 0; i < c; i++)
5382 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5384 adj[i] = -1;
5385 d++;
5387 else
5388 adj[i] = i - d;
5390 for (v = aggval; v; v = v->next)
5391 v->index = adj[v->index];
5394 /* Dominator walker driving the ipcp modification phase. */
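/* For instance (illustrative): if the replacement chain records the
   constant 7 for the aggregate part of parameter 0 at offset 0, a load
   such as x_1 = p_2(D)->f in the body (with f at offset 0) is rewritten
   by the walker below into x_1 = 7.  */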
5396 class ipcp_modif_dom_walker : public dom_walker
5398 public:
5399 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
5400 vec<ipa_param_descriptor> descs,
5401 struct ipa_agg_replacement_value *av,
5402 bool *sc, bool *cc)
5403 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5404 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5406 virtual edge before_dom_children (basic_block);
5408 private:
5409 struct ipa_func_body_info *m_fbi;
5410 vec<ipa_param_descriptor> m_descriptors;
5411 struct ipa_agg_replacement_value *m_aggval;
5412 bool *m_something_changed, *m_cfg_changed;
5415 edge
5416 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5418 gimple_stmt_iterator gsi;
5419 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5421 struct ipa_agg_replacement_value *v;
5422 gimple *stmt = gsi_stmt (gsi);
5423 tree rhs, val, t;
5424 HOST_WIDE_INT offset, size;
5425 int index;
5426 bool by_ref, vce;
5428 if (!gimple_assign_load_p (stmt))
5429 continue;
5430 rhs = gimple_assign_rhs1 (stmt);
5431 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5432 continue;
5434 vce = false;
5435 t = rhs;
5436 while (handled_component_p (t))
5438 /* V_C_E can do things like convert an array of integers to one
5439 bigger integer and similar things we do not handle below. */
5440 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5442 vce = true;
5443 break;
5445 t = TREE_OPERAND (t, 0);
5447 if (vce)
5448 continue;
5450 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
5451 &offset, &size, &by_ref))
5452 continue;
5453 for (v = m_aggval; v; v = v->next)
5454 if (v->index == index
5455 && v->offset == offset)
5456 break;
5457 if (!v
5458 || v->by_ref != by_ref
5459 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5460 continue;
5462 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5463 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5465 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5466 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5467 else if (TYPE_SIZE (TREE_TYPE (rhs))
5468 == TYPE_SIZE (TREE_TYPE (v->value)))
5469 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5470 else
5472 if (dump_file)
5474 fprintf (dump_file, " const ");
5475 print_generic_expr (dump_file, v->value, 0);
5476 fprintf (dump_file, " can't be converted to type of ");
5477 print_generic_expr (dump_file, rhs, 0);
5478 fprintf (dump_file, "\n");
5480 continue;
5483 else
5484 val = v->value;
5486 if (dump_file && (dump_flags & TDF_DETAILS))
5488 fprintf (dump_file, "Modifying stmt:\n ");
5489 print_gimple_stmt (dump_file, stmt, 0, 0);
5491 gimple_assign_set_rhs_from_tree (&gsi, val);
5492 update_stmt (stmt);
5494 if (dump_file && (dump_flags & TDF_DETAILS))
5496 fprintf (dump_file, "into:\n ");
5497 print_gimple_stmt (dump_file, stmt, 0, 0);
5498 fprintf (dump_file, "\n");
5501 *m_something_changed = true;
5502 if (maybe_clean_eh_stmt (stmt)
5503 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5504 *m_cfg_changed = true;
5506 return NULL;
5509 /* Update bits info of formal parameters as described in
5510 ipcp_transformation_summary. */
5512 static void
5513 ipcp_update_bits (struct cgraph_node *node)
5515 tree parm = DECL_ARGUMENTS (node->decl);
5516 tree next_parm = parm;
5517 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5519 if (!ts || vec_safe_length (ts->bits) == 0)
5520 return;
5522 vec<ipa_bits, va_gc> &bits = *ts->bits;
5523 unsigned count = bits.length ();
5525 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5527 if (node->clone.combined_args_to_skip
5528 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5529 continue;
5531 gcc_checking_assert (parm);
5532 next_parm = DECL_CHAIN (parm);
5534 if (!bits[i].known
5535 || !(INTEGRAL_TYPE_P (TREE_TYPE (parm)) || POINTER_TYPE_P (TREE_TYPE (parm)))
5536 || !is_gimple_reg (parm))
5537 continue;
5539 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5540 if (!ddef)
5541 continue;
5543 if (dump_file)
5545 fprintf (dump_file, "Adjusting mask for param %u to ", i);
5546 print_hex (bits[i].mask, dump_file);
5547 fprintf (dump_file, "\n");
5550 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5552 unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
5553 signop sgn = TYPE_SIGN (TREE_TYPE (ddef));
5555 wide_int nonzero_bits = wide_int::from (bits[i].mask, prec, UNSIGNED)
5556 | wide_int::from (bits[i].value, prec, sgn);
5557 set_nonzero_bits (ddef, nonzero_bits);
5559 else
5561 unsigned tem = bits[i].mask.to_uhwi ();
5562 unsigned HOST_WIDE_INT bitpos = bits[i].value.to_uhwi ();
5563 unsigned align = tem & -tem;
5564 unsigned misalign = bitpos & (align - 1);
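/* E.g. (illustrative): mask 0xfff8 with value 0x4 means the low three
   bits are known to be 100 in binary, so align becomes 8 and misalign 4,
   i.e. the pointer is known to be 4 modulo 8.  */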
5566 if (align > 1)
5568 if (dump_file)
5569 fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);
5571 unsigned old_align, old_misalign;
5572 struct ptr_info_def *pi = get_ptr_info (ddef);
5573 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5575 if (old_known
5576 && old_align > align)
5578 if (dump_file)
5580 fprintf (dump_file, "But alignment was already %u.\n", old_align);
5581 if ((old_misalign & (align - 1)) != misalign)
5582 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5583 old_misalign, misalign);
5585 continue;
5588 if (old_known
5589 && ((misalign & (old_align - 1)) != old_misalign)
5590 && dump_file)
5591 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5592 old_misalign, misalign);
5594 set_ptr_info_alignment (pi, align, misalign);
5600 /* Update value range of formal parameters as described in
5601 ipcp_transformation_summary. */
5603 static void
5604 ipcp_update_vr (struct cgraph_node *node)
5606 tree fndecl = node->decl;
5607 tree parm = DECL_ARGUMENTS (fndecl);
5608 tree next_parm = parm;
5609 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5610 if (!ts || vec_safe_length (ts->m_vr) == 0)
5611 return;
5612 const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
5613 unsigned count = vr.length ();
5615 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5617 if (node->clone.combined_args_to_skip
5618 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5619 continue;
5620 gcc_checking_assert (parm);
5621 next_parm = DECL_CHAIN (parm);
5622 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5624 if (!ddef || !is_gimple_reg (parm))
5625 continue;
5627 if (vr[i].known
5628 && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
5630 tree type = TREE_TYPE (ddef);
5631 unsigned prec = TYPE_PRECISION (type);
5632 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5634 if (dump_file)
5636 fprintf (dump_file, "Setting value range of param %u ", i);
5637 fprintf (dump_file, "%s[",
5638 (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
5639 print_decs (vr[i].min, dump_file);
5640 fprintf (dump_file, ", ");
5641 print_decs (vr[i].max, dump_file);
5642 fprintf (dump_file, "]\n");
5644 set_range_info (ddef, vr[i].type,
5645 wide_int_storage::from (vr[i].min, prec,
5646 TYPE_SIGN (type)),
5647 wide_int_storage::from (vr[i].max, prec,
5648 TYPE_SIGN (type)));
5650 else if (POINTER_TYPE_P (TREE_TYPE (ddef))
5651 && vr[i].type == VR_ANTI_RANGE
5652 && wi::eq_p (vr[i].min, 0)
5653 && wi::eq_p (vr[i].max, 0))
5655 if (dump_file)
5656 fprintf (dump_file, "Setting nonnull for %u\n", i);
5657 set_ptr_nonnull (ddef);
5663 /* IPA-CP transformation phase: apply the propagated known bits, value ranges and aggregate value replacements to the body of NODE. */
5665 unsigned int
5666 ipcp_transform_function (struct cgraph_node *node)
5668 vec<ipa_param_descriptor> descriptors = vNULL;
5669 struct ipa_func_body_info fbi;
5670 struct ipa_agg_replacement_value *aggval;
5671 int param_count;
5672 bool cfg_changed = false, something_changed = false;
5674 gcc_checking_assert (cfun);
5675 gcc_checking_assert (current_function_decl);
5677 if (dump_file)
5678 fprintf (dump_file, "Modification phase of node %s/%i\n",
5679 node->name (), node->order);
5681 ipcp_update_bits (node);
5682 ipcp_update_vr (node);
5683 aggval = ipa_get_agg_replacements_for_node (node);
5684 if (!aggval)
5685 return 0;
5686 param_count = count_formal_params (node->decl);
5687 if (param_count == 0)
5688 return 0;
5689 adjust_agg_replacement_values (node, aggval);
5690 if (dump_file)
5691 ipa_dump_agg_replacement_values (dump_file, aggval);
5693 fbi.node = node;
5694 fbi.info = NULL;
5695 fbi.bb_infos = vNULL;
5696 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5697 fbi.param_count = param_count;
5698 fbi.aa_walked = 0;
5700 descriptors.safe_grow_cleared (param_count);
5701 ipa_populate_param_decls (node, descriptors);
5702 calculate_dominance_info (CDI_DOMINATORS);
5703 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5704 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5706 int i;
5707 struct ipa_bb_info *bi;
5708 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5709 free_ipa_bb_info (bi);
5710 fbi.bb_infos.release ();
5711 free_dominance_info (CDI_DOMINATORS);
5712 (*ipcp_transformations)[node->uid].agg_values = NULL;
5713 (*ipcp_transformations)[node->uid].bits = NULL;
5714 (*ipcp_transformations)[node->uid].m_vr = NULL;
5716 descriptors.release ();
5718 if (!something_changed)
5719 return 0;
5720 else if (cfg_changed)
5721 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5722 else
5723 return TODO_update_ssa_only_virtuals;