/* Interprocedural analyses.
   Copyright (C) 2005-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "calls.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "gimple-pretty-print.h"
#include "params.h"
#include "ipa-utils.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"
/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;
/* Vector of IPA-CP transformation data for each clone.  */
vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
/* Vector where the parameter infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
}
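/* For illustration: analysis would be forbidden for a function such as
   (hypothetical source)

     __attribute__ ((optimize ("O0")))
     int keep_as_is (int x) { return x; }

   because opt_for_fn (decl, optimize) is zero for it.  */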
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl_or_type == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl_or_type = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
							     true);
      param_num++;
    }
}
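/* For illustration (hypothetical source): for

     int foo (int a, char *b);

   descriptors[0].decl_or_type is set to the PARM_DECL of A and
   descriptors[1].decl_or_type to that of B, each with a move cost estimated
   from its type.  */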
/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}
/* Dump the declaration of the Ith formal parameter of the function
   corresponding to INFO to FILE.  Note there is no setter function as the
   descriptor array is built just once using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl_or_type)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl_or_type, 0);
    }
}
/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val, 0);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
				  0);
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name(jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f,
				  jump_func->value.pass_through.operand, 0);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value, 0);
		}
	      fprintf (f, "\n");
	    }
	}

      struct ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, "         Context: ");
	  ctx->dump (dump_file);
	}

      if (jump_func->bits.known)
	{
	  fprintf (f, "         value: "); print_hex (jump_func->bits.value, f);
	  fprintf (f, ", mask: "); print_hex (jump_func->bits.mask, f);
	  fprintf (f, "\n");
	}
      else
	fprintf (f, "         Unknown bits\n");

      if (jump_func->vr_known)
	{
	  fprintf (f, "         VR  ");
	  fprintf (f, "%s[",
		   (jump_func->m_vr.type == VR_ANTI_RANGE) ? "~" : "");
	  print_decs (jump_func->m_vr.min, f);
	  fprintf (f, ", ");
	  print_decs (jump_func->m_vr.max, f);
	  fprintf (f, "]\n");
	}
      else
	fprintf (f, "         Unknown VR\n");
    }
}
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s/%i:\n", node->name (),
	   node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
	       xstrdup_for_dump (node->name ()), node->order,
	       xstrdup_for_dump (cs->callee->name ()),
	       cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by_value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}
/* Set JFUNC to be a jump function representing no knowledge about the
   argument at all.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
  jfunc->bits.known = false;
  jfunc->vr_known = false;
}
/* Set DST to be a copy of another jump function SRC (to be used by the jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}
/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
/* Set JFUNC to be a simple pass-through jump function.  */
static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}
/* Set JFUNC to be an arithmetic pass-through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}
/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type of the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
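/* For illustration, a statement this predicate is meant to accept is a VMT
   pointer store emitted in a constructor, roughly of the form (hypothetical
   GIMPLE)

     this_1(D)->_vptr.A = &_ZTV1A + 16;

   whereas with -fstrict-aliasing an assignment to a non-pointer scalar field
   is rejected by the checks above.  */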
/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type cannot change
   between the beginning of the function and the point where CALL is invoked.

   Generally functions are not allowed to change the type of such instances,
   but they may call destructors.  We assume that methods cannot destroy the
   THIS pointer.  Also, as a special case, constructors and destructors may
   change the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that requires writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inline cdtor is actually working on ARG, but we don't have
     an easy tie on this, so punt on all non-pure cdtors.
     We could also record the types of cdtors and once we know the type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}
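/* For illustration (hypothetical C++ source): in

     struct A { virtual void f (); };
     void foo (A *a) { a->f (); }

   the dynamic type of *a cannot change between the beginning of FOO and the
   call, while within an inlined constructor or destructor of A it can, which
   is what the walk over the inline stack above is meant to catch.  */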
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gcall *call, struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
						call, jfunc, offset);
}
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
						call, jfunc, 0);
}
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}
/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct ipa_func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but which does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  if (TREE_READONLY (base))
    return true;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
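/* For illustration (hypothetical GIMPLE): given

     a.0_2 = a;
     a = 7;
     a.1_3 = a;

   a load placed after the store to A is not preserved; the walk over the
   virtual definitions above finds the intervening store and records
   parm_modified in the cached aa status.  */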
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
			    vec<ipa_param_descriptor> descriptors,
			    gimple *stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
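/* For illustration: for a statement like (hypothetical GIMPLE)

     a.0_2 = a;

   where A is the first formal parameter and no earlier statement may have
   modified it, the function returns index 0.  */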
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
/* Return true if we can prove that OP is a memory reference loading
   data from an aggregate passed as a parameter.

   The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it
   returns false if it cannot prove that the value has not been modified before
   the load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will return true
   even if it cannot prove the value has not been modified, in that case it
   will store false to *GUARANTEED_UNMODIFIED, otherwise it will store true
   there.

   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor> descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
			bool *by_ref_p, bool *guaranteed_unmodified)
{
  int index;
  HOST_WIDE_INT size, max_size;
  bool reverse;
  tree base
    = get_ref_base_and_extent (op, offset_p, &size, &max_size, &reverse);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  if (guaranteed_unmodified)
	    *guaranteed_unmodified = true;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	   void (*<T2e4>) (struct S *) D.1867;
	   struct S * p.1;

	   <bb 2>:
	   p.1_1 = p;
	   D.1867_2 = p.1_1->f;
	   D.1867_2 ();
	   gdp = &p;
      */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0)
    {
      bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
      if (!data_preserved && !guaranteed_unmodified)
	return false;

      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      if (guaranteed_unmodified)
	*guaranteed_unmodified = data_preserved;
      return true;
    }
  return false;
}
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
	int a.0;

	a.0_2 = a;
	bar (a.0_2);
      }

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
	int D.2064;

	D.2064_4 = a.1(D) + 4;
	bar (D.2064_4);
      }

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
	int a.0;
	int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);
      }

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);
     }

   INFO is the structure describing individual parameters as accessed in
   different stages of IPA optimizations.  PARMS_AINFO contains the information
   that is only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
	{
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  ipa_set_jf_arith_pass_through (jfunc, index, op2,
					 gimple_assign_rhs_code (stmt));
	}
      else if (gimple_assign_single_p (stmt))
	{
	  bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
	  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size, &reverse);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size, &reverse);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
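/* For illustration: a C++ pointer to member function such as

     int (MyClass::*) (int)

   is lowered into a RECORD_TYPE roughly of the shape (hypothetical layout)

     struct { int (*__pfn) (MyClass *, int); long __delta; };

   which is the structure this predicate recognizes.  */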
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}
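/* For illustration (hypothetical GIMPLE): given the copies

     b_3 = a_2;
     c_4 = b_3;

   get_ssa_def_if_simple_copy applied to c_4 follows the chain and returns
   a_2, stopping at the first definition that is not a simple copy or is a
   default definition.  */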
/* Simple linked list, describing known contents of an aggregate before a
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
				HOST_WIDE_INT lhs_offset,
				HOST_WIDE_INT lhs_size,
				bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
	return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	/* We already know this value is subsequently overwritten with
	   something else.  */
	*already_there = true;
      else
	/* Otherwise this is a partial overlap which we cannot
	   represent.  */
	return NULL;
    }
  return p;
}
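/* For illustration, with a list already describing bits [0, 32) and
   [64, 96): a query for offset 32, size 32 returns the link between the two
   entries, while a query for offset 16, size 32 overlaps the first entry
   only partially and therefore yields NULL.  */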
/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int const_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
	{
	  struct ipa_agg_jf_item item;
	  item.offset = list->offset - arg_offset;
	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	  item.value = unshare_expr_without_location (list->constant);
	  jfunc->agg.items->quick_push (item);
	}
      list = list->next;
    }
}
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */

static void
determine_locally_known_aggregate_parts (gcall *call, tree arg,
					 tree arg_type,
					 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  if (PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS) == 0)
    return;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  HOST_WIDE_INT arg_max_size;
	  bool reverse;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					      &arg_max_size, &reverse);
	  if (arg_max_size == -1
	      || arg_max_size != arg_size
	      || arg_offset < 0)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;
      bool reverse;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					  &arg_max_size, &reverse);
      if (arg_max_size == -1
	  || arg_max_size != arg_size
	  || arg_offset < 0)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
     describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple *stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;
      bool reverse;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
	continue;
      if (!gimple_assign_single_p (stmt))
	break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
	  || TREE_CODE (lhs) == BIT_FIELD_REF
	  || contains_bitfld_component_ref_p (lhs))
	break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
					  &lhs_max_size, &reverse);
      if (lhs_max_size == -1
	  || lhs_max_size != lhs_size)
	break;

      if (check_ref)
	{
	  if (TREE_CODE (lhs_base) != MEM_REF
	      || TREE_OPERAND (lhs_base, 0) != arg_base
	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	    break;
	}
      else if (lhs_base != arg_base)
	{
	  if (DECL_P (lhs_base))
	    continue;
	  else
	    break;
	}

      bool already_there = false;
      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
					  &already_there);
      if (!p)
	break;
      if (already_there)
	continue;

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
	{
	  n->constant = rhs;
	  const_count++;
	}
      else
	n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}
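/* For illustration (hypothetical source): before a call such as

     struct S s;
     s.a = 1;
     s.b = 2;
     foo (&s);

   the backwards walk finds the two stores, and the resulting jump function
   describes an aggregate passed by reference whose parts at the offsets of
   A and B are the constants 1 and 2.  */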
/* Return the type of the Ith formal parameter of the callee of call graph
   edge E, or NULL if it cannot be determined.  */

static tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
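/* For illustration: for an edge whose callee is declared as (hypothetical
   source)

     int f (int a, char *b);

   ipa_get_callee_param_type (e, 1) yields char *, taken from the
   TYPE_ARG_TYPES list, with DECL_ARGUMENTS as a fallback for unprototyped
   declarations.  */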
/* Compute jump functions for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  struct ipa_polymorphic_call_context context (cs->caller->decl,
						       arg, cs->call_stmt,
						       &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      if (!POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  wide_int min, max;
	  value_range_type type;
	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && (type = get_range_info (arg, &min, &max))
	      && (type == VR_RANGE || type == VR_ANTI_RANGE))
	    {
	      value_range vr;

	      vr.type = type;
	      vr.min = wide_int_to_tree (TREE_TYPE (arg), min);
	      vr.max = wide_int_to_tree (TREE_TYPE (arg), max);
	      vr.equiv = NULL;
	      extract_range_from_unary_expr (&jfunc->m_vr,
					     NOP_EXPR,
					     param_type,
					     &vr, TREE_TYPE (arg));
	      if (jfunc->m_vr.type == VR_RANGE
		  || jfunc->m_vr.type == VR_ANTI_RANGE)
		jfunc->vr_known = true;
	      else
		jfunc->vr_known = false;
	    }
	  else
	    gcc_assert (!jfunc->vr_known);
	}

      if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
	  && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
	{
	  jfunc->bits.known = true;

	  if (TREE_CODE (arg) == SSA_NAME)
	    {
	      jfunc->bits.value = 0;
	      jfunc->bits.mask = widest_int::from (get_nonzero_bits (arg),
						   TYPE_SIGN (TREE_TYPE (arg)));
	    }
	  else
	    {
	      jfunc->bits.value = wi::to_widest (arg);
	      jfunc->bits.mask = 0;
	    }
	}
      else if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  unsigned HOST_WIDE_INT bitpos;
	  unsigned align;

	  jfunc->bits.known = true;
	  get_pointer_alignment_1 (arg, &align, &bitpos);
	  jfunc->bits.mask = wi::mask<widest_int>(TYPE_PRECISION (TREE_TYPE (arg)), false)
			     .and_not (align / BITS_PER_UNIT - 1);
	  jfunc->bits.value = bitpos / BITS_PER_UNIT;
	}
      else
	gcc_assert (!jfunc->bits.known);

      if (is_gimple_ip_invariant (arg)
	  || (VAR_P (arg)
	      && is_global_var (arg)
	      && TREE_READONLY (arg)))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >= 0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
		}
	    }
	  else
	    {
	      gimple *stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call,
						    as_a <gphi *> (stmt));
	    }
	}

      /* If ARG is a pointer, we cannot use its type to determine the type of
	 aggregate passed (because type conversions are ignored in gimple).
	 Usually we can safely get the type from the function declaration, but
	 in case of K&R prototypes or variadic functions we can try our luck
	 with the type of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object, we
	 may better work out the type based on the memory stores we find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
    }
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}
/* Compute jump functions for all edges - both direct and indirect - outgoing
   from BB.  */

static void
ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
{
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  int i;
  struct cgraph_edge *cs;

  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
    {
      struct cgraph_node *callee = cs->callee;

      if (callee)
	{
	  callee->ultimate_alias_target ();
	  /* We do not need to bother analyzing calls to unknown functions
	     unless they may become known during lto/whopr.  */
	  if (!callee->definition && !flag_lto)
	    continue;
	}
      ipa_compute_jump_functions_for_edge (fbi, cs);
    }
}
/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}
/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}
/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index,
		     gcall *stmt)
{
  struct cgraph_edge *cs;

  cs = node->get_edge (stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  cs->indirect_info->guaranteed_unmodified = 0;
  return cs;
}
1905 /* Analyze indirect call CALL, whose called pointer is passed in TARGET, and
1906    examine uses of formal parameters of the caller FBI->node (described by
1907    FBI->info).  Currently it checks
1908 whether the call calls a pointer that is a formal parameter and if so, the
1909 parameter is marked with the called flag and an indirect call graph edge
1910 describing the call is created. This is very simple for ordinary pointers
1911 represented in SSA but not-so-nice when it comes to member pointers. The
1912    ugly part of this function does nothing more than try to match the
1913 pattern of such a call. An example of such a pattern is the gimple dump
1914 below, the call is on the last line:
1916 <bb 2>:
1917 f$__delta_5 = f.__delta;
1918 f$__pfn_24 = f.__pfn;
   or
1921      <bb 2>:
1922 f$__delta_5 = MEM[(struct *)&f];
1923 f$__pfn_24 = MEM[(struct *)&f + 4B];
1925 and a few lines below:
1927 <bb 5>
1928 D.2496_3 = (int) f$__pfn_24;
1929 D.2497_4 = D.2496_3 & 1;
1930 if (D.2497_4 != 0)
1931 goto <bb 3>;
1932 else
1933 goto <bb 4>;
1935 <bb 6>:
1936 D.2500_7 = (unsigned int) f$__delta_5;
1937 D.2501_8 = &S + D.2500_7;
1938 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1939 D.2503_10 = *D.2502_9;
1940 D.2504_12 = f$__pfn_24 + -1;
1941 D.2505_13 = (unsigned int) D.2504_12;
1942 D.2506_14 = D.2503_10 + D.2505_13;
1943 D.2507_15 = *D.2506_14;
1944 iftmp.11_16 = (String:: *) D.2507_15;
1946 <bb 7>:
1947 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1948 D.2500_19 = (unsigned int) f$__delta_5;
1949 D.2508_20 = &S + D.2500_19;
1950 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1952 Such patterns are results of simple calls to a member pointer:
1954 int doprinting (int (MyString::* f)(int) const)
1956 MyString S ("somestring");
1958 return (S.*f)(4);
1961 Moreover, the function also looks for called pointers loaded from aggregates
1962 passed by value or reference. */
1964 static void
1965 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
1966 tree target)
1968 struct ipa_node_params *info = fbi->info;
1969 HOST_WIDE_INT offset;
1970 bool by_ref;
1972 if (SSA_NAME_IS_DEFAULT_DEF (target))
1974 tree var = SSA_NAME_VAR (target);
1975 int index = ipa_get_param_decl_index (info, var);
1976 if (index >= 0)
1977 ipa_note_param_call (fbi->node, index, call);
1978 return;
1981 int index;
1982 gimple *def = SSA_NAME_DEF_STMT (target);
1983 bool guaranteed_unmodified;
1984 if (gimple_assign_single_p (def)
1985 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
1986 gimple_assign_rhs1 (def), &index, &offset,
1987 NULL, &by_ref, &guaranteed_unmodified))
1989 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
1990 cs->indirect_info->offset = offset;
1991 cs->indirect_info->agg_contents = 1;
1992 cs->indirect_info->by_ref = by_ref;
1993 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
1994 return;
1997 /* Now we need to try to match the complex pattern of calling a member
1998 pointer. */
1999 if (gimple_code (def) != GIMPLE_PHI
2000 || gimple_phi_num_args (def) != 2
2001 || !POINTER_TYPE_P (TREE_TYPE (target))
2002 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2003 return;
2005 /* First, we need to check whether one of these is a load from a member
2006 pointer that is a parameter to this function. */
2007 tree n1 = PHI_ARG_DEF (def, 0);
2008 tree n2 = PHI_ARG_DEF (def, 1);
2009 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2010 return;
2011 gimple *d1 = SSA_NAME_DEF_STMT (n1);
2012 gimple *d2 = SSA_NAME_DEF_STMT (n2);
2014 tree rec;
2015 basic_block bb, virt_bb;
2016 basic_block join = gimple_bb (def);
2017 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2019 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2020 return;
2022 bb = EDGE_PRED (join, 0)->src;
2023 virt_bb = gimple_bb (d2);
2025 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2027 bb = EDGE_PRED (join, 1)->src;
2028 virt_bb = gimple_bb (d1);
2030 else
2031 return;
2033 /* Second, we need to check that the basic blocks are laid out in the way
2034 corresponding to the pattern. */
2036 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2037 || single_pred (virt_bb) != bb
2038 || single_succ (virt_bb) != join)
2039 return;
2041 /* Third, let's see that the branching is done depending on the least
2042 significant bit of the pfn. */
2044 gimple *branch = last_stmt (bb);
2045 if (!branch || gimple_code (branch) != GIMPLE_COND)
2046 return;
2048 if ((gimple_cond_code (branch) != NE_EXPR
2049 && gimple_cond_code (branch) != EQ_EXPR)
2050 || !integer_zerop (gimple_cond_rhs (branch)))
2051 return;
2053 tree cond = gimple_cond_lhs (branch);
2054 if (!ipa_is_ssa_with_stmt_def (cond))
2055 return;
2057 def = SSA_NAME_DEF_STMT (cond);
2058 if (!is_gimple_assign (def)
2059 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2060 || !integer_onep (gimple_assign_rhs2 (def)))
2061 return;
2063 cond = gimple_assign_rhs1 (def);
2064 if (!ipa_is_ssa_with_stmt_def (cond))
2065 return;
2067 def = SSA_NAME_DEF_STMT (cond);
2069 if (is_gimple_assign (def)
2070 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2072 cond = gimple_assign_rhs1 (def);
2073 if (!ipa_is_ssa_with_stmt_def (cond))
2074 return;
2075 def = SSA_NAME_DEF_STMT (cond);
2078 tree rec2;
2079 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2080 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2081 == ptrmemfunc_vbit_in_delta),
2082 NULL);
2083 if (rec != rec2)
2084 return;
2086 index = ipa_get_param_decl_index (info, rec);
2087 if (index >= 0
2088 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2090 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2091 cs->indirect_info->offset = offset;
2092 cs->indirect_info->agg_contents = 1;
2093 cs->indirect_info->member_ptr = 1;
2094 cs->indirect_info->guaranteed_unmodified = 1;
2097 return;
2100 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2101 object referenced in the expression is a formal parameter of the caller
2102 FBI->node (described by FBI->info), create a call note for the
2103 statement. */
2105 static void
2106 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2107 gcall *call, tree target)
2109 tree obj = OBJ_TYPE_REF_OBJECT (target);
2110 int index;
2111 HOST_WIDE_INT anc_offset;
2113 if (!flag_devirtualize)
2114 return;
2116 if (TREE_CODE (obj) != SSA_NAME)
2117 return;
2119 struct ipa_node_params *info = fbi->info;
2120 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2122 struct ipa_jump_func jfunc;
2123 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2124 return;
2126 anc_offset = 0;
2127 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2128 gcc_assert (index >= 0);
2129 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2130 call, &jfunc))
2131 return;
2133 else
2135 struct ipa_jump_func jfunc;
2136 gimple *stmt = SSA_NAME_DEF_STMT (obj);
2137 tree expr;
2139 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2140 if (!expr)
2141 return;
2142 index = ipa_get_param_decl_index (info,
2143 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2144 gcc_assert (index >= 0);
2145 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2146 call, &jfunc, anc_offset))
2147 return;
2150 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2151 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2152 ii->offset = anc_offset;
2153 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2154 ii->otr_type = obj_type_ref_class (target);
2155 ii->polymorphic = 1;
2158 /* Analyze call statement CALL to determine whether and how it utilizes formal
2159    parameters of the caller FBI->node (described by FBI->info).  */
2162 static void
2163 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2165 tree target = gimple_call_fn (call);
2167 if (!target
2168 || (TREE_CODE (target) != SSA_NAME
2169 && !virtual_method_call_p (target)))
2170 return;
2172 struct cgraph_edge *cs = fbi->node->get_edge (call);
2173 /* If we previously turned the call into a direct call, there is
2174 no need to analyze. */
2175 if (cs && !cs->indirect_unknown_callee)
2176 return;
2178 if (cs->indirect_info->polymorphic && flag_devirtualize)
2180 tree instance;
2181 tree target = gimple_call_fn (call);
2182 ipa_polymorphic_call_context context (current_function_decl,
2183 target, call, &instance);
2185 gcc_checking_assert (cs->indirect_info->otr_type
2186 == obj_type_ref_class (target));
2187 gcc_checking_assert (cs->indirect_info->otr_token
2188 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2190 cs->indirect_info->vptr_changed
2191 = !context.get_dynamic_type (instance,
2192 OBJ_TYPE_REF_OBJECT (target),
2193 obj_type_ref_class (target), call);
2194 cs->indirect_info->context = context;
2197 if (TREE_CODE (target) == SSA_NAME)
2198 ipa_analyze_indirect_call_uses (fbi, call, target);
2199 else if (virtual_method_call_p (target))
2200 ipa_analyze_virtual_call_uses (fbi, call, target);
2204 /* Analyze the statement STMT with respect to formal parameters (described in
2205    FBI->info) of the caller given by FBI->node.  Currently it only checks
2206    whether formal parameters are called.  */
2208 static void
2209 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2211 if (is_gimple_call (stmt))
2212 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2215 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2216 If OP is a parameter declaration, mark it as used in the info structure
2217 passed in DATA. */
2219 static bool
2220 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2222 struct ipa_node_params *info = (struct ipa_node_params *) data;
2224 op = get_base_address (op);
2225 if (op
2226 && TREE_CODE (op) == PARM_DECL)
2228 int index = ipa_get_param_decl_index (info, op);
2229 gcc_assert (index >= 0);
2230 ipa_set_param_used (info, index, true);
2233 return false;
2236 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2237 the findings in various structures of the associated ipa_node_params
2238 structure, such as parameter flags, notes etc. FBI holds various data about
2239 the function being analyzed. */
2241 static void
2242 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2244 gimple_stmt_iterator gsi;
2245 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2247 gimple *stmt = gsi_stmt (gsi);
2249 if (is_gimple_debug (stmt))
2250 continue;
2252 ipa_analyze_stmt_uses (fbi, stmt);
2253 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2254 visit_ref_for_mod_analysis,
2255 visit_ref_for_mod_analysis,
2256 visit_ref_for_mod_analysis);
2258 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2259 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2260 visit_ref_for_mod_analysis,
2261 visit_ref_for_mod_analysis,
2262 visit_ref_for_mod_analysis);
2265 /* Calculate controlled uses of parameters of NODE. */
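/* An illustrative sketch with a hypothetical caller:

     extern void consume (void (*) (void));

     void
     foo (void (*cb) (void))
     {
       cb ();
       consume (cb);
     }

   Both uses of CB occur in call statements, so CB would get two controlled
   uses, whereas any other kind of use (e.g. storing CB into memory) forces
   the count to IPA_UNDESCRIBED_USE.  */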
2267 static void
2268 ipa_analyze_controlled_uses (struct cgraph_node *node)
2270 struct ipa_node_params *info = IPA_NODE_REF (node);
2272 for (int i = 0; i < ipa_get_param_count (info); i++)
2274 tree parm = ipa_get_param (info, i);
2275 int controlled_uses = 0;
2277 /* For SSA regs see if parameter is used. For non-SSA we compute
2278 the flag during modification analysis. */
2279 if (is_gimple_reg (parm))
2281 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2282 parm);
2283 if (ddef && !has_zero_uses (ddef))
2285 imm_use_iterator imm_iter;
2286 use_operand_p use_p;
2288 ipa_set_param_used (info, i, true);
2289 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2290 if (!is_gimple_call (USE_STMT (use_p)))
2292 if (!is_gimple_debug (USE_STMT (use_p)))
2294 controlled_uses = IPA_UNDESCRIBED_USE;
2295 break;
2298 else
2299 controlled_uses++;
2301 else
2302 controlled_uses = 0;
2304 else
2305 controlled_uses = IPA_UNDESCRIBED_USE;
2306 ipa_set_controlled_uses (info, i, controlled_uses);
2310 /* Free stuff in BI. */
2312 static void
2313 free_ipa_bb_info (struct ipa_bb_info *bi)
2315 bi->cg_edges.release ();
2316 bi->param_aa_statuses.release ();
2319 /* Dominator walker driving the analysis. */
2321 class analysis_dom_walker : public dom_walker
2323 public:
2324 analysis_dom_walker (struct ipa_func_body_info *fbi)
2325 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2327 virtual edge before_dom_children (basic_block);
2329 private:
2330 struct ipa_func_body_info *m_fbi;
2333 edge
2334 analysis_dom_walker::before_dom_children (basic_block bb)
2336 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2337 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2338 return NULL;
2341 /* Release body info FBI. */
2343 void
2344 ipa_release_body_info (struct ipa_func_body_info *fbi)
2346 int i;
2347 struct ipa_bb_info *bi;
2349 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2350 free_ipa_bb_info (bi);
2351 fbi->bb_infos.release ();
2354 /* Initialize the array describing properties of formal parameters
2355 of NODE, analyze their uses and compute jump functions associated
2356 with actual arguments of calls from within NODE. */
2358 void
2359 ipa_analyze_node (struct cgraph_node *node)
2361 struct ipa_func_body_info fbi;
2362 struct ipa_node_params *info;
2364 ipa_check_create_node_params ();
2365 ipa_check_create_edge_args ();
2366 info = IPA_NODE_REF (node);
2368 if (info->analysis_done)
2369 return;
2370 info->analysis_done = 1;
2372 if (ipa_func_spec_opts_forbid_analysis_p (node))
2374 for (int i = 0; i < ipa_get_param_count (info); i++)
2376 ipa_set_param_used (info, i, true);
2377 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2379 return;
2382 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2383 push_cfun (func);
2384 calculate_dominance_info (CDI_DOMINATORS);
2385 ipa_initialize_node_params (node);
2386 ipa_analyze_controlled_uses (node);
2388 fbi.node = node;
2389 fbi.info = IPA_NODE_REF (node);
2390 fbi.bb_infos = vNULL;
2391 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2392 fbi.param_count = ipa_get_param_count (info);
2393 fbi.aa_walked = 0;
2395 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2397 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2398 bi->cg_edges.safe_push (cs);
2401 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2403 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2404 bi->cg_edges.safe_push (cs);
2407 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2409 ipa_release_body_info (&fbi);
2410 free_dominance_info (CDI_DOMINATORS);
2411 pop_cfun ();
2414 /* Update the jump functions associated with call graph edge E when the call
2415 graph edge CS is being inlined, assuming that E->caller is already (possibly
2416 indirectly) inlined into CS->callee and that E has not been inlined. */
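/* A sketch of the composition performed below, using made-up functions:

     extern int callback (int);
     static void inner (int (*g) (int));
     static void middle (int (*f) (int)) { inner (f); }
     void outer (void) { middle (callback); }

   The edge MIDDLE->INNER carries a pass-through jump function for G.  When
   MIDDLE is inlined into OUTER, that pass-through is combined with the
   constant jump function &callback of the edge OUTER->MIDDLE, so the
   surviving edge to INNER gets a constant jump function too.  */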
2418 static void
2419 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2420 struct cgraph_edge *e)
2422 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2423 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2424 int count = ipa_get_cs_argument_count (args);
2425 int i;
2427 for (i = 0; i < count; i++)
2429 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2430 struct ipa_polymorphic_call_context *dst_ctx
2431 = ipa_get_ith_polymorhic_call_context (args, i);
2433 if (dst->type == IPA_JF_ANCESTOR)
2435 struct ipa_jump_func *src;
2436 int dst_fid = dst->value.ancestor.formal_id;
2437 struct ipa_polymorphic_call_context *src_ctx
2438 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2440 /* Variable number of arguments can cause havoc if we try to access
2441 one that does not exist in the inlined edge. So make sure we
2442 don't. */
2443 if (dst_fid >= ipa_get_cs_argument_count (top))
2445 ipa_set_jf_unknown (dst);
2446 continue;
2449 src = ipa_get_ith_jump_func (top, dst_fid);
2451 if (src_ctx && !src_ctx->useless_p ())
2453 struct ipa_polymorphic_call_context ctx = *src_ctx;
2455 /* TODO: Make type preserved safe WRT contexts. */
2456 if (!ipa_get_jf_ancestor_type_preserved (dst))
2457 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2458 ctx.offset_by (dst->value.ancestor.offset);
2459 if (!ctx.useless_p ())
2461 if (!dst_ctx)
2463 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2464 count);
2465 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2468 dst_ctx->combine_with (ctx);
2472 if (src->agg.items
2473 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2475 struct ipa_agg_jf_item *item;
2476 int j;
2478 	  /* Currently we do not produce clobber aggregate jump functions;
2479 	     replace this with merging when we do.  */
2480 gcc_assert (!dst->agg.items);
2482 dst->agg.items = vec_safe_copy (src->agg.items);
2483 dst->agg.by_ref = src->agg.by_ref;
2484 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2485 item->offset -= dst->value.ancestor.offset;
2488 if (src->type == IPA_JF_PASS_THROUGH
2489 && src->value.pass_through.operation == NOP_EXPR)
2491 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2492 dst->value.ancestor.agg_preserved &=
2493 src->value.pass_through.agg_preserved;
2495 else if (src->type == IPA_JF_ANCESTOR)
2497 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2498 dst->value.ancestor.offset += src->value.ancestor.offset;
2499 dst->value.ancestor.agg_preserved &=
2500 src->value.ancestor.agg_preserved;
2502 else
2503 ipa_set_jf_unknown (dst);
2505 else if (dst->type == IPA_JF_PASS_THROUGH)
2507 struct ipa_jump_func *src;
2508 	  /* We must check the range because of calls with a variable number of
2509 	     arguments, and we cannot combine jump functions with operations.  */
2510 if (dst->value.pass_through.operation == NOP_EXPR
2511 && (dst->value.pass_through.formal_id
2512 < ipa_get_cs_argument_count (top)))
2514 int dst_fid = dst->value.pass_through.formal_id;
2515 src = ipa_get_ith_jump_func (top, dst_fid);
2516 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2517 struct ipa_polymorphic_call_context *src_ctx
2518 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2520 if (src_ctx && !src_ctx->useless_p ())
2522 struct ipa_polymorphic_call_context ctx = *src_ctx;
2524 /* TODO: Make type preserved safe WRT contexts. */
2525 if (!ipa_get_jf_pass_through_type_preserved (dst))
2526 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2527 if (!ctx.useless_p ())
2529 if (!dst_ctx)
2531 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2532 count);
2533 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2535 dst_ctx->combine_with (ctx);
2538 switch (src->type)
2540 case IPA_JF_UNKNOWN:
2541 ipa_set_jf_unknown (dst);
2542 break;
2543 case IPA_JF_CONST:
2544 ipa_set_jf_cst_copy (dst, src);
2545 break;
2547 case IPA_JF_PASS_THROUGH:
2549 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2550 enum tree_code operation;
2551 operation = ipa_get_jf_pass_through_operation (src);
2553 if (operation == NOP_EXPR)
2555 bool agg_p;
2556 agg_p = dst_agg_p
2557 && ipa_get_jf_pass_through_agg_preserved (src);
2558 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2560 else
2562 tree operand = ipa_get_jf_pass_through_operand (src);
2563 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2564 operation);
2566 break;
2568 case IPA_JF_ANCESTOR:
2570 bool agg_p;
2571 agg_p = dst_agg_p
2572 && ipa_get_jf_ancestor_agg_preserved (src);
2573 ipa_set_ancestor_jf (dst,
2574 ipa_get_jf_ancestor_offset (src),
2575 ipa_get_jf_ancestor_formal_id (src),
2576 agg_p);
2577 break;
2579 default:
2580 gcc_unreachable ();
2583 if (src->agg.items
2584 && (dst_agg_p || !src->agg.by_ref))
2586 	      /* Currently we do not produce clobber aggregate jump
2587 		 functions; replace this with merging when we do.  */
2588 gcc_assert (!dst->agg.items);
2590 dst->agg.by_ref = src->agg.by_ref;
2591 dst->agg.items = vec_safe_copy (src->agg.items);
2594 else
2595 ipa_set_jf_unknown (dst);
2600 /* If TARGET is an addr_expr of a function declaration, make it the
2601    (SPECULATIVE) destination of an indirect edge IE and return the edge.
2602 Otherwise, return NULL. */
2604 struct cgraph_edge *
2605 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2606 bool speculative)
2608 struct cgraph_node *callee;
2609 struct inline_edge_summary *es = inline_edge_summary (ie);
2610 bool unreachable = false;
2612 if (TREE_CODE (target) == ADDR_EXPR)
2613 target = TREE_OPERAND (target, 0);
2614 if (TREE_CODE (target) != FUNCTION_DECL)
2616 target = canonicalize_constructor_val (target, NULL);
2617 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2619 /* Member pointer call that goes through a VMT lookup. */
2620 if (ie->indirect_info->member_ptr
2621 /* Or if target is not an invariant expression and we do not
2622 	     know whether it will evaluate to a function at runtime.
2623 This can happen when folding through &VAR, where &VAR
2624 is IP invariant, but VAR itself is not.
2626 TODO: Revisit this when GCC 5 is branched. It seems that
2627 member_ptr check is not needed and that we may try to fold
2628 the expression and see if VAR is readonly. */
2629 || !is_gimple_ip_invariant (target))
2631 if (dump_enabled_p ())
2633 location_t loc = gimple_location_safe (ie->call_stmt);
2634 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2635 "discovered direct call non-invariant "
2636 "%s/%i\n",
2637 ie->caller->name (), ie->caller->order);
2639 return NULL;
2643 if (dump_enabled_p ())
2645 location_t loc = gimple_location_safe (ie->call_stmt);
2646 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2647 "discovered direct call to non-function in %s/%i, "
2648 "making it __builtin_unreachable\n",
2649 ie->caller->name (), ie->caller->order);
2652 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2653 callee = cgraph_node::get_create (target);
2654 unreachable = true;
2656 else
2657 callee = cgraph_node::get (target);
2659 else
2660 callee = cgraph_node::get (target);
2662   /* Because may-edges are not explicitly represented and the vtable may be external,
2663 we may create the first reference to the object in the unit. */
2664 if (!callee || callee->global.inlined_to)
2667       /* We had better ensure that we can refer to it.
2668 	 In the case of static functions we are out of luck, since we have already
2669 	 removed their bodies.  In the case of public functions we may or may
2670 not introduce the reference. */
2671 if (!canonicalize_constructor_val (target, NULL)
2672 || !TREE_PUBLIC (target))
2674 if (dump_file)
2675 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2676 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2677 xstrdup_for_dump (ie->caller->name ()),
2678 ie->caller->order,
2679 xstrdup_for_dump (ie->callee->name ()),
2680 ie->callee->order);
2681 return NULL;
2683 callee = cgraph_node::get_create (target);
2686 /* If the edge is already speculated. */
2687 if (speculative && ie->speculative)
2689 struct cgraph_edge *e2;
2690 struct ipa_ref *ref;
2691 ie->speculative_call_info (e2, ie, ref);
2692 if (e2->callee->ultimate_alias_target ()
2693 != callee->ultimate_alias_target ())
2695 if (dump_file)
2696 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2697 "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
2698 xstrdup_for_dump (ie->caller->name ()),
2699 ie->caller->order,
2700 xstrdup_for_dump (callee->name ()),
2701 callee->order,
2702 xstrdup_for_dump (e2->callee->name ()),
2703 e2->callee->order);
2705 else
2707 if (dump_file)
2708 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2709 "(%s/%i -> %s/%i) this agree with previous speculation.\n",
2710 xstrdup_for_dump (ie->caller->name ()),
2711 ie->caller->order,
2712 xstrdup_for_dump (callee->name ()),
2713 callee->order);
2715 return NULL;
2718 if (!dbg_cnt (devirt))
2719 return NULL;
2721 ipa_check_create_node_params ();
2723   /* We cannot make edges to inline clones.  It is a bug if someone removed
2724 the cgraph node too early. */
2725 gcc_assert (!callee->global.inlined_to);
2727 if (dump_file && !unreachable)
2729 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2730 "(%s/%i -> %s/%i), for stmt ",
2731 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2732 speculative ? "speculative" : "known",
2733 xstrdup_for_dump (ie->caller->name ()),
2734 ie->caller->order,
2735 xstrdup_for_dump (callee->name ()),
2736 callee->order);
2737 if (ie->call_stmt)
2738 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2739 else
2740 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2742 if (dump_enabled_p ())
2744 location_t loc = gimple_location_safe (ie->call_stmt);
2746 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2747 "converting indirect call in %s to direct call to %s\n",
2748 ie->caller->name (), callee->name ());
2750 if (!speculative)
2752 struct cgraph_edge *orig = ie;
2753 ie = ie->make_direct (callee);
2754       /* If we resolved a speculative edge, the cost is already up to date
2755 	 for the direct call (adjusted by inline_edge_duplication_hook).  */
2756 if (ie == orig)
2758 es = inline_edge_summary (ie);
2759 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2760 - eni_size_weights.call_cost);
2761 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2762 - eni_time_weights.call_cost);
2765 else
2767 if (!callee->can_be_discarded_p ())
2769 cgraph_node *alias;
2770 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2771 if (alias)
2772 callee = alias;
2774 /* make_speculative will update ie's cost to direct call cost. */
2775 ie = ie->make_speculative
2776 (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
2779 return ie;
2782 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
2783 CONSTRUCTOR and return it. Return NULL if the search fails for some
2784 reason. */
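/* For example (a sketch; remember that REQ_OFFSET is in bits), given

     static const struct { int a; int b; } s = { 3, 4 };

   a request with REQ_OFFSET equal to the bit position of field B would
   return the INTEGER_CST 4, recursing into nested CONSTRUCTORs if the
   element at that offset were itself an aggregate.  */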
2786 static tree
2787 find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
2789 tree type = TREE_TYPE (constructor);
2790 if (TREE_CODE (type) != ARRAY_TYPE
2791 && TREE_CODE (type) != RECORD_TYPE)
2792 return NULL;
2794 unsigned ix;
2795 tree index, val;
2796 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
2798 HOST_WIDE_INT elt_offset;
2799 if (TREE_CODE (type) == ARRAY_TYPE)
2801 offset_int off;
2802 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
2803 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
2805 if (index)
2807 off = wi::to_offset (index);
2808 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
2810 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
2811 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
2812 off = wi::sext (off - wi::to_offset (low_bound),
2813 TYPE_PRECISION (TREE_TYPE (index)));
2815 off *= wi::to_offset (unit_size);
2817 else
2818 off = wi::to_offset (unit_size) * ix;
2820 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
2821 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
2822 continue;
2823 elt_offset = off.to_shwi ();
2825 else if (TREE_CODE (type) == RECORD_TYPE)
2827 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
2828 if (DECL_BIT_FIELD (index))
2829 continue;
2830 elt_offset = int_bit_position (index);
2832 else
2833 gcc_unreachable ();
2835 if (elt_offset > req_offset)
2836 return NULL;
2838 if (TREE_CODE (val) == CONSTRUCTOR)
2839 return find_constructor_constant_at_offset (val,
2840 req_offset - elt_offset);
2842 if (elt_offset == req_offset
2843 && is_gimple_reg_type (TREE_TYPE (val))
2844 && is_gimple_ip_invariant (val))
2845 return val;
2847 return NULL;
2850 /* Check whether SCALAR could be used to look up an aggregate interprocedural
2851 invariant from a static constructor and if so, return it. Otherwise return
2852 NULL. */
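/* E.g. (a sketch, assuming a target with 32-bit int): with

     static const int table[4] = { 10, 20, 30, 40 };

   a BY_REF lookup with SCALAR being &table and OFFSET being 64 bits
   returns the INTEGER_CST 30.  */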
2854 static tree
2855 ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
2857 if (by_ref)
2859 if (TREE_CODE (scalar) != ADDR_EXPR)
2860 return NULL;
2861 scalar = TREE_OPERAND (scalar, 0);
2864 if (!VAR_P (scalar)
2865 || !is_global_var (scalar)
2866 || !TREE_READONLY (scalar)
2867 || !DECL_INITIAL (scalar)
2868 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
2869 return NULL;
2871 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
2874 /* Retrieve value from aggregate jump function AGG or static initializer of
2875 SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
2876 none. BY_REF specifies whether the value has to be passed by reference or
2877 by value. If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
2878 to is set to true if the value comes from an initializer of a constant. */
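/* A hypothetical sketch of the situation handled here: when a caller does

     extern void my_hook (void);
     struct S s;
     s.hook = my_hook;
     process (&s);

   the aggregate jump function of the argument records &my_hook at the bit
   offset of the HOOK field, and this function retrieves that value, e.g.
   for resolving an indirect call through S->HOOK inside PROCESS.  */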
2880 tree
2881 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar,
2882 HOST_WIDE_INT offset, bool by_ref,
2883 bool *from_global_constant)
2885 struct ipa_agg_jf_item *item;
2886 int i;
2888 if (scalar)
2890 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
2891 if (res)
2893 if (from_global_constant)
2894 *from_global_constant = true;
2895 return res;
2899 if (!agg
2900 || by_ref != agg->by_ref)
2901 return NULL;
2903 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2904 if (item->offset == offset)
2906 	/* Currently we do not have clobber values; return NULL for them once
2907 	   we do.  */
2908 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2909 if (from_global_constant)
2910 *from_global_constant = false;
2911 return item->value;
2913 return NULL;
2916 /* Remove a reference to SYMBOL from the list of references of a node given by
2917 reference description RDESC. Return true if the reference has been
2918 successfully found and removed. */
2920 static bool
2921 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2923 struct ipa_ref *to_del;
2924 struct cgraph_edge *origin;
2926 origin = rdesc->cs;
2927 if (!origin)
2928 return false;
2929 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
2930 origin->lto_stmt_uid);
2931 if (!to_del)
2932 return false;
2934 to_del->remove_reference ();
2935 if (dump_file)
2936 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2937 xstrdup_for_dump (origin->caller->name ()),
2938 origin->caller->order, xstrdup_for_dump (symbol->name ()));
2939 return true;
2942 /* If JFUNC has a reference description with refcount different from
2943 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2944 NULL. JFUNC must be a constant jump function. */
2946 static struct ipa_cst_ref_desc *
2947 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2949 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2950 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2951 return rdesc;
2952 else
2953 return NULL;
2956 /* If the value of constant jump function JFUNC is an address of a function
2957 declaration, return the associated call graph node. Otherwise return
2958 NULL. */
2960 static cgraph_node *
2961 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2963 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2964 tree cst = ipa_get_jf_constant (jfunc);
2965 if (TREE_CODE (cst) != ADDR_EXPR
2966 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2967 return NULL;
2969 return cgraph_node::get (TREE_OPERAND (cst, 0));
2973 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2974 refcount and if it hits zero, remove reference to SYMBOL from the caller of
2975 the edge specified in the rdesc. Return false if either the symbol or the
2976 reference could not be found, otherwise return true. */
2978 static bool
2979 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2981 struct ipa_cst_ref_desc *rdesc;
2982 if (jfunc->type == IPA_JF_CONST
2983 && (rdesc = jfunc_rdesc_usable (jfunc))
2984 && --rdesc->refcount == 0)
2986 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
2987 if (!symbol)
2988 return false;
2990 return remove_described_reference (symbol, rdesc);
2992 return true;
2995 /* Try to find a destination for indirect edge IE that corresponds to a simple
2996 call or a call of a member function pointer and where the destination is a
2997 pointer formal parameter described by jump function JFUNC. If it can be
2998 determined, return the newly direct edge, otherwise return NULL.
2999 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
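/* E.g. (a sketch with made-up names): in

     static void worker (void);
     static void dispatch (void (*fn) (void)) { fn (); }
     void entry (void) { dispatch (worker); }

   once DISPATCH is inlined into ENTRY, the jump function describing FN is
   the constant &worker, so the indirect edge for "fn ()" can be turned
   into a direct call to WORKER by this function.  */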
3001 static struct cgraph_edge *
3002 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3003 struct ipa_jump_func *jfunc,
3004 struct ipa_node_params *new_root_info)
3006 struct cgraph_edge *cs;
3007 tree target;
3008 bool agg_contents = ie->indirect_info->agg_contents;
3009 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc);
3010 if (agg_contents)
3012 bool from_global_constant;
3013 target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3014 ie->indirect_info->offset,
3015 ie->indirect_info->by_ref,
3016 &from_global_constant);
3017 if (target
3018 && !from_global_constant
3019 && !ie->indirect_info->guaranteed_unmodified)
3020 return NULL;
3022 else
3023 target = scalar;
3024 if (!target)
3025 return NULL;
3026 cs = ipa_make_edge_direct_to_target (ie, target);
3028 if (cs && !agg_contents)
3030 bool ok;
3031 gcc_checking_assert (cs->callee
3032 && (cs != ie
3033 || jfunc->type != IPA_JF_CONST
3034 || !cgraph_node_for_jfunc (jfunc)
3035 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3036 ok = try_decrement_rdesc_refcount (jfunc);
3037 gcc_checking_assert (ok);
3040 return cs;
3043 /* Return the target to be used in cases of impossible devirtualization. IE
3044 and target (the latter can be NULL) are dumped when dumping is enabled. */
3046 tree
3047 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3049 if (dump_file)
3051 if (target)
3052 fprintf (dump_file,
3053 "Type inconsistent devirtualization: %s/%i->%s\n",
3054 ie->caller->name (), ie->caller->order,
3055 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3056 else
3057 fprintf (dump_file,
3058 "No devirtualization target in %s/%i\n",
3059 ie->caller->name (), ie->caller->order);
3061 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3062 cgraph_node::get_create (new_target);
3063 return new_target;
3066 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3067 call based on a formal parameter which is described by jump function JFUNC
3068 and if it can be determined, make it direct and return the direct edge.
3069 Otherwise, return NULL. CTX describes the polymorphic context that the
3070 parameter the call is based on brings along with it. */
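/* An illustrative C++ sketch (hypothetical types):

     struct A { virtual int get () { return 1; } };
     int use (A *a) { return a->get (); }

   If the known context of the argument restricts the possible polymorphic
   call targets of GET to a single method, or the virtual table pointer
   value is known, the call is made direct; when the vptr may have changed,
   only a speculative edge is created.  */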
3072 static struct cgraph_edge *
3073 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3074 struct ipa_jump_func *jfunc,
3075 struct ipa_polymorphic_call_context ctx)
3077 tree target = NULL;
3078 bool speculative = false;
3080 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
3081 return NULL;
3083 gcc_assert (!ie->indirect_info->by_ref);
3085 /* Try to do lookup via known virtual table pointer value. */
3086 if (!ie->indirect_info->vptr_changed
3087 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
3089 tree vtable;
3090 unsigned HOST_WIDE_INT offset;
3091 tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3092 : NULL;
3093 tree t = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3094 ie->indirect_info->offset,
3095 true);
3096 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3098 bool can_refer;
3099 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3100 vtable, offset, &can_refer);
3101 if (can_refer)
3103 if (!t
3104 || (TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
3105 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
3106 || !possible_polymorphic_call_target_p
3107 (ie, cgraph_node::get (t)))
3109 /* Do not speculate builtin_unreachable, it is stupid! */
3110 if (!ie->indirect_info->vptr_changed)
3111 target = ipa_impossible_devirt_target (ie, target);
3112 else
3113 target = NULL;
3115 else
3117 target = t;
3118 speculative = ie->indirect_info->vptr_changed;
3124 ipa_polymorphic_call_context ie_context (ie);
3125 vec <cgraph_node *>targets;
3126 bool final;
3128 ctx.offset_by (ie->indirect_info->offset);
3129 if (ie->indirect_info->vptr_changed)
3130 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3131 ie->indirect_info->otr_type);
3132 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3133 targets = possible_polymorphic_call_targets
3134 (ie->indirect_info->otr_type,
3135 ie->indirect_info->otr_token,
3136 ctx, &final);
3137 if (final && targets.length () <= 1)
3139 speculative = false;
3140 if (targets.length () == 1)
3141 target = targets[0]->decl;
3142 else
3143 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3145 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3146 && !ie->speculative && ie->maybe_hot_p ())
3148 cgraph_node *n;
3149 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3150 ie->indirect_info->otr_token,
3151 ie->indirect_info->context);
3152 if (n)
3154 target = n->decl;
3155 speculative = true;
3159 if (target)
3161 if (!possible_polymorphic_call_target_p
3162 (ie, cgraph_node::get_create (target)))
3164 if (speculative)
3165 return NULL;
3166 target = ipa_impossible_devirt_target (ie, target);
3168 return ipa_make_edge_direct_to_target (ie, target, speculative);
3170 else
3171 return NULL;
3174 /* Update the indirect call notes associated with NODE when CS is being inlined,
3175 assuming NODE is (potentially indirectly) inlined into CS->callee.
3176 Moreover, if the callee is discovered to be constant, create a new cgraph
3177 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3178    unless NEW_EDGES is NULL.  Return true iff any new edges were created.  */
3180 static bool
3181 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3182 struct cgraph_node *node,
3183 vec<cgraph_edge *> *new_edges)
3185 struct ipa_edge_args *top;
3186 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3187 struct ipa_node_params *new_root_info;
3188 bool res = false;
3190 ipa_check_create_edge_args ();
3191 top = IPA_EDGE_REF (cs);
3192 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3193 ? cs->caller->global.inlined_to
3194 : cs->caller);
3196 for (ie = node->indirect_calls; ie; ie = next_ie)
3198 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3199 struct ipa_jump_func *jfunc;
3200 int param_index;
3201 cgraph_node *spec_target = NULL;
3203 next_ie = ie->next_callee;
3205 if (ici->param_index == -1)
3206 continue;
3208       /* We must check the range because of calls with a variable number of arguments.  */
3209 if (ici->param_index >= ipa_get_cs_argument_count (top))
3211 ici->param_index = -1;
3212 continue;
3215 param_index = ici->param_index;
3216 jfunc = ipa_get_ith_jump_func (top, param_index);
3218 if (ie->speculative)
3220 struct cgraph_edge *de;
3221 struct ipa_ref *ref;
3222 ie->speculative_call_info (de, ie, ref);
3223 spec_target = de->callee;
3226 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3227 new_direct_edge = NULL;
3228 else if (ici->polymorphic)
3230 ipa_polymorphic_call_context ctx;
3231 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3232 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3234 else
3235 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3236 new_root_info);
3237 /* If speculation was removed, then we need to do nothing. */
3238 if (new_direct_edge && new_direct_edge != ie
3239 && new_direct_edge->callee == spec_target)
3241 new_direct_edge->indirect_inlining_edge = 1;
3242 top = IPA_EDGE_REF (cs);
3243 res = true;
3244 if (!new_direct_edge->speculative)
3245 continue;
3247 else if (new_direct_edge)
3249 new_direct_edge->indirect_inlining_edge = 1;
3250 if (new_direct_edge->call_stmt)
3251 new_direct_edge->call_stmt_cannot_inline_p
3252 = !gimple_check_call_matching_types (
3253 new_direct_edge->call_stmt,
3254 new_direct_edge->callee->decl, false);
3255 if (new_edges)
3257 new_edges->safe_push (new_direct_edge);
3258 res = true;
3260 top = IPA_EDGE_REF (cs);
3261 	  /* If a speculative edge was introduced, we still need to update
3262 	     the call info of the indirect edge.  */
3263 if (!new_direct_edge->speculative)
3264 continue;
3266 if (jfunc->type == IPA_JF_PASS_THROUGH
3267 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3269 if (ici->agg_contents
3270 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3271 && !ici->polymorphic)
3272 ici->param_index = -1;
3273 else
3275 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3276 if (ici->polymorphic
3277 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3278 ici->vptr_changed = true;
3281 else if (jfunc->type == IPA_JF_ANCESTOR)
3283 if (ici->agg_contents
3284 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3285 && !ici->polymorphic)
3286 ici->param_index = -1;
3287 else
3289 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3290 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3291 if (ici->polymorphic
3292 && !ipa_get_jf_ancestor_type_preserved (jfunc))
3293 ici->vptr_changed = true;
3296 else
3297 	/* We can find a destination for this edge either now or never.  */
3298 ici->param_index = -1;
3301 return res;
3304 /* Recursively traverse subtree of NODE (including node) made of inlined
3305 cgraph_edges when CS has been inlined and invoke
3306 update_indirect_edges_after_inlining on all nodes and
3307 update_jump_functions_after_inlining on all non-inlined edges that lead out
3308 of this subtree. Newly discovered indirect edges will be added to
3309    *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff any new edges were
3310    created.  */
3312 static bool
3313 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3314 struct cgraph_node *node,
3315 vec<cgraph_edge *> *new_edges)
3317 struct cgraph_edge *e;
3318 bool res;
3320 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3322 for (e = node->callees; e; e = e->next_callee)
3323 if (!e->inline_failed)
3324 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3325 else
3326 update_jump_functions_after_inlining (cs, e);
3327 for (e = node->indirect_calls; e; e = e->next_callee)
3328 update_jump_functions_after_inlining (cs, e);
3330 return res;
3333 /* Combine two controlled uses counts as done during inlining. */
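/* A worked example: if the new root so far has C == 3 controlled uses of
   the value it passes and the inlined callee had D == 2 controlled uses of
   the corresponding parameter, the call site that has just been inlined
   away accounted for one of the C uses, so the combined count is
   3 + 2 - 1 == 4.  An IPA_UNDESCRIBED_USE in either operand poisons the
   result.  */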
3335 static int
3336 combine_controlled_uses_counters (int c, int d)
3338 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3339 return IPA_UNDESCRIBED_USE;
3340 else
3341 return c + d - 1;
3344 /* Propagate the number of controlled uses from CS->callee to the new root of
3345    the tree of inlined nodes.  */
3347 static void
3348 propagate_controlled_uses (struct cgraph_edge *cs)
3350 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3351 struct cgraph_node *new_root = cs->caller->global.inlined_to
3352 ? cs->caller->global.inlined_to : cs->caller;
3353 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3354 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3355 int count, i;
3357 count = MIN (ipa_get_cs_argument_count (args),
3358 ipa_get_param_count (old_root_info));
3359 for (i = 0; i < count; i++)
3361 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3362 struct ipa_cst_ref_desc *rdesc;
3364 if (jf->type == IPA_JF_PASS_THROUGH)
3366 int src_idx, c, d;
3367 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3368 c = ipa_get_controlled_uses (new_root_info, src_idx);
3369 d = ipa_get_controlled_uses (old_root_info, i);
3371 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3372 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3373 c = combine_controlled_uses_counters (c, d);
3374 ipa_set_controlled_uses (new_root_info, src_idx, c);
3375 if (c == 0 && new_root_info->ipcp_orig_node)
3377 struct cgraph_node *n;
3378 struct ipa_ref *ref;
3379 tree t = new_root_info->known_csts[src_idx];
3381 if (t && TREE_CODE (t) == ADDR_EXPR
3382 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3383 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3384 && (ref = new_root->find_reference (n, NULL, 0)))
3386 if (dump_file)
3387 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3388 "reference from %s/%i to %s/%i.\n",
3389 xstrdup_for_dump (new_root->name ()),
3390 new_root->order,
3391 xstrdup_for_dump (n->name ()), n->order);
3392 ref->remove_reference ();
3396 else if (jf->type == IPA_JF_CONST
3397 && (rdesc = jfunc_rdesc_usable (jf)))
3399 int d = ipa_get_controlled_uses (old_root_info, i);
3400 int c = rdesc->refcount;
3401 rdesc->refcount = combine_controlled_uses_counters (c, d);
3402 if (rdesc->refcount == 0)
3404 tree cst = ipa_get_jf_constant (jf);
3405 struct cgraph_node *n;
3406 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3407 && TREE_CODE (TREE_OPERAND (cst, 0))
3408 == FUNCTION_DECL);
3409 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3410 if (n)
3412 struct cgraph_node *clone;
3413 bool ok;
3414 ok = remove_described_reference (n, rdesc);
3415 gcc_checking_assert (ok);
3417 clone = cs->caller;
3418 while (clone->global.inlined_to
3419 && clone != rdesc->cs->caller
3420 && IPA_NODE_REF (clone)->ipcp_orig_node)
3422 struct ipa_ref *ref;
3423 ref = clone->find_reference (n, NULL, 0);
3424 if (ref)
3426 if (dump_file)
3427 fprintf (dump_file, "ipa-prop: Removing "
3428 "cloning-created reference "
3429 "from %s/%i to %s/%i.\n",
3430 xstrdup_for_dump (clone->name ()),
3431 clone->order,
3432 xstrdup_for_dump (n->name ()),
3433 n->order);
3434 ref->remove_reference ();
3436 clone = clone->callers->caller;
3443 for (i = ipa_get_param_count (old_root_info);
3444 i < ipa_get_cs_argument_count (args);
3445 i++)
3447 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3449 if (jf->type == IPA_JF_CONST)
3451 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3452 if (rdesc)
3453 rdesc->refcount = IPA_UNDESCRIBED_USE;
3455 else if (jf->type == IPA_JF_PASS_THROUGH)
3456 ipa_set_controlled_uses (new_root_info,
3457 jf->value.pass_through.formal_id,
3458 IPA_UNDESCRIBED_USE);
3462 /* Update jump functions and call note functions on inlining the call site CS.
3463 CS is expected to lead to a node already cloned by
3464 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3465    *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff any new edges were
3466    created.  */
3468 bool
3469 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3470 vec<cgraph_edge *> *new_edges)
3472 bool changed;
3473 /* Do nothing if the preparation phase has not been carried out yet
3474 (i.e. during early inlining). */
3475 if (!ipa_node_params_sum)
3476 return false;
3477 gcc_assert (ipa_edge_args_vector);
3479 propagate_controlled_uses (cs);
3480 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3482 return changed;
3485 /* Frees all dynamically allocated structures that the argument info points
3486 to. */
3488 void
3489 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3491 vec_free (args->jump_functions);
3492 memset (args, 0, sizeof (*args));
3495 /* Free all ipa_edge structures. */
3497 void
3498 ipa_free_all_edge_args (void)
3500 int i;
3501 struct ipa_edge_args *args;
3503 if (!ipa_edge_args_vector)
3504 return;
3506 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3507 ipa_free_edge_args_substructures (args);
3509 vec_free (ipa_edge_args_vector);
3512 /* Frees all dynamically allocated structures that the param info points
3513 to. */
3515 ipa_node_params::~ipa_node_params ()
3517 descriptors.release ();
3518 free (lattices);
3519   /* Lattice values and their sources are deallocated with their allocation
3520 pool. */
3521 known_csts.release ();
3522 known_contexts.release ();
3524 lattices = NULL;
3525 ipcp_orig_node = NULL;
3526 analysis_done = 0;
3527 node_enqueued = 0;
3528 do_clone_for_all_contexts = 0;
3529 is_all_contexts_clone = 0;
3530 node_dead = 0;
3533 /* Free all ipa_node_params structures. */
3535 void
3536 ipa_free_all_node_params (void)
3538 delete ipa_node_params_sum;
3539 ipa_node_params_sum = NULL;
3542 /* Grow ipcp_transformations if necessary. */
3544 void
3545 ipcp_grow_transformations_if_necessary (void)
3547 if (vec_safe_length (ipcp_transformations)
3548 <= (unsigned) symtab->cgraph_max_uid)
3549 vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
3552 /* Set the aggregate replacements of NODE to be AGGVALS. */
3554 void
3555 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3556 struct ipa_agg_replacement_value *aggvals)
3558 ipcp_grow_transformations_if_necessary ();
3559 (*ipcp_transformations)[node->uid].agg_values = aggvals;
3562 /* Hook that is called by cgraph.c when an edge is removed. */
3564 static void
3565 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3567 struct ipa_edge_args *args;
3569 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3570 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3571 return;
3573 args = IPA_EDGE_REF (cs);
3574 if (args->jump_functions)
3576 struct ipa_jump_func *jf;
3577 int i;
3578 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3580 struct ipa_cst_ref_desc *rdesc;
3581 try_decrement_rdesc_refcount (jf);
3582 if (jf->type == IPA_JF_CONST
3583 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3584 && rdesc->cs == cs)
3585 rdesc->cs = NULL;
3589 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3592 /* Hook that is called by cgraph.c when an edge is duplicated. */
3594 static void
3595 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3596 void *)
3598 struct ipa_edge_args *old_args, *new_args;
3599 unsigned int i;
3601 ipa_check_create_edge_args ();
3603 old_args = IPA_EDGE_REF (src);
3604 new_args = IPA_EDGE_REF (dst);
3606 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3607 if (old_args->polymorphic_call_contexts)
3608 new_args->polymorphic_call_contexts
3609 = vec_safe_copy (old_args->polymorphic_call_contexts);
3611 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3613 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3614 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3616 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3618 if (src_jf->type == IPA_JF_CONST)
3620 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3622 if (!src_rdesc)
3623 dst_jf->value.constant.rdesc = NULL;
3624 else if (src->caller == dst->caller)
3626 struct ipa_ref *ref;
3627 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3628 gcc_checking_assert (n);
3629 ref = src->caller->find_reference (n, src->call_stmt,
3630 src->lto_stmt_uid);
3631 gcc_checking_assert (ref);
3632 dst->caller->clone_reference (ref, ref->stmt);
3634 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3635 dst_rdesc->cs = dst;
3636 dst_rdesc->refcount = src_rdesc->refcount;
3637 dst_rdesc->next_duplicate = NULL;
3638 dst_jf->value.constant.rdesc = dst_rdesc;
3640 else if (src_rdesc->cs == src)
3642 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3643 dst_rdesc->cs = dst;
3644 dst_rdesc->refcount = src_rdesc->refcount;
3645 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3646 src_rdesc->next_duplicate = dst_rdesc;
3647 dst_jf->value.constant.rdesc = dst_rdesc;
3649 else
3651 struct ipa_cst_ref_desc *dst_rdesc;
3652 /* This can happen during inlining, when a JFUNC can refer to a
3653 reference taken in a function up in the tree of inline clones.
3654 We need to find the duplicate that refers to our tree of
3655 inline clones. */
3657 gcc_assert (dst->caller->global.inlined_to);
3658 for (dst_rdesc = src_rdesc->next_duplicate;
3659 dst_rdesc;
3660 dst_rdesc = dst_rdesc->next_duplicate)
3662 struct cgraph_node *top;
3663 top = dst_rdesc->cs->caller->global.inlined_to
3664 ? dst_rdesc->cs->caller->global.inlined_to
3665 : dst_rdesc->cs->caller;
3666 if (dst->caller->global.inlined_to == top)
3667 break;
3669 gcc_assert (dst_rdesc);
3670 dst_jf->value.constant.rdesc = dst_rdesc;
3673 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3674 && src->caller == dst->caller)
3676 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3677 ? dst->caller->global.inlined_to : dst->caller;
3678 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3679 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3681 int c = ipa_get_controlled_uses (root_info, idx);
3682 if (c != IPA_UNDESCRIBED_USE)
3684 c++;
3685 ipa_set_controlled_uses (root_info, idx, c);
3691 /* Analyze a function newly added to the callgraph.  */
3693 static void
3694 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3696 if (node->has_gimple_body_p ())
3697 ipa_analyze_node (node);
3700 /* Hook that is called by the summary infrastructure when a node is duplicated.  */
3702 void
3703 ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
3704 ipa_node_params *old_info,
3705 ipa_node_params *new_info)
3707 ipa_agg_replacement_value *old_av, *new_av;
3709 new_info->descriptors = old_info->descriptors.copy ();
3710 new_info->lattices = NULL;
3711 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3713 new_info->analysis_done = old_info->analysis_done;
3714 new_info->node_enqueued = old_info->node_enqueued;
3715 new_info->versionable = old_info->versionable;
3717 old_av = ipa_get_agg_replacements_for_node (src);
3718 if (old_av)
3720 new_av = NULL;
3721 while (old_av)
3723 struct ipa_agg_replacement_value *v;
3725 v = ggc_alloc<ipa_agg_replacement_value> ();
3726 memcpy (v, old_av, sizeof (*v));
3727 v->next = new_av;
3728 new_av = v;
3729 old_av = old_av->next;
3731 ipa_set_node_agg_value_chain (dst, new_av);
3734 ipcp_transformation_summary *src_trans = ipcp_get_transformation_summary (src);
3736 if (src_trans)
3738 ipcp_grow_transformations_if_necessary ();
3739 src_trans = ipcp_get_transformation_summary (src);
3740 const vec<ipa_vr, va_gc> *src_vr = src_trans->m_vr;
3741 vec<ipa_vr, va_gc> *&dst_vr
3742 = ipcp_get_transformation_summary (dst)->m_vr;
3743 if (vec_safe_length (src_trans->m_vr) > 0)
3745 vec_safe_reserve_exact (dst_vr, src_vr->length ());
3746 for (unsigned i = 0; i < src_vr->length (); ++i)
3747 dst_vr->quick_push ((*src_vr)[i]);
3751 if (src_trans && vec_safe_length (src_trans->bits) > 0)
3753 ipcp_grow_transformations_if_necessary ();
3754 src_trans = ipcp_get_transformation_summary (src);
3755 const vec<ipa_bits, va_gc> *src_bits = src_trans->bits;
3756 vec<ipa_bits, va_gc> *&dst_bits
3757 = ipcp_get_transformation_summary (dst)->bits;
3758 vec_safe_reserve_exact (dst_bits, src_bits->length ());
3759 for (unsigned i = 0; i < src_bits->length (); ++i)
3760 dst_bits->quick_push ((*src_bits)[i]);
3764 /* Register our cgraph hooks if they are not already there. */
3766 void
3767 ipa_register_cgraph_hooks (void)
3769 ipa_check_create_node_params ();
3771 if (!edge_removal_hook_holder)
3772 edge_removal_hook_holder =
3773 symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3774 if (!edge_duplication_hook_holder)
3775 edge_duplication_hook_holder =
3776 symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3777 function_insertion_hook_holder =
3778 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3781 /* Unregister our cgraph hooks.  */
3783 static void
3784 ipa_unregister_cgraph_hooks (void)
3786 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
3787 edge_removal_hook_holder = NULL;
3788 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
3789 edge_duplication_hook_holder = NULL;
3790 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3791 function_insertion_hook_holder = NULL;
3794 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3795 longer needed after ipa-cp. */
3797 void
3798 ipa_free_all_structures_after_ipa_cp (void)
3800 if (!optimize && !in_lto_p)
3802 ipa_free_all_edge_args ();
3803 ipa_free_all_node_params ();
3804 ipcp_sources_pool.release ();
3805 ipcp_cst_values_pool.release ();
3806 ipcp_poly_ctx_values_pool.release ();
3807 ipcp_agg_lattice_pool.release ();
3808 ipa_unregister_cgraph_hooks ();
3809 ipa_refdesc_pool.release ();
3813 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3814 longer needed after indirect inlining. */
3816 void
3817 ipa_free_all_structures_after_iinln (void)
3819 ipa_free_all_edge_args ();
3820 ipa_free_all_node_params ();
3821 ipa_unregister_cgraph_hooks ();
3822 ipcp_sources_pool.release ();
3823 ipcp_cst_values_pool.release ();
3824 ipcp_poly_ctx_values_pool.release ();
3825 ipcp_agg_lattice_pool.release ();
3826 ipa_refdesc_pool.release ();
3829 /* Print ipa_tree_map data structures of function NODE to F. */
3832 void
3833 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3835 int i, count;
3836 struct ipa_node_params *info;
3838 if (!node->definition)
3839 return;
3840 info = IPA_NODE_REF (node);
3841 fprintf (f, " function %s/%i parameter descriptors:\n",
3842 node->name (), node->order);
3843 count = ipa_get_param_count (info);
3844 for (i = 0; i < count; i++)
3846 int c;
3848 fprintf (f, " ");
3849 ipa_dump_param (f, info, i);
3850 if (ipa_is_param_used (info, i))
3851 fprintf (f, " used");
3852 c = ipa_get_controlled_uses (info, i);
3853 if (c == IPA_UNDESCRIBED_USE)
3854 fprintf (f, " undescribed_use");
3855 else
3856 fprintf (f, " controlled_uses=%i", c);
3857 fprintf (f, "\n");
3861 /* Print ipa_tree_map data structures of all functions in the
3862 callgraph to F. */
3864 void
3865 ipa_print_all_params (FILE * f)
3867 struct cgraph_node *node;
3869 fprintf (f, "\nFunction parameters:\n");
3870 FOR_EACH_FUNCTION (node)
3871 ipa_print_node_params (f, node);
3874 /* Return a heap-allocated vector containing formal parameters of FNDECL. */
3876 vec<tree>
3877 ipa_get_vector_of_formal_parms (tree fndecl)
3879 vec<tree> args;
3880 int count;
3881 tree parm;
3883 gcc_assert (!flag_wpa);
3884 count = count_formal_params (fndecl);
3885 args.create (count);
3886 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3887 args.quick_push (parm);
3889 return args;
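/* Usage sketch, illustrative only (FNDECL stands for any function
declaration): the returned vector owns heap storage, so callers index it
and must release it, as e.g. ipa_dump_param_adjustments below does.  */
#if 0
vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
for (unsigned i = 0; i < parms.length (); i++)
  print_generic_expr (dump_file, parms[i], 0);
parms.release ();	/* mandatory; the vector is heap allocated */
#endif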
3892 /* Return a heap-allocated vector containing types of formal parameters of
3893 function type FNTYPE. */
3895 vec<tree>
3896 ipa_get_vector_of_formal_parm_types (tree fntype)
3898 vec<tree> types;
3899 int count = 0;
3900 tree t;
3902 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3903 count++;
3905 types.create (count);
3906 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3907 types.quick_push (TREE_VALUE (t));
3909 return types;
3912 /* Modify the function declaration FNDECL and its type according to the plan in
3913 ADJUSTMENTS. It also sets base fields of individual adjustment structures
3914 to reflect the actual parameters being modified, which are determined by the
3915 base_index field. */
3917 void
3918 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3920 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3921 tree orig_type = TREE_TYPE (fndecl);
3922 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3924 /* The following test is an ugly hack; some functions simply don't have any
3925 arguments in their type. This is probably a bug but well... */
3926 bool care_for_types = (old_arg_types != NULL_TREE);
3927 bool last_parm_void;
3928 vec<tree> otypes;
3929 if (care_for_types)
3931 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3932 == void_type_node);
3933 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3934 if (last_parm_void)
3935 gcc_assert (oparms.length () + 1 == otypes.length ());
3936 else
3937 gcc_assert (oparms.length () == otypes.length ());
3939 else
3941 last_parm_void = false;
3942 otypes.create (0);
3945 int len = adjustments.length ();
3946 tree *link = &DECL_ARGUMENTS (fndecl);
3947 tree new_arg_types = NULL;
3948 for (int i = 0; i < len; i++)
3950 struct ipa_parm_adjustment *adj;
3951 gcc_assert (link);
3953 adj = &adjustments[i];
3954 tree parm;
3955 if (adj->op == IPA_PARM_OP_NEW)
3956 parm = NULL;
3957 else
3958 parm = oparms[adj->base_index];
3959 adj->base = parm;
3961 if (adj->op == IPA_PARM_OP_COPY)
3963 if (care_for_types)
3964 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3965 new_arg_types);
3966 *link = parm;
3967 link = &DECL_CHAIN (parm);
3969 else if (adj->op != IPA_PARM_OP_REMOVE)
3971 tree new_parm;
3972 tree ptype;
3974 if (adj->by_ref)
3975 ptype = build_pointer_type (adj->type);
3976 else
3978 ptype = adj->type;
3979 if (is_gimple_reg_type (ptype))
3981 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3982 if (TYPE_ALIGN (ptype) != malign)
3983 ptype = build_aligned_type (ptype, malign);
3987 if (care_for_types)
3988 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3990 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3991 ptype);
3992 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
3993 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3994 DECL_ARTIFICIAL (new_parm) = 1;
3995 DECL_ARG_TYPE (new_parm) = ptype;
3996 DECL_CONTEXT (new_parm) = fndecl;
3997 TREE_USED (new_parm) = 1;
3998 DECL_IGNORED_P (new_parm) = 1;
3999 layout_decl (new_parm, 0);
4001 if (adj->op == IPA_PARM_OP_NEW)
4002 adj->base = NULL;
4003 else
4004 adj->base = parm;
4005 adj->new_decl = new_parm;
4007 *link = new_parm;
4008 link = &DECL_CHAIN (new_parm);
4012 *link = NULL_TREE;
4014 tree new_reversed = NULL;
4015 if (care_for_types)
4017 new_reversed = nreverse (new_arg_types);
4018 if (last_parm_void)
4020 if (new_reversed)
4021 TREE_CHAIN (new_arg_types) = void_list_node;
4022 else
4023 new_reversed = void_list_node;
4027 /* Use copy_node to preserve as much as possible from original type
4028 (debug info, attribute lists etc.)
4029 The exception is METHOD_TYPEs, which must have a THIS argument;
4030 when we are asked to remove it, we need to build a new FUNCTION_TYPE
4031 instead. */
4032 tree new_type = NULL;
4033 if (TREE_CODE (orig_type) != METHOD_TYPE
4034 || (adjustments[0].op == IPA_PARM_OP_COPY
4035 && adjustments[0].base_index == 0))
4037 new_type = build_distinct_type_copy (orig_type);
4038 TYPE_ARG_TYPES (new_type) = new_reversed;
4040 else
4042 new_type
4043 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
4044 new_reversed));
4045 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
4046 DECL_VINDEX (fndecl) = NULL_TREE;
4049 /* When the signature changes, we need to clear the builtin info. */
4050 if (DECL_BUILT_IN (fndecl))
4052 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
4053 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
4056 TREE_TYPE (fndecl) = new_type;
4057 DECL_VIRTUAL_P (fndecl) = 0;
4058 DECL_LANG_SPECIFIC (fndecl) = NULL;
4059 otypes.release ();
4060 oparms.release ();
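/* A worked sketch, illustrative only: to change 'int f (int a, int b)'
into 'int f (int b)', a caller could describe the plan with one
adjustment per original parameter (the exact conventions of real callers
such as tree-sra.c may differ).  */
#if 0
ipa_parm_adjustment_vec adjustments;
adjustments.create (2);
struct ipa_parm_adjustment adj;
memset (&adj, 0, sizeof (adj));
adj.op = IPA_PARM_OP_REMOVE;	/* drop A */
adj.base_index = 0;
adjustments.quick_push (adj);
adj.op = IPA_PARM_OP_COPY;	/* keep B unchanged */
adj.base_index = 1;
adjustments.quick_push (adj);
ipa_modify_formal_parameters (fndecl, adjustments);
adjustments.release ();
#endif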
4063 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
4064 If this is a directly recursive call, CS must be NULL. Otherwise it must
4065 contain the corresponding call graph edge. */
4067 void
4068 ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
4069 ipa_parm_adjustment_vec adjustments)
4071 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
4072 vec<tree> vargs;
4073 vec<tree, va_gc> **debug_args = NULL;
4074 gcall *new_stmt;
4075 gimple_stmt_iterator gsi, prev_gsi;
4076 tree callee_decl;
4077 int i, len;
4079 len = adjustments.length ();
4080 vargs.create (len);
4081 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
4082 current_node->remove_stmt_references (stmt);
4084 gsi = gsi_for_stmt (stmt);
4085 prev_gsi = gsi;
4086 gsi_prev (&prev_gsi);
4087 for (i = 0; i < len; i++)
4089 struct ipa_parm_adjustment *adj;
4091 adj = &adjustments[i];
4093 if (adj->op == IPA_PARM_OP_COPY)
4095 tree arg = gimple_call_arg (stmt, adj->base_index);
4097 vargs.quick_push (arg);
4099 else if (adj->op != IPA_PARM_OP_REMOVE)
4101 tree expr, base, off;
4102 location_t loc;
4103 unsigned int deref_align = 0;
4104 bool deref_base = false;
4106 /* We create a new parameter out of the value of the old one; we can
4107 do the following kinds of transformations:
4109 - A scalar passed by reference is converted to a scalar passed by
4110 value. (adj->by_ref is false and the type of the original
4111 actual argument is a pointer to a scalar).
4113 - A part of an aggregate is passed instead of the whole aggregate.
4114 The part can be passed either by value or by reference, this is
4115 determined by value of adj->by_ref. Moreover, the code below
4116 handles both situations when the original aggregate is passed by
4117 value (its type is not a pointer) and when it is passed by
4118 reference (it is a pointer to an aggregate).
4120 When the new argument is passed by reference (adj->by_ref is true)
4121 it must be a part of an aggregate and therefore we form it by
4122 simply taking the address of a reference inside the original
4123 aggregate. */
4125 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
4126 base = gimple_call_arg (stmt, adj->base_index);
4127 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
4128 : EXPR_LOCATION (base);
4130 if (TREE_CODE (base) != ADDR_EXPR
4131 && POINTER_TYPE_P (TREE_TYPE (base)))
4132 off = build_int_cst (adj->alias_ptr_type,
4133 adj->offset / BITS_PER_UNIT);
4134 else
4136 HOST_WIDE_INT base_offset;
4137 tree prev_base;
4138 bool addrof;
4140 if (TREE_CODE (base) == ADDR_EXPR)
4142 base = TREE_OPERAND (base, 0);
4143 addrof = true;
4145 else
4146 addrof = false;
4147 prev_base = base;
4148 base = get_addr_base_and_unit_offset (base, &base_offset);
4149 /* Aggregate arguments can have non-invariant addresses. */
4150 if (!base)
4152 base = build_fold_addr_expr (prev_base);
4153 off = build_int_cst (adj->alias_ptr_type,
4154 adj->offset / BITS_PER_UNIT);
4156 else if (TREE_CODE (base) == MEM_REF)
4158 if (!addrof)
4160 deref_base = true;
4161 deref_align = TYPE_ALIGN (TREE_TYPE (base));
4163 off = build_int_cst (adj->alias_ptr_type,
4164 base_offset
4165 + adj->offset / BITS_PER_UNIT);
4166 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
4167 off);
4168 base = TREE_OPERAND (base, 0);
4170 else
4172 off = build_int_cst (adj->alias_ptr_type,
4173 base_offset
4174 + adj->offset / BITS_PER_UNIT);
4175 base = build_fold_addr_expr (base);
4179 if (!adj->by_ref)
4181 tree type = adj->type;
4182 unsigned int align;
4183 unsigned HOST_WIDE_INT misalign;
4185 if (deref_base)
4187 align = deref_align;
4188 misalign = 0;
4190 else
4192 get_pointer_alignment_1 (base, &align, &misalign);
4193 if (TYPE_ALIGN (type) > align)
4194 align = TYPE_ALIGN (type);
4196 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
4197 * BITS_PER_UNIT);
4198 misalign = misalign & (align - 1);
4199 if (misalign != 0)
4200 align = least_bit_hwi (misalign);
4201 if (align < TYPE_ALIGN (type))
4202 type = build_aligned_type (type, align);
4203 base = force_gimple_operand_gsi (&gsi, base,
4204 true, NULL, true, GSI_SAME_STMT);
4205 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
4206 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
4207 /* If expr is not a valid gimple call argument, emit
4208 a load into a temporary. */
4209 if (is_gimple_reg_type (TREE_TYPE (expr)))
4211 gimple *tem = gimple_build_assign (NULL_TREE, expr);
4212 if (gimple_in_ssa_p (cfun))
4214 gimple_set_vuse (tem, gimple_vuse (stmt));
4215 expr = make_ssa_name (TREE_TYPE (expr), tem);
4217 else
4218 expr = create_tmp_reg (TREE_TYPE (expr));
4219 gimple_assign_set_lhs (tem, expr);
4220 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4223 else
4225 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4226 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
4227 expr = build_fold_addr_expr (expr);
4228 expr = force_gimple_operand_gsi (&gsi, expr,
4229 true, NULL, true, GSI_SAME_STMT);
4231 vargs.quick_push (expr);
4233 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4235 unsigned int ix;
4236 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4237 gimple *def_temp;
4239 arg = gimple_call_arg (stmt, adj->base_index);
4240 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4242 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4243 continue;
4244 arg = fold_convert_loc (gimple_location (stmt),
4245 TREE_TYPE (origin), arg);
4247 if (debug_args == NULL)
4248 debug_args = decl_debug_args_insert (callee_decl);
4249 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4250 if (ddecl == origin)
4252 ddecl = (**debug_args)[ix + 1];
4253 break;
4255 if (ddecl == NULL)
4257 ddecl = make_node (DEBUG_EXPR_DECL);
4258 DECL_ARTIFICIAL (ddecl) = 1;
4259 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4260 DECL_MODE (ddecl) = DECL_MODE (origin);
4262 vec_safe_push (*debug_args, origin);
4263 vec_safe_push (*debug_args, ddecl);
4265 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4266 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4270 if (dump_file && (dump_flags & TDF_DETAILS))
4272 fprintf (dump_file, "replacing stmt:");
4273 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4276 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4277 vargs.release ();
4278 if (gimple_call_lhs (stmt))
4279 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4281 gimple_set_block (new_stmt, gimple_block (stmt));
4282 if (gimple_has_location (stmt))
4283 gimple_set_location (new_stmt, gimple_location (stmt));
4284 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4285 gimple_call_copy_flags (new_stmt, stmt);
4286 if (gimple_in_ssa_p (cfun))
4288 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4289 if (gimple_vdef (stmt))
4291 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4292 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4296 if (dump_file && (dump_flags & TDF_DETAILS))
4298 fprintf (dump_file, "with stmt:");
4299 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4300 fprintf (dump_file, "\n");
4302 gsi_replace (&gsi, new_stmt, true);
4303 if (cs)
4304 cs->set_call_stmt (new_stmt);
4307 current_node->record_stmt_references (gsi_stmt (gsi));
4308 gsi_prev (&gsi);
4310 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
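/* Note: for a "pass a piece of the aggregate instead of the whole
aggregate" adjustment, the rewrite performed above amounts to, e.g.

     foo (&s);   ==>   tmp_1 = MEM[&s + OFFSET];  foo (tmp_1);

i.e. a MEM_REF load is synthesized before the call, the new call
consumes the loaded value, and debug binds are emitted for arguments
that disappear.  */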
4313 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4314 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4315 specifies whether the function should care about type incompatibility between
4316 the current and new expressions. If it is false, the function will leave
4317 incompatibility issues to the caller. Return true iff the expression
4318 was modified. */
4320 bool
4321 ipa_modify_expr (tree *expr, bool convert,
4322 ipa_parm_adjustment_vec adjustments)
4324 struct ipa_parm_adjustment *cand
4325 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4326 if (!cand)
4327 return false;
4329 tree src;
4330 if (cand->by_ref)
4332 src = build_simple_mem_ref (cand->new_decl);
4333 REF_REVERSE_STORAGE_ORDER (src) = cand->reverse;
4335 else
4336 src = cand->new_decl;
4338 if (dump_file && (dump_flags & TDF_DETAILS))
4340 fprintf (dump_file, "About to replace expr ");
4341 print_generic_expr (dump_file, *expr, 0);
4342 fprintf (dump_file, " with ");
4343 print_generic_expr (dump_file, src, 0);
4344 fprintf (dump_file, "\n");
4347 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4349 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4350 *expr = vce;
4352 else
4353 *expr = src;
4354 return true;
4357 /* If T is an SSA_NAME, return NULL if it is not a default def or
4358 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4359 the base variable is always returned, regardless of whether it is a default
4360 def. Return T if it is not an SSA_NAME. */
4362 static tree
4363 get_ssa_base_param (tree t, bool ignore_default_def)
4365 if (TREE_CODE (t) == SSA_NAME)
4367 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4368 return SSA_NAME_VAR (t);
4369 else
4370 return NULL_TREE;
4372 return t;
4375 /* Given an expression, return an adjustment entry specifying the
4376 transformation to be done on EXPR. If no suitable adjustment entry
4377 was found, returns NULL.
4379 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4380 default def, otherwise bail on them.
4382 If CONVERT is non-NULL, this function will set *CONVERT if the
4383 expression provided is a component reference. ADJUSTMENTS is the
4384 adjustments vector. */
4386 ipa_parm_adjustment *
4387 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4388 ipa_parm_adjustment_vec adjustments,
4389 bool ignore_default_def)
4391 if (TREE_CODE (**expr) == BIT_FIELD_REF
4392 || TREE_CODE (**expr) == IMAGPART_EXPR
4393 || TREE_CODE (**expr) == REALPART_EXPR)
4395 *expr = &TREE_OPERAND (**expr, 0);
4396 if (convert)
4397 *convert = true;
4400 HOST_WIDE_INT offset, size, max_size;
4401 bool reverse;
4402 tree base
4403 = get_ref_base_and_extent (**expr, &offset, &size, &max_size, &reverse);
4404 if (!base || size == -1 || max_size == -1)
4405 return NULL;
4407 if (TREE_CODE (base) == MEM_REF)
4409 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4410 base = TREE_OPERAND (base, 0);
4413 base = get_ssa_base_param (base, ignore_default_def);
4414 if (!base || TREE_CODE (base) != PARM_DECL)
4415 return NULL;
4417 struct ipa_parm_adjustment *cand = NULL;
4418 unsigned int len = adjustments.length ();
4419 for (unsigned i = 0; i < len; i++)
4421 struct ipa_parm_adjustment *adj = &adjustments[i];
4423 if (adj->base == base
4424 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4426 cand = adj;
4427 break;
4431 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4432 return NULL;
4433 return cand;
4436 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4438 static bool
4439 index_in_adjustments_multiple_times_p (int base_index,
4440 ipa_parm_adjustment_vec adjustments)
4442 int i, len = adjustments.length ();
4443 bool one = false;
4445 for (i = 0; i < len; i++)
4447 struct ipa_parm_adjustment *adj;
4448 adj = &adjustments[i];
4450 if (adj->base_index == base_index)
4452 if (one)
4453 return true;
4454 else
4455 one = true;
4458 return false;
4462 /* Return adjustments that should have the same effect on function parameters
4463 and call arguments as if they were first changed according to adjustments in
4464 INNER and then by adjustments in OUTER. */
4466 ipa_parm_adjustment_vec
4467 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4468 ipa_parm_adjustment_vec outer)
4470 int i, outlen = outer.length ();
4471 int inlen = inner.length ();
4472 int removals = 0;
4473 ipa_parm_adjustment_vec adjustments, tmp;
4475 tmp.create (inlen);
4476 for (i = 0; i < inlen; i++)
4478 struct ipa_parm_adjustment *n;
4479 n = &inner[i];
4481 if (n->op == IPA_PARM_OP_REMOVE)
4482 removals++;
4483 else
4485 /* FIXME: Handling of new arguments is not implemented yet. */
4486 gcc_assert (n->op != IPA_PARM_OP_NEW);
4487 tmp.quick_push (*n);
4491 adjustments.create (outlen + removals);
4492 for (i = 0; i < outlen; i++)
4494 struct ipa_parm_adjustment r;
4495 struct ipa_parm_adjustment *out = &outer[i];
4496 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4498 memset (&r, 0, sizeof (r));
4499 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4500 if (out->op == IPA_PARM_OP_REMOVE)
4502 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4504 r.op = IPA_PARM_OP_REMOVE;
4505 adjustments.quick_push (r);
4507 continue;
4509 else
4511 /* FIXME: Handling of new arguments is not implemented yet. */
4512 gcc_assert (out->op != IPA_PARM_OP_NEW);
4515 r.base_index = in->base_index;
4516 r.type = out->type;
4518 /* FIXME: Create nonlocal value too. */
4520 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4521 r.op = IPA_PARM_OP_COPY;
4522 else if (in->op == IPA_PARM_OP_COPY)
4523 r.offset = out->offset;
4524 else if (out->op == IPA_PARM_OP_COPY)
4525 r.offset = in->offset;
4526 else
4527 r.offset = in->offset + out->offset;
4528 adjustments.quick_push (r);
4531 for (i = 0; i < inlen; i++)
4533 struct ipa_parm_adjustment *n = &inner[i];
4535 if (n->op == IPA_PARM_OP_REMOVE)
4536 adjustments.quick_push (*n);
4539 tmp.release ();
4540 return adjustments;
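/* Worked example: let the original parameters be (p0, p1, p2), let INNER
copy p0 and p2 and remove p1, and let OUTER, expressed against the
intermediate list (p0, p2), remove its parameter 0 and copy its
parameter 1.  TMP then holds INNER's two copies, OUTER's copy of
intermediate parameter 1 is translated back to base_index 2, and the
trailing loop re-emits INNER's removal of p1, so the combined vector
removes p0 and p1 and copies p2.  */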
4543 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a
4544 human-friendly way, assuming they are meant to be applied to FNDECL. */
4546 void
4547 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4548 tree fndecl)
4550 int i, len = adjustments.length ();
4551 bool first = true;
4552 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4554 fprintf (file, "IPA param adjustments: ");
4555 for (i = 0; i < len; i++)
4557 struct ipa_parm_adjustment *adj;
4558 adj = &adjustments[i];
4560 if (!first)
4561 fprintf (file, " ");
4562 else
4563 first = false;
4565 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4566 print_generic_expr (file, parms[adj->base_index], 0);
4567 if (adj->base)
4569 fprintf (file, ", base: ");
4570 print_generic_expr (file, adj->base, 0);
4572 if (adj->new_decl)
4574 fprintf (file, ", new_decl: ");
4575 print_generic_expr (file, adj->new_decl, 0);
4577 if (adj->new_ssa_base)
4579 fprintf (file, ", new_ssa_base: ");
4580 print_generic_expr (file, adj->new_ssa_base, 0);
4583 if (adj->op == IPA_PARM_OP_COPY)
4584 fprintf (file, ", copy_param");
4585 else if (adj->op == IPA_PARM_OP_REMOVE)
4586 fprintf (file, ", remove_param");
4587 else
4588 fprintf (file, ", offset %li", (long) adj->offset);
4589 if (adj->by_ref)
4590 fprintf (file, ", by_ref");
4591 print_node_brief (file, ", type: ", adj->type, 0);
4592 fprintf (file, "\n");
4594 parms.release ();
4597 /* Dump the AV linked list. */
4599 void
4600 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4602 bool comma = false;
4603 fprintf (f, " Aggregate replacements:");
4604 for (; av; av = av->next)
4606 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4607 av->index, av->offset);
4608 print_generic_expr (f, av->value, 0);
4609 comma = true;
4611 fprintf (f, "\n");
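/* Note: with the format above a chain prints as, e.g.

     Aggregate replacements: 0[0]=42, 1[16]=&var

i.e. comma-separated INDEX[OFFSET]=VALUE entries.  */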
4614 /* Stream out jump function JUMP_FUNC to OB. */
4616 static void
4617 ipa_write_jump_function (struct output_block *ob,
4618 struct ipa_jump_func *jump_func)
4620 struct ipa_agg_jf_item *item;
4621 struct bitpack_d bp;
4622 int i, count;
4624 streamer_write_uhwi (ob, jump_func->type);
4625 switch (jump_func->type)
4627 case IPA_JF_UNKNOWN:
4628 break;
4629 case IPA_JF_CONST:
4630 gcc_assert (
4631 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4632 stream_write_tree (ob, jump_func->value.constant.value, true);
4633 break;
4634 case IPA_JF_PASS_THROUGH:
4635 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4636 if (jump_func->value.pass_through.operation == NOP_EXPR)
4638 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4639 bp = bitpack_create (ob->main_stream);
4640 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4641 streamer_write_bitpack (&bp);
4643 else
4645 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4646 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4648 break;
4649 case IPA_JF_ANCESTOR:
4650 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4651 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4652 bp = bitpack_create (ob->main_stream);
4653 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4654 streamer_write_bitpack (&bp);
4655 break;
4658 count = vec_safe_length (jump_func->agg.items);
4659 streamer_write_uhwi (ob, count);
4660 if (count)
4662 bp = bitpack_create (ob->main_stream);
4663 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4664 streamer_write_bitpack (&bp);
4667 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4669 streamer_write_uhwi (ob, item->offset);
4670 stream_write_tree (ob, item->value, true);
4673 bp = bitpack_create (ob->main_stream);
4674 bp_pack_value (&bp, jump_func->bits.known, 1);
4675 streamer_write_bitpack (&bp);
4676 if (jump_func->bits.known)
4678 streamer_write_widest_int (ob, jump_func->bits.value);
4679 streamer_write_widest_int (ob, jump_func->bits.mask);
4681 bp_pack_value (&bp, jump_func->vr_known, 1);
4682 streamer_write_bitpack (&bp);
4683 if (jump_func->vr_known)
4685 streamer_write_enum (ob->main_stream, value_range_type,
4686 VR_LAST, jump_func->m_vr.type);
4687 stream_write_tree (ob, jump_func->m_vr.min, true);
4688 stream_write_tree (ob, jump_func->m_vr.max, true);
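/* Summary of the record written above; ipa_read_jump_function below must
consume the fields in exactly this order:

     uhwi     jump function type
     ...      type-specific payload (constant tree, or pass-through
              operation/operand/formal id, or ancestor offset/formal id)
     uhwi     number of aggregate items, then a by_ref bitpack and the
              offset/value pairs themselves
     bitpack  bits.known, followed by value and mask as widest ints when
              known
     bitpack  vr_known, followed by the range type enum, min and max when
              known  */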
4692 /* Read in jump function JUMP_FUNC from IB. */
4694 static void
4695 ipa_read_jump_function (struct lto_input_block *ib,
4696 struct ipa_jump_func *jump_func,
4697 struct cgraph_edge *cs,
4698 struct data_in *data_in)
4700 enum jump_func_type jftype;
4701 enum tree_code operation;
4702 int i, count;
4704 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4705 switch (jftype)
4707 case IPA_JF_UNKNOWN:
4708 ipa_set_jf_unknown (jump_func);
4709 break;
4710 case IPA_JF_CONST:
4711 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4712 break;
4713 case IPA_JF_PASS_THROUGH:
4714 operation = (enum tree_code) streamer_read_uhwi (ib);
4715 if (operation == NOP_EXPR)
4717 int formal_id = streamer_read_uhwi (ib);
4718 struct bitpack_d bp = streamer_read_bitpack (ib);
4719 bool agg_preserved = bp_unpack_value (&bp, 1);
4720 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4722 else
4724 tree operand = stream_read_tree (ib, data_in);
4725 int formal_id = streamer_read_uhwi (ib);
4726 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4727 operation);
4729 break;
4730 case IPA_JF_ANCESTOR:
4732 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4733 int formal_id = streamer_read_uhwi (ib);
4734 struct bitpack_d bp = streamer_read_bitpack (ib);
4735 bool agg_preserved = bp_unpack_value (&bp, 1);
4736 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4737 break;
4741 count = streamer_read_uhwi (ib);
4742 vec_alloc (jump_func->agg.items, count);
4743 if (count)
4745 struct bitpack_d bp = streamer_read_bitpack (ib);
4746 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4748 for (i = 0; i < count; i++)
4750 struct ipa_agg_jf_item item;
4751 item.offset = streamer_read_uhwi (ib);
4752 item.value = stream_read_tree (ib, data_in);
4753 jump_func->agg.items->quick_push (item);
4756 struct bitpack_d bp = streamer_read_bitpack (ib);
4757 bool bits_known = bp_unpack_value (&bp, 1);
4758 if (bits_known)
4760 jump_func->bits.known = true;
4761 jump_func->bits.value = streamer_read_widest_int (ib);
4762 jump_func->bits.mask = streamer_read_widest_int (ib);
4764 else
4765 jump_func->bits.known = false;
4767 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4768 bool vr_known = bp_unpack_value (&vr_bp, 1);
4769 if (vr_known)
4771 jump_func->vr_known = true;
4772 jump_func->m_vr.type = streamer_read_enum (ib,
4773 value_range_type,
4774 VR_LAST);
4775 jump_func->m_vr.min = stream_read_tree (ib, data_in);
4776 jump_func->m_vr.max = stream_read_tree (ib, data_in);
4778 else
4779 jump_func->vr_known = false;
4782 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4783 relevant to indirect inlining to OB. */
4785 static void
4786 ipa_write_indirect_edge_info (struct output_block *ob,
4787 struct cgraph_edge *cs)
4789 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4790 struct bitpack_d bp;
4792 streamer_write_hwi (ob, ii->param_index);
4793 bp = bitpack_create (ob->main_stream);
4794 bp_pack_value (&bp, ii->polymorphic, 1);
4795 bp_pack_value (&bp, ii->agg_contents, 1);
4796 bp_pack_value (&bp, ii->member_ptr, 1);
4797 bp_pack_value (&bp, ii->by_ref, 1);
4798 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
4799 bp_pack_value (&bp, ii->vptr_changed, 1);
4800 streamer_write_bitpack (&bp);
4801 if (ii->agg_contents || ii->polymorphic)
4802 streamer_write_hwi (ob, ii->offset);
4803 else
4804 gcc_assert (ii->offset == 0);
4806 if (ii->polymorphic)
4808 streamer_write_hwi (ob, ii->otr_token);
4809 stream_write_tree (ob, ii->otr_type, true);
4810 ii->context.stream_out (ob);
4814 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4815 relevant to indirect inlining from IB. */
4817 static void
4818 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4819 struct data_in *data_in,
4820 struct cgraph_edge *cs)
4822 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4823 struct bitpack_d bp;
4825 ii->param_index = (int) streamer_read_hwi (ib);
4826 bp = streamer_read_bitpack (ib);
4827 ii->polymorphic = bp_unpack_value (&bp, 1);
4828 ii->agg_contents = bp_unpack_value (&bp, 1);
4829 ii->member_ptr = bp_unpack_value (&bp, 1);
4830 ii->by_ref = bp_unpack_value (&bp, 1);
4831 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
4832 ii->vptr_changed = bp_unpack_value (&bp, 1);
4833 if (ii->agg_contents || ii->polymorphic)
4834 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4835 else
4836 ii->offset = 0;
4837 if (ii->polymorphic)
4839 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4840 ii->otr_type = stream_read_tree (ib, data_in);
4841 ii->context.stream_in (ib, data_in);
4845 /* Stream out NODE info to OB. */
4847 static void
4848 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4850 int node_ref;
4851 lto_symtab_encoder_t encoder;
4852 struct ipa_node_params *info = IPA_NODE_REF (node);
4853 int j;
4854 struct cgraph_edge *e;
4855 struct bitpack_d bp;
4857 encoder = ob->decl_state->symtab_node_encoder;
4858 node_ref = lto_symtab_encoder_encode (encoder, node);
4859 streamer_write_uhwi (ob, node_ref);
4861 streamer_write_uhwi (ob, ipa_get_param_count (info));
4862 for (j = 0; j < ipa_get_param_count (info); j++)
4863 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4864 bp = bitpack_create (ob->main_stream);
4865 gcc_assert (info->analysis_done
4866 || ipa_get_param_count (info) == 0);
4867 gcc_assert (!info->node_enqueued);
4868 gcc_assert (!info->ipcp_orig_node);
4869 for (j = 0; j < ipa_get_param_count (info); j++)
4870 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4871 streamer_write_bitpack (&bp);
4872 for (j = 0; j < ipa_get_param_count (info); j++)
4873 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4874 for (e = node->callees; e; e = e->next_callee)
4876 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4878 streamer_write_uhwi (ob,
4879 ipa_get_cs_argument_count (args) * 2
4880 + (args->polymorphic_call_contexts != NULL));
4881 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4883 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4884 if (args->polymorphic_call_contexts != NULL)
4885 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4888 for (e = node->indirect_calls; e; e = e->next_callee)
4890 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4892 streamer_write_uhwi (ob,
4893 ipa_get_cs_argument_count (args) * 2
4894 + (args->polymorphic_call_contexts != NULL));
4895 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4897 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4898 if (args->polymorphic_call_contexts != NULL)
4899 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4901 ipa_write_indirect_edge_info (ob, e);
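/* Note: the per-edge count streamed above doubles as a flag carrier,
ARGUMENT_COUNT * 2 + (1 if polymorphic call contexts are present); the
reader below recovers both with

     bool contexts_computed = count & 1;
     count /= 2;

so e.g. three arguments stream as 7 with contexts and as 6 without.  */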
4905 /* Stream in NODE info from IB. */
4907 static void
4908 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4909 struct data_in *data_in)
4911 struct ipa_node_params *info = IPA_NODE_REF (node);
4912 int k;
4913 struct cgraph_edge *e;
4914 struct bitpack_d bp;
4916 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4918 for (k = 0; k < ipa_get_param_count (info); k++)
4919 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4921 bp = streamer_read_bitpack (ib);
4922 if (ipa_get_param_count (info) != 0)
4923 info->analysis_done = true;
4924 info->node_enqueued = false;
4925 for (k = 0; k < ipa_get_param_count (info); k++)
4926 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4927 for (k = 0; k < ipa_get_param_count (info); k++)
4928 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4929 for (e = node->callees; e; e = e->next_callee)
4931 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4932 int count = streamer_read_uhwi (ib);
4933 bool contexts_computed = count & 1;
4934 count /= 2;
4936 if (!count)
4937 continue;
4938 vec_safe_grow_cleared (args->jump_functions, count);
4939 if (contexts_computed)
4940 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4942 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4944 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4945 data_in);
4946 if (contexts_computed)
4947 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4950 for (e = node->indirect_calls; e; e = e->next_callee)
4952 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4953 int count = streamer_read_uhwi (ib);
4954 bool contexts_computed = count & 1;
4955 count /= 2;
4957 if (count)
4959 vec_safe_grow_cleared (args->jump_functions, count);
4960 if (contexts_computed)
4961 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4962 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4964 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4965 data_in);
4966 if (contexts_computed)
4967 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4970 ipa_read_indirect_edge_info (ib, data_in, e);
4974 /* Write jump functions of all analyzed nodes in the current partition. */
4976 void
4977 ipa_prop_write_jump_functions (void)
4979 struct cgraph_node *node;
4980 struct output_block *ob;
4981 unsigned int count = 0;
4982 lto_symtab_encoder_iterator lsei;
4983 lto_symtab_encoder_t encoder;
4985 if (!ipa_node_params_sum)
4986 return;
4988 ob = create_output_block (LTO_section_jump_functions);
4989 encoder = ob->decl_state->symtab_node_encoder;
4990 ob->symbol = NULL;
4991 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4992 lsei_next_function_in_partition (&lsei))
4994 node = lsei_cgraph_node (lsei);
4995 if (node->has_gimple_body_p ()
4996 && IPA_NODE_REF (node) != NULL)
4997 count++;
5000 streamer_write_uhwi (ob, count);
5002 /* Process all of the functions. */
5003 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5004 lsei_next_function_in_partition (&lsei))
5006 node = lsei_cgraph_node (lsei);
5007 if (node->has_gimple_body_p ()
5008 && IPA_NODE_REF (node) != NULL)
5009 ipa_write_node_info (ob, node);
5011 streamer_write_char_stream (ob->main_stream, 0);
5012 produce_asm (ob, NULL);
5013 destroy_output_block (ob);
5016 /* Read section in file FILE_DATA of length LEN with data DATA. */
5018 static void
5019 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
5020 size_t len)
5022 const struct lto_function_header *header =
5023 (const struct lto_function_header *) data;
5024 const int cfg_offset = sizeof (struct lto_function_header);
5025 const int main_offset = cfg_offset + header->cfg_size;
5026 const int string_offset = main_offset + header->main_size;
5027 struct data_in *data_in;
5028 unsigned int i;
5029 unsigned int count;
5031 lto_input_block ib_main ((const char *) data + main_offset,
5032 header->main_size, file_data->mode_table);
5034 data_in =
5035 lto_data_in_create (file_data, (const char *) data + string_offset,
5036 header->string_size, vNULL);
5037 count = streamer_read_uhwi (&ib_main);
5039 for (i = 0; i < count; i++)
5041 unsigned int index;
5042 struct cgraph_node *node;
5043 lto_symtab_encoder_t encoder;
5045 index = streamer_read_uhwi (&ib_main);
5046 encoder = file_data->symtab_node_encoder;
5047 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5048 index));
5049 gcc_assert (node->definition);
5050 ipa_read_node_info (&ib_main, node, data_in);
5052 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5053 len);
5054 lto_data_in_delete (data_in);
5057 /* Read ipcp jump functions. */
5059 void
5060 ipa_prop_read_jump_functions (void)
5062 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5063 struct lto_file_decl_data *file_data;
5064 unsigned int j = 0;
5066 ipa_check_create_node_params ();
5067 ipa_check_create_edge_args ();
5068 ipa_register_cgraph_hooks ();
5070 while ((file_data = file_data_vec[j++]))
5072 size_t len;
5073 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions,
5074 NULL, &len);
5075 if (data)
5076 ipa_prop_read_section (file_data, data, len);
5080 /* After merging units, we can get a mismatch in argument counts, and decl
5081 merging might have rendered parameter lists obsolete, so make sure the
5082 per-node and per-edge summaries exist. */
5084 void
5085 ipa_update_after_lto_read (void)
5087 ipa_check_create_node_params ();
5088 ipa_check_create_edge_args ();
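/* Stream out the IPA-CP transformation summary of NODE, i.e. its
aggregate replacement values, known value ranges and known bits of
parameters, to OB. */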
5091 void
5092 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
5094 int node_ref;
5095 unsigned int count = 0;
5096 lto_symtab_encoder_t encoder;
5097 struct ipa_agg_replacement_value *aggvals, *av;
5099 aggvals = ipa_get_agg_replacements_for_node (node);
5100 encoder = ob->decl_state->symtab_node_encoder;
5101 node_ref = lto_symtab_encoder_encode (encoder, node);
5102 streamer_write_uhwi (ob, node_ref);
5104 for (av = aggvals; av; av = av->next)
5105 count++;
5106 streamer_write_uhwi (ob, count);
5108 for (av = aggvals; av; av = av->next)
5110 struct bitpack_d bp;
5112 streamer_write_uhwi (ob, av->offset);
5113 streamer_write_uhwi (ob, av->index);
5114 stream_write_tree (ob, av->value, true);
5116 bp = bitpack_create (ob->main_stream);
5117 bp_pack_value (&bp, av->by_ref, 1);
5118 streamer_write_bitpack (&bp);
5121 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5122 if (ts && vec_safe_length (ts->m_vr) > 0)
5124 count = ts->m_vr->length ();
5125 streamer_write_uhwi (ob, count);
5126 for (unsigned i = 0; i < count; ++i)
5128 struct bitpack_d bp;
5129 ipa_vr *parm_vr = &(*ts->m_vr)[i];
5130 bp = bitpack_create (ob->main_stream);
5131 bp_pack_value (&bp, parm_vr->known, 1);
5132 streamer_write_bitpack (&bp);
5133 if (parm_vr->known)
5135 streamer_write_enum (ob->main_stream, value_range_type,
5136 VR_LAST, parm_vr->type);
5137 streamer_write_wide_int (ob, parm_vr->min);
5138 streamer_write_wide_int (ob, parm_vr->max);
5142 else
5143 streamer_write_uhwi (ob, 0);
5145 if (ts && vec_safe_length (ts->bits) > 0)
5147 count = ts->bits->length ();
5148 streamer_write_uhwi (ob, count);
5150 for (unsigned i = 0; i < count; ++i)
5152 const ipa_bits& bits_jfunc = (*ts->bits)[i];
5153 struct bitpack_d bp = bitpack_create (ob->main_stream);
5154 bp_pack_value (&bp, bits_jfunc.known, 1);
5155 streamer_write_bitpack (&bp);
5156 if (bits_jfunc.known)
5158 streamer_write_widest_int (ob, bits_jfunc.value);
5159 streamer_write_widest_int (ob, bits_jfunc.mask);
5163 else
5164 streamer_write_uhwi (ob, 0);
5167 /* Stream in the IPA-CP transformation summary for NODE from IB. */
5169 static void
5170 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
5171 data_in *data_in)
5173 struct ipa_agg_replacement_value *aggvals = NULL;
5174 unsigned int count, i;
5176 count = streamer_read_uhwi (ib);
5177 for (i = 0; i < count; i++)
5179 struct ipa_agg_replacement_value *av;
5180 struct bitpack_d bp;
5182 av = ggc_alloc<ipa_agg_replacement_value> ();
5183 av->offset = streamer_read_uhwi (ib);
5184 av->index = streamer_read_uhwi (ib);
5185 av->value = stream_read_tree (ib, data_in);
5186 bp = streamer_read_bitpack (ib);
5187 av->by_ref = bp_unpack_value (&bp, 1);
5188 av->next = aggvals;
5189 aggvals = av;
5191 ipa_set_node_agg_value_chain (node, aggvals);
5193 count = streamer_read_uhwi (ib);
5194 if (count > 0)
5196 ipcp_grow_transformations_if_necessary ();
5198 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5199 vec_safe_grow_cleared (ts->m_vr, count);
5200 for (i = 0; i < count; i++)
5202 ipa_vr *parm_vr;
5203 parm_vr = &(*ts->m_vr)[i];
5204 struct bitpack_d bp;
5205 bp = streamer_read_bitpack (ib);
5206 parm_vr->known = bp_unpack_value (&bp, 1);
5207 if (parm_vr->known)
5209 parm_vr->type = streamer_read_enum (ib, value_range_type,
5210 VR_LAST);
5211 parm_vr->min = streamer_read_wide_int (ib);
5212 parm_vr->max = streamer_read_wide_int (ib);
5216 count = streamer_read_uhwi (ib);
5217 if (count > 0)
5219 ipcp_grow_transformations_if_necessary ();
5221 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5222 vec_safe_grow_cleared (ts->bits, count);
5224 for (i = 0; i < count; i++)
5226 ipa_bits& bits_jfunc = (*ts->bits)[i];
5227 struct bitpack_d bp = streamer_read_bitpack (ib);
5228 bits_jfunc.known = bp_unpack_value (&bp, 1);
5229 if (bits_jfunc.known)
5231 bits_jfunc.value = streamer_read_widest_int (ib);
5232 bits_jfunc.mask = streamer_read_widest_int (ib);
5238 /* Write IPA-CP transformation summaries of all nodes in the partition. */
5240 void
5241 ipcp_write_transformation_summaries (void)
5243 struct cgraph_node *node;
5244 struct output_block *ob;
5245 unsigned int count = 0;
5246 lto_symtab_encoder_iterator lsei;
5247 lto_symtab_encoder_t encoder;
5249 ob = create_output_block (LTO_section_ipcp_transform);
5250 encoder = ob->decl_state->symtab_node_encoder;
5251 ob->symbol = NULL;
5252 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5253 lsei_next_function_in_partition (&lsei))
5255 node = lsei_cgraph_node (lsei);
5256 if (node->has_gimple_body_p ())
5257 count++;
5260 streamer_write_uhwi (ob, count);
5262 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5263 lsei_next_function_in_partition (&lsei))
5265 node = lsei_cgraph_node (lsei);
5266 if (node->has_gimple_body_p ())
5267 write_ipcp_transformation_info (ob, node);
5269 streamer_write_char_stream (ob->main_stream, 0);
5270 produce_asm (ob, NULL);
5271 destroy_output_block (ob);
5274 /* Read replacements section in file FILE_DATA of length LEN with data
5275 DATA. */
5277 static void
5278 read_replacements_section (struct lto_file_decl_data *file_data,
5279 const char *data,
5280 size_t len)
5282 const struct lto_function_header *header =
5283 (const struct lto_function_header *) data;
5284 const int cfg_offset = sizeof (struct lto_function_header);
5285 const int main_offset = cfg_offset + header->cfg_size;
5286 const int string_offset = main_offset + header->main_size;
5287 struct data_in *data_in;
5288 unsigned int i;
5289 unsigned int count;
5291 lto_input_block ib_main ((const char *) data + main_offset,
5292 header->main_size, file_data->mode_table);
5294 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5295 header->string_size, vNULL);
5296 count = streamer_read_uhwi (&ib_main);
5298 for (i = 0; i < count; i++)
5300 unsigned int index;
5301 struct cgraph_node *node;
5302 lto_symtab_encoder_t encoder;
5304 index = streamer_read_uhwi (&ib_main);
5305 encoder = file_data->symtab_node_encoder;
5306 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5307 index));
5308 gcc_assert (node->definition);
5309 read_ipcp_transformation_info (&ib_main, node, data_in);
5311 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
5312 len);
5313 lto_data_in_delete (data_in);
5316 /* Read IPA-CP aggregate replacements. */
5318 void
5319 ipcp_read_transformation_summaries (void)
5321 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5322 struct lto_file_decl_data *file_data;
5323 unsigned int j = 0;
5325 while ((file_data = file_data_vec[j++]))
5327 size_t len;
5328 const char *data = lto_get_section_data (file_data,
5329 LTO_section_ipcp_transform,
5330 NULL, &len);
5331 if (data)
5332 read_replacements_section (file_data, data, len);
5336 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5337 NODE. */
5339 static void
5340 adjust_agg_replacement_values (struct cgraph_node *node,
5341 struct ipa_agg_replacement_value *aggval)
5343 struct ipa_agg_replacement_value *v;
5344 int i, c = 0, d = 0, *adj;
5346 if (!node->clone.combined_args_to_skip)
5347 return;
5349 for (v = aggval; v; v = v->next)
5351 gcc_assert (v->index >= 0);
5352 if (c < v->index)
5353 c = v->index;
5355 c++;
5357 adj = XALLOCAVEC (int, c);
5358 for (i = 0; i < c; i++)
5359 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5361 adj[i] = -1;
5362 d++;
5364 else
5365 adj[i] = i - d;
5367 for (v = aggval; v; v = v->next)
5368 v->index = adj[v->index];
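/* Worked example: with combined_args_to_skip = {1} and replacements for
original indices 0, 2 and 3, the table computed above is
adj = {0, -1, 1, 2}, so the surviving replacements are renumbered to
0, 1 and 2 to match the clone's shortened parameter list.  */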
5371 /* Dominator walker driving the ipcp modification phase. */
5373 class ipcp_modif_dom_walker : public dom_walker
5375 public:
5376 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
5377 vec<ipa_param_descriptor> descs,
5378 struct ipa_agg_replacement_value *av,
5379 bool *sc, bool *cc)
5380 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5381 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5383 virtual edge before_dom_children (basic_block);
5385 private:
5386 struct ipa_func_body_info *m_fbi;
5387 vec<ipa_param_descriptor> m_descriptors;
5388 struct ipa_agg_replacement_value *m_aggval;
5389 bool *m_something_changed, *m_cfg_changed;
5392 edge
5393 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5395 gimple_stmt_iterator gsi;
5396 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5398 struct ipa_agg_replacement_value *v;
5399 gimple *stmt = gsi_stmt (gsi);
5400 tree rhs, val, t;
5401 HOST_WIDE_INT offset, size;
5402 int index;
5403 bool by_ref, vce;
5405 if (!gimple_assign_load_p (stmt))
5406 continue;
5407 rhs = gimple_assign_rhs1 (stmt);
5408 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5409 continue;
5411 vce = false;
5412 t = rhs;
5413 while (handled_component_p (t))
5415 /* V_C_E can do things like convert an array of integers to one
5416 bigger integer and similar things we do not handle below. */
5417 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5419 vce = true;
5420 break;
5422 t = TREE_OPERAND (t, 0);
5424 if (vce)
5425 continue;
5427 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
5428 &offset, &size, &by_ref))
5429 continue;
5430 for (v = m_aggval; v; v = v->next)
5431 if (v->index == index
5432 && v->offset == offset)
5433 break;
5434 if (!v
5435 || v->by_ref != by_ref
5436 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5437 continue;
5439 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5440 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5442 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5443 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5444 else if (TYPE_SIZE (TREE_TYPE (rhs))
5445 == TYPE_SIZE (TREE_TYPE (v->value)))
5446 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5447 else
5449 if (dump_file)
5451 fprintf (dump_file, " const ");
5452 print_generic_expr (dump_file, v->value, 0);
5453 fprintf (dump_file, " can't be converted to type of ");
5454 print_generic_expr (dump_file, rhs, 0);
5455 fprintf (dump_file, "\n");
5457 continue;
5460 else
5461 val = v->value;
5463 if (dump_file && (dump_flags & TDF_DETAILS))
5465 fprintf (dump_file, "Modifying stmt:\n ");
5466 print_gimple_stmt (dump_file, stmt, 0, 0);
5468 gimple_assign_set_rhs_from_tree (&gsi, val);
5469 update_stmt (stmt);
5471 if (dump_file && (dump_flags & TDF_DETAILS))
5473 fprintf (dump_file, "into:\n ");
5474 print_gimple_stmt (dump_file, stmt, 0, 0);
5475 fprintf (dump_file, "\n");
5478 *m_something_changed = true;
5479 if (maybe_clean_eh_stmt (stmt)
5480 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5481 *m_cfg_changed = true;
5483 return NULL;
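/* Note: the net effect of the walker above is to replace loads from
statically known aggregate contents of parameters with constants.  E.g.
given a recorded replacement 0[32]=42 for the by-ref parameter p:

     x_1 = MEM[(struct s *)p_2(D) + 4B];   ==>   x_1 = 42;

provided the index, offset, size and by_ref flag of the load all match
the recorded value.  */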
5486 /* Update bits info of formal parameters as described in
5487 ipcp_transformation_summary. */
5489 static void
5490 ipcp_update_bits (struct cgraph_node *node)
5492 tree parm = DECL_ARGUMENTS (node->decl);
5493 tree next_parm = parm;
5494 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5496 if (!ts || vec_safe_length (ts->bits) == 0)
5497 return;
5499 vec<ipa_bits, va_gc> &bits = *ts->bits;
5500 unsigned count = bits.length ();
5502 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5504 if (node->clone.combined_args_to_skip
5505 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5506 continue;
5508 gcc_checking_assert (parm);
5509 next_parm = DECL_CHAIN (parm);
5511 if (!bits[i].known
5512 || !(INTEGRAL_TYPE_P (TREE_TYPE (parm)) || POINTER_TYPE_P (TREE_TYPE (parm)))
5513 || !is_gimple_reg (parm))
5514 continue;
5516 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5517 if (!ddef)
5518 continue;
5520 if (dump_file)
5522 fprintf (dump_file, "Adjusting mask for param %u to ", i);
5523 print_hex (bits[i].mask, dump_file);
5524 fprintf (dump_file, "\n");
5527 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5529 unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
5530 signop sgn = TYPE_SIGN (TREE_TYPE (ddef));
5532 wide_int nonzero_bits = wide_int::from (bits[i].mask, prec, UNSIGNED)
5533 | wide_int::from (bits[i].value, prec, sgn);
5534 set_nonzero_bits (ddef, nonzero_bits);
5536 else
5538 unsigned tem = bits[i].mask.to_uhwi ();
5539 unsigned HOST_WIDE_INT bitpos = bits[i].value.to_uhwi ();
5540 unsigned align = tem & -tem;
5541 unsigned misalign = bitpos & (align - 1);
5543 if (align > 1)
5545 if (dump_file)
5546 fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);
5548 unsigned old_align, old_misalign;
5549 struct ptr_info_def *pi = get_ptr_info (ddef);
5550 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5552 if (old_known
5553 && old_align > align)
5555 if (dump_file)
5557 fprintf (dump_file, "But alignment was already %u.\n", old_align);
5558 if ((old_misalign & (align - 1)) != misalign)
5559 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5560 old_misalign, misalign);
5562 continue;
5565 if (old_known
5566 && ((misalign & (old_align - 1)) != old_misalign)
5567 && dump_file)
5568 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5569 old_misalign, misalign);
5571 set_ptr_info_alignment (pi, align, misalign);
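/* Worked example: MASK has 1s in the bit positions that are unknown, so
for a pointer whose low three bits are known to be 100 (mask =
0xfffffff8, value = 0x4), TEM & -TEM isolates the lowest unknown bit,
giving align = 8, and BITPOS & (ALIGN - 1) gives misalign = 4: the
pointer is known to be 4 modulo 8.  */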
5577 /* Update value range of formal parameters as described in
5578 ipcp_transformation_summary. */
5580 static void
5581 ipcp_update_vr (struct cgraph_node *node)
5583 tree fndecl = node->decl;
5584 tree parm = DECL_ARGUMENTS (fndecl);
5585 tree next_parm = parm;
5586 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5587 if (!ts || vec_safe_length (ts->m_vr) == 0)
5588 return;
5589 const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
5590 unsigned count = vr.length ();
5592 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5594 if (node->clone.combined_args_to_skip
5595 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5596 continue;
5597 gcc_checking_assert (parm);
5598 next_parm = DECL_CHAIN (parm);
5599 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5601 if (!ddef || !is_gimple_reg (parm))
5602 continue;
5604 if (vr[i].known
5605 && INTEGRAL_TYPE_P (TREE_TYPE (ddef))
5606 && !POINTER_TYPE_P (TREE_TYPE (ddef))
5607 && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
5609 tree type = TREE_TYPE (ddef);
5610 unsigned prec = TYPE_PRECISION (type);
5611 if (dump_file)
5613 fprintf (dump_file, "Setting value range of param %u ", i);
5614 fprintf (dump_file, "%s[",
5615 (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
5616 print_decs (vr[i].min, dump_file);
5617 fprintf (dump_file, ", ");
5618 print_decs (vr[i].max, dump_file);
5619 fprintf (dump_file, "]\n");
5621 set_range_info (ddef, vr[i].type,
5622 wide_int_storage::from (vr[i].min, prec,
5623 TYPE_SIGN (type)),
5624 wide_int_storage::from (vr[i].max, prec,
5625 TYPE_SIGN (type)));
5630 /* IPCP transformation phase applying aggregate replacements, bits and VR. */
5632 unsigned int
5633 ipcp_transform_function (struct cgraph_node *node)
5635 vec<ipa_param_descriptor> descriptors = vNULL;
5636 struct ipa_func_body_info fbi;
5637 struct ipa_agg_replacement_value *aggval;
5638 int param_count;
5639 bool cfg_changed = false, something_changed = false;
5641 gcc_checking_assert (cfun);
5642 gcc_checking_assert (current_function_decl);
5644 if (dump_file)
5645 fprintf (dump_file, "Modification phase of node %s/%i\n",
5646 node->name (), node->order);
5648 ipcp_update_bits (node);
5649 ipcp_update_vr (node);
5650 aggval = ipa_get_agg_replacements_for_node (node);
5651 if (!aggval)
5652 return 0;
5653 param_count = count_formal_params (node->decl);
5654 if (param_count == 0)
5655 return 0;
5656 adjust_agg_replacement_values (node, aggval);
5657 if (dump_file)
5658 ipa_dump_agg_replacement_values (dump_file, aggval);
5660 fbi.node = node;
5661 fbi.info = NULL;
5662 fbi.bb_infos = vNULL;
5663 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5664 fbi.param_count = param_count;
5665 fbi.aa_walked = 0;
5667 descriptors.safe_grow_cleared (param_count);
5668 ipa_populate_param_decls (node, descriptors);
5669 calculate_dominance_info (CDI_DOMINATORS);
5670 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5671 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5673 int i;
5674 struct ipa_bb_info *bi;
5675 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5676 free_ipa_bb_info (bi);
5677 fbi.bb_infos.release ();
5678 free_dominance_info (CDI_DOMINATORS);
5679 (*ipcp_transformations)[node->uid].agg_values = NULL;
5680 descriptors.release ();
5682 if (!something_changed)
5683 return 0;
5684 else if (cfg_changed)
5685 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5686 else
5687 return TODO_update_ssa_only_virtuals;