1 /* Interprocedural analyses.
2 Copyright (C) 2005-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "hash-set.h"
24 #include "machmode.h"
25 #include "vec.h"
26 #include "double-int.h"
27 #include "input.h"
28 #include "alias.h"
29 #include "symtab.h"
30 #include "options.h"
31 #include "wide-int.h"
32 #include "inchash.h"
33 #include "tree.h"
34 #include "fold-const.h"
35 #include "predict.h"
36 #include "tm.h"
37 #include "hard-reg-set.h"
38 #include "function.h"
39 #include "dominance.h"
40 #include "cfg.h"
41 #include "basic-block.h"
42 #include "tree-ssa-alias.h"
43 #include "internal-fn.h"
44 #include "gimple-fold.h"
45 #include "tree-eh.h"
46 #include "gimple-expr.h"
47 #include "is-a.h"
48 #include "gimple.h"
49 #include "hashtab.h"
50 #include "rtl.h"
51 #include "flags.h"
52 #include "statistics.h"
53 #include "real.h"
54 #include "fixed-value.h"
55 #include "insn-config.h"
56 #include "expmed.h"
57 #include "dojump.h"
58 #include "explow.h"
59 #include "calls.h"
60 #include "emit-rtl.h"
61 #include "varasm.h"
62 #include "stmt.h"
63 #include "expr.h"
64 #include "stor-layout.h"
65 #include "print-tree.h"
66 #include "gimplify.h"
67 #include "gimple-iterator.h"
68 #include "gimplify-me.h"
69 #include "gimple-walk.h"
70 #include "langhooks.h"
71 #include "target.h"
72 #include "hash-map.h"
73 #include "plugin-api.h"
74 #include "ipa-ref.h"
75 #include "cgraph.h"
76 #include "alloc-pool.h"
77 #include "symbol-summary.h"
78 #include "ipa-prop.h"
79 #include "bitmap.h"
80 #include "gimple-ssa.h"
81 #include "tree-cfg.h"
82 #include "tree-phinodes.h"
83 #include "ssa-iterators.h"
84 #include "tree-into-ssa.h"
85 #include "tree-dfa.h"
86 #include "tree-pass.h"
87 #include "tree-inline.h"
88 #include "ipa-inline.h"
89 #include "diagnostic.h"
90 #include "gimple-pretty-print.h"
91 #include "lto-streamer.h"
92 #include "data-streamer.h"
93 #include "tree-streamer.h"
94 #include "params.h"
95 #include "ipa-utils.h"
96 #include "stringpool.h"
97 #include "tree-ssanames.h"
98 #include "dbgcnt.h"
99 #include "domwalk.h"
100 #include "builtins.h"
102 /* Intermediate information that we get from alias analysis about a particular
103 parameter in a particular basic_block. When a parameter or the memory it
104 references is marked modified, we use that information in all dominated
105 blocks without consulting the alias analysis oracle. */
107 struct param_aa_status
109 /* Set when this structure contains meaningful information. If not, the
110 structure describing a dominating BB should be used instead. */
111 bool valid;
113 /* Whether we have seen something which might have modified the data in
114 question. PARM is for the parameter itself, REF is for data it points to
115 but using the alias type of individual accesses and PT is the same thing
116 but for computing aggregate pass-through functions using a very inclusive
117 ao_ref. */
118 bool parm_modified, ref_modified, pt_modified;
121 /* Information related to a given BB that is used only when looking at the function
122 body. */
124 struct ipa_bb_info
126 /* Call graph edges going out of this BB. */
127 vec<cgraph_edge *> cg_edges;
128 /* Alias analysis statuses of each formal parameter at this bb. */
129 vec<param_aa_status> param_aa_statuses;
132 /* Structure with global information that is only used when looking at function
133 body. */
135 struct func_body_info
137 /* The node that is being analyzed. */
138 cgraph_node *node;
140 /* Its info. */
141 struct ipa_node_params *info;
143 /* Information about individual BBs. */
144 vec<ipa_bb_info> bb_infos;
146 /* Number of parameters. */
147 int param_count;
149 /* Number of statements already walked when analyzing this function. */
150 unsigned int aa_walked;
153 /* Function summary where the parameter infos are actually stored. */
154 ipa_node_params_t *ipa_node_params_sum = NULL;
155 /* Vector of IPA-CP transformation data for each clone. */
156 vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
157 /* Vector where the per-edge argument infos (jump functions) are actually stored. */
158 vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;
160 /* Holders of ipa cgraph hooks: */
161 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
162 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
163 static struct cgraph_node_hook_list *function_insertion_hook_holder;
165 /* Description of a reference to an IPA constant. */
166 struct ipa_cst_ref_desc
168 /* Edge that corresponds to the statement which took the reference. */
169 struct cgraph_edge *cs;
170 /* Linked list of duplicates created when call graph edges are cloned. */
171 struct ipa_cst_ref_desc *next_duplicate;
172 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
173 is out of control. */
174 int refcount;
177 /* Allocation pool for reference descriptions. */
179 static alloc_pool ipa_refdesc_pool;
181 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
182 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
184 static bool
185 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
187 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
189 if (!fs_opts)
190 return false;
191 return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
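/* Illustrative example, not part of the original file: a declaration such as

     __attribute__ ((optimize ("O0")))
     void skipped_by_ipa_cp (void);

   gives the function a DECL_FUNCTION_SPECIFIC_OPTIMIZATION node with
   optimize set to zero, so the predicate above returns true and IPA-CP
   leaves the function alone.  The attribute value and the function name are
   hypothetical and only illustrate the intent of the check.  */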
194 /* Return the index of the formal parameter whose tree is PTREE, looked up in
195 the parameter descriptor vector DESCRIPTORS. */
197 static int
198 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
200 int i, count;
202 count = descriptors.length ();
203 for (i = 0; i < count; i++)
204 if (descriptors[i].decl == ptree)
205 return i;
207 return -1;
210 /* Return the index of the formal parameter whose tree is PTREE in the function
211 which corresponds to INFO. */
214 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
216 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
219 /* Populate the decl fields in parameter DESCRIPTORS that correspond to
220 NODE. */
222 static void
223 ipa_populate_param_decls (struct cgraph_node *node,
224 vec<ipa_param_descriptor> &descriptors)
226 tree fndecl;
227 tree fnargs;
228 tree parm;
229 int param_num;
231 fndecl = node->decl;
232 gcc_assert (gimple_has_body_p (fndecl));
233 fnargs = DECL_ARGUMENTS (fndecl);
234 param_num = 0;
235 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
237 descriptors[param_num].decl = parm;
238 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
239 true);
240 param_num++;
244 /* Return how many formal parameters FNDECL has. */
247 count_formal_params (tree fndecl)
249 tree parm;
250 int count = 0;
251 gcc_assert (gimple_has_body_p (fndecl));
253 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
254 count++;
256 return count;
259 /* Dump information about the Ith formal parameter of the function
260 corresponding to INFO into FILE. Note there is no setter function as the
261 descriptor array is built just once using ipa_initialize_node_params. */
263 void
264 ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
266 fprintf (file, "param #%i", i);
267 if (info->descriptors[i].decl)
269 fprintf (file, " ");
270 print_generic_expr (file, info->descriptors[i].decl, 0);
274 /* Initialize the ipa_node_params structure associated with NODE
275 to hold PARAM_COUNT parameters. */
277 void
278 ipa_alloc_node_params (struct cgraph_node *node, int param_count)
280 struct ipa_node_params *info = IPA_NODE_REF (node);
282 if (!info->descriptors.exists () && param_count)
283 info->descriptors.safe_grow_cleared (param_count);
286 /* Initialize the ipa_node_params structure associated with NODE by counting
287 the function parameters, creating the descriptors and populating their
288 param_decls. */
290 void
291 ipa_initialize_node_params (struct cgraph_node *node)
293 struct ipa_node_params *info = IPA_NODE_REF (node);
295 if (!info->descriptors.exists ())
297 ipa_alloc_node_params (node, count_formal_params (node->decl));
298 ipa_populate_param_decls (node, info->descriptors);
302 /* Print the jump functions associated with call graph edge CS to file F. */
304 static void
305 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
307 int i, count;
309 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
310 for (i = 0; i < count; i++)
312 struct ipa_jump_func *jump_func;
313 enum jump_func_type type;
315 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
316 type = jump_func->type;
318 fprintf (f, " param %d: ", i);
319 if (type == IPA_JF_UNKNOWN)
320 fprintf (f, "UNKNOWN\n");
321 else if (type == IPA_JF_CONST)
323 tree val = jump_func->value.constant.value;
324 fprintf (f, "CONST: ");
325 print_generic_expr (f, val, 0);
326 if (TREE_CODE (val) == ADDR_EXPR
327 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
329 fprintf (f, " -> ");
330 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
333 fprintf (f, "\n");
335 else if (type == IPA_JF_PASS_THROUGH)
337 fprintf (f, "PASS THROUGH: ");
338 fprintf (f, "%d, op %s",
339 jump_func->value.pass_through.formal_id,
340 get_tree_code_name(jump_func->value.pass_through.operation));
341 if (jump_func->value.pass_through.operation != NOP_EXPR)
343 fprintf (f, " ");
344 print_generic_expr (f,
345 jump_func->value.pass_through.operand, 0);
347 if (jump_func->value.pass_through.agg_preserved)
348 fprintf (f, ", agg_preserved");
349 fprintf (f, "\n");
351 else if (type == IPA_JF_ANCESTOR)
353 fprintf (f, "ANCESTOR: ");
354 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC,
355 jump_func->value.ancestor.formal_id,
356 jump_func->value.ancestor.offset);
357 if (jump_func->value.ancestor.agg_preserved)
358 fprintf (f, ", agg_preserved");
359 fprintf (f, "\n");
362 if (jump_func->agg.items)
364 struct ipa_agg_jf_item *item;
365 int j;
367 fprintf (f, " Aggregate passed by %s:\n",
368 jump_func->agg.by_ref ? "reference" : "value");
369 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
371 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
372 item->offset);
373 if (TYPE_P (item->value))
374 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
375 tree_to_uhwi (TYPE_SIZE (item->value)));
376 else
378 fprintf (f, "cst: ");
379 print_generic_expr (f, item->value, 0);
381 fprintf (f, "\n");
385 struct ipa_polymorphic_call_context *ctx
386 = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
387 if (ctx && !ctx->useless_p ())
389 fprintf (f, " Context: ");
390 ctx->dump (f);
393 if (jump_func->alignment.known)
395 fprintf (f, " Alignment: %u, misalignment: %u\n",
396 jump_func->alignment.align,
397 jump_func->alignment.misalign);
399 else
400 fprintf (f, " Unknown alignment\n");
405 /* Print the jump functions of all arguments on all call graph edges going from
406 NODE to file F. */
408 void
409 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
411 struct cgraph_edge *cs;
413 fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
414 node->order);
415 for (cs = node->callees; cs; cs = cs->next_callee)
417 if (!ipa_edge_args_info_available_for_edge_p (cs))
418 continue;
420 fprintf (f, " callsite %s/%i -> %s/%i : \n",
421 xstrdup_for_dump (node->name ()), node->order,
422 xstrdup_for_dump (cs->callee->name ()),
423 cs->callee->order);
424 ipa_print_node_jump_functions_for_edge (f, cs);
427 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
429 struct cgraph_indirect_call_info *ii;
430 if (!ipa_edge_args_info_available_for_edge_p (cs))
431 continue;
433 ii = cs->indirect_info;
434 if (ii->agg_contents)
435 fprintf (f, " indirect %s callsite, calling param %i, "
436 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
437 ii->member_ptr ? "member ptr" : "aggregate",
438 ii->param_index, ii->offset,
439 ii->by_ref ? "by reference" : "by_value");
440 else
441 fprintf (f, " indirect %s callsite, calling param %i, "
442 "offset " HOST_WIDE_INT_PRINT_DEC,
443 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
444 ii->offset);
446 if (cs->call_stmt)
448 fprintf (f, ", for stmt ");
449 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
451 else
452 fprintf (f, "\n");
453 if (ii->polymorphic)
454 ii->context.dump (f);
455 ipa_print_node_jump_functions_for_edge (f, cs);
459 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
461 void
462 ipa_print_all_jump_functions (FILE *f)
464 struct cgraph_node *node;
466 fprintf (f, "\nJump functions:\n");
467 FOR_EACH_FUNCTION (node)
469 ipa_print_node_jump_functions (f, node);
473 /* Set JFUNC to be an unknown jump function, i.e. one known to carry no information. */
475 static void
476 ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
478 jfunc->type = IPA_JF_UNKNOWN;
479 jfunc->alignment.known = false;
482 /* Set DST to be a copy of another jump function SRC (to be used by the jump
483 function combination code). The two functions will share their rdesc. */
485 static void
486 ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
487 struct ipa_jump_func *src)
490 gcc_checking_assert (src->type == IPA_JF_CONST);
491 dst->type = IPA_JF_CONST;
492 dst->value.constant = src->value.constant;
495 /* Set JFUNC to be a constant jump function. */
497 static void
498 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
499 struct cgraph_edge *cs)
501 constant = unshare_expr (constant);
502 if (constant && EXPR_P (constant))
503 SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
504 jfunc->type = IPA_JF_CONST;
505 jfunc->value.constant.value = unshare_expr_without_location (constant);
507 if (TREE_CODE (constant) == ADDR_EXPR
508 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
510 struct ipa_cst_ref_desc *rdesc;
511 if (!ipa_refdesc_pool)
512 ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
513 sizeof (struct ipa_cst_ref_desc), 32);
515 rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
516 rdesc->cs = cs;
517 rdesc->next_duplicate = NULL;
518 rdesc->refcount = 1;
519 jfunc->value.constant.rdesc = rdesc;
521 else
522 jfunc->value.constant.rdesc = NULL;
525 /* Set JFUNC to be a simple pass-through jump function. */
526 static void
527 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
528 bool agg_preserved)
530 jfunc->type = IPA_JF_PASS_THROUGH;
531 jfunc->value.pass_through.operand = NULL_TREE;
532 jfunc->value.pass_through.formal_id = formal_id;
533 jfunc->value.pass_through.operation = NOP_EXPR;
534 jfunc->value.pass_through.agg_preserved = agg_preserved;
537 /* Set JFUNC to be an arithmetic pass through jump function. */
539 static void
540 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
541 tree operand, enum tree_code operation)
543 jfunc->type = IPA_JF_PASS_THROUGH;
544 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
545 jfunc->value.pass_through.formal_id = formal_id;
546 jfunc->value.pass_through.operation = operation;
547 jfunc->value.pass_through.agg_preserved = false;
550 /* Set JFUNC to be an ancestor jump function. */
552 static void
553 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
554 int formal_id, bool agg_preserved)
556 jfunc->type = IPA_JF_ANCESTOR;
557 jfunc->value.ancestor.formal_id = formal_id;
558 jfunc->value.ancestor.offset = offset;
559 jfunc->value.ancestor.agg_preserved = agg_preserved;
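/* Illustrative example of when the setters above typically fire; the code is
   hypothetical and not part of the original file:

     struct B { struct A base; int x; };
     extern void bar (int, struct A *);

     void
     foo (int a, struct B *b)
     {
       bar (a + 4, &b->base);
     }

   Here the first argument would normally be described by an arithmetic
   pass-through jump function (formal 0, operation PLUS_EXPR, operand 4) and
   the second by an ancestor jump function (formal 1, offset of BASE within
   struct B), provided the analysis further below succeeds.  */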
562 /* Get IPA BB information about the given BB. FBI is the context of analysis
563 of this function body. */
565 static struct ipa_bb_info *
566 ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
568 gcc_checking_assert (fbi);
569 return &fbi->bb_infos[bb->index];
572 /* Structure to be passed in between detect_type_change and
573 check_stmt_for_type_change. */
575 struct prop_type_change_info
577 /* Offset into the object where there is the virtual method pointer we are
578 looking for. */
579 HOST_WIDE_INT offset;
580 /* The declaration or SSA_NAME pointer of the base that we are checking for
581 type change. */
582 tree object;
583 /* Set to true if dynamic type change has been detected. */
584 bool type_maybe_changed;
587 /* Return true if STMT can modify a virtual method table pointer.
589 This function makes special assumptions about both constructors and
590 destructors which are all the functions that are allowed to alter the VMT
591 pointers. It assumes that destructors begin with assignment into all VMT
592 pointers and that constructors essentially look in the following way:
594 1) The very first thing they do is call constructors of ancestor
595 sub-objects that have them.
597 2) Then the VMT pointers of this type and all its ancestors are set to new
598 values corresponding to the type of the constructor.
600 3) Only afterwards, other stuff such as constructors of member sub-objects
601 and the code written by the user is run. Only this may include calling
602 virtual functions, directly or indirectly.
604 There is no way to call a constructor of an ancestor sub-object in any
605 other way.
607 This means that we do not have to care whether constructors get the correct
608 type information because they will always change it (in fact, if we define
609 the type to be given by the VMT pointer, it is undefined).
611 The most important fact to derive from the above is that if, for some
612 statement in section 3, we try to detect whether the dynamic type has
613 changed, we can safely ignore all calls as we examine the function body
614 backwards until we reach statements in section 2 because these calls cannot
615 be ancestor constructors or destructors (if the input is not bogus) and so
616 do not change the dynamic type (this holds true only for automatically
617 allocated objects but at the moment we devirtualize only these). We then
618 must detect that statements in section 2 change the dynamic type and can try
619 to derive the new type. That is enough and we can stop, we will never see
620 the calls into constructors of sub-objects in this code. Therefore we can
621 safely ignore all call statements that we traverse.
624 static bool
625 stmt_may_be_vtbl_ptr_store (gimple stmt)
627 if (is_gimple_call (stmt))
628 return false;
629 if (gimple_clobber_p (stmt))
630 return false;
631 else if (is_gimple_assign (stmt))
633 tree lhs = gimple_assign_lhs (stmt);
635 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
637 if (flag_strict_aliasing
638 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
639 return false;
641 if (TREE_CODE (lhs) == COMPONENT_REF
642 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
643 return false;
644 /* In the future we might want to use get_base_ref_and_offset to find
645 if there is a field corresponding to the offset and if so, proceed
646 almost like if it was a component ref. */
649 return true;
652 /* Callback of walk_aliased_vdefs and a helper function for detect_type_change
653 to check whether a particular statement may modify the virtual table
654 pointer. It stores its result into DATA, which points to a
655 prop_type_change_info structure. */
657 static bool
658 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
660 gimple stmt = SSA_NAME_DEF_STMT (vdef);
661 struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
663 if (stmt_may_be_vtbl_ptr_store (stmt))
665 tci->type_maybe_changed = true;
666 return true;
668 else
669 return false;
672 /* See if ARG is a PARM_DECL describing an instance passed by pointer
673 or reference in FUNCTION. Return true if the dynamic type may change
674 between the beginning of the function and the point where CALL is invoked.
676 Generally functions are not allowed to change the type of such instances,
677 but they may call destructors. We assume that methods cannot destroy the THIS
678 pointer. Also, as a special case, constructors and destructors may change
679 the type of the THIS pointer. */
681 static bool
682 param_type_may_change_p (tree function, tree arg, gimple call)
684 /* Pure functions cannot change the dynamic type;
685 that would require writing to memory. */
686 if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
687 return false;
688 /* We need to check if we are within an inlined constructor
689 or destructor (ideally we would have a way to check that the
690 inlined cdtor is actually working on ARG, but we have no
691 easy way to do that, so punt on all non-pure cdtors).
692 We could also record the types of cdtors and, once we know the type
693 of the instance, match them.
695 Also, code unification optimizations may merge calls from
696 different blocks, making return values unreliable. So
697 do nothing during late optimization. */
698 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
699 return true;
700 if (TREE_CODE (arg) == SSA_NAME
701 && SSA_NAME_IS_DEFAULT_DEF (arg)
702 && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
704 /* Normal (non-THIS) argument. */
705 if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
706 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
707 /* THIS pointer of a method - here we want to watch constructors
708 and destructors as those definitely may change the dynamic
709 type. */
710 || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
711 && !DECL_CXX_CONSTRUCTOR_P (function)
712 && !DECL_CXX_DESTRUCTOR_P (function)
713 && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
715 /* Walk the inline stack and watch out for ctors/dtors. */
716 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
717 block = BLOCK_SUPERCONTEXT (block))
718 if (BLOCK_ABSTRACT_ORIGIN (block)
719 && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
721 tree fn = BLOCK_ABSTRACT_ORIGIN (block);
723 if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
724 continue;
725 if (TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
726 && (DECL_CXX_CONSTRUCTOR_P (fn)
727 || DECL_CXX_DESTRUCTOR_P (fn)))
728 return true;
730 return false;
733 return true;
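/* Illustrative example, hypothetical C++ not taken from the original file:

     struct S { virtual void f (); };

     void
     use (S *s)
     {
       s->f ();
     }

   For the parameter S of USE, the dynamic type of *s cannot change between
   the entry of the function and the call, so the predicate above returns
   false.  Inside a constructor or destructor of S, or once such a cdtor has
   been inlined into the caller, it has to return true because the vtable
   pointer is legitimately rewritten there.  */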
736 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
737 callsite CALL) by looking for assignments to its virtual table pointer. If
738 it is, return true and fill in the jump function JFUNC with relevant type
739 information or set it to unknown. ARG is the object itself (not a pointer
740 to it, unless dereferenced). BASE is the base of the memory access as
741 returned by get_ref_base_and_extent, as is the offset.
743 This is a helper function for detect_type_change and detect_type_change_ssa
744 that does the heavy work, which is usually unnecessary. */
746 static bool
747 detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
748 gcall *call, struct ipa_jump_func *jfunc,
749 HOST_WIDE_INT offset)
751 struct prop_type_change_info tci;
752 ao_ref ao;
753 bool entry_reached = false;
755 gcc_checking_assert (DECL_P (arg)
756 || TREE_CODE (arg) == MEM_REF
757 || handled_component_p (arg));
759 comp_type = TYPE_MAIN_VARIANT (comp_type);
761 /* Const calls cannot call virtual methods through VMT and so type changes do
762 not matter. */
763 if (!flag_devirtualize || !gimple_vuse (call)
764 /* Be sure expected_type is polymorphic. */
765 || !comp_type
766 || TREE_CODE (comp_type) != RECORD_TYPE
767 || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
768 || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
769 return true;
771 ao_ref_init (&ao, arg);
772 ao.base = base;
773 ao.offset = offset;
774 ao.size = POINTER_SIZE;
775 ao.max_size = ao.size;
777 tci.offset = offset;
778 tci.object = get_base_address (arg);
779 tci.type_maybe_changed = false;
781 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
782 &tci, NULL, &entry_reached);
783 if (!tci.type_maybe_changed)
784 return false;
786 ipa_set_jf_unknown (jfunc);
787 return true;
790 /* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
791 If it is, return true and fill in the jump function JFUNC with relevant type
792 information or set it to unknown. ARG is the object itself (not a pointer
793 to it, unless dereferenced). BASE is the base of the memory access as
794 returned by get_ref_base_and_extent, as is the offset. */
796 static bool
797 detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
798 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
800 if (!flag_devirtualize)
801 return false;
803 if (TREE_CODE (base) == MEM_REF
804 && !param_type_may_change_p (current_function_decl,
805 TREE_OPERAND (base, 0),
806 call))
807 return false;
808 return detect_type_change_from_memory_writes (arg, base, comp_type,
809 call, jfunc, offset);
812 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
813 SSA name (its dereference will become the base and the offset is assumed to
814 be zero). */
816 static bool
817 detect_type_change_ssa (tree arg, tree comp_type,
818 gcall *call, struct ipa_jump_func *jfunc)
820 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
821 if (!flag_devirtualize
822 || !POINTER_TYPE_P (TREE_TYPE (arg)))
823 return false;
825 if (!param_type_may_change_p (current_function_decl, arg, call))
826 return false;
828 arg = build2 (MEM_REF, ptr_type_node, arg,
829 build_int_cst (ptr_type_node, 0));
831 return detect_type_change_from_memory_writes (arg, arg, comp_type,
832 call, jfunc, 0);
835 /* Callback of walk_aliased_vdefs. Flags that it has been invoked by setting
836 the boolean variable pointed to by DATA to true. */
838 static bool
839 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
840 void *data)
842 bool *b = (bool *) data;
843 *b = true;
844 return true;
847 /* Return true if we have already walked so many statements in AA that we
848 should really just start giving up. */
850 static bool
851 aa_overwalked (struct func_body_info *fbi)
853 gcc_checking_assert (fbi);
854 return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
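/* Note added for illustration: PARAM_IPA_MAX_AA_STEPS corresponds to the
   command line knob

     --param ipa-max-aa-steps=<number>

   so the per-function alias-analysis walking budget enforced above can be
   tuned without recompiling GCC.  */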
857 /* Find the nearest valid aa status for parameter specified by INDEX that
858 dominates BB. */
860 static struct param_aa_status *
861 find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
862 int index)
864 while (true)
866 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
867 if (!bb)
868 return NULL;
869 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
870 if (!bi->param_aa_statuses.is_empty ()
871 && bi->param_aa_statuses[index].valid)
872 return &bi->param_aa_statuses[index];
876 /* Get AA status structure for the given BB and parameter with INDEX. Allocate
877 structures and/or initialize the result with a dominating description as
878 necessary. */
880 static struct param_aa_status *
881 parm_bb_aa_status_for_bb (struct func_body_info *fbi, basic_block bb,
882 int index)
884 gcc_checking_assert (fbi);
885 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
886 if (bi->param_aa_statuses.is_empty ())
887 bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
888 struct param_aa_status *paa = &bi->param_aa_statuses[index];
889 if (!paa->valid)
891 gcc_checking_assert (!paa->parm_modified
892 && !paa->ref_modified
893 && !paa->pt_modified);
894 struct param_aa_status *dom_paa;
895 dom_paa = find_dominating_aa_status (fbi, bb, index);
896 if (dom_paa)
897 *paa = *dom_paa;
898 else
899 paa->valid = true;
902 return paa;
905 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
906 a value known not to be modified in this function before reaching the
907 statement STMT. FBI holds information about the function that we have
908 gathered so far but that does not survive the summary building stage. */
910 static bool
911 parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
912 gimple stmt, tree parm_load)
914 struct param_aa_status *paa;
915 bool modified = false;
916 ao_ref refd;
918 /* FIXME: FBI can be NULL if we are being called from outside
919 ipa_node_analysis or ipcp_transform_function, which currently happens
920 during inlining analysis. It would be great to extend fbi's lifetime and
921 always have it. Currently, we are just not afraid of too much walking in
922 that case. */
923 if (fbi)
925 if (aa_overwalked (fbi))
926 return false;
927 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
928 if (paa->parm_modified)
929 return false;
931 else
932 paa = NULL;
934 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
935 ao_ref_init (&refd, parm_load);
936 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
937 &modified, NULL);
938 if (fbi)
939 fbi->aa_walked += walked;
940 if (paa && modified)
941 paa->parm_modified = true;
942 return !modified;
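/* Illustrative example, hypothetical code not taken from the original file:

     void
     caller (int a)
     {
       use_address (&a);	/* A becomes addressable and lives in memory.  */
       bar (a);			/* This call is fed by a load of A.  */
     }

   The load of A feeding the call to BAR may only be treated as a pass-through
   of the first formal parameter if the walk above shows that no statement
   between the function entry and the call (here USE_ADDRESS) may have stored
   into A.  USE_ADDRESS and BAR are placeholder names for this sketch only.  */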
945 /* If STMT is an assignment that loads a value from a parameter declaration,
946 and the parameter has not been modified, return its index in ipa_node_params.
947 Otherwise return -1. */
949 static int
950 load_from_unmodified_param (struct func_body_info *fbi,
951 vec<ipa_param_descriptor> descriptors,
952 gimple stmt)
954 int index;
955 tree op1;
957 if (!gimple_assign_single_p (stmt))
958 return -1;
960 op1 = gimple_assign_rhs1 (stmt);
961 if (TREE_CODE (op1) != PARM_DECL)
962 return -1;
964 index = ipa_get_param_decl_index_1 (descriptors, op1);
965 if (index < 0
966 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
967 return -1;
969 return index;
972 /* Return true if memory reference REF (which must be a load through parameter
973 with INDEX) loads data that are known to be unmodified in this function
974 before reaching statement STMT. */
976 static bool
977 parm_ref_data_preserved_p (struct func_body_info *fbi,
978 int index, gimple stmt, tree ref)
980 struct param_aa_status *paa;
981 bool modified = false;
982 ao_ref refd;
984 /* FIXME: FBI can be NULL if we are being called from outside
985 ipa_node_analysis or ipcp_transform_function, which currently happens
986 during inlining analysis. It would be great to extend fbi's lifetime and
987 always have it. Currently, we are just not afraid of too much walking in
988 that case. */
989 if (fbi)
991 if (aa_overwalked (fbi))
992 return false;
993 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
994 if (paa->ref_modified)
995 return false;
997 else
998 paa = NULL;
1000 gcc_checking_assert (gimple_vuse (stmt));
1001 ao_ref_init (&refd, ref);
1002 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
1003 &modified, NULL);
1004 if (fbi)
1005 fbi->aa_walked += walked;
1006 if (paa && modified)
1007 paa->ref_modified = true;
1008 return !modified;
1011 /* Return true if the data pointed to by PARM (which is a parameter with INDEX)
1012 is known to be unmodified in this function before reaching call statement
1013 CALL into which it is passed. FBI describes the function body. */
1015 static bool
1016 parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
1017 gimple call, tree parm)
1019 bool modified = false;
1020 ao_ref refd;
1022 /* It's unnecessary to calculate anything about memory contents for a const
1023 function because it is not going to use it. But do not cache the result
1024 either. Also, no such calculations for non-pointers. */
1025 if (!gimple_vuse (call)
1026 || !POINTER_TYPE_P (TREE_TYPE (parm))
1027 || aa_overwalked (fbi))
1028 return false;
1030 struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (call),
1031 index);
1032 if (paa->pt_modified)
1033 return false;
1035 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
1036 int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
1037 &modified, NULL);
1038 fbi->aa_walked += walked;
1039 if (modified)
1040 paa->pt_modified = true;
1041 return !modified;
1044 /* Return true if we can prove that OP is a memory reference loading unmodified
1045 data from an aggregate passed as a parameter and if the aggregate is passed
1046 by reference, that the alias type of the load corresponds to the type of the
1047 formal parameter (so that we can rely on this type for TBAA in callers).
1048 INFO and PARMS_AINFO describe parameters of the current function (but the
1049 latter can be NULL), STMT is the load statement. If function returns true,
1050 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
1051 within the aggregate and whether it is a load from a value passed by
1052 reference respectively. */
1054 static bool
1055 ipa_load_from_parm_agg_1 (struct func_body_info *fbi,
1056 vec<ipa_param_descriptor> descriptors,
1057 gimple stmt, tree op, int *index_p,
1058 HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
1059 bool *by_ref_p)
1061 int index;
1062 HOST_WIDE_INT size, max_size;
1063 tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);
1065 if (max_size == -1 || max_size != size || *offset_p < 0)
1066 return false;
1068 if (DECL_P (base))
1070 int index = ipa_get_param_decl_index_1 (descriptors, base);
1071 if (index >= 0
1072 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
1074 *index_p = index;
1075 *by_ref_p = false;
1076 if (size_p)
1077 *size_p = size;
1078 return true;
1080 return false;
1083 if (TREE_CODE (base) != MEM_REF
1084 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
1085 || !integer_zerop (TREE_OPERAND (base, 1)))
1086 return false;
1088 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
1090 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
1091 index = ipa_get_param_decl_index_1 (descriptors, parm);
1093 else
1095 /* This branch catches situations where a pointer parameter is not a
1096 gimple register, for example:
1098 void hip7(S*) (struct S * p)
1100 void (*<T2e4>) (struct S *) D.1867;
1101 struct S * p.1;
1103 <bb 2>:
1104 p.1_1 = p;
1105 D.1867_2 = p.1_1->f;
1106 D.1867_2 ();
1107 gdp = &p;
1110 gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
1111 index = load_from_unmodified_param (fbi, descriptors, def);
1114 if (index >= 0
1115 && parm_ref_data_preserved_p (fbi, index, stmt, op))
1117 *index_p = index;
1118 *by_ref_p = true;
1119 if (size_p)
1120 *size_p = size;
1121 return true;
1123 return false;
1126 /* Just like the previous function, only without the func_body_info pointer
1127 and the size output, for users outside of this file. */
1129 bool
1130 ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
1131 tree op, int *index_p, HOST_WIDE_INT *offset_p,
1132 bool *by_ref_p)
1134 return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
1135 offset_p, NULL, by_ref_p);
1138 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
1139 of an assignment statement STMT, try to determine whether we are actually
1140 handling any of the following cases and construct an appropriate jump
1141 function into JFUNC if so:
1143 1) The passed value is loaded from a formal parameter which is not a gimple
1144 register (most probably because it is addressable, the value has to be
1145 scalar) and we can guarantee the value has not changed. This case can
1146 therefore be described by a simple pass-through jump function. For example:
1148 foo (int a)
1150 int a.0;
1152 a.0_2 = a;
1153 bar (a.0_2);
1155 2) The passed value can be described by a simple arithmetic pass-through
1156 jump function. E.g.
1158 foo (int a)
1160 int D.2064;
1162 D.2064_4 = a.1(D) + 4;
1163 bar (D.2064_4);
1165 This case can also occur in combination of the previous one, e.g.:
1167 foo (int a, int z)
1169 int a.0;
1170 int D.2064;
1172 a.0_3 = a;
1173 D.2064_4 = a.0_3 + 4;
1174 foo (D.2064_4);
1176 3) The passed value is an address of an object within another one (which
1177 also passed by reference). Such situations are described by an ancestor
1178 jump function and describe situations such as:
1180 B::foo() (struct B * const this)
1182 struct A * D.1845;
1184 D.1845_2 = &this_1(D)->D.1748;
1185 A::bar (D.1845_2);
1187 INFO is the structure describing individual parameters across different
1188 stages of IPA optimizations. PARMS_AINFO contains the information that is
1189 only needed for intraprocedural analysis. */
1191 static void
1192 compute_complex_assign_jump_func (struct func_body_info *fbi,
1193 struct ipa_node_params *info,
1194 struct ipa_jump_func *jfunc,
1195 gcall *call, gimple stmt, tree name,
1196 tree param_type)
1198 HOST_WIDE_INT offset, size, max_size;
1199 tree op1, tc_ssa, base, ssa;
1200 int index;
1202 op1 = gimple_assign_rhs1 (stmt);
1204 if (TREE_CODE (op1) == SSA_NAME)
1206 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1207 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1208 else
1209 index = load_from_unmodified_param (fbi, info->descriptors,
1210 SSA_NAME_DEF_STMT (op1));
1211 tc_ssa = op1;
1213 else
1215 index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1216 tc_ssa = gimple_assign_lhs (stmt);
1219 if (index >= 0)
1221 tree op2 = gimple_assign_rhs2 (stmt);
1223 if (op2)
1225 if (!is_gimple_ip_invariant (op2)
1226 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
1227 && !useless_type_conversion_p (TREE_TYPE (name),
1228 TREE_TYPE (op1))))
1229 return;
1231 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1232 gimple_assign_rhs_code (stmt));
1234 else if (gimple_assign_single_p (stmt))
1236 bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
1237 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1239 return;
1242 if (TREE_CODE (op1) != ADDR_EXPR)
1243 return;
1244 op1 = TREE_OPERAND (op1, 0);
1245 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
1246 return;
1247 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
1248 if (TREE_CODE (base) != MEM_REF
1249 /* If this is a varying address, punt. */
1250 || max_size == -1
1251 || max_size != size)
1252 return;
1253 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
1254 ssa = TREE_OPERAND (base, 0);
1255 if (TREE_CODE (ssa) != SSA_NAME
1256 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
1257 || offset < 0)
1258 return;
1260 /* Dynamic types are changed in constructors and destructors. */
1261 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
1262 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
1263 ipa_set_ancestor_jf (jfunc, offset, index,
1264 parm_ref_data_pass_through_p (fbi, index, call, ssa));
1267 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1268 it looks like:
1270 iftmp.1_3 = &obj_2(D)->D.1762;
1272 The base of the MEM_REF must be a default definition SSA NAME of a
1273 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
1274 whole MEM_REF expression is returned and the offset calculated from any
1275 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1276 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1278 static tree
1279 get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
1281 HOST_WIDE_INT size, max_size;
1282 tree expr, parm, obj;
1284 if (!gimple_assign_single_p (assign))
1285 return NULL_TREE;
1286 expr = gimple_assign_rhs1 (assign);
1288 if (TREE_CODE (expr) != ADDR_EXPR)
1289 return NULL_TREE;
1290 expr = TREE_OPERAND (expr, 0);
1291 obj = expr;
1292 expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
1294 if (TREE_CODE (expr) != MEM_REF
1295 /* If this is a varying address, punt. */
1296 || max_size == -1
1297 || max_size != size
1298 || *offset < 0)
1299 return NULL_TREE;
1300 parm = TREE_OPERAND (expr, 0);
1301 if (TREE_CODE (parm) != SSA_NAME
1302 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1303 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1304 return NULL_TREE;
1306 *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
1307 *obj_p = obj;
1308 return expr;
1312 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1313 statement PHI, try to find out whether NAME is in fact a
1314 multiple-inheritance typecast from a descendant into an ancestor of a formal
1315 parameter and thus can be described by an ancestor jump function and if so,
1316 write the appropriate function into JFUNC.
1318 Essentially we want to match the following pattern:
1320 if (obj_2(D) != 0B)
1321 goto <bb 3>;
1322 else
1323 goto <bb 4>;
1325 <bb 3>:
1326 iftmp.1_3 = &obj_2(D)->D.1762;
1328 <bb 4>:
1329 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1330 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1331 return D.1879_6; */
1333 static void
1334 compute_complex_ancestor_jump_func (struct func_body_info *fbi,
1335 struct ipa_node_params *info,
1336 struct ipa_jump_func *jfunc,
1337 gcall *call, gphi *phi)
1339 HOST_WIDE_INT offset;
1340 gimple assign, cond;
1341 basic_block phi_bb, assign_bb, cond_bb;
1342 tree tmp, parm, expr, obj;
1343 int index, i;
1345 if (gimple_phi_num_args (phi) != 2)
1346 return;
1348 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1349 tmp = PHI_ARG_DEF (phi, 0);
1350 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1351 tmp = PHI_ARG_DEF (phi, 1);
1352 else
1353 return;
1354 if (TREE_CODE (tmp) != SSA_NAME
1355 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1356 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1357 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1358 return;
1360 assign = SSA_NAME_DEF_STMT (tmp);
1361 assign_bb = gimple_bb (assign);
1362 if (!single_pred_p (assign_bb))
1363 return;
1364 expr = get_ancestor_addr_info (assign, &obj, &offset);
1365 if (!expr)
1366 return;
1367 parm = TREE_OPERAND (expr, 0);
1368 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1369 if (index < 0)
1370 return;
1372 cond_bb = single_pred (assign_bb);
1373 cond = last_stmt (cond_bb);
1374 if (!cond
1375 || gimple_code (cond) != GIMPLE_COND
1376 || gimple_cond_code (cond) != NE_EXPR
1377 || gimple_cond_lhs (cond) != parm
1378 || !integer_zerop (gimple_cond_rhs (cond)))
1379 return;
1381 phi_bb = gimple_bb (phi);
1382 for (i = 0; i < 2; i++)
1384 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1385 if (pred != assign_bb && pred != cond_bb)
1386 return;
1389 ipa_set_ancestor_jf (jfunc, offset, index,
1390 parm_ref_data_pass_through_p (fbi, index, call, parm));
1393 /* Inspect the given TYPE and return true iff it has the same structure (the
1394 same number of fields of the same types) as a C++ member pointer. If
1395 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1396 corresponding fields there. */
1398 static bool
1399 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1401 tree fld;
1403 if (TREE_CODE (type) != RECORD_TYPE)
1404 return false;
1406 fld = TYPE_FIELDS (type);
1407 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1408 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1409 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1410 return false;
1412 if (method_ptr)
1413 *method_ptr = fld;
1415 fld = DECL_CHAIN (fld);
1416 if (!fld || INTEGRAL_TYPE_P (fld)
1417 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1418 return false;
1419 if (delta)
1420 *delta = fld;
1422 if (DECL_CHAIN (fld))
1423 return false;
1425 return true;
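/* Illustrative example, hypothetical and not part of the original file: a C++
   pointer to member function such as

     int (MyString::*f) (int);

   is lowered by the C++ front end into a RECORD_TYPE with two fields, roughly

     struct { <pointer to METHOD_TYPE> __pfn; ptrdiff_t __delta; };

   which is exactly the shape the predicate above recognizes; *METHOD_PTR then
   receives the __pfn field and *DELTA the __delta field.  */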
1428 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1429 return the rhs of its defining statement. Otherwise return RHS as it
1430 is. */
1432 static inline tree
1433 get_ssa_def_if_simple_copy (tree rhs)
1435 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1437 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1439 if (gimple_assign_single_p (def_stmt))
1440 rhs = gimple_assign_rhs1 (def_stmt);
1441 else
1442 break;
1444 return rhs;
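/* Illustrative example, hypothetical GIMPLE not taken from the original file:
   given the copy chain

     b_2 = a_1(D);
     c_3 = b_2;

   calling the helper above with C_3 follows the single-rhs assignments
   backwards and returns A_1(D), i.e. whatever default definition or non-copy
   definition the chain ends in.  */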
1447 /* Simple linked list, describing known contents of an aggregate before a
1448 call. */
1450 struct ipa_known_agg_contents_list
1452 /* Offset and size of the described part of the aggregate. */
1453 HOST_WIDE_INT offset, size;
1454 /* Known constant value or NULL if the contents are known to be unknown. */
1455 tree constant;
1456 /* Pointer to the next structure in the list. */
1457 struct ipa_known_agg_contents_list *next;
1460 /* Find the proper place in linked list of ipa_known_agg_contents_list
1461 structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
1462 unless there is a partial overlap, in which case return NULL, or such an
1463 element is already there, in which case set *ALREADY_THERE to true. */
1465 static struct ipa_known_agg_contents_list **
1466 get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
1467 HOST_WIDE_INT lhs_offset,
1468 HOST_WIDE_INT lhs_size,
1469 bool *already_there)
1471 struct ipa_known_agg_contents_list **p = list;
1472 while (*p && (*p)->offset < lhs_offset)
1474 if ((*p)->offset + (*p)->size > lhs_offset)
1475 return NULL;
1476 p = &(*p)->next;
1479 if (*p && (*p)->offset < lhs_offset + lhs_size)
1481 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1482 /* We already know this value is subsequently overwritten with
1483 something else. */
1484 *already_there = true;
1485 else
1486 /* Otherwise this is a partial overlap which we cannot
1487 represent. */
1488 return NULL;
1490 return p;
1493 /* Build aggregate jump function from LIST, assuming there are exactly
1494 CONST_COUNT constant entries there and that the offset of the passed argument
1495 is ARG_OFFSET and store it into JFUNC. */
1497 static void
1498 build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1499 int const_count, HOST_WIDE_INT arg_offset,
1500 struct ipa_jump_func *jfunc)
1502 vec_alloc (jfunc->agg.items, const_count);
1503 while (list)
1505 if (list->constant)
1507 struct ipa_agg_jf_item item;
1508 item.offset = list->offset - arg_offset;
1509 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1510 item.value = unshare_expr_without_location (list->constant);
1511 jfunc->agg.items->quick_push (item);
1513 list = list->next;
1517 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1518 in ARG is filled in with constant values. ARG can either be an aggregate
1519 expression or a pointer to an aggregate. ARG_TYPE is the type of the
1520 aggregate. JFUNC is the jump function into which the constants are
1521 subsequently stored. */
1523 static void
1524 determine_locally_known_aggregate_parts (gcall *call, tree arg,
1525 tree arg_type,
1526 struct ipa_jump_func *jfunc)
1528 struct ipa_known_agg_contents_list *list = NULL;
1529 int item_count = 0, const_count = 0;
1530 HOST_WIDE_INT arg_offset, arg_size;
1531 gimple_stmt_iterator gsi;
1532 tree arg_base;
1533 bool check_ref, by_ref;
1534 ao_ref r;
1536 /* The function operates in three stages. First, we prepare check_ref, r,
1537 arg_base and arg_offset based on what is actually passed as an actual
1538 argument. */
1540 if (POINTER_TYPE_P (arg_type))
1542 by_ref = true;
1543 if (TREE_CODE (arg) == SSA_NAME)
1545 tree type_size;
1546 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
1547 return;
1548 check_ref = true;
1549 arg_base = arg;
1550 arg_offset = 0;
1551 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1552 arg_size = tree_to_uhwi (type_size);
1553 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1555 else if (TREE_CODE (arg) == ADDR_EXPR)
1557 HOST_WIDE_INT arg_max_size;
1559 arg = TREE_OPERAND (arg, 0);
1560 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1561 &arg_max_size);
1562 if (arg_max_size == -1
1563 || arg_max_size != arg_size
1564 || arg_offset < 0)
1565 return;
1566 if (DECL_P (arg_base))
1568 check_ref = false;
1569 ao_ref_init (&r, arg_base);
1571 else
1572 return;
1574 else
1575 return;
1577 else
1579 HOST_WIDE_INT arg_max_size;
1581 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1583 by_ref = false;
1584 check_ref = false;
1585 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1586 &arg_max_size);
1587 if (arg_max_size == -1
1588 || arg_max_size != arg_size
1589 || arg_offset < 0)
1590 return;
1592 ao_ref_init (&r, arg);
1595 /* Second stage walks back the BB, looks at individual statements and as long
1596 as it is confident of how the statements affect contents of the
1597 aggregates, it builds a sorted linked list of ipa_known_agg_contents_list structures
1598 describing it. */
1599 gsi = gsi_for_stmt (call);
1600 gsi_prev (&gsi);
1601 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1603 struct ipa_known_agg_contents_list *n, **p;
1604 gimple stmt = gsi_stmt (gsi);
1605 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1606 tree lhs, rhs, lhs_base;
1608 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1609 continue;
1610 if (!gimple_assign_single_p (stmt))
1611 break;
1613 lhs = gimple_assign_lhs (stmt);
1614 rhs = gimple_assign_rhs1 (stmt);
1615 if (!is_gimple_reg_type (TREE_TYPE (rhs))
1616 || TREE_CODE (lhs) == BIT_FIELD_REF
1617 || contains_bitfld_component_ref_p (lhs))
1618 break;
1620 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1621 &lhs_max_size);
1622 if (lhs_max_size == -1
1623 || lhs_max_size != lhs_size)
1624 break;
1626 if (check_ref)
1628 if (TREE_CODE (lhs_base) != MEM_REF
1629 || TREE_OPERAND (lhs_base, 0) != arg_base
1630 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1631 break;
1633 else if (lhs_base != arg_base)
1635 if (DECL_P (lhs_base))
1636 continue;
1637 else
1638 break;
1641 bool already_there = false;
1642 p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
1643 &already_there);
1644 if (!p)
1645 break;
1646 if (already_there)
1647 continue;
1649 rhs = get_ssa_def_if_simple_copy (rhs);
1650 n = XALLOCA (struct ipa_known_agg_contents_list);
1651 n->size = lhs_size;
1652 n->offset = lhs_offset;
1653 if (is_gimple_ip_invariant (rhs))
1655 n->constant = rhs;
1656 const_count++;
1658 else
1659 n->constant = NULL_TREE;
1660 n->next = *p;
1661 *p = n;
1663 item_count++;
1664 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1665 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1666 break;
1669 /* Third stage just goes over the list and creates an appropriate vector of
1670 ipa_agg_jf_item structures out of it, of course only if there are
1671 any known constants to begin with. */
1673 if (const_count)
1675 jfunc->agg.by_ref = by_ref;
1676 build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
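/* Illustrative example, hypothetical code not taken from the original file:

     struct S { int a; int b; };
     extern void callee (struct S *);

     void
     caller (void)
     {
       struct S s;
       s.a = 1;
       s.b = 2;
       callee (&s);
     }

   The backwards walk above finds the two constant stores, so the aggregate
   jump function built for the first argument records the constants 1 and 2 at
   the bit offsets of fields A and B, provided no statement in between may
   clobber S.  CALLEE and CALLER are placeholder names for this sketch.  */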
1680 static tree
1681 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1683 int n;
1684 tree type = (e->callee
1685 ? TREE_TYPE (e->callee->decl)
1686 : gimple_call_fntype (e->call_stmt));
1687 tree t = TYPE_ARG_TYPES (type);
1689 for (n = 0; n < i; n++)
1691 if (!t)
1692 break;
1693 t = TREE_CHAIN (t);
1695 if (t)
1696 return TREE_VALUE (t);
1697 if (!e->callee)
1698 return NULL;
1699 t = DECL_ARGUMENTS (e->callee->decl);
1700 for (n = 0; n < i; n++)
1702 if (!t)
1703 return NULL;
1704 t = TREE_CHAIN (t);
1706 if (t)
1707 return TREE_TYPE (t);
1708 return NULL;
1711 /* Compute jump function for all arguments of callsite CS and insert the
1712 information in the jump_functions array in the ipa_edge_args corresponding
1713 to this callsite. */
1715 static void
1716 ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
1717 struct cgraph_edge *cs)
1719 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1720 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1721 gcall *call = cs->call_stmt;
1722 int n, arg_num = gimple_call_num_args (call);
1723 bool useful_context = false;
1725 if (arg_num == 0 || args->jump_functions)
1726 return;
1727 vec_safe_grow_cleared (args->jump_functions, arg_num);
1728 if (flag_devirtualize)
1729 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
1731 if (gimple_call_internal_p (call))
1732 return;
1733 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1734 return;
1736 for (n = 0; n < arg_num; n++)
1738 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1739 tree arg = gimple_call_arg (call, n);
1740 tree param_type = ipa_get_callee_param_type (cs, n);
1741 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
1743 tree instance;
1744 struct ipa_polymorphic_call_context context (cs->caller->decl,
1745 arg, cs->call_stmt,
1746 &instance);
1747 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
1748 *ipa_get_ith_polymorhic_call_context (args, n) = context;
1749 if (!context.useless_p ())
1750 useful_context = true;
1753 if (POINTER_TYPE_P (TREE_TYPE(arg)))
1755 unsigned HOST_WIDE_INT hwi_bitpos;
1756 unsigned align;
1758 if (get_pointer_alignment_1 (arg, &align, &hwi_bitpos)
1759 && align % BITS_PER_UNIT == 0
1760 && hwi_bitpos % BITS_PER_UNIT == 0)
1762 jfunc->alignment.known = true;
1763 jfunc->alignment.align = align / BITS_PER_UNIT;
1764 jfunc->alignment.misalign = hwi_bitpos / BITS_PER_UNIT;
1766 else
1767 gcc_assert (!jfunc->alignment.known);
1769 else
1770 gcc_assert (!jfunc->alignment.known);
1772 if (is_gimple_ip_invariant (arg))
1773 ipa_set_jf_constant (jfunc, arg, cs);
1774 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1775 && TREE_CODE (arg) == PARM_DECL)
1777 int index = ipa_get_param_decl_index (info, arg);
1779 gcc_assert (index >=0);
1780 /* Aggregate passed by value, check for pass-through, otherwise we
1781 will attempt to fill in aggregate contents later in this
1782 loop. */
1783 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1785 ipa_set_jf_simple_pass_through (jfunc, index, false);
1786 continue;
1789 else if (TREE_CODE (arg) == SSA_NAME)
1791 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1793 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1794 if (index >= 0)
1796 bool agg_p;
1797 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1798 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1801 else
1803 gimple stmt = SSA_NAME_DEF_STMT (arg);
1804 if (is_gimple_assign (stmt))
1805 compute_complex_assign_jump_func (fbi, info, jfunc,
1806 call, stmt, arg, param_type);
1807 else if (gimple_code (stmt) == GIMPLE_PHI)
1808 compute_complex_ancestor_jump_func (fbi, info, jfunc,
1809 call,
1810 as_a <gphi *> (stmt));
1814 /* If ARG is a pointer, we cannot use its type to determine the type of aggregate
1815 passed (because type conversions are ignored in gimple). Usually we can
1816 safely get the type from the function declaration, but in case of K&R prototypes or
1817 variadic functions we can try our luck with the type of the pointer passed.
1818 TODO: Since we look for actual initialization of the memory object, we might do better
1819 to work out the type based on the memory stores we find. */
1820 if (!param_type)
1821 param_type = TREE_TYPE (arg);
1823 if ((jfunc->type != IPA_JF_PASS_THROUGH
1824 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1825 && (jfunc->type != IPA_JF_ANCESTOR
1826 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1827 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1828 || POINTER_TYPE_P (param_type)))
1829 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
1831 if (!useful_context)
1832 vec_free (args->polymorphic_call_contexts);
1835 /* Compute jump functions for all edges - both direct and indirect - outgoing
1836 from BB. */
1838 static void
1839 ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
1841 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
1842 int i;
1843 struct cgraph_edge *cs;
1845 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
1847 struct cgraph_node *callee = cs->callee;
1849 if (callee)
1851 callee->ultimate_alias_target ();
1852 /* We do not need to bother analyzing calls to unknown functions
1853 unless they may become known during lto/whopr. */
1854 if (!callee->definition && !flag_lto)
1855 continue;
1857 ipa_compute_jump_functions_for_edge (fbi, cs);
1861 /* If STMT looks like a statement loading a value from a member pointer formal
1862 parameter, return that parameter and store the offset of the field to
1863 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1864 might be clobbered). If USE_DELTA, then we look for a use of the delta
1865 field rather than the pfn. */
1867 static tree
1868 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
1869 HOST_WIDE_INT *offset_p)
1871 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1873 if (!gimple_assign_single_p (stmt))
1874 return NULL_TREE;
1876 rhs = gimple_assign_rhs1 (stmt);
1877 if (TREE_CODE (rhs) == COMPONENT_REF)
1879 ref_field = TREE_OPERAND (rhs, 1);
1880 rhs = TREE_OPERAND (rhs, 0);
1882 else
1883 ref_field = NULL_TREE;
1884 if (TREE_CODE (rhs) != MEM_REF)
1885 return NULL_TREE;
1886 rec = TREE_OPERAND (rhs, 0);
1887 if (TREE_CODE (rec) != ADDR_EXPR)
1888 return NULL_TREE;
1889 rec = TREE_OPERAND (rec, 0);
1890 if (TREE_CODE (rec) != PARM_DECL
1891 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1892 return NULL_TREE;
1893 ref_offset = TREE_OPERAND (rhs, 1);
1895 if (use_delta)
1896 fld = delta_field;
1897 else
1898 fld = ptr_field;
1899 if (offset_p)
1900 *offset_p = int_bit_position (fld);
1902 if (ref_field)
1904 if (integer_nonzerop (ref_offset))
1905 return NULL_TREE;
1906 return ref_field == fld ? rec : NULL_TREE;
1908 else
1909 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1910 : NULL_TREE;
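/* The kind of statements the function above is meant to recognize
   (member pointers are lowered into a record with a __pfn and a
   __delta field) are, e.g.,

     f$__pfn_24 = f.__pfn;        <- returns the PARM_DECL of F
     f$__delta_5 = f.__delta;     <- likewise, when USE_DELTA is true

   while anything else yields NULL_TREE.  */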
1913 /* Returns true iff T is an SSA_NAME defined by a statement. */
1915 static bool
1916 ipa_is_ssa_with_stmt_def (tree t)
1918 if (TREE_CODE (t) == SSA_NAME
1919 && !SSA_NAME_IS_DEFAULT_DEF (t))
1920 return true;
1921 else
1922 return false;
1925 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1926 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1927 indirect call graph edge. */
1929 static struct cgraph_edge *
1930 ipa_note_param_call (struct cgraph_node *node, int param_index,
1931 gcall *stmt)
1933 struct cgraph_edge *cs;
1935 cs = node->get_edge (stmt);
1936 cs->indirect_info->param_index = param_index;
1937 cs->indirect_info->agg_contents = 0;
1938 cs->indirect_info->member_ptr = 0;
1939 return cs;
1942 /* Analyze the CALL and examine uses of formal parameters of the caller
1943 FBI->node (described by FBI->info). Currently it checks
1945 whether the call calls a pointer that is a formal parameter and if so, the
1946 parameter is marked with the called flag and an indirect call graph edge
1947 describing the call is created. This is very simple for ordinary pointers
1948 represented in SSA but not-so-nice when it comes to member pointers. The
1949 ugly part of this function does nothing more than trying to match the
1950 pattern of such a call. An example of such a pattern is the gimple dump
1951 below, the call is on the last line:
1953 <bb 2>:
1954 f$__delta_5 = f.__delta;
1955 f$__pfn_24 = f.__pfn;
1958 <bb 2>:
1959 f$__delta_5 = MEM[(struct *)&f];
1960 f$__pfn_24 = MEM[(struct *)&f + 4B];
1962 and a few lines below:
1964 <bb 5>
1965 D.2496_3 = (int) f$__pfn_24;
1966 D.2497_4 = D.2496_3 & 1;
1967 if (D.2497_4 != 0)
1968 goto <bb 3>;
1969 else
1970 goto <bb 4>;
1972 <bb 6>:
1973 D.2500_7 = (unsigned int) f$__delta_5;
1974 D.2501_8 = &S + D.2500_7;
1975 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1976 D.2503_10 = *D.2502_9;
1977 D.2504_12 = f$__pfn_24 + -1;
1978 D.2505_13 = (unsigned int) D.2504_12;
1979 D.2506_14 = D.2503_10 + D.2505_13;
1980 D.2507_15 = *D.2506_14;
1981 iftmp.11_16 = (String:: *) D.2507_15;
1983 <bb 7>:
1984 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1985 D.2500_19 = (unsigned int) f$__delta_5;
1986 D.2508_20 = &S + D.2500_19;
1987 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1989 Such patterns are the result of simple calls through a member pointer:
1991 int doprinting (int (MyString::* f)(int) const)
1993 MyString S ("somestring");
1995 return (S.*f)(4);
1998 Moreover, the function also looks for called pointers loaded from aggregates
1999 passed by value or reference. */
2001 static void
2002 ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gcall *call,
2003 tree target)
2005 struct ipa_node_params *info = fbi->info;
2006 HOST_WIDE_INT offset;
2007 bool by_ref;
2009 if (SSA_NAME_IS_DEFAULT_DEF (target))
2011 tree var = SSA_NAME_VAR (target);
2012 int index = ipa_get_param_decl_index (info, var);
2013 if (index >= 0)
2014 ipa_note_param_call (fbi->node, index, call);
2015 return;
2018 int index;
2019 gimple def = SSA_NAME_DEF_STMT (target);
2020 if (gimple_assign_single_p (def)
2021 && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
2022 gimple_assign_rhs1 (def), &index, &offset,
2023 NULL, &by_ref))
2025 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2026 cs->indirect_info->offset = offset;
2027 cs->indirect_info->agg_contents = 1;
2028 cs->indirect_info->by_ref = by_ref;
2029 return;
2032 /* Now we need to try to match the complex pattern of calling a member
2033 pointer. */
2034 if (gimple_code (def) != GIMPLE_PHI
2035 || gimple_phi_num_args (def) != 2
2036 || !POINTER_TYPE_P (TREE_TYPE (target))
2037 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2038 return;
2040 /* First, we need to check whether one of these is a load from a member
2041 pointer that is a parameter to this function. */
2042 tree n1 = PHI_ARG_DEF (def, 0);
2043 tree n2 = PHI_ARG_DEF (def, 1);
2044 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2045 return;
2046 gimple d1 = SSA_NAME_DEF_STMT (n1);
2047 gimple d2 = SSA_NAME_DEF_STMT (n2);
2049 tree rec;
2050 basic_block bb, virt_bb;
2051 basic_block join = gimple_bb (def);
2052 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2054 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2055 return;
2057 bb = EDGE_PRED (join, 0)->src;
2058 virt_bb = gimple_bb (d2);
2060 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2062 bb = EDGE_PRED (join, 1)->src;
2063 virt_bb = gimple_bb (d1);
2065 else
2066 return;
2068 /* Second, we need to check that the basic blocks are laid out in the way
2069 corresponding to the pattern. */
2071 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2072 || single_pred (virt_bb) != bb
2073 || single_succ (virt_bb) != join)
2074 return;
2076 /* Third, let's see that the branching is done depending on the least
2077 significant bit of the pfn. */
2079 gimple branch = last_stmt (bb);
2080 if (!branch || gimple_code (branch) != GIMPLE_COND)
2081 return;
2083 if ((gimple_cond_code (branch) != NE_EXPR
2084 && gimple_cond_code (branch) != EQ_EXPR)
2085 || !integer_zerop (gimple_cond_rhs (branch)))
2086 return;
2088 tree cond = gimple_cond_lhs (branch);
2089 if (!ipa_is_ssa_with_stmt_def (cond))
2090 return;
2092 def = SSA_NAME_DEF_STMT (cond);
2093 if (!is_gimple_assign (def)
2094 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2095 || !integer_onep (gimple_assign_rhs2 (def)))
2096 return;
2098 cond = gimple_assign_rhs1 (def);
2099 if (!ipa_is_ssa_with_stmt_def (cond))
2100 return;
2102 def = SSA_NAME_DEF_STMT (cond);
2104 if (is_gimple_assign (def)
2105 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2107 cond = gimple_assign_rhs1 (def);
2108 if (!ipa_is_ssa_with_stmt_def (cond))
2109 return;
2110 def = SSA_NAME_DEF_STMT (cond);
2113 tree rec2;
2114 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2115 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2116 == ptrmemfunc_vbit_in_delta),
2117 NULL);
2118 if (rec != rec2)
2119 return;
2121 index = ipa_get_param_decl_index (info, rec);
2122 if (index >= 0
2123 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2125 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2126 cs->indirect_info->offset = offset;
2127 cs->indirect_info->agg_contents = 1;
2128 cs->indirect_info->member_ptr = 1;
2131 return;
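/* For the two simpler cases handled at the top of this function,
   roughly

     int call_ptr (int (*fn) (int)) { return fn (4); }
     int call_fld (struct S *s) { return s->callback (4); }

   the call is noted as an indirect call through parameter 0; in the
   second (hypothetical) example AGG_CONTENTS is set and the offset of
   the CALLBACK field within struct S is recorded.  */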
2134 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2135 object referenced in the expression is a formal parameter of the caller
2136 FBI->node (described by FBI->info), create a call note for the
2137 statement. */
2139 static void
2140 ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
2141 gcall *call, tree target)
2143 tree obj = OBJ_TYPE_REF_OBJECT (target);
2144 int index;
2145 HOST_WIDE_INT anc_offset;
2147 if (!flag_devirtualize)
2148 return;
2150 if (TREE_CODE (obj) != SSA_NAME)
2151 return;
2153 struct ipa_node_params *info = fbi->info;
2154 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2156 struct ipa_jump_func jfunc;
2157 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2158 return;
2160 anc_offset = 0;
2161 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2162 gcc_assert (index >= 0);
2163 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2164 call, &jfunc))
2165 return;
2167 else
2169 struct ipa_jump_func jfunc;
2170 gimple stmt = SSA_NAME_DEF_STMT (obj);
2171 tree expr;
2173 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2174 if (!expr)
2175 return;
2176 index = ipa_get_param_decl_index (info,
2177 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2178 gcc_assert (index >= 0);
2179 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2180 call, &jfunc, anc_offset))
2181 return;
2184 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2185 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2186 ii->offset = anc_offset;
2187 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2188 ii->otr_type = obj_type_ref_class (target);
2189 ii->polymorphic = 1;
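/* A minimal example of a call the above applies to (assuming class A
   has virtual methods) would be

     int f (A *a) { return a->vmethod (); }

   which is recorded as a polymorphic indirect call through parameter 0,
   with the OBJ_TYPE_REF token and class stored for later
   devirtualization attempts.  */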
2192 /* Analyze the call statement CALL and determine whether and how it uses formal
2193 parameters of the caller FBI->node (described by FBI->info). */
2196 static void
2197 ipa_analyze_call_uses (struct func_body_info *fbi, gcall *call)
2199 tree target = gimple_call_fn (call);
2201 if (!target
2202 || (TREE_CODE (target) != SSA_NAME
2203 && !virtual_method_call_p (target)))
2204 return;
2206 struct cgraph_edge *cs = fbi->node->get_edge (call);
2207 /* If we previously turned the call into a direct call, there is
2208 no need to analyze. */
2209 if (cs && !cs->indirect_unknown_callee)
2210 return;
2212 if (cs->indirect_info->polymorphic && flag_devirtualize)
2214 tree instance;
2215 tree target = gimple_call_fn (call);
2216 ipa_polymorphic_call_context context (current_function_decl,
2217 target, call, &instance);
2219 gcc_checking_assert (cs->indirect_info->otr_type
2220 == obj_type_ref_class (target));
2221 gcc_checking_assert (cs->indirect_info->otr_token
2222 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2224 cs->indirect_info->vptr_changed
2225 = !context.get_dynamic_type (instance,
2226 OBJ_TYPE_REF_OBJECT (target),
2227 obj_type_ref_class (target), call);
2228 cs->indirect_info->context = context;
2231 if (TREE_CODE (target) == SSA_NAME)
2232 ipa_analyze_indirect_call_uses (fbi, call, target);
2233 else if (virtual_method_call_p (target))
2234 ipa_analyze_virtual_call_uses (fbi, call, target);
2238 /* Analyze the call statement STMT with respect to formal parameters (described
2239 in FBI->info) of the caller given by FBI->node. Currently it only checks whether
2240 formal parameters are called. */
2242 static void
2243 ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
2245 if (is_gimple_call (stmt))
2246 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2249 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2250 If OP is a parameter declaration, mark it as used in the info structure
2251 passed in DATA. */
2253 static bool
2254 visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
2256 struct ipa_node_params *info = (struct ipa_node_params *) data;
2258 op = get_base_address (op);
2259 if (op
2260 && TREE_CODE (op) == PARM_DECL)
2262 int index = ipa_get_param_decl_index (info, op);
2263 gcc_assert (index >= 0);
2264 ipa_set_param_used (info, index, true);
2267 return false;
2270 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2271 the findings in various structures of the associated ipa_node_params
2272 structure, such as parameter flags, notes etc. FBI holds various data about
2273 the function being analyzed. */
2275 static void
2276 ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
2278 gimple_stmt_iterator gsi;
2279 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2281 gimple stmt = gsi_stmt (gsi);
2283 if (is_gimple_debug (stmt))
2284 continue;
2286 ipa_analyze_stmt_uses (fbi, stmt);
2287 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2288 visit_ref_for_mod_analysis,
2289 visit_ref_for_mod_analysis,
2290 visit_ref_for_mod_analysis);
2292 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2293 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2294 visit_ref_for_mod_analysis,
2295 visit_ref_for_mod_analysis,
2296 visit_ref_for_mod_analysis);
2299 /* Calculate controlled uses of parameters of NODE. */
2301 static void
2302 ipa_analyze_controlled_uses (struct cgraph_node *node)
2304 struct ipa_node_params *info = IPA_NODE_REF (node);
2306 for (int i = 0; i < ipa_get_param_count (info); i++)
2308 tree parm = ipa_get_param (info, i);
2309 int controlled_uses = 0;
2311 /* For SSA regs see if parameter is used. For non-SSA we compute
2312 the flag during modification analysis. */
2313 if (is_gimple_reg (parm))
2315 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2316 parm);
2317 if (ddef && !has_zero_uses (ddef))
2319 imm_use_iterator imm_iter;
2320 use_operand_p use_p;
2322 ipa_set_param_used (info, i, true);
2323 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2324 if (!is_gimple_call (USE_STMT (use_p)))
2326 if (!is_gimple_debug (USE_STMT (use_p)))
2328 controlled_uses = IPA_UNDESCRIBED_USE;
2329 break;
2332 else
2333 controlled_uses++;
2335 else
2336 controlled_uses = 0;
2338 else
2339 controlled_uses = IPA_UNDESCRIBED_USE;
2340 ipa_set_controlled_uses (info, i, controlled_uses);
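/* As an illustration, in a function like

     void f (void (*cb) (void)) { g (cb); h (cb); }

   parameter CB would end up with two controlled uses, because its only
   uses are as parts of call statements; any other kind of use (or a
   non-SSA parameter) makes the count IPA_UNDESCRIBED_USE.  */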
2344 /* Free stuff in BI. */
2346 static void
2347 free_ipa_bb_info (struct ipa_bb_info *bi)
2349 bi->cg_edges.release ();
2350 bi->param_aa_statuses.release ();
2353 /* Dominator walker driving the analysis. */
2355 class analysis_dom_walker : public dom_walker
2357 public:
2358 analysis_dom_walker (struct func_body_info *fbi)
2359 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2361 virtual void before_dom_children (basic_block);
2363 private:
2364 struct func_body_info *m_fbi;
2367 void
2368 analysis_dom_walker::before_dom_children (basic_block bb)
2370 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2371 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2374 /* Initialize the array describing properties of formal parameters
2375 of NODE, analyze their uses and compute jump functions associated
2376 with actual arguments of calls from within NODE. */
2378 void
2379 ipa_analyze_node (struct cgraph_node *node)
2381 struct func_body_info fbi;
2382 struct ipa_node_params *info;
2384 ipa_check_create_node_params ();
2385 ipa_check_create_edge_args ();
2386 info = IPA_NODE_REF (node);
2388 if (info->analysis_done)
2389 return;
2390 info->analysis_done = 1;
2392 if (ipa_func_spec_opts_forbid_analysis_p (node))
2394 for (int i = 0; i < ipa_get_param_count (info); i++)
2396 ipa_set_param_used (info, i, true);
2397 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2399 return;
2402 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2403 push_cfun (func);
2404 calculate_dominance_info (CDI_DOMINATORS);
2405 ipa_initialize_node_params (node);
2406 ipa_analyze_controlled_uses (node);
2408 fbi.node = node;
2409 fbi.info = IPA_NODE_REF (node);
2410 fbi.bb_infos = vNULL;
2411 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2412 fbi.param_count = ipa_get_param_count (info);
2413 fbi.aa_walked = 0;
2415 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2417 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2418 bi->cg_edges.safe_push (cs);
2421 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2423 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2424 bi->cg_edges.safe_push (cs);
2427 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2429 int i;
2430 struct ipa_bb_info *bi;
2431 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
2432 free_ipa_bb_info (bi);
2433 fbi.bb_infos.release ();
2434 free_dominance_info (CDI_DOMINATORS);
2435 pop_cfun ();
2438 /* Update the jump functions associated with call graph edge E when the call
2439 graph edge CS is being inlined, assuming that E->caller is already (possibly
2440 indirectly) inlined into CS->callee and that E has not been inlined. */
2442 static void
2443 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2444 struct cgraph_edge *e)
2446 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2447 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2448 int count = ipa_get_cs_argument_count (args);
2449 int i;
2451 for (i = 0; i < count; i++)
2453 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2454 struct ipa_polymorphic_call_context *dst_ctx
2455 = ipa_get_ith_polymorhic_call_context (args, i);
2457 if (dst->type == IPA_JF_ANCESTOR)
2459 struct ipa_jump_func *src;
2460 int dst_fid = dst->value.ancestor.formal_id;
2461 struct ipa_polymorphic_call_context *src_ctx
2462 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2464 /* Variable number of arguments can cause havoc if we try to access
2465 one that does not exist in the inlined edge. So make sure we
2466 don't. */
2467 if (dst_fid >= ipa_get_cs_argument_count (top))
2469 ipa_set_jf_unknown (dst);
2470 continue;
2473 src = ipa_get_ith_jump_func (top, dst_fid);
2475 if (src_ctx && !src_ctx->useless_p ())
2477 struct ipa_polymorphic_call_context ctx = *src_ctx;
2479 /* TODO: Make type preserved safe WRT contexts. */
2480 if (!ipa_get_jf_ancestor_type_preserved (dst))
2481 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2482 ctx.offset_by (dst->value.ancestor.offset);
2483 if (!ctx.useless_p ())
2485 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2486 count);
2487 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2489 dst_ctx->combine_with (ctx);
2492 if (src->agg.items
2493 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2495 struct ipa_agg_jf_item *item;
2496 int j;
2498 /* Currently we do not produce clobber aggregate jump functions,
2499 replace with merging when we do. */
2500 gcc_assert (!dst->agg.items);
2502 dst->agg.items = vec_safe_copy (src->agg.items);
2503 dst->agg.by_ref = src->agg.by_ref;
2504 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2505 item->offset -= dst->value.ancestor.offset;
2508 if (src->type == IPA_JF_PASS_THROUGH
2509 && src->value.pass_through.operation == NOP_EXPR)
2511 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2512 dst->value.ancestor.agg_preserved &=
2513 src->value.pass_through.agg_preserved;
2515 else if (src->type == IPA_JF_ANCESTOR)
2517 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2518 dst->value.ancestor.offset += src->value.ancestor.offset;
2519 dst->value.ancestor.agg_preserved &=
2520 src->value.ancestor.agg_preserved;
2522 else
2523 ipa_set_jf_unknown (dst);
2525 else if (dst->type == IPA_JF_PASS_THROUGH)
2527 struct ipa_jump_func *src;
2528 /* We must check range due to calls with variable number of arguments
2529 and we cannot combine jump functions with operations. */
2530 if (dst->value.pass_through.operation == NOP_EXPR
2531 && (dst->value.pass_through.formal_id
2532 < ipa_get_cs_argument_count (top)))
2534 int dst_fid = dst->value.pass_through.formal_id;
2535 src = ipa_get_ith_jump_func (top, dst_fid);
2536 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2537 struct ipa_polymorphic_call_context *src_ctx
2538 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2540 if (src_ctx && !src_ctx->useless_p ())
2542 struct ipa_polymorphic_call_context ctx = *src_ctx;
2544 /* TODO: Make type preserved safe WRT contexts. */
2545 if (!ipa_get_jf_pass_through_type_preserved (dst))
2546 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2547 if (!ctx.useless_p ())
2549 if (!dst_ctx)
2551 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2552 count);
2553 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2555 dst_ctx->combine_with (ctx);
2558 switch (src->type)
2560 case IPA_JF_UNKNOWN:
2561 ipa_set_jf_unknown (dst);
2562 break;
2563 case IPA_JF_CONST:
2564 ipa_set_jf_cst_copy (dst, src);
2565 break;
2567 case IPA_JF_PASS_THROUGH:
2569 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2570 enum tree_code operation;
2571 operation = ipa_get_jf_pass_through_operation (src);
2573 if (operation == NOP_EXPR)
2575 bool agg_p;
2576 agg_p = dst_agg_p
2577 && ipa_get_jf_pass_through_agg_preserved (src);
2578 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2580 else
2582 tree operand = ipa_get_jf_pass_through_operand (src);
2583 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2584 operation);
2586 break;
2588 case IPA_JF_ANCESTOR:
2590 bool agg_p;
2591 agg_p = dst_agg_p
2592 && ipa_get_jf_ancestor_agg_preserved (src);
2593 ipa_set_ancestor_jf (dst,
2594 ipa_get_jf_ancestor_offset (src),
2595 ipa_get_jf_ancestor_formal_id (src),
2596 agg_p);
2597 break;
2599 default:
2600 gcc_unreachable ();
2603 if (src->agg.items
2604 && (dst_agg_p || !src->agg.by_ref))
2606 /* Currently we do not produce clobber aggregate jump
2607 functions, replace with merging when we do. */
2608 gcc_assert (!dst->agg.items);
2610 dst->agg.by_ref = src->agg.by_ref;
2611 dst->agg.items = vec_safe_copy (src->agg.items);
2614 else
2615 ipa_set_jf_unknown (dst);
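/* Composition example: roughly, if the non-inlined edge E passed the
   inlined function's parameter 1 straight through (a simple
   pass-through with formal_id 1) and the inlined edge CS in turn passed
   parameter 3 of the new root in that position, the updated jump
   function of E becomes a simple pass-through of parameter 3 of the
   root of the inline tree.  Anything that cannot be composed safely
   degrades to IPA_JF_UNKNOWN.  */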
2620 /* If TARGET is an addr_expr of a function declaration, make it the
2621 (SPECULATIVE) destination of an indirect edge IE and return the edge.
2622 Otherwise, return NULL. */
2624 struct cgraph_edge *
2625 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2626 bool speculative)
2628 struct cgraph_node *callee;
2629 struct inline_edge_summary *es = inline_edge_summary (ie);
2630 bool unreachable = false;
2632 if (TREE_CODE (target) == ADDR_EXPR)
2633 target = TREE_OPERAND (target, 0);
2634 if (TREE_CODE (target) != FUNCTION_DECL)
2636 target = canonicalize_constructor_val (target, NULL);
2637 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2639 if (ie->indirect_info->member_ptr)
2640 /* Member pointer call that goes through a VMT lookup. */
2641 return NULL;
2643 if (dump_enabled_p ())
2645 location_t loc = gimple_location_safe (ie->call_stmt);
2646 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2647 "discovered direct call to non-function in %s/%i, "
2648 "making it __builtin_unreachable\n",
2649 ie->caller->name (), ie->caller->order);
2652 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2653 callee = cgraph_node::get_create (target);
2654 unreachable = true;
2656 else
2657 callee = cgraph_node::get (target);
2659 else
2660 callee = cgraph_node::get (target);
2662 /* Because may-edges are not explicitly represented and the vtable may be external,
2663 we may create the first reference to the object in the unit. */
2664 if (!callee || callee->global.inlined_to)
2667 /* We had better make sure we can refer to it.
2668 In the case of static functions we are out of luck, since we already
2669 removed its body. In the case of public functions we may or may
2670 not introduce the reference. */
2671 if (!canonicalize_constructor_val (target, NULL)
2672 || !TREE_PUBLIC (target))
2674 if (dump_file)
2675 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2676 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2677 xstrdup_for_dump (ie->caller->name ()),
2678 ie->caller->order,
2679 xstrdup_for_dump (ie->callee->name ()),
2680 ie->callee->order);
2681 return NULL;
2683 callee = cgraph_node::get_create (target);
2686 /* If the edge is already speculated. */
2687 if (speculative && ie->speculative)
2689 struct cgraph_edge *e2;
2690 struct ipa_ref *ref;
2691 ie->speculative_call_info (e2, ie, ref);
2692 if (e2->callee->ultimate_alias_target ()
2693 != callee->ultimate_alias_target ())
2695 if (dump_file)
2696 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2697 "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
2698 xstrdup_for_dump (ie->caller->name ()),
2699 ie->caller->order,
2700 xstrdup_for_dump (callee->name ()),
2701 callee->order,
2702 xstrdup_for_dump (e2->callee->name ()),
2703 e2->callee->order);
2705 else
2707 if (dump_file)
2708 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2709 "(%s/%i -> %s/%i) this agree with previous speculation.\n",
2710 xstrdup_for_dump (ie->caller->name ()),
2711 ie->caller->order,
2712 xstrdup_for_dump (callee->name ()),
2713 callee->order);
2715 return NULL;
2718 if (!dbg_cnt (devirt))
2719 return NULL;
2721 ipa_check_create_node_params ();
2723 /* We cannot make edges to inline clones. It is a bug that someone removed
2724 the cgraph node too early. */
2725 gcc_assert (!callee->global.inlined_to);
2727 if (dump_file && !unreachable)
2729 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2730 "(%s/%i -> %s/%i), for stmt ",
2731 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2732 speculative ? "speculative" : "known",
2733 xstrdup_for_dump (ie->caller->name ()),
2734 ie->caller->order,
2735 xstrdup_for_dump (callee->name ()),
2736 callee->order);
2737 if (ie->call_stmt)
2738 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2739 else
2740 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2742 if (dump_enabled_p ())
2744 location_t loc = gimple_location_safe (ie->call_stmt);
2746 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2747 "converting indirect call in %s to direct call to %s\n",
2748 ie->caller->name (), callee->name ());
2750 if (!speculative)
2752 struct cgraph_edge *orig = ie;
2753 ie = ie->make_direct (callee);
2754 /* If we resolved a speculative edge, the cost is already up to date
2755 for the direct call (adjusted by inline_edge_duplication_hook). */
2756 if (ie == orig)
2758 es = inline_edge_summary (ie);
2759 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2760 - eni_size_weights.call_cost);
2761 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2762 - eni_time_weights.call_cost);
2765 else
2767 if (!callee->can_be_discarded_p ())
2769 cgraph_node *alias;
2770 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2771 if (alias)
2772 callee = alias;
2774 /* make_speculative will update ie's cost to direct call cost. */
2775 ie = ie->make_speculative
2776 (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
2779 return ie;
2782 /* Retrieve a value from the aggregate jump function AGG for the given OFFSET, or
2783 return NULL if there is none. BY_REF specifies whether the value has to
2784 be passed by reference or by value. */
2786 tree
2787 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2788 HOST_WIDE_INT offset, bool by_ref)
2790 struct ipa_agg_jf_item *item;
2791 int i;
2793 if (by_ref != agg->by_ref)
2794 return NULL;
2796 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2797 if (item->offset == offset)
2799 /* Currently we do not have clobber values, return NULL for them once
2800 we do. */
2801 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2802 return item->value;
2804 return NULL;
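/* E.g. if the jump function recorded that the aggregate contains the
   constant 7 at a particular offset, a query with that OFFSET and a
   matching BY_REF returns the constant; a mismatch in either the offset
   or the by-reference flag yields NULL.  */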
2807 /* Remove a reference to SYMBOL from the list of references of a node given by
2808 reference description RDESC. Return true if the reference has been
2809 successfully found and removed. */
2811 static bool
2812 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2814 struct ipa_ref *to_del;
2815 struct cgraph_edge *origin;
2817 origin = rdesc->cs;
2818 if (!origin)
2819 return false;
2820 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
2821 origin->lto_stmt_uid);
2822 if (!to_del)
2823 return false;
2825 to_del->remove_reference ();
2826 if (dump_file)
2827 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2828 xstrdup_for_dump (origin->caller->name ()),
2829 origin->caller->order, xstrdup_for_dump (symbol->name ()));
2830 return true;
2833 /* If JFUNC has a reference description with refcount different from
2834 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2835 NULL. JFUNC must be a constant jump function. */
2837 static struct ipa_cst_ref_desc *
2838 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2840 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2841 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2842 return rdesc;
2843 else
2844 return NULL;
2847 /* If the value of constant jump function JFUNC is an address of a function
2848 declaration, return the associated call graph node. Otherwise return
2849 NULL. */
2851 static cgraph_node *
2852 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2854 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2855 tree cst = ipa_get_jf_constant (jfunc);
2856 if (TREE_CODE (cst) != ADDR_EXPR
2857 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2858 return NULL;
2860 return cgraph_node::get (TREE_OPERAND (cst, 0));
2864 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2865 refcount and, if it hits zero, remove the reference to the symbol the jump
2866 function describes from the caller of the edge specified in the rdesc. Return
2867 false if either the symbol or the reference could not be found, otherwise return true. */
2869 static bool
2870 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2872 struct ipa_cst_ref_desc *rdesc;
2873 if (jfunc->type == IPA_JF_CONST
2874 && (rdesc = jfunc_rdesc_usable (jfunc))
2875 && --rdesc->refcount == 0)
2877 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
2878 if (!symbol)
2879 return false;
2881 return remove_described_reference (symbol, rdesc);
2883 return true;
2886 /* Try to find a destination for indirect edge IE that corresponds to a simple
2887 call or a call of a member function pointer and where the destination is a
2888 pointer formal parameter described by jump function JFUNC. If it can be
2889 determined, return the newly direct edge, otherwise return NULL.
2890 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2892 static struct cgraph_edge *
2893 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2894 struct ipa_jump_func *jfunc,
2895 struct ipa_node_params *new_root_info)
2897 struct cgraph_edge *cs;
2898 tree target;
2899 bool agg_contents = ie->indirect_info->agg_contents;
2901 if (ie->indirect_info->agg_contents)
2902 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2903 ie->indirect_info->offset,
2904 ie->indirect_info->by_ref);
2905 else
2906 target = ipa_value_from_jfunc (new_root_info, jfunc);
2907 if (!target)
2908 return NULL;
2909 cs = ipa_make_edge_direct_to_target (ie, target);
2911 if (cs && !agg_contents)
2913 bool ok;
2914 gcc_checking_assert (cs->callee
2915 && (cs != ie
2916 || jfunc->type != IPA_JF_CONST
2917 || !cgraph_node_for_jfunc (jfunc)
2918 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2919 ok = try_decrement_rdesc_refcount (jfunc);
2920 gcc_checking_assert (ok);
2923 return cs;
2926 /* Return the target to be used in cases of impossible devirtualization. IE
2927 and target (the latter can be NULL) are dumped when dumping is enabled. */
2929 tree
2930 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
2932 if (dump_file)
2934 if (target)
2935 fprintf (dump_file,
2936 "Type inconsistent devirtualization: %s/%i->%s\n",
2937 ie->caller->name (), ie->caller->order,
2938 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
2939 else
2940 fprintf (dump_file,
2941 "No devirtualization target in %s/%i\n",
2942 ie->caller->name (), ie->caller->order);
2944 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2945 cgraph_node::get_create (new_target);
2946 return new_target;
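/* Redirecting the edge to the __builtin_unreachable decl returned above
   effectively tells later passes that this call cannot happen with a
   type-consistent target, so the path leading to it may be removed.  */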
2949 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2950 call based on a formal parameter which is described by jump function JFUNC
2951 and if it can be determined, make it direct and return the direct edge.
2952 Otherwise, return NULL. CTX describes the polymorphic context that the
2953 parameter the call is based on brings along with it. */
2955 static struct cgraph_edge *
2956 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2957 struct ipa_jump_func *jfunc,
2958 struct ipa_polymorphic_call_context ctx)
2960 tree target = NULL;
2961 bool speculative = false;
2963 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
2964 return NULL;
2966 gcc_assert (!ie->indirect_info->by_ref);
2968 /* Try to do lookup via known virtual table pointer value. */
2969 if (!ie->indirect_info->vptr_changed
2970 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
2972 tree vtable;
2973 unsigned HOST_WIDE_INT offset;
2974 tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
2975 ie->indirect_info->offset,
2976 true);
2977 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
2979 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
2980 vtable, offset);
2981 if (t)
2983 if ((TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
2984 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
2985 || !possible_polymorphic_call_target_p
2986 (ie, cgraph_node::get (t)))
2988 /* Do not speculate builtin_unreachable, it is stupid! */
2989 if (!ie->indirect_info->vptr_changed)
2990 target = ipa_impossible_devirt_target (ie, target);
2992 else
2994 target = t;
2995 speculative = ie->indirect_info->vptr_changed;
3001 ipa_polymorphic_call_context ie_context (ie);
3002 vec <cgraph_node *>targets;
3003 bool final;
3005 ctx.offset_by (ie->indirect_info->offset);
3006 if (ie->indirect_info->vptr_changed)
3007 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3008 ie->indirect_info->otr_type);
3009 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3010 targets = possible_polymorphic_call_targets
3011 (ie->indirect_info->otr_type,
3012 ie->indirect_info->otr_token,
3013 ctx, &final);
3014 if (final && targets.length () <= 1)
3016 speculative = false;
3017 if (targets.length () == 1)
3018 target = targets[0]->decl;
3019 else
3020 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3022 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3023 && !ie->speculative && ie->maybe_hot_p ())
3025 cgraph_node *n;
3026 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3027 ie->indirect_info->otr_token,
3028 ie->indirect_info->context);
3029 if (n)
3031 target = n->decl;
3032 speculative = true;
3036 if (target)
3038 if (!possible_polymorphic_call_target_p
3039 (ie, cgraph_node::get_create (target)))
3041 if (speculative)
3042 return NULL;
3043 target = ipa_impossible_devirt_target (ie, target);
3045 return ipa_make_edge_direct_to_target (ie, target, speculative);
3047 else
3048 return NULL;
3051 /* Update the param called notes associated with NODE when CS is being inlined,
3052 assuming NODE is (potentially indirectly) inlined into CS->callee.
3053 Moreover, if the callee is discovered to be constant, create a new cgraph
3054 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3055 unless NEW_EDGES is NULL. Return true iff new edges were created. */
3057 static bool
3058 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3059 struct cgraph_node *node,
3060 vec<cgraph_edge *> *new_edges)
3062 struct ipa_edge_args *top;
3063 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3064 struct ipa_node_params *new_root_info;
3065 bool res = false;
3067 ipa_check_create_edge_args ();
3068 top = IPA_EDGE_REF (cs);
3069 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3070 ? cs->caller->global.inlined_to
3071 : cs->caller);
3073 for (ie = node->indirect_calls; ie; ie = next_ie)
3075 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3076 struct ipa_jump_func *jfunc;
3077 int param_index;
3079 next_ie = ie->next_callee;
3081 if (ici->param_index == -1)
3082 continue;
3084 /* We must check range due to calls with variable number of arguments: */
3085 if (ici->param_index >= ipa_get_cs_argument_count (top))
3087 ici->param_index = -1;
3088 continue;
3091 param_index = ici->param_index;
3092 jfunc = ipa_get_ith_jump_func (top, param_index);
3094 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3095 new_direct_edge = NULL;
3096 else if (ici->polymorphic)
3098 ipa_polymorphic_call_context ctx;
3099 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3100 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3102 else
3103 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3104 new_root_info);
3105 /* If speculation was removed, then we need to do nothing. */
3106 if (new_direct_edge && new_direct_edge != ie)
3108 new_direct_edge->indirect_inlining_edge = 1;
3109 top = IPA_EDGE_REF (cs);
3110 res = true;
3112 else if (new_direct_edge)
3114 new_direct_edge->indirect_inlining_edge = 1;
3115 if (new_direct_edge->call_stmt)
3116 new_direct_edge->call_stmt_cannot_inline_p
3117 = !gimple_check_call_matching_types (
3118 new_direct_edge->call_stmt,
3119 new_direct_edge->callee->decl, false);
3120 if (new_edges)
3122 new_edges->safe_push (new_direct_edge);
3123 res = true;
3125 top = IPA_EDGE_REF (cs);
3127 else if (jfunc->type == IPA_JF_PASS_THROUGH
3128 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3130 if ((ici->agg_contents
3131 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
3132 || (ici->polymorphic
3133 && !ipa_get_jf_pass_through_type_preserved (jfunc)))
3134 ici->param_index = -1;
3135 else
3136 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3138 else if (jfunc->type == IPA_JF_ANCESTOR)
3140 if ((ici->agg_contents
3141 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
3142 || (ici->polymorphic
3143 && !ipa_get_jf_ancestor_type_preserved (jfunc)))
3144 ici->param_index = -1;
3145 else
3147 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3148 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3151 else
3152 /* Either we can find a destination for this edge now or never. */
3153 ici->param_index = -1;
3156 return res;
3159 /* Recursively traverse subtree of NODE (including node) made of inlined
3160 cgraph_edges when CS has been inlined and invoke
3161 update_indirect_edges_after_inlining on all nodes and
3162 update_jump_functions_after_inlining on all non-inlined edges that lead out
3163 of this subtree. Newly discovered indirect edges will be added to
3164 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3165 created. */
3167 static bool
3168 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3169 struct cgraph_node *node,
3170 vec<cgraph_edge *> *new_edges)
3172 struct cgraph_edge *e;
3173 bool res;
3175 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3177 for (e = node->callees; e; e = e->next_callee)
3178 if (!e->inline_failed)
3179 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3180 else
3181 update_jump_functions_after_inlining (cs, e);
3182 for (e = node->indirect_calls; e; e = e->next_callee)
3183 update_jump_functions_after_inlining (cs, e);
3185 return res;
3188 /* Combine two controlled uses counts as done during inlining. */
3190 static int
3191 combine_controlled_uses_counters (int c, int d)
3193 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3194 return IPA_UNDESCRIBED_USE;
3195 else
3196 return c + d - 1;
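/* E.g. combining counts of 2 and 3 gives 4; one use is subtracted
   because the use at the call site being inlined disappears with the
   call itself.  IPA_UNDESCRIBED_USE on either side poisons the
   result.  */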
3199 /* Propagate the number of controlled uses from CS->callee to the new root of the
3200 tree of inlined nodes. */
3202 static void
3203 propagate_controlled_uses (struct cgraph_edge *cs)
3205 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3206 struct cgraph_node *new_root = cs->caller->global.inlined_to
3207 ? cs->caller->global.inlined_to : cs->caller;
3208 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3209 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3210 int count, i;
3212 count = MIN (ipa_get_cs_argument_count (args),
3213 ipa_get_param_count (old_root_info));
3214 for (i = 0; i < count; i++)
3216 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3217 struct ipa_cst_ref_desc *rdesc;
3219 if (jf->type == IPA_JF_PASS_THROUGH)
3221 int src_idx, c, d;
3222 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3223 c = ipa_get_controlled_uses (new_root_info, src_idx);
3224 d = ipa_get_controlled_uses (old_root_info, i);
3226 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3227 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3228 c = combine_controlled_uses_counters (c, d);
3229 ipa_set_controlled_uses (new_root_info, src_idx, c);
3230 if (c == 0 && new_root_info->ipcp_orig_node)
3232 struct cgraph_node *n;
3233 struct ipa_ref *ref;
3234 tree t = new_root_info->known_csts[src_idx];
3236 if (t && TREE_CODE (t) == ADDR_EXPR
3237 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3238 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3239 && (ref = new_root->find_reference (n, NULL, 0)))
3241 if (dump_file)
3242 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3243 "reference from %s/%i to %s/%i.\n",
3244 xstrdup_for_dump (new_root->name ()),
3245 new_root->order,
3246 xstrdup_for_dump (n->name ()), n->order);
3247 ref->remove_reference ();
3251 else if (jf->type == IPA_JF_CONST
3252 && (rdesc = jfunc_rdesc_usable (jf)))
3254 int d = ipa_get_controlled_uses (old_root_info, i);
3255 int c = rdesc->refcount;
3256 rdesc->refcount = combine_controlled_uses_counters (c, d);
3257 if (rdesc->refcount == 0)
3259 tree cst = ipa_get_jf_constant (jf);
3260 struct cgraph_node *n;
3261 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3262 && TREE_CODE (TREE_OPERAND (cst, 0))
3263 == FUNCTION_DECL);
3264 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3265 if (n)
3267 struct cgraph_node *clone;
3268 bool ok;
3269 ok = remove_described_reference (n, rdesc);
3270 gcc_checking_assert (ok);
3272 clone = cs->caller;
3273 while (clone->global.inlined_to
3274 && clone != rdesc->cs->caller
3275 && IPA_NODE_REF (clone)->ipcp_orig_node)
3277 struct ipa_ref *ref;
3278 ref = clone->find_reference (n, NULL, 0);
3279 if (ref)
3281 if (dump_file)
3282 fprintf (dump_file, "ipa-prop: Removing "
3283 "cloning-created reference "
3284 "from %s/%i to %s/%i.\n",
3285 xstrdup_for_dump (clone->name ()),
3286 clone->order,
3287 xstrdup_for_dump (n->name ()),
3288 n->order);
3289 ref->remove_reference ();
3291 clone = clone->callers->caller;
3298 for (i = ipa_get_param_count (old_root_info);
3299 i < ipa_get_cs_argument_count (args);
3300 i++)
3302 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3304 if (jf->type == IPA_JF_CONST)
3306 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3307 if (rdesc)
3308 rdesc->refcount = IPA_UNDESCRIBED_USE;
3310 else if (jf->type == IPA_JF_PASS_THROUGH)
3311 ipa_set_controlled_uses (new_root_info,
3312 jf->value.pass_through.formal_id,
3313 IPA_UNDESCRIBED_USE);
3317 /* Update jump functions and call note functions on inlining the call site CS.
3318 CS is expected to lead to a node already cloned by
3319 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3320 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3321 created. */
3323 bool
3324 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3325 vec<cgraph_edge *> *new_edges)
3327 bool changed;
3328 /* Do nothing if the preparation phase has not been carried out yet
3329 (i.e. during early inlining). */
3330 if (!ipa_node_params_sum)
3331 return false;
3332 gcc_assert (ipa_edge_args_vector);
3334 propagate_controlled_uses (cs);
3335 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3337 return changed;
3340 /* Frees all dynamically allocated structures that the argument info points
3341 to. */
3343 void
3344 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3346 vec_free (args->jump_functions);
3347 memset (args, 0, sizeof (*args));
3350 /* Free all ipa_edge_args structures. */
3352 void
3353 ipa_free_all_edge_args (void)
3355 int i;
3356 struct ipa_edge_args *args;
3358 if (!ipa_edge_args_vector)
3359 return;
3361 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3362 ipa_free_edge_args_substructures (args);
3364 vec_free (ipa_edge_args_vector);
3367 /* Frees all dynamically allocated structures that the param info points
3368 to. */
3370 ipa_node_params::~ipa_node_params ()
3372 descriptors.release ();
3373 free (lattices);
3374 /* Lattice values and their sources are deallocated with their allocation
3375 pool. */
3376 known_contexts.release ();
3378 lattices = NULL;
3379 ipcp_orig_node = NULL;
3380 analysis_done = 0;
3381 node_enqueued = 0;
3382 do_clone_for_all_contexts = 0;
3383 is_all_contexts_clone = 0;
3384 node_dead = 0;
3387 /* Free all ipa_node_params structures. */
3389 void
3390 ipa_free_all_node_params (void)
3392 delete ipa_node_params_sum;
3393 ipa_node_params_sum = NULL;
3396 /* Grow ipcp_transformations if necessary. */
3398 void
3399 ipcp_grow_transformations_if_necessary (void)
3401 if (vec_safe_length (ipcp_transformations)
3402 <= (unsigned) symtab->cgraph_max_uid)
3403 vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
3406 /* Set the aggregate replacements of NODE to be AGGVALS. */
3408 void
3409 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3410 struct ipa_agg_replacement_value *aggvals)
3412 ipcp_grow_transformations_if_necessary ();
3413 (*ipcp_transformations)[node->uid].agg_values = aggvals;
3416 /* Hook that is called by cgraph.c when an edge is removed. */
3418 static void
3419 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3421 struct ipa_edge_args *args;
3423 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3424 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3425 return;
3427 args = IPA_EDGE_REF (cs);
3428 if (args->jump_functions)
3430 struct ipa_jump_func *jf;
3431 int i;
3432 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3434 struct ipa_cst_ref_desc *rdesc;
3435 try_decrement_rdesc_refcount (jf);
3436 if (jf->type == IPA_JF_CONST
3437 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3438 && rdesc->cs == cs)
3439 rdesc->cs = NULL;
3443 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3446 /* Hook that is called by cgraph.c when an edge is duplicated. */
3448 static void
3449 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3450 void *)
3452 struct ipa_edge_args *old_args, *new_args;
3453 unsigned int i;
3455 ipa_check_create_edge_args ();
3457 old_args = IPA_EDGE_REF (src);
3458 new_args = IPA_EDGE_REF (dst);
3460 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3461 if (old_args->polymorphic_call_contexts)
3462 new_args->polymorphic_call_contexts
3463 = vec_safe_copy (old_args->polymorphic_call_contexts);
3465 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3467 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3468 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3470 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3472 if (src_jf->type == IPA_JF_CONST)
3474 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3476 if (!src_rdesc)
3477 dst_jf->value.constant.rdesc = NULL;
3478 else if (src->caller == dst->caller)
3480 struct ipa_ref *ref;
3481 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3482 gcc_checking_assert (n);
3483 ref = src->caller->find_reference (n, src->call_stmt,
3484 src->lto_stmt_uid);
3485 gcc_checking_assert (ref);
3486 dst->caller->clone_reference (ref, ref->stmt);
3488 gcc_checking_assert (ipa_refdesc_pool);
3489 struct ipa_cst_ref_desc *dst_rdesc
3490 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3491 dst_rdesc->cs = dst;
3492 dst_rdesc->refcount = src_rdesc->refcount;
3493 dst_rdesc->next_duplicate = NULL;
3494 dst_jf->value.constant.rdesc = dst_rdesc;
3496 else if (src_rdesc->cs == src)
3498 struct ipa_cst_ref_desc *dst_rdesc;
3499 gcc_checking_assert (ipa_refdesc_pool);
3500 dst_rdesc
3501 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3502 dst_rdesc->cs = dst;
3503 dst_rdesc->refcount = src_rdesc->refcount;
3504 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3505 src_rdesc->next_duplicate = dst_rdesc;
3506 dst_jf->value.constant.rdesc = dst_rdesc;
3508 else
3510 struct ipa_cst_ref_desc *dst_rdesc;
3511 /* This can happen during inlining, when a JFUNC can refer to a
3512 reference taken in a function up in the tree of inline clones.
3513 We need to find the duplicate that refers to our tree of
3514 inline clones. */
3516 gcc_assert (dst->caller->global.inlined_to);
3517 for (dst_rdesc = src_rdesc->next_duplicate;
3518 dst_rdesc;
3519 dst_rdesc = dst_rdesc->next_duplicate)
3521 struct cgraph_node *top;
3522 top = dst_rdesc->cs->caller->global.inlined_to
3523 ? dst_rdesc->cs->caller->global.inlined_to
3524 : dst_rdesc->cs->caller;
3525 if (dst->caller->global.inlined_to == top)
3526 break;
3528 gcc_assert (dst_rdesc);
3529 dst_jf->value.constant.rdesc = dst_rdesc;
3532 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3533 && src->caller == dst->caller)
3535 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3536 ? dst->caller->global.inlined_to : dst->caller;
3537 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3538 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3540 int c = ipa_get_controlled_uses (root_info, idx);
3541 if (c != IPA_UNDESCRIBED_USE)
3543 c++;
3544 ipa_set_controlled_uses (root_info, idx, c);
3550 /* Analyze a function newly added into the callgraph. */
3552 static void
3553 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3555 if (node->has_gimple_body_p ())
3556 ipa_analyze_node (node);
3559 /* Hook that is called by summary when a node is duplicated. */
3561 void
3562 ipa_node_params_t::duplicate (cgraph_node *src, cgraph_node *dst,
3563 ipa_node_params *old_info,
3564 ipa_node_params *new_info)
3566 ipa_agg_replacement_value *old_av, *new_av;
3568 new_info->descriptors = old_info->descriptors.copy ();
3569 new_info->lattices = NULL;
3570 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3572 new_info->analysis_done = old_info->analysis_done;
3573 new_info->node_enqueued = old_info->node_enqueued;
3575 old_av = ipa_get_agg_replacements_for_node (src);
3576 if (old_av)
3578 new_av = NULL;
3579 while (old_av)
3581 struct ipa_agg_replacement_value *v;
3583 v = ggc_alloc<ipa_agg_replacement_value> ();
3584 memcpy (v, old_av, sizeof (*v));
3585 v->next = new_av;
3586 new_av = v;
3587 old_av = old_av->next;
3589 ipa_set_node_agg_value_chain (dst, new_av);
3592 ipcp_transformation_summary *src_trans = ipcp_get_transformation_summary (src);
3594 if (src_trans && vec_safe_length (src_trans->alignments) > 0)
3596 ipcp_grow_transformations_if_necessary ();
3597 src_trans = ipcp_get_transformation_summary (src);
3598 const vec<ipa_alignment, va_gc> *src_alignments = src_trans->alignments;
3599 vec<ipa_alignment, va_gc> *&dst_alignments
3600 = ipcp_get_transformation_summary (dst)->alignments;
3601 vec_safe_reserve_exact (dst_alignments, src_alignments->length ());
3602 for (unsigned i = 0; i < src_alignments->length (); ++i)
3603 dst_alignments->quick_push ((*src_alignments)[i]);
3607 /* Register our cgraph hooks if they are not already there. */
3609 void
3610 ipa_register_cgraph_hooks (void)
3612 ipa_check_create_node_params ();
3614 if (!edge_removal_hook_holder)
3615 edge_removal_hook_holder =
3616 symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3617 if (!edge_duplication_hook_holder)
3618 edge_duplication_hook_holder =
3619 symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3620 function_insertion_hook_holder =
3621 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3624 /* Unregister our cgraph hooks. */
3626 static void
3627 ipa_unregister_cgraph_hooks (void)
3629 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
3630 edge_removal_hook_holder = NULL;
3631 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
3632 edge_duplication_hook_holder = NULL;
3633 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3634 function_insertion_hook_holder = NULL;
3637 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3638 longer needed after ipa-cp. */
3640 void
3641 ipa_free_all_structures_after_ipa_cp (void)
3643 if (!optimize && !in_lto_p)
3645 ipa_free_all_edge_args ();
3646 ipa_free_all_node_params ();
3647 free_alloc_pool (ipcp_sources_pool);
3648 free_alloc_pool (ipcp_cst_values_pool);
3649 free_alloc_pool (ipcp_poly_ctx_values_pool);
3650 free_alloc_pool (ipcp_agg_lattice_pool);
3651 ipa_unregister_cgraph_hooks ();
3652 if (ipa_refdesc_pool)
3653 free_alloc_pool (ipa_refdesc_pool);
3657 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3658 longer needed after indirect inlining. */
3660 void
3661 ipa_free_all_structures_after_iinln (void)
3663 ipa_free_all_edge_args ();
3664 ipa_free_all_node_params ();
3665 ipa_unregister_cgraph_hooks ();
3666 if (ipcp_sources_pool)
3667 free_alloc_pool (ipcp_sources_pool);
3668 if (ipcp_cst_values_pool)
3669 free_alloc_pool (ipcp_cst_values_pool);
3670 if (ipcp_poly_ctx_values_pool)
3671 free_alloc_pool (ipcp_poly_ctx_values_pool);
3672 if (ipcp_agg_lattice_pool)
3673 free_alloc_pool (ipcp_agg_lattice_pool);
3674 if (ipa_refdesc_pool)
3675 free_alloc_pool (ipa_refdesc_pool);
3678 /* Print the parameter descriptors of function NODE to F. */
3681 void
3682 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3684 int i, count;
3685 struct ipa_node_params *info;
3687 if (!node->definition)
3688 return;
3689 info = IPA_NODE_REF (node);
3690 fprintf (f, " function %s/%i parameter descriptors:\n",
3691 node->name (), node->order);
3692 count = ipa_get_param_count (info);
3693 for (i = 0; i < count; i++)
3695 int c;
3697 fprintf (f, " ");
3698 ipa_dump_param (f, info, i);
3699 if (ipa_is_param_used (info, i))
3700 fprintf (f, " used");
3701 c = ipa_get_controlled_uses (info, i);
3702 if (c == IPA_UNDESCRIBED_USE)
3703 fprintf (f, " undescribed_use");
3704 else
3705 fprintf (f, " controlled_uses=%i", c);
3706 fprintf (f, "\n");
3710 /* Print parameter descriptors of all functions in the
3711 callgraph to F. */
3713 void
3714 ipa_print_all_params (FILE * f)
3716 struct cgraph_node *node;
3718 fprintf (f, "\nFunction parameters:\n");
3719 FOR_EACH_FUNCTION (node)
3720 ipa_print_node_params (f, node);
3723 /* Return a heap allocated vector containing formal parameters of FNDECL. */
3725 vec<tree>
3726 ipa_get_vector_of_formal_parms (tree fndecl)
3728 vec<tree> args;
3729 int count;
3730 tree parm;
3732 gcc_assert (!flag_wpa);
3733 count = count_formal_params (fndecl);
3734 args.create (count);
3735 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3736 args.quick_push (parm);
3738 return args;
3741 /* Return a heap allocated vector containing types of formal parameters of
3742 function type FNTYPE. */
3744 vec<tree>
3745 ipa_get_vector_of_formal_parm_types (tree fntype)
3747 vec<tree> types;
3748 int count = 0;
3749 tree t;
3751 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3752 count++;
3754 types.create (count);
3755 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3756 types.quick_push (TREE_VALUE (t));
3758 return types;
3761 /* Modify the function declaration FNDECL and its type according to the plan in
3762 ADJUSTMENTS. It also sets the base fields of individual adjustment structures
3763 to reflect the actual parameters being modified, which are determined by the
3764 base_index field. */
3766 void
3767 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3769 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3770 tree orig_type = TREE_TYPE (fndecl);
3771 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3773 /* The following test is an ugly hack, some functions simply don't have any
3774 arguments in their type. This is probably a bug but well... */
3775 bool care_for_types = (old_arg_types != NULL_TREE);
3776 bool last_parm_void;
3777 vec<tree> otypes;
3778 if (care_for_types)
3780 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3781 == void_type_node);
3782 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3783 if (last_parm_void)
3784 gcc_assert (oparms.length () + 1 == otypes.length ());
3785 else
3786 gcc_assert (oparms.length () == otypes.length ());
3788 else
3790 last_parm_void = false;
3791 otypes.create (0);
3794 int len = adjustments.length ();
3795 tree *link = &DECL_ARGUMENTS (fndecl);
3796 tree new_arg_types = NULL;
3797 for (int i = 0; i < len; i++)
3799 struct ipa_parm_adjustment *adj;
3800 gcc_assert (link);
3802 adj = &adjustments[i];
3803 tree parm;
3804 if (adj->op == IPA_PARM_OP_NEW)
3805 parm = NULL;
3806 else
3807 parm = oparms[adj->base_index];
3808 adj->base = parm;
3810 if (adj->op == IPA_PARM_OP_COPY)
3812 if (care_for_types)
3813 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3814 new_arg_types);
3815 *link = parm;
3816 link = &DECL_CHAIN (parm);
3818 else if (adj->op != IPA_PARM_OP_REMOVE)
3820 tree new_parm;
3821 tree ptype;
3823 if (adj->by_ref)
3824 ptype = build_pointer_type (adj->type);
3825 else
3827 ptype = adj->type;
3828 if (is_gimple_reg_type (ptype))
3830 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3831 if (TYPE_ALIGN (ptype) < malign)
3832 ptype = build_aligned_type (ptype, malign);
3836 if (care_for_types)
3837 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3839 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3840 ptype);
3841 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
3842 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3843 DECL_ARTIFICIAL (new_parm) = 1;
3844 DECL_ARG_TYPE (new_parm) = ptype;
3845 DECL_CONTEXT (new_parm) = fndecl;
3846 TREE_USED (new_parm) = 1;
3847 DECL_IGNORED_P (new_parm) = 1;
3848 layout_decl (new_parm, 0);
3850 if (adj->op == IPA_PARM_OP_NEW)
3851 adj->base = NULL;
3852 else
3853 adj->base = parm;
3854 adj->new_decl = new_parm;
3856 *link = new_parm;
3857 link = &DECL_CHAIN (new_parm);
3861 *link = NULL_TREE;
3863 tree new_reversed = NULL;
3864 if (care_for_types)
3866 new_reversed = nreverse (new_arg_types);
3867 if (last_parm_void)
3869 if (new_reversed)
3870 TREE_CHAIN (new_arg_types) = void_list_node;
3871 else
3872 new_reversed = void_list_node;
3876 /* Use copy_node to preserve as much as possible from the original type
3877 (debug info, attribute lists etc.).
3878 The exception is that METHOD_TYPEs must have a THIS argument.
3879 When we are asked to remove it, we need to build a new FUNCTION_TYPE
3880 instead. */
3881 tree new_type = NULL;
3882 if (TREE_CODE (orig_type) != METHOD_TYPE
3883 || (adjustments[0].op == IPA_PARM_OP_COPY
3884 && adjustments[0].base_index == 0))
3886 new_type = build_distinct_type_copy (orig_type);
3887 TYPE_ARG_TYPES (new_type) = new_reversed;
3889 else
3891 new_type
3892 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3893 new_reversed));
3894 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3895 DECL_VINDEX (fndecl) = NULL_TREE;
3898 /* When the signature changes, we need to clear builtin info. */
3899 if (DECL_BUILT_IN (fndecl))
3901 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3902 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3905 TREE_TYPE (fndecl) = new_type;
3906 DECL_VIRTUAL_P (fndecl) = 0;
3907 DECL_LANG_SPECIFIC (fndecl) = NULL;
3908 otypes.release ();
3909 oparms.release ();
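/* Illustrative sketch (hypothetical helper, not used anywhere in this file):
   build an adjustment vector that keeps only the first formal parameter of
   FNDECL and drops the rest, then apply it.  Real callers such as IPA-SRA
   additionally fill in type, offset and by_ref information for parameters
   that are split or passed differently.  */

static void
keep_only_first_parm (tree fndecl)
{
  int nparms = count_formal_params (fndecl);
  ipa_parm_adjustment_vec adjustments;

  adjustments.create (nparms);
  for (int i = 0; i < nparms; i++)
    {
      struct ipa_parm_adjustment adj;
      memset (&adj, 0, sizeof (adj));
      adj.base_index = i;
      adj.op = (i == 0) ? IPA_PARM_OP_COPY : IPA_PARM_OP_REMOVE;
      adjustments.quick_push (adj);
    }
  ipa_modify_formal_parameters (fndecl, adjustments);
  adjustments.release ();
}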
3912 /* Modify the actual arguments of the function call statement STMT as
3913 indicated in ADJUSTMENTS. If this is a directly recursive call, CS must
3914 be NULL. Otherwise it must contain the corresponding call graph edge. */
3916 void
3917 ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
3918 ipa_parm_adjustment_vec adjustments)
3920 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
3921 vec<tree> vargs;
3922 vec<tree, va_gc> **debug_args = NULL;
3923 gcall *new_stmt;
3924 gimple_stmt_iterator gsi, prev_gsi;
3925 tree callee_decl;
3926 int i, len;
3928 len = adjustments.length ();
3929 vargs.create (len);
3930 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
3931 current_node->remove_stmt_references (stmt);
3933 gsi = gsi_for_stmt (stmt);
3934 prev_gsi = gsi;
3935 gsi_prev (&prev_gsi);
3936 for (i = 0; i < len; i++)
3938 struct ipa_parm_adjustment *adj;
3940 adj = &adjustments[i];
3942 if (adj->op == IPA_PARM_OP_COPY)
3944 tree arg = gimple_call_arg (stmt, adj->base_index);
3946 vargs.quick_push (arg);
3948 else if (adj->op != IPA_PARM_OP_REMOVE)
3950 tree expr, base, off;
3951 location_t loc;
3952 unsigned int deref_align = 0;
3953 bool deref_base = false;
3955 /* Since we create a new parameter out of the value of the old one, we can
3956 do the following kinds of transformations:
3958 - A scalar passed by reference is converted to a scalar passed by
3959 value. (adj->by_ref is false and the type of the original
3960 actual argument is a pointer to a scalar).
3962 - A part of an aggregate is passed instead of the whole aggregate.
3963 The part can be passed either by value or by reference, this is
3964 determined by value of adj->by_ref. Moreover, the code below
3965 handles both situations when the original aggregate is passed by
3966 value (its type is not a pointer) and when it is passed by
3967 reference (it is a pointer to an aggregate).
3969 When the new argument is passed by reference (adj->by_ref is true)
3970 it must be a part of an aggregate and therefore we form it by
3971 simply taking the address of a reference inside the original
3972 aggregate. */
3974 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3975 base = gimple_call_arg (stmt, adj->base_index);
3976 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3977 : EXPR_LOCATION (base);
3979 if (TREE_CODE (base) != ADDR_EXPR
3980 && POINTER_TYPE_P (TREE_TYPE (base)))
3981 off = build_int_cst (adj->alias_ptr_type,
3982 adj->offset / BITS_PER_UNIT);
3983 else
3985 HOST_WIDE_INT base_offset;
3986 tree prev_base;
3987 bool addrof;
3989 if (TREE_CODE (base) == ADDR_EXPR)
3991 base = TREE_OPERAND (base, 0);
3992 addrof = true;
3994 else
3995 addrof = false;
3996 prev_base = base;
3997 base = get_addr_base_and_unit_offset (base, &base_offset);
3998 /* Aggregate arguments can have non-invariant addresses. */
3999 if (!base)
4001 base = build_fold_addr_expr (prev_base);
4002 off = build_int_cst (adj->alias_ptr_type,
4003 adj->offset / BITS_PER_UNIT);
4005 else if (TREE_CODE (base) == MEM_REF)
4007 if (!addrof)
4009 deref_base = true;
4010 deref_align = TYPE_ALIGN (TREE_TYPE (base));
4012 off = build_int_cst (adj->alias_ptr_type,
4013 base_offset
4014 + adj->offset / BITS_PER_UNIT);
4015 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
4016 off);
4017 base = TREE_OPERAND (base, 0);
4019 else
4021 off = build_int_cst (adj->alias_ptr_type,
4022 base_offset
4023 + adj->offset / BITS_PER_UNIT);
4024 base = build_fold_addr_expr (base);
4028 if (!adj->by_ref)
4030 tree type = adj->type;
4031 unsigned int align;
4032 unsigned HOST_WIDE_INT misalign;
4034 if (deref_base)
4036 align = deref_align;
4037 misalign = 0;
4039 else
4041 get_pointer_alignment_1 (base, &align, &misalign);
4042 if (TYPE_ALIGN (type) > align)
4043 align = TYPE_ALIGN (type);
4045 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
4046 * BITS_PER_UNIT);
4047 misalign = misalign & (align - 1);
4048 if (misalign != 0)
4049 align = (misalign & -misalign);
4050 if (align < TYPE_ALIGN (type))
4051 type = build_aligned_type (type, align);
4052 base = force_gimple_operand_gsi (&gsi, base,
4053 true, NULL, true, GSI_SAME_STMT);
4054 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
4055 /* If expr is not a valid gimple call argument, emit
4056 a load into a temporary. */
4057 if (is_gimple_reg_type (TREE_TYPE (expr)))
4059 gimple tem = gimple_build_assign (NULL_TREE, expr);
4060 if (gimple_in_ssa_p (cfun))
4062 gimple_set_vuse (tem, gimple_vuse (stmt));
4063 expr = make_ssa_name (TREE_TYPE (expr), tem);
4065 else
4066 expr = create_tmp_reg (TREE_TYPE (expr));
4067 gimple_assign_set_lhs (tem, expr);
4068 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4071 else
4073 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4074 expr = build_fold_addr_expr (expr);
4075 expr = force_gimple_operand_gsi (&gsi, expr,
4076 true, NULL, true, GSI_SAME_STMT);
4078 vargs.quick_push (expr);
4080 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4082 unsigned int ix;
4083 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4084 gimple def_temp;
4086 arg = gimple_call_arg (stmt, adj->base_index);
4087 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4089 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4090 continue;
4091 arg = fold_convert_loc (gimple_location (stmt),
4092 TREE_TYPE (origin), arg);
4094 if (debug_args == NULL)
4095 debug_args = decl_debug_args_insert (callee_decl);
4096 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4097 if (ddecl == origin)
4099 ddecl = (**debug_args)[ix + 1];
4100 break;
4102 if (ddecl == NULL)
4104 ddecl = make_node (DEBUG_EXPR_DECL);
4105 DECL_ARTIFICIAL (ddecl) = 1;
4106 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4107 DECL_MODE (ddecl) = DECL_MODE (origin);
4109 vec_safe_push (*debug_args, origin);
4110 vec_safe_push (*debug_args, ddecl);
4112 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4113 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4117 if (dump_file && (dump_flags & TDF_DETAILS))
4119 fprintf (dump_file, "replacing stmt:");
4120 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4123 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4124 vargs.release ();
4125 if (gimple_call_lhs (stmt))
4126 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4128 gimple_set_block (new_stmt, gimple_block (stmt));
4129 if (gimple_has_location (stmt))
4130 gimple_set_location (new_stmt, gimple_location (stmt));
4131 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4132 gimple_call_copy_flags (new_stmt, stmt);
4133 if (gimple_in_ssa_p (cfun))
4135 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4136 if (gimple_vdef (stmt))
4138 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4139 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4143 if (dump_file && (dump_flags & TDF_DETAILS))
4145 fprintf (dump_file, "with stmt:");
4146 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4147 fprintf (dump_file, "\n");
4149 gsi_replace (&gsi, new_stmt, true);
4150 if (cs)
4151 cs->set_call_stmt (new_stmt);
4154 current_node->record_stmt_references (gsi_stmt (gsi));
4155 gsi_prev (&gsi);
4157 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
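/* Worked example (illustrative, names made up): suppose the original call is
   foo (&s) and the single adjustment replaces the aggregate with its field at
   adj->offset of 32 bits, passed by value.  The loop above computes base = &s
   and off = 4, builds the equivalent of *(field_type *) ((char *) &s + 4) as
   a MEM_REF, loads it into a temporary because such a memory reference of
   register type is not a valid gimple call argument, and the rewritten call
   then becomes foo.clone (tmp_1).  */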
4160 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4161 so. ADJUSTMENTS is the vector of adjustments. CONVERT specifies whether
4162 the function should care about type incompatibility between the current
4163 and new expressions. If it is false, the function will leave
4164 incompatibility issues to the caller. Return true iff the expression
4165 was modified. */
4167 bool
4168 ipa_modify_expr (tree *expr, bool convert,
4169 ipa_parm_adjustment_vec adjustments)
4171 struct ipa_parm_adjustment *cand
4172 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4173 if (!cand)
4174 return false;
4176 tree src;
4177 if (cand->by_ref)
4178 src = build_simple_mem_ref (cand->new_decl);
4179 else
4180 src = cand->new_decl;
4182 if (dump_file && (dump_flags & TDF_DETAILS))
4184 fprintf (dump_file, "About to replace expr ");
4185 print_generic_expr (dump_file, *expr, 0);
4186 fprintf (dump_file, " with ");
4187 print_generic_expr (dump_file, src, 0);
4188 fprintf (dump_file, "\n");
4191 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4193 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4194 *expr = vce;
4196 else
4197 *expr = src;
4198 return true;
4201 /* If T is an SSA_NAME, return NULL if it is not a default def or
4202 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4203 the base variable is always returned, regardless of whether it is a
4204 default def. Return T if it is not an SSA_NAME. */
4206 static tree
4207 get_ssa_base_param (tree t, bool ignore_default_def)
4209 if (TREE_CODE (t) == SSA_NAME)
4211 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4212 return SSA_NAME_VAR (t);
4213 else
4214 return NULL_TREE;
4216 return t;
4219 /* Given an expression, return an adjustment entry specifying the
4220 transformation to be done on EXPR. If no suitable adjustment entry
4221 was found, returns NULL.
4223 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4224 default def; otherwise bail on them.
4226 If CONVERT is non-NULL, this function will set *CONVERT if the
4227 expression provided is a component reference. ADJUSTMENTS is the
4228 adjustments vector. */
4230 ipa_parm_adjustment *
4231 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4232 ipa_parm_adjustment_vec adjustments,
4233 bool ignore_default_def)
4235 if (TREE_CODE (**expr) == BIT_FIELD_REF
4236 || TREE_CODE (**expr) == IMAGPART_EXPR
4237 || TREE_CODE (**expr) == REALPART_EXPR)
4239 *expr = &TREE_OPERAND (**expr, 0);
4240 if (convert)
4241 *convert = true;
4244 HOST_WIDE_INT offset, size, max_size;
4245 tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
4246 if (!base || size == -1 || max_size == -1)
4247 return NULL;
4249 if (TREE_CODE (base) == MEM_REF)
4251 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4252 base = TREE_OPERAND (base, 0);
4255 base = get_ssa_base_param (base, ignore_default_def);
4256 if (!base || TREE_CODE (base) != PARM_DECL)
4257 return NULL;
4259 struct ipa_parm_adjustment *cand = NULL;
4260 unsigned int len = adjustments.length ();
4261 for (unsigned i = 0; i < len; i++)
4263 struct ipa_parm_adjustment *adj = &adjustments[i];
4265 if (adj->base == base
4266 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4268 cand = adj;
4269 break;
4273 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4274 return NULL;
4275 return cand;
4278 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4280 static bool
4281 index_in_adjustments_multiple_times_p (int base_index,
4282 ipa_parm_adjustment_vec adjustments)
4284 int i, len = adjustments.length ();
4285 bool one = false;
4287 for (i = 0; i < len; i++)
4289 struct ipa_parm_adjustment *adj;
4290 adj = &adjustments[i];
4292 if (adj->base_index == base_index)
4294 if (one)
4295 return true;
4296 else
4297 one = true;
4300 return false;
4304 /* Return adjustments that should have the same effect on function parameters
4305 and call arguments as if they were first changed according to adjustments in
4306 INNER and then by adjustments in OUTER. */
4308 ipa_parm_adjustment_vec
4309 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4310 ipa_parm_adjustment_vec outer)
4312 int i, outlen = outer.length ();
4313 int inlen = inner.length ();
4314 int removals = 0;
4315 ipa_parm_adjustment_vec adjustments, tmp;
4317 tmp.create (inlen);
4318 for (i = 0; i < inlen; i++)
4320 struct ipa_parm_adjustment *n;
4321 n = &inner[i];
4323 if (n->op == IPA_PARM_OP_REMOVE)
4324 removals++;
4325 else
4327 /* FIXME: Handling of new arguments is not implemented yet. */
4328 gcc_assert (n->op != IPA_PARM_OP_NEW);
4329 tmp.quick_push (*n);
4333 adjustments.create (outlen + removals);
4334 for (i = 0; i < outlen; i++)
4336 struct ipa_parm_adjustment r;
4337 struct ipa_parm_adjustment *out = &outer[i];
4338 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4340 memset (&r, 0, sizeof (r));
4341 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4342 if (out->op == IPA_PARM_OP_REMOVE)
4344 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4346 r.op = IPA_PARM_OP_REMOVE;
4347 adjustments.quick_push (r);
4349 continue;
4351 else
4353 /* FIXME: Handling of new arguments is not implemented yet. */
4354 gcc_assert (out->op != IPA_PARM_OP_NEW);
4357 r.base_index = in->base_index;
4358 r.type = out->type;
4360 /* FIXME: Create nonlocal value too. */
4362 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4363 r.op = IPA_PARM_OP_COPY;
4364 else if (in->op == IPA_PARM_OP_COPY)
4365 r.offset = out->offset;
4366 else if (out->op == IPA_PARM_OP_COPY)
4367 r.offset = in->offset;
4368 else
4369 r.offset = in->offset + out->offset;
4370 adjustments.quick_push (r);
4373 for (i = 0; i < inlen; i++)
4375 struct ipa_parm_adjustment *n = &inner[i];
4377 if (n->op == IPA_PARM_OP_REMOVE)
4378 adjustments.quick_push (*n);
4381 tmp.release ();
4382 return adjustments;
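/* Worked example (illustrative): assume the original function took three
   parameters (a, b, c).  If INNER removed b, i.e. contained { copy a,
   remove b, copy c }, and OUTER, expressed in terms of the intermediate
   signature (a, c), removes its second parameter, then the result built
   above contains a COPY of a, a REMOVE coming from OUTER, and INNER's
   REMOVE of b re-appended by the last loop, so that applying it to the
   original function has the same effect as applying INNER and then OUTER.  */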
4385 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a human-friendly
4386 way, assuming they are meant to be applied to FNDECL. */
4388 void
4389 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4390 tree fndecl)
4392 int i, len = adjustments.length ();
4393 bool first = true;
4394 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4396 fprintf (file, "IPA param adjustments: ");
4397 for (i = 0; i < len; i++)
4399 struct ipa_parm_adjustment *adj;
4400 adj = &adjustments[i];
4402 if (!first)
4403 fprintf (file, " ");
4404 else
4405 first = false;
4407 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4408 print_generic_expr (file, parms[adj->base_index], 0);
4409 if (adj->base)
4411 fprintf (file, ", base: ");
4412 print_generic_expr (file, adj->base, 0);
4414 if (adj->new_decl)
4416 fprintf (file, ", new_decl: ");
4417 print_generic_expr (file, adj->new_decl, 0);
4419 if (adj->new_ssa_base)
4421 fprintf (file, ", new_ssa_base: ");
4422 print_generic_expr (file, adj->new_ssa_base, 0);
4425 if (adj->op == IPA_PARM_OP_COPY)
4426 fprintf (file, ", copy_param");
4427 else if (adj->op == IPA_PARM_OP_REMOVE)
4428 fprintf (file, ", remove_param");
4429 else
4430 fprintf (file, ", offset %li", (long) adj->offset);
4431 if (adj->by_ref)
4432 fprintf (file, ", by_ref");
4433 print_node_brief (file, ", type: ", adj->type, 0);
4434 fprintf (file, "\n");
4436 parms.release ();
4439 /* Dump the linked list of aggregate replacement values AV to F. */
4441 void
4442 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4444 bool comma = false;
4445 fprintf (f, " Aggregate replacements:");
4446 for (; av; av = av->next)
4448 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4449 av->index, av->offset);
4450 print_generic_expr (f, av->value, 0);
4451 comma = true;
4453 fprintf (f, "\n");
4456 /* Stream out jump function JUMP_FUNC to OB. */
4458 static void
4459 ipa_write_jump_function (struct output_block *ob,
4460 struct ipa_jump_func *jump_func)
4462 struct ipa_agg_jf_item *item;
4463 struct bitpack_d bp;
4464 int i, count;
4466 streamer_write_uhwi (ob, jump_func->type);
4467 switch (jump_func->type)
4469 case IPA_JF_UNKNOWN:
4470 break;
4471 case IPA_JF_CONST:
4472 gcc_assert (
4473 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4474 stream_write_tree (ob, jump_func->value.constant.value, true);
4475 break;
4476 case IPA_JF_PASS_THROUGH:
4477 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4478 if (jump_func->value.pass_through.operation == NOP_EXPR)
4480 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4481 bp = bitpack_create (ob->main_stream);
4482 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4483 streamer_write_bitpack (&bp);
4485 else
4487 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4488 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4490 break;
4491 case IPA_JF_ANCESTOR:
4492 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4493 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4494 bp = bitpack_create (ob->main_stream);
4495 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4496 streamer_write_bitpack (&bp);
4497 break;
4500 count = vec_safe_length (jump_func->agg.items);
4501 streamer_write_uhwi (ob, count);
4502 if (count)
4504 bp = bitpack_create (ob->main_stream);
4505 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4506 streamer_write_bitpack (&bp);
4509 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4511 streamer_write_uhwi (ob, item->offset);
4512 stream_write_tree (ob, item->value, true);
4515 bp = bitpack_create (ob->main_stream);
4516 bp_pack_value (&bp, jump_func->alignment.known, 1);
4517 streamer_write_bitpack (&bp);
4518 if (jump_func->alignment.known)
4520 streamer_write_uhwi (ob, jump_func->alignment.align);
4521 streamer_write_uhwi (ob, jump_func->alignment.misalign);
4525 /* Read in jump function JUMP_FUNC from IB. */
4527 static void
4528 ipa_read_jump_function (struct lto_input_block *ib,
4529 struct ipa_jump_func *jump_func,
4530 struct cgraph_edge *cs,
4531 struct data_in *data_in)
4533 enum jump_func_type jftype;
4534 enum tree_code operation;
4535 int i, count;
4537 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4538 switch (jftype)
4540 case IPA_JF_UNKNOWN:
4541 ipa_set_jf_unknown (jump_func);
4542 break;
4543 case IPA_JF_CONST:
4544 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4545 break;
4546 case IPA_JF_PASS_THROUGH:
4547 operation = (enum tree_code) streamer_read_uhwi (ib);
4548 if (operation == NOP_EXPR)
4550 int formal_id = streamer_read_uhwi (ib);
4551 struct bitpack_d bp = streamer_read_bitpack (ib);
4552 bool agg_preserved = bp_unpack_value (&bp, 1);
4553 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4555 else
4557 tree operand = stream_read_tree (ib, data_in);
4558 int formal_id = streamer_read_uhwi (ib);
4559 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4560 operation);
4562 break;
4563 case IPA_JF_ANCESTOR:
4565 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4566 int formal_id = streamer_read_uhwi (ib);
4567 struct bitpack_d bp = streamer_read_bitpack (ib);
4568 bool agg_preserved = bp_unpack_value (&bp, 1);
4569 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4570 break;
4574 count = streamer_read_uhwi (ib);
4575 vec_alloc (jump_func->agg.items, count);
4576 if (count)
4578 struct bitpack_d bp = streamer_read_bitpack (ib);
4579 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4581 for (i = 0; i < count; i++)
4583 struct ipa_agg_jf_item item;
4584 item.offset = streamer_read_uhwi (ib);
4585 item.value = stream_read_tree (ib, data_in);
4586 jump_func->agg.items->quick_push (item);
4589 struct bitpack_d bp = streamer_read_bitpack (ib);
4590 bool alignment_known = bp_unpack_value (&bp, 1);
4591 if (alignment_known)
4593 jump_func->alignment.known = true;
4594 jump_func->alignment.align = streamer_read_uhwi (ib);
4595 jump_func->alignment.misalign = streamer_read_uhwi (ib);
4597 else
4598 jump_func->alignment.known = false;
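/* All the single-bit flags streamed above (agg_preserved, agg.by_ref,
   alignment.known) follow the same bit-packing pattern.  A minimal sketch of
   that pattern for one hypothetical flag looks like

     struct bitpack_d bp = bitpack_create (ob->main_stream);
     bp_pack_value (&bp, flag, 1);
     streamer_write_bitpack (&bp);

   on the writing side and, in exactly the same order,

     struct bitpack_d bp = streamer_read_bitpack (ib);
     bool flag = bp_unpack_value (&bp, 1);

   on the reading side.  This is why ipa_read_jump_function has to mirror
   ipa_write_jump_function case by case.  */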
4601 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4602 relevant to indirect inlining to OB. */
4604 static void
4605 ipa_write_indirect_edge_info (struct output_block *ob,
4606 struct cgraph_edge *cs)
4608 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4609 struct bitpack_d bp;
4611 streamer_write_hwi (ob, ii->param_index);
4612 bp = bitpack_create (ob->main_stream);
4613 bp_pack_value (&bp, ii->polymorphic, 1);
4614 bp_pack_value (&bp, ii->agg_contents, 1);
4615 bp_pack_value (&bp, ii->member_ptr, 1);
4616 bp_pack_value (&bp, ii->by_ref, 1);
4617 bp_pack_value (&bp, ii->vptr_changed, 1);
4618 streamer_write_bitpack (&bp);
4619 if (ii->agg_contents || ii->polymorphic)
4620 streamer_write_hwi (ob, ii->offset);
4621 else
4622 gcc_assert (ii->offset == 0);
4624 if (ii->polymorphic)
4626 streamer_write_hwi (ob, ii->otr_token);
4627 stream_write_tree (ob, ii->otr_type, true);
4628 ii->context.stream_out (ob);
4632 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4633 relevant to indirect inlining from IB. */
4635 static void
4636 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4637 struct data_in *data_in,
4638 struct cgraph_edge *cs)
4640 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4641 struct bitpack_d bp;
4643 ii->param_index = (int) streamer_read_hwi (ib);
4644 bp = streamer_read_bitpack (ib);
4645 ii->polymorphic = bp_unpack_value (&bp, 1);
4646 ii->agg_contents = bp_unpack_value (&bp, 1);
4647 ii->member_ptr = bp_unpack_value (&bp, 1);
4648 ii->by_ref = bp_unpack_value (&bp, 1);
4649 ii->vptr_changed = bp_unpack_value (&bp, 1);
4650 if (ii->agg_contents || ii->polymorphic)
4651 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4652 else
4653 ii->offset = 0;
4654 if (ii->polymorphic)
4656 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4657 ii->otr_type = stream_read_tree (ib, data_in);
4658 ii->context.stream_in (ib, data_in);
4662 /* Stream out NODE info to OB. */
4664 static void
4665 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4667 int node_ref;
4668 lto_symtab_encoder_t encoder;
4669 struct ipa_node_params *info = IPA_NODE_REF (node);
4670 int j;
4671 struct cgraph_edge *e;
4672 struct bitpack_d bp;
4674 encoder = ob->decl_state->symtab_node_encoder;
4675 node_ref = lto_symtab_encoder_encode (encoder, node);
4676 streamer_write_uhwi (ob, node_ref);
4678 streamer_write_uhwi (ob, ipa_get_param_count (info));
4679 for (j = 0; j < ipa_get_param_count (info); j++)
4680 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4681 bp = bitpack_create (ob->main_stream);
4682 gcc_assert (info->analysis_done
4683 || ipa_get_param_count (info) == 0);
4684 gcc_assert (!info->node_enqueued);
4685 gcc_assert (!info->ipcp_orig_node);
4686 for (j = 0; j < ipa_get_param_count (info); j++)
4687 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4688 streamer_write_bitpack (&bp);
4689 for (j = 0; j < ipa_get_param_count (info); j++)
4690 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4691 for (e = node->callees; e; e = e->next_callee)
4693 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4695 streamer_write_uhwi (ob,
4696 ipa_get_cs_argument_count (args) * 2
4697 + (args->polymorphic_call_contexts != NULL));
4698 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4700 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4701 if (args->polymorphic_call_contexts != NULL)
4702 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4705 for (e = node->indirect_calls; e; e = e->next_callee)
4707 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4709 streamer_write_uhwi (ob,
4710 ipa_get_cs_argument_count (args) * 2
4711 + (args->polymorphic_call_contexts != NULL));
4712 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4714 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4715 if (args->polymorphic_call_contexts != NULL)
4716 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4718 ipa_write_indirect_edge_info (ob, e);
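/* Note on the encoding used above: the per-edge count is streamed as
   ipa_get_cs_argument_count (args) * 2 + (args->polymorphic_call_contexts != NULL),
   i.e. the low bit records whether a polymorphic call context follows each
   jump function.  ipa_read_node_info below recovers both pieces with

     bool contexts_computed = count & 1;
     count /= 2;

   so the writer and the reader must be kept in sync.  */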
4722 /* Stream in NODE info from IB. */
4724 static void
4725 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4726 struct data_in *data_in)
4728 struct ipa_node_params *info = IPA_NODE_REF (node);
4729 int k;
4730 struct cgraph_edge *e;
4731 struct bitpack_d bp;
4733 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4735 for (k = 0; k < ipa_get_param_count (info); k++)
4736 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4738 bp = streamer_read_bitpack (ib);
4739 if (ipa_get_param_count (info) != 0)
4740 info->analysis_done = true;
4741 info->node_enqueued = false;
4742 for (k = 0; k < ipa_get_param_count (info); k++)
4743 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4744 for (k = 0; k < ipa_get_param_count (info); k++)
4745 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4746 for (e = node->callees; e; e = e->next_callee)
4748 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4749 int count = streamer_read_uhwi (ib);
4750 bool contexts_computed = count & 1;
4751 count /= 2;
4753 if (!count)
4754 continue;
4755 vec_safe_grow_cleared (args->jump_functions, count);
4756 if (contexts_computed)
4757 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4759 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4761 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4762 data_in);
4763 if (contexts_computed)
4764 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4767 for (e = node->indirect_calls; e; e = e->next_callee)
4769 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4770 int count = streamer_read_uhwi (ib);
4771 bool contexts_computed = count & 1;
4772 count /= 2;
4774 if (count)
4776 vec_safe_grow_cleared (args->jump_functions, count);
4777 if (contexts_computed)
4778 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4779 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4781 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4782 data_in);
4783 if (contexts_computed)
4784 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4787 ipa_read_indirect_edge_info (ib, data_in, e);
4791 /* Write jump functions of all analyzed functions in the current partition. */
4793 void
4794 ipa_prop_write_jump_functions (void)
4796 struct cgraph_node *node;
4797 struct output_block *ob;
4798 unsigned int count = 0;
4799 lto_symtab_encoder_iterator lsei;
4800 lto_symtab_encoder_t encoder;
4802 if (!ipa_node_params_sum)
4803 return;
4805 ob = create_output_block (LTO_section_jump_functions);
4806 encoder = ob->decl_state->symtab_node_encoder;
4807 ob->symbol = NULL;
4808 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4809 lsei_next_function_in_partition (&lsei))
4811 node = lsei_cgraph_node (lsei);
4812 if (node->has_gimple_body_p ()
4813 && IPA_NODE_REF (node) != NULL)
4814 count++;
4817 streamer_write_uhwi (ob, count);
4819 /* Process all of the functions. */
4820 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4821 lsei_next_function_in_partition (&lsei))
4823 node = lsei_cgraph_node (lsei);
4824 if (node->has_gimple_body_p ()
4825 && IPA_NODE_REF (node) != NULL)
4826 ipa_write_node_info (ob, node);
4828 streamer_write_char_stream (ob->main_stream, 0);
4829 produce_asm (ob, NULL);
4830 destroy_output_block (ob);
4833 /* Read section in file FILE_DATA of length LEN with data DATA. */
4835 static void
4836 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4837 size_t len)
4839 const struct lto_function_header *header =
4840 (const struct lto_function_header *) data;
4841 const int cfg_offset = sizeof (struct lto_function_header);
4842 const int main_offset = cfg_offset + header->cfg_size;
4843 const int string_offset = main_offset + header->main_size;
4844 struct data_in *data_in;
4845 unsigned int i;
4846 unsigned int count;
4848 lto_input_block ib_main ((const char *) data + main_offset,
4849 header->main_size);
4851 data_in =
4852 lto_data_in_create (file_data, (const char *) data + string_offset,
4853 header->string_size, vNULL);
4854 count = streamer_read_uhwi (&ib_main);
4856 for (i = 0; i < count; i++)
4858 unsigned int index;
4859 struct cgraph_node *node;
4860 lto_symtab_encoder_t encoder;
4862 index = streamer_read_uhwi (&ib_main);
4863 encoder = file_data->symtab_node_encoder;
4864 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4865 index));
4866 gcc_assert (node->definition);
4867 ipa_read_node_info (&ib_main, node, data_in);
4869 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4870 len);
4871 lto_data_in_delete (data_in);
4874 /* Read ipcp jump functions. */
4876 void
4877 ipa_prop_read_jump_functions (void)
4879 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4880 struct lto_file_decl_data *file_data;
4881 unsigned int j = 0;
4883 ipa_check_create_node_params ();
4884 ipa_check_create_edge_args ();
4885 ipa_register_cgraph_hooks ();
4887 while ((file_data = file_data_vec[j++]))
4889 size_t len;
4890 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4892 if (data)
4893 ipa_prop_read_section (file_data, data, len);
4897 /* After merging units, we can get a mismatch in argument counts.
4898 Decl merging might also have rendered parameter lists obsolete.
4899 Also compute called_with_variable_arg info. */
4901 void
4902 ipa_update_after_lto_read (void)
4904 ipa_check_create_node_params ();
4905 ipa_check_create_edge_args ();
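/* Stream out the aggregate value replacement chain and the known parameter
   alignments of NODE to OB.  */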
4908 void
4909 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
4911 int node_ref;
4912 unsigned int count = 0;
4913 lto_symtab_encoder_t encoder;
4914 struct ipa_agg_replacement_value *aggvals, *av;
4916 aggvals = ipa_get_agg_replacements_for_node (node);
4917 encoder = ob->decl_state->symtab_node_encoder;
4918 node_ref = lto_symtab_encoder_encode (encoder, node);
4919 streamer_write_uhwi (ob, node_ref);
4921 for (av = aggvals; av; av = av->next)
4922 count++;
4923 streamer_write_uhwi (ob, count);
4925 for (av = aggvals; av; av = av->next)
4927 struct bitpack_d bp;
4929 streamer_write_uhwi (ob, av->offset);
4930 streamer_write_uhwi (ob, av->index);
4931 stream_write_tree (ob, av->value, true);
4933 bp = bitpack_create (ob->main_stream);
4934 bp_pack_value (&bp, av->by_ref, 1);
4935 streamer_write_bitpack (&bp);
4938 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
4939 if (ts && vec_safe_length (ts->alignments) > 0)
4941 count = ts->alignments->length ();
4943 streamer_write_uhwi (ob, count);
4944 for (unsigned i = 0; i < count; ++i)
4946 ipa_alignment *parm_al = &(*ts->alignments)[i];
4948 struct bitpack_d bp;
4949 bp = bitpack_create (ob->main_stream);
4950 bp_pack_value (&bp, parm_al->known, 1);
4951 streamer_write_bitpack (&bp);
4952 if (parm_al->known)
4954 streamer_write_uhwi (ob, parm_al->align);
4955 streamer_write_hwi_in_range (ob->main_stream, 0, parm_al->align,
4956 parm_al->misalign);
4960 else
4961 streamer_write_uhwi (ob, 0);
4964 /* Stream in the aggregate value replacement chain for NODE from IB. */
4966 static void
4967 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
4968 data_in *data_in)
4970 struct ipa_agg_replacement_value *aggvals = NULL;
4971 unsigned int count, i;
4973 count = streamer_read_uhwi (ib);
4974 for (i = 0; i < count; i++)
4976 struct ipa_agg_replacement_value *av;
4977 struct bitpack_d bp;
4979 av = ggc_alloc<ipa_agg_replacement_value> ();
4980 av->offset = streamer_read_uhwi (ib);
4981 av->index = streamer_read_uhwi (ib);
4982 av->value = stream_read_tree (ib, data_in);
4983 bp = streamer_read_bitpack (ib);
4984 av->by_ref = bp_unpack_value (&bp, 1);
4985 av->next = aggvals;
4986 aggvals = av;
4988 ipa_set_node_agg_value_chain (node, aggvals);
4990 count = streamer_read_uhwi (ib);
4991 if (count > 0)
4993 ipcp_grow_transformations_if_necessary ();
4995 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
4996 vec_safe_grow_cleared (ts->alignments, count);
4998 for (i = 0; i < count; i++)
5000 ipa_alignment *parm_al;
5001 parm_al = &(*ts->alignments)[i];
5002 struct bitpack_d bp;
5003 bp = streamer_read_bitpack (ib);
5004 parm_al->known = bp_unpack_value (&bp, 1);
5005 if (parm_al->known)
5007 parm_al->align = streamer_read_uhwi (ib);
5008 parm_al->misalign
5009 = streamer_read_hwi_in_range (ib, "ipa-prop misalign",
5010 0, parm_al->align);
5016 /* Write all aggregate replacements for nodes in the current partition. */
5018 void
5019 ipcp_write_transformation_summaries (void)
5021 struct cgraph_node *node;
5022 struct output_block *ob;
5023 unsigned int count = 0;
5024 lto_symtab_encoder_iterator lsei;
5025 lto_symtab_encoder_t encoder;
5027 ob = create_output_block (LTO_section_ipcp_transform);
5028 encoder = ob->decl_state->symtab_node_encoder;
5029 ob->symbol = NULL;
5030 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5031 lsei_next_function_in_partition (&lsei))
5033 node = lsei_cgraph_node (lsei);
5034 if (node->has_gimple_body_p ())
5035 count++;
5038 streamer_write_uhwi (ob, count);
5040 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5041 lsei_next_function_in_partition (&lsei))
5043 node = lsei_cgraph_node (lsei);
5044 if (node->has_gimple_body_p ())
5045 write_ipcp_transformation_info (ob, node);
5047 streamer_write_char_stream (ob->main_stream, 0);
5048 produce_asm (ob, NULL);
5049 destroy_output_block (ob);
5052 /* Read replacements section in file FILE_DATA of length LEN with data
5053 DATA. */
5055 static void
5056 read_replacements_section (struct lto_file_decl_data *file_data,
5057 const char *data,
5058 size_t len)
5060 const struct lto_function_header *header =
5061 (const struct lto_function_header *) data;
5062 const int cfg_offset = sizeof (struct lto_function_header);
5063 const int main_offset = cfg_offset + header->cfg_size;
5064 const int string_offset = main_offset + header->main_size;
5065 struct data_in *data_in;
5066 unsigned int i;
5067 unsigned int count;
5069 lto_input_block ib_main ((const char *) data + main_offset,
5070 header->main_size);
5072 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5073 header->string_size, vNULL);
5074 count = streamer_read_uhwi (&ib_main);
5076 for (i = 0; i < count; i++)
5078 unsigned int index;
5079 struct cgraph_node *node;
5080 lto_symtab_encoder_t encoder;
5082 index = streamer_read_uhwi (&ib_main);
5083 encoder = file_data->symtab_node_encoder;
5084 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5085 index));
5086 gcc_assert (node->definition);
5087 read_ipcp_transformation_info (&ib_main, node, data_in);
5089 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5090 len);
5091 lto_data_in_delete (data_in);
5094 /* Read IPA-CP aggregate replacements. */
5096 void
5097 ipcp_read_transformation_summaries (void)
5099 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5100 struct lto_file_decl_data *file_data;
5101 unsigned int j = 0;
5103 while ((file_data = file_data_vec[j++]))
5105 size_t len;
5106 const char *data = lto_get_section_data (file_data,
5107 LTO_section_ipcp_transform,
5108 NULL, &len);
5109 if (data)
5110 read_replacements_section (file_data, data, len);
5114 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5115 NODE. */
5117 static void
5118 adjust_agg_replacement_values (struct cgraph_node *node,
5119 struct ipa_agg_replacement_value *aggval)
5121 struct ipa_agg_replacement_value *v;
5122 int i, c = 0, d = 0, *adj;
5124 if (!node->clone.combined_args_to_skip)
5125 return;
5127 for (v = aggval; v; v = v->next)
5129 gcc_assert (v->index >= 0);
5130 if (c < v->index)
5131 c = v->index;
5133 c++;
5135 adj = XALLOCAVEC (int, c);
5136 for (i = 0; i < c; i++)
5137 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5139 adj[i] = -1;
5140 d++;
5142 else
5143 adj[i] = i - d;
5145 for (v = aggval; v; v = v->next)
5146 v->index = adj[v->index];
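/* Worked example (illustrative): if NODE is a clone whose second argument was
   skipped, combined_args_to_skip has bit 1 set and the table computed above is
   adj = { 0, -1, 1, 2, ... }.  A replacement recorded for original parameter
   index 2 is therefore remapped to index 1 in the clone, while one recorded
   for the skipped parameter would end up with index -1.  */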
5149 /* Dominator walker driving the ipcp modification phase. */
5151 class ipcp_modif_dom_walker : public dom_walker
5153 public:
5154 ipcp_modif_dom_walker (struct func_body_info *fbi,
5155 vec<ipa_param_descriptor> descs,
5156 struct ipa_agg_replacement_value *av,
5157 bool *sc, bool *cc)
5158 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5159 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5161 virtual void before_dom_children (basic_block);
5163 private:
5164 struct func_body_info *m_fbi;
5165 vec<ipa_param_descriptor> m_descriptors;
5166 struct ipa_agg_replacement_value *m_aggval;
5167 bool *m_something_changed, *m_cfg_changed;
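/* Scan BB for loads from parts of aggregate parameters for which an aggregate
   replacement value is known, and replace such loads with the (possibly
   converted) constant, recording whether anything changed and whether EH
   cleanups made the CFG change.  */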
5170 void
5171 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5173 gimple_stmt_iterator gsi;
5174 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5176 struct ipa_agg_replacement_value *v;
5177 gimple stmt = gsi_stmt (gsi);
5178 tree rhs, val, t;
5179 HOST_WIDE_INT offset, size;
5180 int index;
5181 bool by_ref, vce;
5183 if (!gimple_assign_load_p (stmt))
5184 continue;
5185 rhs = gimple_assign_rhs1 (stmt);
5186 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5187 continue;
5189 vce = false;
5190 t = rhs;
5191 while (handled_component_p (t))
5193 /* V_C_E can do things like convert an array of integers to one
5194 bigger integer and similar things that we do not handle below. */
5195 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5197 vce = true;
5198 break;
5200 t = TREE_OPERAND (t, 0);
5202 if (vce)
5203 continue;
5205 if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index,
5206 &offset, &size, &by_ref))
5207 continue;
5208 for (v = m_aggval; v; v = v->next)
5209 if (v->index == index
5210 && v->offset == offset)
5211 break;
5212 if (!v
5213 || v->by_ref != by_ref
5214 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5215 continue;
5217 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5218 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5220 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5221 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5222 else if (TYPE_SIZE (TREE_TYPE (rhs))
5223 == TYPE_SIZE (TREE_TYPE (v->value)))
5224 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5225 else
5227 if (dump_file)
5229 fprintf (dump_file, " const ");
5230 print_generic_expr (dump_file, v->value, 0);
5231 fprintf (dump_file, " can't be converted to type of ");
5232 print_generic_expr (dump_file, rhs, 0);
5233 fprintf (dump_file, "\n");
5235 continue;
5238 else
5239 val = v->value;
5241 if (dump_file && (dump_flags & TDF_DETAILS))
5243 fprintf (dump_file, "Modifying stmt:\n ");
5244 print_gimple_stmt (dump_file, stmt, 0, 0);
5246 gimple_assign_set_rhs_from_tree (&gsi, val);
5247 update_stmt (stmt);
5249 if (dump_file && (dump_flags & TDF_DETAILS))
5251 fprintf (dump_file, "into:\n ");
5252 print_gimple_stmt (dump_file, stmt, 0, 0);
5253 fprintf (dump_file, "\n");
5256 *m_something_changed = true;
5257 if (maybe_clean_eh_stmt (stmt)
5258 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5259 *m_cfg_changed = true;
5264 /* Update alignment of formal parameters as described in
5265 ipcp_transformation_summary. */
5267 static void
5268 ipcp_update_alignments (struct cgraph_node *node)
5270 tree fndecl = node->decl;
5271 tree parm = DECL_ARGUMENTS (fndecl);
5272 tree next_parm = parm;
5273 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5274 if (!ts || vec_safe_length (ts->alignments) == 0)
5275 return;
5276 const vec<ipa_alignment, va_gc> &alignments = *ts->alignments;
5277 unsigned count = alignments.length ();
5279 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5281 if (node->clone.combined_args_to_skip
5282 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5283 continue;
5284 gcc_checking_assert (parm);
5285 next_parm = DECL_CHAIN (parm);
5287 if (!alignments[i].known || !is_gimple_reg (parm))
5288 continue;
5289 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5290 if (!ddef)
5291 continue;
5293 if (dump_file)
5294 fprintf (dump_file, " Adjusting alignment of param %u to %u, "
5295 "misalignment to %u\n", i, alignments[i].align,
5296 alignments[i].misalign);
5298 struct ptr_info_def *pi = get_ptr_info (ddef);
5299 gcc_checking_assert (pi);
5300 unsigned old_align;
5301 unsigned old_misalign;
5302 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5304 if (old_known
5305 && old_align >= alignments[i].align)
5307 if (dump_file)
5308 fprintf (dump_file, " But the alignment was already %u.\n",
5309 old_align);
5310 continue;
5312 set_ptr_info_alignment (pi, alignments[i].align, alignments[i].misalign);
5316 /* IPCP transformation phase doing propagation of aggregate values. */
5318 unsigned int
5319 ipcp_transform_function (struct cgraph_node *node)
5321 vec<ipa_param_descriptor> descriptors = vNULL;
5322 struct func_body_info fbi;
5323 struct ipa_agg_replacement_value *aggval;
5324 int param_count;
5325 bool cfg_changed = false, something_changed = false;
5327 gcc_checking_assert (cfun);
5328 gcc_checking_assert (current_function_decl);
5330 if (dump_file)
5331 fprintf (dump_file, "Modification phase of node %s/%i\n",
5332 node->name (), node->order);
5334 ipcp_update_alignments (node);
5335 aggval = ipa_get_agg_replacements_for_node (node);
5336 if (!aggval)
5337 return 0;
5338 param_count = count_formal_params (node->decl);
5339 if (param_count == 0)
5340 return 0;
5341 adjust_agg_replacement_values (node, aggval);
5342 if (dump_file)
5343 ipa_dump_agg_replacement_values (dump_file, aggval);
5345 fbi.node = node;
5346 fbi.info = NULL;
5347 fbi.bb_infos = vNULL;
5348 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5349 fbi.param_count = param_count;
5350 fbi.aa_walked = 0;
5352 descriptors.safe_grow_cleared (param_count);
5353 ipa_populate_param_decls (node, descriptors);
5354 calculate_dominance_info (CDI_DOMINATORS);
5355 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5356 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5358 int i;
5359 struct ipa_bb_info *bi;
5360 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5361 free_ipa_bb_info (bi);
5362 fbi.bb_infos.release ();
5363 free_dominance_info (CDI_DOMINATORS);
5364 (*ipcp_transformations)[node->uid].agg_values = NULL;
5365 (*ipcp_transformations)[node->uid].alignments = NULL;
5366 descriptors.release ();
5368 if (!something_changed)
5369 return 0;
5370 else if (cfg_changed)
5371 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5372 else
5373 return TODO_update_ssa_only_virtuals;