/* Interprocedural analyses.
   Copyright (C) 2005-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "options.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "predict.h"
#include "tm.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "expr.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "langhooks.h"
#include "target.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "bitmap.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "params.h"
#include "ipa-utils.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"
#include "calls.h"

/* Intermediate information that we get from alias analysis about a particular
   parameter in a particular basic_block.  When a parameter or the memory it
   references is marked modified, we use that information in all dominated
   blocks without consulting the alias analysis oracle.  */

struct param_aa_status
{
  /* Set when this structure contains meaningful information.  If not, the
     structure describing a dominating BB should be used instead.  */
  bool valid;

  /* Whether we have seen something which might have modified the data in
     question.  PARM is for the parameter itself, REF is for data it points to
     but using the alias type of individual accesses and PT is the same thing
     but for computing aggregate pass-through functions using a very inclusive
     ao_ref.  */
  bool parm_modified, ref_modified, pt_modified;
};

/* Information related to a given BB that is used only when looking at function
   body.  */

struct ipa_bb_info
{
  /* Call graph edges going out of this BB.  */
  vec<cgraph_edge *> cg_edges;
  /* Alias analysis statuses of each formal parameter at this bb.  */
  vec<param_aa_status> param_aa_statuses;
};

/* Structure with global information that is only used when looking at function
   body.  */

struct func_body_info
{
  /* The node that is being analyzed.  */
  cgraph_node *node;

  /* Its info.  */
  struct ipa_node_params *info;

  /* Information about individual BBs.  */
  vec<ipa_bb_info> bb_infos;

  /* Number of parameters.  */
  int param_count;

  /* Number of statements already walked when analyzing this function.  */
  unsigned int aa_walked;
};

/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;
/* Vector of IPA-CP transformation data for each clone.  */
vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
/* Vector where the parameter infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static alloc_pool ipa_refdesc_pool;

/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}

/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
                          vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
                                                             true);
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

static int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Dump the Ith formal parameter of the function corresponding to INFO to
   FILE.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}

/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}

/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
        fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
        {
          tree val = jump_func->value.constant.value;
          fprintf (f, "CONST: ");
          print_generic_expr (f, val, 0);
          if (TREE_CODE (val) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
            {
              fprintf (f, " -> ");
              print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
                                  0);
            }
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_PASS_THROUGH)
        {
          fprintf (f, "PASS THROUGH: ");
          fprintf (f, "%d, op %s",
                   jump_func->value.pass_through.formal_id,
                   get_tree_code_name (jump_func->value.pass_through.operation));
          if (jump_func->value.pass_through.operation != NOP_EXPR)
            {
              fprintf (f, " ");
              print_generic_expr (f,
                                  jump_func->value.pass_through.operand, 0);
            }
          if (jump_func->value.pass_through.agg_preserved)
            fprintf (f, ", agg_preserved");
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_ANCESTOR)
        {
          fprintf (f, "ANCESTOR: ");
          fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
                   jump_func->value.ancestor.formal_id,
                   jump_func->value.ancestor.offset);
          if (jump_func->value.ancestor.agg_preserved)
            fprintf (f, ", agg_preserved");
          fprintf (f, "\n");
        }

      if (jump_func->agg.items)
        {
          struct ipa_agg_jf_item *item;
          int j;

          fprintf (f, "         Aggregate passed by %s:\n",
                   jump_func->agg.by_ref ? "reference" : "value");
          FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
            {
              fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
                       item->offset);
              if (TYPE_P (item->value))
                fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
                         tree_to_uhwi (TYPE_SIZE (item->value)));
              else
                {
                  fprintf (f, "cst: ");
                  print_generic_expr (f, item->value, 0);
                }
              fprintf (f, "\n");
            }
        }

      struct ipa_polymorphic_call_context *ctx
        = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
        {
          fprintf (f, "         Context: ");
          ctx->dump (f);
        }

      if (jump_func->alignment.known)
        {
          fprintf (f, "         Alignment: %u, misalignment: %u\n",
                   jump_func->alignment.align,
                   jump_func->alignment.misalign);
        }
      else
        fprintf (f, "         Unknown alignment\n");
    }
}

/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s/%i:\n", node->name (),
           node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
               xstrdup_for_dump (node->name ()), node->order,
               xstrdup_for_dump (cs->callee->name ()),
               cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
        fprintf (f, "    indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
                 ii->member_ptr ? "member ptr" : "aggregate",
                 ii->param_index, ii->offset,
                 ii->by_ref ? "by reference" : "by value");
      else
        fprintf (f, "    indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC,
                 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
                 ii->offset);

      if (cs->call_stmt)
        {
          fprintf (f, ", for stmt ");
          print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
        }
      else
        fprintf (f, "\n");
      if (ii->polymorphic)
        ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

/* Set JFUNC to be a know-nothing jump function.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
  jfunc->alignment.known = false;
}

/* Set DST to be a copy of another constant jump function SRC (to be used by
   jump function combination code).  The two functions will share their
   rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
                     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
                     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;
      if (!ipa_refdesc_pool)
        ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
                                              sizeof (struct ipa_cst_ref_desc), 32);

      rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}

/* Set JFUNC to be a simple pass-through jump function.  */
static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                                bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}

/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                               tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}

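/* Editor's illustration (not part of the original source): given a caller
   formal parameter A with index 0, a call such as "bar (a.0_2)" where a.0_2
   is a plain load of A is described by a simple pass-through (formal_id 0,
   operation NOP_EXPR); "bar (a_1 + 4)" by an arithmetic pass-through
   (operation PLUS_EXPR, operand 4); and passing "&this_1(D)->D.1748" by an
   ancestor jump function whose offset is the bit offset of the contained
   sub-object.  See the worked examples in the comment before
   compute_complex_assign_jump_func below.  */
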
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
        {
          if (flag_strict_aliasing
              && !POINTER_TYPE_P (TREE_TYPE (lhs)))
            return false;

          if (TREE_CODE (lhs) == COMPONENT_REF
              && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
            return false;
          /* In the future we might want to use get_base_ref_and_offset to find
             if there is a field corresponding to the offset and if so, proceed
             almost like if it was a component ref.  */
        }
    }
  return true;
}

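/* Editor's note (illustrative, not from the original source): a typical
   statement the function above treats as a possible VMT store looks roughly
   like

     this_2(D)->_vptr.A = &_ZTV1A + 16;

   i.e. an assignment to a COMPONENT_REF whose FIELD_DECL is
   DECL_VIRTUAL_P.  */
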
/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}

/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return true if the dynamic type may change
   between the beginning of the function and the point where CALL is invoked.

   Generally functions are not allowed to change the type of such instances,
   but they may call destructors.  We assume that methods cannot destroy the
   THIS pointer.  Also, as a special case, constructors and destructors may
   change the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that would require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inline cdtor is actually working on ARG, but we don't have
     an easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know the type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
           || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
          /* THIS pointer of a method - here we want to watch constructors
             and destructors as those definitely may change the dynamic
             type.  */
          || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
              && !DECL_CXX_CONSTRUCTOR_P (function)
              && !DECL_CXX_DESTRUCTOR_P (function)
              && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
        {
          /* Walk the inline stack and watch out for ctors/dtors.  */
          for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
               block = BLOCK_SUPERCONTEXT (block))
            if (BLOCK_ABSTRACT_ORIGIN (block)
                && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
              {
                tree fn = BLOCK_ABSTRACT_ORIGIN (block);

                if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
                  continue;
                if (TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
                    && (DECL_CXX_CONSTRUCTOR_P (fn)
                        || DECL_CXX_DESTRUCTOR_P (fn)))
                  return true;
              }
          return false;
        }
    }
  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
                                       gcall *call, struct ipa_jump_func *jfunc,
                                       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
                       || TREE_CODE (arg) == MEM_REF
                       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
                      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
                    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
                                   TREE_OPERAND (base, 0),
                                   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
                                                call, jfunc, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
                        gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
                build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
                                                call, jfunc, 0);
}

/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
               void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}

/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct param_aa_status *
find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
                           int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
        return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
          && bi->param_aa_statuses[index].valid)
        return &bi->param_aa_statuses[index];
    }
}

/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct param_aa_status *
parm_bb_aa_status_for_bb (struct func_body_info *fbi, basic_block bb,
                          int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
                           && !paa->ref_modified
                           && !paa->pt_modified);
      struct param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
        *paa = *dom_paa;
      else
        paa->valid = true;
    }

  return paa;
}

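/* Editor's note (illustrative, not from the original source): the two
   functions above implement a simple cache over the dominator tree.  Once a
   parameter is known to be modified in some BB, every BB it dominates can
   reuse that answer instead of walking the virtual SSA web again, which
   helps keep aa_walked below the PARAM_IPA_MAX_AA_STEPS budget on large
   functions.  */
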
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered that does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
                              gimple stmt, tree parm_load)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
        return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
        return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
                                   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}

/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct func_body_info *fbi,
                            vec<ipa_param_descriptor> descriptors,
                            gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}

/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct func_body_info *fbi,
                           int index, gimple stmt, tree ref)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
        return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
        return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
                                   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
                              gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (call),
                                                          index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
                                   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

static bool
ipa_load_from_parm_agg_1 (struct func_body_info *fbi,
                          vec<ipa_param_descriptor> descriptors,
                          gimple stmt, tree op, int *index_p,
                          HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
                          bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
          && parm_preserved_before_stmt_p (fbi, index, stmt, op))
        {
          *index_p = index;
          *by_ref_p = false;
          if (size_p)
            *size_p = size;
          return true;
        }
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
         gimple register, for example:

         void hip7(S*) (struct S * p)
         {
           void (*<T2e4>) (struct S *) D.1867;
           struct S * p.1;

           <bb 2>:
           p.1_1 = p;
           D.1867_2 = p.1_1->f;
           D.1867_2 ();
           gdp = &p;
         */

      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (fbi, index, stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      if (size_p)
        *size_p = size;
      return true;
    }
  return false;
}

/* Just like the previous function, just without the param_analysis_info
   pointer, for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
                        tree op, int *index_p, HOST_WIDE_INT *offset_p,
                        bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
                                   offset_p, NULL, by_ref_p);
}

/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
        int a.0;

        a.0_2 = a;
        bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
        int D.2064;

        D.2064_4 = a.1(D) + 4;
        bar (D.2064_4);

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
        int a.0;
        int D.2064;

        a.0_3 = a;
        D.2064_4 = a.0_3 + 4;
        foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters across different
   stages of IPA optimizations.  PARMS_AINFO contains the information that is
   only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct func_body_info *fbi,
                                  struct ipa_node_params *info,
                                  struct ipa_jump_func *jfunc,
                                  gcall *call, gimple stmt, tree name,
                                  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
        index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
        index = load_from_unmodified_param (fbi, info->descriptors,
                                            SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
        {
          if (!is_gimple_ip_invariant (op2)
              || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
                  && !useless_type_conversion_p (TREE_TYPE (name),
                                                 TREE_TYPE (op1))))
            return;

          ipa_set_jf_arith_pass_through (jfunc, index, op2,
                                         gimple_assign_rhs_code (stmt));
        }
      else if (gimple_assign_single_p (stmt))
        {
          bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
          ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
        }
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
                         parm_ref_data_pass_through_p (fbi, index, call, ssa));
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}

/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

   if (obj_2(D) != 0B)
     goto <bb 3>;
   else
     goto <bb 4>;

   <bb 3>:
   iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
   # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
   D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
   return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct func_body_info *fbi,
                                    struct ipa_node_params *info,
                                    struct ipa_jump_func *jfunc,
                                    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
        return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
                       parm_ref_data_pass_through_p (fbi, index, call, parm));
}

/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}

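/* Editor's note (illustrative, not from the original source): the layout
   matched above corresponds to the common C++ ABI representation of a
   pointer to member function - a record whose first field is a pointer to a
   method (the __pfn field) and whose second is an integral this-pointer
   adjustment (the __delta field), with no further fields.  */
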
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
        rhs = gimple_assign_rhs1 (def_stmt);
      else
        break;
    }
  return rhs;
}

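/* Editor's illustration (not part of the original source): for a chain

     b_2 = a_1;
     c_3 = b_2;

   where a_1 is the default definition of a parameter,
   get_ssa_def_if_simple_copy (c_3) follows the single-rhs copies backwards
   and returns a_1.  */
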
/* Simple linked list, describing known contents of an aggregate before
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};

/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
                                HOST_WIDE_INT lhs_offset,
                                HOST_WIDE_INT lhs_size,
                                bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
        return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
        /* We already know this value is subsequently overwritten with
           something else.  */
        *already_there = true;
      else
        /* Otherwise this is a partial overlap which we cannot
           represent.  */
        return NULL;
    }
  return p;
}

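/* Editor's illustration (not part of the original source): with a list
   already describing bits [0, 32) and [64, 96), asking for a place for
   [32, 64) returns the link between the two entries; asking for [0, 32)
   again sets *ALREADY_THERE; and asking for [16, 48) returns NULL because
   it partially overlaps the first entry.  */
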
/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
                               int const_count, HOST_WIDE_INT arg_offset,
                               struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
        {
          struct ipa_agg_jf_item item;
          item.offset = list->offset - arg_offset;
          gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
          item.value = unshare_expr_without_location (list->constant);
          jfunc->agg.items->quick_push (item);
        }
      list = list->next;
    }
}

/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */

static void
determine_locally_known_aggregate_parts (gcall *call, tree arg,
                                         tree arg_type,
                                         struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
        {
          tree type_size;
          if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
            return;
          check_ref = true;
          arg_base = arg;
          arg_offset = 0;
          type_size = TYPE_SIZE (TREE_TYPE (arg_type));
          arg_size = tree_to_uhwi (type_size);
          ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
        }
      else if (TREE_CODE (arg) == ADDR_EXPR)
        {
          HOST_WIDE_INT arg_max_size;

          arg = TREE_OPERAND (arg, 0);
          arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
                                              &arg_max_size);
          if (arg_max_size == -1
              || arg_max_size != arg_size
              || arg_offset < 0)
            return;
          if (DECL_P (arg_base))
            {
              check_ref = false;
              ao_ref_init (&r, arg_base);
            }
          else
            return;
        }
      else
        return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
                                          &arg_max_size);
      if (arg_max_size == -1
          || arg_max_size != arg_size
          || arg_offset < 0)
        return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
     describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
        continue;
      if (!gimple_assign_single_p (stmt))
        break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
          || TREE_CODE (lhs) == BIT_FIELD_REF
          || contains_bitfld_component_ref_p (lhs))
        break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
                                          &lhs_max_size);
      if (lhs_max_size == -1
          || lhs_max_size != lhs_size)
        break;

      if (check_ref)
        {
          if (TREE_CODE (lhs_base) != MEM_REF
              || TREE_OPERAND (lhs_base, 0) != arg_base
              || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
            break;
        }
      else if (lhs_base != arg_base)
        {
          if (DECL_P (lhs_base))
            continue;
          else
            break;
        }

      bool already_there = false;
      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
                                          &already_there);
      if (!p)
        break;
      if (already_there)
        continue;

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
        {
          n->constant = rhs;
          const_count++;
        }
      else
        n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
          || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
        break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}

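/* Editor's illustration (not part of the original source): assuming 32-bit
   ints, for

     struct S { int a; int b; } s;
     s.a = 1;
     s.b = 2;
     bar (&s);

   the function above records two items in jfunc->agg, offset 0 with
   constant 1 and offset 32 with constant 2, with by_ref set because the
   aggregate is passed through a pointer.  */
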
/* Return the type of the Ith formal parameter of the function represented by
   call graph edge E, or NULL if it cannot be determined.  */

static tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
               ? TREE_TYPE (e->callee->decl)
               : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
        break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
        return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}

/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
                                     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
        {
          tree instance;
          struct ipa_polymorphic_call_context context (cs->caller->decl,
                                                       arg, cs->call_stmt,
                                                       &instance);
          context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
          *ipa_get_ith_polymorhic_call_context (args, n) = context;
          if (!context.useless_p ())
            useful_context = true;
        }

      if (POINTER_TYPE_P (TREE_TYPE (arg)))
        {
          unsigned HOST_WIDE_INT hwi_bitpos;
          unsigned align;

          if (get_pointer_alignment_1 (arg, &align, &hwi_bitpos)
              && align % BITS_PER_UNIT == 0
              && hwi_bitpos % BITS_PER_UNIT == 0)
            {
              jfunc->alignment.known = true;
              jfunc->alignment.align = align / BITS_PER_UNIT;
              jfunc->alignment.misalign = hwi_bitpos / BITS_PER_UNIT;
            }
          else
            gcc_assert (!jfunc->alignment.known);
        }
      else
        gcc_assert (!jfunc->alignment.known);

      if (is_gimple_ip_invariant (arg))
        ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
               && TREE_CODE (arg) == PARM_DECL)
        {
          int index = ipa_get_param_decl_index (info, arg);

          gcc_assert (index >= 0);
          /* Aggregate passed by value, check for pass-through, otherwise we
             will attempt to fill in aggregate contents later in this
             for cycle.  */
          if (parm_preserved_before_stmt_p (fbi, index, call, arg))
            {
              ipa_set_jf_simple_pass_through (jfunc, index, false);
              continue;
            }
        }
      else if (TREE_CODE (arg) == SSA_NAME)
        {
          if (SSA_NAME_IS_DEFAULT_DEF (arg))
            {
              int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
              if (index >= 0)
                {
                  bool agg_p;
                  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
                  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
                }
            }
          else
            {
              gimple stmt = SSA_NAME_DEF_STMT (arg);
              if (is_gimple_assign (stmt))
                compute_complex_assign_jump_func (fbi, info, jfunc,
                                                  call, stmt, arg, param_type);
              else if (gimple_code (stmt) == GIMPLE_PHI)
                compute_complex_ancestor_jump_func (fbi, info, jfunc,
                                                    call,
                                                    as_a <gphi *> (stmt));
            }
        }

      /* If ARG is a pointer, we cannot use its type to determine the type of
         aggregate passed (because type conversions are ignored in gimple).
         Usually we can safely get the type from the function declaration, but
         in case of K&R prototypes or variadic functions we can try our luck
         with the type of the pointer passed.
         TODO: Since we look for actual initialization of the memory object, we
         may better work out the type based on the memory stores we find.  */
      if (!param_type)
        param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
           || !ipa_get_jf_pass_through_agg_preserved (jfunc))
          && (jfunc->type != IPA_JF_ANCESTOR
              || !ipa_get_jf_ancestor_agg_preserved (jfunc))
          && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
              || POINTER_TYPE_P (param_type)))
        determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
    }
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}

/* Compute jump functions for all edges - both direct and indirect - outgoing
   from BB.  */

static void
ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
{
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  int i;
  struct cgraph_edge *cs;

  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
    {
      struct cgraph_node *callee = cs->callee;

      if (callee)
        {
          callee->ultimate_alias_target ();
          /* We do not need to bother analyzing calls to unknown functions
             unless they may become known during lto/whopr.  */
          if (!callee->definition && !flag_lto)
            continue;
        }
      ipa_compute_jump_functions_for_edge (fbi, cs);
    }
}

1850 /* If STMT looks like a statement loading a value from a member pointer formal
1851 parameter, return that parameter and store the offset of the field to
1852 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1853 might be clobbered). If USE_DELTA, then we look for a use of the delta
1854 field rather than the pfn. */
1856 static tree
1857 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
1858 HOST_WIDE_INT *offset_p)
1860 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1862 if (!gimple_assign_single_p (stmt))
1863 return NULL_TREE;
1865 rhs = gimple_assign_rhs1 (stmt);
1866 if (TREE_CODE (rhs) == COMPONENT_REF)
1868 ref_field = TREE_OPERAND (rhs, 1);
1869 rhs = TREE_OPERAND (rhs, 0);
1871 else
1872 ref_field = NULL_TREE;
1873 if (TREE_CODE (rhs) != MEM_REF)
1874 return NULL_TREE;
1875 rec = TREE_OPERAND (rhs, 0);
1876 if (TREE_CODE (rec) != ADDR_EXPR)
1877 return NULL_TREE;
1878 rec = TREE_OPERAND (rec, 0);
1879 if (TREE_CODE (rec) != PARM_DECL
1880 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1881 return NULL_TREE;
1882 ref_offset = TREE_OPERAND (rhs, 1);
1884 if (use_delta)
1885 fld = delta_field;
1886 else
1887 fld = ptr_field;
1888 if (offset_p)
1889 *offset_p = int_bit_position (fld);
1891 if (ref_field)
1893 if (integer_nonzerop (ref_offset))
1894 return NULL_TREE;
1895 return ref_field == fld ? rec : NULL_TREE;
1897 else
1898 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1899 : NULL_TREE;
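/* For example (a rough sketch in the spirit of the dump quoted in the
   comment above ipa_analyze_indirect_call_uses below), for a
   member-pointer parameter F this recognizes loads such as

     f$__pfn_24 = MEM[(struct *)&f];
     f$__delta_5 = MEM[(struct *)&f + 4B];

   returning F and storing into *OFFSET_P the bit position of the __pfn
   or the __delta field, depending on USE_DELTA.  */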
1902 /* Returns true iff T is an SSA_NAME defined by a statement. */
1904 static bool
1905 ipa_is_ssa_with_stmt_def (tree t)
1907 if (TREE_CODE (t) == SSA_NAME
1908 && !SSA_NAME_IS_DEFAULT_DEF (t))
1909 return true;
1910 else
1911 return false;
1914 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1915 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1916 indirect call graph edge. */
1918 static struct cgraph_edge *
1919 ipa_note_param_call (struct cgraph_node *node, int param_index,
1920 gcall *stmt)
1922 struct cgraph_edge *cs;
1924 cs = node->get_edge (stmt);
1925 cs->indirect_info->param_index = param_index;
1926 cs->indirect_info->agg_contents = 0;
1927 cs->indirect_info->member_ptr = 0;
1928 return cs;
1931 /* Analyze the CALL and examine uses of formal parameters of the caller
1932 FBI->node (described by FBI->info). Currently it checks
1934 whether the call calls a pointer that is a formal parameter and if so, the
1935 parameter is marked with the called flag and an indirect call graph edge
1936 describing the call is created. This is very simple for ordinary pointers
1937 represented in SSA but not-so-nice when it comes to member pointers. The
1938 ugly part of this function does nothing more than trying to match the
1939 pattern of such a call. An example of such a pattern is the gimple dump
1940 below, the call is on the last line:
1942 <bb 2>:
1943 f$__delta_5 = f.__delta;
1944 f$__pfn_24 = f.__pfn;
or
1947 <bb 2>:
1948 f$__delta_5 = MEM[(struct *)&f];
1949 f$__pfn_24 = MEM[(struct *)&f + 4B];
1951 and a few lines below:
1953 <bb 5>:
1954 D.2496_3 = (int) f$__pfn_24;
1955 D.2497_4 = D.2496_3 & 1;
1956 if (D.2497_4 != 0)
1957 goto <bb 3>;
1958 else
1959 goto <bb 4>;
1961 <bb 6>:
1962 D.2500_7 = (unsigned int) f$__delta_5;
1963 D.2501_8 = &S + D.2500_7;
1964 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1965 D.2503_10 = *D.2502_9;
1966 D.2504_12 = f$__pfn_24 + -1;
1967 D.2505_13 = (unsigned int) D.2504_12;
1968 D.2506_14 = D.2503_10 + D.2505_13;
1969 D.2507_15 = *D.2506_14;
1970 iftmp.11_16 = (String:: *) D.2507_15;
1972 <bb 7>:
1973 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1974 D.2500_19 = (unsigned int) f$__delta_5;
1975 D.2508_20 = &S + D.2500_19;
1976 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1978 Such patterns are the result of simple calls to a member pointer:
1980 int doprinting (int (MyString::* f)(int) const)
1982 MyString S ("somestring");
1984 return (S.*f)(4);
1987 Moreover, the function also looks for called pointers loaded from aggregates
1988 passed by value or reference. */
1990 static void
1991 ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gcall *call,
1992 tree target)
1994 struct ipa_node_params *info = fbi->info;
1995 HOST_WIDE_INT offset;
1996 bool by_ref;
1998 if (SSA_NAME_IS_DEFAULT_DEF (target))
2000 tree var = SSA_NAME_VAR (target);
2001 int index = ipa_get_param_decl_index (info, var);
2002 if (index >= 0)
2003 ipa_note_param_call (fbi->node, index, call);
2004 return;
2007 int index;
2008 gimple def = SSA_NAME_DEF_STMT (target);
2009 if (gimple_assign_single_p (def)
2010 && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
2011 gimple_assign_rhs1 (def), &index, &offset,
2012 NULL, &by_ref))
2014 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2015 cs->indirect_info->offset = offset;
2016 cs->indirect_info->agg_contents = 1;
2017 cs->indirect_info->by_ref = by_ref;
2018 return;
2021 /* Now we need to try to match the complex pattern of calling a member
2022 pointer. */
2023 if (gimple_code (def) != GIMPLE_PHI
2024 || gimple_phi_num_args (def) != 2
2025 || !POINTER_TYPE_P (TREE_TYPE (target))
2026 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2027 return;
2029 /* First, we need to check whether one of these is a load from a member
2030 pointer that is a parameter to this function. */
2031 tree n1 = PHI_ARG_DEF (def, 0);
2032 tree n2 = PHI_ARG_DEF (def, 1);
2033 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2034 return;
2035 gimple d1 = SSA_NAME_DEF_STMT (n1);
2036 gimple d2 = SSA_NAME_DEF_STMT (n2);
2038 tree rec;
2039 basic_block bb, virt_bb;
2040 basic_block join = gimple_bb (def);
2041 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2043 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2044 return;
2046 bb = EDGE_PRED (join, 0)->src;
2047 virt_bb = gimple_bb (d2);
2049 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2051 bb = EDGE_PRED (join, 1)->src;
2052 virt_bb = gimple_bb (d1);
2054 else
2055 return;
2057 /* Second, we need to check that the basic blocks are laid out in the way
2058 corresponding to the pattern. */
2060 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2061 || single_pred (virt_bb) != bb
2062 || single_succ (virt_bb) != join)
2063 return;
2065 /* Third, let's see that the branching is done depending on the least
2066 significant bit of the pfn. */
2068 gimple branch = last_stmt (bb);
2069 if (!branch || gimple_code (branch) != GIMPLE_COND)
2070 return;
2072 if ((gimple_cond_code (branch) != NE_EXPR
2073 && gimple_cond_code (branch) != EQ_EXPR)
2074 || !integer_zerop (gimple_cond_rhs (branch)))
2075 return;
2077 tree cond = gimple_cond_lhs (branch);
2078 if (!ipa_is_ssa_with_stmt_def (cond))
2079 return;
2081 def = SSA_NAME_DEF_STMT (cond);
2082 if (!is_gimple_assign (def)
2083 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2084 || !integer_onep (gimple_assign_rhs2 (def)))
2085 return;
2087 cond = gimple_assign_rhs1 (def);
2088 if (!ipa_is_ssa_with_stmt_def (cond))
2089 return;
2091 def = SSA_NAME_DEF_STMT (cond);
2093 if (is_gimple_assign (def)
2094 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2096 cond = gimple_assign_rhs1 (def);
2097 if (!ipa_is_ssa_with_stmt_def (cond))
2098 return;
2099 def = SSA_NAME_DEF_STMT (cond);
2102 tree rec2;
2103 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2104 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2105 == ptrmemfunc_vbit_in_delta),
2106 NULL);
2107 if (rec != rec2)
2108 return;
2110 index = ipa_get_param_decl_index (info, rec);
2111 if (index >= 0
2112 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2114 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2115 cs->indirect_info->offset = offset;
2116 cs->indirect_info->agg_contents = 1;
2117 cs->indirect_info->member_ptr = 1;
2120 return;
2123 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2124 object referenced in the expression is a formal parameter of the caller
2125 FBI->node (described by FBI->info), create a call note for the
2126 statement. */
2128 static void
2129 ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
2130 gcall *call, tree target)
2132 tree obj = OBJ_TYPE_REF_OBJECT (target);
2133 int index;
2134 HOST_WIDE_INT anc_offset;
2136 if (!flag_devirtualize)
2137 return;
2139 if (TREE_CODE (obj) != SSA_NAME)
2140 return;
2142 struct ipa_node_params *info = fbi->info;
2143 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2145 struct ipa_jump_func jfunc;
2146 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2147 return;
2149 anc_offset = 0;
2150 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2151 gcc_assert (index >= 0);
2152 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2153 call, &jfunc))
2154 return;
2156 else
2158 struct ipa_jump_func jfunc;
2159 gimple stmt = SSA_NAME_DEF_STMT (obj);
2160 tree expr;
2162 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2163 if (!expr)
2164 return;
2165 index = ipa_get_param_decl_index (info,
2166 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2167 gcc_assert (index >= 0);
2168 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2169 call, &jfunc, anc_offset))
2170 return;
2173 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2174 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2175 ii->offset = anc_offset;
2176 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2177 ii->otr_type = obj_type_ref_class (target);
2178 ii->polymorphic = 1;
2181 /* Analyze the call statement CALL to determine whether and how it utilizes
2182 formal parameters of the caller (described by FBI->info). */
2185 static void
2186 ipa_analyze_call_uses (struct func_body_info *fbi, gcall *call)
2188 tree target = gimple_call_fn (call);
2190 if (!target
2191 || (TREE_CODE (target) != SSA_NAME
2192 && !virtual_method_call_p (target)))
2193 return;
2195 struct cgraph_edge *cs = fbi->node->get_edge (call);
2196 /* If we previously turned the call into a direct call, there is
2197 no need to analyze it. */
2198 if (cs && !cs->indirect_unknown_callee)
2199 return;
2201 if (cs->indirect_info->polymorphic && flag_devirtualize)
2203 tree instance;
2204 tree target = gimple_call_fn (call);
2205 ipa_polymorphic_call_context context (current_function_decl,
2206 target, call, &instance);
2208 gcc_checking_assert (cs->indirect_info->otr_type
2209 == obj_type_ref_class (target));
2210 gcc_checking_assert (cs->indirect_info->otr_token
2211 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2213 cs->indirect_info->vptr_changed
2214 = !context.get_dynamic_type (instance,
2215 OBJ_TYPE_REF_OBJECT (target),
2216 obj_type_ref_class (target), call);
2217 cs->indirect_info->context = context;
2220 if (TREE_CODE (target) == SSA_NAME)
2221 ipa_analyze_indirect_call_uses (fbi, call, target);
2222 else if (virtual_method_call_p (target))
2223 ipa_analyze_virtual_call_uses (fbi, call, target);
2227 /* Analyze the call statement STMT with respect to formal parameters (described
2228 in FBI->info) of the caller given by FBI->node. Currently it only checks
2229 whether formal parameters are called. */
2231 static void
2232 ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
2234 if (is_gimple_call (stmt))
2235 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2238 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2239 If OP is a parameter declaration, mark it as used in the info structure
2240 passed in DATA. */
2242 static bool
2243 visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
2245 struct ipa_node_params *info = (struct ipa_node_params *) data;
2247 op = get_base_address (op);
2248 if (op
2249 && TREE_CODE (op) == PARM_DECL)
2251 int index = ipa_get_param_decl_index (info, op);
2252 gcc_assert (index >= 0);
2253 ipa_set_param_used (info, index, true);
2256 return false;
2259 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2260 the findings in various structures of the associated ipa_node_params
2261 structure, such as parameter flags, notes etc. FBI holds various data about
2262 the function being analyzed. */
2264 static void
2265 ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
2267 gimple_stmt_iterator gsi;
2268 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2270 gimple stmt = gsi_stmt (gsi);
2272 if (is_gimple_debug (stmt))
2273 continue;
2275 ipa_analyze_stmt_uses (fbi, stmt);
2276 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2277 visit_ref_for_mod_analysis,
2278 visit_ref_for_mod_analysis,
2279 visit_ref_for_mod_analysis);
2281 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2282 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2283 visit_ref_for_mod_analysis,
2284 visit_ref_for_mod_analysis,
2285 visit_ref_for_mod_analysis);
2288 /* Calculate controlled uses of parameters of NODE. */
2290 static void
2291 ipa_analyze_controlled_uses (struct cgraph_node *node)
2293 struct ipa_node_params *info = IPA_NODE_REF (node);
2295 for (int i = 0; i < ipa_get_param_count (info); i++)
2297 tree parm = ipa_get_param (info, i);
2298 int controlled_uses = 0;
2300 /* For SSA regs see if parameter is used. For non-SSA we compute
2301 the flag during modification analysis. */
2302 if (is_gimple_reg (parm))
2304 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2305 parm);
2306 if (ddef && !has_zero_uses (ddef))
2308 imm_use_iterator imm_iter;
2309 use_operand_p use_p;
2311 ipa_set_param_used (info, i, true);
2312 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2313 if (!is_gimple_call (USE_STMT (use_p)))
2315 if (!is_gimple_debug (USE_STMT (use_p)))
2317 controlled_uses = IPA_UNDESCRIBED_USE;
2318 break;
2321 else
2322 controlled_uses++;
2324 else
2325 controlled_uses = 0;
2327 else
2328 controlled_uses = IPA_UNDESCRIBED_USE;
2329 ipa_set_controlled_uses (info, i, controlled_uses);
2333 /* Free stuff in BI. */
2335 static void
2336 free_ipa_bb_info (struct ipa_bb_info *bi)
2338 bi->cg_edges.release ();
2339 bi->param_aa_statuses.release ();
2342 /* Dominator walker driving the analysis. */
2344 class analysis_dom_walker : public dom_walker
2346 public:
2347 analysis_dom_walker (struct func_body_info *fbi)
2348 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2350 virtual void before_dom_children (basic_block);
2352 private:
2353 struct func_body_info *m_fbi;
2356 void
2357 analysis_dom_walker::before_dom_children (basic_block bb)
2359 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2360 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2363 /* Initialize the array describing properties of formal parameters
2364 of NODE, analyze their uses and compute jump functions associated
2365 with actual arguments of calls from within NODE. */
2367 void
2368 ipa_analyze_node (struct cgraph_node *node)
2370 struct func_body_info fbi;
2371 struct ipa_node_params *info;
2373 ipa_check_create_node_params ();
2374 ipa_check_create_edge_args ();
2375 info = IPA_NODE_REF (node);
2377 if (info->analysis_done)
2378 return;
2379 info->analysis_done = 1;
2381 if (ipa_func_spec_opts_forbid_analysis_p (node))
2383 for (int i = 0; i < ipa_get_param_count (info); i++)
2385 ipa_set_param_used (info, i, true);
2386 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2388 return;
2391 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2392 push_cfun (func);
2393 calculate_dominance_info (CDI_DOMINATORS);
2394 ipa_initialize_node_params (node);
2395 ipa_analyze_controlled_uses (node);
2397 fbi.node = node;
2398 fbi.info = IPA_NODE_REF (node);
2399 fbi.bb_infos = vNULL;
2400 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2401 fbi.param_count = ipa_get_param_count (info);
2402 fbi.aa_walked = 0;
2404 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2406 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2407 bi->cg_edges.safe_push (cs);
2410 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2412 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2413 bi->cg_edges.safe_push (cs);
2416 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2418 int i;
2419 struct ipa_bb_info *bi;
2420 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
2421 free_ipa_bb_info (bi);
2422 fbi.bb_infos.release ();
2423 free_dominance_info (CDI_DOMINATORS);
2424 pop_cfun ();
2427 /* Update the jump functions associated with call graph edge E when the call
2428 graph edge CS is being inlined, assuming that E->caller is already (possibly
2429 indirectly) inlined into CS->callee and that E has not been inlined. */
2431 static void
2432 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2433 struct cgraph_edge *e)
2435 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2436 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2437 int count = ipa_get_cs_argument_count (args);
2438 int i;
2440 for (i = 0; i < count; i++)
2442 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2443 struct ipa_polymorphic_call_context *dst_ctx
2444 = ipa_get_ith_polymorhic_call_context (args, i);
2446 if (dst->type == IPA_JF_ANCESTOR)
2448 struct ipa_jump_func *src;
2449 int dst_fid = dst->value.ancestor.formal_id;
2450 struct ipa_polymorphic_call_context *src_ctx
2451 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2453 /* Variable number of arguments can cause havoc if we try to access
2454 one that does not exist in the inlined edge. So make sure we
2455 don't. */
2456 if (dst_fid >= ipa_get_cs_argument_count (top))
2458 ipa_set_jf_unknown (dst);
2459 continue;
2462 src = ipa_get_ith_jump_func (top, dst_fid);
2464 if (src_ctx && !src_ctx->useless_p ())
2466 struct ipa_polymorphic_call_context ctx = *src_ctx;
2468 /* TODO: Make type preserved safe WRT contexts. */
2469 if (!ipa_get_jf_ancestor_type_preserved (dst))
2470 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2471 ctx.offset_by (dst->value.ancestor.offset);
2472 if (!ctx.useless_p ())
2474 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2475 count);
2476 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2478 dst_ctx->combine_with (ctx);
2481 if (src->agg.items
2482 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2484 struct ipa_agg_jf_item *item;
2485 int j;
2487 /* Currently we do not produce clobber aggregate jump functions;
2488 replace this with merging when we do. */
2489 gcc_assert (!dst->agg.items);
2491 dst->agg.items = vec_safe_copy (src->agg.items);
2492 dst->agg.by_ref = src->agg.by_ref;
2493 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2494 item->offset -= dst->value.ancestor.offset;
2497 if (src->type == IPA_JF_PASS_THROUGH
2498 && src->value.pass_through.operation == NOP_EXPR)
2500 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2501 dst->value.ancestor.agg_preserved &=
2502 src->value.pass_through.agg_preserved;
2504 else if (src->type == IPA_JF_ANCESTOR)
2506 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2507 dst->value.ancestor.offset += src->value.ancestor.offset;
2508 dst->value.ancestor.agg_preserved &=
2509 src->value.ancestor.agg_preserved;
2511 else
2512 ipa_set_jf_unknown (dst);
2514 else if (dst->type == IPA_JF_PASS_THROUGH)
2516 struct ipa_jump_func *src;
2517 /* We must check the range due to calls with a variable number of arguments,
2518 and we cannot combine jump functions with operations. */
2519 if (dst->value.pass_through.operation == NOP_EXPR
2520 && (dst->value.pass_through.formal_id
2521 < ipa_get_cs_argument_count (top)))
2523 int dst_fid = dst->value.pass_through.formal_id;
2524 src = ipa_get_ith_jump_func (top, dst_fid);
2525 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2526 struct ipa_polymorphic_call_context *src_ctx
2527 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2529 if (src_ctx && !src_ctx->useless_p ())
2531 struct ipa_polymorphic_call_context ctx = *src_ctx;
2533 /* TODO: Make type preserved safe WRT contexts. */
2534 if (!ipa_get_jf_pass_through_type_preserved (dst))
2535 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2536 if (!ctx.useless_p ())
2538 if (!dst_ctx)
2540 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2541 count);
2542 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2544 dst_ctx->combine_with (ctx);
2547 switch (src->type)
2549 case IPA_JF_UNKNOWN:
2550 ipa_set_jf_unknown (dst);
2551 break;
2552 case IPA_JF_CONST:
2553 ipa_set_jf_cst_copy (dst, src);
2554 break;
2556 case IPA_JF_PASS_THROUGH:
2558 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2559 enum tree_code operation;
2560 operation = ipa_get_jf_pass_through_operation (src);
2562 if (operation == NOP_EXPR)
2564 bool agg_p;
2565 agg_p = dst_agg_p
2566 && ipa_get_jf_pass_through_agg_preserved (src);
2567 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2569 else
2571 tree operand = ipa_get_jf_pass_through_operand (src);
2572 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2573 operation);
2575 break;
2577 case IPA_JF_ANCESTOR:
2579 bool agg_p;
2580 agg_p = dst_agg_p
2581 && ipa_get_jf_ancestor_agg_preserved (src);
2582 ipa_set_ancestor_jf (dst,
2583 ipa_get_jf_ancestor_offset (src),
2584 ipa_get_jf_ancestor_formal_id (src),
2585 agg_p);
2586 break;
2588 default:
2589 gcc_unreachable ();
2592 if (src->agg.items
2593 && (dst_agg_p || !src->agg.by_ref))
2595 /* Currently we do not produce clobber aggregate jump
2596 functions; replace this with merging when we do. */
2597 gcc_assert (!dst->agg.items);
2599 dst->agg.by_ref = src->agg.by_ref;
2600 dst->agg.items = vec_safe_copy (src->agg.items);
2603 else
2604 ipa_set_jf_unknown (dst);
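/* A small worked example of the composition above, with hypothetical
   nodes A, B and C: suppose A passes its parameter unchanged to B (a
   simple pass-through) and B passes an ancestor of the corresponding
   formal at offset 8 to C. When the A->B edge is inlined, the jump
   function of the surviving edge to C becomes an ancestor of A's
   parameter with offset 8, the agg_preserved flags of the two original
   functions and-ed together, as in the IPA_JF_ANCESTOR branch above.  */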
2609 /* If TARGET is an addr_expr of a function declaration, make it the
2610 (speculative) destination of an indirect edge IE and return the edge.
2611 Otherwise, return NULL. */
2613 struct cgraph_edge *
2614 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2615 bool speculative)
2617 struct cgraph_node *callee;
2618 struct inline_edge_summary *es = inline_edge_summary (ie);
2619 bool unreachable = false;
2621 if (TREE_CODE (target) == ADDR_EXPR)
2622 target = TREE_OPERAND (target, 0);
2623 if (TREE_CODE (target) != FUNCTION_DECL)
2625 target = canonicalize_constructor_val (target, NULL);
2626 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2628 if (ie->indirect_info->member_ptr)
2629 /* Member pointer call that goes through a VMT lookup. */
2630 return NULL;
2632 if (dump_enabled_p ())
2634 location_t loc = gimple_location_safe (ie->call_stmt);
2635 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2636 "discovered direct call to non-function in %s/%i, "
2637 "making it __builtin_unreachable\n",
2638 ie->caller->name (), ie->caller->order);
2641 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2642 callee = cgraph_node::get_create (target);
2643 unreachable = true;
2645 else
2646 callee = cgraph_node::get (target);
2648 else
2649 callee = cgraph_node::get (target);
2651 /* Because may-edges are not explicitly represented and the vtable may be external,
2652 we may create the first reference to the object in the unit. */
2653 if (!callee || callee->global.inlined_to)
2656 /* We had better ensure we can refer to it.
2657 In the case of static functions we are out of luck, since we already
2658 removed its body. In the case of public functions we may or may
2659 not introduce the reference. */
2660 if (!canonicalize_constructor_val (target, NULL)
2661 || !TREE_PUBLIC (target))
2663 if (dump_file)
2664 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2665 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2666 xstrdup_for_dump (ie->caller->name ()),
2667 ie->caller->order,
2668 xstrdup_for_dump (ie->callee->name ()),
2669 ie->callee->order);
2670 return NULL;
2672 callee = cgraph_node::get_create (target);
2675 /* If the edge is already speculative, check that the target agrees. */
2676 if (speculative && ie->speculative)
2678 struct cgraph_edge *e2;
2679 struct ipa_ref *ref;
2680 ie->speculative_call_info (e2, ie, ref);
2681 if (e2->callee->ultimate_alias_target ()
2682 != callee->ultimate_alias_target ())
2684 if (dump_file)
2685 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2686 "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
2687 xstrdup_for_dump (ie->caller->name ()),
2688 ie->caller->order,
2689 xstrdup_for_dump (callee->name ()),
2690 callee->order,
2691 xstrdup_for_dump (e2->callee->name ()),
2692 e2->callee->order);
2694 else
2696 if (dump_file)
2697 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2698 "(%s/%i -> %s/%i) this agree with previous speculation.\n",
2699 xstrdup_for_dump (ie->caller->name ()),
2700 ie->caller->order,
2701 xstrdup_for_dump (callee->name ()),
2702 callee->order);
2704 return NULL;
2707 if (!dbg_cnt (devirt))
2708 return NULL;
2710 ipa_check_create_node_params ();
2712 /* We cannot make edges to inline clones. It is a bug if someone removed
2713 the cgraph node too early. */
2714 gcc_assert (!callee->global.inlined_to);
2716 if (dump_file && !unreachable)
2718 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2719 "(%s/%i -> %s/%i), for stmt ",
2720 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2721 speculative ? "speculative" : "known",
2722 xstrdup_for_dump (ie->caller->name ()),
2723 ie->caller->order,
2724 xstrdup_for_dump (callee->name ()),
2725 callee->order);
2726 if (ie->call_stmt)
2727 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2728 else
2729 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2731 if (dump_enabled_p ())
2733 location_t loc = gimple_location_safe (ie->call_stmt);
2735 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2736 "converting indirect call in %s to direct call to %s\n",
2737 ie->caller->name (), callee->name ());
2739 if (!speculative)
2740 ie = ie->make_direct (callee);
2741 else
2743 if (!callee->can_be_discarded_p ())
2745 cgraph_node *alias;
2746 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2747 if (alias)
2748 callee = alias;
2750 ie = ie->make_speculative
2751 (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
2753 es = inline_edge_summary (ie);
2754 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2755 - eni_size_weights.call_cost);
2756 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2757 - eni_time_weights.call_cost);
2759 return ie;
2762 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2763 return NULL if there is none. BY_REF specifies whether the value has to
2764 be passed by reference or by value. */
2766 tree
2767 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2768 HOST_WIDE_INT offset, bool by_ref)
2770 struct ipa_agg_jf_item *item;
2771 int i;
2773 if (by_ref != agg->by_ref)
2774 return NULL;
2776 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2777 if (item->offset == offset)
2779 /* Currently we do not have clobber values; return NULL for them once
2780 we do. */
2781 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2782 return item->value;
2784 return NULL;
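/* A hypothetical use: if a caller does

     s.callback = &some_fn;
     process (&s);

   the aggregate jump function describing the argument can carry an item
   at the offset of the callback field with the value &some_fn, BY_REF
   being true because the aggregate is passed by reference; querying
   this function with that offset then yields &some_fn.  */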
2787 /* Remove a reference to SYMBOL from the list of references of a node given by
2788 reference description RDESC. Return true if the reference has been
2789 successfully found and removed. */
2791 static bool
2792 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2794 struct ipa_ref *to_del;
2795 struct cgraph_edge *origin;
2797 origin = rdesc->cs;
2798 if (!origin)
2799 return false;
2800 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
2801 origin->lto_stmt_uid);
2802 if (!to_del)
2803 return false;
2805 to_del->remove_reference ();
2806 if (dump_file)
2807 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2808 xstrdup_for_dump (origin->caller->name ()),
2809 origin->caller->order, xstrdup_for_dump (symbol->name ()));
2810 return true;
2813 /* If JFUNC has a reference description with refcount different from
2814 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2815 NULL. JFUNC must be a constant jump function. */
2817 static struct ipa_cst_ref_desc *
2818 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2820 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2821 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2822 return rdesc;
2823 else
2824 return NULL;
2827 /* If the value of constant jump function JFUNC is an address of a function
2828 declaration, return the associated call graph node. Otherwise return
2829 NULL. */
2831 static cgraph_node *
2832 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2834 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2835 tree cst = ipa_get_jf_constant (jfunc);
2836 if (TREE_CODE (cst) != ADDR_EXPR
2837 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2838 return NULL;
2840 return cgraph_node::get (TREE_OPERAND (cst, 0));
2844 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2845 refcount and, if it hits zero, remove the reference to the associated symbol from the caller of
2846 the edge specified in the rdesc. Return false if either the symbol or the
2847 reference could not be found, otherwise return true. */
2849 static bool
2850 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2852 struct ipa_cst_ref_desc *rdesc;
2853 if (jfunc->type == IPA_JF_CONST
2854 && (rdesc = jfunc_rdesc_usable (jfunc))
2855 && --rdesc->refcount == 0)
2857 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
2858 if (!symbol)
2859 return false;
2861 return remove_described_reference (symbol, rdesc);
2863 return true;
2866 /* Try to find a destination for indirect edge IE that corresponds to a simple
2867 call or a call of a member function pointer and where the destination is a
2868 pointer formal parameter described by jump function JFUNC. If it can be
2869 determined, return the newly direct edge, otherwise return NULL.
2870 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2872 static struct cgraph_edge *
2873 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2874 struct ipa_jump_func *jfunc,
2875 struct ipa_node_params *new_root_info)
2877 struct cgraph_edge *cs;
2878 tree target;
2879 bool agg_contents = ie->indirect_info->agg_contents;
2881 if (ie->indirect_info->agg_contents)
2882 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2883 ie->indirect_info->offset,
2884 ie->indirect_info->by_ref);
2885 else
2886 target = ipa_value_from_jfunc (new_root_info, jfunc);
2887 if (!target)
2888 return NULL;
2889 cs = ipa_make_edge_direct_to_target (ie, target);
2891 if (cs && !agg_contents)
2893 bool ok;
2894 gcc_checking_assert (cs->callee
2895 && (cs != ie
2896 || jfunc->type != IPA_JF_CONST
2897 || !cgraph_node_for_jfunc (jfunc)
2898 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2899 ok = try_decrement_rdesc_refcount (jfunc);
2900 gcc_checking_assert (ok);
2903 return cs;
2906 /* Return the target to be used in cases of impossible devirtualization. IE
2907 and TARGET (the latter can be NULL) are dumped when dumping is enabled. */
2909 tree
2910 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
2912 if (dump_file)
2914 if (target)
2915 fprintf (dump_file,
2916 "Type inconsistent devirtualization: %s/%i->%s\n",
2917 ie->caller->name (), ie->caller->order,
2918 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
2919 else
2920 fprintf (dump_file,
2921 "No devirtualization target in %s/%i\n",
2922 ie->caller->name (), ie->caller->order);
2924 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2925 cgraph_node::get_create (new_target);
2926 return new_target;
2929 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2930 call based on a formal parameter which is described by jump function JFUNC
2931 and if it can be determined, make it direct and return the direct edge.
2932 Otherwise, return NULL. CTX describes the polymorphic context that the
2933 parameter the call is based on brings along with it. */
2935 static struct cgraph_edge *
2936 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2937 struct ipa_jump_func *jfunc,
2938 struct ipa_polymorphic_call_context ctx)
2940 tree target = NULL;
2941 bool speculative = false;
2943 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
2944 return NULL;
2946 gcc_assert (!ie->indirect_info->by_ref);
2948 /* Try to do lookup via known virtual table pointer value. */
2949 if (!ie->indirect_info->vptr_changed
2950 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
2952 tree vtable;
2953 unsigned HOST_WIDE_INT offset;
2954 tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
2955 ie->indirect_info->offset,
2956 true);
2957 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
2959 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
2960 vtable, offset);
2961 if (t)
2963 if ((TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
2964 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
2965 || !possible_polymorphic_call_target_p
2966 (ie, cgraph_node::get (t)))
2968 /* Do not speculate to __builtin_unreachable; that would be pointless. */
2969 if (!ie->indirect_info->vptr_changed)
2970 target = ipa_impossible_devirt_target (ie, target);
2972 else
2974 target = t;
2975 speculative = ie->indirect_info->vptr_changed;
2981 ipa_polymorphic_call_context ie_context (ie);
2982 vec <cgraph_node *>targets;
2983 bool final;
2985 ctx.offset_by (ie->indirect_info->offset);
2986 if (ie->indirect_info->vptr_changed)
2987 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
2988 ie->indirect_info->otr_type);
2989 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
2990 targets = possible_polymorphic_call_targets
2991 (ie->indirect_info->otr_type,
2992 ie->indirect_info->otr_token,
2993 ctx, &final);
2994 if (final && targets.length () <= 1)
2996 if (targets.length () == 1)
2997 target = targets[0]->decl;
2998 else
2999 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3001 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3002 && !ie->speculative && ie->maybe_hot_p ())
3004 cgraph_node *n;
3005 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3006 ie->indirect_info->otr_token,
3007 ie->indirect_info->context);
3008 if (n)
3010 target = n->decl;
3011 speculative = true;
3015 if (target)
3017 if (!possible_polymorphic_call_target_p
3018 (ie, cgraph_node::get_create (target)))
3020 if (speculative)
3021 return NULL;
3022 target = ipa_impossible_devirt_target (ie, target);
3024 return ipa_make_edge_direct_to_target (ie, target, speculative);
3026 else
3027 return NULL;
3030 /* Update the param called notes associated with NODE when CS is being inlined,
3031 assuming NODE is (potentially indirectly) inlined into CS->callee.
3032 Moreover, if the callee is discovered to be constant, create a new cgraph
3033 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3034 unless NEW_EDGES is NULL. Return true iff new edges were created. */
3036 static bool
3037 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3038 struct cgraph_node *node,
3039 vec<cgraph_edge *> *new_edges)
3041 struct ipa_edge_args *top;
3042 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3043 struct ipa_node_params *new_root_info;
3044 bool res = false;
3046 ipa_check_create_edge_args ();
3047 top = IPA_EDGE_REF (cs);
3048 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3049 ? cs->caller->global.inlined_to
3050 : cs->caller);
3052 for (ie = node->indirect_calls; ie; ie = next_ie)
3054 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3055 struct ipa_jump_func *jfunc;
3056 int param_index;
3058 next_ie = ie->next_callee;
3060 if (ici->param_index == -1)
3061 continue;
3063 /* We must check the range due to calls with a variable number of arguments: */
3064 if (ici->param_index >= ipa_get_cs_argument_count (top))
3066 ici->param_index = -1;
3067 continue;
3070 param_index = ici->param_index;
3071 jfunc = ipa_get_ith_jump_func (top, param_index);
3073 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3074 new_direct_edge = NULL;
3075 else if (ici->polymorphic)
3077 ipa_polymorphic_call_context ctx;
3078 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3079 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3081 else
3082 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3083 new_root_info);
3084 /* If speculation was removed, then we need to do nothing. */
3085 if (new_direct_edge && new_direct_edge != ie)
3087 new_direct_edge->indirect_inlining_edge = 1;
3088 top = IPA_EDGE_REF (cs);
3089 res = true;
3091 else if (new_direct_edge)
3093 new_direct_edge->indirect_inlining_edge = 1;
3094 if (new_direct_edge->call_stmt)
3095 new_direct_edge->call_stmt_cannot_inline_p
3096 = !gimple_check_call_matching_types (
3097 new_direct_edge->call_stmt,
3098 new_direct_edge->callee->decl, false);
3099 if (new_edges)
3101 new_edges->safe_push (new_direct_edge);
3102 res = true;
3104 top = IPA_EDGE_REF (cs);
3106 else if (jfunc->type == IPA_JF_PASS_THROUGH
3107 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3109 if ((ici->agg_contents
3110 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
3111 || (ici->polymorphic
3112 && !ipa_get_jf_pass_through_type_preserved (jfunc)))
3113 ici->param_index = -1;
3114 else
3115 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3117 else if (jfunc->type == IPA_JF_ANCESTOR)
3119 if ((ici->agg_contents
3120 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
3121 || (ici->polymorphic
3122 && !ipa_get_jf_ancestor_type_preserved (jfunc)))
3123 ici->param_index = -1;
3124 else
3126 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3127 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3130 else
3131 /* Either we can find a destination for this edge now, or we never will. */
3132 ici->param_index = -1;
3135 return res;
3138 /* Recursively traverse subtree of NODE (including node) made of inlined
3139 cgraph_edges when CS has been inlined and invoke
3140 update_indirect_edges_after_inlining on all nodes and
3141 update_jump_functions_after_inlining on all non-inlined edges that lead out
3142 of this subtree. Newly discovered indirect edges will be added to
3143 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3144 created. */
3146 static bool
3147 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3148 struct cgraph_node *node,
3149 vec<cgraph_edge *> *new_edges)
3151 struct cgraph_edge *e;
3152 bool res;
3154 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3156 for (e = node->callees; e; e = e->next_callee)
3157 if (!e->inline_failed)
3158 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3159 else
3160 update_jump_functions_after_inlining (cs, e);
3161 for (e = node->indirect_calls; e; e = e->next_callee)
3162 update_jump_functions_after_inlining (cs, e);
3164 return res;
3167 /* Combine two controlled uses counts as done during inlining. */
3169 static int
3170 combine_controlled_uses_counters (int c, int d)
3172 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3173 return IPA_UNDESCRIBED_USE;
3174 else
3175 return c + d - 1;
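/* For instance, combining C == 3 controlled uses in the caller with
   D == 2 described uses in the inlined callee gives 3 + 2 - 1 == 4:
   one of the caller's uses was the argument of the call being inlined,
   and that use disappears with the call itself.  */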
3178 /* Propagate the number of controlled users from CS->callee to the new root of the
3179 tree of inlined nodes. */
3181 static void
3182 propagate_controlled_uses (struct cgraph_edge *cs)
3184 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3185 struct cgraph_node *new_root = cs->caller->global.inlined_to
3186 ? cs->caller->global.inlined_to : cs->caller;
3187 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3188 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3189 int count, i;
3191 count = MIN (ipa_get_cs_argument_count (args),
3192 ipa_get_param_count (old_root_info));
3193 for (i = 0; i < count; i++)
3195 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3196 struct ipa_cst_ref_desc *rdesc;
3198 if (jf->type == IPA_JF_PASS_THROUGH)
3200 int src_idx, c, d;
3201 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3202 c = ipa_get_controlled_uses (new_root_info, src_idx);
3203 d = ipa_get_controlled_uses (old_root_info, i);
3205 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3206 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3207 c = combine_controlled_uses_counters (c, d);
3208 ipa_set_controlled_uses (new_root_info, src_idx, c);
3209 if (c == 0 && new_root_info->ipcp_orig_node)
3211 struct cgraph_node *n;
3212 struct ipa_ref *ref;
3213 tree t = new_root_info->known_csts[src_idx];
3215 if (t && TREE_CODE (t) == ADDR_EXPR
3216 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3217 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3218 && (ref = new_root->find_reference (n, NULL, 0)))
3220 if (dump_file)
3221 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3222 "reference from %s/%i to %s/%i.\n",
3223 xstrdup_for_dump (new_root->name ()),
3224 new_root->order,
3225 xstrdup_for_dump (n->name ()), n->order);
3226 ref->remove_reference ();
3230 else if (jf->type == IPA_JF_CONST
3231 && (rdesc = jfunc_rdesc_usable (jf)))
3233 int d = ipa_get_controlled_uses (old_root_info, i);
3234 int c = rdesc->refcount;
3235 rdesc->refcount = combine_controlled_uses_counters (c, d);
3236 if (rdesc->refcount == 0)
3238 tree cst = ipa_get_jf_constant (jf);
3239 struct cgraph_node *n;
3240 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3241 && TREE_CODE (TREE_OPERAND (cst, 0))
3242 == FUNCTION_DECL);
3243 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3244 if (n)
3246 struct cgraph_node *clone;
3247 bool ok;
3248 ok = remove_described_reference (n, rdesc);
3249 gcc_checking_assert (ok);
3251 clone = cs->caller;
3252 while (clone->global.inlined_to
3253 && clone != rdesc->cs->caller
3254 && IPA_NODE_REF (clone)->ipcp_orig_node)
3256 struct ipa_ref *ref;
3257 ref = clone->find_reference (n, NULL, 0);
3258 if (ref)
3260 if (dump_file)
3261 fprintf (dump_file, "ipa-prop: Removing "
3262 "cloning-created reference "
3263 "from %s/%i to %s/%i.\n",
3264 xstrdup_for_dump (clone->name ()),
3265 clone->order,
3266 xstrdup_for_dump (n->name ()),
3267 n->order);
3268 ref->remove_reference ();
3270 clone = clone->callers->caller;
3277 for (i = ipa_get_param_count (old_root_info);
3278 i < ipa_get_cs_argument_count (args);
3279 i++)
3281 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3283 if (jf->type == IPA_JF_CONST)
3285 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3286 if (rdesc)
3287 rdesc->refcount = IPA_UNDESCRIBED_USE;
3289 else if (jf->type == IPA_JF_PASS_THROUGH)
3290 ipa_set_controlled_uses (new_root_info,
3291 jf->value.pass_through.formal_id,
3292 IPA_UNDESCRIBED_USE);
3296 /* Update jump functions and call note functions on inlining the call site CS.
3297 CS is expected to lead to a node already cloned by
3298 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3299 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3300 created. */
3302 bool
3303 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3304 vec<cgraph_edge *> *new_edges)
3306 bool changed;
3307 /* Do nothing if the preparation phase has not been carried out yet
3308 (i.e. during early inlining). */
3309 if (!ipa_node_params_sum)
3310 return false;
3311 gcc_assert (ipa_edge_args_vector);
3313 propagate_controlled_uses (cs);
3314 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3316 return changed;
3319 /* Frees all dynamically allocated structures that the argument info points
3320 to. */
3322 void
3323 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3325 vec_free (args->jump_functions);
3326 memset (args, 0, sizeof (*args));
3329 /* Free all ipa_edge_args structures. */
3331 void
3332 ipa_free_all_edge_args (void)
3334 int i;
3335 struct ipa_edge_args *args;
3337 if (!ipa_edge_args_vector)
3338 return;
3340 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3341 ipa_free_edge_args_substructures (args);
3343 vec_free (ipa_edge_args_vector);
3346 /* Frees all dynamically allocated structures that the param info points
3347 to. */
3349 ipa_node_params::~ipa_node_params ()
3351 descriptors.release ();
3352 free (lattices);
3353 /* Lattice values and their sources are deallocated with their allocation
3354 pool. */
3355 known_contexts.release ();
3357 lattices = NULL;
3358 ipcp_orig_node = NULL;
3359 analysis_done = 0;
3360 node_enqueued = 0;
3361 do_clone_for_all_contexts = 0;
3362 is_all_contexts_clone = 0;
3363 node_dead = 0;
3366 /* Free all ipa_node_params structures. */
3368 void
3369 ipa_free_all_node_params (void)
3371 delete ipa_node_params_sum;
3372 ipa_node_params_sum = NULL;
3375 /* Grow ipcp_transformations if necessary. */
3377 void
3378 ipcp_grow_transformations_if_necessary (void)
3380 if (vec_safe_length (ipcp_transformations)
3381 <= (unsigned) symtab->cgraph_max_uid)
3382 vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
3385 /* Set the aggregate replacements of NODE to be AGGVALS. */
3387 void
3388 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3389 struct ipa_agg_replacement_value *aggvals)
3391 ipcp_grow_transformations_if_necessary ();
3392 (*ipcp_transformations)[node->uid].agg_values = aggvals;
3395 /* Hook that is called by cgraph.c when an edge is removed. */
3397 static void
3398 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3400 struct ipa_edge_args *args;
3402 /* During IPA-CP updating we can be called on not-yet-analyzed clones. */
3403 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3404 return;
3406 args = IPA_EDGE_REF (cs);
3407 if (args->jump_functions)
3409 struct ipa_jump_func *jf;
3410 int i;
3411 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3413 struct ipa_cst_ref_desc *rdesc;
3414 try_decrement_rdesc_refcount (jf);
3415 if (jf->type == IPA_JF_CONST
3416 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3417 && rdesc->cs == cs)
3418 rdesc->cs = NULL;
3422 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3425 /* Hook that is called by cgraph.c when an edge is duplicated. */
3427 static void
3428 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3429 void *)
3431 struct ipa_edge_args *old_args, *new_args;
3432 unsigned int i;
3434 ipa_check_create_edge_args ();
3436 old_args = IPA_EDGE_REF (src);
3437 new_args = IPA_EDGE_REF (dst);
3439 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3440 if (old_args->polymorphic_call_contexts)
3441 new_args->polymorphic_call_contexts
3442 = vec_safe_copy (old_args->polymorphic_call_contexts);
3444 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3446 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3447 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3449 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3451 if (src_jf->type == IPA_JF_CONST)
3453 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3455 if (!src_rdesc)
3456 dst_jf->value.constant.rdesc = NULL;
3457 else if (src->caller == dst->caller)
3459 struct ipa_ref *ref;
3460 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3461 gcc_checking_assert (n);
3462 ref = src->caller->find_reference (n, src->call_stmt,
3463 src->lto_stmt_uid);
3464 gcc_checking_assert (ref);
3465 dst->caller->clone_reference (ref, ref->stmt);
3467 gcc_checking_assert (ipa_refdesc_pool);
3468 struct ipa_cst_ref_desc *dst_rdesc
3469 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3470 dst_rdesc->cs = dst;
3471 dst_rdesc->refcount = src_rdesc->refcount;
3472 dst_rdesc->next_duplicate = NULL;
3473 dst_jf->value.constant.rdesc = dst_rdesc;
3475 else if (src_rdesc->cs == src)
3477 struct ipa_cst_ref_desc *dst_rdesc;
3478 gcc_checking_assert (ipa_refdesc_pool);
3479 dst_rdesc
3480 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3481 dst_rdesc->cs = dst;
3482 dst_rdesc->refcount = src_rdesc->refcount;
3483 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3484 src_rdesc->next_duplicate = dst_rdesc;
3485 dst_jf->value.constant.rdesc = dst_rdesc;
3487 else
3489 struct ipa_cst_ref_desc *dst_rdesc;
3490 /* This can happen during inlining, when a JFUNC can refer to a
3491 reference taken in a function up in the tree of inline clones.
3492 We need to find the duplicate that refers to our tree of
3493 inline clones. */
3495 gcc_assert (dst->caller->global.inlined_to);
3496 for (dst_rdesc = src_rdesc->next_duplicate;
3497 dst_rdesc;
3498 dst_rdesc = dst_rdesc->next_duplicate)
3500 struct cgraph_node *top;
3501 top = dst_rdesc->cs->caller->global.inlined_to
3502 ? dst_rdesc->cs->caller->global.inlined_to
3503 : dst_rdesc->cs->caller;
3504 if (dst->caller->global.inlined_to == top)
3505 break;
3507 gcc_assert (dst_rdesc);
3508 dst_jf->value.constant.rdesc = dst_rdesc;
3511 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3512 && src->caller == dst->caller)
3514 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3515 ? dst->caller->global.inlined_to : dst->caller;
3516 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3517 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3519 int c = ipa_get_controlled_uses (root_info, idx);
3520 if (c != IPA_UNDESCRIBED_USE)
3522 c++;
3523 ipa_set_controlled_uses (root_info, idx, c);
3529 /* Analyze a function newly added to the call graph. */
3531 static void
3532 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3534 if (node->has_gimple_body_p ())
3535 ipa_analyze_node (node);
3538 /* Hook that is called by summary when a node is duplicated. */
3540 void
3541 ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
3542 ipa_node_params *old_info,
3543 ipa_node_params *new_info)
3545 ipa_agg_replacement_value *old_av, *new_av;
3547 new_info->descriptors = old_info->descriptors.copy ();
3548 new_info->lattices = NULL;
3549 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3551 new_info->analysis_done = old_info->analysis_done;
3552 new_info->node_enqueued = old_info->node_enqueued;
3554 old_av = ipa_get_agg_replacements_for_node (src);
3555 if (old_av)
3557 new_av = NULL;
3558 while (old_av)
3560 struct ipa_agg_replacement_value *v;
3562 v = ggc_alloc<ipa_agg_replacement_value> ();
3563 memcpy (v, old_av, sizeof (*v));
3564 v->next = new_av;
3565 new_av = v;
3566 old_av = old_av->next;
3568 ipa_set_node_agg_value_chain (dst, new_av);
3571 ipcp_transformation_summary *src_trans = ipcp_get_transformation_summary (src);
3573 if (src_trans && vec_safe_length (src_trans->alignments) > 0)
3575 ipcp_grow_transformations_if_necessary ();
3576 src_trans = ipcp_get_transformation_summary (src);
3577 const vec<ipa_alignment, va_gc> *src_alignments = src_trans->alignments;
3578 vec<ipa_alignment, va_gc> *&dst_alignments
3579 = ipcp_get_transformation_summary (dst)->alignments;
3580 vec_safe_reserve_exact (dst_alignments, src_alignments->length ());
3581 for (unsigned i = 0; i < src_alignments->length (); ++i)
3582 dst_alignments->quick_push ((*src_alignments)[i]);
3586 /* Register our cgraph hooks if they are not already there. */
3588 void
3589 ipa_register_cgraph_hooks (void)
3591 ipa_check_create_node_params ();
3593 if (!edge_removal_hook_holder)
3594 edge_removal_hook_holder =
3595 symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3596 if (!edge_duplication_hook_holder)
3597 edge_duplication_hook_holder =
3598 symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3599 function_insertion_hook_holder =
3600 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3603 /* Unregister our cgraph hooks. */
3605 static void
3606 ipa_unregister_cgraph_hooks (void)
3608 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
3609 edge_removal_hook_holder = NULL;
3610 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
3611 edge_duplication_hook_holder = NULL;
3612 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3613 function_insertion_hook_holder = NULL;
3616 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3617 longer needed after ipa-cp. */
3619 void
3620 ipa_free_all_structures_after_ipa_cp (void)
3622 if (!optimize && !in_lto_p)
3624 ipa_free_all_edge_args ();
3625 ipa_free_all_node_params ();
3626 free_alloc_pool (ipcp_sources_pool);
3627 free_alloc_pool (ipcp_cst_values_pool);
3628 free_alloc_pool (ipcp_poly_ctx_values_pool);
3629 free_alloc_pool (ipcp_agg_lattice_pool);
3630 ipa_unregister_cgraph_hooks ();
3631 if (ipa_refdesc_pool)
3632 free_alloc_pool (ipa_refdesc_pool);
3636 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3637 longer needed after indirect inlining. */
3639 void
3640 ipa_free_all_structures_after_iinln (void)
3642 ipa_free_all_edge_args ();
3643 ipa_free_all_node_params ();
3644 ipa_unregister_cgraph_hooks ();
3645 if (ipcp_sources_pool)
3646 free_alloc_pool (ipcp_sources_pool);
3647 if (ipcp_cst_values_pool)
3648 free_alloc_pool (ipcp_cst_values_pool);
3649 if (ipcp_poly_ctx_values_pool)
3650 free_alloc_pool (ipcp_poly_ctx_values_pool);
3651 if (ipcp_agg_lattice_pool)
3652 free_alloc_pool (ipcp_agg_lattice_pool);
3653 if (ipa_refdesc_pool)
3654 free_alloc_pool (ipa_refdesc_pool);
3657 /* Print the ipa_tree_map data structure of function NODE to F. */
3660 void
3661 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3663 int i, count;
3664 struct ipa_node_params *info;
3666 if (!node->definition)
3667 return;
3668 info = IPA_NODE_REF (node);
3669 fprintf (f, " function %s/%i parameter descriptors:\n",
3670 node->name (), node->order);
3671 count = ipa_get_param_count (info);
3672 for (i = 0; i < count; i++)
3674 int c;
3676 fprintf (f, " ");
3677 ipa_dump_param (f, info, i);
3678 if (ipa_is_param_used (info, i))
3679 fprintf (f, " used");
3680 c = ipa_get_controlled_uses (info, i);
3681 if (c == IPA_UNDESCRIBED_USE)
3682 fprintf (f, " undescribed_use");
3683 else
3684 fprintf (f, " controlled_uses=%i", c);
3685 fprintf (f, "\n");
3689 /* Print ipa_tree_map data structures of all functions in the
3690 callgraph to F. */
3692 void
3693 ipa_print_all_params (FILE * f)
3695 struct cgraph_node *node;
3697 fprintf (f, "\nFunction parameters:\n");
3698 FOR_EACH_FUNCTION (node)
3699 ipa_print_node_params (f, node);
3702 /* Return a heap-allocated vector containing the formal parameters of FNDECL. */
3704 vec<tree>
3705 ipa_get_vector_of_formal_parms (tree fndecl)
3707 vec<tree> args;
3708 int count;
3709 tree parm;
3711 gcc_assert (!flag_wpa);
3712 count = count_formal_params (fndecl);
3713 args.create (count);
3714 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3715 args.quick_push (parm);
3717 return args;
3720 /* Return a heap-allocated vector containing the types of formal parameters of
3721 function type FNTYPE. */
3723 vec<tree>
3724 ipa_get_vector_of_formal_parm_types (tree fntype)
3726 vec<tree> types;
3727 int count = 0;
3728 tree t;
3730 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3731 count++;
3733 types.create (count);
3734 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3735 types.quick_push (TREE_VALUE (t));
3737 return types;
3740 /* Modify the function declaration FNDECL and its type according to the plan in
3741 ADJUSTMENTS. It also sets base fields of individual adjustments structures
3742 to reflect the actual parameters being modified which are determined by the
3743 base_index field. */
3745 void
3746 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3748 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3749 tree orig_type = TREE_TYPE (fndecl);
3750 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3752 /* The following test is an ugly hack; some functions simply don't have any
3753 arguments in their type. This is probably a bug but well... */
3754 bool care_for_types = (old_arg_types != NULL_TREE);
3755 bool last_parm_void;
3756 vec<tree> otypes;
3757 if (care_for_types)
3759 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3760 == void_type_node);
3761 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3762 if (last_parm_void)
3763 gcc_assert (oparms.length () + 1 == otypes.length ());
3764 else
3765 gcc_assert (oparms.length () == otypes.length ());
3767 else
3769 last_parm_void = false;
3770 otypes.create (0);
3773 int len = adjustments.length ();
3774 tree *link = &DECL_ARGUMENTS (fndecl);
3775 tree new_arg_types = NULL;
3776 for (int i = 0; i < len; i++)
3778 struct ipa_parm_adjustment *adj;
3779 gcc_assert (link);
3781 adj = &adjustments[i];
3782 tree parm;
3783 if (adj->op == IPA_PARM_OP_NEW)
3784 parm = NULL;
3785 else
3786 parm = oparms[adj->base_index];
3787 adj->base = parm;
3789 if (adj->op == IPA_PARM_OP_COPY)
3791 if (care_for_types)
3792 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3793 new_arg_types);
3794 *link = parm;
3795 link = &DECL_CHAIN (parm);
3797 else if (adj->op != IPA_PARM_OP_REMOVE)
3799 tree new_parm;
3800 tree ptype;
3802 if (adj->by_ref)
3803 ptype = build_pointer_type (adj->type);
3804 else
3806 ptype = adj->type;
3807 if (is_gimple_reg_type (ptype))
3809 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3810 if (TYPE_ALIGN (ptype) < malign)
3811 ptype = build_aligned_type (ptype, malign);
3815 if (care_for_types)
3816 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3818 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3819 ptype);
3820 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
3821 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3822 DECL_ARTIFICIAL (new_parm) = 1;
3823 DECL_ARG_TYPE (new_parm) = ptype;
3824 DECL_CONTEXT (new_parm) = fndecl;
3825 TREE_USED (new_parm) = 1;
3826 DECL_IGNORED_P (new_parm) = 1;
3827 layout_decl (new_parm, 0);
3829 if (adj->op == IPA_PARM_OP_NEW)
3830 adj->base = NULL;
3831 else
3832 adj->base = parm;
3833 adj->new_decl = new_parm;
3835 *link = new_parm;
3836 link = &DECL_CHAIN (new_parm);
3840 *link = NULL_TREE;
3842 tree new_reversed = NULL;
3843 if (care_for_types)
3845 new_reversed = nreverse (new_arg_types);
3846 if (last_parm_void)
3848 if (new_reversed)
3849 TREE_CHAIN (new_arg_types) = void_list_node;
3850 else
3851 new_reversed = void_list_node;
3855 /* Use copy_node to preserve as much as possible from the original type
3856 (debug info, attribute lists etc.).
3857 The exception is METHOD_TYPEs, which must have a THIS argument.
3858 When we are asked to remove it, we need to build a new FUNCTION_TYPE
3859 instead. */
3860 tree new_type = NULL;
3861 if (TREE_CODE (orig_type) != METHOD_TYPE
3862 || (adjustments[0].op == IPA_PARM_OP_COPY
3863 && adjustments[0].base_index == 0))
3865 new_type = build_distinct_type_copy (orig_type);
3866 TYPE_ARG_TYPES (new_type) = new_reversed;
3868 else
3870 new_type
3871 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3872 new_reversed));
3873 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3874 DECL_VINDEX (fndecl) = NULL_TREE;
3877 /* When the signature changes, we need to clear builtin info. */
3878 if (DECL_BUILT_IN (fndecl))
3880 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3881 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3884 TREE_TYPE (fndecl) = new_type;
3885 DECL_VIRTUAL_P (fndecl) = 0;
3886 DECL_LANG_SPECIFIC (fndecl) = NULL;
3887 otypes.release ();
3888 oparms.release ();
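/* A standalone sketch (hypothetical types, not GCC's) of the link-pointer
   technique used above to rewrite DECL_ARGUMENTS: LINK always addresses the
   chain field that receives the next surviving parameter, so copies relink
   the old node, new parameters are spliced in, and removals simply push
   nothing. */

#include <stdlib.h>

enum parm_op { PARM_OP_COPY, PARM_OP_REMOVE, PARM_OP_NEW };

struct parm { const char *name; struct parm *chain; };
struct parm_adj { enum parm_op op; int base_index; const char *new_name; };

static void
rewrite_parms (struct parm **head, struct parm **oparms,
	       struct parm_adj *adj, int len)
{
  struct parm **link = head;
  for (int i = 0; i < len; i++)
    {
      if (adj[i].op == PARM_OP_COPY)
	{
	  *link = oparms[adj[i].base_index];	/* keep the original node */
	  link = &(*link)->chain;
	}
      else if (adj[i].op == PARM_OP_NEW)
	{
	  struct parm *p = calloc (1, sizeof (struct parm));
	  p->name = adj[i].new_name;		/* synthesized parameter */
	  *link = p;
	  link = &p->chain;
	}
      /* PARM_OP_REMOVE links nothing; the parameter is dropped.  */
    }
  *link = NULL;		/* terminate the rewritten chain */
}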
3891 /* Modify the actual arguments of the function call STMT as indicated in
3892 ADJUSTMENTS. If this is a directly recursive call, CS must be NULL;
3893 otherwise it must contain the corresponding call graph edge. */
3895 void
3896 ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
3897 ipa_parm_adjustment_vec adjustments)
3899 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
3900 vec<tree> vargs;
3901 vec<tree, va_gc> **debug_args = NULL;
3902 gcall *new_stmt;
3903 gimple_stmt_iterator gsi, prev_gsi;
3904 tree callee_decl;
3905 int i, len;
3907 len = adjustments.length ();
3908 vargs.create (len);
3909 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
3910 current_node->remove_stmt_references (stmt);
3912 gsi = gsi_for_stmt (stmt);
3913 prev_gsi = gsi;
3914 gsi_prev (&prev_gsi);
3915 for (i = 0; i < len; i++)
3917 struct ipa_parm_adjustment *adj;
3919 adj = &adjustments[i];
3921 if (adj->op == IPA_PARM_OP_COPY)
3923 tree arg = gimple_call_arg (stmt, adj->base_index);
3925 vargs.quick_push (arg);
3927 else if (adj->op != IPA_PARM_OP_REMOVE)
3929 tree expr, base, off;
3930 location_t loc;
3931 unsigned int deref_align = 0;
3932 bool deref_base = false;
3934 /* Since we create a new parameter out of the value of the old one, we can
3935 perform the following kinds of transformations:
3937 - A scalar passed by reference is converted to a scalar passed by
3938 value. (adj->by_ref is false and the type of the original
3939 actual argument is a pointer to a scalar).
3941 - A part of an aggregate is passed instead of the whole aggregate.
3942 The part can be passed either by value or by reference; this is
3943 determined by the value of adj->by_ref. Moreover, the code below
3944 handles both situations: when the original aggregate is passed by
3945 value (its type is not a pointer) and when it is passed by
3946 reference (it is a pointer to an aggregate).
3948 When the new argument is passed by reference (adj->by_ref is true),
3949 it must be a part of an aggregate, and therefore we form it by
3950 simply taking the address of a reference inside the original
3951 aggregate. */
3953 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3954 base = gimple_call_arg (stmt, adj->base_index);
3955 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3956 : EXPR_LOCATION (base);
3958 if (TREE_CODE (base) != ADDR_EXPR
3959 && POINTER_TYPE_P (TREE_TYPE (base)))
3960 off = build_int_cst (adj->alias_ptr_type,
3961 adj->offset / BITS_PER_UNIT);
3962 else
3964 HOST_WIDE_INT base_offset;
3965 tree prev_base;
3966 bool addrof;
3968 if (TREE_CODE (base) == ADDR_EXPR)
3970 base = TREE_OPERAND (base, 0);
3971 addrof = true;
3973 else
3974 addrof = false;
3975 prev_base = base;
3976 base = get_addr_base_and_unit_offset (base, &base_offset);
3977 /* Aggregate arguments can have non-invariant addresses. */
3978 if (!base)
3980 base = build_fold_addr_expr (prev_base);
3981 off = build_int_cst (adj->alias_ptr_type,
3982 adj->offset / BITS_PER_UNIT);
3984 else if (TREE_CODE (base) == MEM_REF)
3986 if (!addrof)
3988 deref_base = true;
3989 deref_align = TYPE_ALIGN (TREE_TYPE (base));
3991 off = build_int_cst (adj->alias_ptr_type,
3992 base_offset
3993 + adj->offset / BITS_PER_UNIT);
3994 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
3995 off);
3996 base = TREE_OPERAND (base, 0);
3998 else
4000 off = build_int_cst (adj->alias_ptr_type,
4001 base_offset
4002 + adj->offset / BITS_PER_UNIT);
4003 base = build_fold_addr_expr (base);
4007 if (!adj->by_ref)
4009 tree type = adj->type;
4010 unsigned int align;
4011 unsigned HOST_WIDE_INT misalign;
4013 if (deref_base)
4015 align = deref_align;
4016 misalign = 0;
4018 else
4020 get_pointer_alignment_1 (base, &align, &misalign);
4021 if (TYPE_ALIGN (type) > align)
4022 align = TYPE_ALIGN (type);
4024 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
4025 * BITS_PER_UNIT);
4026 misalign = misalign & (align - 1);
4027 if (misalign != 0)
4028 align = (misalign & -misalign);
4029 if (align < TYPE_ALIGN (type))
4030 type = build_aligned_type (type, align);
4031 base = force_gimple_operand_gsi (&gsi, base,
4032 true, NULL, true, GSI_SAME_STMT);
4033 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
4034 /* If expr is not a valid gimple call argument, emit
4035 a load into a temporary. */
4036 if (is_gimple_reg_type (TREE_TYPE (expr)))
4038 gimple tem = gimple_build_assign (NULL_TREE, expr);
4039 if (gimple_in_ssa_p (cfun))
4041 gimple_set_vuse (tem, gimple_vuse (stmt));
4042 expr = make_ssa_name (TREE_TYPE (expr), tem);
4044 else
4045 expr = create_tmp_reg (TREE_TYPE (expr));
4046 gimple_assign_set_lhs (tem, expr);
4047 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4050 else
4052 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4053 expr = build_fold_addr_expr (expr);
4054 expr = force_gimple_operand_gsi (&gsi, expr,
4055 true, NULL, true, GSI_SAME_STMT);
4057 vargs.quick_push (expr);
4059 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4061 unsigned int ix;
4062 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4063 gimple def_temp;
4065 arg = gimple_call_arg (stmt, adj->base_index);
4066 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4068 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4069 continue;
4070 arg = fold_convert_loc (gimple_location (stmt),
4071 TREE_TYPE (origin), arg);
4073 if (debug_args == NULL)
4074 debug_args = decl_debug_args_insert (callee_decl);
4075 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4076 if (ddecl == origin)
4078 ddecl = (**debug_args)[ix + 1];
4079 break;
4081 if (ddecl == NULL)
4083 ddecl = make_node (DEBUG_EXPR_DECL);
4084 DECL_ARTIFICIAL (ddecl) = 1;
4085 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4086 DECL_MODE (ddecl) = DECL_MODE (origin);
4088 vec_safe_push (*debug_args, origin);
4089 vec_safe_push (*debug_args, ddecl);
4091 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4092 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4096 if (dump_file && (dump_flags & TDF_DETAILS))
4098 fprintf (dump_file, "replacing stmt:");
4099 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4102 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4103 vargs.release ();
4104 if (gimple_call_lhs (stmt))
4105 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4107 gimple_set_block (new_stmt, gimple_block (stmt));
4108 if (gimple_has_location (stmt))
4109 gimple_set_location (new_stmt, gimple_location (stmt));
4110 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4111 gimple_call_copy_flags (new_stmt, stmt);
4112 if (gimple_in_ssa_p (cfun))
4114 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4115 if (gimple_vdef (stmt))
4117 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4118 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4122 if (dump_file && (dump_flags & TDF_DETAILS))
4124 fprintf (dump_file, "with stmt:");
4125 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4126 fprintf (dump_file, "\n");
4128 gsi_replace (&gsi, new_stmt, true);
4129 if (cs)
4130 cs->set_call_stmt (new_stmt);
4133 current_node->record_stmt_references (gsi_stmt (gsi));
4134 gsi_prev (&gsi);
4136 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
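/* A standalone sketch (plain C, not GCC code) of the alignment arithmetic
   used above when the MEM_REF for a new by-value argument is built: the
   byte offset is folded into the misalignment, and a nonzero misalignment
   caps the usable alignment at its lowest set bit.  All quantities are in
   bits. */

static unsigned
align_after_offset (unsigned align, unsigned misalign, unsigned offset_bits)
{
  misalign = (misalign + offset_bits) & (align - 1);
  if (misalign != 0)
    align = misalign & -misalign;	/* lowest set bit of the misalignment */
  return align;
}

/* E.g. a 16-byte (128-bit) aligned base accessed 4 bytes (32 bits) in is
   only 4-byte aligned: align_after_offset (128, 0, 32) == 32.  */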
4139 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4140 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4141 specifies whether the function should care about type incompatibility between the
4142 current and new expressions. If it is false, the function will leave
4143 incompatibility issues to the caller. Return true iff the expression
4144 was modified. */
4146 bool
4147 ipa_modify_expr (tree *expr, bool convert,
4148 ipa_parm_adjustment_vec adjustments)
4150 struct ipa_parm_adjustment *cand
4151 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4152 if (!cand)
4153 return false;
4155 tree src;
4156 if (cand->by_ref)
4157 src = build_simple_mem_ref (cand->new_decl);
4158 else
4159 src = cand->new_decl;
4161 if (dump_file && (dump_flags & TDF_DETAILS))
4163 fprintf (dump_file, "About to replace expr ");
4164 print_generic_expr (dump_file, *expr, 0);
4165 fprintf (dump_file, " with ");
4166 print_generic_expr (dump_file, src, 0);
4167 fprintf (dump_file, "\n");
4170 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4172 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4173 *expr = vce;
4175 else
4176 *expr = src;
4177 return true;
4180 /* If T is an SSA_NAME, return NULL if it is not a default def or
4181 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4182 the base variable is always returned, regardless of whether it is a default
4183 def. Return T if it is not an SSA_NAME. */
4185 static tree
4186 get_ssa_base_param (tree t, bool ignore_default_def)
4188 if (TREE_CODE (t) == SSA_NAME)
4190 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4191 return SSA_NAME_VAR (t);
4192 else
4193 return NULL_TREE;
4195 return t;
4198 /* Given an expression, return an adjustment entry specifying the
4199 transformation to be done on EXPR. If no suitable adjustment entry
4200 was found, returns NULL.
4202 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4203 default def, otherwise bail on them.
4205 If CONVERT is non-NULL, this function will set *CONVERT if the
4206 expression provided is a component reference. ADJUSTMENTS is the
4207 adjustments vector. */
4209 ipa_parm_adjustment *
4210 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4211 ipa_parm_adjustment_vec adjustments,
4212 bool ignore_default_def)
4214 if (TREE_CODE (**expr) == BIT_FIELD_REF
4215 || TREE_CODE (**expr) == IMAGPART_EXPR
4216 || TREE_CODE (**expr) == REALPART_EXPR)
4218 *expr = &TREE_OPERAND (**expr, 0);
4219 if (convert)
4220 *convert = true;
4223 HOST_WIDE_INT offset, size, max_size;
4224 tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
4225 if (!base || size == -1 || max_size == -1)
4226 return NULL;
4228 if (TREE_CODE (base) == MEM_REF)
4230 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4231 base = TREE_OPERAND (base, 0);
4234 base = get_ssa_base_param (base, ignore_default_def);
4235 if (!base || TREE_CODE (base) != PARM_DECL)
4236 return NULL;
4238 struct ipa_parm_adjustment *cand = NULL;
4239 unsigned int len = adjustments.length ();
4240 for (unsigned i = 0; i < len; i++)
4242 struct ipa_parm_adjustment *adj = &adjustments[i];
4244 if (adj->base == base
4245 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4247 cand = adj;
4248 break;
4252 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4253 return NULL;
4254 return cand;
4257 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4259 static bool
4260 index_in_adjustments_multiple_times_p (int base_index,
4261 ipa_parm_adjustment_vec adjustments)
4263 int i, len = adjustments.length ();
4264 bool one = false;
4266 for (i = 0; i < len; i++)
4268 struct ipa_parm_adjustment *adj;
4269 adj = &adjustments[i];
4271 if (adj->base_index == base_index)
4273 if (one)
4274 return true;
4275 else
4276 one = true;
4279 return false;
4283 /* Return adjustments that should have the same effect on function parameters
4284 and call arguments as if they were first changed according to adjustments in
4285 INNER and then by adjustments in OUTER. */
4287 ipa_parm_adjustment_vec
4288 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4289 ipa_parm_adjustment_vec outer)
4291 int i, outlen = outer.length ();
4292 int inlen = inner.length ();
4293 int removals = 0;
4294 ipa_parm_adjustment_vec adjustments, tmp;
4296 tmp.create (inlen);
4297 for (i = 0; i < inlen; i++)
4299 struct ipa_parm_adjustment *n;
4300 n = &inner[i];
4302 if (n->op == IPA_PARM_OP_REMOVE)
4303 removals++;
4304 else
4306 /* FIXME: Handling of new arguments is not implemented yet. */
4307 gcc_assert (n->op != IPA_PARM_OP_NEW);
4308 tmp.quick_push (*n);
4312 adjustments.create (outlen + removals);
4313 for (i = 0; i < outlen; i++)
4315 struct ipa_parm_adjustment r;
4316 struct ipa_parm_adjustment *out = &outer[i];
4317 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4319 memset (&r, 0, sizeof (r));
4320 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4321 if (out->op == IPA_PARM_OP_REMOVE)
4323 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4325 r.op = IPA_PARM_OP_REMOVE;
4326 adjustments.quick_push (r);
4328 continue;
4330 else
4332 /* FIXME: Handling of new arguments is not implemented yet. */
4333 gcc_assert (out->op != IPA_PARM_OP_NEW);
4336 r.base_index = in->base_index;
4337 r.type = out->type;
4339 /* FIXME: Create nonlocal value too. */
4341 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4342 r.op = IPA_PARM_OP_COPY;
4343 else if (in->op == IPA_PARM_OP_COPY)
4344 r.offset = out->offset;
4345 else if (out->op == IPA_PARM_OP_COPY)
4346 r.offset = in->offset;
4347 else
4348 r.offset = in->offset + out->offset;
4349 adjustments.quick_push (r);
4352 for (i = 0; i < inlen; i++)
4354 struct ipa_parm_adjustment *n = &inner[i];
4356 if (n->op == IPA_PARM_OP_REMOVE)
4357 adjustments.quick_push (*n);
4360 tmp.release ();
4361 return adjustments;
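/* A standalone sketch (hypothetical types) of the offset-composition rule
   implemented above: a copy composed with anything inherits the other
   side's offset, while two genuine reductions nest, so their offsets add.
   As in the FIXMEs above, IPA_PARM_OP_NEW is not handled. */

enum aop { AOP_COPY, AOP_REDUCE };

struct mini_adj { enum aop op; long offset; };

static struct mini_adj
compose_adj (struct mini_adj in, struct mini_adj out)
{
  struct mini_adj r = { AOP_REDUCE, 0 };
  if (in.op == AOP_COPY && out.op == AOP_COPY)
    r.op = AOP_COPY;			/* copy of a copy is a copy */
  else if (in.op == AOP_COPY)
    r.offset = out.offset;		/* outer reduction of a plain copy */
  else if (out.op == AOP_COPY)
    r.offset = in.offset;		/* plain copy of an inner reduction */
  else
    r.offset = in.offset + out.offset;	/* nested reductions accumulate */
  return r;
}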
4364 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a human-friendly
4365 way, assuming they are meant to be applied to FNDECL. */
4367 void
4368 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4369 tree fndecl)
4371 int i, len = adjustments.length ();
4372 bool first = true;
4373 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4375 fprintf (file, "IPA param adjustments: ");
4376 for (i = 0; i < len; i++)
4378 struct ipa_parm_adjustment *adj;
4379 adj = &adjustments[i];
4381 if (!first)
4382 fprintf (file, " ");
4383 else
4384 first = false;
4386 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4387 print_generic_expr (file, parms[adj->base_index], 0);
4388 if (adj->base)
4390 fprintf (file, ", base: ");
4391 print_generic_expr (file, adj->base, 0);
4393 if (adj->new_decl)
4395 fprintf (file, ", new_decl: ");
4396 print_generic_expr (file, adj->new_decl, 0);
4398 if (adj->new_ssa_base)
4400 fprintf (file, ", new_ssa_base: ");
4401 print_generic_expr (file, adj->new_ssa_base, 0);
4404 if (adj->op == IPA_PARM_OP_COPY)
4405 fprintf (file, ", copy_param");
4406 else if (adj->op == IPA_PARM_OP_REMOVE)
4407 fprintf (file, ", remove_param");
4408 else
4409 fprintf (file, ", offset %li", (long) adj->offset);
4410 if (adj->by_ref)
4411 fprintf (file, ", by_ref");
4412 print_node_brief (file, ", type: ", adj->type, 0);
4413 fprintf (file, "\n");
4415 parms.release ();
4418 /* Dump the linked list of aggregate replacement values AV to F. */
4420 void
4421 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4423 bool comma = false;
4424 fprintf (f, " Aggregate replacements:");
4425 for (; av; av = av->next)
4427 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4428 av->index, av->offset);
4429 print_generic_expr (f, av->value, 0);
4430 comma = true;
4432 fprintf (f, "\n");
4435 /* Stream out jump function JUMP_FUNC to OB. */
4437 static void
4438 ipa_write_jump_function (struct output_block *ob,
4439 struct ipa_jump_func *jump_func)
4441 struct ipa_agg_jf_item *item;
4442 struct bitpack_d bp;
4443 int i, count;
4445 streamer_write_uhwi (ob, jump_func->type);
4446 switch (jump_func->type)
4448 case IPA_JF_UNKNOWN:
4449 break;
4450 case IPA_JF_CONST:
4451 gcc_assert (
4452 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4453 stream_write_tree (ob, jump_func->value.constant.value, true);
4454 break;
4455 case IPA_JF_PASS_THROUGH:
4456 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4457 if (jump_func->value.pass_through.operation == NOP_EXPR)
4459 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4460 bp = bitpack_create (ob->main_stream);
4461 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4462 streamer_write_bitpack (&bp);
4464 else
4466 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4467 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4469 break;
4470 case IPA_JF_ANCESTOR:
4471 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4472 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4473 bp = bitpack_create (ob->main_stream);
4474 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4475 streamer_write_bitpack (&bp);
4476 break;
4479 count = vec_safe_length (jump_func->agg.items);
4480 streamer_write_uhwi (ob, count);
4481 if (count)
4483 bp = bitpack_create (ob->main_stream);
4484 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4485 streamer_write_bitpack (&bp);
4488 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4490 streamer_write_uhwi (ob, item->offset);
4491 stream_write_tree (ob, item->value, true);
4494 bp = bitpack_create (ob->main_stream);
4495 bp_pack_value (&bp, jump_func->alignment.known, 1);
4496 streamer_write_bitpack (&bp);
4497 if (jump_func->alignment.known)
4499 streamer_write_uhwi (ob, jump_func->alignment.align);
4500 streamer_write_uhwi (ob, jump_func->alignment.misalign);
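/* A standalone miniature (not GCC's bitpack_d) of the bit-packing pattern
   the streamer calls above rely on: single-bit flags are accumulated into
   one word and flushed with a single write, and the reader must unpack
   them in exactly the same order.  Assumes fewer than 64 bits per word. */

struct mini_bp { unsigned long long word; unsigned pos; };

static void
mini_bp_pack (struct mini_bp *bp, unsigned long long val, unsigned nbits)
{
  bp->word |= (val & ((1ULL << nbits) - 1)) << bp->pos;
  bp->pos += nbits;
}

static unsigned long long
mini_bp_unpack (struct mini_bp *bp, unsigned nbits)
{
  unsigned long long v = (bp->word >> bp->pos) & ((1ULL << nbits) - 1);
  bp->pos += nbits;
  return v;
}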
4504 /* Read in jump function JUMP_FUNC from IB. */
4506 static void
4507 ipa_read_jump_function (struct lto_input_block *ib,
4508 struct ipa_jump_func *jump_func,
4509 struct cgraph_edge *cs,
4510 struct data_in *data_in)
4512 enum jump_func_type jftype;
4513 enum tree_code operation;
4514 int i, count;
4516 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4517 switch (jftype)
4519 case IPA_JF_UNKNOWN:
4520 ipa_set_jf_unknown (jump_func);
4521 break;
4522 case IPA_JF_CONST:
4523 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4524 break;
4525 case IPA_JF_PASS_THROUGH:
4526 operation = (enum tree_code) streamer_read_uhwi (ib);
4527 if (operation == NOP_EXPR)
4529 int formal_id = streamer_read_uhwi (ib);
4530 struct bitpack_d bp = streamer_read_bitpack (ib);
4531 bool agg_preserved = bp_unpack_value (&bp, 1);
4532 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4534 else
4536 tree operand = stream_read_tree (ib, data_in);
4537 int formal_id = streamer_read_uhwi (ib);
4538 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4539 operation);
4541 break;
4542 case IPA_JF_ANCESTOR:
4544 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4545 int formal_id = streamer_read_uhwi (ib);
4546 struct bitpack_d bp = streamer_read_bitpack (ib);
4547 bool agg_preserved = bp_unpack_value (&bp, 1);
4548 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4549 break;
4553 count = streamer_read_uhwi (ib);
4554 vec_alloc (jump_func->agg.items, count);
4555 if (count)
4557 struct bitpack_d bp = streamer_read_bitpack (ib);
4558 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4560 for (i = 0; i < count; i++)
4562 struct ipa_agg_jf_item item;
4563 item.offset = streamer_read_uhwi (ib);
4564 item.value = stream_read_tree (ib, data_in);
4565 jump_func->agg.items->quick_push (item);
4568 struct bitpack_d bp = streamer_read_bitpack (ib);
4569 bool alignment_known = bp_unpack_value (&bp, 1);
4570 if (alignment_known)
4572 jump_func->alignment.known = true;
4573 jump_func->alignment.align = streamer_read_uhwi (ib);
4574 jump_func->alignment.misalign = streamer_read_uhwi (ib);
4576 else
4577 jump_func->alignment.known = false;
4580 /* Stream out to OB the parts of cgraph_indirect_call_info corresponding to
4581 CS that are relevant to indirect inlining. */
4583 static void
4584 ipa_write_indirect_edge_info (struct output_block *ob,
4585 struct cgraph_edge *cs)
4587 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4588 struct bitpack_d bp;
4590 streamer_write_hwi (ob, ii->param_index);
4591 bp = bitpack_create (ob->main_stream);
4592 bp_pack_value (&bp, ii->polymorphic, 1);
4593 bp_pack_value (&bp, ii->agg_contents, 1);
4594 bp_pack_value (&bp, ii->member_ptr, 1);
4595 bp_pack_value (&bp, ii->by_ref, 1);
4596 bp_pack_value (&bp, ii->vptr_changed, 1);
4597 streamer_write_bitpack (&bp);
4598 if (ii->agg_contents || ii->polymorphic)
4599 streamer_write_hwi (ob, ii->offset);
4600 else
4601 gcc_assert (ii->offset == 0);
4603 if (ii->polymorphic)
4605 streamer_write_hwi (ob, ii->otr_token);
4606 stream_write_tree (ob, ii->otr_type, true);
4607 ii->context.stream_out (ob);
4611 /* Read in from IB the parts of cgraph_indirect_call_info corresponding to
4612 CS that are relevant to indirect inlining. */
4614 static void
4615 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4616 struct data_in *data_in,
4617 struct cgraph_edge *cs)
4619 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4620 struct bitpack_d bp;
4622 ii->param_index = (int) streamer_read_hwi (ib);
4623 bp = streamer_read_bitpack (ib);
4624 ii->polymorphic = bp_unpack_value (&bp, 1);
4625 ii->agg_contents = bp_unpack_value (&bp, 1);
4626 ii->member_ptr = bp_unpack_value (&bp, 1);
4627 ii->by_ref = bp_unpack_value (&bp, 1);
4628 ii->vptr_changed = bp_unpack_value (&bp, 1);
4629 if (ii->agg_contents || ii->polymorphic)
4630 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4631 else
4632 ii->offset = 0;
4633 if (ii->polymorphic)
4635 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4636 ii->otr_type = stream_read_tree (ib, data_in);
4637 ii->context.stream_in (ib, data_in);
4641 /* Stream out NODE info to OB. */
4643 static void
4644 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4646 int node_ref;
4647 lto_symtab_encoder_t encoder;
4648 struct ipa_node_params *info = IPA_NODE_REF (node);
4649 int j;
4650 struct cgraph_edge *e;
4651 struct bitpack_d bp;
4653 encoder = ob->decl_state->symtab_node_encoder;
4654 node_ref = lto_symtab_encoder_encode (encoder, node);
4655 streamer_write_uhwi (ob, node_ref);
4657 streamer_write_uhwi (ob, ipa_get_param_count (info));
4658 for (j = 0; j < ipa_get_param_count (info); j++)
4659 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4660 bp = bitpack_create (ob->main_stream);
4661 gcc_assert (info->analysis_done
4662 || ipa_get_param_count (info) == 0);
4663 gcc_assert (!info->node_enqueued);
4664 gcc_assert (!info->ipcp_orig_node);
4665 for (j = 0; j < ipa_get_param_count (info); j++)
4666 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4667 streamer_write_bitpack (&bp);
4668 for (j = 0; j < ipa_get_param_count (info); j++)
4669 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4670 for (e = node->callees; e; e = e->next_callee)
4672 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4674 streamer_write_uhwi (ob,
4675 ipa_get_cs_argument_count (args) * 2
4676 + (args->polymorphic_call_contexts != NULL));
4677 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4679 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4680 if (args->polymorphic_call_contexts != NULL)
4681 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4684 for (e = node->indirect_calls; e; e = e->next_callee)
4686 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4688 streamer_write_uhwi (ob,
4689 ipa_get_cs_argument_count (args) * 2
4690 + (args->polymorphic_call_contexts != NULL));
4691 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4693 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4694 if (args->polymorphic_call_contexts != NULL)
4695 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4697 ipa_write_indirect_edge_info (ob, e);
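/* A standalone sketch of the tagged-count encoding written above for each
   call site: the argument count is doubled and the low bit records whether
   polymorphic call contexts follow, so one unsigned value carries both
   facts.  The reader below decodes it with `count & 1' and `count /= 2'. */

static unsigned
encode_arg_count (unsigned nargs, int have_contexts)
{
  return nargs * 2 + (have_contexts != 0);
}

static unsigned
decode_arg_count (unsigned encoded, int *have_contexts)
{
  *have_contexts = encoded & 1;	/* low bit: contexts present */
  return encoded / 2;		/* remaining bits: the real count */
}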
4701 /* Stream in NODE info from IB. */
4703 static void
4704 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4705 struct data_in *data_in)
4707 struct ipa_node_params *info = IPA_NODE_REF (node);
4708 int k;
4709 struct cgraph_edge *e;
4710 struct bitpack_d bp;
4712 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4714 for (k = 0; k < ipa_get_param_count (info); k++)
4715 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4717 bp = streamer_read_bitpack (ib);
4718 if (ipa_get_param_count (info) != 0)
4719 info->analysis_done = true;
4720 info->node_enqueued = false;
4721 for (k = 0; k < ipa_get_param_count (info); k++)
4722 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4723 for (k = 0; k < ipa_get_param_count (info); k++)
4724 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4725 for (e = node->callees; e; e = e->next_callee)
4727 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4728 int count = streamer_read_uhwi (ib);
4729 bool contexts_computed = count & 1;
4730 count /= 2;
4732 if (!count)
4733 continue;
4734 vec_safe_grow_cleared (args->jump_functions, count);
4735 if (contexts_computed)
4736 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4738 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4740 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4741 data_in);
4742 if (contexts_computed)
4743 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4746 for (e = node->indirect_calls; e; e = e->next_callee)
4748 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4749 int count = streamer_read_uhwi (ib);
4750 bool contexts_computed = count & 1;
4751 count /= 2;
4753 if (count)
4755 vec_safe_grow_cleared (args->jump_functions, count);
4756 if (contexts_computed)
4757 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4758 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4760 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4761 data_in);
4762 if (contexts_computed)
4763 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4766 ipa_read_indirect_edge_info (ib, data_in, e);
4770 /* Write jump functions for all functions in the current partition. */
4772 void
4773 ipa_prop_write_jump_functions (void)
4775 struct cgraph_node *node;
4776 struct output_block *ob;
4777 unsigned int count = 0;
4778 lto_symtab_encoder_iterator lsei;
4779 lto_symtab_encoder_t encoder;
4781 if (!ipa_node_params_sum)
4782 return;
4784 ob = create_output_block (LTO_section_jump_functions);
4785 encoder = ob->decl_state->symtab_node_encoder;
4786 ob->symbol = NULL;
4787 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4788 lsei_next_function_in_partition (&lsei))
4790 node = lsei_cgraph_node (lsei);
4791 if (node->has_gimple_body_p ()
4792 && IPA_NODE_REF (node) != NULL)
4793 count++;
4796 streamer_write_uhwi (ob, count);
4798 /* Process all of the functions. */
4799 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4800 lsei_next_function_in_partition (&lsei))
4802 node = lsei_cgraph_node (lsei);
4803 if (node->has_gimple_body_p ()
4804 && IPA_NODE_REF (node) != NULL)
4805 ipa_write_node_info (ob, node);
4807 streamer_write_char_stream (ob->main_stream, 0);
4808 produce_asm (ob, NULL);
4809 destroy_output_block (ob);
4812 /* Read section in file FILE_DATA of length LEN with data DATA. */
4814 static void
4815 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4816 size_t len)
4818 const struct lto_function_header *header =
4819 (const struct lto_function_header *) data;
4820 const int cfg_offset = sizeof (struct lto_function_header);
4821 const int main_offset = cfg_offset + header->cfg_size;
4822 const int string_offset = main_offset + header->main_size;
4823 struct data_in *data_in;
4824 unsigned int i;
4825 unsigned int count;
4827 lto_input_block ib_main ((const char *) data + main_offset,
4828 header->main_size);
4830 data_in =
4831 lto_data_in_create (file_data, (const char *) data + string_offset,
4832 header->string_size, vNULL);
4833 count = streamer_read_uhwi (&ib_main);
4835 for (i = 0; i < count; i++)
4837 unsigned int index;
4838 struct cgraph_node *node;
4839 lto_symtab_encoder_t encoder;
4841 index = streamer_read_uhwi (&ib_main);
4842 encoder = file_data->symtab_node_encoder;
4843 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4844 index));
4845 gcc_assert (node->definition);
4846 ipa_read_node_info (&ib_main, node, data_in);
4848 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4849 len);
4850 lto_data_in_delete (data_in);
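/* A standalone sketch of the section layout decoded above: the function
   header is followed by the CFG stream, the main stream and the string
   table, so each offset is the previous offset plus the previous block's
   size.  `mini_header' is a hypothetical miniature of lto_function_header. */

struct mini_header { int cfg_size; int main_size; int string_size; };

static void
section_offsets (const struct mini_header *h,
		 int *main_offset, int *string_offset)
{
  int cfg_offset = (int) sizeof (struct mini_header);
  *main_offset = cfg_offset + h->cfg_size;
  *string_offset = *main_offset + h->main_size;
}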
4853 /* Read IPA-CP jump functions. */
4855 void
4856 ipa_prop_read_jump_functions (void)
4858 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4859 struct lto_file_decl_data *file_data;
4860 unsigned int j = 0;
4862 ipa_check_create_node_params ();
4863 ipa_check_create_edge_args ();
4864 ipa_register_cgraph_hooks ();
4866 while ((file_data = file_data_vec[j++]))
4868 size_t len;
4869 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4871 if (data)
4872 ipa_prop_read_section (file_data, data, len);
4876 /* After merging units, we can get mismatches in argument counts.
4877 Decl merging might also have rendered parameter lists obsolete.
4878 We also compute called_with_variable_arg info here. */
4880 void
4881 ipa_update_after_lto_read (void)
4883 ipa_check_create_node_params ();
4884 ipa_check_create_edge_args ();
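/* Stream out the aggregate value replacement chain and the parameter
   alignment information for NODE to OB. */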
4887 void
4888 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
4890 int node_ref;
4891 unsigned int count = 0;
4892 lto_symtab_encoder_t encoder;
4893 struct ipa_agg_replacement_value *aggvals, *av;
4895 aggvals = ipa_get_agg_replacements_for_node (node);
4896 encoder = ob->decl_state->symtab_node_encoder;
4897 node_ref = lto_symtab_encoder_encode (encoder, node);
4898 streamer_write_uhwi (ob, node_ref);
4900 for (av = aggvals; av; av = av->next)
4901 count++;
4902 streamer_write_uhwi (ob, count);
4904 for (av = aggvals; av; av = av->next)
4906 struct bitpack_d bp;
4908 streamer_write_uhwi (ob, av->offset);
4909 streamer_write_uhwi (ob, av->index);
4910 stream_write_tree (ob, av->value, true);
4912 bp = bitpack_create (ob->main_stream);
4913 bp_pack_value (&bp, av->by_ref, 1);
4914 streamer_write_bitpack (&bp);
4917 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
4918 if (ts && vec_safe_length (ts->alignments) > 0)
4920 count = ts->alignments->length ();
4922 streamer_write_uhwi (ob, count);
4923 for (unsigned i = 0; i < count; ++i)
4925 ipa_alignment *parm_al = &(*ts->alignments)[i];
4927 struct bitpack_d bp;
4928 bp = bitpack_create (ob->main_stream);
4929 bp_pack_value (&bp, parm_al->known, 1);
4930 streamer_write_bitpack (&bp);
4931 if (parm_al->known)
4933 streamer_write_uhwi (ob, parm_al->align);
4934 streamer_write_hwi_in_range (ob->main_stream, 0, parm_al->align,
4935 parm_al->misalign);
4939 else
4940 streamer_write_uhwi (ob, 0);
4943 /* Stream in the aggregate value replacement chain and alignments for NODE from IB. */
4945 static void
4946 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
4947 data_in *data_in)
4949 struct ipa_agg_replacement_value *aggvals = NULL;
4950 unsigned int count, i;
4952 count = streamer_read_uhwi (ib);
4953 for (i = 0; i < count; i++)
4955 struct ipa_agg_replacement_value *av;
4956 struct bitpack_d bp;
4958 av = ggc_alloc<ipa_agg_replacement_value> ();
4959 av->offset = streamer_read_uhwi (ib);
4960 av->index = streamer_read_uhwi (ib);
4961 av->value = stream_read_tree (ib, data_in);
4962 bp = streamer_read_bitpack (ib);
4963 av->by_ref = bp_unpack_value (&bp, 1);
4964 av->next = aggvals;
4965 aggvals = av;
4967 ipa_set_node_agg_value_chain (node, aggvals);
4969 count = streamer_read_uhwi (ib);
4970 if (count > 0)
4972 ipcp_grow_transformations_if_necessary ();
4974 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
4975 vec_safe_grow_cleared (ts->alignments, count);
4977 for (i = 0; i < count; i++)
4979 ipa_alignment *parm_al;
4980 parm_al = &(*ts->alignments)[i];
4981 struct bitpack_d bp;
4982 bp = streamer_read_bitpack (ib);
4983 parm_al->known = bp_unpack_value (&bp, 1);
4984 if (parm_al->known)
4986 parm_al->align = streamer_read_uhwi (ib);
4987 parm_al->misalign
4988 = streamer_read_hwi_in_range (ib, "ipa-prop misalign",
4989 0, parm_al->align);
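/* A standalone sketch (hypothetical types) of the chain construction above:
   each replacement read from the stream is prepended to a singly linked
   list, so the in-memory chain ends up in the reverse of the on-disk
   order. */

#include <stdlib.h>

struct mini_repl { int offset; struct mini_repl *next; };

static struct mini_repl *
read_chain (const int *offsets, unsigned count)
{
  struct mini_repl *head = NULL;
  for (unsigned i = 0; i < count; i++)
    {
      struct mini_repl *r = malloc (sizeof (struct mini_repl));
      r->offset = offsets[i];
      r->next = head;		/* prepend: reverses streamed order */
      head = r;
    }
  return head;
}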
4995 /* Write all aggregate replacements for nodes in the current partition. */
4997 void
4998 ipcp_write_transformation_summaries (void)
5000 struct cgraph_node *node;
5001 struct output_block *ob;
5002 unsigned int count = 0;
5003 lto_symtab_encoder_iterator lsei;
5004 lto_symtab_encoder_t encoder;
5006 ob = create_output_block (LTO_section_ipcp_transform);
5007 encoder = ob->decl_state->symtab_node_encoder;
5008 ob->symbol = NULL;
5009 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5010 lsei_next_function_in_partition (&lsei))
5012 node = lsei_cgraph_node (lsei);
5013 if (node->has_gimple_body_p ())
5014 count++;
5017 streamer_write_uhwi (ob, count);
5019 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5020 lsei_next_function_in_partition (&lsei))
5022 node = lsei_cgraph_node (lsei);
5023 if (node->has_gimple_body_p ())
5024 write_ipcp_transformation_info (ob, node);
5026 streamer_write_char_stream (ob->main_stream, 0);
5027 produce_asm (ob, NULL);
5028 destroy_output_block (ob);
5031 /* Read replacements section in file FILE_DATA of length LEN with data
5032 DATA. */
5034 static void
5035 read_replacements_section (struct lto_file_decl_data *file_data,
5036 const char *data,
5037 size_t len)
5039 const struct lto_function_header *header =
5040 (const struct lto_function_header *) data;
5041 const int cfg_offset = sizeof (struct lto_function_header);
5042 const int main_offset = cfg_offset + header->cfg_size;
5043 const int string_offset = main_offset + header->main_size;
5044 struct data_in *data_in;
5045 unsigned int i;
5046 unsigned int count;
5048 lto_input_block ib_main ((const char *) data + main_offset,
5049 header->main_size);
5051 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5052 header->string_size, vNULL);
5053 count = streamer_read_uhwi (&ib_main);
5055 for (i = 0; i < count; i++)
5057 unsigned int index;
5058 struct cgraph_node *node;
5059 lto_symtab_encoder_t encoder;
5061 index = streamer_read_uhwi (&ib_main);
5062 encoder = file_data->symtab_node_encoder;
5063 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5064 index));
5065 gcc_assert (node->definition);
5066 read_ipcp_transformation_info (&ib_main, node, data_in);
5068 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
5069 len);
5070 lto_data_in_delete (data_in);
5073 /* Read IPA-CP aggregate replacements. */
5075 void
5076 ipcp_read_transformation_summaries (void)
5078 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5079 struct lto_file_decl_data *file_data;
5080 unsigned int j = 0;
5082 while ((file_data = file_data_vec[j++]))
5084 size_t len;
5085 const char *data = lto_get_section_data (file_data,
5086 LTO_section_ipcp_transform,
5087 NULL, &len);
5088 if (data)
5089 read_replacements_section (file_data, data, len);
5093 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5094 NODE. */
5096 static void
5097 adjust_agg_replacement_values (struct cgraph_node *node,
5098 struct ipa_agg_replacement_value *aggval)
5100 struct ipa_agg_replacement_value *v;
5101 int i, c = 0, d = 0, *adj;
5103 if (!node->clone.combined_args_to_skip)
5104 return;
5106 for (v = aggval; v; v = v->next)
5108 gcc_assert (v->index >= 0);
5109 if (c < v->index)
5110 c = v->index;
5112 c++;
5114 adj = XALLOCAVEC (int, c);
5115 for (i = 0; i < c; i++)
5116 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5118 adj[i] = -1;
5119 d++;
5121 else
5122 adj[i] = i - d;
5124 for (v = aggval; v; v = v->next)
5125 v->index = adj[v->index];
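/* A standalone sketch of the index remapping computed above: ADJ records,
   for every original index, how many skipped parameters precede it, so
   surviving indices shift down and skipped ones map to -1.  A plain 0/1
   array stands in for the combined_args_to_skip bitmap. */

static void
remap_indices (const int *skipped, int c, int *adj)
{
  int d = 0;			/* skipped parameters seen so far */
  for (int i = 0; i < c; i++)
    {
      if (skipped[i])
	{
	  adj[i] = -1;		/* values for this parameter are dropped */
	  d++;
	}
      else
	adj[i] = i - d;		/* shift left past the removed slots */
    }
}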
5128 /* Dominator walker driving the ipcp modification phase. */
5130 class ipcp_modif_dom_walker : public dom_walker
5132 public:
5133 ipcp_modif_dom_walker (struct func_body_info *fbi,
5134 vec<ipa_param_descriptor> descs,
5135 struct ipa_agg_replacement_value *av,
5136 bool *sc, bool *cc)
5137 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5138 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5140 virtual void before_dom_children (basic_block);
5142 private:
5143 struct func_body_info *m_fbi;
5144 vec<ipa_param_descriptor> m_descriptors;
5145 struct ipa_agg_replacement_value *m_aggval;
5146 bool *m_something_changed, *m_cfg_changed;
5149 void
5150 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5152 gimple_stmt_iterator gsi;
5153 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5155 struct ipa_agg_replacement_value *v;
5156 gimple stmt = gsi_stmt (gsi);
5157 tree rhs, val, t;
5158 HOST_WIDE_INT offset, size;
5159 int index;
5160 bool by_ref, vce;
5162 if (!gimple_assign_load_p (stmt))
5163 continue;
5164 rhs = gimple_assign_rhs1 (stmt);
5165 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5166 continue;
5168 vce = false;
5169 t = rhs;
5170 while (handled_component_p (t))
5172 /* V_C_E can do things like convert an array of integers to one
5173 bigger integer and similar things that we do not handle below. */
5174 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5176 vce = true;
5177 break;
5179 t = TREE_OPERAND (t, 0);
5181 if (vce)
5182 continue;
5184 if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index,
5185 &offset, &size, &by_ref))
5186 continue;
5187 for (v = m_aggval; v; v = v->next)
5188 if (v->index == index
5189 && v->offset == offset)
5190 break;
5191 if (!v
5192 || v->by_ref != by_ref
5193 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5194 continue;
5196 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5197 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5199 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5200 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5201 else if (TYPE_SIZE (TREE_TYPE (rhs))
5202 == TYPE_SIZE (TREE_TYPE (v->value)))
5203 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5204 else
5206 if (dump_file)
5208 fprintf (dump_file, " const ");
5209 print_generic_expr (dump_file, v->value, 0);
5210 fprintf (dump_file, " can't be converted to type of ");
5211 print_generic_expr (dump_file, rhs, 0);
5212 fprintf (dump_file, "\n");
5214 continue;
5217 else
5218 val = v->value;
5220 if (dump_file && (dump_flags & TDF_DETAILS))
5222 fprintf (dump_file, "Modifying stmt:\n ");
5223 print_gimple_stmt (dump_file, stmt, 0, 0);
5225 gimple_assign_set_rhs_from_tree (&gsi, val);
5226 update_stmt (stmt);
5228 if (dump_file && (dump_flags & TDF_DETAILS))
5230 fprintf (dump_file, "into:\n ");
5231 print_gimple_stmt (dump_file, stmt, 0, 0);
5232 fprintf (dump_file, "\n");
5235 *m_something_changed = true;
5236 if (maybe_clean_eh_stmt (stmt)
5237 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5238 *m_cfg_changed = true;
5243 /* Update alignment of formal parameters as described in
5244 ipcp_transformation_summary. */
5246 static void
5247 ipcp_update_alignments (struct cgraph_node *node)
5249 tree fndecl = node->decl;
5250 tree parm = DECL_ARGUMENTS (fndecl);
5251 tree next_parm = parm;
5252 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5253 if (!ts || vec_safe_length (ts->alignments) == 0)
5254 return;
5255 const vec<ipa_alignment, va_gc> &alignments = *ts->alignments;
5256 unsigned count = alignments.length ();
5258 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5260 if (node->clone.combined_args_to_skip
5261 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5262 continue;
5263 gcc_checking_assert (parm);
5264 next_parm = DECL_CHAIN (parm);
5266 if (!alignments[i].known || !is_gimple_reg (parm))
5267 continue;
5268 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5269 if (!ddef)
5270 continue;
5272 if (dump_file)
5273 fprintf (dump_file, " Adjusting alignment of param %u to %u, "
5274 "misalignment to %u\n", i, alignments[i].align,
5275 alignments[i].misalign);
5277 struct ptr_info_def *pi = get_ptr_info (ddef);
5278 gcc_checking_assert (pi);
5279 unsigned old_align;
5280 unsigned old_misalign;
5281 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5283 if (old_known
5284 && old_align >= alignments[i].align)
5286 if (dump_file)
5287 fprintf (dump_file, " But the alignment was already %u.\n",
5288 old_align);
5289 continue;
5291 set_ptr_info_alignment (pi, alignments[i].align, alignments[i].misalign);
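/* A standalone sketch of the monotonicity rule applied above: a propagated
   alignment only replaces what points-to analysis already recorded when it
   is strictly stronger than the old one. */

static int
should_update_alignment (int old_known, unsigned old_align, unsigned new_align)
{
  return !(old_known && old_align >= new_align);
}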
5295 /* IPCP transformation phase doing propagation of aggregate values. */
5297 unsigned int
5298 ipcp_transform_function (struct cgraph_node *node)
5300 vec<ipa_param_descriptor> descriptors = vNULL;
5301 struct func_body_info fbi;
5302 struct ipa_agg_replacement_value *aggval;
5303 int param_count;
5304 bool cfg_changed = false, something_changed = false;
5306 gcc_checking_assert (cfun);
5307 gcc_checking_assert (current_function_decl);
5309 if (dump_file)
5310 fprintf (dump_file, "Modification phase of node %s/%i\n",
5311 node->name (), node->order);
5313 ipcp_update_alignments (node);
5314 aggval = ipa_get_agg_replacements_for_node (node);
5315 if (!aggval)
5316 return 0;
5317 param_count = count_formal_params (node->decl);
5318 if (param_count == 0)
5319 return 0;
5320 adjust_agg_replacement_values (node, aggval);
5321 if (dump_file)
5322 ipa_dump_agg_replacement_values (dump_file, aggval);
5324 fbi.node = node;
5325 fbi.info = NULL;
5326 fbi.bb_infos = vNULL;
5327 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5328 fbi.param_count = param_count;
5329 fbi.aa_walked = 0;
5331 descriptors.safe_grow_cleared (param_count);
5332 ipa_populate_param_decls (node, descriptors);
5333 calculate_dominance_info (CDI_DOMINATORS);
5334 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5335 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5337 int i;
5338 struct ipa_bb_info *bi;
5339 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5340 free_ipa_bb_info (bi);
5341 fbi.bb_infos.release ();
5342 free_dominance_info (CDI_DOMINATORS);
5343 (*ipcp_transformations)[node->uid].agg_values = NULL;
5344 (*ipcp_transformations)[node->uid].alignments = NULL;
5345 descriptors.release ();
5347 if (!something_changed)
5348 return 0;
5349 else if (cfg_changed)
5350 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5351 else
5352 return TODO_update_ssa_only_virtuals;