/* Interprocedural analyses.
   Copyright (C) 2005-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "expr.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "langhooks.h"
#include "target.h"
#include "ipa-prop.h"
#include "bitmap.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "params.h"
#include "ipa-utils.h"
/* Intermediate information about a parameter that is only useful during the
   run of ipa_analyze_node and is not kept afterwards.  */

struct param_analysis_info
{
  bool parm_modified, ref_modified, pt_modified;
  bitmap parm_visited_statements, pt_visited_statements;
};
/* Vector where the parameter infos are actually stored.  */
vec<ipa_node_params> ipa_node_params_vector;
/* Vector of known aggregate values in cloned nodes.  */
vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
/* Vector where the edge argument infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;
/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static alloc_pool ipa_refdesc_pool;
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
  struct cl_optimization *os;

  if (!fs_opts)
    return false;
  os = TREE_OPTIMIZATION (fs_opts);
  return !os->x_optimize || !os->x_flag_ipa_cp;
}
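
/* As an illustration (hypothetical user code, not part of this file), a
   function declared as

     __attribute__ ((optimize ("O0"))) void quux (void);

   carries DECL_FUNCTION_SPECIFIC_OPTIMIZATION with x_optimize equal to zero,
   so the predicate above returns true and IPA-CP skips it.  */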
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
                          vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm));
      param_num++;
    }
}
/* Return how many formal parameters FNDECL has.  */

static inline int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}
/* Dump the Ith formal parameter of the function corresponding to INFO to
   FILE.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}
/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}
/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
        fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_KNOWN_TYPE)
        {
          fprintf (f, "KNOWN TYPE: base ");
          print_generic_expr (f, jump_func->value.known_type.base_type, 0);
          fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
                   jump_func->value.known_type.offset);
          print_generic_expr (f, jump_func->value.known_type.component_type, 0);
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_CONST)
        {
          tree val = jump_func->value.constant.value;
          fprintf (f, "CONST: ");
          print_generic_expr (f, val, 0);
          if (TREE_CODE (val) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
            {
              fprintf (f, " -> ");
              print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
                                  0);
            }
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_PASS_THROUGH)
        {
          fprintf (f, "PASS THROUGH: ");
          fprintf (f, "%d, op %s",
                   jump_func->value.pass_through.formal_id,
                   get_tree_code_name (jump_func->value.pass_through.operation));
          if (jump_func->value.pass_through.operation != NOP_EXPR)
            {
              fprintf (f, " ");
              print_generic_expr (f,
                                  jump_func->value.pass_through.operand, 0);
            }
          if (jump_func->value.pass_through.agg_preserved)
            fprintf (f, ", agg_preserved");
          if (jump_func->value.pass_through.type_preserved)
            fprintf (f, ", type_preserved");
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_ANCESTOR)
        {
          fprintf (f, "ANCESTOR: ");
          fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
                   jump_func->value.ancestor.formal_id,
                   jump_func->value.ancestor.offset);
          print_generic_expr (f, jump_func->value.ancestor.type, 0);
          if (jump_func->value.ancestor.agg_preserved)
            fprintf (f, ", agg_preserved");
          if (jump_func->value.ancestor.type_preserved)
            fprintf (f, ", type_preserved");
          fprintf (f, "\n");
        }

      if (jump_func->agg.items)
        {
          struct ipa_agg_jf_item *item;
          int j;

          fprintf (f, "         Aggregate passed by %s:\n",
                   jump_func->agg.by_ref ? "reference" : "value");
          FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
            {
              fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
                       item->offset);
              if (TYPE_P (item->value))
                fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
                         tree_to_uhwi (TYPE_SIZE (item->value)));
              else
                {
                  fprintf (f, "cst: ");
                  print_generic_expr (f, item->value, 0);
                }
              fprintf (f, "\n");
            }
        }
    }
}
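
/* A hypothetical fragment of the dump produced by the function above (the
   exact values depend on the program being compiled):

       param 0: PASS THROUGH: 0, op nop_expr, agg_preserved
       param 1: CONST: 4
         Aggregate passed by value:
           offset: 0, cst: 1  */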
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s/%i:\n", node->name (),
           node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
               xstrdup (node->name ()), node->order,
               xstrdup (cs->callee->name ()),
               cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
        fprintf (f, "    indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
                 ii->member_ptr ? "member ptr" : "aggregate",
                 ii->param_index, ii->offset,
                 ii->by_ref ? "by reference" : "by value");
      else
        fprintf (f, "    indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC,
                 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
                 ii->offset);

      if (cs->call_stmt)
        {
          fprintf (f, ", for stmt ");
          print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
        }
      else
        fprintf (f, "\n");
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}
/* Set JFUNC to be a known type jump function.  */

static void
ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                       tree base_type, tree component_type)
{
  gcc_assert (TREE_CODE (component_type) == RECORD_TYPE
              && TYPE_BINFO (component_type));
  jfunc->type = IPA_JF_KNOWN_TYPE;
  jfunc->value.known_type.offset = offset;
  jfunc->value.known_type.base_type = base_type;
  jfunc->value.known_type.component_type = component_type;
  gcc_assert (component_type);
}
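
/* A minimal hypothetical example: for a call such as

     struct A { virtual void f (); } a;
     g (&a);

   the argument can be described by a known type jump function with offset 0
   and both base type and component type A.  */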
/* Set DST to be a copy of another jump function SRC (to be used by jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
                     struct ipa_jump_func *src)

{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}
/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
                     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;
      if (!ipa_refdesc_pool)
        ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
                                        sizeof (struct ipa_cst_ref_desc), 32);

      rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
/* Set JFUNC to be a simple pass-through jump function.  */
static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                                bool agg_preserved, bool type_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
  jfunc->value.pass_through.type_preserved = type_preserved;
}
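
/* For instance (hypothetical source), in

     void foo (int a)
     {
       bar (a);
     }

   the argument of the call to bar is a simple pass-through of formal
   parameter 0 of foo with operation NOP_EXPR.  */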
/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                               tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
  jfunc->value.pass_through.type_preserved = false;
}
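
/* Similarly (hypothetical source), in

     void foo (int a)
     {
       bar (a + 4);
     }

   the argument is an arithmetic pass-through of parameter 0 with operation
   PLUS_EXPR and operand 4.  */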
/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                     tree type, int formal_id, bool agg_preserved,
                     bool type_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.type = type;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
  jfunc->value.ancestor.type_preserved = type_preserved;
}
/* Extract the actual BINFO being described by JFUNC which must be a known type
   jump function.  */

tree
ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
{
  tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
  if (!base_binfo)
    return NULL_TREE;
  return get_binfo_at_offset (base_binfo,
                              jfunc->value.known_type.offset,
                              jfunc->value.known_type.component_type);
}
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructor of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no other way to call a constructor of an ancestor sub-object.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */
static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  else if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
        {
          if (flag_strict_aliasing
              && !POINTER_TYPE_P (TREE_TYPE (lhs)))
            return false;

          if (TREE_CODE (lhs) == COMPONENT_REF
              && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
            return false;
          /* In the future we might want to use get_base_ref_and_offset to find
             if there is a field corresponding to the offset and if so, proceed
             almost like if it was a component ref.  */
        }
    }
  return true;
}
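
/* For illustration, the store that section 2 of the comment above refers to
   typically appears in the gimple of a C++ constructor as something like
   (hypothetical dump)

     this_1(D)->_vptr.A = &_ZTV1A[2];

   which is exactly the kind of statement the predicate above must not rule
   out.  */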
/* If STMT can be proved to be an assignment to the virtual method table
   pointer of the object described by TCI and the type associated with the new
   table identified, return the type.  Otherwise return NULL_TREE.  */

static tree
extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
{
  HOST_WIDE_INT offset, size, max_size;
  tree lhs, rhs, base, binfo;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
    return NULL_TREE;

  base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
  if (offset != tci->offset
      || size != POINTER_SIZE
      || max_size != POINTER_SIZE)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (TREE_CODE (tci->object) != MEM_REF
          || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
          || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
                                  TREE_OPERAND (base, 1)))
        return NULL_TREE;
    }
  else if (tci->object != base)
    return NULL_TREE;

  binfo = vtable_pointer_value_to_binfo (rhs);

  /* FIXME: vtable_pointer_value_to_binfo may return BINFO of a
     base of outer type.  In this case we would need to either
     work on binfos or translate it back to outer type and offset.
     KNOWN_TYPE jump functions are not ready for that, yet.  */
  if (!binfo || TYPE_BINFO (BINFO_TYPE (binfo)) != binfo)
    return NULL_TREE;

  return BINFO_TYPE (binfo);
}
/* Callback of walk_aliased_vdefs and a helper function for
   detect_type_change to check whether a particular statement may modify
   the virtual table pointer, and if possible also determine the new type of
   the (sub-)object.  It stores its result into DATA, which points to a
   type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct type_change_info *tci = (struct type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tree type;
      type = extr_type_from_vtbl_ptr_store (stmt, tci);
      if (tci->type_maybe_changed
          && type != tci->known_current_type)
        tci->multiple_types_encountered = true;
      tci->known_current_type = type;
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it has, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gimple call,
                    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  struct type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
                       || TREE_CODE (arg) == MEM_REF
                       || handled_component_p (arg));
  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (comp_type)
      || !BINFO_VTABLE (TYPE_BINFO (comp_type)))
    return false;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.known_current_type = NULL_TREE;
  tci.type_maybe_changed = false;
  tci.multiple_types_encountered = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
                      &tci, NULL);
  if (!tci.type_maybe_changed)
    return false;

  if (!tci.known_current_type
      || tci.multiple_types_encountered
      || offset != 0)
    jfunc->type = IPA_JF_UNKNOWN;
  else
    ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);

  return true;
}
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
                        gimple call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
                build_int_cst (ptr_type_node, 0));

  return detect_type_change (arg, arg, comp_type, call, jfunc, 0);
}
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
               void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  PARM_AINFO is a pointer to a structure containing temporary
   information about the parameter.  */

static bool
parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
                              gimple stmt, tree parm_load)
{
  bool modified = false;
  bitmap *visited_stmts;
  ao_ref refd;

  if (parm_ainfo && parm_ainfo->parm_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  /* We can cache visited statements only when parm_ainfo is available and when
     we are looking at a naked load of the whole parameter.  */
  if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
    visited_stmts = NULL;
  else
    visited_stmts = &parm_ainfo->parm_visited_statements;
  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
                      visited_stmts);
  if (parm_ainfo && modified)
    parm_ainfo->parm_modified = true;
  return !modified;
}
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params, provided the value has
   not been modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (vec<ipa_param_descriptor> descriptors,
                            struct param_analysis_info *parms_ainfo,
                            gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
                                        : NULL, stmt, op1))
    return -1;

  return index;
}
/* Return true if memory reference REF loads data that are known to be
   unmodified in this function before reaching statement STMT.  PARM_AINFO, if
   non-NULL, is a pointer to a structure containing temporary information about
   PARM.  */

static bool
parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
                           gimple stmt, tree ref)
{
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (gimple_vuse (stmt));
  if (parm_ainfo && parm_ainfo->ref_modified)
    return false;

  ao_ref_init (&refd, ref);
  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
                      NULL);
  if (parm_ainfo && modified)
    parm_ainfo->ref_modified = true;
  return !modified;
}
/* Return true if the data pointed to by PARM is known to be unmodified in this
   function before reaching call statement CALL into which it is passed.
   PARM_AINFO is a pointer to a structure containing temporary information
   about PARM.  */

static bool
parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
                              gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm)))
    return false;

  if (parm_ainfo->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
                      parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
  if (modified)
    parm_ainfo->pt_modified = true;
  return !modified;
}
/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

static bool
ipa_load_from_parm_agg_1 (vec<ipa_param_descriptor> descriptors,
                          struct param_analysis_info *parms_ainfo, gimple stmt,
                          tree op, int *index_p, HOST_WIDE_INT *offset_p,
                          HOST_WIDE_INT *size_p, bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
          && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
                                           : NULL, stmt, op))
        {
          *index_p = index;
          *by_ref_p = false;
          if (size_p)
            *size_p = size;
          return true;
        }
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
         gimple register, for example:

         void hip7(S*) (struct S * p)
         {
           void (*<T2e4>) (struct S *) D.1867;
           struct S * p.1;

         <bb 2>:
           p.1_1 = p;
           D.1867_2 = p.1_1->f;
           D.1867_2 ();
           gdp = &p;
         */

      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (descriptors, parms_ainfo, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
                                    stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      if (size_p)
        *size_p = size;
      return true;
    }
  return false;
}
/* Like the previous function, but without the param_analysis_info pointer, for
   users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
                        tree op, int *index_p, HOST_WIDE_INT *offset_p,
                        bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (info->descriptors, NULL, stmt, op, index_p,
                                   offset_p, NULL, by_ref_p);
}
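
/* For example (hypothetical gimple), given a pointer parameter p, the load

     tmp_3 = p_1(D)->field;

   satisfies the conditions above when nothing can modify *p before the
   statement, yielding the index of p, the bit offset of the field and
   *BY_REF_P set to true.  */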
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

     foo (int a)
     {
       int a.0;

       a.0_2 = a;
       bar (a.0_2);
     }

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

     foo (int a)
     {
       int D.2064;

       D.2064_4 = a.1(D) + 4;
       bar (D.2064_4);
     }

   This case can also occur in combination with the previous one, e.g.:

     foo (int a, int z)
     {
       int a.0;
       int D.2064;

       a.0_3 = a;
       D.2064_4 = a.0_3 + 4;
       foo (D.2064_4);
     }

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);
     }

   INFO is the structure describing individual parameters across different
   stages of IPA optimizations.  PARMS_AINFO contains the information that is
   only needed for intraprocedural analysis.  */
static void
compute_complex_assign_jump_func (struct ipa_node_params *info,
                                  struct param_analysis_info *parms_ainfo,
                                  struct ipa_jump_func *jfunc,
                                  gimple call, gimple stmt, tree name,
                                  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
        index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
        index = load_from_unmodified_param (info->descriptors, parms_ainfo,
                                            SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (info->descriptors, parms_ainfo, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
        {
          if (!is_gimple_ip_invariant (op2)
              || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
                  && !useless_type_conversion_p (TREE_TYPE (name),
                                                 TREE_TYPE (op1))))
            return;

          ipa_set_jf_arith_pass_through (jfunc, index, op2,
                                         gimple_assign_rhs_code (stmt));
        }
      else if (gimple_assign_single_p (stmt))
        {
          bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                     call, tc_ssa);
          bool type_p = false;

          if (param_type && POINTER_TYPE_P (param_type))
            type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
                                              call, jfunc);
          if (type_p || jfunc->type == IPA_JF_UNKNOWN)
            ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
        }
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).low * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    {
      bool type_p = !detect_type_change (op1, base, TREE_TYPE (param_type),
                                         call, jfunc, offset);
      if (type_p || jfunc->type == IPA_JF_UNKNOWN)
        ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index,
                             parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                           call, ssa), type_p);
    }
}
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

     iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_node_params *info,
                                    struct param_analysis_info *parms_ainfo,
                                    struct ipa_jump_func *jfunc,
                                    gimple call, gimple phi, tree param_type)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  gcc_assert (index >= 0);

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
        return;
    }

  bool type_p = false;
  if (param_type && POINTER_TYPE_P (param_type))
    type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
                                  call, jfunc, offset);
  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
    ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index,
                         parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                       call, parm), type_p);
}
/* Given OP which is passed as an actual argument to a called function,
   determine if it is possible to construct a KNOWN_TYPE jump function for it
   and if so, create one and store it to JFUNC.
   EXPECTED_TYPE represents a type the argument should be in.  */

static void
compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
                              gimple call, tree expected_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  if (!flag_devirtualize
      || TREE_CODE (op) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE
      /* Be sure expected_type is polymorphic.  */
      || !expected_type
      || TREE_CODE (expected_type) != RECORD_TYPE
      || !TYPE_BINFO (expected_type)
      || !BINFO_VTABLE (TYPE_BINFO (expected_type)))
    return;

  op = TREE_OPERAND (op, 0);
  base = get_ref_base_and_extent (op, &offset, &size, &max_size);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
      || is_global_var (base))
    return;

  if (detect_type_change (op, base, expected_type, call, jfunc, offset))
    return;

  ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
                         expected_type);
}
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
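
/* The C++ member pointer type that the predicate above matches has, roughly,
   the following layout (hypothetical illustration):

     struct
     {
       int (T::*__pfn) (int);   // method pointer or vtable index
       ptrdiff_t __delta;       // this-pointer adjustment
     };  */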
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
        rhs = gimple_assign_rhs1 (def_stmt);
      else
        break;
    }
  return rhs;
}
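
/* E.g. (hypothetical SSA names), given the copy chain

     b_2 = a_1;
     c_3 = b_2;

   get_ssa_def_if_simple_copy (c_3) returns a_1.  */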
/* Simple linked list, describing known contents of an aggregate before a
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents are known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */

static void
determine_known_aggregate_parts (gimple call, tree arg, tree arg_type,
                                 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
        {
          tree type_size;
          if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
            return;
          check_ref = true;
          arg_base = arg;
          arg_offset = 0;
          type_size = TYPE_SIZE (TREE_TYPE (arg_type));
          arg_size = tree_to_uhwi (type_size);
          ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
        }
      else if (TREE_CODE (arg) == ADDR_EXPR)
        {
          HOST_WIDE_INT arg_max_size;

          arg = TREE_OPERAND (arg, 0);
          arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
                                              &arg_max_size);
          if (arg_max_size == -1
              || arg_max_size != arg_size
              || arg_offset < 0)
            return;
          if (DECL_P (arg_base))
            {
              tree size;
              check_ref = false;
              size = build_int_cst (integer_type_node, arg_size);
              ao_ref_init_from_ptr_and_size (&r, arg_base, size);
            }
          else
            return;
        }
      else
        return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
                                          &arg_max_size);
      if (arg_max_size == -1
          || arg_max_size != arg_size
          || arg_offset < 0)
        return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
     structures describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;
      bool partial_overlap;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
        continue;
      if (!gimple_assign_single_p (stmt))
        break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
          || TREE_CODE (lhs) == BIT_FIELD_REF
          || contains_bitfld_component_ref_p (lhs))
        break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
                                          &lhs_max_size);
      if (lhs_max_size == -1
          || lhs_max_size != lhs_size
          || (lhs_offset < arg_offset
              && lhs_offset + lhs_size > arg_offset)
          || (lhs_offset < arg_offset + arg_size
              && lhs_offset + lhs_size > arg_offset + arg_size))
        break;

      if (check_ref)
        {
          if (TREE_CODE (lhs_base) != MEM_REF
              || TREE_OPERAND (lhs_base, 0) != arg_base
              || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
            break;
        }
      else if (lhs_base != arg_base)
        {
          if (DECL_P (lhs_base))
            continue;
          else
            break;
        }

      if (lhs_offset + lhs_size < arg_offset
          || lhs_offset >= (arg_offset + arg_size))
        continue;

      partial_overlap = false;
      p = &list;
      while (*p && (*p)->offset < lhs_offset)
        {
          if ((*p)->offset + (*p)->size > lhs_offset)
            {
              partial_overlap = true;
              break;
            }
          p = &(*p)->next;
        }
      if (partial_overlap)
        break;
      if (*p && (*p)->offset < lhs_offset + lhs_size)
        {
          if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
            /* We already know this value is subsequently overwritten with
               something else.  */
            continue;
          else
            /* Otherwise this is a partial overlap which we cannot
               represent.  */
            break;
        }

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
        {
          n->constant = rhs;
          const_count++;
        }
      else
        n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
          || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
        break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      vec_alloc (jfunc->agg.items, const_count);
      while (list)
        {
          if (list->constant)
            {
              struct ipa_agg_jf_item item;
              item.offset = list->offset - arg_offset;
              gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
              item.value = unshare_expr_without_location (list->constant);
              jfunc->agg.items->quick_push (item);
            }
          list = list->next;
        }
    }
}
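
/* A hypothetical example of what the second stage recognizes:

     s.a = 1;
     s.b = 2;
     foo (&s);

   results in an aggregate jump function with two constant items whose offsets
   correspond to fields a and b and whose values are 1 and 2.  */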
/* Return the declared type of the Ith formal parameter of the function
   associated with call graph edge E, or NULL if it cannot be determined.  */

static tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
               ? TREE_TYPE (e->callee->decl)
               : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
        break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
        return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
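
/* E.g. for a callee declared (hypothetically) as

     int f (int a, ...);

   i == 0 yields int from TYPE_ARG_TYPES, while i == 1 exhausts both the
   TYPE_ARG_TYPES and DECL_ARGUMENTS chains and yields NULL.  */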
/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
                                     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gimple call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);

      if (is_gimple_ip_invariant (arg))
        ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
               && TREE_CODE (arg) == PARM_DECL)
        {
          int index = ipa_get_param_decl_index (info, arg);

          gcc_assert (index >= 0);
          /* Aggregate passed by value, check for pass-through, otherwise we
             will attempt to fill in aggregate contents later in this
             for cycle.  */
          if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
            {
              ipa_set_jf_simple_pass_through (jfunc, index, false, false);
              continue;
            }
        }
      else if (TREE_CODE (arg) == SSA_NAME)
        {
          if (SSA_NAME_IS_DEFAULT_DEF (arg))
            {
              int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
              if (index >= 0)
                {
                  bool agg_p, type_p;
                  agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                        call, arg);
                  if (param_type && POINTER_TYPE_P (param_type))
                    type_p = !detect_type_change_ssa (arg, TREE_TYPE (param_type),
                                                      call, jfunc);
                  else
                    type_p = false;
                  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
                    ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
                                                    type_p);
                }
            }
          else
            {
              gimple stmt = SSA_NAME_DEF_STMT (arg);
              if (is_gimple_assign (stmt))
                compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
                                                  call, stmt, arg, param_type);
              else if (gimple_code (stmt) == GIMPLE_PHI)
                compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
                                                    call, stmt, param_type);
            }
        }
      else
        compute_known_type_jump_func (arg, jfunc, call,
                                      param_type
                                      && POINTER_TYPE_P (param_type)
                                      ? TREE_TYPE (param_type)
                                      : NULL);

      /* If ARG is a pointer, we cannot use its type to determine the type of
         aggregate passed (because type conversions are ignored in gimple).
         Usually we can safely get the type from the function declaration, but
         in case of K&R prototypes or variadic functions we can try our luck
         with the type of the pointer passed.
         TODO: Since we look for actual initialization of the memory object, we
         might do better to work out the type based on the memory stores we
         find.  */
      if (!param_type)
        param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
           || !ipa_get_jf_pass_through_agg_preserved (jfunc))
          && (jfunc->type != IPA_JF_ANCESTOR
              || !ipa_get_jf_ancestor_agg_preserved (jfunc))
          && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
              || POINTER_TYPE_P (param_type)))
        determine_known_aggregate_parts (call, arg, param_type, jfunc);
    }
}
/* Compute jump functions for all edges - both direct and indirect - outgoing
   from NODE.  Also count the actual arguments in the process.  */

static void
ipa_compute_jump_functions (struct cgraph_node *node,
                            struct param_analysis_info *parms_ainfo)
{
  struct cgraph_edge *cs;

  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
                                                                  NULL);
      /* We do not need to bother analyzing calls to unknown
         functions unless they may become known during lto/whopr.  */
      if (!callee->definition && !flag_lto)
        continue;
      ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
}
/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
                                    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
        return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}
/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}
/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
{
  struct cgraph_edge *cs;

  cs = cgraph_edge (node, stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  return cs;
}
1773 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1774 (described by INFO). PARMS_AINFO is a pointer to a vector containing
1775 intermediate information about each formal parameter. Currently it checks
1776 whether the call calls a pointer that is a formal parameter and if so, the
1777 parameter is marked with the called flag and an indirect call graph edge
1778 describing the call is created. This is very simple for ordinary pointers
1779 represented in SSA but not-so-nice when it comes to member pointers. The
1780 ugly part of this function does nothing more than trying to match the
1781 pattern of such a call. An example of such a pattern is the gimple dump
1782 below, the call is on the last line:
1784 <bb 2>:
1785 f$__delta_5 = f.__delta;
1786 f$__pfn_24 = f.__pfn;
1789 <bb 2>:
1790 f$__delta_5 = MEM[(struct *)&f];
1791 f$__pfn_24 = MEM[(struct *)&f + 4B];
1793 and a few lines below:
1795 <bb 5>
1796 D.2496_3 = (int) f$__pfn_24;
1797 D.2497_4 = D.2496_3 & 1;
1798 if (D.2497_4 != 0)
1799 goto <bb 3>;
1800 else
1801 goto <bb 4>;
1803 <bb 6>:
1804 D.2500_7 = (unsigned int) f$__delta_5;
1805 D.2501_8 = &S + D.2500_7;
1806 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1807 D.2503_10 = *D.2502_9;
1808 D.2504_12 = f$__pfn_24 + -1;
1809 D.2505_13 = (unsigned int) D.2504_12;
1810 D.2506_14 = D.2503_10 + D.2505_13;
1811 D.2507_15 = *D.2506_14;
1812 iftmp.11_16 = (String:: *) D.2507_15;
1814 <bb 7>:
1815 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1816 D.2500_19 = (unsigned int) f$__delta_5;
1817 D.2508_20 = &S + D.2500_19;
1818 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1820 Such patterns are results of simple calls to a member pointer:
1822 int doprinting (int (MyString::* f)(int) const)
1824 MyString S ("somestring");
1826 return (S.*f)(4);
1829 Moreover, the function also looks for called pointers loaded from aggregates
1830 passed by value or reference. */
1832 static void
1833 ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1834 struct ipa_node_params *info,
1835 struct param_analysis_info *parms_ainfo,
1836 gimple call, tree target)
1838 gimple def;
1839 tree n1, n2;
1840 gimple d1, d2;
1841 tree rec, rec2, cond;
1842 gimple branch;
1843 int index;
1844 basic_block bb, virt_bb, join;
1845 HOST_WIDE_INT offset;
1846 bool by_ref;
1848 if (SSA_NAME_IS_DEFAULT_DEF (target))
1850 tree var = SSA_NAME_VAR (target);
1851 index = ipa_get_param_decl_index (info, var);
1852 if (index >= 0)
1853 ipa_note_param_call (node, index, call);
1854 return;
1857 def = SSA_NAME_DEF_STMT (target);
1858 if (gimple_assign_single_p (def)
1859 && ipa_load_from_parm_agg_1 (info->descriptors, parms_ainfo, def,
1860 gimple_assign_rhs1 (def), &index, &offset,
1861 NULL, &by_ref))
1863 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1864 if (cs->indirect_info->offset != offset)
1865 cs->indirect_info->outer_type = NULL;
1866 cs->indirect_info->offset = offset;
1867 cs->indirect_info->agg_contents = 1;
1868 cs->indirect_info->by_ref = by_ref;
1869 return;
1872 /* Now we need to try to match the complex pattern of calling a member
1873 pointer. */
1874 if (gimple_code (def) != GIMPLE_PHI
1875 || gimple_phi_num_args (def) != 2
1876 || !POINTER_TYPE_P (TREE_TYPE (target))
1877 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1878 return;
1880 /* First, we need to check whether one of these is a load from a member
1881 pointer that is a parameter to this function. */
1882 n1 = PHI_ARG_DEF (def, 0);
1883 n2 = PHI_ARG_DEF (def, 1);
1884 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1885 return;
1886 d1 = SSA_NAME_DEF_STMT (n1);
1887 d2 = SSA_NAME_DEF_STMT (n2);
1889 join = gimple_bb (def);
1890 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
1892 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
1893 return;
1895 bb = EDGE_PRED (join, 0)->src;
1896 virt_bb = gimple_bb (d2);
1898 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
1900 bb = EDGE_PRED (join, 1)->src;
1901 virt_bb = gimple_bb (d1);
1903 else
1904 return;
1906 /* Second, we need to check that the basic blocks are laid out in the way
1907 corresponding to the pattern. */
1909 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1910 || single_pred (virt_bb) != bb
1911 || single_succ (virt_bb) != join)
1912 return;
1914 /* Third, let's see that the branching is done depending on the least
1915 significant bit of the pfn. */
1917 branch = last_stmt (bb);
1918 if (!branch || gimple_code (branch) != GIMPLE_COND)
1919 return;
1921 if ((gimple_cond_code (branch) != NE_EXPR
1922 && gimple_cond_code (branch) != EQ_EXPR)
1923 || !integer_zerop (gimple_cond_rhs (branch)))
1924 return;
1926 cond = gimple_cond_lhs (branch);
1927 if (!ipa_is_ssa_with_stmt_def (cond))
1928 return;
1930 def = SSA_NAME_DEF_STMT (cond);
1931 if (!is_gimple_assign (def)
1932 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1933 || !integer_onep (gimple_assign_rhs2 (def)))
1934 return;
1936 cond = gimple_assign_rhs1 (def);
1937 if (!ipa_is_ssa_with_stmt_def (cond))
1938 return;
1940 def = SSA_NAME_DEF_STMT (cond);
1942 if (is_gimple_assign (def)
1943 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
1945 cond = gimple_assign_rhs1 (def);
1946 if (!ipa_is_ssa_with_stmt_def (cond))
1947 return;
1948 def = SSA_NAME_DEF_STMT (cond);
1951 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1952 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1953 == ptrmemfunc_vbit_in_delta),
1954 NULL);
1955 if (rec != rec2)
1956 return;
1958 index = ipa_get_param_decl_index (info, rec);
1959 if (index >= 0
1960 && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
1962 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1963 if (cs->indirect_info->offset != offset)
1964 cs->indirect_info->outer_type = NULL;
1965 cs->indirect_info->offset = offset;
1966 cs->indirect_info->agg_contents = 1;
1967 cs->indirect_info->member_ptr = 1;
1970 return;
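/* For illustration (a hypothetical example, not part of the original
   sources), the two simpler cases handled before the member-pointer
   pattern correspond to C code along these lines:

     void f (void (*cb) (int)) { cb (1); }

   where cb is a default-def SSA name, so an indirect edge with the
   parameter index of cb is noted, and

     struct ops { void (*hook) (void); };
     void g (struct ops *o) { o->hook (); }

   where the called pointer is loaded from an aggregate passed by
   reference, so agg_contents and by_ref are set on the new edge.  */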
1973 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1974 object referenced in the expression is a formal parameter of the caller
1975 (described by INFO), create a call note for the statement. */
1977 static void
1978 ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1979 struct ipa_node_params *info, gimple call,
1980 tree target)
1982 struct cgraph_edge *cs;
1983 struct cgraph_indirect_call_info *ii;
1984 struct ipa_jump_func jfunc;
1985 tree obj = OBJ_TYPE_REF_OBJECT (target);
1986 int index;
1987 HOST_WIDE_INT anc_offset;
1989 if (!flag_devirtualize)
1990 return;
1992 if (TREE_CODE (obj) != SSA_NAME)
1993 return;
1995 if (SSA_NAME_IS_DEFAULT_DEF (obj))
1997 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
1998 return;
2000 anc_offset = 0;
2001 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2002 gcc_assert (index >= 0);
2003 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2004 call, &jfunc))
2005 return;
2007 else
2009 gimple stmt = SSA_NAME_DEF_STMT (obj);
2010 tree expr;
2012 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2013 if (!expr)
2014 return;
2015 index = ipa_get_param_decl_index (info,
2016 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2017 gcc_assert (index >= 0);
2018 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2019 call, &jfunc, anc_offset))
2020 return;
2023 cs = ipa_note_param_call (node, index, call);
2024 ii = cs->indirect_info;
2025 ii->offset = anc_offset;
2026 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2027 ii->otr_type = obj_type_ref_class (target);
2028 ii->polymorphic = 1;
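/* Illustrative sketch (hypothetical, not from the original sources): for
   C++ code such as

     struct A { virtual int foo (); };
     int f (A *a) { return a->foo (); }

   the OBJ_TYPE_REF object is the default definition of parameter a, so the
   function above records an indirect, polymorphic edge with the parameter
   index of a, a zero ancestor offset, and the OTR type and token needed
   for later devirtualization.  */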
2031 /* Analyze call statement CALL to see whether and how it utilizes formal
2032 parameters of the caller (described by INFO). PARMS_AINFO is a pointer
2033 to a vector containing intermediate information about each formal parameter. */
2035 static void
2036 ipa_analyze_call_uses (struct cgraph_node *node,
2037 struct ipa_node_params *info,
2038 struct param_analysis_info *parms_ainfo, gimple call)
2040 tree target = gimple_call_fn (call);
2041 struct cgraph_edge *cs;
2043 if (!target
2044 || (TREE_CODE (target) != SSA_NAME
2045 && !virtual_method_call_p (target)))
2046 return;
2048 /* If we previously turned the call into a direct call, there is
2049 no need to analyze. */
2050 cs = cgraph_edge (node, call);
2051 if (cs && !cs->indirect_unknown_callee)
2052 return;
2053 if (TREE_CODE (target) == SSA_NAME)
2054 ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
2055 else if (virtual_method_call_p (target))
2056 ipa_analyze_virtual_call_uses (node, info, call, target);
2060 /* Analyze the call statement STMT with respect to formal parameters (described
2061 in INFO) of caller given by NODE. Currently it only checks whether formal
2062 parameters are called. PARMS_AINFO is a pointer to a vector containing
2063 intermediate information about each formal parameter. */
2065 static void
2066 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
2067 struct param_analysis_info *parms_ainfo, gimple stmt)
2069 if (is_gimple_call (stmt))
2070 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
2073 /* Callback of walk_stmt_load_store_addr_ops, used for loads, stores and
2074 address operands alike. If OP is a parameter declaration, mark it as
2075 used in the info structure passed in DATA. */
2077 static bool
2078 visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
2080 struct ipa_node_params *info = (struct ipa_node_params *) data;
2082 op = get_base_address (op);
2083 if (op
2084 && TREE_CODE (op) == PARM_DECL)
2086 int index = ipa_get_param_decl_index (info, op);
2087 gcc_assert (index >= 0);
2088 ipa_set_param_used (info, index, true);
2091 return false;
2094 /* Scan the function body of NODE and inspect the uses of formal parameters.
2095 Store the findings in various structures of the associated ipa_node_params
2096 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
2097 vector containing intermediate information about each formal parameter. */
2099 static void
2100 ipa_analyze_params_uses (struct cgraph_node *node,
2101 struct param_analysis_info *parms_ainfo)
2103 tree decl = node->decl;
2104 basic_block bb;
2105 struct function *func;
2106 gimple_stmt_iterator gsi;
2107 struct ipa_node_params *info = IPA_NODE_REF (node);
2108 int i;
2110 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
2111 return;
2113 info->uses_analysis_done = 1;
2114 if (ipa_func_spec_opts_forbid_analysis_p (node))
2116 for (i = 0; i < ipa_get_param_count (info); i++)
2118 ipa_set_param_used (info, i, true);
2119 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2121 return;
2124 for (i = 0; i < ipa_get_param_count (info); i++)
2126 tree parm = ipa_get_param (info, i);
2127 int controlled_uses = 0;
2129 /* For SSA regs, see if the parameter is used. For non-SSA parameters
2130 we compute the flag during modification analysis. */
2131 if (is_gimple_reg (parm))
2133 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2134 parm);
2135 if (ddef && !has_zero_uses (ddef))
2137 imm_use_iterator imm_iter;
2138 use_operand_p use_p;
2140 ipa_set_param_used (info, i, true);
2141 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2142 if (!is_gimple_call (USE_STMT (use_p)))
2144 if (!is_gimple_debug (USE_STMT (use_p)))
2146 controlled_uses = IPA_UNDESCRIBED_USE;
2147 break;
2150 else
2151 controlled_uses++;
2153 else
2154 controlled_uses = 0;
2156 else
2157 controlled_uses = IPA_UNDESCRIBED_USE;
2158 ipa_set_controlled_uses (info, i, controlled_uses);
2161 func = DECL_STRUCT_FUNCTION (decl);
2162 FOR_EACH_BB_FN (bb, func)
2164 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2166 gimple stmt = gsi_stmt (gsi);
2168 if (is_gimple_debug (stmt))
2169 continue;
2171 ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
2172 walk_stmt_load_store_addr_ops (stmt, info,
2173 visit_ref_for_mod_analysis,
2174 visit_ref_for_mod_analysis,
2175 visit_ref_for_mod_analysis);
2177 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2178 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
2179 visit_ref_for_mod_analysis,
2180 visit_ref_for_mod_analysis,
2181 visit_ref_for_mod_analysis);
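/* A worked example of the controlled-uses computation above (hypothetical,
   for illustration only): in

     extern void g (int *);
     void f (int *p) { g (p); g (p); }

   both uses of the default definition of p are call arguments, so
   controlled_uses ends up as 2.  Adding a statement such as q = p + 1
   would introduce a non-call use and the count would fall back to
   IPA_UNDESCRIBED_USE instead.  */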
2185 /* Free the bitmaps in PARMS_AINFO, assuming there are PARAM_COUNT parameters. */
2187 static void
2188 free_parms_ainfo (struct param_analysis_info *parms_ainfo, int param_count)
2190 int i;
2192 for (i = 0; i < param_count; i++)
2194 if (parms_ainfo[i].parm_visited_statements)
2195 BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
2196 if (parms_ainfo[i].pt_visited_statements)
2197 BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
2201 /* Initialize the array describing properties of formal parameters
2202 of NODE, analyze their uses and compute jump functions associated
2203 with actual arguments of calls from within NODE. */
2205 void
2206 ipa_analyze_node (struct cgraph_node *node)
2208 struct ipa_node_params *info;
2209 struct param_analysis_info *parms_ainfo;
2210 int param_count;
2212 ipa_check_create_node_params ();
2213 ipa_check_create_edge_args ();
2214 info = IPA_NODE_REF (node);
2215 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
2216 ipa_initialize_node_params (node);
2218 param_count = ipa_get_param_count (info);
2219 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
2220 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
2222 ipa_analyze_params_uses (node, parms_ainfo);
2223 ipa_compute_jump_functions (node, parms_ainfo);
2225 free_parms_ainfo (parms_ainfo, param_count);
2226 pop_cfun ();
2229 /* Given a statement CALL, which must be a GIMPLE_CALL calling an OBJ_TYPE_REF,
2230 attempt a type-based devirtualization. If successful, return the
2231 target function declaration, otherwise return NULL. */
2233 tree
2234 ipa_intraprocedural_devirtualization (gimple call)
2236 tree binfo, token, fndecl;
2237 struct ipa_jump_func jfunc;
2238 tree otr = gimple_call_fn (call);
2240 jfunc.type = IPA_JF_UNKNOWN;
2241 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
2242 call, obj_type_ref_class (otr));
2243 if (jfunc.type != IPA_JF_KNOWN_TYPE)
2244 return NULL_TREE;
2245 binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
2246 if (!binfo)
2247 return NULL_TREE;
2248 token = OBJ_TYPE_REF_TOKEN (otr);
2249 fndecl = gimple_get_virt_method_for_binfo (tree_to_uhwi (token),
2250 binfo);
2251 #ifdef ENABLE_CHECKING
2252 if (fndecl)
2253 gcc_assert (possible_polymorphic_call_target_p
2254 (otr, cgraph_get_node (fndecl)));
2255 #endif
2256 return fndecl;
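/* For illustration (a hypothetical, schematic example): given C++ code like

     struct A { virtual int foo (); };
     int f () { A a; A *ap = &a; return ap->foo (); }

   if the call survives to this point as an OBJ_TYPE_REF, the object can be
   traced to &a, whose type is known exactly; the known-type jump function
   then yields a BINFO from which the virtual table slot selected by the
   OBJ_TYPE_REF token resolves directly to A::foo.  */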
2259 /* Update the jump function DST when the call graph edge corresponding to SRC
2260 is being inlined, knowing that DST is of type ancestor and SRC of known
2261 type. */
2263 static void
2264 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2265 struct ipa_jump_func *dst)
2267 HOST_WIDE_INT combined_offset;
2268 tree combined_type;
2270 if (!ipa_get_jf_ancestor_type_preserved (dst))
2272 dst->type = IPA_JF_UNKNOWN;
2273 return;
2276 combined_offset = ipa_get_jf_known_type_offset (src)
2277 + ipa_get_jf_ancestor_offset (dst);
2278 combined_type = ipa_get_jf_ancestor_type (dst);
2280 ipa_set_jf_known_type (dst, combined_offset,
2281 ipa_get_jf_known_type_base_type (src),
2282 combined_type);
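/* A small worked example of the offset arithmetic above (hypothetical):
   if SRC describes a known object of type D at offset 0 and DST is an
   ancestor jump function converting it to a base B placed 64 bits into D,
   the result is a known-type jump function with offset 0 + 64 = 64, base
   type D and component type B.  */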
2285 /* Update the jump functions associated with call graph edge E when the call
2286 graph edge CS is being inlined, assuming that E->caller is already (possibly
2287 indirectly) inlined into CS->callee and that E has not been inlined. */
2289 static void
2290 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2291 struct cgraph_edge *e)
2293 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2294 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2295 int count = ipa_get_cs_argument_count (args);
2296 int i;
2298 for (i = 0; i < count; i++)
2300 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2302 if (dst->type == IPA_JF_ANCESTOR)
2304 struct ipa_jump_func *src;
2305 int dst_fid = dst->value.ancestor.formal_id;
2307 /* Variable number of arguments can cause havoc if we try to access
2308 one that does not exist in the inlined edge. So make sure we
2309 don't. */
2310 if (dst_fid >= ipa_get_cs_argument_count (top))
2312 dst->type = IPA_JF_UNKNOWN;
2313 continue;
2316 src = ipa_get_ith_jump_func (top, dst_fid);
2318 if (src->agg.items
2319 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2321 struct ipa_agg_jf_item *item;
2322 int j;
2324 /* Currently we do not produce clobber aggregate jump functions,
2325 replace with merging when we do. */
2326 gcc_assert (!dst->agg.items);
2328 dst->agg.items = vec_safe_copy (src->agg.items);
2329 dst->agg.by_ref = src->agg.by_ref;
2330 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2331 item->offset -= dst->value.ancestor.offset;
2334 if (src->type == IPA_JF_KNOWN_TYPE)
2335 combine_known_type_and_ancestor_jfs (src, dst);
2336 else if (src->type == IPA_JF_PASS_THROUGH
2337 && src->value.pass_through.operation == NOP_EXPR)
2339 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2340 dst->value.ancestor.agg_preserved &=
2341 src->value.pass_through.agg_preserved;
2342 dst->value.ancestor.type_preserved &=
2343 src->value.pass_through.type_preserved;
2345 else if (src->type == IPA_JF_ANCESTOR)
2347 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2348 dst->value.ancestor.offset += src->value.ancestor.offset;
2349 dst->value.ancestor.agg_preserved &=
2350 src->value.ancestor.agg_preserved;
2351 dst->value.ancestor.type_preserved &=
2352 src->value.ancestor.type_preserved;
2354 else
2355 dst->type = IPA_JF_UNKNOWN;
2357 else if (dst->type == IPA_JF_PASS_THROUGH)
2359 struct ipa_jump_func *src;
2360 /* We must check range due to calls with variable number of arguments
2361 and we cannot combine jump functions with operations. */
2362 if (dst->value.pass_through.operation == NOP_EXPR
2363 && (dst->value.pass_through.formal_id
2364 < ipa_get_cs_argument_count (top)))
2366 int dst_fid = dst->value.pass_through.formal_id;
2367 src = ipa_get_ith_jump_func (top, dst_fid);
2368 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2370 switch (src->type)
2372 case IPA_JF_UNKNOWN:
2373 dst->type = IPA_JF_UNKNOWN;
2374 break;
2375 case IPA_JF_KNOWN_TYPE:
2376 if (ipa_get_jf_pass_through_type_preserved (dst))
2377 ipa_set_jf_known_type (dst,
2378 ipa_get_jf_known_type_offset (src),
2379 ipa_get_jf_known_type_base_type (src),
2380 ipa_get_jf_known_type_base_type (src));
2381 else
2382 dst->type = IPA_JF_UNKNOWN;
2383 break;
2384 case IPA_JF_CONST:
2385 ipa_set_jf_cst_copy (dst, src);
2386 break;
2388 case IPA_JF_PASS_THROUGH:
2390 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2391 enum tree_code operation;
2392 operation = ipa_get_jf_pass_through_operation (src);
2394 if (operation == NOP_EXPR)
2396 bool agg_p, type_p;
2397 agg_p = dst_agg_p
2398 && ipa_get_jf_pass_through_agg_preserved (src);
2399 type_p = ipa_get_jf_pass_through_type_preserved (src)
2400 && ipa_get_jf_pass_through_type_preserved (dst);
2401 ipa_set_jf_simple_pass_through (dst, formal_id,
2402 agg_p, type_p);
2404 else
2406 tree operand = ipa_get_jf_pass_through_operand (src);
2407 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2408 operation);
2410 break;
2412 case IPA_JF_ANCESTOR:
2414 bool agg_p, type_p;
2415 agg_p = dst_agg_p
2416 && ipa_get_jf_ancestor_agg_preserved (src);
2417 type_p = ipa_get_jf_ancestor_type_preserved (src)
2418 && ipa_get_jf_pass_through_type_preserved (dst);
2419 ipa_set_ancestor_jf (dst,
2420 ipa_get_jf_ancestor_offset (src),
2421 ipa_get_jf_ancestor_type (src),
2422 ipa_get_jf_ancestor_formal_id (src),
2423 agg_p, type_p);
2424 break;
2426 default:
2427 gcc_unreachable ();
2430 if (src->agg.items
2431 && (dst_agg_p || !src->agg.by_ref))
2433 /* Currently we do not produce clobber aggregate jump
2434 functions, replace with merging when we do. */
2435 gcc_assert (!dst->agg.items);
2437 dst->agg.by_ref = src->agg.by_ref;
2438 dst->agg.items = vec_safe_copy (src->agg.items);
2441 else
2442 dst->type = IPA_JF_UNKNOWN;
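/* To illustrate the pass-through composition above with a hypothetical
   chain (not from the original sources): suppose A calls B passing its
   parameter x unchanged, and B calls C passing its own first parameter
   on, so the B->C edge carries a simple pass-through with formal_id 0.
   When B is inlined into A, the jump functions of the surviving A->C
   edge must be re-expressed in terms of A's parameters; the code above
   does so by looking up the corresponding jump function on the inlined
   edge (TOP) and combining the two, here yielding a simple pass-through
   of x again, with the aggregate- and type-preservation flags
   conservatively and-ed together.  */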
2447 /* If TARGET is an addr_expr of a function declaration, make it the destination
2448 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2450 struct cgraph_edge *
2451 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
2453 struct cgraph_node *callee;
2454 struct inline_edge_summary *es = inline_edge_summary (ie);
2455 bool unreachable = false;
2457 if (TREE_CODE (target) == ADDR_EXPR)
2458 target = TREE_OPERAND (target, 0);
2459 if (TREE_CODE (target) != FUNCTION_DECL)
2461 target = canonicalize_constructor_val (target, NULL);
2462 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2464 if (ie->indirect_info->member_ptr)
2465 /* Member pointer call that goes through a VMT lookup. */
2466 return NULL;
2468 if (dump_file)
2469 fprintf (dump_file, "ipa-prop: Discovered direct call to non-function"
2470 " in %s/%i, making it unreachable.\n",
2471 ie->caller->name (), ie->caller->order);
2472 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2473 callee = cgraph_get_create_node (target);
2474 unreachable = true;
2476 else
2477 callee = cgraph_get_node (target);
2479 else
2480 callee = cgraph_get_node (target);
2482 /* Because may-edges are not explicitly represented and the vtable may be
2483 external, we may create the first reference to the object in the unit. */
2484 if (!callee || callee->global.inlined_to)
2487 /* We had better ensure we can refer to it.
2488 In the case of static functions we are out of luck, since we already
2489 removed its body. In the case of public functions we may or may
2490 not introduce the reference. */
2491 if (!canonicalize_constructor_val (target, NULL)
2492 || !TREE_PUBLIC (target))
2494 if (dump_file)
2495 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2496 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2497 xstrdup (ie->caller->name ()),
2498 ie->caller->order,
2499 xstrdup (ie->callee->name ()),
2500 ie->callee->order);
2501 return NULL;
2503 callee = cgraph_get_create_node (target);
2505 ipa_check_create_node_params ();
2507 /* We cannot make edges to inline clones. It is a bug if someone removed
2508 the cgraph node too early. */
2509 gcc_assert (!callee->global.inlined_to);
2511 if (dump_file && !unreachable)
2513 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
2514 "(%s/%i -> %s/%i), for stmt ",
2515 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2516 xstrdup (ie->caller->name ()),
2517 ie->caller->order,
2518 xstrdup (callee->name ()),
2519 callee->order);
2520 if (ie->call_stmt)
2521 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2522 else
2523 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2525 ie = cgraph_make_edge_direct (ie, callee);
2526 es = inline_edge_summary (ie);
2527 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2528 - eni_size_weights.call_cost);
2529 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2530 - eni_time_weights.call_cost);
2532 return ie;
2535 /* Retrieve the value from aggregate jump function AGG for the given OFFSET,
2536 or return NULL if there is none. BY_REF specifies whether the value has to
2537 be passed by reference or by value. */
2539 tree
2540 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2541 HOST_WIDE_INT offset, bool by_ref)
2543 struct ipa_agg_jf_item *item;
2544 int i;
2546 if (by_ref != agg->by_ref)
2547 return NULL;
2549 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2550 if (item->offset == offset)
2552 /* Currently we do not have clobber values; return NULL for them once
2553 we do. */
2554 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2555 return item->value;
2557 return NULL;
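/* Usage sketch (hypothetical): if a caller passes a structure by value
   whose function-pointer field at bit offset 0 is known to hold &foo,
   the aggregate jump function built for that argument contains an item
   with offset 0 and value &foo, so

     tree t = ipa_find_agg_cst_for_param (&jfunc->agg, 0, false);

   yields the address of foo, provided the jump function indeed describes
   a by-value aggregate (the by_ref flags must match).  */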
2560 /* Remove a reference to SYMBOL from the list of references of a node given by
2561 reference description RDESC. Return true if the reference has been
2562 successfully found and removed. */
2564 static bool
2565 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2567 struct ipa_ref *to_del;
2568 struct cgraph_edge *origin;
2570 origin = rdesc->cs;
2571 if (!origin)
2572 return false;
2573 to_del = ipa_find_reference (origin->caller, symbol,
2574 origin->call_stmt, origin->lto_stmt_uid);
2575 if (!to_del)
2576 return false;
2578 ipa_remove_reference (to_del);
2579 if (dump_file)
2580 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2581 xstrdup (origin->caller->name ()),
2582 origin->caller->order, xstrdup (symbol->name ()));
2583 return true;
2586 /* If JFUNC has a reference description with refcount different from
2587 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2588 NULL. JFUNC must be a constant jump function. */
2590 static struct ipa_cst_ref_desc *
2591 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2593 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2594 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2595 return rdesc;
2596 else
2597 return NULL;
2600 /* If the value of constant jump function JFUNC is an address of a function
2601 declaration, return the associated call graph node. Otherwise return
2602 NULL. */
2604 static cgraph_node *
2605 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2607 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2608 tree cst = ipa_get_jf_constant (jfunc);
2609 if (TREE_CODE (cst) != ADDR_EXPR
2610 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2611 return NULL;
2613 return cgraph_get_node (TREE_OPERAND (cst, 0));
2617 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2618 refcount and, if it hits zero, remove the reference to the described symbol
2619 from the caller of the edge specified in the rdesc. Return false if either
2620 the symbol or the reference could not be found, otherwise return true. */
2622 static bool
2623 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2625 struct ipa_cst_ref_desc *rdesc;
2626 if (jfunc->type == IPA_JF_CONST
2627 && (rdesc = jfunc_rdesc_usable (jfunc))
2628 && --rdesc->refcount == 0)
2630 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
2631 if (!symbol)
2632 return false;
2634 return remove_described_reference (symbol, rdesc);
2636 return true;
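/* For illustration (hypothetical): when &foo is passed as an argument,
   the constant jump function keeps a reference description whose
   refcount tracks how many IPA structures still describe that use.
   Once the uses it stands for have been turned into direct calls, the
   decrement above reaches zero and the IPA reference from the caller
   to foo is removed, allowing foo to be eliminated if nothing else
   refers to it.  */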
2639 /* Try to find a destination for indirect edge IE that corresponds to a simple
2640 call or a call of a member function pointer and where the destination is a
2641 pointer formal parameter described by jump function JFUNC. If it can be
2642 determined, return the newly direct edge, otherwise return NULL.
2643 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2645 static struct cgraph_edge *
2646 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2647 struct ipa_jump_func *jfunc,
2648 struct ipa_node_params *new_root_info)
2650 struct cgraph_edge *cs;
2651 tree target;
2652 bool agg_contents = ie->indirect_info->agg_contents;
2654 if (ie->indirect_info->agg_contents)
2655 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2656 ie->indirect_info->offset,
2657 ie->indirect_info->by_ref);
2658 else
2659 target = ipa_value_from_jfunc (new_root_info, jfunc);
2660 if (!target)
2661 return NULL;
2662 cs = ipa_make_edge_direct_to_target (ie, target);
2664 if (cs && !agg_contents)
2666 bool ok;
2667 gcc_checking_assert (cs->callee
2668 && (cs != ie
2669 || jfunc->type != IPA_JF_CONST
2670 || !cgraph_node_for_jfunc (jfunc)
2671 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2672 ok = try_decrement_rdesc_refcount (jfunc);
2673 gcc_checking_assert (ok);
2676 return cs;
2679 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2680 call based on a formal parameter which is described by jump function JFUNC
2681 and, if it can be determined, make it direct and return the direct edge.
2682 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
2683 are relative to. */
2685 static struct cgraph_edge *
2686 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2687 struct ipa_jump_func *jfunc,
2688 struct ipa_node_params *new_root_info)
2690 tree binfo, target;
2692 if (!flag_devirtualize)
2693 return NULL;
2695 /* First try to do lookup via known virtual table pointer value. */
2696 if (!ie->indirect_info->by_ref)
2698 tree vtable;
2699 unsigned HOST_WIDE_INT offset;
2700 tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
2701 ie->indirect_info->offset,
2702 true);
2703 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
2705 target = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
2706 vtable, offset);
2707 if (target)
2709 if ((TREE_CODE (TREE_TYPE (target)) == FUNCTION_TYPE
2710 && DECL_FUNCTION_CODE (target) == BUILT_IN_UNREACHABLE)
2711 || !possible_polymorphic_call_target_p
2712 (ie, cgraph_get_node (target)))
2714 if (dump_file)
2715 fprintf (dump_file,
2716 "Type inconsident devirtualization: %s/%i->%s\n",
2717 ie->caller->name (), ie->caller->order,
2718 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
2719 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2720 cgraph_get_create_node (target);
2722 return ipa_make_edge_direct_to_target (ie, target);
2727 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
2729 if (!binfo)
2730 return NULL;
2732 if (TREE_CODE (binfo) != TREE_BINFO)
2734 ipa_polymorphic_call_context context;
2735 vec <cgraph_node *>targets;
2736 bool final;
2738 if (!get_polymorphic_call_info_from_invariant
2739 (&context, binfo, ie->indirect_info->otr_type,
2740 ie->indirect_info->offset))
2741 return NULL;
2742 targets = possible_polymorphic_call_targets
2743 (ie->indirect_info->otr_type,
2744 ie->indirect_info->otr_token,
2745 context, &final);
2746 if (!final || targets.length () > 1)
2747 return NULL;
2748 if (targets.length () == 1)
2749 target = targets[0]->decl;
2750 else
2752 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2753 cgraph_get_create_node (target);
2756 else
2758 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
2759 ie->indirect_info->otr_type);
2760 if (binfo)
2761 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2762 binfo);
2763 else
2764 return NULL;
2767 if (target)
2769 #ifdef ENABLE_CHECKING
2770 gcc_assert (possible_polymorphic_call_target_p
2771 (ie, cgraph_get_node (target)));
2772 #endif
2773 return ipa_make_edge_direct_to_target (ie, target);
2775 else
2776 return NULL;
2779 /* Update the param called notes associated with NODE when CS is being inlined,
2780 assuming NODE is (potentially indirectly) inlined into CS->callee.
2781 Moreover, if the callee is discovered to be constant, create a new cgraph
2782 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2783 unless NEW_EDGES is NULL. Return true iff new edges were created. */
2785 static bool
2786 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2787 struct cgraph_node *node,
2788 vec<cgraph_edge_p> *new_edges)
2790 struct ipa_edge_args *top;
2791 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
2792 struct ipa_node_params *new_root_info;
2793 bool res = false;
2795 ipa_check_create_edge_args ();
2796 top = IPA_EDGE_REF (cs);
2797 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
2798 ? cs->caller->global.inlined_to
2799 : cs->caller);
2801 for (ie = node->indirect_calls; ie; ie = next_ie)
2803 struct cgraph_indirect_call_info *ici = ie->indirect_info;
2804 struct ipa_jump_func *jfunc;
2805 int param_index;
2807 next_ie = ie->next_callee;
2809 if (ici->param_index == -1)
2810 continue;
2812 /* We must check range due to calls with variable number of arguments: */
2813 if (ici->param_index >= ipa_get_cs_argument_count (top))
2815 ici->param_index = -1;
2816 continue;
2819 param_index = ici->param_index;
2820 jfunc = ipa_get_ith_jump_func (top, param_index);
2822 if (!flag_indirect_inlining)
2823 new_direct_edge = NULL;
2824 else if (ici->polymorphic)
2825 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
2826 new_root_info);
2827 else
2828 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
2829 new_root_info);
2830 /* If speculation was removed, then we need to do nothing. */
2831 if (new_direct_edge && new_direct_edge != ie)
2833 new_direct_edge->indirect_inlining_edge = 1;
2834 top = IPA_EDGE_REF (cs);
2835 res = true;
2837 else if (new_direct_edge)
2839 new_direct_edge->indirect_inlining_edge = 1;
2840 if (new_direct_edge->call_stmt)
2841 new_direct_edge->call_stmt_cannot_inline_p
2842 = !gimple_check_call_matching_types (
2843 new_direct_edge->call_stmt,
2844 new_direct_edge->callee->decl, false);
2845 if (new_edges)
2847 new_edges->safe_push (new_direct_edge);
2848 res = true;
2850 top = IPA_EDGE_REF (cs);
2852 else if (jfunc->type == IPA_JF_PASS_THROUGH
2853 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2855 if (ici->agg_contents
2856 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
2857 ici->param_index = -1;
2858 else
2859 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
2861 else if (jfunc->type == IPA_JF_ANCESTOR)
2863 if (ici->agg_contents
2864 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
2865 ici->param_index = -1;
2866 else
2868 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
2869 if (ipa_get_jf_ancestor_offset (jfunc))
2870 ici->outer_type = NULL;
2871 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
2874 else
2875 /* Either we can find a destination for this edge now or never. */
2876 ici->param_index = -1;
2879 return res;
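/* Illustrative example of the param_index updating above (hypothetical):
   if the inlined node contained an indirect call through its third
   formal parameter and the jump function for that argument on the
   inlined edge is a simple pass-through of the caller's first parameter,
   the indirect edge survives with param_index rewritten from 2 to 0, so
   a later propagation round can still resolve it from the caller's
   arguments.  */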
2882 /* Recursively traverse subtree of NODE (including node) made of inlined
2883 cgraph_edges when CS has been inlined and invoke
2884 update_indirect_edges_after_inlining on all nodes and
2885 update_jump_functions_after_inlining on all non-inlined edges that lead out
2886 of this subtree. Newly discovered indirect edges will be added to
2887 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
2888 created. */
2890 static bool
2891 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
2892 struct cgraph_node *node,
2893 vec<cgraph_edge_p> *new_edges)
2895 struct cgraph_edge *e;
2896 bool res;
2898 res = update_indirect_edges_after_inlining (cs, node, new_edges);
2900 for (e = node->callees; e; e = e->next_callee)
2901 if (!e->inline_failed)
2902 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
2903 else
2904 update_jump_functions_after_inlining (cs, e);
2905 for (e = node->indirect_calls; e; e = e->next_callee)
2906 update_jump_functions_after_inlining (cs, e);
2908 return res;
2911 /* Combine two controlled uses counts as done during inlining. */
2913 static int
2914 combine_controlled_uses_counters (int c, int d)
2916 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
2917 return IPA_UNDESCRIBED_USE;
2918 else
2919 return c + d - 1;
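/* A worked example of the rule above: if the new root has c = 3
   controlled uses of the value and the inlined callee had d = 2, the
   combined count is 3 + 2 - 1 = 4, the minus one accounting for the
   argument of the now-inlined call, which is no longer a separate
   use.  */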
2922 /* Propagate the number of controlled users from CS->callee to the new root
2923 of the tree of inlined nodes. */
2925 static void
2926 propagate_controlled_uses (struct cgraph_edge *cs)
2928 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
2929 struct cgraph_node *new_root = cs->caller->global.inlined_to
2930 ? cs->caller->global.inlined_to : cs->caller;
2931 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
2932 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
2933 int count, i;
2935 count = MIN (ipa_get_cs_argument_count (args),
2936 ipa_get_param_count (old_root_info));
2937 for (i = 0; i < count; i++)
2939 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2940 struct ipa_cst_ref_desc *rdesc;
2942 if (jf->type == IPA_JF_PASS_THROUGH)
2944 int src_idx, c, d;
2945 src_idx = ipa_get_jf_pass_through_formal_id (jf);
2946 c = ipa_get_controlled_uses (new_root_info, src_idx);
2947 d = ipa_get_controlled_uses (old_root_info, i);
2949 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
2950 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
2951 c = combine_controlled_uses_counters (c, d);
2952 ipa_set_controlled_uses (new_root_info, src_idx, c);
2953 if (c == 0 && new_root_info->ipcp_orig_node)
2955 struct cgraph_node *n;
2956 struct ipa_ref *ref;
2957 tree t = new_root_info->known_vals[src_idx];
2959 if (t && TREE_CODE (t) == ADDR_EXPR
2960 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
2961 && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
2962 && (ref = ipa_find_reference (new_root,
2963 n, NULL, 0)))
2965 if (dump_file)
2966 fprintf (dump_file, "ipa-prop: Removing cloning-created "
2967 "reference from %s/%i to %s/%i.\n",
2968 xstrdup (new_root->name ()),
2969 new_root->order,
2970 xstrdup (n->name ()), n->order);
2971 ipa_remove_reference (ref);
2975 else if (jf->type == IPA_JF_CONST
2976 && (rdesc = jfunc_rdesc_usable (jf)))
2978 int d = ipa_get_controlled_uses (old_root_info, i);
2979 int c = rdesc->refcount;
2980 rdesc->refcount = combine_controlled_uses_counters (c, d);
2981 if (rdesc->refcount == 0)
2983 tree cst = ipa_get_jf_constant (jf);
2984 struct cgraph_node *n;
2985 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
2986 && TREE_CODE (TREE_OPERAND (cst, 0))
2987 == FUNCTION_DECL);
2988 n = cgraph_get_node (TREE_OPERAND (cst, 0));
2989 if (n)
2991 struct cgraph_node *clone;
2992 bool ok;
2993 ok = remove_described_reference (n, rdesc);
2994 gcc_checking_assert (ok);
2996 clone = cs->caller;
2997 while (clone->global.inlined_to
2998 && clone != rdesc->cs->caller
2999 && IPA_NODE_REF (clone)->ipcp_orig_node)
3001 struct ipa_ref *ref;
3002 ref = ipa_find_reference (clone,
3003 n, NULL, 0);
3004 if (ref)
3006 if (dump_file)
3007 fprintf (dump_file, "ipa-prop: Removing "
3008 "cloning-created reference "
3009 "from %s/%i to %s/%i.\n",
3010 xstrdup (clone->name ()),
3011 clone->order,
3012 xstrdup (n->name ()),
3013 n->order);
3014 ipa_remove_reference (ref);
3016 clone = clone->callers->caller;
3023 for (i = ipa_get_param_count (old_root_info);
3024 i < ipa_get_cs_argument_count (args);
3025 i++)
3027 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3029 if (jf->type == IPA_JF_CONST)
3031 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3032 if (rdesc)
3033 rdesc->refcount = IPA_UNDESCRIBED_USE;
3035 else if (jf->type == IPA_JF_PASS_THROUGH)
3036 ipa_set_controlled_uses (new_root_info,
3037 jf->value.pass_through.formal_id,
3038 IPA_UNDESCRIBED_USE);
3042 /* Update jump functions and call note functions on inlining the call site CS.
3043 CS is expected to lead to a node already cloned by
3044 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3045 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3046 created. */
3048 bool
3049 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3050 vec<cgraph_edge_p> *new_edges)
3052 bool changed;
3053 /* Do nothing if the preparation phase has not been carried out yet
3054 (i.e. during early inlining). */
3055 if (!ipa_node_params_vector.exists ())
3056 return false;
3057 gcc_assert (ipa_edge_args_vector);
3059 propagate_controlled_uses (cs);
3060 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3062 return changed;
3065 /* Frees all dynamically allocated structures that the argument info points
3066 to. */
3068 void
3069 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3071 vec_free (args->jump_functions);
3072 memset (args, 0, sizeof (*args));
3075 /* Free all ipa_edge_args structures. */
3077 void
3078 ipa_free_all_edge_args (void)
3080 int i;
3081 struct ipa_edge_args *args;
3083 if (!ipa_edge_args_vector)
3084 return;
3086 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3087 ipa_free_edge_args_substructures (args);
3089 vec_free (ipa_edge_args_vector);
3092 /* Frees all dynamically allocated structures that the param info points
3093 to. */
3095 void
3096 ipa_free_node_params_substructures (struct ipa_node_params *info)
3098 info->descriptors.release ();
3099 free (info->lattices);
3100 /* Lattice values and their sources are deallocated with their allocation
3101 pool. */
3102 info->known_vals.release ();
3103 memset (info, 0, sizeof (*info));
3106 /* Free all ipa_node_params structures. */
3108 void
3109 ipa_free_all_node_params (void)
3111 int i;
3112 struct ipa_node_params *info;
3114 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
3115 ipa_free_node_params_substructures (info);
3117 ipa_node_params_vector.release ();
3120 /* Set the aggregate replacements of NODE to be AGGVALS. */
3122 void
3123 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3124 struct ipa_agg_replacement_value *aggvals)
3126 if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
3127 vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);
3129 (*ipa_node_agg_replacements)[node->uid] = aggvals;
3132 /* Hook that is called by cgraph.c when an edge is removed. */
3134 static void
3135 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3137 struct ipa_edge_args *args;
3139 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3140 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3141 return;
3143 args = IPA_EDGE_REF (cs);
3144 if (args->jump_functions)
3146 struct ipa_jump_func *jf;
3147 int i;
3148 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3150 struct ipa_cst_ref_desc *rdesc;
3151 try_decrement_rdesc_refcount (jf);
3152 if (jf->type == IPA_JF_CONST
3153 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3154 && rdesc->cs == cs)
3155 rdesc->cs = NULL;
3159 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3162 /* Hook that is called by cgraph.c when a node is removed. */
3164 static void
3165 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3167 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3168 if (ipa_node_params_vector.length () > (unsigned)node->uid)
3169 ipa_free_node_params_substructures (IPA_NODE_REF (node));
3170 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3171 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
3174 /* Hook that is called by cgraph.c when an edge is duplicated. */
3176 static void
3177 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3178 __attribute__((unused)) void *data)
3180 struct ipa_edge_args *old_args, *new_args;
3181 unsigned int i;
3183 ipa_check_create_edge_args ();
3185 old_args = IPA_EDGE_REF (src);
3186 new_args = IPA_EDGE_REF (dst);
3188 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3190 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3192 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3193 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3195 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3197 if (src_jf->type == IPA_JF_CONST)
3199 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3201 if (!src_rdesc)
3202 dst_jf->value.constant.rdesc = NULL;
3203 else if (src->caller == dst->caller)
3205 struct ipa_ref *ref;
3206 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3207 gcc_checking_assert (n);
3208 ref = ipa_find_reference (src->caller, n,
3209 src->call_stmt, src->lto_stmt_uid);
3210 gcc_checking_assert (ref);
3211 ipa_clone_ref (ref, dst->caller, ref->stmt);
3213 gcc_checking_assert (ipa_refdesc_pool);
3214 struct ipa_cst_ref_desc *dst_rdesc
3215 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3216 dst_rdesc->cs = dst;
3217 dst_rdesc->refcount = src_rdesc->refcount;
3218 dst_rdesc->next_duplicate = NULL;
3219 dst_jf->value.constant.rdesc = dst_rdesc;
3221 else if (src_rdesc->cs == src)
3223 struct ipa_cst_ref_desc *dst_rdesc;
3224 gcc_checking_assert (ipa_refdesc_pool);
3225 dst_rdesc
3226 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3227 dst_rdesc->cs = dst;
3228 dst_rdesc->refcount = src_rdesc->refcount;
3229 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3230 src_rdesc->next_duplicate = dst_rdesc;
3231 dst_jf->value.constant.rdesc = dst_rdesc;
3233 else
3235 struct ipa_cst_ref_desc *dst_rdesc;
3236 /* This can happen during inlining, when a JFUNC can refer to a
3237 reference taken in a function up in the tree of inline clones.
3238 We need to find the duplicate that refers to our tree of
3239 inline clones. */
3241 gcc_assert (dst->caller->global.inlined_to);
3242 for (dst_rdesc = src_rdesc->next_duplicate;
3243 dst_rdesc;
3244 dst_rdesc = dst_rdesc->next_duplicate)
3246 struct cgraph_node *top;
3247 top = dst_rdesc->cs->caller->global.inlined_to
3248 ? dst_rdesc->cs->caller->global.inlined_to
3249 : dst_rdesc->cs->caller;
3250 if (dst->caller->global.inlined_to == top)
3251 break;
3253 gcc_assert (dst_rdesc);
3254 dst_jf->value.constant.rdesc = dst_rdesc;
3260 /* Hook that is called by cgraph.c when a node is duplicated. */
3262 static void
3263 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
3264 ATTRIBUTE_UNUSED void *data)
3266 struct ipa_node_params *old_info, *new_info;
3267 struct ipa_agg_replacement_value *old_av, *new_av;
3269 ipa_check_create_node_params ();
3270 old_info = IPA_NODE_REF (src);
3271 new_info = IPA_NODE_REF (dst);
3273 new_info->descriptors = old_info->descriptors.copy ();
3274 new_info->lattices = NULL;
3275 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3277 new_info->uses_analysis_done = old_info->uses_analysis_done;
3278 new_info->node_enqueued = old_info->node_enqueued;
3280 old_av = ipa_get_agg_replacements_for_node (src);
3281 if (!old_av)
3282 return;
3284 new_av = NULL;
3285 while (old_av)
3287 struct ipa_agg_replacement_value *v;
3289 v = ggc_alloc_ipa_agg_replacement_value ();
3290 memcpy (v, old_av, sizeof (*v));
3291 v->next = new_av;
3292 new_av = v;
3293 old_av = old_av->next;
3295 ipa_set_node_agg_value_chain (dst, new_av);
3299 /* Analyze newly added function into callgraph. */
3301 static void
3302 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3304 if (cgraph_function_with_gimple_body_p (node))
3305 ipa_analyze_node (node);
3308 /* Register our cgraph hooks if they are not already there. */
3310 void
3311 ipa_register_cgraph_hooks (void)
3313 if (!edge_removal_hook_holder)
3314 edge_removal_hook_holder =
3315 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3316 if (!node_removal_hook_holder)
3317 node_removal_hook_holder =
3318 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
3319 if (!edge_duplication_hook_holder)
3320 edge_duplication_hook_holder =
3321 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3322 if (!node_duplication_hook_holder)
3323 node_duplication_hook_holder =
3324 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
3325 function_insertion_hook_holder =
3326 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
3329 /* Unregister our cgraph hooks if they are not already there. */
3331 static void
3332 ipa_unregister_cgraph_hooks (void)
3334 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
3335 edge_removal_hook_holder = NULL;
3336 cgraph_remove_node_removal_hook (node_removal_hook_holder);
3337 node_removal_hook_holder = NULL;
3338 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
3339 edge_duplication_hook_holder = NULL;
3340 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
3341 node_duplication_hook_holder = NULL;
3342 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
3343 function_insertion_hook_holder = NULL;
3346 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3347 longer needed after ipa-cp. */
3349 void
3350 ipa_free_all_structures_after_ipa_cp (void)
3352 if (!optimize)
3354 ipa_free_all_edge_args ();
3355 ipa_free_all_node_params ();
3356 free_alloc_pool (ipcp_sources_pool);
3357 free_alloc_pool (ipcp_values_pool);
3358 free_alloc_pool (ipcp_agg_lattice_pool);
3359 ipa_unregister_cgraph_hooks ();
3360 if (ipa_refdesc_pool)
3361 free_alloc_pool (ipa_refdesc_pool);
3365 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3366 longer needed after indirect inlining. */
3368 void
3369 ipa_free_all_structures_after_iinln (void)
3371 ipa_free_all_edge_args ();
3372 ipa_free_all_node_params ();
3373 ipa_unregister_cgraph_hooks ();
3374 if (ipcp_sources_pool)
3375 free_alloc_pool (ipcp_sources_pool);
3376 if (ipcp_values_pool)
3377 free_alloc_pool (ipcp_values_pool);
3378 if (ipcp_agg_lattice_pool)
3379 free_alloc_pool (ipcp_agg_lattice_pool);
3380 if (ipa_refdesc_pool)
3381 free_alloc_pool (ipa_refdesc_pool);
3384 /* Print the ipa_tree_map data structures (parameter descriptors) of
3385 NODE to F. */
3387 void
3388 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3390 int i, count;
3391 struct ipa_node_params *info;
3393 if (!node->definition)
3394 return;
3395 info = IPA_NODE_REF (node);
3396 fprintf (f, " function %s/%i parameter descriptors:\n",
3397 node->name (), node->order);
3398 count = ipa_get_param_count (info);
3399 for (i = 0; i < count; i++)
3401 int c;
3403 fprintf (f, " ");
3404 ipa_dump_param (f, info, i);
3405 if (ipa_is_param_used (info, i))
3406 fprintf (f, " used");
3407 c = ipa_get_controlled_uses (info, i);
3408 if (c == IPA_UNDESCRIBED_USE)
3409 fprintf (f, " undescribed_use");
3410 else
3411 fprintf (f, " controlled_uses=%i", c);
3412 fprintf (f, "\n");
3416 /* Print ipa_tree_map data structures of all functions in the
3417 callgraph to F. */
3419 void
3420 ipa_print_all_params (FILE * f)
3422 struct cgraph_node *node;
3424 fprintf (f, "\nFunction parameters:\n");
3425 FOR_EACH_FUNCTION (node)
3426 ipa_print_node_params (f, node);
3429 /* Return a heap allocated vector containing formal parameters of FNDECL. */
3431 vec<tree>
3432 ipa_get_vector_of_formal_parms (tree fndecl)
3434 vec<tree> args;
3435 int count;
3436 tree parm;
3438 gcc_assert (!flag_wpa);
3439 count = count_formal_params (fndecl);
3440 args.create (count);
3441 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3442 args.quick_push (parm);
3444 return args;
3447 /* Return a heap allocated vector containing types of formal parameters of
3448 function type FNTYPE. */
3450 vec<tree>
3451 ipa_get_vector_of_formal_parm_types (tree fntype)
3453 vec<tree> types;
3454 int count = 0;
3455 tree t;
3457 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3458 count++;
3460 types.create (count);
3461 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3462 types.quick_push (TREE_VALUE (t));
3464 return types;
3467 /* Modify the function declaration FNDECL and its type according to the plan in
3468 ADJUSTMENTS. It also sets base fields of individual adjustments structures
3469 to reflect the actual parameters being modified which are determined by the
3470 base_index field. */
3472 void
3473 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3475 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3476 tree orig_type = TREE_TYPE (fndecl);
3477 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3479 /* The following test is an ugly hack; some functions simply don't have any
3480 arguments in their type. This is probably a bug but well... */
3481 bool care_for_types = (old_arg_types != NULL_TREE);
3482 bool last_parm_void;
3483 vec<tree> otypes;
3484 if (care_for_types)
3486 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3487 == void_type_node);
3488 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3489 if (last_parm_void)
3490 gcc_assert (oparms.length () + 1 == otypes.length ());
3491 else
3492 gcc_assert (oparms.length () == otypes.length ());
3494 else
3496 last_parm_void = false;
3497 otypes.create (0);
3500 int len = adjustments.length ();
3501 tree *link = &DECL_ARGUMENTS (fndecl);
3502 tree new_arg_types = NULL;
3503 for (int i = 0; i < len; i++)
3505 struct ipa_parm_adjustment *adj;
3506 gcc_assert (link);
3508 adj = &adjustments[i];
3509 tree parm;
3510 if (adj->op == IPA_PARM_OP_NEW)
3511 parm = NULL;
3512 else
3513 parm = oparms[adj->base_index];
3514 adj->base = parm;
3516 if (adj->op == IPA_PARM_OP_COPY)
3518 if (care_for_types)
3519 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3520 new_arg_types);
3521 *link = parm;
3522 link = &DECL_CHAIN (parm);
3524 else if (adj->op != IPA_PARM_OP_REMOVE)
3526 tree new_parm;
3527 tree ptype;
3529 if (adj->by_ref)
3530 ptype = build_pointer_type (adj->type);
3531 else
3533 ptype = adj->type;
3534 if (is_gimple_reg_type (ptype))
3536 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3537 if (TYPE_ALIGN (ptype) < malign)
3538 ptype = build_aligned_type (ptype, malign);
3542 if (care_for_types)
3543 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3545 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3546 ptype);
3547 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
3548 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3549 DECL_ARTIFICIAL (new_parm) = 1;
3550 DECL_ARG_TYPE (new_parm) = ptype;
3551 DECL_CONTEXT (new_parm) = fndecl;
3552 TREE_USED (new_parm) = 1;
3553 DECL_IGNORED_P (new_parm) = 1;
3554 layout_decl (new_parm, 0);
3556 if (adj->op == IPA_PARM_OP_NEW)
3557 adj->base = NULL;
3558 else
3559 adj->base = parm;
3560 adj->new_decl = new_parm;
3562 *link = new_parm;
3563 link = &DECL_CHAIN (new_parm);
3567 *link = NULL_TREE;
3569 tree new_reversed = NULL;
3570 if (care_for_types)
3572 new_reversed = nreverse (new_arg_types);
3573 if (last_parm_void)
3575 if (new_reversed)
3576 TREE_CHAIN (new_arg_types) = void_list_node;
3577 else
3578 new_reversed = void_list_node;
3582 /* Use copy_node to preserve as much as possible from the original type
3583 (debug info, attribute lists etc.).
3584 The exception is that METHOD_TYPEs must have a THIS argument; when we
3585 are asked to remove it, we need to build a new FUNCTION_TYPE
3586 instead. */
3587 tree new_type = NULL;
3588 if (TREE_CODE (orig_type) != METHOD_TYPE
3589 || (adjustments[0].op == IPA_PARM_OP_COPY
3590 && adjustments[0].base_index == 0))
3592 new_type = build_distinct_type_copy (orig_type);
3593 TYPE_ARG_TYPES (new_type) = new_reversed;
3595 else
3597 new_type
3598 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3599 new_reversed));
3600 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3601 DECL_VINDEX (fndecl) = NULL_TREE;
3604 /* When signature changes, we need to clear builtin info. */
3605 if (DECL_BUILT_IN (fndecl))
3607 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3608 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3611 /* This is a new type, not a copy of an old type. Need to reassociate
3612 variants. We can handle everything except the main variant lazily. */
3613 tree t = TYPE_MAIN_VARIANT (orig_type);
3614 if (orig_type != t)
3616 TYPE_MAIN_VARIANT (new_type) = t;
3617 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
3618 TYPE_NEXT_VARIANT (t) = new_type;
3620 else
3622 TYPE_MAIN_VARIANT (new_type) = new_type;
3623 TYPE_NEXT_VARIANT (new_type) = NULL;
3626 TREE_TYPE (fndecl) = new_type;
3627 DECL_VIRTUAL_P (fndecl) = 0;
3628 otypes.release ();
3629 oparms.release ();
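/* A sketch of how an adjustment vector drives the rewrite above (a
   hypothetical plan, not from the original sources): for

     int f (int a, struct big *b);

   a vector of two entries, an IPA_PARM_OP_COPY with base_index 0 and an
   IPA_PARM_OP_NEW carrying the type of the reduced piece of *b, turns
   the declaration into roughly

     int f (int a, <piece type> SYNTH.N);

   where the COPY entry reuses the old PARM_DECL of a and the NEW entry
   receives a freshly built, suitably aligned PARM_DECL named after the
   default "SYNTH" prefix.  */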
3632 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
3633 If this is a directly recursive call, CS must be NULL. Otherwise it must
3634 contain the corresponding call graph edge. */
3636 void
3637 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
3638 ipa_parm_adjustment_vec adjustments)
3640 struct cgraph_node *current_node = cgraph_get_node (current_function_decl);
3641 vec<tree> vargs;
3642 vec<tree, va_gc> **debug_args = NULL;
3643 gimple new_stmt;
3644 gimple_stmt_iterator gsi, prev_gsi;
3645 tree callee_decl;
3646 int i, len;
3648 len = adjustments.length ();
3649 vargs.create (len);
3650 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
3651 ipa_remove_stmt_references (current_node, stmt);
3653 gsi = gsi_for_stmt (stmt);
3654 prev_gsi = gsi;
3655 gsi_prev (&prev_gsi);
3656 for (i = 0; i < len; i++)
3658 struct ipa_parm_adjustment *adj;
3660 adj = &adjustments[i];
3662 if (adj->op == IPA_PARM_OP_COPY)
3664 tree arg = gimple_call_arg (stmt, adj->base_index);
3666 vargs.quick_push (arg);
3668 else if (adj->op != IPA_PARM_OP_REMOVE)
3670 tree expr, base, off;
3671 location_t loc;
3672 unsigned int deref_align = 0;
3673 bool deref_base = false;
3675 /* We create a new parameter out of the value of the old one; we can
3676 do the following kinds of transformations:
3678 - A scalar passed by reference is converted to a scalar passed by
3679 value. (adj->by_ref is false and the type of the original
3680 actual argument is a pointer to a scalar).
3682 - A part of an aggregate is passed instead of the whole aggregate.
3683 The part can be passed either by value or by reference, this is
3684 determined by value of adj->by_ref. Moreover, the code below
3685 handles both situations when the original aggregate is passed by
3686 value (its type is not a pointer) and when it is passed by
3687 reference (it is a pointer to an aggregate).
3689 When the new argument is passed by reference (adj->by_ref is true)
3690 it must be a part of an aggregate and therefore we form it by
3691 simply taking the address of a reference inside the original
3692 aggregate. */
3694 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3695 base = gimple_call_arg (stmt, adj->base_index);
3696 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3697 : EXPR_LOCATION (base);
3699 if (TREE_CODE (base) != ADDR_EXPR
3700 && POINTER_TYPE_P (TREE_TYPE (base)))
3701 off = build_int_cst (adj->alias_ptr_type,
3702 adj->offset / BITS_PER_UNIT);
3703 else
3705 HOST_WIDE_INT base_offset;
3706 tree prev_base;
3707 bool addrof;
3709 if (TREE_CODE (base) == ADDR_EXPR)
3711 base = TREE_OPERAND (base, 0);
3712 addrof = true;
3714 else
3715 addrof = false;
3716 prev_base = base;
3717 base = get_addr_base_and_unit_offset (base, &base_offset);
3718 /* Aggregate arguments can have non-invariant addresses. */
3719 if (!base)
3721 base = build_fold_addr_expr (prev_base);
3722 off = build_int_cst (adj->alias_ptr_type,
3723 adj->offset / BITS_PER_UNIT);
3725 else if (TREE_CODE (base) == MEM_REF)
3727 if (!addrof)
3729 deref_base = true;
3730 deref_align = TYPE_ALIGN (TREE_TYPE (base));
3732 off = build_int_cst (adj->alias_ptr_type,
3733 base_offset
3734 + adj->offset / BITS_PER_UNIT);
3735 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
3736 off);
3737 base = TREE_OPERAND (base, 0);
3739 else
3741 off = build_int_cst (adj->alias_ptr_type,
3742 base_offset
3743 + adj->offset / BITS_PER_UNIT);
3744 base = build_fold_addr_expr (base);
3748 if (!adj->by_ref)
3750 tree type = adj->type;
3751 unsigned int align;
3752 unsigned HOST_WIDE_INT misalign;
3754 if (deref_base)
3756 align = deref_align;
3757 misalign = 0;
3759 else
3761 get_pointer_alignment_1 (base, &align, &misalign);
3762 if (TYPE_ALIGN (type) > align)
3763 align = TYPE_ALIGN (type);
3765 misalign += (tree_to_double_int (off)
3766 .sext (TYPE_PRECISION (TREE_TYPE (off))).low
3767 * BITS_PER_UNIT);
3768 misalign = misalign & (align - 1);
3769 if (misalign != 0)
3770 align = (misalign & -misalign);
3771 if (align < TYPE_ALIGN (type))
3772 type = build_aligned_type (type, align);
3773 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
3775 else
3777 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
3778 expr = build_fold_addr_expr (expr);
3781 expr = force_gimple_operand_gsi (&gsi, expr,
3782 adj->by_ref
3783 || is_gimple_reg_type (adj->type),
3784 NULL, true, GSI_SAME_STMT);
3785 vargs.quick_push (expr);
3787 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
3789 unsigned int ix;
3790 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
3791 gimple def_temp;
3793 arg = gimple_call_arg (stmt, adj->base_index);
3794 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
3796 if (!fold_convertible_p (TREE_TYPE (origin), arg))
3797 continue;
3798 arg = fold_convert_loc (gimple_location (stmt),
3799 TREE_TYPE (origin), arg);
3801 if (debug_args == NULL)
3802 debug_args = decl_debug_args_insert (callee_decl);
3803 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
3804 if (ddecl == origin)
3806 ddecl = (**debug_args)[ix + 1];
3807 break;
3809 if (ddecl == NULL)
3811 ddecl = make_node (DEBUG_EXPR_DECL);
3812 DECL_ARTIFICIAL (ddecl) = 1;
3813 TREE_TYPE (ddecl) = TREE_TYPE (origin);
3814 DECL_MODE (ddecl) = DECL_MODE (origin);
3816 vec_safe_push (*debug_args, origin);
3817 vec_safe_push (*debug_args, ddecl);
3819 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
3820 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
3824 if (dump_file && (dump_flags & TDF_DETAILS))
3826 fprintf (dump_file, "replacing stmt:");
3827 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
3830 new_stmt = gimple_build_call_vec (callee_decl, vargs);
3831 vargs.release ();
3832 if (gimple_call_lhs (stmt))
3833 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3835 gimple_set_block (new_stmt, gimple_block (stmt));
3836 if (gimple_has_location (stmt))
3837 gimple_set_location (new_stmt, gimple_location (stmt));
3838 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3839 gimple_call_copy_flags (new_stmt, stmt);
3841 if (dump_file && (dump_flags & TDF_DETAILS))
3843 fprintf (dump_file, "with stmt:");
3844 print_gimple_stmt (dump_file, new_stmt, 0, 0);
3845 fprintf (dump_file, "\n");
3847 gsi_replace (&gsi, new_stmt, true);
3848 if (cs)
3849 cgraph_set_call_stmt (cs, new_stmt);
3852 ipa_record_stmt_references (current_node, gsi_stmt (gsi));
3853 gsi_prev (&gsi);
3855 while ((gsi_end_p (prev_gsi) && !gsi_end_p (gsi))
3856 || (!gsi_end_p (prev_gsi) && gsi_stmt (gsi) == gsi_stmt (prev_gsi)));
3858 update_ssa (TODO_update_ssa);
3859 free_dominance_info (CDI_DOMINATORS);
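/* Illustrative sketch, not part of the original sources: the alignment
   arithmetic used when rebuilding a by-value argument above, rendered in
   plain standalone C.  ALIGN is a known alignment in bits and is assumed
   to be a power of two, MISALIGN a known misalignment in bits; the names
   are hypothetical.  Adding a constant OFFSET_BITS and reducing modulo
   ALIGN leaves the lowest set bit of the result as the strongest
   alignment that still provably holds.  */

static unsigned int
example_known_alignment (unsigned int align, unsigned long long misalign,
                         unsigned long long offset_bits)
{
  misalign += offset_bits;
  misalign &= align - 1;           /* Reduce modulo the assumed alignment.  */
  if (misalign != 0)
    align = misalign & -misalign;  /* Lowest set bit caps the alignment.  */
  return align;
}

/* E.g. example_known_alignment (128, 0, 32) yields 32: a 16-byte aligned
   base moved by 4 bytes can only be guaranteed 4-byte alignment.  */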
3862 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
3863 so. ADJUSTMENTS is a vector of adjustments. CONVERT specifies
3864 whether the function should care about type incompatibility between the
3865 current and new expressions. If it is false, the function will leave
3866 incompatibility issues to the caller. Return true iff the expression
3867 was modified. */
3869 bool
3870 ipa_modify_expr (tree *expr, bool convert,
3871 ipa_parm_adjustment_vec adjustments)
3873 struct ipa_parm_adjustment *cand
3874 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
3875 if (!cand)
3876 return false;
3878 tree src;
3879 if (cand->by_ref)
3880 src = build_simple_mem_ref (cand->new_decl);
3881 else
3882 src = cand->new_decl;
3884 if (dump_file && (dump_flags & TDF_DETAILS))
3886 fprintf (dump_file, "About to replace expr ");
3887 print_generic_expr (dump_file, *expr, 0);
3888 fprintf (dump_file, " with ");
3889 print_generic_expr (dump_file, src, 0);
3890 fprintf (dump_file, "\n");
3893 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
3895 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
3896 *expr = vce;
3898 else
3899 *expr = src;
3900 return true;
3903 /* If T is an SSA_NAME, return NULL if it is not a default def or
3904 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
3905 the base variable is always returned, regardless of whether it is a
3906 default def. Return T if it is not an SSA_NAME. */
3908 static tree
3909 get_ssa_base_param (tree t, bool ignore_default_def)
3911 if (TREE_CODE (t) == SSA_NAME)
3913 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
3914 return SSA_NAME_VAR (t);
3915 else
3916 return NULL_TREE;
3918 return t;
3921 /* Given an expression, return an adjustment entry specifying the
3922 transformation to be done on EXPR. If no suitable adjustment entry
3923 is found, return NULL.
3925 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
3926 default def, otherwise bail on them.
3928 If CONVERT is non-NULL, this function will set *CONVERT if the
3929 expression provided is a component reference. ADJUSTMENTS is the
3930 adjustments vector. */
3932 ipa_parm_adjustment *
3933 ipa_get_adjustment_candidate (tree **expr, bool *convert,
3934 ipa_parm_adjustment_vec adjustments,
3935 bool ignore_default_def)
3937 if (TREE_CODE (**expr) == BIT_FIELD_REF
3938 || TREE_CODE (**expr) == IMAGPART_EXPR
3939 || TREE_CODE (**expr) == REALPART_EXPR)
3941 *expr = &TREE_OPERAND (**expr, 0);
3942 if (convert)
3943 *convert = true;
3946 HOST_WIDE_INT offset, size, max_size;
3947 tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
3948 if (!base || size == -1 || max_size == -1)
3949 return NULL;
3951 if (TREE_CODE (base) == MEM_REF)
3953 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
3954 base = TREE_OPERAND (base, 0);
3957 base = get_ssa_base_param (base, ignore_default_def);
3958 if (!base || TREE_CODE (base) != PARM_DECL)
3959 return NULL;
3961 struct ipa_parm_adjustment *cand = NULL;
3962 unsigned int len = adjustments.length ();
3963 for (unsigned i = 0; i < len; i++)
3965 struct ipa_parm_adjustment *adj = &adjustments[i];
3967 if (adj->base == base
3968 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
3970 cand = adj;
3971 break;
3975 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
3976 return NULL;
3977 return cand;
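/* Illustrative sketch, not part of the original sources, of the lookup
   performed above: a linear scan over the adjustment vector matching on
   the parameter base and either the exact offset or a removal entry,
   which matches any offset.  The stand-in types below are hypothetical
   simplifications of ipa_parm_adjustment.  */

enum example_parm_op { EXAMPLE_OP_COPY, EXAMPLE_OP_REMOVE, EXAMPLE_OP_SPLIT };

struct example_parm_adjustment
{
  int base;                     /* Stand-in for the PARM_DECL base.  */
  long long offset;             /* Offset into the parameter, in bits.  */
  enum example_parm_op op;
};

static struct example_parm_adjustment *
example_find_candidate (struct example_parm_adjustment *adjs, int len,
                        int base, long long offset)
{
  struct example_parm_adjustment *cand = 0;
  int i;

  for (i = 0; i < len; i++)
    if (adjs[i].base == base
        && (adjs[i].offset == offset || adjs[i].op == EXAMPLE_OP_REMOVE))
      {
        cand = &adjs[i];
        break;
      }

  /* Only genuine reductions are interesting; copies and removals are
     left for other code to handle, as in the function above.  */
  if (!cand || cand->op != EXAMPLE_OP_SPLIT)
    return 0;
  return cand;
}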
3980 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
3982 static bool
3983 index_in_adjustments_multiple_times_p (int base_index,
3984 ipa_parm_adjustment_vec adjustments)
3986 int i, len = adjustments.length ();
3987 bool one = false;
3989 for (i = 0; i < len; i++)
3991 struct ipa_parm_adjustment *adj;
3992 adj = &adjustments[i];
3994 if (adj->base_index == base_index)
3996 if (one)
3997 return true;
3998 else
3999 one = true;
4002 return false;
4006 /* Return adjustments that should have the same effect on function parameters
4007 and call arguments as if they were first changed according to adjustments in
4008 INNER and then by adjustments in OUTER. */
4010 ipa_parm_adjustment_vec
4011 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4012 ipa_parm_adjustment_vec outer)
4014 int i, outlen = outer.length ();
4015 int inlen = inner.length ();
4016 int removals = 0;
4017 ipa_parm_adjustment_vec adjustments, tmp;
4019 tmp.create (inlen);
4020 for (i = 0; i < inlen; i++)
4022 struct ipa_parm_adjustment *n;
4023 n = &inner[i];
4025 if (n->op == IPA_PARM_OP_REMOVE)
4026 removals++;
4027 else
4029 /* FIXME: Handling of new arguments is not implemented yet. */
4030 gcc_assert (n->op != IPA_PARM_OP_NEW);
4031 tmp.quick_push (*n);
4035 adjustments.create (outlen + removals);
4036 for (i = 0; i < outlen; i++)
4038 struct ipa_parm_adjustment r;
4039 struct ipa_parm_adjustment *out = &outer[i];
4040 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4042 memset (&r, 0, sizeof (r));
4043 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4044 if (out->op == IPA_PARM_OP_REMOVE)
4046 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4048 r.op = IPA_PARM_OP_REMOVE;
4049 adjustments.quick_push (r);
4051 continue;
4053 else
4055 /* FIXME: Handling of new arguments is not implemented yet. */
4056 gcc_assert (out->op != IPA_PARM_OP_NEW);
4059 r.base_index = in->base_index;
4060 r.type = out->type;
4062 /* FIXME: Create nonlocal value too. */
4064 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4065 r.op = IPA_PARM_OP_COPY;
4066 else if (in->op == IPA_PARM_OP_COPY)
4067 r.offset = out->offset;
4068 else if (out->op == IPA_PARM_OP_COPY)
4069 r.offset = in->offset;
4070 else
4071 r.offset = in->offset + out->offset;
4072 adjustments.quick_push (r);
4075 for (i = 0; i < inlen; i++)
4077 struct ipa_parm_adjustment *n = &inner[i];
4079 if (n->op == IPA_PARM_OP_REMOVE)
4080 adjustments.quick_push (*n);
4083 tmp.release ();
4084 return adjustments;
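/* Illustrative sketch, not part of the original sources: the offset
   composition rule at the heart of ipa_combine_adjustments above.  A
   copy acts as the identity, and two genuine reductions into an
   aggregate compose by adding their offsets.  Names are hypothetical.  */

static long long
example_compose_offsets (int inner_is_copy, long long inner_offset,
                         int outer_is_copy, long long outer_offset)
{
  if (inner_is_copy && outer_is_copy)
    return 0;                   /* A copy of a copy remains a copy.  */
  if (inner_is_copy)
    return outer_offset;
  if (outer_is_copy)
    return inner_offset;
  return inner_offset + outer_offset;  /* Reductions compose additively.  */
}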
4087 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a
4088 human-friendly way, assuming they are meant to be applied to FNDECL. */
4090 void
4091 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4092 tree fndecl)
4094 int i, len = adjustments.length ();
4095 bool first = true;
4096 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4098 fprintf (file, "IPA param adjustments: ");
4099 for (i = 0; i < len; i++)
4101 struct ipa_parm_adjustment *adj;
4102 adj = &adjustments[i];
4104 if (!first)
4105 fprintf (file, " ");
4106 else
4107 first = false;
4109 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4110 print_generic_expr (file, parms[adj->base_index], 0);
4111 if (adj->base)
4113 fprintf (file, ", base: ");
4114 print_generic_expr (file, adj->base, 0);
4116 if (adj->new_decl)
4118 fprintf (file, ", new_decl: ");
4119 print_generic_expr (file, adj->new_decl, 0);
4121 if (adj->new_ssa_base)
4123 fprintf (file, ", new_ssa_base: ");
4124 print_generic_expr (file, adj->new_ssa_base, 0);
4127 if (adj->op == IPA_PARM_OP_COPY)
4128 fprintf (file, ", copy_param");
4129 else if (adj->op == IPA_PARM_OP_REMOVE)
4130 fprintf (file, ", remove_param");
4131 else
4132 fprintf (file, ", offset %li", (long) adj->offset);
4133 if (adj->by_ref)
4134 fprintf (file, ", by_ref");
4135 print_node_brief (file, ", type: ", adj->type, 0);
4136 fprintf (file, "\n");
4138 parms.release ();
4141 /* Dump the linked list of aggregate replacement values AV to F. */
4143 void
4144 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4146 bool comma = false;
4147 fprintf (f, " Aggregate replacements:");
4148 for (; av; av = av->next)
4150 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4151 av->index, av->offset);
4152 print_generic_expr (f, av->value, 0);
4153 comma = true;
4155 fprintf (f, "\n");
4158 /* Stream out jump function JUMP_FUNC to OB. */
4160 static void
4161 ipa_write_jump_function (struct output_block *ob,
4162 struct ipa_jump_func *jump_func)
4164 struct ipa_agg_jf_item *item;
4165 struct bitpack_d bp;
4166 int i, count;
4168 streamer_write_uhwi (ob, jump_func->type);
4169 switch (jump_func->type)
4171 case IPA_JF_UNKNOWN:
4172 break;
4173 case IPA_JF_KNOWN_TYPE:
4174 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
4175 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
4176 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
4177 break;
4178 case IPA_JF_CONST:
4179 gcc_assert (
4180 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4181 stream_write_tree (ob, jump_func->value.constant.value, true);
4182 break;
4183 case IPA_JF_PASS_THROUGH:
4184 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4185 if (jump_func->value.pass_through.operation == NOP_EXPR)
4187 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4188 bp = bitpack_create (ob->main_stream);
4189 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4190 bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
4191 streamer_write_bitpack (&bp);
4193 else
4195 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4196 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4198 break;
4199 case IPA_JF_ANCESTOR:
4200 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4201 stream_write_tree (ob, jump_func->value.ancestor.type, true);
4202 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4203 bp = bitpack_create (ob->main_stream);
4204 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4205 bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
4206 streamer_write_bitpack (&bp);
4207 break;
4210 count = vec_safe_length (jump_func->agg.items);
4211 streamer_write_uhwi (ob, count);
4212 if (count)
4214 bp = bitpack_create (ob->main_stream);
4215 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4216 streamer_write_bitpack (&bp);
4219 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4221 streamer_write_uhwi (ob, item->offset);
4222 stream_write_tree (ob, item->value, true);
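/* Illustrative sketch, not part of the original sources, of the
   bit-packing idiom that bp_pack_value/bp_unpack_value implement for
   the flags streamed above: successive small fields share one word and
   must be read back in exactly the order they were written.  The
   stand-in type and functions are hypothetical; NBITS must stay below
   the width of the word.  */

struct example_bitpack
{
  unsigned long long word;      /* Accumulated bits.  */
  unsigned int pos;             /* Next free bit position.  */
};

static void
example_bp_pack (struct example_bitpack *bp, unsigned long long val,
                 unsigned int nbits)
{
  bp->word |= (val & (((unsigned long long) 1 << nbits) - 1)) << bp->pos;
  bp->pos += nbits;
}

static unsigned long long
example_bp_unpack (struct example_bitpack *bp, unsigned int nbits)
{
  unsigned long long val
    = (bp->word >> bp->pos) & (((unsigned long long) 1 << nbits) - 1);
  bp->pos += nbits;
  return val;
}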
4226 /* Read in jump function JUMP_FUNC from IB. */
4228 static void
4229 ipa_read_jump_function (struct lto_input_block *ib,
4230 struct ipa_jump_func *jump_func,
4231 struct cgraph_edge *cs,
4232 struct data_in *data_in)
4234 enum jump_func_type jftype;
4235 enum tree_code operation;
4236 int i, count;
4238 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4239 switch (jftype)
4241 case IPA_JF_UNKNOWN:
4242 jump_func->type = IPA_JF_UNKNOWN;
4243 break;
4244 case IPA_JF_KNOWN_TYPE:
4246 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4247 tree base_type = stream_read_tree (ib, data_in);
4248 tree component_type = stream_read_tree (ib, data_in);
4250 ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
4251 break;
4253 case IPA_JF_CONST:
4254 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4255 break;
4256 case IPA_JF_PASS_THROUGH:
4257 operation = (enum tree_code) streamer_read_uhwi (ib);
4258 if (operation == NOP_EXPR)
4260 int formal_id = streamer_read_uhwi (ib);
4261 struct bitpack_d bp = streamer_read_bitpack (ib);
4262 bool agg_preserved = bp_unpack_value (&bp, 1);
4263 bool type_preserved = bp_unpack_value (&bp, 1);
4264 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
4265 type_preserved);
4267 else
4269 tree operand = stream_read_tree (ib, data_in);
4270 int formal_id = streamer_read_uhwi (ib);
4271 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4272 operation);
4274 break;
4275 case IPA_JF_ANCESTOR:
4277 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4278 tree type = stream_read_tree (ib, data_in);
4279 int formal_id = streamer_read_uhwi (ib);
4280 struct bitpack_d bp = streamer_read_bitpack (ib);
4281 bool agg_preserved = bp_unpack_value (&bp, 1);
4282 bool type_preserved = bp_unpack_value (&bp, 1);
4284 ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
4285 type_preserved);
4286 break;
4290 count = streamer_read_uhwi (ib);
4291 vec_alloc (jump_func->agg.items, count);
4292 if (count)
4294 struct bitpack_d bp = streamer_read_bitpack (ib);
4295 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4297 for (i = 0; i < count; i++)
4299 struct ipa_agg_jf_item item;
4300 item.offset = streamer_read_uhwi (ib);
4301 item.value = stream_read_tree (ib, data_in);
4302 jump_func->agg.items->quick_push (item);
4306 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4307 relevant to indirect inlining to OB. */
4309 static void
4310 ipa_write_indirect_edge_info (struct output_block *ob,
4311 struct cgraph_edge *cs)
4313 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4314 struct bitpack_d bp;
4316 streamer_write_hwi (ob, ii->param_index);
4317 streamer_write_hwi (ob, ii->offset);
4318 bp = bitpack_create (ob->main_stream);
4319 bp_pack_value (&bp, ii->polymorphic, 1);
4320 bp_pack_value (&bp, ii->agg_contents, 1);
4321 bp_pack_value (&bp, ii->member_ptr, 1);
4322 bp_pack_value (&bp, ii->by_ref, 1);
4323 bp_pack_value (&bp, ii->maybe_in_construction, 1);
4324 bp_pack_value (&bp, ii->maybe_derived_type, 1);
4325 streamer_write_bitpack (&bp);
4327 if (ii->polymorphic)
4329 streamer_write_hwi (ob, ii->otr_token);
4330 stream_write_tree (ob, ii->otr_type, true);
4331 stream_write_tree (ob, ii->outer_type, true);
4335 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4336 relevant to indirect inlining from IB. */
4338 static void
4339 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4340 struct data_in *data_in ATTRIBUTE_UNUSED,
4341 struct cgraph_edge *cs)
4343 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4344 struct bitpack_d bp;
4346 ii->param_index = (int) streamer_read_hwi (ib);
4347 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4348 bp = streamer_read_bitpack (ib);
4349 ii->polymorphic = bp_unpack_value (&bp, 1);
4350 ii->agg_contents = bp_unpack_value (&bp, 1);
4351 ii->member_ptr = bp_unpack_value (&bp, 1);
4352 ii->by_ref = bp_unpack_value (&bp, 1);
4353 ii->maybe_in_construction = bp_unpack_value (&bp, 1);
4354 ii->maybe_derived_type = bp_unpack_value (&bp, 1);
4355 if (ii->polymorphic)
4357 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4358 ii->otr_type = stream_read_tree (ib, data_in);
4359 ii->outer_type = stream_read_tree (ib, data_in);
4363 /* Stream out NODE info to OB. */
4365 static void
4366 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4368 int node_ref;
4369 lto_symtab_encoder_t encoder;
4370 struct ipa_node_params *info = IPA_NODE_REF (node);
4371 int j;
4372 struct cgraph_edge *e;
4373 struct bitpack_d bp;
4375 encoder = ob->decl_state->symtab_node_encoder;
4376 node_ref = lto_symtab_encoder_encode (encoder, node);
4377 streamer_write_uhwi (ob, node_ref);
4379 streamer_write_uhwi (ob, ipa_get_param_count (info));
4380 for (j = 0; j < ipa_get_param_count (info); j++)
4381 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4382 bp = bitpack_create (ob->main_stream);
4383 gcc_assert (info->uses_analysis_done
4384 || ipa_get_param_count (info) == 0);
4385 gcc_assert (!info->node_enqueued);
4386 gcc_assert (!info->ipcp_orig_node);
4387 for (j = 0; j < ipa_get_param_count (info); j++)
4388 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4389 streamer_write_bitpack (&bp);
4390 for (j = 0; j < ipa_get_param_count (info); j++)
4391 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4392 for (e = node->callees; e; e = e->next_callee)
4394 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4396 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4397 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4398 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4400 for (e = node->indirect_calls; e; e = e->next_callee)
4402 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4404 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4405 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4406 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4407 ipa_write_indirect_edge_info (ob, e);
4411 /* Stream in NODE info from IB. */
4413 static void
4414 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4415 struct data_in *data_in)
4417 struct ipa_node_params *info = IPA_NODE_REF (node);
4418 int k;
4419 struct cgraph_edge *e;
4420 struct bitpack_d bp;
4422 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4424 for (k = 0; k < ipa_get_param_count (info); k++)
4425 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4427 bp = streamer_read_bitpack (ib);
4428 if (ipa_get_param_count (info) != 0)
4429 info->uses_analysis_done = true;
4430 info->node_enqueued = false;
4431 for (k = 0; k < ipa_get_param_count (info); k++)
4432 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4433 for (k = 0; k < ipa_get_param_count (info); k++)
4434 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4435 for (e = node->callees; e; e = e->next_callee)
4437 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4438 int count = streamer_read_uhwi (ib);
4440 if (!count)
4441 continue;
4442 vec_safe_grow_cleared (args->jump_functions, count);
4444 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4445 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4446 data_in);
4448 for (e = node->indirect_calls; e; e = e->next_callee)
4450 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4451 int count = streamer_read_uhwi (ib);
4453 if (count)
4455 vec_safe_grow_cleared (args->jump_functions, count);
4456 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4457 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4458 data_in);
4460 ipa_read_indirect_edge_info (ib, data_in, e);
4464 /* Write jump functions for nodes in SET. */
4466 void
4467 ipa_prop_write_jump_functions (void)
4469 struct cgraph_node *node;
4470 struct output_block *ob;
4471 unsigned int count = 0;
4472 lto_symtab_encoder_iterator lsei;
4473 lto_symtab_encoder_t encoder;
4476 if (!ipa_node_params_vector.exists ())
4477 return;
4479 ob = create_output_block (LTO_section_jump_functions);
4480 encoder = ob->decl_state->symtab_node_encoder;
4481 ob->cgraph_node = NULL;
4482 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4483 lsei_next_function_in_partition (&lsei))
4485 node = lsei_cgraph_node (lsei);
4486 if (cgraph_function_with_gimple_body_p (node)
4487 && IPA_NODE_REF (node) != NULL)
4488 count++;
4491 streamer_write_uhwi (ob, count);
4493 /* Process all of the functions. */
4494 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4495 lsei_next_function_in_partition (&lsei))
4497 node = lsei_cgraph_node (lsei);
4498 if (cgraph_function_with_gimple_body_p (node)
4499 && IPA_NODE_REF (node) != NULL)
4500 ipa_write_node_info (ob, node);
4502 streamer_write_char_stream (ob->main_stream, 0);
4503 produce_asm (ob, NULL);
4504 destroy_output_block (ob);
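/* Illustrative sketch, not part of the original sources: the two-pass
   pattern used above, where a first walk counts the qualifying records
   so the reader knows up front how many to expect, and a second walk
   emits them.  OUT is a caller-provided buffer; the qualifying test
   here is a hypothetical stand-in.  */

static int
example_write_counted (int *out, const int *items, int n)
{
  int i, pos = 0, count = 0;

  for (i = 0; i < n; i++)
    if (items[i] != 0)          /* Stand-in for the real predicate.  */
      count++;
  out[pos++] = count;           /* The count precedes the records.  */

  for (i = 0; i < n; i++)
    if (items[i] != 0)
      out[pos++] = items[i];
  return pos;
}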
4507 /* Read section in file FILE_DATA of length LEN with data DATA. */
4509 static void
4510 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4511 size_t len)
4513 const struct lto_function_header *header =
4514 (const struct lto_function_header *) data;
4515 const int cfg_offset = sizeof (struct lto_function_header);
4516 const int main_offset = cfg_offset + header->cfg_size;
4517 const int string_offset = main_offset + header->main_size;
4518 struct data_in *data_in;
4519 struct lto_input_block ib_main;
4520 unsigned int i;
4521 unsigned int count;
4523 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4524 header->main_size);
4526 data_in =
4527 lto_data_in_create (file_data, (const char *) data + string_offset,
4528 header->string_size, vNULL);
4529 count = streamer_read_uhwi (&ib_main);
4531 for (i = 0; i < count; i++)
4533 unsigned int index;
4534 struct cgraph_node *node;
4535 lto_symtab_encoder_t encoder;
4537 index = streamer_read_uhwi (&ib_main);
4538 encoder = file_data->symtab_node_encoder;
4539 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4540 gcc_assert (node->definition);
4541 ipa_read_node_info (&ib_main, node, data_in);
4543 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4544 len);
4545 lto_data_in_delete (data_in);
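/* Illustrative sketch, not part of the original sources, of the section
   layout decoded above: a fixed header is followed by the CFG stream,
   the main stream and the string table, so each region begins where the
   previous one ends.  The header type here is a hypothetical stand-in
   for lto_function_header.  */

struct example_section_header
{
  int cfg_size;
  int main_size;
  int string_size;
};

static void
example_locate_streams (const char *data, const char **main_start,
                        const char **strings_start)
{
  const struct example_section_header *header
    = (const struct example_section_header *) data;
  const int cfg_offset = sizeof (struct example_section_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;

  *main_start = data + main_offset;
  *strings_start = data + string_offset;
}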
4548 /* Read IPA-CP jump functions. */
4550 void
4551 ipa_prop_read_jump_functions (void)
4553 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4554 struct lto_file_decl_data *file_data;
4555 unsigned int j = 0;
4557 ipa_check_create_node_params ();
4558 ipa_check_create_edge_args ();
4559 ipa_register_cgraph_hooks ();
4561 while ((file_data = file_data_vec[j++]))
4563 size_t len;
4564 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4566 if (data)
4567 ipa_prop_read_section (file_data, data, len);
4571 /* After merging units, we can get a mismatch in argument counts.
4572 Decl merging might also have rendered parameter lists obsolete.
4573 Also compute called_with_variable_arg info. */
4575 void
4576 ipa_update_after_lto_read (void)
4578 ipa_check_create_node_params ();
4579 ipa_check_create_edge_args ();
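/* Stream out the aggregate value replacement chain for NODE to OB. */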
4582 void
4583 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
4585 int node_ref;
4586 unsigned int count = 0;
4587 lto_symtab_encoder_t encoder;
4588 struct ipa_agg_replacement_value *aggvals, *av;
4590 aggvals = ipa_get_agg_replacements_for_node (node);
4591 encoder = ob->decl_state->symtab_node_encoder;
4592 node_ref = lto_symtab_encoder_encode (encoder, node);
4593 streamer_write_uhwi (ob, node_ref);
4595 for (av = aggvals; av; av = av->next)
4596 count++;
4597 streamer_write_uhwi (ob, count);
4599 for (av = aggvals; av; av = av->next)
4601 struct bitpack_d bp;
4603 streamer_write_uhwi (ob, av->offset);
4604 streamer_write_uhwi (ob, av->index);
4605 stream_write_tree (ob, av->value, true);
4607 bp = bitpack_create (ob->main_stream);
4608 bp_pack_value (&bp, av->by_ref, 1);
4609 streamer_write_bitpack (&bp);
4613 /* Stream in the aggregate value replacement chain for NODE from IB. */
4615 static void
4616 read_agg_replacement_chain (struct lto_input_block *ib,
4617 struct cgraph_node *node,
4618 struct data_in *data_in)
4620 struct ipa_agg_replacement_value *aggvals = NULL;
4621 unsigned int count, i;
4623 count = streamer_read_uhwi (ib);
4624 for (i = 0; i < count; i++)
4626 struct ipa_agg_replacement_value *av;
4627 struct bitpack_d bp;
4629 av = ggc_alloc_ipa_agg_replacement_value ();
4630 av->offset = streamer_read_uhwi (ib);
4631 av->index = streamer_read_uhwi (ib);
4632 av->value = stream_read_tree (ib, data_in);
4633 bp = streamer_read_bitpack (ib);
4634 av->by_ref = bp_unpack_value (&bp, 1);
4635 av->next = aggvals;
4636 aggvals = av;
4638 ipa_set_node_agg_value_chain (node, aggvals);
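/* Illustrative sketch, not part of the original sources: the chain
   above is built by prepending each record as it is read, which is
   constant time per element but leaves the list in the reverse of its
   stream order; presumably that is acceptable because consumers look
   values up by index and offset rather than by position.  NODES is
   caller-provided storage for COUNT elements; names are hypothetical.  */

struct example_chain_node
{
  int payload;
  struct example_chain_node *next;
};

static struct example_chain_node *
example_read_chain (struct example_chain_node *nodes, const int *records,
                    int count)
{
  struct example_chain_node *head = 0;
  int i;

  for (i = 0; i < count; i++)
    {
      nodes[i].payload = records[i];
      nodes[i].next = head;     /* Prepend: the newest record becomes head.  */
      head = &nodes[i];
    }
  return head;
}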
4641 /* Write all aggregate replacements for nodes in SET. */
4643 void
4644 ipa_prop_write_all_agg_replacement (void)
4646 struct cgraph_node *node;
4647 struct output_block *ob;
4648 unsigned int count = 0;
4649 lto_symtab_encoder_iterator lsei;
4650 lto_symtab_encoder_t encoder;
4652 if (!ipa_node_agg_replacements)
4653 return;
4655 ob = create_output_block (LTO_section_ipcp_transform);
4656 encoder = ob->decl_state->symtab_node_encoder;
4657 ob->cgraph_node = NULL;
4658 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4659 lsei_next_function_in_partition (&lsei))
4661 node = lsei_cgraph_node (lsei);
4662 if (cgraph_function_with_gimple_body_p (node)
4663 && ipa_get_agg_replacements_for_node (node) != NULL)
4664 count++;
4667 streamer_write_uhwi (ob, count);
4669 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4670 lsei_next_function_in_partition (&lsei))
4672 node = lsei_cgraph_node (lsei);
4673 if (cgraph_function_with_gimple_body_p (node)
4674 && ipa_get_agg_replacements_for_node (node) != NULL)
4675 write_agg_replacement_chain (ob, node);
4677 streamer_write_char_stream (ob->main_stream, 0);
4678 produce_asm (ob, NULL);
4679 destroy_output_block (ob);
4682 /* Read replacements section in file FILE_DATA of length LEN with data
4683 DATA. */
4685 static void
4686 read_replacements_section (struct lto_file_decl_data *file_data,
4687 const char *data,
4688 size_t len)
4690 const struct lto_function_header *header =
4691 (const struct lto_function_header *) data;
4692 const int cfg_offset = sizeof (struct lto_function_header);
4693 const int main_offset = cfg_offset + header->cfg_size;
4694 const int string_offset = main_offset + header->main_size;
4695 struct data_in *data_in;
4696 struct lto_input_block ib_main;
4697 unsigned int i;
4698 unsigned int count;
4700 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4701 header->main_size);
4703 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
4704 header->string_size, vNULL);
4705 count = streamer_read_uhwi (&ib_main);
4707 for (i = 0; i < count; i++)
4709 unsigned int index;
4710 struct cgraph_node *node;
4711 lto_symtab_encoder_t encoder;
4713 index = streamer_read_uhwi (&ib_main);
4714 encoder = file_data->symtab_node_encoder;
4715 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4716 gcc_assert (node->definition);
4717 read_agg_replacement_chain (&ib_main, node, data_in);
4719 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
4720 len);
4721 lto_data_in_delete (data_in);
4724 /* Read IPA-CP aggregate replacements. */
4726 void
4727 ipa_prop_read_all_agg_replacement (void)
4729 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4730 struct lto_file_decl_data *file_data;
4731 unsigned int j = 0;
4733 while ((file_data = file_data_vec[j++]))
4735 size_t len;
4736 const char *data = lto_get_section_data (file_data,
4737 LTO_section_ipcp_transform,
4738 NULL, &len);
4739 if (data)
4740 read_replacements_section (file_data, data, len);
4744 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4745 NODE. */
4747 static void
4748 adjust_agg_replacement_values (struct cgraph_node *node,
4749 struct ipa_agg_replacement_value *aggval)
4751 struct ipa_agg_replacement_value *v;
4752 int i, c = 0, d = 0, *adj;
4754 if (!node->clone.combined_args_to_skip)
4755 return;
4757 for (v = aggval; v; v = v->next)
4759 gcc_assert (v->index >= 0);
4760 if (c < v->index)
4761 c = v->index;
4763 c++;
4765 adj = XALLOCAVEC (int, c);
4766 for (i = 0; i < c; i++)
4767 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
4769 adj[i] = -1;
4770 d++;
4772 else
4773 adj[i] = i - d;
4775 for (v = aggval; v; v = v->next)
4776 v->index = adj[v->index];
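/* Illustrative sketch, not part of the original sources, of the index
   remapping built above: positions flagged as skipped map to -1 and
   every surviving position is shifted down by the number of skipped
   positions that precede it.  SKIPPED is a hypothetical stand-in for
   the combined_args_to_skip bitmap; ADJ must hold N entries.  */

static void
example_build_index_remap (const int *skipped, int n, int *adj)
{
  int i, d = 0;

  for (i = 0; i < n; i++)
    if (skipped[i])
      {
        adj[i] = -1;            /* The parameter was removed in the clone.  */
        d++;
      }
    else
      adj[i] = i - d;           /* Shift past the removed parameters.  */
}

/* E.g. with skipped = {0, 1, 0, 0}, the remap is {0, -1, 1, 2}.  */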
4780 /* Function body transformation phase. */
4782 unsigned int
4783 ipcp_transform_function (struct cgraph_node *node)
4785 vec<ipa_param_descriptor> descriptors = vNULL;
4786 struct param_analysis_info *parms_ainfo;
4787 struct ipa_agg_replacement_value *aggval;
4788 gimple_stmt_iterator gsi;
4789 basic_block bb;
4790 int param_count;
4791 bool cfg_changed = false, something_changed = false;
4793 gcc_checking_assert (cfun);
4794 gcc_checking_assert (current_function_decl);
4796 if (dump_file)
4797 fprintf (dump_file, "Modification phase of node %s/%i\n",
4798 node->name (), node->order);
4800 aggval = ipa_get_agg_replacements_for_node (node);
4801 if (!aggval)
4802 return 0;
4803 param_count = count_formal_params (node->decl);
4804 if (param_count == 0)
4805 return 0;
4806 adjust_agg_replacement_values (node, aggval);
4807 if (dump_file)
4808 ipa_dump_agg_replacement_values (dump_file, aggval);
4809 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
4810 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
4811 descriptors.safe_grow_cleared (param_count);
4812 ipa_populate_param_decls (node, descriptors);
4814 FOR_EACH_BB_FN (bb, cfun)
4815 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4817 struct ipa_agg_replacement_value *v;
4818 gimple stmt = gsi_stmt (gsi);
4819 tree rhs, val, t;
4820 HOST_WIDE_INT offset, size;
4821 int index;
4822 bool by_ref, vce;
4824 if (!gimple_assign_load_p (stmt))
4825 continue;
4826 rhs = gimple_assign_rhs1 (stmt);
4827 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
4828 continue;
4830 vce = false;
4831 t = rhs;
4832 while (handled_component_p (t))
4834 /* V_C_E can do things like convert an array of integers to one
4835 bigger integer and other things that we do not handle below. */
4836 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
4838 vce = true;
4839 break;
4841 t = TREE_OPERAND (t, 0);
4843 if (vce)
4844 continue;
4846 if (!ipa_load_from_parm_agg_1 (descriptors, parms_ainfo, stmt,
4847 rhs, &index, &offset, &size, &by_ref))
4848 continue;
4849 for (v = aggval; v; v = v->next)
4850 if (v->index == index
4851 && v->offset == offset)
4852 break;
4853 if (!v
4854 || v->by_ref != by_ref
4855 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
4856 continue;
4858 gcc_checking_assert (is_gimple_ip_invariant (v->value));
4859 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
4861 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
4862 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
4863 else if (TYPE_SIZE (TREE_TYPE (rhs))
4864 == TYPE_SIZE (TREE_TYPE (v->value)))
4865 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
4866 else
4868 if (dump_file)
4870 fprintf (dump_file, " const ");
4871 print_generic_expr (dump_file, v->value, 0);
4872 fprintf (dump_file, " can't be converted to type of ");
4873 print_generic_expr (dump_file, rhs, 0);
4874 fprintf (dump_file, "\n");
4876 continue;
4879 else
4880 val = v->value;
4882 if (dump_file && (dump_flags & TDF_DETAILS))
4884 fprintf (dump_file, "Modifying stmt:\n ");
4885 print_gimple_stmt (dump_file, stmt, 0, 0);
4887 gimple_assign_set_rhs_from_tree (&gsi, val);
4888 update_stmt (stmt);
4890 if (dump_file && (dump_flags & TDF_DETAILS))
4892 fprintf (dump_file, "into:\n ");
4893 print_gimple_stmt (dump_file, stmt, 0, 0);
4894 fprintf (dump_file, "\n");
4897 something_changed = true;
4898 if (maybe_clean_eh_stmt (stmt)
4899 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4900 cfg_changed = true;
4903 (*ipa_node_agg_replacements)[node->uid] = NULL;
4904 free_parms_ainfo (parms_ainfo, param_count);
4905 descriptors.release ();
4907 if (!something_changed)
4908 return 0;
4909 else if (cfg_changed)
4910 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
4911 else
4912 return TODO_update_ssa_only_virtuals;
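/* Illustrative sketch, not part of the original sources, of the lookup
   that drives the replacement loop above: a known aggregate value is
   only substituted for a load when the parameter index, the offset, the
   pass-by-reference mode and the size of the accessed bits all agree.
   The stand-in type below is a hypothetical simplification of
   ipa_agg_replacement_value.  */

struct example_agg_value
{
  int index;                    /* Index of the parameter.  */
  long long offset;             /* Offset of the value, in bits.  */
  long long size;               /* Size of the value, in bits.  */
  int by_ref;                   /* Whether the aggregate is by reference.  */
  struct example_agg_value *next;
};

static const struct example_agg_value *
example_find_agg_value (const struct example_agg_value *list, int index,
                        long long offset, long long size, int by_ref)
{
  const struct example_agg_value *v;

  for (v = list; v; v = v->next)
    if (v->index == index && v->offset == offset)
      break;
  if (!v || v->by_ref != by_ref || v->size != size)
    return 0;
  return v;
}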