/* Interprocedural analyses.
   Copyright (C) 2005-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "expr.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "langhooks.h"
#include "target.h"
#include "ipa-prop.h"
#include "bitmap.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "params.h"
#include "ipa-utils.h"

/* Intermediate information about a parameter that is only useful during the
   run of ipa_analyze_node and is not kept afterwards.  */

struct param_analysis_info
{
  bool parm_modified, ref_modified, pt_modified;
  bitmap parm_visited_statements, pt_visited_statements;
};

/* Vector where the parameter infos are actually stored.  */
vec<ipa_node_params> ipa_node_params_vector;
/* Vector of known aggregate values in cloned nodes.  */
vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
/* Vector where the edge argument infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static alloc_pool ipa_refdesc_pool;

/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
  struct cl_optimization *os;

  if (!fs_opts)
    return false;
  os = TREE_OPTIMIZATION (fs_opts);
  return !os->x_optimize || !os->x_flag_ipa_cp;
}
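
/* For illustration, a per-function override in user source (a hypothetical
   example, not taken from GCC's sources) such as

     int __attribute__ ((optimize ("O0"))) cold_path (int x)
     {
       return x + 1;
     }

   attaches DECL_FUNCTION_SPECIFIC_OPTIMIZATION to cold_path with
   x_optimize == 0, so the predicate above makes IPA-CP skip the function.  */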

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}

/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm));
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

static inline int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Dump the textual representation of the Ith formal parameter of the function
   corresponding to INFO to FILE.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}

/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}

/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_KNOWN_TYPE)
	{
	  fprintf (f, "KNOWN TYPE: base ");
	  print_generic_expr (f, jump_func->value.known_type.base_type, 0);
	  fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
		   jump_func->value.known_type.offset);
	  print_generic_expr (f, jump_func->value.known_type.component_type, 0);
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val, 0);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
				  0);
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f,
				  jump_func->value.pass_through.operand, 0);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  if (jump_func->value.pass_through.type_preserved)
	    fprintf (f, ", type_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  print_generic_expr (f, jump_func->value.ancestor.type, 0);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  if (jump_func->value.ancestor.type_preserved)
	    fprintf (f, ", type_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value, 0);
		}
	      fprintf (f, "\n");
	    }
	}
    }
}

/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s/%i:\n", node->name (),
	   node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
	       xstrdup (node->name ()), node->order,
	       xstrdup (cs->callee->name ()),
	       cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i",
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

/* Set JFUNC to be a known type jump function.  */

static void
ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		       tree base_type, tree component_type)
{
  gcc_assert (TREE_CODE (component_type) == RECORD_TYPE
	      && TYPE_BINFO (component_type));
  jfunc->type = IPA_JF_KNOWN_TYPE;
  jfunc->value.known_type.offset = offset;
  jfunc->value.known_type.base_type = base_type;
  jfunc->value.known_type.component_type = component_type;
  gcc_assert (component_type);
}

/* Set JFUNC to be a copy of another jump function (to be used by jump function
   combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;
      if (!ipa_refdesc_pool)
	ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
					      sizeof (struct ipa_cst_ref_desc), 32);

      rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}

/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved, bool type_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
  jfunc->value.pass_through.type_preserved = type_preserved;
}

/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
  jfunc->value.pass_through.type_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     tree type, int formal_id, bool agg_preserved,
		     bool type_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.type = type;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
  jfunc->value.ancestor.type_preserved = type_preserved;
}

/* Extract the actual BINFO being described by JFUNC which must be a known type
   jump function.  */

tree
ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
{
  tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
  if (!base_binfo)
    return NULL_TREE;
  return get_binfo_at_offset (base_binfo,
			      jfunc->value.known_type.offset,
			      jfunc->value.known_type.component_type);
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  else if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
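
/* To make the above concrete, the store this predicate must not miss is the
   vtable pointer assignment emitted near the start of every constructor body;
   in a GIMPLE dump it typically looks like (a hand-written sketch, names and
   offsets depend on the class):

     this_2(D)->_vptr.A = &_ZTV1A + 16;

   where _ZTV1A is the vtable of A.  DECL_VIRTUAL_P is set on the _vptr field,
   which is why the COMPONENT_REF check above lets such statements through.  */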

/* If STMT can be proved to be an assignment to the virtual method table
   pointer of ANALYZED_OBJ and the type associated with the new table
   identified, return the type.  Otherwise return NULL_TREE.  */

static tree
extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
{
  HOST_WIDE_INT offset, size, max_size;
  tree lhs, rhs, base;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
      || TREE_CODE (rhs) != ADDR_EXPR)
    return NULL_TREE;
  rhs = get_base_address (TREE_OPERAND (rhs, 0));
  if (!rhs
      || TREE_CODE (rhs) != VAR_DECL
      || !DECL_VIRTUAL_P (rhs))
    return NULL_TREE;

  base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
  if (offset != tci->offset
      || size != POINTER_SIZE
      || max_size != POINTER_SIZE)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (TREE_CODE (tci->object) != MEM_REF
	  || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
	  || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
				  TREE_OPERAND (base, 1)))
	return NULL_TREE;
    }
  else if (tci->object != base)
    return NULL_TREE;

  return DECL_CONTEXT (rhs);
}

/* Callback of walk_aliased_vdefs and a helper function for
   detect_type_change to check whether a particular statement may modify
   the virtual table pointer, and if possible also determine the new type of
   the (sub-)object.  It stores its result into DATA, which points to a
   type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct type_change_info *tci = (struct type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tree type;
      type = extr_type_from_vtbl_ptr_store (stmt, tci);
      if (tci->type_maybe_changed
	  && type != tci->known_current_type)
	tci->multiple_types_encountered = true;
      tci->known_current_type = type;
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it has, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gimple call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  struct type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));
  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (comp_type)
      || !BINFO_VTABLE (TYPE_BINFO (comp_type)))
    return false;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.known_current_type = NULL_TREE;
  tci.type_maybe_changed = false;
  tci.multiple_types_encountered = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL);
  if (!tci.type_maybe_changed)
    return false;

  if (!tci.known_current_type
      || tci.multiple_types_encountered
      || offset != 0)
    jfunc->type = IPA_JF_UNKNOWN;
  else
    ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);

  return true;
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gimple call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change (arg, arg, comp_type, call, jfunc, 0);
}

/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  PARM_AINFO is a pointer to a structure containing temporary
   information about the parameter.  */

static bool
parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
			      gimple stmt, tree parm_load)
{
  bool modified = false;
  bitmap *visited_stmts;
  ao_ref refd;

  if (parm_ainfo && parm_ainfo->parm_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  /* We can cache visited statements only when parm_ainfo is available and when
     we are looking at a naked load of the whole parameter.  */
  if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
    visited_stmts = NULL;
  else
    visited_stmts = &parm_ainfo->parm_visited_statements;
  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
		      visited_stmts);
  if (parm_ainfo && modified)
    parm_ainfo->parm_modified = true;
  return !modified;
}

/* If STMT is an assignment that loads a value from a parameter declaration
   which has not been modified, return the index of the parameter in
   ipa_node_params.  Otherwise return -1.  */

static int
load_from_unmodified_param (vec<ipa_param_descriptor> descriptors,
			    struct param_analysis_info *parms_ainfo,
			    gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
					: NULL, stmt, op1))
    return -1;

  return index;
}

/* Return true if memory reference REF loads data that are known to be
   unmodified in this function before reaching statement STMT.  PARM_AINFO, if
   non-NULL, is a pointer to a structure containing temporary information about
   PARM.  */

static bool
parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
			   gimple stmt, tree ref)
{
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (gimple_vuse (stmt));
  if (parm_ainfo && parm_ainfo->ref_modified)
    return false;

  ao_ref_init (&refd, ref);
  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
		      NULL);
  if (parm_ainfo && modified)
    parm_ainfo->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM is known to be unmodified in this
   function before reaching call statement CALL into which it is passed.
   PARM_AINFO is a pointer to a structure containing temporary information
   about PARM.  */

static bool
parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
			      gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm)))
    return false;

  if (parm_ainfo->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
		      parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
  if (modified)
    parm_ainfo->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

static bool
ipa_load_from_parm_agg_1 (vec<ipa_param_descriptor> descriptors,
			  struct param_analysis_info *parms_ainfo, gimple stmt,
			  tree op, int *index_p, HOST_WIDE_INT *offset_p,
			  HOST_WIDE_INT *size_p, bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
					   : NULL, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	 void (*<T2e4>) (struct S *) D.1867;
	 struct S * p.1;

	 <bb 2>:
	 p.1_1 = p;
	 D.1867_2 = p.1_1->f;
	 D.1867_2 ();
	 gdp = &p;

      */
      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (descriptors, parms_ainfo, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
				    stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      return true;
    }
  return false;
}

/* Just like the previous function, just without the param_analysis_info
   pointer, for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
			tree op, int *index_p, HOST_WIDE_INT *offset_p,
			bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (info->descriptors, NULL, stmt, op, index_p,
				   offset_p, NULL, by_ref_p);
}

/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
        int a.0;

        a.0_2 = a;
        bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
        int D.2064;

        D.2064_4 = a.1(D) + 4;
        bar (D.2064_4);

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
        int a.0;
        int D.2064;

        a.0_3 = a;
        D.2064_4 = a.0_3 + 4;
        foo (D.2064_4);

   3) The passed value is an address of an object within another one (which is
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters, used at different
   stages of IPA optimizations.  PARMS_AINFO contains the information that is
   only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_node_params *info,
				  struct param_analysis_info *parms_ainfo,
				  struct ipa_jump_func *jfunc,
				  gimple call, gimple stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (info->descriptors, parms_ainfo,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (info->descriptors, parms_ainfo, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
	{
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  ipa_set_jf_arith_pass_through (jfunc, index, op2,
					 gimple_assign_rhs_code (stmt));
	}
      else if (gimple_assign_single_p (stmt))
	{
	  bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
						     call, tc_ssa);
	  bool type_p = false;

	  if (param_type && POINTER_TYPE_P (param_type))
	    type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
					      call, jfunc);
	  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).low * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    {
      bool type_p = !detect_type_change (op1, base, TREE_TYPE (param_type),
					 call, jfunc, offset);
      if (type_p || jfunc->type == IPA_JF_UNKNOWN)
	ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index,
			     parm_ref_data_pass_through_p (&parms_ainfo[index],
							   call, ssa), type_p);
    }
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}

/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

     <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

     <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_node_params *info,
				    struct param_analysis_info *parms_ainfo,
				    struct ipa_jump_func *jfunc,
				    gimple call, gimple phi, tree param_type)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  gcc_assert (index >= 0);

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  bool type_p = false;
  if (param_type && POINTER_TYPE_P (param_type))
    type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
				  call, jfunc, offset);
  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
    ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index,
			 parm_ref_data_pass_through_p (&parms_ainfo[index],
						       call, parm), type_p);
}

/* Given OP which is passed as an actual argument to a called function,
   determine if it is possible to construct a KNOWN_TYPE jump function for it
   and if so, create one and store it to JFUNC.
   EXPECTED_TYPE represents a type the argument should be in.  */

static void
compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
			      gimple call, tree expected_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  if (!flag_devirtualize
      || TREE_CODE (op) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE
      /* Be sure expected_type is polymorphic.  */
      || !expected_type
      || TREE_CODE (expected_type) != RECORD_TYPE
      || !TYPE_BINFO (expected_type)
      || !BINFO_VTABLE (TYPE_BINFO (expected_type)))
    return;

  op = TREE_OPERAND (op, 0);
  base = get_ref_base_and_extent (op, &offset, &size, &max_size);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
      || is_global_var (base))
    return;

  if (detect_type_change (op, base, expected_type, call, jfunc, offset))
    return;

  ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
			 expected_type);
}

/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  /* The second field must be the integral delta adjustment; note the check is
     on the field's type, not the FIELD_DECL itself.  */
  if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
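
/* For reference, the layout being matched is the usual Itanium C++ ABI
   pointer-to-member-function record (a sketch, the field names are only
   illustrative):

     struct
     {
       void (T::*__pfn) ();   <-- pointer to method, checked first
       ptrdiff_t __delta;     <-- integral adjustment to `this', checked second
     };
*/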

/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}

/* Simple linked list, describing known contents of an aggregate before a
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};

/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  JFUNC is the jump function into
   which the constants are subsequently stored.  */
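
/* For instance (a hand-written sketch of the situation being detected), given

     struct S s;
     s.a = 1;
     s.b = 16;
     foo (&s);

   the backward walk below records two constant items for the jump function of
   the first argument of foo, at bit offsets 0 and 32 (assuming 32-bit int
   fields), with by_ref set.  */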

static void
determine_known_aggregate_parts (gimple call, tree arg,
				 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (TREE_TYPE (arg)))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)))))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  HOST_WIDE_INT arg_max_size;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					      &arg_max_size);
	  if (arg_max_size == -1
	      || arg_max_size != arg_size
	      || arg_offset < 0)
	    return;
	  if (DECL_P (arg_base))
	    {
	      tree size;
	      check_ref = false;
	      size = build_int_cst (integer_type_node, arg_size);
	      ao_ref_init_from_ptr_and_size (&r, arg_base, size);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					  &arg_max_size);
      if (arg_max_size == -1
	  || arg_max_size != arg_size
	  || arg_offset < 0)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
     structures describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;
      bool partial_overlap;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
	continue;
      if (!gimple_assign_single_p (stmt))
	break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
	  || TREE_CODE (lhs) == BIT_FIELD_REF
	  || contains_bitfld_component_ref_p (lhs))
	break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
					  &lhs_max_size);
      if (lhs_max_size == -1
	  || lhs_max_size != lhs_size
	  || (lhs_offset < arg_offset
	      && lhs_offset + lhs_size > arg_offset)
	  || (lhs_offset < arg_offset + arg_size
	      && lhs_offset + lhs_size > arg_offset + arg_size))
	break;

      if (check_ref)
	{
	  if (TREE_CODE (lhs_base) != MEM_REF
	      || TREE_OPERAND (lhs_base, 0) != arg_base
	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	    break;
	}
      else if (lhs_base != arg_base)
	{
	  if (DECL_P (lhs_base))
	    continue;
	  else
	    break;
	}

      if (lhs_offset + lhs_size < arg_offset
	  || lhs_offset >= (arg_offset + arg_size))
	continue;

      partial_overlap = false;
      p = &list;
      while (*p && (*p)->offset < lhs_offset)
	{
	  if ((*p)->offset + (*p)->size > lhs_offset)
	    {
	      partial_overlap = true;
	      break;
	    }
	  p = &(*p)->next;
	}
      if (partial_overlap)
	break;
      if (*p && (*p)->offset < lhs_offset + lhs_size)
	{
	  if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	    /* We already know this value is subsequently overwritten with
	       something else.  */
	    continue;
	  else
	    /* Otherwise this is a partial overlap which we cannot
	       represent.  */
	    break;
	}

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
	{
	  n->constant = rhs;
	  const_count++;
	}
      else
	n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      vec_alloc (jfunc->agg.items, const_count);
      while (list)
	{
	  if (list->constant)
	    {
	      struct ipa_agg_jf_item item;
	      item.offset = list->offset - arg_offset;
	      gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	      item.value = unshare_expr_without_location (list->constant);
	      jfunc->agg.items->quick_push (item);
	    }
	  list = list->next;
	}
    }
}
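
/* Return the declared type of the Ith formal parameter of the callee of call
   graph edge E, looking first at the function type and then at the actual
   argument decls, or NULL if the type cannot be determined.  */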

static tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}

/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gimple call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);

      if (is_gimple_ip_invariant (arg))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >= 0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p, type_p;
		  agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
							call, arg);
		  if (param_type && POINTER_TYPE_P (param_type))
		    type_p = !detect_type_change_ssa (arg, TREE_TYPE (param_type),
						      call, jfunc);
		  else
		    type_p = false;
		  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
		    ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
						    type_p);
		}
	    }
	  else
	    {
	      gimple stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
						    call, stmt, param_type);
	    }
	}
      else
	compute_known_type_jump_func (arg, jfunc, call,
				      param_type
				      && POINTER_TYPE_P (param_type)
				      ? TREE_TYPE (param_type)
				      : NULL);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || (POINTER_TYPE_P (TREE_TYPE (arg)))))
	determine_known_aggregate_parts (call, arg, jfunc);
    }
}
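
/* As a worked example (a hypothetical caller, not from GCC's sources), for

     void caller (int x, struct S *p)
     {
       callee (7, x, p);
     }

   the loop above gives argument 0 an IPA_JF_CONST jump function, argument 1 a
   simple IPA_JF_PASS_THROUGH (x is a default-definition SSA name of a
   parameter), and argument 2 a pass-through that may in addition carry known
   aggregate contents discovered by determine_known_aggregate_parts.  */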

/* Compute jump functions for all edges - both direct and indirect - outgoing
   from NODE.  Also count the actual arguments in the process.  */

static void
ipa_compute_jump_functions (struct cgraph_node *node,
			    struct param_analysis_info *parms_ainfo)
{
  struct cgraph_edge *cs;

  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
								  NULL);
      /* We do not need to bother analyzing calls to unknown
	 functions unless they may become known during lto/whopr.  */
      if (!callee->definition && !flag_lto)
	continue;
      ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
}

/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}
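
/* E.g. given the lowered form of the load `f$__pfn_24 = f.__pfn;' from the
   pattern documented before ipa_analyze_indirect_call_uses below, this
   returns the PARM_DECL `f' (with USE_DELTA false) and stores the bit
   position of the pfn field to *OFFSET_P.  */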

/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}

/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
{
  struct cgraph_edge *cs;

  cs = cgraph_edge (node, stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  return cs;
}

/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

     <bb 2>:
     f$__delta_5 = f.__delta;
     f$__pfn_24 = f.__pfn;

   or
     <bb 2>:
     f$__delta_5 = MEM[(struct  *)&f];
     f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and a few lines below:

     <bb 5>
     D.2496_3 = (int) f$__pfn_24;
     D.2497_4 = D.2496_3 & 1;
     if (D.2497_4 != 0)
       goto <bb 3>;
     else
       goto <bb 4>;

     <bb 6>:
     D.2500_7 = (unsigned int) f$__delta_5;
     D.2501_8 = &S + D.2500_7;
     D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
     D.2503_10 = *D.2502_9;
     D.2504_12 = f$__pfn_24 + -1;
     D.2505_13 = (unsigned int) D.2504_12;
     D.2506_14 = D.2503_10 + D.2505_13;
     D.2507_15 = *D.2506_14;
     iftmp.11_16 = (String:: *) D.2507_15;

     <bb 7>:
     # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
     D.2500_19 = (unsigned int) f$__delta_5;
     D.2508_20 = &S + D.2500_19;
     D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */

static void
ipa_analyze_indirect_call_uses (struct cgraph_node *node,
				struct ipa_node_params *info,
				struct param_analysis_info *parms_ainfo,
				gimple call, tree target)
{
  gimple def;
  tree n1, n2;
  gimple d1, d2;
  tree rec, rec2, cond;
  gimple branch;
  int index;
  basic_block bb, virt_bb, join;
  HOST_WIDE_INT offset;
  bool by_ref;

  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
	ipa_note_param_call (node, index, call);
      return;
    }

  def = SSA_NAME_DEF_STMT (target);
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg_1 (info->descriptors, parms_ainfo, def,
				   gimple_assign_rhs1 (def), &index, &offset,
				   NULL, &by_ref))
    {
      struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
      if (cs->indirect_info->offset != offset)
	cs->indirect_info->outer_type = NULL;
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  n1 = PHI_ARG_DEF (def, 0);
  n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  d1 = SSA_NAME_DEF_STMT (n1);
  d2 = SSA_NAME_DEF_STMT (n2);

  join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
	return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta),
					     NULL);
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
      if (cs->indirect_info->offset != offset)
	cs->indirect_info->outer_type = NULL;
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->member_ptr = 1;
    }

  return;
}
1959 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1960 object referenced in the expression is a formal parameter of the caller
1961 (described by INFO), create a call note for the statement. */
1963 static void
1964 ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1965 struct ipa_node_params *info, gimple call,
1966 tree target)
1968 struct cgraph_edge *cs;
1969 struct cgraph_indirect_call_info *ii;
1970 struct ipa_jump_func jfunc;
1971 tree obj = OBJ_TYPE_REF_OBJECT (target);
1972 int index;
1973 HOST_WIDE_INT anc_offset;
1975 if (!flag_devirtualize)
1976 return;
1978 if (TREE_CODE (obj) != SSA_NAME)
1979 return;
1981 if (SSA_NAME_IS_DEFAULT_DEF (obj))
1983 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
1984 return;
1986 anc_offset = 0;
1987 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
1988 gcc_assert (index >= 0);
1989 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
1990 call, &jfunc))
1991 return;
1993 else
1995 gimple stmt = SSA_NAME_DEF_STMT (obj);
1996 tree expr;
1998 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
1999 if (!expr)
2000 return;
2001 index = ipa_get_param_decl_index (info,
2002 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2003 gcc_assert (index >= 0);
2004 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2005 call, &jfunc, anc_offset))
2006 return;
2009 cs = ipa_note_param_call (node, index, call);
2010 ii = cs->indirect_info;
2011 ii->offset = anc_offset;
2012 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2013 ii->otr_type = obj_type_ref_class (target);
2014 ii->polymorphic = 1;
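/* An illustrative sketch (hedged; the wrapper below is made up, not taken
   from a real dump): a function such as

     int call_foo (struct A *a) { return a->foo (); }

   reaches this point with TARGET being an OBJ_TYPE_REF whose object is the
   default definition of the parameter, so the first branch above attaches a
   polymorphic indirect call note to formal parameter 0 with a zero ancestor
   offset, unless detect_type_change_ssa vetoes it.  */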
2017 /* Analyze a call statement CALL to determine whether and how it utilizes
2018 formal parameters of the caller (described by INFO). PARMS_AINFO is a
2019 pointer to a vector containing intermediate information about each formal parameter. */
2021 static void
2022 ipa_analyze_call_uses (struct cgraph_node *node,
2023 struct ipa_node_params *info,
2024 struct param_analysis_info *parms_ainfo, gimple call)
2026 tree target = gimple_call_fn (call);
2027 struct cgraph_edge *cs;
2029 if (!target
2030 || (TREE_CODE (target) != SSA_NAME
2031 && !virtual_method_call_p (target)))
2032 return;
2034 /* If we previously turned the call into a direct call, there is
2035 no need to analyze. */
2036 cs = cgraph_edge (node, call);
2037 if (cs && !cs->indirect_unknown_callee)
2038 return;
2039 if (TREE_CODE (target) == SSA_NAME)
2040 ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
2041 else if (virtual_method_call_p (target))
2042 ipa_analyze_virtual_call_uses (node, info, call, target);
2046 /* Analyze the call statement STMT with respect to formal parameters (described
2047 in INFO) of the caller given by NODE. Currently it only checks whether formal
2048 parameters are called. PARMS_AINFO is a pointer to a vector containing
2049 intermediate information about each formal parameter. */
2051 static void
2052 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
2053 struct param_analysis_info *parms_ainfo, gimple stmt)
2055 if (is_gimple_call (stmt))
2056 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
2059 /* Callback of walk_stmt_load_store_addr_ops, used as the visit_load,
2060 visit_store and visit_addr hook. If OP is a parameter declaration, mark it
2061 as used in the info structure passed in DATA. */
2063 static bool
2064 visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
2066 struct ipa_node_params *info = (struct ipa_node_params *) data;
2068 op = get_base_address (op);
2069 if (op
2070 && TREE_CODE (op) == PARM_DECL)
2072 int index = ipa_get_param_decl_index (info, op);
2073 gcc_assert (index >= 0);
2074 ipa_set_param_used (info, index, true);
2077 return false;
2080 /* Scan the function body of NODE and inspect the uses of formal parameters.
2081 Store the findings in various structures of the associated ipa_node_params
2082 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
2083 vector containing intermediate information about each formal parameter. */
2085 static void
2086 ipa_analyze_params_uses (struct cgraph_node *node,
2087 struct param_analysis_info *parms_ainfo)
2089 tree decl = node->decl;
2090 basic_block bb;
2091 struct function *func;
2092 gimple_stmt_iterator gsi;
2093 struct ipa_node_params *info = IPA_NODE_REF (node);
2094 int i;
2096 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
2097 return;
2099 info->uses_analysis_done = 1;
2100 if (ipa_func_spec_opts_forbid_analysis_p (node))
2102 for (i = 0; i < ipa_get_param_count (info); i++)
2104 ipa_set_param_used (info, i, true);
2105 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2107 return;
2110 for (i = 0; i < ipa_get_param_count (info); i++)
2112 tree parm = ipa_get_param (info, i);
2113 int controlled_uses = 0;
2115 /* For SSA regs see if parameter is used. For non-SSA we compute
2116 the flag during modification analysis. */
2117 if (is_gimple_reg (parm))
2119 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2120 parm);
2121 if (ddef && !has_zero_uses (ddef))
2123 imm_use_iterator imm_iter;
2124 use_operand_p use_p;
2126 ipa_set_param_used (info, i, true);
2127 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2128 if (!is_gimple_call (USE_STMT (use_p)))
2130 if (!is_gimple_debug (USE_STMT (use_p)))
2132 controlled_uses = IPA_UNDESCRIBED_USE;
2133 break;
2136 else
2137 controlled_uses++;
2139 else
2140 controlled_uses = 0;
2142 else
2143 controlled_uses = IPA_UNDESCRIBED_USE;
2144 ipa_set_controlled_uses (info, i, controlled_uses);
2147 func = DECL_STRUCT_FUNCTION (decl);
2148 FOR_EACH_BB_FN (bb, func)
2150 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2152 gimple stmt = gsi_stmt (gsi);
2154 if (is_gimple_debug (stmt))
2155 continue;
2157 ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
2158 walk_stmt_load_store_addr_ops (stmt, info,
2159 visit_ref_for_mod_analysis,
2160 visit_ref_for_mod_analysis,
2161 visit_ref_for_mod_analysis);
2163 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2164 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
2165 visit_ref_for_mod_analysis,
2166 visit_ref_for_mod_analysis,
2167 visit_ref_for_mod_analysis);
2171 /* Free stuff in PARMS_AINFO, assuming there are PARAM_COUNT parameters. */
2173 static void
2174 free_parms_ainfo (struct param_analysis_info *parms_ainfo, int param_count)
2176 int i;
2178 for (i = 0; i < param_count; i++)
2180 if (parms_ainfo[i].parm_visited_statements)
2181 BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
2182 if (parms_ainfo[i].pt_visited_statements)
2183 BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
2187 /* Initialize the array describing properties of formal parameters
2188 of NODE, analyze their uses and compute jump functions associated
2189 with actual arguments of calls from within NODE. */
2191 void
2192 ipa_analyze_node (struct cgraph_node *node)
2194 struct ipa_node_params *info;
2195 struct param_analysis_info *parms_ainfo;
2196 int param_count;
2198 ipa_check_create_node_params ();
2199 ipa_check_create_edge_args ();
2200 info = IPA_NODE_REF (node);
2201 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
2202 ipa_initialize_node_params (node);
2204 param_count = ipa_get_param_count (info);
2205 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
2206 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
2208 ipa_analyze_params_uses (node, parms_ainfo);
2209 ipa_compute_jump_functions (node, parms_ainfo);
2211 free_parms_ainfo (parms_ainfo, param_count);
2212 pop_cfun ();
2215 /* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF
2216 attempt a type-based devirtualization. If successful, return the
2217 target function declaration, otherwise return NULL. */
2219 tree
2220 ipa_intraprocedural_devirtualization (gimple call)
2222 tree binfo, token, fndecl;
2223 struct ipa_jump_func jfunc;
2224 tree otr = gimple_call_fn (call);
2226 jfunc.type = IPA_JF_UNKNOWN;
2227 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
2228 call, obj_type_ref_class (otr));
2229 if (jfunc.type != IPA_JF_KNOWN_TYPE)
2230 return NULL_TREE;
2231 binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
2232 if (!binfo)
2233 return NULL_TREE;
2234 token = OBJ_TYPE_REF_TOKEN (otr);
2235 fndecl = gimple_get_virt_method_for_binfo (tree_to_uhwi (token),
2236 binfo);
2237 #ifdef ENABLE_CHECKING
2238 if (fndecl)
2239 gcc_assert (possible_polymorphic_call_target_p
2240 (otr, cgraph_get_node (fndecl)));
2241 #endif
2242 return fndecl;
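/* A hedged usage sketch (not a quote of any particular caller): a pass
   holding a GIMPLE_CALL statement STMT whose callee is an OBJ_TYPE_REF
   might fold the virtual call into a direct one like this:

     tree fndecl = ipa_intraprocedural_devirtualization (stmt);
     if (fndecl)
       {
         gimple_call_set_fndecl (stmt, fndecl);
         update_stmt (stmt);
       }

   Fixing up the corresponding call graph edge remains the caller's
   responsibility.  */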
2245 /* Update the jump function DST when the call graph edge corresponding to SRC
2246 is being inlined, knowing that DST is of type ancestor and SRC of known
2247 type. */
2249 static void
2250 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2251 struct ipa_jump_func *dst)
2253 HOST_WIDE_INT combined_offset;
2254 tree combined_type;
2256 if (!ipa_get_jf_ancestor_type_preserved (dst))
2258 dst->type = IPA_JF_UNKNOWN;
2259 return;
2262 combined_offset = ipa_get_jf_known_type_offset (src)
2263 + ipa_get_jf_ancestor_offset (dst);
2264 combined_type = ipa_get_jf_ancestor_type (dst);
2266 ipa_set_jf_known_type (dst, combined_offset,
2267 ipa_get_jf_known_type_base_type (src),
2268 combined_type);
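/* A worked example with assumed numbers: if SRC says "known type B at
   offset 32" and DST is an ancestor jump function adding offset 64 and
   converting to type D, the code above turns DST into a known type jump
   function at offset 96, keeping the base type of SRC and using D as the
   component type.  */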
2271 /* Update the jump functions associated with call graph edge E when the call
2272 graph edge CS is being inlined, assuming that E->caller is already (possibly
2273 indirectly) inlined into CS->callee and that E has not been inlined. */
2275 static void
2276 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2277 struct cgraph_edge *e)
2279 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2280 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2281 int count = ipa_get_cs_argument_count (args);
2282 int i;
2284 for (i = 0; i < count; i++)
2286 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2288 if (dst->type == IPA_JF_ANCESTOR)
2290 struct ipa_jump_func *src;
2291 int dst_fid = dst->value.ancestor.formal_id;
2293 /* Variable number of arguments can cause havoc if we try to access
2294 one that does not exist in the inlined edge. So make sure we
2295 don't. */
2296 if (dst_fid >= ipa_get_cs_argument_count (top))
2298 dst->type = IPA_JF_UNKNOWN;
2299 continue;
2302 src = ipa_get_ith_jump_func (top, dst_fid);
2304 if (src->agg.items
2305 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2307 struct ipa_agg_jf_item *item;
2308 int j;
2310 /* Currently we do not produce clobber aggregate jump functions,
2311 replace with merging when we do. */
2312 gcc_assert (!dst->agg.items);
2314 dst->agg.items = vec_safe_copy (src->agg.items);
2315 dst->agg.by_ref = src->agg.by_ref;
2316 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2317 item->offset -= dst->value.ancestor.offset;
2320 if (src->type == IPA_JF_KNOWN_TYPE)
2321 combine_known_type_and_ancestor_jfs (src, dst);
2322 else if (src->type == IPA_JF_PASS_THROUGH
2323 && src->value.pass_through.operation == NOP_EXPR)
2325 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2326 dst->value.ancestor.agg_preserved &=
2327 src->value.pass_through.agg_preserved;
2328 dst->value.ancestor.type_preserved &=
2329 src->value.pass_through.type_preserved;
2331 else if (src->type == IPA_JF_ANCESTOR)
2333 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2334 dst->value.ancestor.offset += src->value.ancestor.offset;
2335 dst->value.ancestor.agg_preserved &=
2336 src->value.ancestor.agg_preserved;
2337 dst->value.ancestor.type_preserved &=
2338 src->value.ancestor.type_preserved;
2340 else
2341 dst->type = IPA_JF_UNKNOWN;
2343 else if (dst->type == IPA_JF_PASS_THROUGH)
2345 struct ipa_jump_func *src;
2346 /* We must check range due to calls with variable number of arguments
2347 and we cannot combine jump functions with operations. */
2348 if (dst->value.pass_through.operation == NOP_EXPR
2349 && (dst->value.pass_through.formal_id
2350 < ipa_get_cs_argument_count (top)))
2352 int dst_fid = dst->value.pass_through.formal_id;
2353 src = ipa_get_ith_jump_func (top, dst_fid);
2354 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2356 switch (src->type)
2358 case IPA_JF_UNKNOWN:
2359 dst->type = IPA_JF_UNKNOWN;
2360 break;
2361 case IPA_JF_KNOWN_TYPE:
2362 ipa_set_jf_known_type (dst,
2363 ipa_get_jf_known_type_offset (src),
2364 ipa_get_jf_known_type_base_type (src),
2365 ipa_get_jf_known_type_base_type (src));
2366 break;
2367 case IPA_JF_CONST:
2368 ipa_set_jf_cst_copy (dst, src);
2369 break;
2371 case IPA_JF_PASS_THROUGH:
2373 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2374 enum tree_code operation;
2375 operation = ipa_get_jf_pass_through_operation (src);
2377 if (operation == NOP_EXPR)
2379 bool agg_p, type_p;
2380 agg_p = dst_agg_p
2381 && ipa_get_jf_pass_through_agg_preserved (src);
2382 type_p = ipa_get_jf_pass_through_type_preserved (src)
2383 && ipa_get_jf_pass_through_type_preserved (dst);
2384 ipa_set_jf_simple_pass_through (dst, formal_id,
2385 agg_p, type_p);
2387 else
2389 tree operand = ipa_get_jf_pass_through_operand (src);
2390 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2391 operation);
2393 break;
2395 case IPA_JF_ANCESTOR:
2397 bool agg_p, type_p;
2398 agg_p = dst_agg_p
2399 && ipa_get_jf_ancestor_agg_preserved (src);
2400 type_p = ipa_get_jf_ancestor_type_preserved (src)
2401 && ipa_get_jf_pass_through_type_preserved (dst);
2402 ipa_set_ancestor_jf (dst,
2403 ipa_get_jf_ancestor_offset (src),
2404 ipa_get_jf_ancestor_type (src),
2405 ipa_get_jf_ancestor_formal_id (src),
2406 agg_p, type_p);
2407 break;
2409 default:
2410 gcc_unreachable ();
2413 if (src->agg.items
2414 && (dst_agg_p || !src->agg.by_ref))
2416 /* Currently we do not produce clobber aggregate jump
2417 functions, replace with merging when we do. */
2418 gcc_assert (!dst->agg.items);
2420 dst->agg.by_ref = src->agg.by_ref;
2421 dst->agg.items = vec_safe_copy (src->agg.items);
2424 else
2425 dst->type = IPA_JF_UNKNOWN;
2430 /* If TARGET is an addr_expr of a function declaration, make it the destination
2431 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2433 struct cgraph_edge *
2434 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
2436 struct cgraph_node *callee;
2437 struct inline_edge_summary *es = inline_edge_summary (ie);
2438 bool unreachable = false;
2440 if (TREE_CODE (target) == ADDR_EXPR)
2441 target = TREE_OPERAND (target, 0);
2442 if (TREE_CODE (target) != FUNCTION_DECL)
2444 target = canonicalize_constructor_val (target, NULL);
2445 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2447 if (ie->indirect_info->member_ptr)
2448 /* Member pointer call that goes through a VMT lookup. */
2449 return NULL;
2451 if (dump_file)
2452 fprintf (dump_file, "ipa-prop: Discovered direct call to non-function"
2453 " in %s/%i, making it unreachable.\n",
2454 ie->caller->name (), ie->caller->order);
2455 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2456 callee = cgraph_get_create_node (target);
2457 unreachable = true;
2459 else
2460 callee = cgraph_get_node (target);
2462 else
2463 callee = cgraph_get_node (target);
2465 /* Because may-edges are not explicitly represented and the vtable may be
2466 external, we may create the first reference to the object in the unit. */
2467 if (!callee || callee->global.inlined_to)
2470 /* We had better ensure that we can refer to it.
2471 In the case of static functions we are out of luck, since we already
2472 removed its body. In the case of public functions we may or may
2473 not introduce the reference. */
2474 if (!canonicalize_constructor_val (target, NULL)
2475 || !TREE_PUBLIC (target))
2477 if (dump_file)
2478 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2479 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2480 xstrdup (ie->caller->name ()),
2481 ie->caller->order,
2482 xstrdup (ie->callee->name ()),
2483 ie->callee->order);
2484 return NULL;
2486 callee = cgraph_get_create_node (target);
2488 ipa_check_create_node_params ();
2490 /* We cannot make edges to inline clones. It is a bug if someone removed
2491 the cgraph node too early. */
2492 gcc_assert (!callee->global.inlined_to);
2494 if (dump_file && !unreachable)
2496 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
2497 "(%s/%i -> %s/%i), for stmt ",
2498 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2499 xstrdup (ie->caller->name ()),
2500 ie->caller->order,
2501 xstrdup (callee->name ()),
2502 callee->order);
2503 if (ie->call_stmt)
2504 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2505 else
2506 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2508 ie = cgraph_make_edge_direct (ie, callee);
2509 es = inline_edge_summary (ie);
2510 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2511 - eni_size_weights.call_cost);
2512 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2513 - eni_time_weights.call_cost);
2515 return ie;
2518 /* Retrieve the value from aggregate jump function AGG for the given OFFSET,
2519 or return NULL if there is none. BY_REF specifies whether the value has to
2520 be passed by reference or by value. */
2522 tree
2523 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2524 HOST_WIDE_INT offset, bool by_ref)
2526 struct ipa_agg_jf_item *item;
2527 int i;
2529 if (by_ref != agg->by_ref)
2530 return NULL;
2532 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2533 if (item->offset == offset)
2535 /* Currently we do not have clobber values, return NULL for them once
2536 we do. */
2537 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2538 return item->value;
2540 return NULL;
2543 /* Remove a reference to SYMBOL from the list of references of a node given by
2544 reference description RDESC. Return true if the reference has been
2545 successfully found and removed. */
2547 static bool
2548 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2550 struct ipa_ref *to_del;
2551 struct cgraph_edge *origin;
2553 origin = rdesc->cs;
2554 if (!origin)
2555 return false;
2556 to_del = ipa_find_reference (origin->caller, symbol,
2557 origin->call_stmt, origin->lto_stmt_uid);
2558 if (!to_del)
2559 return false;
2561 ipa_remove_reference (to_del);
2562 if (dump_file)
2563 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2564 xstrdup (origin->caller->name ()),
2565 origin->caller->order, xstrdup (symbol->name ()));
2566 return true;
2569 /* If JFUNC has a reference description with refcount different from
2570 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2571 NULL. JFUNC must be a constant jump function. */
2573 static struct ipa_cst_ref_desc *
2574 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2576 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2577 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2578 return rdesc;
2579 else
2580 return NULL;
2583 /* If the value of constant jump function JFUNC is an address of a function
2584 declaration, return the associated call graph node. Otherwise return
2585 NULL. */
2587 static cgraph_node *
2588 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2590 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2591 tree cst = ipa_get_jf_constant (jfunc);
2592 if (TREE_CODE (cst) != ADDR_EXPR
2593 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2594 return NULL;
2596 return cgraph_get_node (TREE_OPERAND (cst, 0));
2600 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2601 refcount and if it hits zero, remove reference to SYMBOL from the caller of
2602 the edge specified in the rdesc. Return false if either the symbol or the
2603 reference could not be found, otherwise return true. */
2605 static bool
2606 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2608 struct ipa_cst_ref_desc *rdesc;
2609 if (jfunc->type == IPA_JF_CONST
2610 && (rdesc = jfunc_rdesc_usable (jfunc))
2611 && --rdesc->refcount == 0)
2613 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
2614 if (!symbol)
2615 return false;
2617 return remove_described_reference (symbol, rdesc);
2619 return true;
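/* A hedged example of the refcount life cycle: a constant jump function
   passing &f starts with a refcount of 1 for the IPA reference taken at the
   call statement.  When the indirect edge fed by it is made direct (see
   try_make_edge_direct_simple_call below), the decrement above reaches zero
   and the now-redundant reference to f is removed from the caller.  */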
2622 /* Try to find a destination for indirect edge IE that corresponds to a simple
2623 call or a call of a member function pointer and where the destination is a
2624 pointer formal parameter described by jump function JFUNC. If it can be
2625 determined, return the newly direct edge, otherwise return NULL.
2626 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2628 static struct cgraph_edge *
2629 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2630 struct ipa_jump_func *jfunc,
2631 struct ipa_node_params *new_root_info)
2633 struct cgraph_edge *cs;
2634 tree target;
2635 bool agg_contents = ie->indirect_info->agg_contents;
2637 if (ie->indirect_info->agg_contents)
2638 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2639 ie->indirect_info->offset,
2640 ie->indirect_info->by_ref);
2641 else
2642 target = ipa_value_from_jfunc (new_root_info, jfunc);
2643 if (!target)
2644 return NULL;
2645 cs = ipa_make_edge_direct_to_target (ie, target);
2647 if (cs && !agg_contents)
2649 bool ok;
2650 gcc_checking_assert (cs->callee
2651 && (cs != ie
2652 || jfunc->type != IPA_JF_CONST
2653 || !cgraph_node_for_jfunc (jfunc)
2654 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2655 ok = try_decrement_rdesc_refcount (jfunc);
2656 gcc_checking_assert (ok);
2659 return cs;
2662 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2663 call based on a formal parameter which is described by jump function JFUNC
2664 and if it can be determined, make it direct and return the direct edge.
2665 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
2666 are relative to. */
2668 static struct cgraph_edge *
2669 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2670 struct ipa_jump_func *jfunc,
2671 struct ipa_node_params *new_root_info)
2673 tree binfo, target;
2675 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
2677 if (!binfo)
2678 return NULL;
2680 if (TREE_CODE (binfo) != TREE_BINFO)
2682 binfo = gimple_extract_devirt_binfo_from_cst
2683 (binfo, ie->indirect_info->otr_type);
2684 if (!binfo)
2685 return NULL;
2688 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
2689 ie->indirect_info->otr_type);
2690 if (binfo)
2691 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2692 binfo);
2693 else
2694 return NULL;
2696 if (target)
2698 #ifdef ENABLE_CHECKING
2699 gcc_assert (possible_polymorphic_call_target_p
2700 (ie, cgraph_get_node (target)));
2701 #endif
2702 return ipa_make_edge_direct_to_target (ie, target);
2704 else
2705 return NULL;
2708 /* Update the param called notes associated with NODE when CS is being inlined,
2709 assuming NODE is (potentially indirectly) inlined into CS->callee.
2710 Moreover, if the callee is discovered to be constant, create a new cgraph
2711 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2712 unless NEW_EDGES is NULL. Return true iff new edges were created. */
2714 static bool
2715 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2716 struct cgraph_node *node,
2717 vec<cgraph_edge_p> *new_edges)
2719 struct ipa_edge_args *top;
2720 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
2721 struct ipa_node_params *new_root_info;
2722 bool res = false;
2724 ipa_check_create_edge_args ();
2725 top = IPA_EDGE_REF (cs);
2726 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
2727 ? cs->caller->global.inlined_to
2728 : cs->caller);
2730 for (ie = node->indirect_calls; ie; ie = next_ie)
2732 struct cgraph_indirect_call_info *ici = ie->indirect_info;
2733 struct ipa_jump_func *jfunc;
2734 int param_index;
2736 next_ie = ie->next_callee;
2738 if (ici->param_index == -1)
2739 continue;
2741 /* We must check range due to calls with variable number of arguments: */
2742 if (ici->param_index >= ipa_get_cs_argument_count (top))
2744 ici->param_index = -1;
2745 continue;
2748 param_index = ici->param_index;
2749 jfunc = ipa_get_ith_jump_func (top, param_index);
2751 if (!flag_indirect_inlining)
2752 new_direct_edge = NULL;
2753 else if (ici->polymorphic)
2754 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
2755 new_root_info);
2756 else
2757 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
2758 new_root_info);
2759 /* If speculation was removed, then we need to do nothing. */
2760 if (new_direct_edge && new_direct_edge != ie)
2762 new_direct_edge->indirect_inlining_edge = 1;
2763 top = IPA_EDGE_REF (cs);
2764 res = true;
2766 else if (new_direct_edge)
2768 new_direct_edge->indirect_inlining_edge = 1;
2769 if (new_direct_edge->call_stmt)
2770 new_direct_edge->call_stmt_cannot_inline_p
2771 = !gimple_check_call_matching_types (
2772 new_direct_edge->call_stmt,
2773 new_direct_edge->callee->decl, false);
2774 if (new_edges)
2776 new_edges->safe_push (new_direct_edge);
2777 res = true;
2779 top = IPA_EDGE_REF (cs);
2781 else if (jfunc->type == IPA_JF_PASS_THROUGH
2782 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2784 if (ici->agg_contents
2785 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
2786 ici->param_index = -1;
2787 else
2788 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
2790 else if (jfunc->type == IPA_JF_ANCESTOR)
2792 if (ici->agg_contents
2793 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
2794 ici->param_index = -1;
2795 else
2797 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
2798 if (ipa_get_jf_ancestor_offset (jfunc))
2799 ici->outer_type = NULL;
2800 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
2803 else
2804 /* Either we can find a destination for this edge now or never. */
2805 ici->param_index = -1;
2808 return res;
2811 /* Recursively traverse the subtree of NODE (including NODE itself) made of
2812 inlined cgraph_edges when CS has been inlined, and invoke
2813 update_indirect_edges_after_inlining on all nodes and
2814 update_jump_functions_after_inlining on all non-inlined edges that lead out
2815 of this subtree. Newly discovered indirect edges will be added to
2816 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
2817 created. */
2819 static bool
2820 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
2821 struct cgraph_node *node,
2822 vec<cgraph_edge_p> *new_edges)
2824 struct cgraph_edge *e;
2825 bool res;
2827 res = update_indirect_edges_after_inlining (cs, node, new_edges);
2829 for (e = node->callees; e; e = e->next_callee)
2830 if (!e->inline_failed)
2831 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
2832 else
2833 update_jump_functions_after_inlining (cs, e);
2834 for (e = node->indirect_calls; e; e = e->next_callee)
2835 update_jump_functions_after_inlining (cs, e);
2837 return res;
2840 /* Combine two controlled uses counts as done during inlining. */
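/* The minus one below accounts for inlining itself: of the combined counts,
   the single described use that passed the value from the caller to the
   callee disappears together with the inlined call.  */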
2842 static int
2843 combine_controlled_uses_counters (int c, int d)
2845 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
2846 return IPA_UNDESCRIBED_USE;
2847 else
2848 return c + d - 1;
2851 /* Propagate the number of controlled uses from CS->callee to the new root of
2852 the tree of inlined nodes. */
2854 static void
2855 propagate_controlled_uses (struct cgraph_edge *cs)
2857 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
2858 struct cgraph_node *new_root = cs->caller->global.inlined_to
2859 ? cs->caller->global.inlined_to : cs->caller;
2860 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
2861 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
2862 int count, i;
2864 count = MIN (ipa_get_cs_argument_count (args),
2865 ipa_get_param_count (old_root_info));
2866 for (i = 0; i < count; i++)
2868 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2869 struct ipa_cst_ref_desc *rdesc;
2871 if (jf->type == IPA_JF_PASS_THROUGH)
2873 int src_idx, c, d;
2874 src_idx = ipa_get_jf_pass_through_formal_id (jf);
2875 c = ipa_get_controlled_uses (new_root_info, src_idx);
2876 d = ipa_get_controlled_uses (old_root_info, i);
2878 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
2879 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
2880 c = combine_controlled_uses_counters (c, d);
2881 ipa_set_controlled_uses (new_root_info, src_idx, c);
2882 if (c == 0 && new_root_info->ipcp_orig_node)
2884 struct cgraph_node *n;
2885 struct ipa_ref *ref;
2886 tree t = new_root_info->known_vals[src_idx];
2888 if (t && TREE_CODE (t) == ADDR_EXPR
2889 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
2890 && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
2891 && (ref = ipa_find_reference (new_root,
2892 n, NULL, 0)))
2894 if (dump_file)
2895 fprintf (dump_file, "ipa-prop: Removing cloning-created "
2896 "reference from %s/%i to %s/%i.\n",
2897 xstrdup (new_root->name ()),
2898 new_root->order,
2899 xstrdup (n->name ()), n->order);
2900 ipa_remove_reference (ref);
2904 else if (jf->type == IPA_JF_CONST
2905 && (rdesc = jfunc_rdesc_usable (jf)))
2907 int d = ipa_get_controlled_uses (old_root_info, i);
2908 int c = rdesc->refcount;
2909 rdesc->refcount = combine_controlled_uses_counters (c, d);
2910 if (rdesc->refcount == 0)
2912 tree cst = ipa_get_jf_constant (jf);
2913 struct cgraph_node *n;
2914 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
2915 && TREE_CODE (TREE_OPERAND (cst, 0))
2916 == FUNCTION_DECL);
2917 n = cgraph_get_node (TREE_OPERAND (cst, 0));
2918 if (n)
2920 struct cgraph_node *clone;
2921 bool ok;
2922 ok = remove_described_reference (n, rdesc);
2923 gcc_checking_assert (ok);
2925 clone = cs->caller;
2926 while (clone->global.inlined_to
2927 && clone != rdesc->cs->caller
2928 && IPA_NODE_REF (clone)->ipcp_orig_node)
2930 struct ipa_ref *ref;
2931 ref = ipa_find_reference (clone,
2932 n, NULL, 0);
2933 if (ref)
2935 if (dump_file)
2936 fprintf (dump_file, "ipa-prop: Removing "
2937 "cloning-created reference "
2938 "from %s/%i to %s/%i.\n",
2939 xstrdup (clone->name ()),
2940 clone->order,
2941 xstrdup (n->name ()),
2942 n->order);
2943 ipa_remove_reference (ref);
2945 clone = clone->callers->caller;
2952 for (i = ipa_get_param_count (old_root_info);
2953 i < ipa_get_cs_argument_count (args);
2954 i++)
2956 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2958 if (jf->type == IPA_JF_CONST)
2960 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
2961 if (rdesc)
2962 rdesc->refcount = IPA_UNDESCRIBED_USE;
2964 else if (jf->type == IPA_JF_PASS_THROUGH)
2965 ipa_set_controlled_uses (new_root_info,
2966 jf->value.pass_through.formal_id,
2967 IPA_UNDESCRIBED_USE);
2971 /* Update jump functions and call note functions on inlining the call site CS.
2972 CS is expected to lead to a node already cloned by
2973 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
2974 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
2975 created. */
2977 bool
2978 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
2979 vec<cgraph_edge_p> *new_edges)
2981 bool changed;
2982 /* Do nothing if the preparation phase has not been carried out yet
2983 (i.e. during early inlining). */
2984 if (!ipa_node_params_vector.exists ())
2985 return false;
2986 gcc_assert (ipa_edge_args_vector);
2988 propagate_controlled_uses (cs);
2989 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
2991 return changed;
2994 /* Frees all dynamically allocated structures that the argument info points
2995 to. */
2997 void
2998 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3000 vec_free (args->jump_functions);
3001 memset (args, 0, sizeof (*args));
3004 /* Free all ipa_edge structures. */
3006 void
3007 ipa_free_all_edge_args (void)
3009 int i;
3010 struct ipa_edge_args *args;
3012 if (!ipa_edge_args_vector)
3013 return;
3015 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3016 ipa_free_edge_args_substructures (args);
3018 vec_free (ipa_edge_args_vector);
3021 /* Frees all dynamically allocated structures that the param info points
3022 to. */
3024 void
3025 ipa_free_node_params_substructures (struct ipa_node_params *info)
3027 info->descriptors.release ();
3028 free (info->lattices);
3029 /* Lattice values and their sources are deallocated with their allocation
3030 pool. */
3031 info->known_vals.release ();
3032 memset (info, 0, sizeof (*info));
3035 /* Free all ipa_node_params structures. */
3037 void
3038 ipa_free_all_node_params (void)
3040 int i;
3041 struct ipa_node_params *info;
3043 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
3044 ipa_free_node_params_substructures (info);
3046 ipa_node_params_vector.release ();
3049 /* Set the aggregate replacements of NODE to be AGGVALS. */
3051 void
3052 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3053 struct ipa_agg_replacement_value *aggvals)
3055 if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
3056 vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);
3058 (*ipa_node_agg_replacements)[node->uid] = aggvals;
3061 /* Hook that is called by cgraph.c when an edge is removed. */
3063 static void
3064 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3066 struct ipa_edge_args *args;
3068 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3069 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3070 return;
3072 args = IPA_EDGE_REF (cs);
3073 if (args->jump_functions)
3075 struct ipa_jump_func *jf;
3076 int i;
3077 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3079 struct ipa_cst_ref_desc *rdesc;
3080 try_decrement_rdesc_refcount (jf);
3081 if (jf->type == IPA_JF_CONST
3082 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3083 && rdesc->cs == cs)
3084 rdesc->cs = NULL;
3088 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3091 /* Hook that is called by cgraph.c when a node is removed. */
3093 static void
3094 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3096 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3097 if (ipa_node_params_vector.length () > (unsigned)node->uid)
3098 ipa_free_node_params_substructures (IPA_NODE_REF (node));
3099 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3100 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
3103 /* Hook that is called by cgraph.c when an edge is duplicated. */
3105 static void
3106 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3107 __attribute__((unused)) void *data)
3109 struct ipa_edge_args *old_args, *new_args;
3110 unsigned int i;
3112 ipa_check_create_edge_args ();
3114 old_args = IPA_EDGE_REF (src);
3115 new_args = IPA_EDGE_REF (dst);
3117 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3119 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3121 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3122 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3124 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3126 if (src_jf->type == IPA_JF_CONST)
3128 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3130 if (!src_rdesc)
3131 dst_jf->value.constant.rdesc = NULL;
3132 else if (src->caller == dst->caller)
3134 struct ipa_ref *ref;
3135 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3136 gcc_checking_assert (n);
3137 ref = ipa_find_reference (src->caller, n,
3138 src->call_stmt, src->lto_stmt_uid);
3139 gcc_checking_assert (ref);
3140 ipa_clone_ref (ref, dst->caller, ref->stmt);
3142 gcc_checking_assert (ipa_refdesc_pool);
3143 struct ipa_cst_ref_desc *dst_rdesc
3144 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3145 dst_rdesc->cs = dst;
3146 dst_rdesc->refcount = src_rdesc->refcount;
3147 dst_rdesc->next_duplicate = NULL;
3148 dst_jf->value.constant.rdesc = dst_rdesc;
3150 else if (src_rdesc->cs == src)
3152 struct ipa_cst_ref_desc *dst_rdesc;
3153 gcc_checking_assert (ipa_refdesc_pool);
3154 dst_rdesc
3155 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3156 dst_rdesc->cs = dst;
3157 dst_rdesc->refcount = src_rdesc->refcount;
3158 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3159 src_rdesc->next_duplicate = dst_rdesc;
3160 dst_jf->value.constant.rdesc = dst_rdesc;
3162 else
3164 struct ipa_cst_ref_desc *dst_rdesc;
3165 /* This can happen during inlining, when a JFUNC can refer to a
3166 reference taken in a function up in the tree of inline clones.
3167 We need to find the duplicate that refers to our tree of
3168 inline clones. */
3170 gcc_assert (dst->caller->global.inlined_to);
3171 for (dst_rdesc = src_rdesc->next_duplicate;
3172 dst_rdesc;
3173 dst_rdesc = dst_rdesc->next_duplicate)
3175 struct cgraph_node *top;
3176 top = dst_rdesc->cs->caller->global.inlined_to
3177 ? dst_rdesc->cs->caller->global.inlined_to
3178 : dst_rdesc->cs->caller;
3179 if (dst->caller->global.inlined_to == top)
3180 break;
3182 gcc_assert (dst_rdesc);
3183 dst_jf->value.constant.rdesc = dst_rdesc;
3189 /* Hook that is called by cgraph.c when a node is duplicated. */
3191 static void
3192 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
3193 ATTRIBUTE_UNUSED void *data)
3195 struct ipa_node_params *old_info, *new_info;
3196 struct ipa_agg_replacement_value *old_av, *new_av;
3198 ipa_check_create_node_params ();
3199 old_info = IPA_NODE_REF (src);
3200 new_info = IPA_NODE_REF (dst);
3202 new_info->descriptors = old_info->descriptors.copy ();
3203 new_info->lattices = NULL;
3204 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3206 new_info->uses_analysis_done = old_info->uses_analysis_done;
3207 new_info->node_enqueued = old_info->node_enqueued;
3209 old_av = ipa_get_agg_replacements_for_node (src);
3210 if (!old_av)
3211 return;
3213 new_av = NULL;
3214 while (old_av)
3216 struct ipa_agg_replacement_value *v;
3218 v = ggc_alloc_ipa_agg_replacement_value ();
3219 memcpy (v, old_av, sizeof (*v));
3220 v->next = new_av;
3221 new_av = v;
3222 old_av = old_av->next;
3224 ipa_set_node_agg_value_chain (dst, new_av);
3228 /* Analyze a function newly added to the callgraph. */
3230 static void
3231 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3233 if (cgraph_function_with_gimple_body_p (node))
3234 ipa_analyze_node (node);
3237 /* Register our cgraph hooks if they are not already there. */
3239 void
3240 ipa_register_cgraph_hooks (void)
3242 if (!edge_removal_hook_holder)
3243 edge_removal_hook_holder =
3244 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3245 if (!node_removal_hook_holder)
3246 node_removal_hook_holder =
3247 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
3248 if (!edge_duplication_hook_holder)
3249 edge_duplication_hook_holder =
3250 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3251 if (!node_duplication_hook_holder)
3252 node_duplication_hook_holder =
3253 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
3254 function_insertion_hook_holder =
3255 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
3258 /* Unregister our cgraph hooks. */
3260 static void
3261 ipa_unregister_cgraph_hooks (void)
3263 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
3264 edge_removal_hook_holder = NULL;
3265 cgraph_remove_node_removal_hook (node_removal_hook_holder);
3266 node_removal_hook_holder = NULL;
3267 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
3268 edge_duplication_hook_holder = NULL;
3269 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
3270 node_duplication_hook_holder = NULL;
3271 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
3272 function_insertion_hook_holder = NULL;
3275 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3276 longer needed after ipa-cp. */
3278 void
3279 ipa_free_all_structures_after_ipa_cp (void)
3281 if (!optimize)
3283 ipa_free_all_edge_args ();
3284 ipa_free_all_node_params ();
3285 free_alloc_pool (ipcp_sources_pool);
3286 free_alloc_pool (ipcp_values_pool);
3287 free_alloc_pool (ipcp_agg_lattice_pool);
3288 ipa_unregister_cgraph_hooks ();
3289 if (ipa_refdesc_pool)
3290 free_alloc_pool (ipa_refdesc_pool);
3294 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3295 longer needed after indirect inlining. */
3297 void
3298 ipa_free_all_structures_after_iinln (void)
3300 ipa_free_all_edge_args ();
3301 ipa_free_all_node_params ();
3302 ipa_unregister_cgraph_hooks ();
3303 if (ipcp_sources_pool)
3304 free_alloc_pool (ipcp_sources_pool);
3305 if (ipcp_values_pool)
3306 free_alloc_pool (ipcp_values_pool);
3307 if (ipcp_agg_lattice_pool)
3308 free_alloc_pool (ipcp_agg_lattice_pool);
3309 if (ipa_refdesc_pool)
3310 free_alloc_pool (ipa_refdesc_pool);
3313 /* Print the ipa_tree_map data structure of function NODE to F. */
3316 void
3317 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3319 int i, count;
3320 struct ipa_node_params *info;
3322 if (!node->definition)
3323 return;
3324 info = IPA_NODE_REF (node);
3325 fprintf (f, " function %s/%i parameter descriptors:\n",
3326 node->name (), node->order);
3327 count = ipa_get_param_count (info);
3328 for (i = 0; i < count; i++)
3330 int c;
3332 fprintf (f, " ");
3333 ipa_dump_param (f, info, i);
3334 if (ipa_is_param_used (info, i))
3335 fprintf (f, " used");
3336 c = ipa_get_controlled_uses (info, i);
3337 if (c == IPA_UNDESCRIBED_USE)
3338 fprintf (f, " undescribed_use");
3339 else
3340 fprintf (f, " controlled_uses=%i", c);
3341 fprintf (f, "\n");
3345 /* Print ipa_tree_map data structures of all functions in the
3346 callgraph to F. */
3348 void
3349 ipa_print_all_params (FILE * f)
3351 struct cgraph_node *node;
3353 fprintf (f, "\nFunction parameters:\n");
3354 FOR_EACH_FUNCTION (node)
3355 ipa_print_node_params (f, node);
3358 /* Return a heap allocated vector containing formal parameters of FNDECL. */
3360 vec<tree>
3361 ipa_get_vector_of_formal_parms (tree fndecl)
3363 vec<tree> args;
3364 int count;
3365 tree parm;
3367 gcc_assert (!flag_wpa);
3368 count = count_formal_params (fndecl);
3369 args.create (count);
3370 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3371 args.quick_push (parm);
3373 return args;
3376 /* Return a heap allocated vector containing types of formal parameters of
3377 function type FNTYPE. */
3379 vec<tree>
3380 ipa_get_vector_of_formal_parm_types (tree fntype)
3382 vec<tree> types;
3383 int count = 0;
3384 tree t;
3386 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3387 count++;
3389 types.create (count);
3390 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3391 types.quick_push (TREE_VALUE (t));
3393 return types;
3396 /* Modify the function declaration FNDECL and its type according to the plan in
3397 ADJUSTMENTS. It also sets base fields of individual adjustments structures
3398 to reflect the actual parameters being modified which are determined by the
3399 base_index field. */
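/* An illustrative sketch (hedged; the adjustment vector below is assumed,
   not produced by any particular pass): for

     void foo (int i, struct S *s);

   a vector containing { op=IPA_PARM_OP_COPY, base_index=0 } followed by
   { op=IPA_PARM_OP_NEW, type=int, by_ref=false } rewrites the declaration
   to take (int, int): the first PARM_DECL is reused, the second one is
   synthesized with a name derived from arg_prefix (or "SYNTH" when no
   prefix is given), and TYPE_ARG_TYPES is rebuilt accordingly.  */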
3401 void
3402 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3404 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3405 tree orig_type = TREE_TYPE (fndecl);
3406 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3408 /* The following test is an ugly hack: some functions simply don't have any
3409 arguments in their type. This is probably a bug but well... */
3410 bool care_for_types = (old_arg_types != NULL_TREE);
3411 bool last_parm_void;
3412 vec<tree> otypes;
3413 if (care_for_types)
3415 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3416 == void_type_node);
3417 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3418 if (last_parm_void)
3419 gcc_assert (oparms.length () + 1 == otypes.length ());
3420 else
3421 gcc_assert (oparms.length () == otypes.length ());
3423 else
3425 last_parm_void = false;
3426 otypes.create (0);
3429 int len = adjustments.length ();
3430 tree *link = &DECL_ARGUMENTS (fndecl);
3431 tree new_arg_types = NULL;
3432 for (int i = 0; i < len; i++)
3434 struct ipa_parm_adjustment *adj;
3435 gcc_assert (link);
3437 adj = &adjustments[i];
3438 tree parm;
3439 if (adj->op == IPA_PARM_OP_NEW)
3440 parm = NULL;
3441 else
3442 parm = oparms[adj->base_index];
3443 adj->base = parm;
3445 if (adj->op == IPA_PARM_OP_COPY)
3447 if (care_for_types)
3448 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3449 new_arg_types);
3450 *link = parm;
3451 link = &DECL_CHAIN (parm);
3453 else if (adj->op != IPA_PARM_OP_REMOVE)
3455 tree new_parm;
3456 tree ptype;
3458 if (adj->by_ref)
3459 ptype = build_pointer_type (adj->type);
3460 else
3462 ptype = adj->type;
3463 if (is_gimple_reg_type (ptype))
3465 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3466 if (TYPE_ALIGN (ptype) < malign)
3467 ptype = build_aligned_type (ptype, malign);
3471 if (care_for_types)
3472 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3474 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3475 ptype);
3476 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
3477 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3478 DECL_ARTIFICIAL (new_parm) = 1;
3479 DECL_ARG_TYPE (new_parm) = ptype;
3480 DECL_CONTEXT (new_parm) = fndecl;
3481 TREE_USED (new_parm) = 1;
3482 DECL_IGNORED_P (new_parm) = 1;
3483 layout_decl (new_parm, 0);
3485 if (adj->op == IPA_PARM_OP_NEW)
3486 adj->base = NULL;
3487 else
3488 adj->base = parm;
3489 adj->new_decl = new_parm;
3491 *link = new_parm;
3492 link = &DECL_CHAIN (new_parm);
3496 *link = NULL_TREE;
3498 tree new_reversed = NULL;
3499 if (care_for_types)
3501 new_reversed = nreverse (new_arg_types);
3502 if (last_parm_void)
3504 if (new_reversed)
3505 TREE_CHAIN (new_arg_types) = void_list_node;
3506 else
3507 new_reversed = void_list_node;
3511 /* Use copy_node to preserve as much as possible from the original type
3512 (debug info, attribute lists etc.).
3513 The exception is that METHOD_TYPEs must have a THIS argument; when we
3514 are asked to remove it, we need to build a new FUNCTION_TYPE
3515 instead. */
3516 tree new_type = NULL;
3517 if (TREE_CODE (orig_type) != METHOD_TYPE
3518 || (adjustments[0].op == IPA_PARM_OP_COPY
3519 && adjustments[0].base_index == 0))
3521 new_type = build_distinct_type_copy (orig_type);
3522 TYPE_ARG_TYPES (new_type) = new_reversed;
3524 else
3526 new_type
3527 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3528 new_reversed));
3529 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3530 DECL_VINDEX (fndecl) = NULL_TREE;
3533 /* When signature changes, we need to clear builtin info. */
3534 if (DECL_BUILT_IN (fndecl))
3536 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3537 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3540 /* This is a new type, not a copy of an old type. Need to reassociate
3541 variants. We can handle everything except the main variant lazily. */
3542 tree t = TYPE_MAIN_VARIANT (orig_type);
3543 if (orig_type != t)
3545 TYPE_MAIN_VARIANT (new_type) = t;
3546 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
3547 TYPE_NEXT_VARIANT (t) = new_type;
3549 else
3551 TYPE_MAIN_VARIANT (new_type) = new_type;
3552 TYPE_NEXT_VARIANT (new_type) = NULL;
3555 TREE_TYPE (fndecl) = new_type;
3556 DECL_VIRTUAL_P (fndecl) = 0;
3557 otypes.release ();
3558 oparms.release ();
3561 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
3562 If this is a directly recursive call, CS must be NULL. Otherwise it must
3563 contain the corresponding call graph edge. */
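/* A hedged example of the rewrite performed below: with an adjustment
   vector that copies argument 0 and replaces the piece at bit offset 0 of
   the aggregate pointed to by argument 1 with an int passed by value, a
   call

     bar (i_1, &s);

   is replaced by a new GIMPLE_CALL equivalent to

     bar.clone (i_1, s.f);

   where the load of s.f is materialized via force_gimple_operand_gsi just
   before the call.  The clone name and field name are made up for the
   example.  */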
3565 void
3566 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
3567 ipa_parm_adjustment_vec adjustments)
3569 struct cgraph_node *current_node = cgraph_get_node (current_function_decl);
3570 vec<tree> vargs;
3571 vec<tree, va_gc> **debug_args = NULL;
3572 gimple new_stmt;
3573 gimple_stmt_iterator gsi, prev_gsi;
3574 tree callee_decl;
3575 int i, len;
3577 len = adjustments.length ();
3578 vargs.create (len);
3579 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
3580 ipa_remove_stmt_references (current_node, stmt);
3582 gsi = gsi_for_stmt (stmt);
3583 prev_gsi = gsi;
3584 gsi_prev (&prev_gsi);
3585 for (i = 0; i < len; i++)
3587 struct ipa_parm_adjustment *adj;
3589 adj = &adjustments[i];
3591 if (adj->op == IPA_PARM_OP_COPY)
3593 tree arg = gimple_call_arg (stmt, adj->base_index);
3595 vargs.quick_push (arg);
3597 else if (adj->op != IPA_PARM_OP_REMOVE)
3599 tree expr, base, off;
3600 location_t loc;
3601 unsigned int deref_align = 0;
3602 bool deref_base = false;
3604 /* We create a new parameter out of the value of the old one; we can
3605 do the following kinds of transformations:
3607 - A scalar passed by reference is converted to a scalar passed by
3608 value. (adj->by_ref is false and the type of the original
3609 actual argument is a pointer to a scalar).
3611 - A part of an aggregate is passed instead of the whole aggregate.
3612 The part can be passed either by value or by reference, this is
3613 determined by value of adj->by_ref. Moreover, the code below
3614 handles both situations when the original aggregate is passed by
3615 value (its type is not a pointer) and when it is passed by
3616 reference (it is a pointer to an aggregate).
3618 When the new argument is passed by reference (adj->by_ref is true)
3619 it must be a part of an aggregate and therefore we form it by
3620 simply taking the address of a reference inside the original
3621 aggregate. */
3623 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3624 base = gimple_call_arg (stmt, adj->base_index);
3625 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3626 : EXPR_LOCATION (base);
3628 if (TREE_CODE (base) != ADDR_EXPR
3629 && POINTER_TYPE_P (TREE_TYPE (base)))
3630 off = build_int_cst (adj->alias_ptr_type,
3631 adj->offset / BITS_PER_UNIT);
3632 else
3634 HOST_WIDE_INT base_offset;
3635 tree prev_base;
3636 bool addrof;
3638 if (TREE_CODE (base) == ADDR_EXPR)
3640 base = TREE_OPERAND (base, 0);
3641 addrof = true;
3643 else
3644 addrof = false;
3645 prev_base = base;
3646 base = get_addr_base_and_unit_offset (base, &base_offset);
3647 /* Aggregate arguments can have non-invariant addresses. */
3648 if (!base)
3650 base = build_fold_addr_expr (prev_base);
3651 off = build_int_cst (adj->alias_ptr_type,
3652 adj->offset / BITS_PER_UNIT);
3654 else if (TREE_CODE (base) == MEM_REF)
3656 if (!addrof)
3658 deref_base = true;
3659 deref_align = TYPE_ALIGN (TREE_TYPE (base));
3661 off = build_int_cst (adj->alias_ptr_type,
3662 base_offset
3663 + adj->offset / BITS_PER_UNIT);
3664 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
3665 off);
3666 base = TREE_OPERAND (base, 0);
3668 else
3670 off = build_int_cst (adj->alias_ptr_type,
3671 base_offset
3672 + adj->offset / BITS_PER_UNIT);
3673 base = build_fold_addr_expr (base);
3677 if (!adj->by_ref)
3679 tree type = adj->type;
3680 unsigned int align;
3681 unsigned HOST_WIDE_INT misalign;
3683 if (deref_base)
3685 align = deref_align;
3686 misalign = 0;
3688 else
3690 get_pointer_alignment_1 (base, &align, &misalign);
3691 if (TYPE_ALIGN (type) > align)
3692 align = TYPE_ALIGN (type);
3694 misalign += (tree_to_double_int (off)
3695 .sext (TYPE_PRECISION (TREE_TYPE (off))).low
3696 * BITS_PER_UNIT);
3697 misalign = misalign & (align - 1);
3698 if (misalign != 0)
3699 align = (misalign & -misalign);
3700 if (align < TYPE_ALIGN (type))
3701 type = build_aligned_type (type, align);
3702 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
3704 else
3706 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
3707 expr = build_fold_addr_expr (expr);
3710 expr = force_gimple_operand_gsi (&gsi, expr,
3711 adj->by_ref
3712 || is_gimple_reg_type (adj->type),
3713 NULL, true, GSI_SAME_STMT);
3714 vargs.quick_push (expr);
3716 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
3718 unsigned int ix;
3719 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
3720 gimple def_temp;
3722 arg = gimple_call_arg (stmt, adj->base_index);
3723 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
3725 if (!fold_convertible_p (TREE_TYPE (origin), arg))
3726 continue;
3727 arg = fold_convert_loc (gimple_location (stmt),
3728 TREE_TYPE (origin), arg);
3730 if (debug_args == NULL)
3731 debug_args = decl_debug_args_insert (callee_decl);
3732 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
3733 if (ddecl == origin)
3735 ddecl = (**debug_args)[ix + 1];
3736 break;
3738 if (ddecl == NULL)
3740 ddecl = make_node (DEBUG_EXPR_DECL);
3741 DECL_ARTIFICIAL (ddecl) = 1;
3742 TREE_TYPE (ddecl) = TREE_TYPE (origin);
3743 DECL_MODE (ddecl) = DECL_MODE (origin);
3745 vec_safe_push (*debug_args, origin);
3746 vec_safe_push (*debug_args, ddecl);
3748 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
3749 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
3753 if (dump_file && (dump_flags & TDF_DETAILS))
3755 fprintf (dump_file, "replacing stmt:");
3756 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
3759 new_stmt = gimple_build_call_vec (callee_decl, vargs);
3760 vargs.release ();
3761 if (gimple_call_lhs (stmt))
3762 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3764 gimple_set_block (new_stmt, gimple_block (stmt));
3765 if (gimple_has_location (stmt))
3766 gimple_set_location (new_stmt, gimple_location (stmt));
3767 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3768 gimple_call_copy_flags (new_stmt, stmt);
3770 if (dump_file && (dump_flags & TDF_DETAILS))
3772 fprintf (dump_file, "with stmt:");
3773 print_gimple_stmt (dump_file, new_stmt, 0, 0);
3774 fprintf (dump_file, "\n");
3776 gsi_replace (&gsi, new_stmt, true);
3777 if (cs)
3778 cgraph_set_call_stmt (cs, new_stmt);
3781 ipa_record_stmt_references (current_node, gsi_stmt (gsi));
3782 gsi_prev (&gsi);
3784 while ((gsi_end_p (prev_gsi) && !gsi_end_p (gsi))
3785 || (!gsi_end_p (prev_gsi) && gsi_stmt (gsi) == gsi_stmt (prev_gsi)));
3787 update_ssa (TODO_update_ssa);
3788 free_dominance_info (CDI_DOMINATORS);
3791 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
3792 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
3793 specifies whether the function should care about type incompatibility
3794 between the current and new expressions. If it is false, the function will
3795 leave incompatibility issues to the caller. Return true iff the expression
3796 was modified. */

bool
ipa_modify_expr (tree *expr, bool convert,
		 ipa_parm_adjustment_vec adjustments)
{
  struct ipa_parm_adjustment *cand
    = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
  if (!cand)
    return false;

  tree src;
  if (cand->by_ref)
    src = build_simple_mem_ref (cand->new_decl);
  else
    src = cand->new_decl;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "About to replace expr ");
      print_generic_expr (dump_file, *expr, 0);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, src, 0);
      fprintf (dump_file, "\n");
    }

  if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
    {
      tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
      *expr = vce;
    }
  else
    *expr = src;
  return true;
}

/* If T is an SSA_NAME, return NULL if it is not a default def or
   return its base variable if it is.  If IGNORE_DEFAULT_DEF is true,
   the base variable is always returned, regardless of whether it is a
   default def.  Return T if it is not an SSA_NAME.  */
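/* For example (a sketch of the expected behavior): for the SSA_NAME p_1(D),
   the default definition of parameter P, this returns the PARM_DECL P; for a
   non-default definition such as p_5 it returns NULL_TREE unless
   IGNORE_DEFAULT_DEF is true.  */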

static tree
get_ssa_base_param (tree t, bool ignore_default_def)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
	return SSA_NAME_VAR (t);
      else
	return NULL_TREE;
    }
  return t;
}

/* Given an expression, return an adjustment entry specifying the
   transformation to be done on EXPR.  If no suitable adjustment entry
   was found, returns NULL.

   If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
   default def, otherwise bail on them.

   If CONVERT is non-NULL, this function will set *CONVERT if the
   expression provided is a component reference.  ADJUSTMENTS is the
   adjustments vector.  */
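/* As an illustration (hypothetical values): for a reference like
   ((struct S *) p_2(D))->f at bit offset 32, the code below strips the
   MEM_REF, folds its byte offset into the bit offset, maps p_2(D) back to
   the PARM_DECL of p via get_ssa_base_param, and then looks for an
   adjustment recorded for that parameter at bit offset 32.  */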

ipa_parm_adjustment *
ipa_get_adjustment_candidate (tree **expr, bool *convert,
			      ipa_parm_adjustment_vec adjustments,
			      bool ignore_default_def)
{
  if (TREE_CODE (**expr) == BIT_FIELD_REF
      || TREE_CODE (**expr) == IMAGPART_EXPR
      || TREE_CODE (**expr) == REALPART_EXPR)
    {
      *expr = &TREE_OPERAND (**expr, 0);
      if (convert)
	*convert = true;
    }

  HOST_WIDE_INT offset, size, max_size;
  tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
  if (!base || size == -1 || max_size == -1)
    return NULL;

  if (TREE_CODE (base) == MEM_REF)
    {
      offset += mem_ref_offset (base).low * BITS_PER_UNIT;
      base = TREE_OPERAND (base, 0);
    }

  base = get_ssa_base_param (base, ignore_default_def);
  if (!base || TREE_CODE (base) != PARM_DECL)
    return NULL;

  struct ipa_parm_adjustment *cand = NULL;
  unsigned int len = adjustments.length ();
  for (unsigned i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj = &adjustments[i];

      if (adj->base == base
	  && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
	{
	  cand = adj;
	  break;
	}
    }

  if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
    return NULL;
  return cand;
}

/* Return true iff BASE_INDEX is in ADJUSTMENTS more than once.  */

static bool
index_in_adjustments_multiple_times_p (int base_index,
				       ipa_parm_adjustment_vec adjustments)
{
  int i, len = adjustments.length ();
  bool one = false;

  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      adj = &adjustments[i];

      if (adj->base_index == base_index)
	{
	  if (one)
	    return true;
	  else
	    one = true;
	}
    }
  return false;
}

/* Return adjustments that should have the same effect on function parameters
   and call arguments as if they were first changed according to adjustments in
   INNER and then by adjustments in OUTER.  */
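/* A hypothetical example: with three original parameters, inner
   = { COPY(0), REMOVE(1), COPY(2) } and outer = { COPY(0), COPY(1) }
   (outer indices refer to the two parameters surviving inner), the combined
   vector copies original parameters 0 and 2 and still removes parameter 1.  */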

ipa_parm_adjustment_vec
ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
			 ipa_parm_adjustment_vec outer)
{
  int i, outlen = outer.length ();
  int inlen = inner.length ();
  int removals = 0;
  ipa_parm_adjustment_vec adjustments, tmp;

  tmp.create (inlen);
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n;
      n = &inner[i];

      if (n->op == IPA_PARM_OP_REMOVE)
	removals++;
      else
	{
	  /* FIXME: Handling of new arguments is not implemented yet.  */
	  gcc_assert (n->op != IPA_PARM_OP_NEW);
	  tmp.quick_push (*n);
	}
    }

  adjustments.create (outlen + removals);
  for (i = 0; i < outlen; i++)
    {
      struct ipa_parm_adjustment r;
      struct ipa_parm_adjustment *out = &outer[i];
      struct ipa_parm_adjustment *in = &tmp[out->base_index];

      memset (&r, 0, sizeof (r));
      gcc_assert (in->op != IPA_PARM_OP_REMOVE);
      if (out->op == IPA_PARM_OP_REMOVE)
	{
	  if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
	    {
	      r.op = IPA_PARM_OP_REMOVE;
	      adjustments.quick_push (r);
	    }
	  continue;
	}
      else
	{
	  /* FIXME: Handling of new arguments is not implemented yet.  */
	  gcc_assert (out->op != IPA_PARM_OP_NEW);
	}

      r.base_index = in->base_index;
      r.type = out->type;

      /* FIXME: Create nonlocal value too.  */

      if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
	r.op = IPA_PARM_OP_COPY;
      else if (in->op == IPA_PARM_OP_COPY)
	r.offset = out->offset;
      else if (out->op == IPA_PARM_OP_COPY)
	r.offset = in->offset;
      else
	r.offset = in->offset + out->offset;
      adjustments.quick_push (r);
    }

  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n = &inner[i];

      if (n->op == IPA_PARM_OP_REMOVE)
	adjustments.quick_push (*n);
    }

  tmp.release ();
  return adjustments;
}

/* Dump the adjustments in the vector ADJUSTMENTS to FILE in a human
   friendly way, assuming they are meant to be applied to FNDECL.  */

void
ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
			    tree fndecl)
{
  int i, len = adjustments.length ();
  bool first = true;
  vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);

  fprintf (file, "IPA param adjustments: ");
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      adj = &adjustments[i];

      if (!first)
	fprintf (file, " ");
      else
	first = false;

      fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
      print_generic_expr (file, parms[adj->base_index], 0);
      if (adj->base)
	{
	  fprintf (file, ", base: ");
	  print_generic_expr (file, adj->base, 0);
	}
      if (adj->new_decl)
	{
	  fprintf (file, ", new_decl: ");
	  print_generic_expr (file, adj->new_decl, 0);
	}
      if (adj->new_ssa_base)
	{
	  fprintf (file, ", new_ssa_base: ");
	  print_generic_expr (file, adj->new_ssa_base, 0);
	}

      if (adj->op == IPA_PARM_OP_COPY)
	fprintf (file, ", copy_param");
      else if (adj->op == IPA_PARM_OP_REMOVE)
	fprintf (file, ", remove_param");
      else
	fprintf (file, ", offset %li", (long) adj->offset);
      if (adj->by_ref)
	fprintf (file, ", by_ref");
      print_node_brief (file, ", type: ", adj->type, 0);
      fprintf (file, "\n");
    }
  parms.release ();
}

/* Dump the AV linked list.  */

void
ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
{
  bool comma = false;
  fprintf (f, " Aggregate replacements:");
  for (; av; av = av->next)
    {
      fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
	       av->index, av->offset);
      print_generic_expr (f, av->value, 0);
      comma = true;
    }
  fprintf (f, "\n");
}

/* Stream out jump function JUMP_FUNC to OB.  */
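/* Layout note (derived from the code below, not separate documentation):
   the stream holds the jump function type, then the type-specific payload,
   then the number of aggregate items, a by_ref bitpack only when that
   number is non-zero, and finally each item's offset and value.
   ipa_read_jump_function must consume the fields in exactly this order.  */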

static void
ipa_write_jump_function (struct output_block *ob,
			 struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;

  streamer_write_uhwi (ob, jump_func->type);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_KNOWN_TYPE:
      streamer_write_uhwi (ob, jump_func->value.known_type.offset);
      stream_write_tree (ob, jump_func->value.known_type.base_type, true);
      stream_write_tree (ob, jump_func->value.known_type.component_type, true);
      break;
    case IPA_JF_CONST:
      gcc_assert (
	  EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
      stream_write_tree (ob, jump_func->value.constant.value, true);
      break;
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      if (jump_func->value.pass_through.operation == NOP_EXPR)
	{
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
	  bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
	  streamer_write_bitpack (&bp);
	}
      else
	{
	  stream_write_tree (ob, jump_func->value.pass_through.operand, true);
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	}
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      stream_write_tree (ob, jump_func->value.ancestor.type, true);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
      streamer_write_bitpack (&bp);
      break;
    }

  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);
  if (count)
    {
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->agg.by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
    {
      streamer_write_uhwi (ob, item->offset);
      stream_write_tree (ob, item->value, true);
    }
}

/* Read in jump function JUMP_FUNC from IB.  */

static void
ipa_read_jump_function (struct lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct cgraph_edge *cs,
			struct data_in *data_in)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;

  jftype = (enum jump_func_type) streamer_read_uhwi (ib);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      jump_func->type = IPA_JF_UNKNOWN;
      break;
    case IPA_JF_KNOWN_TYPE:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	tree base_type = stream_read_tree (ib, data_in);
	tree component_type = stream_read_tree (ib, data_in);

	ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
	break;
      }
    case IPA_JF_CONST:
      ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
      break;
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
	{
	  int formal_id = streamer_read_uhwi (ib);
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool agg_preserved = bp_unpack_value (&bp, 1);
	  bool type_preserved = bp_unpack_value (&bp, 1);
	  ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
					  type_preserved);
	}
      else
	{
	  tree operand = stream_read_tree (ib, data_in);
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
					 operation);
	}
      break;
    case IPA_JF_ANCESTOR:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	tree type = stream_read_tree (ib, data_in);
	int formal_id = streamer_read_uhwi (ib);
	struct bitpack_d bp = streamer_read_bitpack (ib);
	bool agg_preserved = bp_unpack_value (&bp, 1);
	bool type_preserved = bp_unpack_value (&bp, 1);

	ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
			     type_preserved);
	break;
      }
    }

  count = streamer_read_uhwi (ib);
  vec_alloc (jump_func->agg.items, count);
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item item;
      item.offset = streamer_read_uhwi (ib);
      item.value = stream_read_tree (ib, data_in);
      jump_func->agg.items->quick_push (item);
    }
}

/* Stream out to OB the parts of cgraph_indirect_call_info corresponding to CS
   that are relevant to indirect inlining.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  streamer_write_hwi (ob, ii->offset);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->member_ptr, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  bp_pack_value (&bp, ii->maybe_in_construction, 1);
  bp_pack_value (&bp, ii->maybe_derived_type, 1);
  streamer_write_bitpack (&bp);

  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
      stream_write_tree (ob, ii->outer_type, true);
    }
}

/* Read in from IB the parts of cgraph_indirect_call_info corresponding to CS
   that are relevant to indirect inlining.  */

static void
ipa_read_indirect_edge_info (struct lto_input_block *ib,
			     struct data_in *data_in ATTRIBUTE_UNUSED,
			     struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  ii->param_index = (int) streamer_read_hwi (ib);
  ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  bp = streamer_read_bitpack (ib);
  ii->polymorphic = bp_unpack_value (&bp, 1);
  ii->agg_contents = bp_unpack_value (&bp, 1);
  ii->member_ptr = bp_unpack_value (&bp, 1);
  ii->by_ref = bp_unpack_value (&bp, 1);
  ii->maybe_in_construction = bp_unpack_value (&bp, 1);
  ii->maybe_derived_type = bp_unpack_value (&bp, 1);
  if (ii->polymorphic)
    {
      ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
      ii->otr_type = stream_read_tree (ib, data_in);
      ii->outer_type = stream_read_tree (ib, data_in);
    }
}

/* Stream out NODE info to OB.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, ipa_get_param_count (info));
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
  bp = bitpack_create (ob->main_stream);
  gcc_assert (info->uses_analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
  streamer_write_bitpack (&bp);
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
      ipa_write_indirect_edge_info (ob, e);
    }
}

/* Stream in NODE info from IB.  */

static void
ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
		    struct data_in *data_in)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  ipa_alloc_node_params (node, streamer_read_uhwi (ib));

  for (k = 0; k < ipa_get_param_count (info); k++)
    info->descriptors[k].move_cost = streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);
  if (ipa_get_param_count (info) != 0)
    info->uses_analysis_done = true;
  info->node_enqueued = false;
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);

      if (!count)
	continue;
      vec_safe_grow_cleared (args->jump_functions, count);

      for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				data_in);
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);

      if (count)
	{
	  vec_safe_grow_cleared (args->jump_functions, count);
	  for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	    ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				    data_in);
	}
      ipa_read_indirect_edge_info (ib, data_in, e);
    }
}

/* Write jump functions for nodes in the current LTO partition.  */

void
ipa_prop_write_jump_functions (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  if (!ipa_node_params_vector.exists ())
    return;

  ob = create_output_block (LTO_section_jump_functions);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->cgraph_node = NULL;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (cgraph_function_with_gimple_body_p (node)
	  && IPA_NODE_REF (node) != NULL)
	count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (cgraph_function_with_gimple_body_p (node)
	  && IPA_NODE_REF (node) != NULL)
	ipa_write_node_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}

/* Read section in file FILE_DATA of length LEN with data DATA.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
		       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  struct lto_input_block ib_main;
  unsigned int i;
  unsigned int count;

  LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
			header->main_size);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = cgraph (lto_symtab_encoder_deref (encoder, index));
      gcc_assert (node->definition);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}

/* Read ipcp jump functions.  */

void
ipa_prop_read_jump_functions (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  ipa_register_cgraph_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_section_data (file_data,
					       LTO_section_jump_functions,
					       NULL, &len);

      if (data)
	ipa_prop_read_section (file_data, data, len);
    }
}

/* After merging units, argument counts may no longer match and decl merging
   may have rendered parameter lists obsolete.  Also compute
   called_with_variable_arg info.  */

void
ipa_update_after_lto_read (void)
{
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
}

/* Write the aggregate replacement chain recorded for NODE to OB.  */

void
write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  unsigned int count = 0;
  lto_symtab_encoder_t encoder;
  struct ipa_agg_replacement_value *aggvals, *av;

  aggvals = ipa_get_agg_replacements_for_node (node);
  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  for (av = aggvals; av; av = av->next)
    count++;
  streamer_write_uhwi (ob, count);

  for (av = aggvals; av; av = av->next)
    {
      struct bitpack_d bp;

      streamer_write_uhwi (ob, av->offset);
      streamer_write_uhwi (ob, av->index);
      stream_write_tree (ob, av->value, true);

      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, av->by_ref, 1);
      streamer_write_bitpack (&bp);
    }
}

/* Stream in the aggregate value replacement chain for NODE from IB.  */
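/* Note (an observation about the code below, not upstream documentation):
   because each record is prepended to the chain as it is read, the rebuilt
   chain is in the reverse of the order in which write_agg_replacement_chain
   emitted it; consumers walk the whole chain, so the order is not
   significant.  */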

static void
read_agg_replacement_chain (struct lto_input_block *ib,
			    struct cgraph_node *node,
			    struct data_in *data_in)
{
  struct ipa_agg_replacement_value *aggvals = NULL;
  unsigned int count, i;

  count = streamer_read_uhwi (ib);
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_replacement_value *av;
      struct bitpack_d bp;

      av = ggc_alloc_ipa_agg_replacement_value ();
      av->offset = streamer_read_uhwi (ib);
      av->index = streamer_read_uhwi (ib);
      av->value = stream_read_tree (ib, data_in);
      bp = streamer_read_bitpack (ib);
      av->by_ref = bp_unpack_value (&bp, 1);
      av->next = aggvals;
      aggvals = av;
    }
  ipa_set_node_agg_value_chain (node, aggvals);
}

/* Write all aggregate replacement chains for nodes in the current LTO
   partition.  */

void
ipa_prop_write_all_agg_replacement (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  if (!ipa_node_agg_replacements)
    return;

  ob = create_output_block (LTO_section_ipcp_transform);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->cgraph_node = NULL;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (cgraph_function_with_gimple_body_p (node)
	  && ipa_get_agg_replacements_for_node (node) != NULL)
	count++;
    }

  streamer_write_uhwi (ob, count);

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (cgraph_function_with_gimple_body_p (node)
	  && ipa_get_agg_replacements_for_node (node) != NULL)
	write_agg_replacement_chain (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}

/* Read replacements section in file FILE_DATA of length LEN with data
   DATA.  */

static void
read_replacements_section (struct lto_file_decl_data *file_data,
			   const char *data,
			   size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  struct lto_input_block ib_main;
  unsigned int i;
  unsigned int count;

  LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
			header->main_size);

  data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
				header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = cgraph (lto_symtab_encoder_deref (encoder, index));
      gcc_assert (node->definition);
      read_agg_replacement_chain (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}

/* Read IPA-CP aggregate replacements.  */

void
ipa_prop_read_all_agg_replacement (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_section_data (file_data,
					       LTO_section_ipcp_transform,
					       NULL, &len);
      if (data)
	read_replacements_section (file_data, data, len);
    }
}

/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
   NODE.  */
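/* For instance (hypothetical indices): if the clone skips argument 1 of
   three original arguments, the mapping computed below is 0 -> 0, 1 -> -1
   and 2 -> 1, so a replacement recorded for original index 2 is renumbered
   to index 1 in the clone.  */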

static void
adjust_agg_replacement_values (struct cgraph_node *node,
			       struct ipa_agg_replacement_value *aggval)
{
  struct ipa_agg_replacement_value *v;
  int i, c = 0, d = 0, *adj;

  if (!node->clone.combined_args_to_skip)
    return;

  for (v = aggval; v; v = v->next)
    {
      gcc_assert (v->index >= 0);
      if (c < v->index)
	c = v->index;
    }
  c++;

  adj = XALLOCAVEC (int, c);
  for (i = 0; i < c; i++)
    if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
      {
	adj[i] = -1;
	d++;
      }
    else
      adj[i] = i - d;

  for (v = aggval; v; v = v->next)
    v->index = adj[v->index];
}

/* Function body transformation phase.  */
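/* In effect (a hypothetical example): when IPA-CP has determined that the
   field loaded from an aggregate parameter by a statement such as
   tmp_1 = s.f is known to be the constant 7 in this clone, the loop below
   rewrites the statement into tmp_1 = 7.  */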

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor> descriptors = vNULL;
  struct param_analysis_info *parms_ainfo;
  struct ipa_agg_replacement_value *aggval;
  gimple_stmt_iterator gsi;
  basic_block bb;
  int param_count;
  bool cfg_changed = false, something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s/%i\n",
	     node->name (), node->order);

  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);
  parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
  memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
  descriptors.safe_grow_cleared (param_count);
  ipa_populate_param_decls (node, descriptors);

  FOR_EACH_BB_FN (bb, cfun)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
	struct ipa_agg_replacement_value *v;
	gimple stmt = gsi_stmt (gsi);
	tree rhs, val, t;
	HOST_WIDE_INT offset, size;
	int index;
	bool by_ref, vce;

	if (!gimple_assign_load_p (stmt))
	  continue;
	rhs = gimple_assign_rhs1 (stmt);
	if (!is_gimple_reg_type (TREE_TYPE (rhs)))
	  continue;

	vce = false;
	t = rhs;
	while (handled_component_p (t))
	  {
	    /* V_C_E can do things like convert an array of integers to one
	       bigger integer and similar things we do not handle below.  */
	    if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
	      {
		vce = true;
		break;
	      }
	    t = TREE_OPERAND (t, 0);
	  }
	if (vce)
	  continue;

	if (!ipa_load_from_parm_agg_1 (descriptors, parms_ainfo, stmt,
				       rhs, &index, &offset, &size, &by_ref))
	  continue;
	for (v = aggval; v; v = v->next)
	  if (v->index == index
	      && v->offset == offset)
	    break;
	if (!v
	    || v->by_ref != by_ref
	    || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
	  continue;

	gcc_checking_assert (is_gimple_ip_invariant (v->value));
	if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
	  {
	    if (fold_convertible_p (TREE_TYPE (rhs), v->value))
	      val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
	    else if (TYPE_SIZE (TREE_TYPE (rhs))
		     == TYPE_SIZE (TREE_TYPE (v->value)))
	      val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
	    else
	      {
		if (dump_file)
		  {
		    fprintf (dump_file, " const ");
		    print_generic_expr (dump_file, v->value, 0);
		    fprintf (dump_file, " can't be converted to type of ");
		    print_generic_expr (dump_file, rhs, 0);
		    fprintf (dump_file, "\n");
		  }
		continue;
	      }
	  }
	else
	  val = v->value;

	if (dump_file && (dump_flags & TDF_DETAILS))
	  {
	    fprintf (dump_file, "Modifying stmt:\n ");
	    print_gimple_stmt (dump_file, stmt, 0, 0);
	  }
	gimple_assign_set_rhs_from_tree (&gsi, val);
	update_stmt (stmt);

	if (dump_file && (dump_flags & TDF_DETAILS))
	  {
	    fprintf (dump_file, "into:\n ");
	    print_gimple_stmt (dump_file, stmt, 0, 0);
	    fprintf (dump_file, "\n");
	  }

	something_changed = true;
	if (maybe_clean_eh_stmt (stmt)
	    && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
	  cfg_changed = true;
      }

  (*ipa_node_agg_replacements)[node->uid] = NULL;
  free_parms_ainfo (parms_ainfo, param_count);
  descriptors.release ();

  if (!something_changed)
    return 0;
  else if (cfg_changed)
    return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
  else
    return TODO_update_ssa_only_virtuals;
}