/* (ChangeLog fragment: gcc.dg/tree-ssa/pr44258.c: Disable scan test for
   Epiphany.)
   Source listing: gcc/ipa-prop.c from official-gcc.git,
   blob bf9e903ca80ebe15096273ef3964cd7074aa71ba.
   NOTE(review): this extract is missing some physical lines of the original
   file (brace-only and blank lines were dropped during extraction).  */
1 /* Interprocedural analyses.
2 Copyright (C) 2005-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tree.h"
24 #include "langhooks.h"
25 #include "ggc.h"
26 #include "target.h"
27 #include "cgraph.h"
28 #include "ipa-prop.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-inline.h"
32 #include "ipa-inline.h"
33 #include "gimple.h"
34 #include "flags.h"
35 #include "diagnostic.h"
36 #include "gimple-pretty-print.h"
37 #include "lto-streamer.h"
38 #include "data-streamer.h"
39 #include "tree-streamer.h"
40 #include "params.h"
42 /* Intermediate information about a parameter that is only useful during the
43 run of ipa_analyze_node and is not kept afterwards. */
45 struct param_analysis_info
/* Each flag is set once the corresponding kind of modification has been
   proven for the parameter; queries then short-circuit (see
   parm_preserved_before_stmt_p and friends below).  */
47 bool parm_modified, ref_modified, pt_modified;
/* Caches of statements already visited by walk_aliased_vdefs for this
   parameter, so repeated queries do not re-walk them.  */
48 bitmap parm_visited_statements, pt_visited_statements;
51 /* Vector where the parameter infos are actually stored. */
52 vec<ipa_node_params_t> ipa_node_params_vector;
53 /* Vector of known aggregate values in cloned nodes. */
54 vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
55 /* Vector where the edge argument infos are actually stored.  */
56 vec<ipa_edge_args_t, va_gc> *ipa_edge_args_vector;
58 /* Holders of ipa cgraph hooks: */
/* Each holder presumably keeps the registration of one callback into the
   call graph machinery so it can later be unregistered -- the
   registration/removal code is not visible in this extract.  */
59 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
60 static struct cgraph_node_hook_list *node_removal_hook_holder;
61 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
62 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
63 static struct cgraph_node_hook_list *function_insertion_hook_holder;
65 /* Description of a reference to an IPA constant. */
66 struct ipa_cst_ref_desc
68 /* Edge that corresponds to the statement which took the reference. */
69 struct cgraph_edge *cs;
70 /* Linked list of duplicates created when call graph edges are cloned. */
71 struct ipa_cst_ref_desc *next_duplicate;
72 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
73 is out of control. */
74 int refcount;
77 /* Allocation pool for reference descriptions. */
/* Created lazily in ipa_set_jf_constant on first use.  */
79 static alloc_pool ipa_refdesc_pool;
81 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
82 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
84 static bool
85 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
87 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->symbol.decl);
88 struct cl_optimization *os;
/* No function-specific options: nothing forbids the analysis.  */
90 if (!fs_opts)
91 return false;
92 os = TREE_OPTIMIZATION (fs_opts);
/* Forbid the analysis for functions built with -O0 or -fno-ipa-cp.  */
93 return !os->x_optimize || !os->x_flag_ipa_cp;
96 /* Return index of the formal whose tree is PTREE in function which corresponds
97 to INFO.  Return -1 if PTREE is not a formal parameter.  */
99 static int
100 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor_t> descriptors, tree ptree)
102 int i, count;
104 count = descriptors.length ();
/* Linear scan; the number of formal parameters is expected to be small.  */
105 for (i = 0; i < count; i++)
106 if (descriptors[i].decl == ptree)
107 return i;
109 return -1;
112 /* Return index of the formal whose tree is PTREE in function which corresponds
113 to INFO.  Return -1 if PTREE is not found.  */
116 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
118 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
121 /* Populate the param_decl field in parameter DESCRIPTORS that correspond to
122 NODE.  DESCRIPTORS must already be sized to the number of formal
parameters (see ipa_initialize_node_params).  */
124 static void
125 ipa_populate_param_decls (struct cgraph_node *node,
126 vec<ipa_param_descriptor_t> &descriptors)
128 tree fndecl;
129 tree fnargs;
130 tree parm;
131 int param_num;
133 /* We do not copy DECL_ARGUMENTS to virtual clones. */
134 while (node->clone_of)
135 node = node->clone_of;
137 fndecl = node->symbol.decl;
138 fnargs = DECL_ARGUMENTS (fndecl);
139 param_num = 0;
/* Record each formal PARM_DECL, in order, into the descriptors.  */
140 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
142 descriptors[param_num].decl = parm;
143 param_num++;
147 /* Return how many formal parameters FNDECL has. */
149 static inline int
150 count_formal_params (tree fndecl)
152 tree parm;
153 int count = 0;
/* Walk the DECL_ARGUMENTS chain and count its elements.  */
155 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
156 count++;
158 return count;
161 /* Initialize the ipa_node_params structure associated with NODE by counting
162 the function parameters, creating the descriptors and populating their
163 param_decls. */
165 void
166 ipa_initialize_node_params (struct cgraph_node *node)
168 struct ipa_node_params *info = IPA_NODE_REF (node);
/* Do the work only once; existing descriptors mean NODE was already
   initialized.  */
170 if (!info->descriptors.exists ())
172 int param_count;
/* Virtual clones are not expected here; ipa_populate_param_decls itself
   walks to the clone origin when needed.  */
173 gcc_assert (!node->clone_of);
175 param_count = count_formal_params (node->symbol.decl);
176 if (param_count)
178 info->descriptors.safe_grow_cleared (param_count);
179 ipa_populate_param_decls (node, info->descriptors);
184 /* Print the jump functions associated with call graph edge CS to file F. */
186 static void
187 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
189 int i, count;
191 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
/* Dump one jump function per actual argument of the call.  */
192 for (i = 0; i < count; i++)
194 struct ipa_jump_func *jump_func;
195 enum jump_func_type type;
197 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
198 type = jump_func->type;
200 fprintf (f, " param %d: ", i);
201 if (type == IPA_JF_UNKNOWN)
202 fprintf (f, "UNKNOWN\n");
203 else if (type == IPA_JF_KNOWN_TYPE)
205 fprintf (f, "KNOWN TYPE: base ");
206 print_generic_expr (f, jump_func->value.known_type.base_type, 0);
207 fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
208 jump_func->value.known_type.offset);
209 print_generic_expr (f, jump_func->value.known_type.component_type, 0);
210 fprintf (f, "\n");
212 else if (type == IPA_JF_CONST)
214 tree val = jump_func->value.constant.value;
215 fprintf (f, "CONST: ");
216 print_generic_expr (f, val, 0);
/* For addresses of CONST_DECLs, also dump the pointed-to initializer.  */
217 if (TREE_CODE (val) == ADDR_EXPR
218 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
220 fprintf (f, " -> ");
221 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
224 fprintf (f, "\n");
226 else if (type == IPA_JF_PASS_THROUGH)
228 fprintf (f, "PASS THROUGH: ");
229 fprintf (f, "%d, op %s",
230 jump_func->value.pass_through.formal_id,
231 tree_code_name[(int)
232 jump_func->value.pass_through.operation]);
/* Arithmetic pass-throughs carry an extra constant operand.  */
233 if (jump_func->value.pass_through.operation != NOP_EXPR)
235 fprintf (f, " ");
236 print_generic_expr (f,
237 jump_func->value.pass_through.operand, 0);
239 if (jump_func->value.pass_through.agg_preserved)
240 fprintf (f, ", agg_preserved");
241 fprintf (f, "\n");
243 else if (type == IPA_JF_ANCESTOR)
245 fprintf (f, "ANCESTOR: ");
246 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
247 jump_func->value.ancestor.formal_id,
248 jump_func->value.ancestor.offset);
249 print_generic_expr (f, jump_func->value.ancestor.type, 0);
250 if (jump_func->value.ancestor.agg_preserved)
251 fprintf (f, ", agg_preserved");
252 fprintf (f, "\n");
/* The aggregate part of a jump function is independent of its scalar type
   and is dumped for all of them.  */
255 if (jump_func->agg.items)
257 struct ipa_agg_jf_item *item;
258 int j;
260 fprintf (f, " Aggregate passed by %s:\n",
261 jump_func->agg.by_ref ? "reference" : "value");
262 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
264 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
265 item->offset);
266 if (TYPE_P (item->value))
267 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
268 tree_low_cst (TYPE_SIZE (item->value), 1));
269 else
271 fprintf (f, "cst: ");
272 print_generic_expr (f, item->value, 0);
274 fprintf (f, "\n");
281 /* Print the jump functions of all arguments on all call graph edges going from
282 NODE to file F. */
284 void
285 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
287 struct cgraph_edge *cs;
289 fprintf (f, " Jump functions of caller %s/%i:\n", cgraph_node_name (node),
290 node->symbol.order)
291 for (cs = node->callees; cs; cs = cs->next_callee)
293 if (!ipa_edge_args_info_available_for_edge_p (cs))
294 continue;
/* NOTE(review): the xstrdup'ed names are never freed -- a small leak in
   dump-only code; presumably xstrdup guards against cgraph_node_name
   reusing an internal buffer between the two calls.  Confirm before
   changing.  */
296 fprintf (f, " callsite %s/%i -> %s/%i : \n",
297 xstrdup (cgraph_node_name (node)), node->symbol.order,
298 xstrdup (cgraph_node_name (cs->callee)),
299 cs->callee->symbol.order);
300 ipa_print_node_jump_functions_for_edge (f, cs);
/* Indirect calls: also summarize what is known about the call target.  */
303 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
305 struct cgraph_indirect_call_info *ii;
306 if (!ipa_edge_args_info_available_for_edge_p (cs))
307 continue;
309 ii = cs->indirect_info;
310 if (ii->agg_contents)
311 fprintf (f, " indirect %s callsite, calling param %i, "
312 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
313 ii->member_ptr ? "member ptr" : "aggregate",
314 ii->param_index, ii->offset,
315 ii->by_ref ? "by reference" : "by_value");
316 else
317 fprintf (f, " indirect %s callsite, calling param %i",
318 ii->polymorphic ? "polymorphic" : "simple", ii->param_index);
320 if (cs->call_stmt)
322 fprintf (f, ", for stmt ");
323 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
325 else
326 fprintf (f, "\n");
327 ipa_print_node_jump_functions_for_edge (f, cs);
331 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
333 void
334 ipa_print_all_jump_functions (FILE *f)
336 struct cgraph_node *node;
338 fprintf (f, "\nJump functions:\n");
/* Iterate over every function in the call graph.  */
339 FOR_EACH_FUNCTION (node)
341 ipa_print_node_jump_functions (f, node);
345 /* Set JFUNC to be a known type jump function. */
347 static void
348 ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
349 tree base_type, tree component_type)
351 jfunc->type = IPA_JF_KNOWN_TYPE;
352 jfunc->value.known_type.offset = offset,
353 jfunc->value.known_type.base_type = base_type;
354 jfunc->value.known_type.component_type = component_type;
357 /* Set JFUNC to be a constant jump function holding CONSTANT, taken at call
graph edge CS.  */
359 static void
360 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
361 struct cgraph_edge *cs)
/* NOTE(review): CONSTANT is unshared and location-stripped twice --
   unshare_expr + SET_EXPR_LOCATION here, then unshare_expr_without_location
   below.  This looks redundant; confirm before simplifying.  */
363 constant = unshare_expr (constant);
364 if (constant && EXPR_P (constant))
365 SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
366 jfunc->type = IPA_JF_CONST;
367 jfunc->value.constant.value = unshare_expr_without_location (constant);
/* For addresses of functions, attach a reference description; per the
   ipa_cst_ref_desc comments above, duplicates are chained when call graph
   edges are cloned.  */
369 if (TREE_CODE (constant) == ADDR_EXPR
370 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
372 struct ipa_cst_ref_desc *rdesc;
/* Lazily create the allocation pool on first use.  */
373 if (!ipa_refdesc_pool)
374 ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
375 sizeof (struct ipa_cst_ref_desc), 32);
377 rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
378 rdesc->cs = cs;
379 rdesc->next_duplicate = NULL;
380 rdesc->refcount = 1;
381 jfunc->value.constant.rdesc = rdesc;
383 else
384 jfunc->value.constant.rdesc = NULL;
387 /* Set JFUNC to be a simple pass-through jump function. */
388 static void
389 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
390 bool agg_preserved)
392 jfunc->type = IPA_JF_PASS_THROUGH;
393 jfunc->value.pass_through.operand = NULL_TREE;
394 jfunc->value.pass_through.formal_id = formal_id;
395 jfunc->value.pass_through.operation = NOP_EXPR;
396 jfunc->value.pass_through.agg_preserved = agg_preserved;
399 /* Set JFUNC to be an arithmetic pass through jump function. */
401 static void
402 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
403 tree operand, enum tree_code operation)
405 jfunc->type = IPA_JF_PASS_THROUGH;
406 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
407 jfunc->value.pass_through.formal_id = formal_id;
408 jfunc->value.pass_through.operation = operation;
409 jfunc->value.pass_through.agg_preserved = false;
412 /* Set JFUNC to be an ancestor jump function. */
414 static void
415 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
416 tree type, int formal_id, bool agg_preserved)
418 jfunc->type = IPA_JF_ANCESTOR;
419 jfunc->value.ancestor.formal_id = formal_id;
420 jfunc->value.ancestor.offset = offset;
421 jfunc->value.ancestor.type = type;
422 jfunc->value.ancestor.agg_preserved = agg_preserved;
425 /* Extract the actual BINFO being described by JFUNC which must be a known type
426 jump function. */
428 tree
429 ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
431 tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
/* No base info recorded for the type -- nothing to extract.  */
432 if (!base_binfo)
433 return NULL_TREE;
/* Find the binfo of the component at the recorded offset.  */
434 return get_binfo_at_offset (base_binfo,
435 jfunc->value.known_type.offset,
436 jfunc->value.known_type.component_type);
439 /* Structure to be passed in between detect_type_change and
440 check_stmt_for_type_change.  Carries both the query (offset, object) and
the accumulated answer.  */
442 struct type_change_info
444 /* Offset into the object where there is the virtual method pointer we are
445 looking for. */
446 HOST_WIDE_INT offset;
447 /* The declaration or SSA_NAME pointer of the base that we are checking for
448 type change. */
449 tree object;
450 /* If we actually can tell the type that the object has changed to, it is
451 stored in this field. Otherwise it remains NULL_TREE. */
452 tree known_current_type;
453 /* Set to true if dynamic type change has been detected. */
454 bool type_maybe_changed;
455 /* Set to true if multiple types have been encountered. known_current_type
456 must be disregarded in that case. */
457 bool multiple_types_encountered;
460 /* Return true if STMT can modify a virtual method table pointer.
462 This function makes special assumptions about both constructors and
463 destructors which are all the functions that are allowed to alter the VMT
464 pointers. It assumes that destructors begin with assignment into all VMT
465 pointers and that constructors essentially look in the following way:
467 1) The very first thing they do is that they call constructors of ancestor
468 sub-objects that have them.
470 2) Then VMT pointers of this and all its ancestors is set to new values
471 corresponding to the type corresponding to the constructor.
473 3) Only afterwards, other stuff such as constructor of member sub-objects
474 and the code written by the user is run. Only this may include calling
475 virtual functions, directly or indirectly.
477 There is no way to call a constructor of an ancestor sub-object in any
478 other way.
480 This means that we do not have to care whether constructors get the correct
481 type information because they will always change it (in fact, if we define
482 the type to be given by the VMT pointer, it is undefined).
484 The most important fact to derive from the above is that if, for some
485 statement in the section 3, we try to detect whether the dynamic type has
486 changed, we can safely ignore all calls as we examine the function body
487 backwards until we reach statements in section 2 because these calls cannot
488 be ancestor constructors or destructors (if the input is not bogus) and so
489 do not change the dynamic type (this holds true only for automatically
490 allocated objects but at the moment we devirtualize only these). We then
491 must detect that statements in section 2 change the dynamic type and can try
492 to derive the new type. That is enough and we can stop, we will never see
493 the calls into constructors of sub-objects in this code. Therefore we can
494 safely ignore all call statements that we traverse.
497 static bool
498 stmt_may_be_vtbl_ptr_store (gimple stmt)
/* Per the comment above, calls traversed during the backward walk cannot
   change the dynamic type.  */
500 if (is_gimple_call (stmt))
501 return false;
502 else if (is_gimple_assign (stmt))
504 tree lhs = gimple_assign_lhs (stmt);
506 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
/* Under strict aliasing, a store through a non-pointer lvalue cannot
   clobber a virtual table pointer.  */
508 if (flag_strict_aliasing
509 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
510 return false;
/* A store to a non-virtual field cannot be a VMT pointer store.  */
512 if (TREE_CODE (lhs) == COMPONENT_REF
513 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
514 return false;
515 /* In the future we might want to use get_base_ref_and_offset to find
516 if there is a field corresponding to the offset and if so, proceed
517 almost like if it was a component ref. */
/* Conservatively assume any other statement may store the VMT pointer.  */
520 return true;
523 /* If STMT can be proved to be an assignment to the virtual method table
524 pointer of ANALYZED_OBJ and the type associated with the new table
525 identified, return the type. Otherwise return NULL_TREE. */
527 static tree
528 extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
530 HOST_WIDE_INT offset, size, max_size;
531 tree lhs, rhs, base;
533 if (!gimple_assign_single_p (stmt))
534 return NULL_TREE;
536 lhs = gimple_assign_lhs (stmt);
537 rhs = gimple_assign_rhs1 (stmt);
/* Look for "obj->vptr = &VTABLE": a store of the address of a virtual
   table variable into a field marked DECL_VIRTUAL_P.  */
538 if (TREE_CODE (lhs) != COMPONENT_REF
539 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
540 || TREE_CODE (rhs) != ADDR_EXPR)
541 return NULL_TREE;
542 rhs = get_base_address (TREE_OPERAND (rhs, 0));
543 if (!rhs
544 || TREE_CODE (rhs) != VAR_DECL
545 || !DECL_VIRTUAL_P (rhs))
546 return NULL_TREE;
/* The store must be exactly at TCI->offset and exactly pointer-sized.  */
548 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
549 if (offset != tci->offset
550 || size != POINTER_SIZE
551 || max_size != POINTER_SIZE)
552 return NULL_TREE;
/* Verify the store targets the very object being tracked.  */
553 if (TREE_CODE (base) == MEM_REF)
555 if (TREE_CODE (tci->object) != MEM_REF
556 || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
557 || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
558 TREE_OPERAND (base, 1)))
559 return NULL_TREE;
561 else if (tci->object != base)
562 return NULL_TREE;
/* The type the virtual table belongs to is its DECL_CONTEXT.  */
564 return DECL_CONTEXT (rhs);
567 /* Callback of walk_aliased_vdefs and a helper function for
568 detect_type_change to check whether a particular statement may modify
569 the virtual table pointer, and if possible also determine the new type of
570 the (sub-)object. It stores its result into DATA, which points to a
571 type_change_info structure. */
573 static bool
574 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
576 gimple stmt = SSA_NAME_DEF_STMT (vdef);
577 struct type_change_info *tci = (struct type_change_info *) data;
579 if (stmt_may_be_vtbl_ptr_store (stmt))
581 tree type;
582 type = extr_type_from_vtbl_ptr_store (stmt, tci);
/* A second, different candidate type invalidates any single answer.  */
583 if (tci->type_maybe_changed
584 && type != tci->known_current_type)
585 tci->multiple_types_encountered = true;
586 tci->known_current_type = type;
587 tci->type_maybe_changed = true;
/* Returning true presumably terminates the vdef walk -- one possible
   store is all the caller needs to know about.  */
588 return true;
590 else
591 return false;
596 /* Like detect_type_change but with extra argument COMP_TYPE which will become
597 the component type part of new JFUNC if dynamic type change is detected and
598 the new base type is identified. */
600 static bool
601 detect_type_change_1 (tree arg, tree base, tree comp_type, gimple call,
602 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
604 struct type_change_info tci;
605 ao_ref ao;
607 gcc_checking_assert (DECL_P (arg)
608 || TREE_CODE (arg) == MEM_REF
609 || handled_component_p (arg));
610 /* Const calls cannot call virtual methods through VMT and so type changes do
611 not matter. */
612 if (!flag_devirtualize || !gimple_vuse (call))
613 return false;
/* Restrict the alias walk to the pointer-sized slot at OFFSET where the
   virtual method table pointer would live.  */
615 ao_ref_init (&ao, arg);
616 ao.base = base;
617 ao.offset = offset;
618 ao.size = POINTER_SIZE;
619 ao.max_size = ao.size;
621 tci.offset = offset;
622 tci.object = get_base_address (arg);
623 tci.known_current_type = NULL_TREE;
624 tci.type_maybe_changed = false;
625 tci.multiple_types_encountered = false;
/* Walk virtual definitions backwards from the call, looking for stores
   that may modify the VMT pointer.  */
627 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
628 &tci, NULL);
629 if (!tci.type_maybe_changed)
630 return false;
/* Record the new known type only when it is unique and at offset zero;
   otherwise degrade JFUNC to UNKNOWN.  */
632 if (!tci.known_current_type
633 || tci.multiple_types_encountered
634 || offset != 0)
635 jfunc->type = IPA_JF_UNKNOWN;
636 else
637 ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);
639 return true;
642 /* Detect whether the dynamic type of ARG has changed (before callsite CALL) by
643 looking for assignments to its virtual table pointer. If it is, return true
644 and fill in the jump function JFUNC with relevant type information or set it
645 to unknown. ARG is the object itself (not a pointer to it, unless
646 dereferenced). BASE is the base of the memory access as returned by
647 get_ref_base_and_extent, as is the offset. */
649 static bool
650 detect_type_change (tree arg, tree base, gimple call,
651 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
/* The component type is simply the declared type of ARG.  */
653 return detect_type_change_1 (arg, base, TREE_TYPE (arg), call, jfunc, offset);
656 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
657 SSA name (its dereference will become the base and the offset is assumed to
658 be zero). */
660 static bool
661 detect_type_change_ssa (tree arg, gimple call, struct ipa_jump_func *jfunc)
663 tree comp_type;
665 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
/* Only pointers to RECORD_TYPEs are of interest here.  */
666 if (!flag_devirtualize
667 || !POINTER_TYPE_P (TREE_TYPE (arg))
668 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != RECORD_TYPE)
669 return false;
671 comp_type = TREE_TYPE (TREE_TYPE (arg));
/* Build a MEM_REF representing *ARG at offset zero to serve as both the
   reference and its base.  */
672 arg = build2 (MEM_REF, ptr_type_node, arg,
673 build_int_cst (ptr_type_node, 0));
675 return detect_type_change_1 (arg, arg, comp_type, call, jfunc, 0);
678 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
679 boolean variable pointed to by DATA. */
681 static bool
682 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
683 void *data)
685 bool *b = (bool *) data;
686 *b = true;
687 return true;
690 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
691 a value known not to be modified in this function before reaching the
692 statement STMT. PARM_AINFO is a pointer to a structure containing temporary
693 information about the parameter. */
695 static bool
696 parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
697 gimple stmt, tree parm_load)
699 bool modified = false;
700 bitmap *visited_stmts;
701 ao_ref refd;
/* A previously cached modification answers the query negatively.  */
703 if (parm_ainfo && parm_ainfo->parm_modified)
704 return false;
706 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
707 ao_ref_init (&refd, parm_load);
708 /* We can cache visited statements only when parm_ainfo is available and when
709 we are looking at a naked load of the whole parameter. */
710 if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
711 visited_stmts = NULL;
712 else
713 visited_stmts = &parm_ainfo->parm_visited_statements;
714 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
715 visited_stmts);
/* Cache a positive modification verdict for subsequent queries.  */
716 if (parm_ainfo && modified)
717 parm_ainfo->parm_modified = true;
718 return !modified;
721 /* If STMT is an assignment that loads a value from a parameter declaration,
722 return the index of the parameter in ipa_node_params which has not been
723 modified. Otherwise return -1. */
725 static int
726 load_from_unmodified_param (vec<ipa_param_descriptor_t> descriptors,
727 struct param_analysis_info *parms_ainfo,
728 gimple stmt)
730 int index;
731 tree op1;
733 if (!gimple_assign_single_p (stmt))
734 return -1;
736 op1 = gimple_assign_rhs1 (stmt);
737 if (TREE_CODE (op1) != PARM_DECL)
738 return -1;
740 index = ipa_get_param_decl_index_1 (descriptors, op1);
/* PARMS_AINFO may be NULL; parm_preserved_before_stmt_p then works
   without caching.  */
741 if (index < 0
742 || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
743 : NULL, stmt, op1))
744 return -1;
746 return index;
749 /* Return true if memory reference REF loads data that are known to be
750 unmodified in this function before reaching statement STMT. PARM_AINFO, if
751 non-NULL, is a pointer to a structure containing temporary information about
752 PARM. */
754 static bool
755 parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
756 gimple stmt, tree ref)
758 bool modified = false;
759 ao_ref refd;
761 gcc_checking_assert (gimple_vuse (stmt));
/* A cached "modified" verdict short-circuits the walk.  */
762 if (parm_ainfo && parm_ainfo->ref_modified)
763 return false;
765 ao_ref_init (&refd, ref);
766 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
767 NULL);
/* Cache a positive modification verdict for subsequent queries.  */
768 if (parm_ainfo && modified)
769 parm_ainfo->ref_modified = true;
770 return !modified;
773 /* Return true if the data pointed to by PARM is known to be unmodified in this
774 function before reaching call statement CALL into which it is passed.
775 PARM_AINFO is a pointer to a structure containing temporary information
776 about PARM. */
778 static bool
779 parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
780 gimple call, tree parm)
782 bool modified = false;
783 ao_ref refd;
785 /* It's unnecessary to calculate anything about memory contents for a const
786 function because it is not going to use it. But do not cache the result
787 either. Also, no such calculations for non-pointers. */
788 if (!gimple_vuse (call)
789 || !POINTER_TYPE_P (TREE_TYPE (parm)))
790 return false;
/* NOTE(review): PARM_AINFO is dereferenced unconditionally here and below,
   yet the walk_aliased_vdefs call guards it with "parm_ainfo ? ... : NULL".
   Either the guard is dead or the dereferences are unsafe -- confirm that
   all callers pass a non-NULL PARM_AINFO.  */
792 if (parm_ainfo->pt_modified)
793 return false;
795 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
796 walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
797 parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
798 if (modified)
799 parm_ainfo->pt_modified = true;
800 return !modified;
803 /* Return true if we can prove that OP is a memory reference loading unmodified
804 data from an aggregate passed as a parameter and if the aggregate is passed
805 by reference, that the alias type of the load corresponds to the type of the
806 formal parameter (so that we can rely on this type for TBAA in callers).
807 INFO and PARMS_AINFO describe parameters of the current function (but the
808 latter can be NULL), STMT is the load statement. If function returns true,
809 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
810 within the aggregate and whether it is a load from a value passed by
811 reference respectively. */
813 static bool
814 ipa_load_from_parm_agg_1 (vec<ipa_param_descriptor_t> descriptors,
815 struct param_analysis_info *parms_ainfo, gimple stmt,
816 tree op, int *index_p, HOST_WIDE_INT *offset_p,
817 bool *by_ref_p)
819 int index;
820 HOST_WIDE_INT size, max_size;
821 tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);
/* Bail out on variable-sized or negative-offset accesses.  */
823 if (max_size == -1 || max_size != size || *offset_p < 0)
824 return false;
/* Case 1: the aggregate is the parameter itself (passed by value).  */
826 if (DECL_P (base))
/* NOTE(review): this inner "index" shadows the function-scope one; the
   shadowing is harmless because this branch always returns, but it would
   trip -Wshadow.  */
828 int index = ipa_get_param_decl_index_1 (descriptors, base);
829 if (index >= 0
830 && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
831 : NULL, stmt, op))
833 *index_p = index;
834 *by_ref_p = false;
835 return true;
837 return false;
/* Case 2: the aggregate is passed by reference; the base must be a plain
   zero-offset dereference of an SSA name.  */
840 if (TREE_CODE (base) != MEM_REF
841 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
842 || !integer_zerop (TREE_OPERAND (base, 1)))
843 return false;
/* A default definition SSA name is the incoming parameter itself.  */
845 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
847 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
848 index = ipa_get_param_decl_index_1 (descriptors, parm);
850 else
852 /* This branch catches situations where a pointer parameter is not a
853 gimple register, for example:
855 void hip7(S*) (struct S * p)
857 void (*<T2e4>) (struct S *) D.1867;
858 struct S * p.1;
860 <bb 2>:
861 p.1_1 = p;
862 D.1867_2 = p.1_1->f;
863 D.1867_2 ();
864 gdp = &p;
867 gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
868 index = load_from_unmodified_param (descriptors, parms_ainfo, def);
871 if (index >= 0
872 && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
873 stmt, op))
875 *index_p = index;
876 *by_ref_p = true;
877 return true;
879 return false;
882 /* Just like the previous function, just without the param_analysis_info
883 pointer, for users outside of this file.  Without PARMS_AINFO no caching
of modification results takes place.  */
885 bool
886 ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
887 tree op, int *index_p, HOST_WIDE_INT *offset_p,
888 bool *by_ref_p)
890 return ipa_load_from_parm_agg_1 (info->descriptors, NULL, stmt, op, index_p,
891 offset_p, by_ref_p);
894 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
895 of an assignment statement STMT, try to determine whether we are actually
896 handling any of the following cases and construct an appropriate jump
897 function into JFUNC if so:
899 1) The passed value is loaded from a formal parameter which is not a gimple
900 register (most probably because it is addressable, the value has to be
901 scalar) and we can guarantee the value has not changed. This case can
902 therefore be described by a simple pass-through jump function. For example:
904 foo (int a)
906 int a.0;
908 a.0_2 = a;
909 bar (a.0_2);
911 2) The passed value can be described by a simple arithmetic pass-through
912 jump function. E.g.
914 foo (int a)
916 int D.2064;
918 D.2064_4 = a.1(D) + 4;
919 bar (D.2064_4);
921 This case can also occur in combination of the previous one, e.g.:
923 foo (int a, int z)
925 int a.0;
926 int D.2064;
928 a.0_3 = a;
929 D.2064_4 = a.0_3 + 4;
930 foo (D.2064_4);
932 3) The passed value is an address of an object within another one (which
933 also passed by reference). Such situations are described by an ancestor
934 jump function and describe situations such as:
936 B::foo() (struct B * const this)
938 struct A * D.1845;
940 D.1845_2 = &this_1(D)->D.1748;
941 A::bar (D.1845_2);
943 INFO is the structure describing individual parameters access different
944 stages of IPA optimizations. PARMS_AINFO contains the information that is
945 only needed for intraprocedural analysis. */
947 static void
948 compute_complex_assign_jump_func (struct ipa_node_params *info,
949 struct param_analysis_info *parms_ainfo,
950 struct ipa_jump_func *jfunc,
951 gimple call, gimple stmt, tree name)
953 HOST_WIDE_INT offset, size, max_size;
954 tree op1, tc_ssa, base, ssa;
955 int index;
957 op1 = gimple_assign_rhs1 (stmt);
/* Cases 1 and 2: the RHS is (derived from) an SSA name.  A default
   definition is the parameter itself; otherwise look through a load from
   an unmodified parameter.  */
959 if (TREE_CODE (op1) == SSA_NAME)
961 if (SSA_NAME_IS_DEFAULT_DEF (op1))
962 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
963 else
964 index = load_from_unmodified_param (info->descriptors, parms_ainfo,
965 SSA_NAME_DEF_STMT (op1));
966 tc_ssa = op1;
968 else
970 index = load_from_unmodified_param (info->descriptors, parms_ainfo, stmt);
971 tc_ssa = gimple_assign_lhs (stmt);
974 if (index >= 0)
976 tree op2 = gimple_assign_rhs2 (stmt);
/* Case 2: arithmetic pass-through.  The second operand must be an
   interprocedural invariant, and for non-comparisons the result type
   must match the first operand's type.  */
978 if (op2)
980 if (!is_gimple_ip_invariant (op2)
981 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
982 && !useless_type_conversion_p (TREE_TYPE (name),
983 TREE_TYPE (op1))))
984 return;
986 ipa_set_jf_arith_pass_through (jfunc, index, op2,
987 gimple_assign_rhs_code (stmt));
/* Case 1: a simple pass-through, valid only when no dynamic type change
   of the value could have occurred before the call.  */
989 else if (gimple_assign_single_p (stmt)
990 && !detect_type_change_ssa (tc_ssa, call, jfunc))
992 bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
993 call, tc_ssa);
994 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
996 return;
/* Case 3: the address of a component within a parameter -- try to build
   an ancestor jump function.  */
999 if (TREE_CODE (op1) != ADDR_EXPR)
1000 return;
1001 op1 = TREE_OPERAND (op1, 0);
1002 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
1003 return;
1004 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
1005 if (TREE_CODE (base) != MEM_REF
1006 /* If this is a varying address, punt. */
1007 || max_size == -1
1008 || max_size != size)
1009 return;
/* Fold the MEM_REF's own constant offset into the bit offset.  */
1010 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
1011 ssa = TREE_OPERAND (base, 0);
1012 if (TREE_CODE (ssa) != SSA_NAME
1013 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
1014 || offset < 0)
1015 return;
1017 /* Dynamic types are changed only in constructors and destructors and */
1018 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
1019 if (index >= 0
1020 && !detect_type_change (op1, base, call, jfunc, offset))
1021 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index,
1022 parm_ref_data_pass_through_p (&parms_ainfo[index],
1023 call, ssa));
1026 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1027 it looks like:
1029 iftmp.1_3 = &obj_2(D)->D.1762;
1031 The base of the MEM_REF must be a default definition SSA NAME of a
1032 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
1033 whole MEM_REF expression is returned and the offset calculated from any
1034 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1035 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1037 static tree
1038 get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
1040 HOST_WIDE_INT size, max_size;
1041 tree expr, parm, obj;
1043 if (!gimple_assign_single_p (assign))
1044 return NULL_TREE;
1045 expr = gimple_assign_rhs1 (assign);
1047 if (TREE_CODE (expr) != ADDR_EXPR)
1048 return NULL_TREE;
/* Strip the ADDR_EXPR and decompose the addressed reference.  */
1049 expr = TREE_OPERAND (expr, 0);
1050 obj = expr;
1051 expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
1053 if (TREE_CODE (expr) != MEM_REF
1054 /* If this is a varying address, punt. */
1055 || max_size == -1
1056 || max_size != size
1057 || *offset < 0)
1058 return NULL_TREE;
1059 parm = TREE_OPERAND (expr, 0);
/* The dereferenced pointer must be a default-definition SSA name of a
   PARM_DECL, i.e. the unmodified incoming parameter.  */
1060 if (TREE_CODE (parm) != SSA_NAME
1061 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1062 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1063 return NULL_TREE;
/* Add the MEM_REF's own constant offset, converted to bits.  */
1065 *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
1066 *obj_p = obj;
1067 return expr;
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_node_params *info,
				    struct param_analysis_info *parms_ainfo,
				    struct ipa_jump_func *jfunc,
				    gimple call, gimple phi)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  /* One PHI argument must be the zero coming from the failed NULL test, the
     other one the adjusted ancestor pointer.  */
  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  /* The non-NULL arm must be a single-predecessor block whose defining
     statement takes the address of a component of a formal parameter.  */
  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  gcc_assert (index >= 0);

  /* The predecessor block must end in exactly "if (PARM != 0)".  */
  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  /* Both PHI predecessors must be the two blocks of the matched diamond;
     anything else means extra control flow we do not understand.  */
  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  if (!detect_type_change (obj, expr, call, jfunc, offset))
    ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index,
			 parm_ref_data_pass_through_p (&parms_ainfo[index],
						       call, parm));
}
1153 /* Given OP which is passed as an actual argument to a called function,
1154 determine if it is possible to construct a KNOWN_TYPE jump function for it
1155 and if so, create one and store it to JFUNC. */
1157 static void
1158 compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
1159 gimple call)
1161 HOST_WIDE_INT offset, size, max_size;
1162 tree base;
1164 if (!flag_devirtualize
1165 || TREE_CODE (op) != ADDR_EXPR
1166 || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE)
1167 return;
1169 op = TREE_OPERAND (op, 0);
1170 base = get_ref_base_and_extent (op, &offset, &size, &max_size);
1171 if (!DECL_P (base)
1172 || max_size == -1
1173 || max_size != size
1174 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
1175 || is_global_var (base))
1176 return;
1178 if (!TYPE_BINFO (TREE_TYPE (base))
1179 || detect_type_change (op, base, call, jfunc, offset))
1180 return;
1182 ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base), TREE_TYPE (op));
1185 /* Inspect the given TYPE and return true iff it has the same structure (the
1186 same number of fields of the same types) as a C++ member pointer. If
1187 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1188 corresponding fields there. */
1190 static bool
1191 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1193 tree fld;
1195 if (TREE_CODE (type) != RECORD_TYPE)
1196 return false;
1198 fld = TYPE_FIELDS (type);
1199 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1200 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1201 || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
1202 return false;
1204 if (method_ptr)
1205 *method_ptr = fld;
1207 fld = DECL_CHAIN (fld);
1208 if (!fld || INTEGRAL_TYPE_P (fld)
1209 || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
1210 return false;
1211 if (delta)
1212 *delta = fld;
1214 if (DECL_CHAIN (fld))
1215 return false;
1217 return true;
1220 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1221 return the rhs of its defining statement. Otherwise return RHS as it
1222 is. */
1224 static inline tree
1225 get_ssa_def_if_simple_copy (tree rhs)
1227 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1229 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1231 if (gimple_assign_single_p (def_stmt))
1232 rhs = gimple_assign_rhs1 (def_stmt);
1233 else
1234 break;
1236 return rhs;
/* Simple linked list, describing known contents of an aggregate before a
   call.  The list is kept sorted by increasing offset and the described
   pieces never partially overlap (see determine_known_aggregate_parts).  */

struct ipa_known_agg_contents_list
{
  /* Offset and size (in bits) of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
1252 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1253 in ARG is filled in with constant values. ARG can either be an aggregate
1254 expression or a pointer to an aggregate. JFUNC is the jump function into
1255 which the constants are subsequently stored. */
1257 static void
1258 determine_known_aggregate_parts (gimple call, tree arg,
1259 struct ipa_jump_func *jfunc)
1261 struct ipa_known_agg_contents_list *list = NULL;
1262 int item_count = 0, const_count = 0;
1263 HOST_WIDE_INT arg_offset, arg_size;
1264 gimple_stmt_iterator gsi;
1265 tree arg_base;
1266 bool check_ref, by_ref;
1267 ao_ref r;
1269 /* The function operates in three stages. First, we prepare check_ref, r,
1270 arg_base and arg_offset based on what is actually passed as an actual
1271 argument. */
1273 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1275 by_ref = true;
1276 if (TREE_CODE (arg) == SSA_NAME)
1278 tree type_size;
1279 if (!host_integerp (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg))), 1))
1280 return;
1281 check_ref = true;
1282 arg_base = arg;
1283 arg_offset = 0;
1284 type_size = TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)));
1285 arg_size = tree_low_cst (type_size, 1);
1286 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1288 else if (TREE_CODE (arg) == ADDR_EXPR)
1290 HOST_WIDE_INT arg_max_size;
1292 arg = TREE_OPERAND (arg, 0);
1293 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1294 &arg_max_size);
1295 if (arg_max_size == -1
1296 || arg_max_size != arg_size
1297 || arg_offset < 0)
1298 return;
1299 if (DECL_P (arg_base))
1301 tree size;
1302 check_ref = false;
1303 size = build_int_cst (integer_type_node, arg_size);
1304 ao_ref_init_from_ptr_and_size (&r, arg_base, size);
1306 else
1307 return;
1309 else
1310 return;
1312 else
1314 HOST_WIDE_INT arg_max_size;
1316 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1318 by_ref = false;
1319 check_ref = false;
1320 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1321 &arg_max_size);
1322 if (arg_max_size == -1
1323 || arg_max_size != arg_size
1324 || arg_offset < 0)
1325 return;
1327 ao_ref_init (&r, arg);
1330 /* Second stage walks back the BB, looks at individual statements and as long
1331 as it is confident of how the statements affect contents of the
1332 aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
1333 describing it. */
1334 gsi = gsi_for_stmt (call);
1335 gsi_prev (&gsi);
1336 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1338 struct ipa_known_agg_contents_list *n, **p;
1339 gimple stmt = gsi_stmt (gsi);
1340 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1341 tree lhs, rhs, lhs_base;
1342 bool partial_overlap;
1344 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1345 continue;
1346 if (!gimple_assign_single_p (stmt))
1347 break;
1349 lhs = gimple_assign_lhs (stmt);
1350 rhs = gimple_assign_rhs1 (stmt);
1351 if (!is_gimple_reg_type (rhs)
1352 || TREE_CODE (lhs) == BIT_FIELD_REF
1353 || contains_bitfld_component_ref_p (lhs))
1354 break;
1356 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1357 &lhs_max_size);
1358 if (lhs_max_size == -1
1359 || lhs_max_size != lhs_size
1360 || (lhs_offset < arg_offset
1361 && lhs_offset + lhs_size > arg_offset)
1362 || (lhs_offset < arg_offset + arg_size
1363 && lhs_offset + lhs_size > arg_offset + arg_size))
1364 break;
1366 if (check_ref)
1368 if (TREE_CODE (lhs_base) != MEM_REF
1369 || TREE_OPERAND (lhs_base, 0) != arg_base
1370 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1371 break;
1373 else if (lhs_base != arg_base)
1375 if (DECL_P (lhs_base))
1376 continue;
1377 else
1378 break;
1381 if (lhs_offset + lhs_size < arg_offset
1382 || lhs_offset >= (arg_offset + arg_size))
1383 continue;
1385 partial_overlap = false;
1386 p = &list;
1387 while (*p && (*p)->offset < lhs_offset)
1389 if ((*p)->offset + (*p)->size > lhs_offset)
1391 partial_overlap = true;
1392 break;
1394 p = &(*p)->next;
1396 if (partial_overlap)
1397 break;
1398 if (*p && (*p)->offset < lhs_offset + lhs_size)
1400 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1401 /* We already know this value is subsequently overwritten with
1402 something else. */
1403 continue;
1404 else
1405 /* Otherwise this is a partial overlap which we cannot
1406 represent. */
1407 break;
1410 rhs = get_ssa_def_if_simple_copy (rhs);
1411 n = XALLOCA (struct ipa_known_agg_contents_list);
1412 n->size = lhs_size;
1413 n->offset = lhs_offset;
1414 if (is_gimple_ip_invariant (rhs))
1416 n->constant = rhs;
1417 const_count++;
1419 else
1420 n->constant = NULL_TREE;
1421 n->next = *p;
1422 *p = n;
1424 item_count++;
1425 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1426 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1427 break;
1430 /* Third stage just goes over the list and creates an appropriate vector of
1431 ipa_agg_jf_item structures out of it, of sourse only if there are
1432 any known constants to begin with. */
1434 if (const_count)
1436 jfunc->agg.by_ref = by_ref;
1437 vec_alloc (jfunc->agg.items, const_count);
1438 while (list)
1440 if (list->constant)
1442 struct ipa_agg_jf_item item;
1443 item.offset = list->offset - arg_offset;
1444 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1445 item.value = unshare_expr_without_location (list->constant);
1446 jfunc->agg.items->quick_push (item);
1448 list = list->next;
/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gimple call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);

  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);

      if (is_gimple_ip_invariant (arg))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >=0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      /* The argument is an unmodified formal parameter.  */
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0
		  && !detect_type_change_ssa (arg, call, jfunc))
		{
		  bool agg_p;
		  agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
							call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
		}
	    }
	  else
	    {
	      /* Try to match an arithmetic pass-through or a typecast
		 pattern on the defining statement.  */
	      gimple stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
						  call, stmt, arg);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
						    call, stmt);
	    }
	}
      else
	compute_known_type_jump_func (arg, jfunc, call);

      /* Unless the aggregate is provably preserved by a pass-through or
	 ancestor jump function, also try to record its known contents.  */
      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || (POINTER_TYPE_P (TREE_TYPE (arg)))))
	determine_known_aggregate_parts (call, arg, jfunc);
    }
}
1533 /* Compute jump functions for all edges - both direct and indirect - outgoing
1534 from NODE. Also count the actual arguments in the process. */
1536 static void
1537 ipa_compute_jump_functions (struct cgraph_node *node,
1538 struct param_analysis_info *parms_ainfo)
1540 struct cgraph_edge *cs;
1542 for (cs = node->callees; cs; cs = cs->next_callee)
1544 struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
1545 NULL);
1546 /* We do not need to bother analyzing calls to unknown
1547 functions unless they may become known during lto/whopr. */
1548 if (!callee->symbol.definition && !flag_lto)
1549 continue;
1550 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
1553 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
1554 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      /* Load of the form rec.__pfn or rec.__delta.  */
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  /* The base must be a PARM_DECL whose record type looks like a C++ member
     pointer (a method pointer field followed by an integer delta).  */
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      /* COMPONENT_REF form: the MEM_REF offset must be zero and the
	 referenced field must be the one we look for.  */
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    /* Raw MEM_REF form: the offset must match the field's position.  */
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}
1609 /* Returns true iff T is an SSA_NAME defined by a statement. */
1611 static bool
1612 ipa_is_ssa_with_stmt_def (tree t)
1614 if (TREE_CODE (t) == SSA_NAME
1615 && !SSA_NAME_IS_DEFAULT_DEF (t))
1616 return true;
1617 else
1618 return false;
1621 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1622 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1623 indirect call graph edge. */
1625 static struct cgraph_edge *
1626 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
1628 struct cgraph_edge *cs;
1630 cs = cgraph_edge (node, stmt);
1631 cs->indirect_info->param_index = param_index;
1632 cs->indirect_info->offset = 0;
1633 cs->indirect_info->polymorphic = 0;
1634 cs->indirect_info->agg_contents = 0;
1635 cs->indirect_info->member_ptr = 0;
1636 return cs;
/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

     <bb 2>:
       f$__delta_5 = f.__delta;
       f$__pfn_24 = f.__pfn;

   or, when the member pointer fields are loaded as raw memory:

     <bb 2>:
       f$__delta_5 = MEM[(struct  *)&f];
       f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and a few lines below:

     <bb 5>
       D.2496_3 = (int) f$__pfn_24;
       D.2497_4 = D.2496_3 & 1;
       if (D.2497_4 != 0)
	 goto <bb 3>;
       else
	 goto <bb 4>;

     <bb 6>:
       D.2500_7 = (unsigned int) f$__delta_5;
       D.2501_8 = &S + D.2500_7;
       D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
       D.2503_10 = *D.2502_9;
       D.2504_12 = f$__pfn_24 + -1;
       D.2505_13 = (unsigned int) D.2504_12;
       D.2506_14 = D.2503_10 + D.2505_13;
       D.2507_15 = *D.2506_14;
       iftmp.11_16 = (String:: *) D.2507_15;

     <bb 7>:
       # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
       D.2500_19 = (unsigned int) f$__delta_5;
       D.2508_20 = &S + D.2500_19;
       D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */

static void
ipa_analyze_indirect_call_uses (struct cgraph_node *node,
				struct ipa_node_params *info,
				struct param_analysis_info *parms_ainfo,
				gimple call, tree target)
{
  gimple def;
  tree n1, n2;
  gimple d1, d2;
  tree rec, rec2, cond;
  gimple branch;
  int index;
  basic_block bb, virt_bb, join;
  HOST_WIDE_INT offset;
  bool by_ref;

  /* The easy case: the call target is an unmodified formal parameter.  */
  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
	ipa_note_param_call (node, index, call);
      return;
    }

  /* The call target is loaded from an aggregate that is a parameter or
     pointed to by a parameter.  */
  def = SSA_NAME_DEF_STMT (target);
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg_1 (info->descriptors, parms_ainfo, def,
				   gimple_assign_rhs1 (def), &index, &offset,
				   &by_ref))
    {
      struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  n1 = PHI_ARG_DEF (def, 0);
  n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  d1 = SSA_NAME_DEF_STMT (n1);
  d2 = SSA_NAME_DEF_STMT (n2);

  join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
	return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  /* The condition must be "pfn & 1" (the virtual-vs-direct discriminator).  */
  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  /* Skip over an optional conversion of the pfn before the masking.  */
  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  /* The tested value must be loaded from the same member pointer parameter
     (which field depends on where the target ABI keeps the virtual bit).  */
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta),
					     NULL);
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->member_ptr = 1;
    }

  return;
}
/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
   object referenced in the expression is a formal parameter of the caller
   (described by INFO), create a call note for the statement.  */

static void
ipa_analyze_virtual_call_uses (struct cgraph_node *node,
			       struct ipa_node_params *info, gimple call,
			       tree target)
{
  struct cgraph_edge *cs;
  struct cgraph_indirect_call_info *ii;
  struct ipa_jump_func jfunc;
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      /* The object is the unmodified formal parameter itself.  */
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
	return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      if (detect_type_change_ssa (obj, call, &jfunc))
	return;
    }
  else
    {
      /* Otherwise the object may be an address of an ancestor component of
	 a formal parameter.  */
      gimple stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
	return;
      index = ipa_get_param_decl_index (info,
					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (obj, expr, call, &jfunc, anc_offset))
	return;
    }

  cs = ipa_note_param_call (node, index, call);
  ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
  ii->otr_type = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (target)));
  ii->polymorphic = 1;
}
1891 /* Analyze a call statement CALL whether and how it utilizes formal parameters
1892 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
1893 containing intermediate information about each formal parameter. */
1895 static void
1896 ipa_analyze_call_uses (struct cgraph_node *node,
1897 struct ipa_node_params *info,
1898 struct param_analysis_info *parms_ainfo, gimple call)
1900 tree target = gimple_call_fn (call);
1902 if (!target)
1903 return;
1904 if (TREE_CODE (target) == SSA_NAME)
1905 ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
1906 else if (TREE_CODE (target) == OBJ_TYPE_REF)
1907 ipa_analyze_virtual_call_uses (node, info, call, target);
1911 /* Analyze the call statement STMT with respect to formal parameters (described
1912 in INFO) of caller given by NODE. Currently it only checks whether formal
1913 parameters are called. PARMS_AINFO is a pointer to a vector containing
1914 intermediate information about each formal parameter. */
1916 static void
1917 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
1918 struct param_analysis_info *parms_ainfo, gimple stmt)
1920 if (is_gimple_call (stmt))
1921 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
1924 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
1925 If OP is a parameter declaration, mark it as used in the info structure
1926 passed in DATA. */
1928 static bool
1929 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
1930 tree op, void *data)
1932 struct ipa_node_params *info = (struct ipa_node_params *) data;
1934 op = get_base_address (op);
1935 if (op
1936 && TREE_CODE (op) == PARM_DECL)
1938 int index = ipa_get_param_decl_index (info, op);
1939 gcc_assert (index >= 0);
1940 ipa_set_param_used (info, index, true);
1943 return false;
/* Scan the function body of NODE and inspect the uses of formal parameters.
   Store the findings in various structures of the associated ipa_node_params
   structure, such as parameter flags, notes etc.  PARMS_AINFO is a pointer to a
   vector containing intermediate information about each formal parameter.  */

static void
ipa_analyze_params_uses (struct cgraph_node *node,
			 struct param_analysis_info *parms_ainfo)
{
  tree decl = node->symbol.decl;
  basic_block bb;
  struct function *func;
  gimple_stmt_iterator gsi;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int i;

  if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
    return;

  info->uses_analysis_done = 1;
  if (ipa_func_spec_opts_forbid_analysis_p (node))
    {
      /* Analysis forbidden: conservatively mark every parameter as used
	 with an undescribed number of uses.  */
      for (i = 0; i < ipa_get_param_count (info); i++)
	{
	  ipa_set_param_used (info, i, true);
	  ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
	}
      return;
    }

  for (i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      int controlled_uses = 0;

      /* For SSA regs see if parameter is used.  For non-SSA we compute
	 the flag during modification analysis.  */
      if (is_gimple_reg (parm))
	{
	  tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->symbol.decl),
				       parm);
	  if (ddef && !has_zero_uses (ddef))
	    {
	      imm_use_iterator imm_iter;
	      use_operand_p use_p;

	      ipa_set_param_used (info, i, true);
	      /* Count uses; any use that is not a call argument makes the
		 number of uses undescribable.  */
	      FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
		if (!is_gimple_call (USE_STMT (use_p)))
		  {
		    controlled_uses = IPA_UNDESCRIBED_USE;
		    break;
		  }
		else
		  controlled_uses++;
	    }
	  else
	    controlled_uses = 0;
	}
      else
	controlled_uses = IPA_UNDESCRIBED_USE;
      ipa_set_controlled_uses (info, i, controlled_uses);
    }

  func = DECL_STRUCT_FUNCTION (decl);
  FOR_EACH_BB_FN (bb, func)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);

	  if (is_gimple_debug (stmt))
	    continue;

	  ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
	  walk_stmt_load_store_addr_ops (stmt, info,
					 visit_ref_for_mod_analysis,
					 visit_ref_for_mod_analysis,
					 visit_ref_for_mod_analysis);
	}
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
				       visit_ref_for_mod_analysis,
				       visit_ref_for_mod_analysis,
				       visit_ref_for_mod_analysis);
    }
}
2034 /* Free stuff in PARMS_AINFO, assume there are PARAM_COUNT parameters. */
2036 static void
2037 free_parms_ainfo (struct param_analysis_info *parms_ainfo, int param_count)
2039 int i;
2041 for (i = 0; i < param_count; i++)
2043 if (parms_ainfo[i].parm_visited_statements)
2044 BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
2045 if (parms_ainfo[i].pt_visited_statements)
2046 BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
/* Initialize the array describing properties of formal parameters
   of NODE, analyze their uses and compute jump functions associated
   with actual arguments of calls from within NODE.  */

void
ipa_analyze_node (struct cgraph_node *node)
{
  struct ipa_node_params *info;
  struct param_analysis_info *parms_ainfo;
  int param_count;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = IPA_NODE_REF (node);
  /* Subsequent analysis walks the node's body, so make it current.  */
  push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
  ipa_initialize_node_params (node);

  /* Scratch per-parameter state, zero-initialized; freed below.  */
  param_count = ipa_get_param_count (info);
  parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
  memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);

  ipa_analyze_params_uses (node, parms_ainfo);
  ipa_compute_jump_functions (node, parms_ainfo);

  free_parms_ainfo (parms_ainfo, param_count);
  pop_cfun ();
}
2078 /* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF
2079 attempt a type-based devirtualization. If successful, return the
2080 target function declaration, otherwise return NULL. */
2082 tree
2083 ipa_intraprocedural_devirtualization (gimple call)
2085 tree binfo, token, fndecl;
2086 struct ipa_jump_func jfunc;
2087 tree otr = gimple_call_fn (call);
2089 jfunc.type = IPA_JF_UNKNOWN;
2090 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
2091 call);
2092 if (jfunc.type != IPA_JF_KNOWN_TYPE)
2093 return NULL_TREE;
2094 binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
2095 if (!binfo)
2096 return NULL_TREE;
2097 token = OBJ_TYPE_REF_TOKEN (otr);
2098 fndecl = gimple_get_virt_method_for_binfo (tree_low_cst (token, 1),
2099 binfo);
2100 return fndecl;
2103 /* Update the jump function DST when the call graph edge corresponding to SRC is
2104 is being inlined, knowing that DST is of type ancestor and src of known
2105 type. */
2107 static void
2108 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2109 struct ipa_jump_func *dst)
2111 HOST_WIDE_INT combined_offset;
2112 tree combined_type;
2114 combined_offset = ipa_get_jf_known_type_offset (src)
2115 + ipa_get_jf_ancestor_offset (dst);
2116 combined_type = ipa_get_jf_ancestor_type (dst);
2118 ipa_set_jf_known_type (dst, combined_offset,
2119 ipa_get_jf_known_type_base_type (src),
2120 combined_type);
2123 /* Update the jump functions associated with call graph edge E when the call
2124 graph edge CS is being inlined, assuming that E->caller is already (possibly
2125 indirectly) inlined into CS->callee and that E has not been inlined. */
2127 static void
2128 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2129 struct cgraph_edge *e)
2131 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2132 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2133 int count = ipa_get_cs_argument_count (args);
2134 int i;
2136 for (i = 0; i < count; i++)
2138 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2140 if (dst->type == IPA_JF_ANCESTOR)
2142 struct ipa_jump_func *src;
2143 int dst_fid = dst->value.ancestor.formal_id;
2145 /* Variable number of arguments can cause havoc if we try to access
2146 one that does not exist in the inlined edge. So make sure we
2147 don't. */
2148 if (dst_fid >= ipa_get_cs_argument_count (top))
2150 dst->type = IPA_JF_UNKNOWN;
2151 continue;
2154 src = ipa_get_ith_jump_func (top, dst_fid);
2156 if (src->agg.items
2157 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2159 struct ipa_agg_jf_item *item;
2160 int j;
2162 /* Currently we do not produce clobber aggregate jump functions,
2163 replace with merging when we do. */
2164 gcc_assert (!dst->agg.items);
2166 dst->agg.items = vec_safe_copy (src->agg.items);
2167 dst->agg.by_ref = src->agg.by_ref;
2168 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2169 item->offset -= dst->value.ancestor.offset;
2172 if (src->type == IPA_JF_KNOWN_TYPE)
2173 combine_known_type_and_ancestor_jfs (src, dst);
2174 else if (src->type == IPA_JF_PASS_THROUGH
2175 && src->value.pass_through.operation == NOP_EXPR)
2177 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2178 dst->value.ancestor.agg_preserved &=
2179 src->value.pass_through.agg_preserved;
2181 else if (src->type == IPA_JF_ANCESTOR)
2183 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2184 dst->value.ancestor.offset += src->value.ancestor.offset;
2185 dst->value.ancestor.agg_preserved &=
2186 src->value.ancestor.agg_preserved;
2188 else
2189 dst->type = IPA_JF_UNKNOWN;
2191 else if (dst->type == IPA_JF_PASS_THROUGH)
2193 struct ipa_jump_func *src;
2194 /* We must check range due to calls with variable number of arguments
2195 and we cannot combine jump functions with operations. */
2196 if (dst->value.pass_through.operation == NOP_EXPR
2197 && (dst->value.pass_through.formal_id
2198 < ipa_get_cs_argument_count (top)))
2200 bool agg_p;
2201 int dst_fid = dst->value.pass_through.formal_id;
2202 src = ipa_get_ith_jump_func (top, dst_fid);
2203 agg_p = dst->value.pass_through.agg_preserved;
2205 dst->type = src->type;
2206 dst->value = src->value;
2208 if (src->agg.items
2209 && (agg_p || !src->agg.by_ref))
2211 /* Currently we do not produce clobber aggregate jump
2212 functions, replace with merging when we do. */
2213 gcc_assert (!dst->agg.items);
2215 dst->agg.by_ref = src->agg.by_ref;
2216 dst->agg.items = vec_safe_copy (src->agg.items);
2219 if (!agg_p)
2221 if (dst->type == IPA_JF_PASS_THROUGH)
2222 dst->value.pass_through.agg_preserved = false;
2223 else if (dst->type == IPA_JF_ANCESTOR)
2224 dst->value.ancestor.agg_preserved = false;
2227 else
2228 dst->type = IPA_JF_UNKNOWN;
2233 /* If TARGET is an addr_expr of a function declaration, make it the destination
2234 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2236 struct cgraph_edge *
2237 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
2239 struct cgraph_node *callee;
2240 struct inline_edge_summary *es = inline_edge_summary (ie);
2241 bool unreachable = false;
2243 if (TREE_CODE (target) == ADDR_EXPR)
2244 target = TREE_OPERAND (target, 0);
2245 if (TREE_CODE (target) != FUNCTION_DECL)
2247 target = canonicalize_constructor_val (target, NULL);
2248 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2250 if (ie->indirect_info->member_ptr)
2251 /* Member pointer call that goes through a VMT lookup. */
2252 return NULL;
2254 if (dump_file)
2255 fprintf (dump_file, "ipa-prop: Discovered direct call to non-function"
2256 " in %s/%i, making it unreachable.\n",
2257 cgraph_node_name (ie->caller), ie->caller->symbol.order);
2258 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2259 callee = cgraph_get_create_node (target);
2260 unreachable = true;
2262 else
2263 callee = cgraph_get_node (target);
2265 else
2266 callee = cgraph_get_node (target);
2268 /* Because may-edges are not explicitely represented and vtable may be external,
2269 we may create the first reference to the object in the unit. */
2270 if (!callee || callee->global.inlined_to)
2273 /* We are better to ensure we can refer to it.
2274 In the case of static functions we are out of luck, since we already
2275 removed its body. In the case of public functions we may or may
2276 not introduce the reference. */
2277 if (!canonicalize_constructor_val (target, NULL)
2278 || !TREE_PUBLIC (target))
2280 if (dump_file)
2281 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2282 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2283 xstrdup (cgraph_node_name (ie->caller)),
2284 ie->caller->symbol.order,
2285 xstrdup (cgraph_node_name (ie->callee)),
2286 ie->callee->symbol.order);
2287 return NULL;
2289 callee = cgraph_get_create_real_symbol_node (target);
2291 ipa_check_create_node_params ();
2293 /* We can not make edges to inline clones. It is bug that someone removed
2294 the cgraph node too early. */
2295 gcc_assert (!callee->global.inlined_to);
2297 cgraph_make_edge_direct (ie, callee);
2298 es = inline_edge_summary (ie);
2299 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2300 - eni_size_weights.call_cost);
2301 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2302 - eni_time_weights.call_cost);
2303 if (dump_file && !unreachable)
2305 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
2306 "(%s/%i -> %s/%i), for stmt ",
2307 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2308 xstrdup (cgraph_node_name (ie->caller)),
2309 ie->caller->symbol.order,
2310 xstrdup (cgraph_node_name (ie->callee)),
2311 ie->callee->symbol.order);
2312 if (ie->call_stmt)
2313 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2314 else
2315 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2317 callee = cgraph_function_or_thunk_node (callee, NULL);
2319 return ie;
2322 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2323 return NULL if there is not any. BY_REF specifies whether the value has to
2324 be passed by reference or by value. */
2326 tree
2327 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2328 HOST_WIDE_INT offset, bool by_ref)
2330 struct ipa_agg_jf_item *item;
2331 int i;
2333 if (by_ref != agg->by_ref)
2334 return NULL;
2336 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2337 if (item->offset == offset)
2339 /* Currently we do not have clobber values, return NULL for them once
2340 we do. */
2341 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2342 return item->value;
2344 return NULL;
2347 /* Remove a reference to SYMBOL from the list of references of a node given by
2348 reference description RDESC. */
2350 static void
2351 remove_described_reference (symtab_node symbol, struct ipa_cst_ref_desc *rdesc)
2353 struct ipa_ref *to_del;
2354 struct cgraph_edge *origin;
2356 origin = rdesc->cs;
2357 to_del = ipa_find_reference ((symtab_node) origin->caller, symbol,
2358 origin->call_stmt);
2359 gcc_assert (to_del);
2360 ipa_remove_reference (to_del);
2361 if (dump_file)
2362 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2363 xstrdup (cgraph_node_name (origin->caller)),
2364 origin->caller->symbol.order, xstrdup (symtab_node_name (symbol)));
2367 /* If JFUNC has a reference description with refcount different from
2368 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2369 NULL. JFUNC must be a constant jump function. */
2371 static struct ipa_cst_ref_desc *
2372 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2374 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2375 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2376 return rdesc;
2377 else
2378 return NULL;
2381 /* Try to find a destination for indirect edge IE that corresponds to a simple
2382 call or a call of a member function pointer and where the destination is a
2383 pointer formal parameter described by jump function JFUNC. If it can be
2384 determined, return the newly direct edge, otherwise return NULL.
2385 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2387 static struct cgraph_edge *
2388 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2389 struct ipa_jump_func *jfunc,
2390 struct ipa_node_params *new_root_info)
2392 struct ipa_cst_ref_desc *rdesc;
2393 struct cgraph_edge *cs;
2394 tree target;
2396 if (ie->indirect_info->agg_contents)
2397 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2398 ie->indirect_info->offset,
2399 ie->indirect_info->by_ref);
2400 else
2401 target = ipa_value_from_jfunc (new_root_info, jfunc);
2402 if (!target)
2403 return NULL;
2404 cs = ipa_make_edge_direct_to_target (ie, target);
2406 if (cs && !ie->indirect_info->agg_contents
2407 && jfunc->type == IPA_JF_CONST
2408 && (rdesc = jfunc_rdesc_usable (jfunc))
2409 && --rdesc->refcount == 0)
2410 remove_described_reference ((symtab_node) cs->callee, rdesc);
2412 return cs;
2415 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2416 call based on a formal parameter which is described by jump function JFUNC
2417 and if it can be determined, make it direct and return the direct edge.
2418 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
2419 are relative to. */
2421 static struct cgraph_edge *
2422 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2423 struct ipa_jump_func *jfunc,
2424 struct ipa_node_params *new_root_info)
2426 tree binfo, target;
2428 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
2430 if (!binfo)
2431 return NULL;
2433 if (TREE_CODE (binfo) != TREE_BINFO)
2435 binfo = gimple_extract_devirt_binfo_from_cst (binfo);
2436 if (!binfo)
2437 return NULL;
2440 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
2441 ie->indirect_info->otr_type);
2442 if (binfo)
2443 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2444 binfo);
2445 else
2446 return NULL;
2448 if (target)
2449 return ipa_make_edge_direct_to_target (ie, target);
2450 else
2451 return NULL;
2454 /* Update the param called notes associated with NODE when CS is being inlined,
2455 assuming NODE is (potentially indirectly) inlined into CS->callee.
2456 Moreover, if the callee is discovered to be constant, create a new cgraph
2457 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2458 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
2460 static bool
2461 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2462 struct cgraph_node *node,
2463 vec<cgraph_edge_p> *new_edges)
2465 struct ipa_edge_args *top;
2466 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
2467 struct ipa_node_params *new_root_info;
2468 bool res = false;
2470 ipa_check_create_edge_args ();
2471 top = IPA_EDGE_REF (cs);
2472 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
2473 ? cs->caller->global.inlined_to
2474 : cs->caller);
2476 for (ie = node->indirect_calls; ie; ie = next_ie)
2478 struct cgraph_indirect_call_info *ici = ie->indirect_info;
2479 struct ipa_jump_func *jfunc;
2480 int param_index;
2482 next_ie = ie->next_callee;
2484 if (ici->param_index == -1)
2485 continue;
2487 /* We must check range due to calls with variable number of arguments: */
2488 if (ici->param_index >= ipa_get_cs_argument_count (top))
2490 ici->param_index = -1;
2491 continue;
2494 param_index = ici->param_index;
2495 jfunc = ipa_get_ith_jump_func (top, param_index);
2497 if (!flag_indirect_inlining)
2498 new_direct_edge = NULL;
2499 else if (ici->polymorphic)
2500 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
2501 new_root_info);
2502 else
2503 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
2504 new_root_info);
2505 if (new_direct_edge)
2507 new_direct_edge->indirect_inlining_edge = 1;
2508 if (new_direct_edge->call_stmt)
2509 new_direct_edge->call_stmt_cannot_inline_p
2510 = !gimple_check_call_matching_types (
2511 new_direct_edge->call_stmt,
2512 new_direct_edge->callee->symbol.decl, false);
2513 if (new_edges)
2515 new_edges->safe_push (new_direct_edge);
2516 top = IPA_EDGE_REF (cs);
2517 res = true;
2520 else if (jfunc->type == IPA_JF_PASS_THROUGH
2521 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2523 if (ici->agg_contents
2524 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
2525 ici->param_index = -1;
2526 else
2527 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
2529 else if (jfunc->type == IPA_JF_ANCESTOR)
2531 if (ici->agg_contents
2532 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
2533 ici->param_index = -1;
2534 else
2536 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
2537 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
2540 else
2541 /* Either we can find a destination for this edge now or never. */
2542 ici->param_index = -1;
2545 return res;
2548 /* Recursively traverse subtree of NODE (including node) made of inlined
2549 cgraph_edges when CS has been inlined and invoke
2550 update_indirect_edges_after_inlining on all nodes and
2551 update_jump_functions_after_inlining on all non-inlined edges that lead out
2552 of this subtree. Newly discovered indirect edges will be added to
2553 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
2554 created. */
2556 static bool
2557 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
2558 struct cgraph_node *node,
2559 vec<cgraph_edge_p> *new_edges)
2561 struct cgraph_edge *e;
2562 bool res;
2564 res = update_indirect_edges_after_inlining (cs, node, new_edges);
2566 for (e = node->callees; e; e = e->next_callee)
2567 if (!e->inline_failed)
2568 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
2569 else
2570 update_jump_functions_after_inlining (cs, e);
2571 for (e = node->indirect_calls; e; e = e->next_callee)
2572 update_jump_functions_after_inlining (cs, e);
2574 return res;
2577 /* Combine two controlled uses counts as done during inlining. */
2579 static int
2580 combine_controlled_uses_counters (int c, int d)
2582 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
2583 return IPA_UNDESCRIBED_USE;
2584 else
2585 return c + d - 1;
2588 /* Propagate number of controlled users from CS->caleee to the new root of the
2589 tree of inlined nodes. */
2591 static void
2592 propagate_controlled_uses (struct cgraph_edge *cs)
2594 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
2595 struct cgraph_node *new_root = cs->caller->global.inlined_to
2596 ? cs->caller->global.inlined_to : cs->caller;
2597 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
2598 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
2599 int count, i;
2601 count = MIN (ipa_get_cs_argument_count (args),
2602 ipa_get_param_count (old_root_info));
2603 for (i = 0; i < count; i++)
2605 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2606 struct ipa_cst_ref_desc *rdesc;
2608 if (jf->type == IPA_JF_PASS_THROUGH)
2610 int src_idx, c, d;
2611 src_idx = ipa_get_jf_pass_through_formal_id (jf);
2612 c = ipa_get_controlled_uses (new_root_info, src_idx);
2613 d = ipa_get_controlled_uses (old_root_info, i);
2615 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
2616 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
2617 c = combine_controlled_uses_counters (c, d);
2618 ipa_set_controlled_uses (new_root_info, src_idx, c);
2619 if (c == 0 && new_root_info->ipcp_orig_node)
2621 struct cgraph_node *n;
2622 struct ipa_ref *ref;
2623 tree t = new_root_info->known_vals[src_idx];
2625 if (t && TREE_CODE (t) == ADDR_EXPR
2626 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
2627 && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
2628 && (ref = ipa_find_reference ((symtab_node) new_root,
2629 (symtab_node) n, NULL)))
2631 if (dump_file)
2632 fprintf (dump_file, "ipa-prop: Removing cloning-created "
2633 "reference from %s/%i to %s/%i.\n",
2634 xstrdup (cgraph_node_name (new_root)),
2635 new_root->symbol.order,
2636 xstrdup (cgraph_node_name (n)), n->symbol.order);
2637 ipa_remove_reference (ref);
2641 else if (jf->type == IPA_JF_CONST
2642 && (rdesc = jfunc_rdesc_usable (jf)))
2644 int d = ipa_get_controlled_uses (old_root_info, i);
2645 int c = rdesc->refcount;
2646 rdesc->refcount = combine_controlled_uses_counters (c, d);
2647 if (rdesc->refcount == 0)
2649 tree cst = ipa_get_jf_constant (jf);
2650 struct cgraph_node *n;
2651 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
2652 && TREE_CODE (TREE_OPERAND (cst, 0))
2653 == FUNCTION_DECL);
2654 n = cgraph_get_node (TREE_OPERAND (cst, 0));
2655 if (n)
2657 struct cgraph_node *clone;
2658 remove_described_reference ((symtab_node) n, rdesc);
2660 clone = cs->caller;
2661 while (clone->global.inlined_to
2662 && clone != rdesc->cs->caller
2663 && IPA_NODE_REF (clone)->ipcp_orig_node)
2665 struct ipa_ref *ref;
2666 ref = ipa_find_reference ((symtab_node) clone,
2667 (symtab_node) n, NULL);
2668 if (ref)
2670 if (dump_file)
2671 fprintf (dump_file, "ipa-prop: Removing "
2672 "cloning-created reference "
2673 "from %s/%i to %s/%i.\n",
2674 xstrdup (cgraph_node_name (clone)),
2675 clone->symbol.order,
2676 xstrdup (cgraph_node_name (n)),
2677 n->symbol.order);
2678 ipa_remove_reference (ref);
2680 clone = clone->callers->caller;
2687 for (i = ipa_get_param_count (old_root_info);
2688 i < ipa_get_cs_argument_count (args);
2689 i++)
2691 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2693 if (jf->type == IPA_JF_CONST)
2695 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
2696 if (rdesc)
2697 rdesc->refcount = IPA_UNDESCRIBED_USE;
2699 else if (jf->type == IPA_JF_PASS_THROUGH)
2700 ipa_set_controlled_uses (new_root_info,
2701 jf->value.pass_through.formal_id,
2702 IPA_UNDESCRIBED_USE);
2706 /* Update jump functions and call note functions on inlining the call site CS.
2707 CS is expected to lead to a node already cloned by
2708 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
2709 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were +
2710 created. */
2712 bool
2713 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
2714 vec<cgraph_edge_p> *new_edges)
2716 bool changed;
2717 /* Do nothing if the preparation phase has not been carried out yet
2718 (i.e. during early inlining). */
2719 if (!ipa_node_params_vector.exists ())
2720 return false;
2721 gcc_assert (ipa_edge_args_vector);
2723 propagate_controlled_uses (cs);
2724 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
2726 return changed;
2729 /* Frees all dynamically allocated structures that the argument info points
2730 to. */
2732 void
2733 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
2735 vec_free (args->jump_functions);
2736 memset (args, 0, sizeof (*args));
2739 /* Free all ipa_edge structures. */
2741 void
2742 ipa_free_all_edge_args (void)
2744 int i;
2745 struct ipa_edge_args *args;
2747 if (!ipa_edge_args_vector)
2748 return;
2750 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
2751 ipa_free_edge_args_substructures (args);
2753 vec_free (ipa_edge_args_vector);
2756 /* Frees all dynamically allocated structures that the param info points
2757 to. */
2759 void
2760 ipa_free_node_params_substructures (struct ipa_node_params *info)
2762 info->descriptors.release ();
2763 free (info->lattices);
2764 /* Lattice values and their sources are deallocated with their alocation
2765 pool. */
2766 info->known_vals.release ();
2767 memset (info, 0, sizeof (*info));
2770 /* Free all ipa_node_params structures. */
2772 void
2773 ipa_free_all_node_params (void)
2775 int i;
2776 struct ipa_node_params *info;
2778 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
2779 ipa_free_node_params_substructures (info);
2781 ipa_node_params_vector.release ();
2784 /* Set the aggregate replacements of NODE to be AGGVALS. */
2786 void
2787 ipa_set_node_agg_value_chain (struct cgraph_node *node,
2788 struct ipa_agg_replacement_value *aggvals)
2790 if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
2791 vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);
2793 (*ipa_node_agg_replacements)[node->uid] = aggvals;
2796 /* Hook that is called by cgraph.c when an edge is removed. */
2798 static void
2799 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
2801 /* During IPA-CP updating we can be called on not-yet analyze clones. */
2802 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
2803 return;
2804 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
2807 /* Hook that is called by cgraph.c when a node is removed. */
2809 static void
2810 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2812 /* During IPA-CP updating we can be called on not-yet analyze clones. */
2813 if (ipa_node_params_vector.length () > (unsigned)node->uid)
2814 ipa_free_node_params_substructures (IPA_NODE_REF (node));
2815 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
2816 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
2819 /* Hook that is called by cgraph.c when an edge is duplicated. */
2821 static void
2822 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
2823 __attribute__((unused)) void *data)
2825 struct ipa_edge_args *old_args, *new_args;
2826 unsigned int i;
2828 ipa_check_create_edge_args ();
2830 old_args = IPA_EDGE_REF (src);
2831 new_args = IPA_EDGE_REF (dst);
2833 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
2835 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
2837 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
2838 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
2840 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
2842 if (src_jf->type == IPA_JF_CONST)
2844 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
2846 if (!src_rdesc)
2847 dst_jf->value.constant.rdesc = NULL;
2848 else if (src_rdesc->cs == src)
2850 struct ipa_cst_ref_desc *dst_rdesc;
2851 gcc_checking_assert (ipa_refdesc_pool);
2852 dst_rdesc
2853 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
2854 dst_rdesc->cs = dst;
2855 dst_rdesc->refcount = src_rdesc->refcount;
2856 if (dst->caller->global.inlined_to)
2858 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
2859 src_rdesc->next_duplicate = dst_rdesc;
2861 dst_jf->value.constant.rdesc = dst_rdesc;
2863 else
2865 struct ipa_cst_ref_desc *dst_rdesc;
2866 /* This can happen during inlining, when a JFUNC can refer to a
2867 reference taken in a function up in the tree of inline clones.
2868 We need to find the duplicate that refers to our tree of
2869 inline clones. */
2871 gcc_assert (dst->caller->global.inlined_to);
2872 for (dst_rdesc = src_rdesc->next_duplicate;
2873 dst_rdesc;
2874 dst_rdesc = dst_rdesc->next_duplicate)
2875 if (dst_rdesc->cs->caller->global.inlined_to
2876 == dst->caller->global.inlined_to)
2877 break;
2878 gcc_assert (dst_rdesc);
2879 dst_jf->value.constant.rdesc = dst_rdesc;
2885 /* Hook that is called by cgraph.c when a node is duplicated. */
2887 static void
2888 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
2889 ATTRIBUTE_UNUSED void *data)
2891 struct ipa_node_params *old_info, *new_info;
2892 struct ipa_agg_replacement_value *old_av, *new_av;
2894 ipa_check_create_node_params ();
2895 old_info = IPA_NODE_REF (src);
2896 new_info = IPA_NODE_REF (dst);
2898 new_info->descriptors = old_info->descriptors.copy ();
2899 new_info->lattices = NULL;
2900 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
2902 new_info->uses_analysis_done = old_info->uses_analysis_done;
2903 new_info->node_enqueued = old_info->node_enqueued;
2905 old_av = ipa_get_agg_replacements_for_node (src);
2906 if (!old_av)
2907 return;
2909 new_av = NULL;
2910 while (old_av)
2912 struct ipa_agg_replacement_value *v;
2914 v = ggc_alloc_ipa_agg_replacement_value ();
2915 memcpy (v, old_av, sizeof (*v));
2916 v->next = new_av;
2917 new_av = v;
2918 old_av = old_av->next;
2920 ipa_set_node_agg_value_chain (dst, new_av);
2924 /* Analyze newly added function into callgraph. */
2926 static void
2927 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2929 ipa_analyze_node (node);
2932 /* Register our cgraph hooks if they are not already there. */
2934 void
2935 ipa_register_cgraph_hooks (void)
2937 if (!edge_removal_hook_holder)
2938 edge_removal_hook_holder =
2939 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
2940 if (!node_removal_hook_holder)
2941 node_removal_hook_holder =
2942 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
2943 if (!edge_duplication_hook_holder)
2944 edge_duplication_hook_holder =
2945 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
2946 if (!node_duplication_hook_holder)
2947 node_duplication_hook_holder =
2948 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
2949 function_insertion_hook_holder =
2950 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
2953 /* Unregister our cgraph hooks if they are not already there. */
2955 static void
2956 ipa_unregister_cgraph_hooks (void)
2958 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
2959 edge_removal_hook_holder = NULL;
2960 cgraph_remove_node_removal_hook (node_removal_hook_holder);
2961 node_removal_hook_holder = NULL;
2962 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
2963 edge_duplication_hook_holder = NULL;
2964 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
2965 node_duplication_hook_holder = NULL;
2966 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
2967 function_insertion_hook_holder = NULL;
2970 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2971 longer needed after ipa-cp. */
2973 void
2974 ipa_free_all_structures_after_ipa_cp (void)
2976 if (!optimize)
2978 ipa_free_all_edge_args ();
2979 ipa_free_all_node_params ();
2980 free_alloc_pool (ipcp_sources_pool);
2981 free_alloc_pool (ipcp_values_pool);
2982 free_alloc_pool (ipcp_agg_lattice_pool);
2983 ipa_unregister_cgraph_hooks ();
2984 if (ipa_refdesc_pool)
2985 free_alloc_pool (ipa_refdesc_pool);
2989 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2990 longer needed after indirect inlining. */
2992 void
2993 ipa_free_all_structures_after_iinln (void)
2995 ipa_free_all_edge_args ();
2996 ipa_free_all_node_params ();
2997 ipa_unregister_cgraph_hooks ();
2998 if (ipcp_sources_pool)
2999 free_alloc_pool (ipcp_sources_pool);
3000 if (ipcp_values_pool)
3001 free_alloc_pool (ipcp_values_pool);
3002 if (ipcp_agg_lattice_pool)
3003 free_alloc_pool (ipcp_agg_lattice_pool);
3004 if (ipa_refdesc_pool)
3005 free_alloc_pool (ipa_refdesc_pool);
3008 /* Print ipa_tree_map data structures of all functions in the
3009 callgraph to F. */
3011 void
3012 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3014 int i, count;
3015 tree temp;
3016 struct ipa_node_params *info;
3018 if (!node->symbol.definition)
3019 return;
3020 info = IPA_NODE_REF (node);
3021 fprintf (f, " function %s/%i parameter descriptors:\n",
3022 cgraph_node_name (node), node->symbol.order);
3023 count = ipa_get_param_count (info);
3024 for (i = 0; i < count; i++)
3026 int c;
3028 temp = ipa_get_param (info, i);
3029 if (TREE_CODE (temp) == PARM_DECL)
3030 fprintf (f, " param %d : %s", i,
3031 (DECL_NAME (temp)
3032 ? (*lang_hooks.decl_printable_name) (temp, 2)
3033 : "(unnamed)"));
3034 if (ipa_is_param_used (info, i))
3035 fprintf (f, " used");
3036 c = ipa_get_controlled_uses (info, i);
3037 if (c == IPA_UNDESCRIBED_USE)
3038 fprintf (f, " undescribed_use");
3039 else
3040 fprintf (f, " controlled_uses=%i", c);
3041 fprintf (f, "\n");
3045 /* Print ipa_tree_map data structures of all functions in the
3046 callgraph to F. */
3048 void
3049 ipa_print_all_params (FILE * f)
3051 struct cgraph_node *node;
3053 fprintf (f, "\nFunction parameters:\n");
3054 FOR_EACH_FUNCTION (node)
3055 ipa_print_node_params (f, node);
3058 /* Return a heap allocated vector containing formal parameters of FNDECL. */
3060 vec<tree>
3061 ipa_get_vector_of_formal_parms (tree fndecl)
3063 vec<tree> args;
3064 int count;
3065 tree parm;
3067 count = count_formal_params (fndecl);
3068 args.create (count);
3069 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3070 args.quick_push (parm);
3072 return args;
3075 /* Return a heap allocated vector containing types of formal parameters of
3076 function type FNTYPE. */
3078 static inline vec<tree>
3079 get_vector_of_formal_parm_types (tree fntype)
3081 vec<tree> types;
3082 int count = 0;
3083 tree t;
3085 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3086 count++;
3088 types.create (count);
3089 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3090 types.quick_push (TREE_VALUE (t));
3092 return types;
3095 /* Modify the function declaration FNDECL and its type according to the plan in
3096 ADJUSTMENTS. It also sets base fields of individual adjustments structures
3097 to reflect the actual parameters being modified which are determined by the
3098 base_index field. */
3100 void
3101 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
3102 const char *synth_parm_prefix)
3104 vec<tree> oparms, otypes;
3105 tree orig_type, new_type = NULL;
3106 tree old_arg_types, t, new_arg_types = NULL;
3107 tree parm, *link = &DECL_ARGUMENTS (fndecl);
3108 int i, len = adjustments.length ();
3109 tree new_reversed = NULL;
3110 bool care_for_types, last_parm_void;
3112 if (!synth_parm_prefix)
3113 synth_parm_prefix = "SYNTH";
3115 oparms = ipa_get_vector_of_formal_parms (fndecl);
3116 orig_type = TREE_TYPE (fndecl);
3117 old_arg_types = TYPE_ARG_TYPES (orig_type);
3119 /* The following test is an ugly hack, some functions simply don't have any
3120 arguments in their type. This is probably a bug but well... */
3121 care_for_types = (old_arg_types != NULL_TREE);
3122 if (care_for_types)
3124 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3125 == void_type_node);
3126 otypes = get_vector_of_formal_parm_types (orig_type);
3127 if (last_parm_void)
3128 gcc_assert (oparms.length () + 1 == otypes.length ());
3129 else
3130 gcc_assert (oparms.length () == otypes.length ());
3132 else
3134 last_parm_void = false;
3135 otypes.create (0);
3138 for (i = 0; i < len; i++)
3140 struct ipa_parm_adjustment *adj;
3141 gcc_assert (link);
3143 adj = &adjustments[i];
3144 parm = oparms[adj->base_index];
3145 adj->base = parm;
3147 if (adj->copy_param)
3149 if (care_for_types)
3150 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3151 new_arg_types);
3152 *link = parm;
3153 link = &DECL_CHAIN (parm);
3155 else if (!adj->remove_param)
3157 tree new_parm;
3158 tree ptype;
3160 if (adj->by_ref)
3161 ptype = build_pointer_type (adj->type);
3162 else
3163 ptype = adj->type;
3165 if (care_for_types)
3166 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3168 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3169 ptype);
3170 DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);
3172 DECL_ARTIFICIAL (new_parm) = 1;
3173 DECL_ARG_TYPE (new_parm) = ptype;
3174 DECL_CONTEXT (new_parm) = fndecl;
3175 TREE_USED (new_parm) = 1;
3176 DECL_IGNORED_P (new_parm) = 1;
3177 layout_decl (new_parm, 0);
3179 adj->base = parm;
3180 adj->reduction = new_parm;
3182 *link = new_parm;
3184 link = &DECL_CHAIN (new_parm);
3188 *link = NULL_TREE;
3190 if (care_for_types)
3192 new_reversed = nreverse (new_arg_types);
3193 if (last_parm_void)
3195 if (new_reversed)
3196 TREE_CHAIN (new_arg_types) = void_list_node;
3197 else
3198 new_reversed = void_list_node;
3202 /* Use copy_node to preserve as much as possible from original type
3203 (debug info, attribute lists etc.)
3204 Exception is METHOD_TYPEs must have THIS argument.
3205 When we are asked to remove it, we need to build new FUNCTION_TYPE
3206 instead. */
3207 if (TREE_CODE (orig_type) != METHOD_TYPE
3208 || (adjustments[0].copy_param
3209 && adjustments[0].base_index == 0))
3211 new_type = build_distinct_type_copy (orig_type);
3212 TYPE_ARG_TYPES (new_type) = new_reversed;
3214 else
3216 new_type
3217 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3218 new_reversed));
3219 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3220 DECL_VINDEX (fndecl) = NULL_TREE;
3223 /* When signature changes, we need to clear builtin info. */
3224 if (DECL_BUILT_IN (fndecl))
3226 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3227 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3230 /* This is a new type, not a copy of an old type. Need to reassociate
3231 variants. We can handle everything except the main variant lazily. */
3232 t = TYPE_MAIN_VARIANT (orig_type);
3233 if (orig_type != t)
3235 TYPE_MAIN_VARIANT (new_type) = t;
3236 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
3237 TYPE_NEXT_VARIANT (t) = new_type;
3239 else
3241 TYPE_MAIN_VARIANT (new_type) = new_type;
3242 TYPE_NEXT_VARIANT (new_type) = NULL;
3245 TREE_TYPE (fndecl) = new_type;
3246 DECL_VIRTUAL_P (fndecl) = 0;
3247 otypes.release ();
3248 oparms.release ();
/* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
   If this is a directly recursive call, CS must be NULL.  Otherwise it must
   contain the corresponding call graph edge.

   The original call statement STMT is replaced by a newly built call whose
   argument list is derived from ADJUSTMENTS: copied parameters keep their
   original actual argument, removed parameters are dropped, and new
   (reduced) parameters are synthesized from pieces of the original
   arguments.  Debug bind statements are emitted for arguments that are no
   longer passed so that their values remain visible to the debugger.  */

void
ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
			   ipa_parm_adjustment_vec adjustments)
{
  struct cgraph_node *current_node = cgraph_get_node (current_function_decl);
  vec<tree> vargs;
  vec<tree, va_gc> **debug_args = NULL;
  gimple new_stmt;
  gimple_stmt_iterator gsi, prev_gsi;
  tree callee_decl;
  int i, len;

  len = adjustments.length ();
  vargs.create (len);
  /* For a recursive call (CS == NULL) the callee is the function being
     called by STMT itself.  */
  callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->symbol.decl;
  ipa_remove_stmt_references ((symtab_node) current_node, stmt);

  gsi = gsi_for_stmt (stmt);
  /* Remember the statement just before STMT so that all statements newly
     inserted in between can be re-scanned for references afterwards.  */
  prev_gsi = gsi;
  gsi_prev (&prev_gsi);
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;

      adj = &adjustments[i];

      if (adj->copy_param)
	{
	  /* Parameter kept as-is: pass the original actual argument.  */
	  tree arg = gimple_call_arg (stmt, adj->base_index);

	  vargs.quick_push (arg);
	}
      else if (!adj->remove_param)
	{
	  tree expr, base, off;
	  location_t loc;
	  unsigned int deref_align;
	  bool deref_base = false;

	  /* We create a new parameter out of the value of the old one, we can
	     do the following kind of transformations:

	     - A scalar passed by reference is converted to a scalar passed by
	       value.  (adj->by_ref is false and the type of the original
	       actual argument is a pointer to a scalar).

	     - A part of an aggregate is passed instead of the whole aggregate.
	       The part can be passed either by value or by reference, this is
	       determined by value of adj->by_ref.  Moreover, the code below
	       handles both situations when the original aggregate is passed by
	       value (its type is not a pointer) and when it is passed by
	       reference (it is a pointer to an aggregate).

	     When the new argument is passed by reference (adj->by_ref is true)
	     it must be a part of an aggregate and therefore we form it by
	     simply taking the address of a reference inside the original
	     aggregate.  */

	  gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
	  base = gimple_call_arg (stmt, adj->base_index);
	  loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
			      : EXPR_LOCATION (base);

	  if (TREE_CODE (base) != ADDR_EXPR
	      && POINTER_TYPE_P (TREE_TYPE (base)))
	    off = build_int_cst (adj->alias_ptr_type,
				 adj->offset / BITS_PER_UNIT);
	  else
	    {
	      HOST_WIDE_INT base_offset;
	      tree prev_base;
	      bool addrof;

	      if (TREE_CODE (base) == ADDR_EXPR)
		{
		  base = TREE_OPERAND (base, 0);
		  addrof = true;
		}
	      else
		addrof = false;
	      prev_base = base;
	      base = get_addr_base_and_unit_offset (base, &base_offset);
	      /* Aggregate arguments can have non-invariant addresses.  */
	      if (!base)
		{
		  base = build_fold_addr_expr (prev_base);
		  off = build_int_cst (adj->alias_ptr_type,
				       adj->offset / BITS_PER_UNIT);
		}
	      else if (TREE_CODE (base) == MEM_REF)
		{
		  /* A MEM_REF that was not reached through an ADDR_EXPR is a
		     genuine dereference; its type alignment can be trusted
		     when computing the alignment of the new access below.  */
		  if (!addrof)
		    {
		      deref_base = true;
		      deref_align = TYPE_ALIGN (TREE_TYPE (base));
		    }
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
					 off);
		  base = TREE_OPERAND (base, 0);
		}
	      else
		{
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  base = build_fold_addr_expr (base);
		}
	    }

	  if (!adj->by_ref)
	    {
	      tree type = adj->type;
	      unsigned int align;
	      unsigned HOST_WIDE_INT misalign;

	      if (deref_base)
		{
		  align = deref_align;
		  misalign = 0;
		}
	      else
		{
		  get_pointer_alignment_1 (base, &align, &misalign);
		  if (TYPE_ALIGN (type) > align)
		    align = TYPE_ALIGN (type);
		}
	      /* Fold the constant offset into the misalignment and reduce
		 the access type's alignment accordingly so the resulting
		 MEM_REF does not over-promise alignment.  */
	      misalign += (tree_to_double_int (off)
			   .sext (TYPE_PRECISION (TREE_TYPE (off))).low
			   * BITS_PER_UNIT);
	      misalign = misalign & (align - 1);
	      if (misalign != 0)
		align = (misalign & -misalign);
	      if (align < TYPE_ALIGN (type))
		type = build_aligned_type (type, align);
	      expr = fold_build2_loc (loc, MEM_REF, type, base, off);
	    }
	  else
	    {
	      expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
	      expr = build_fold_addr_expr (expr);
	    }

	  expr = force_gimple_operand_gsi (&gsi, expr,
					   adj->by_ref
					   || is_gimple_reg_type (adj->type),
					   NULL, true, GSI_SAME_STMT);
	  vargs.quick_push (expr);
	}
      /* For arguments that are no longer passed verbatim, bind their value
	 to a debug decl so debug info for the callee stays usable.  */
      if (!adj->copy_param && MAY_HAVE_DEBUG_STMTS)
	{
	  unsigned int ix;
	  tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
	  gimple def_temp;

	  arg = gimple_call_arg (stmt, adj->base_index);
	  if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
	    {
	      if (!fold_convertible_p (TREE_TYPE (origin), arg))
		continue;
	      arg = fold_convert_loc (gimple_location (stmt),
				      TREE_TYPE (origin), arg);
	    }
	  if (debug_args == NULL)
	    debug_args = decl_debug_args_insert (callee_decl);
	  /* The debug args vector stores (origin, debug decl) pairs, hence
	     the stride of 2.  */
	  for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
	    if (ddecl == origin)
	      {
		ddecl = (**debug_args)[ix + 1];
		break;
	      }
	  if (ddecl == NULL)
	    {
	      ddecl = make_node (DEBUG_EXPR_DECL);
	      DECL_ARTIFICIAL (ddecl) = 1;
	      TREE_TYPE (ddecl) = TREE_TYPE (origin);
	      DECL_MODE (ddecl) = DECL_MODE (origin);

	      vec_safe_push (*debug_args, origin);
	      vec_safe_push (*debug_args, ddecl);
	    }
	  def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "replacing stmt:");
      print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
    }

  new_stmt = gimple_build_call_vec (callee_decl, vargs);
  vargs.release ();
  if (gimple_call_lhs (stmt))
    gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));

  gimple_set_block (new_stmt, gimple_block (stmt));
  if (gimple_has_location (stmt))
    gimple_set_location (new_stmt, gimple_location (stmt));
  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
  gimple_call_copy_flags (new_stmt, stmt);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "with stmt:");
      print_gimple_stmt (dump_file, new_stmt, 0, 0);
      fprintf (dump_file, "\n");
    }
  gsi_replace (&gsi, new_stmt, true);
  if (cs)
    cgraph_set_call_stmt (cs, new_stmt);
  /* Re-scan references of everything inserted between the remembered
     predecessor and the (replaced) call statement.  */
  do
    {
      ipa_record_stmt_references (current_node, gsi_stmt (gsi));
      gsi_prev (&gsi);
    }
  while ((gsi_end_p (prev_gsi) && !gsi_end_p (gsi))
	 || (!gsi_end_p (prev_gsi) && gsi_stmt (gsi) == gsi_stmt (prev_gsi)));

  update_ssa (TODO_update_ssa);
  free_dominance_info (CDI_DOMINATORS);
}
3481 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
3483 static bool
3484 index_in_adjustments_multiple_times_p (int base_index,
3485 ipa_parm_adjustment_vec adjustments)
3487 int i, len = adjustments.length ();
3488 bool one = false;
3490 for (i = 0; i < len; i++)
3492 struct ipa_parm_adjustment *adj;
3493 adj = &adjustments[i];
3495 if (adj->base_index == base_index)
3497 if (one)
3498 return true;
3499 else
3500 one = true;
3503 return false;
/* Return adjustments that should have the same effect on function parameters
   and call arguments as if they were first changed according to adjustments in
   INNER and then by adjustments in OUTER.

   OUTER's base_index values refer to the parameter list produced by applying
   INNER, so they are translated back to the original parameter indices via
   the TMP vector of surviving INNER adjustments.  Removals present in INNER
   are re-appended at the end so the combined vector still removes those
   original parameters.  */

ipa_parm_adjustment_vec
ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
			 ipa_parm_adjustment_vec outer)
{
  int i, outlen = outer.length ();
  int inlen = inner.length ();
  int removals = 0;
  ipa_parm_adjustment_vec adjustments, tmp;

  /* TMP holds the INNER adjustments that survive (are not removals); its
     index space matches OUTER's base_index space.  */
  tmp.create (inlen);
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n;
      n = &inner[i];

      if (n->remove_param)
	removals++;
      else
	tmp.quick_push (*n);
    }

  adjustments.create (outlen + removals);
  for (i = 0; i < outlen; i++)
    {
      struct ipa_parm_adjustment r;
      struct ipa_parm_adjustment *out = &outer[i];
      struct ipa_parm_adjustment *in = &tmp[out->base_index];

      memset (&r, 0, sizeof (r));
      gcc_assert (!in->remove_param);
      if (out->remove_param)
	{
	  /* Only emit the removal once per original parameter; if the same
	     base index appears multiple times in TMP, another entry may
	     still use it.  */
	  if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
	    {
	      r.remove_param = true;
	      adjustments.quick_push (r);
	    }
	  continue;
	}

      r.base_index = in->base_index;
      r.type = out->type;

      /* FIXME:  Create nonlocal value too.  */

      /* Offsets compose additively; a plain copy on either side contributes
	 zero offset.  */
      if (in->copy_param && out->copy_param)
	r.copy_param = true;
      else if (in->copy_param)
	r.offset = out->offset;
      else if (out->copy_param)
	r.offset = in->offset;
      else
	r.offset = in->offset + out->offset;
      adjustments.quick_push (r);
    }

  /* Re-append INNER's removals of original parameters.  */
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n = &inner[i];

      if (n->remove_param)
	adjustments.quick_push (*n);
    }

  tmp.release ();
  return adjustments;
}
3579 /* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
3580 friendly way, assuming they are meant to be applied to FNDECL. */
3582 void
3583 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
3584 tree fndecl)
3586 int i, len = adjustments.length ();
3587 bool first = true;
3588 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
3590 fprintf (file, "IPA param adjustments: ");
3591 for (i = 0; i < len; i++)
3593 struct ipa_parm_adjustment *adj;
3594 adj = &adjustments[i];
3596 if (!first)
3597 fprintf (file, " ");
3598 else
3599 first = false;
3601 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
3602 print_generic_expr (file, parms[adj->base_index], 0);
3603 if (adj->base)
3605 fprintf (file, ", base: ");
3606 print_generic_expr (file, adj->base, 0);
3608 if (adj->reduction)
3610 fprintf (file, ", reduction: ");
3611 print_generic_expr (file, adj->reduction, 0);
3613 if (adj->new_ssa_base)
3615 fprintf (file, ", new_ssa_base: ");
3616 print_generic_expr (file, adj->new_ssa_base, 0);
3619 if (adj->copy_param)
3620 fprintf (file, ", copy_param");
3621 else if (adj->remove_param)
3622 fprintf (file, ", remove_param");
3623 else
3624 fprintf (file, ", offset %li", (long) adj->offset);
3625 if (adj->by_ref)
3626 fprintf (file, ", by_ref");
3627 print_node_brief (file, ", type: ", adj->type, 0);
3628 fprintf (file, "\n");
3630 parms.release ();
3633 /* Dump the AV linked list. */
3635 void
3636 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
3638 bool comma = false;
3639 fprintf (f, " Aggregate replacements:");
3640 for (; av; av = av->next)
3642 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
3643 av->index, av->offset);
3644 print_generic_expr (f, av->value, 0);
3645 comma = true;
3647 fprintf (f, "\n");
/* Stream out jump function JUMP_FUNC to OB.

   The order and encoding of fields here must exactly mirror
   ipa_read_jump_function below.  */

static void
ipa_write_jump_function (struct output_block *ob,
			 struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;

  /* Type tag first, then the type-specific payload.  */
  streamer_write_uhwi (ob, jump_func->type);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_KNOWN_TYPE:
      streamer_write_uhwi (ob, jump_func->value.known_type.offset);
      stream_write_tree (ob, jump_func->value.known_type.base_type, true);
      stream_write_tree (ob, jump_func->value.known_type.component_type, true);
      break;
    case IPA_JF_CONST:
      gcc_assert (
	  EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
      stream_write_tree (ob, jump_func->value.constant.value, true);
      break;
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      if (jump_func->value.pass_through.operation == NOP_EXPR)
	{
	  /* Simple pass-through: no operand, but an agg_preserved bit.  */
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
	  streamer_write_bitpack (&bp);
	}
      else
	{
	  /* Arithmetic pass-through carries the extra operand.  */
	  stream_write_tree (ob, jump_func->value.pass_through.operand, true);
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	}
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      stream_write_tree (ob, jump_func->value.ancestor.type, true);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      streamer_write_bitpack (&bp);
      break;
    }

  /* The aggregate part: item count, by_ref flag (only when non-empty),
     then the (offset, value) items.  */
  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);
  if (count)
    {
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->agg.by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
    {
      streamer_write_uhwi (ob, item->offset);
      stream_write_tree (ob, item->value, true);
    }
}
/* Read in jump function JUMP_FUNC from IB.

   Must mirror ipa_write_jump_function field for field.  CS is the call
   graph edge the jump function belongs to; it is passed down to
   ipa_set_jf_constant.  */

static void
ipa_read_jump_function (struct lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct cgraph_edge *cs,
			struct data_in *data_in)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;

  jftype = (enum jump_func_type) streamer_read_uhwi (ib);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      jump_func->type = IPA_JF_UNKNOWN;
      break;
    case IPA_JF_KNOWN_TYPE:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	tree base_type = stream_read_tree (ib, data_in);
	tree component_type = stream_read_tree (ib, data_in);

	ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
	break;
      }
    case IPA_JF_CONST:
      ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
      break;
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
	{
	  /* Simple pass-through: formal id plus the agg_preserved bit.  */
	  int formal_id = streamer_read_uhwi (ib);
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool agg_preserved = bp_unpack_value (&bp, 1);
	  ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
	}
      else
	{
	  /* Arithmetic pass-through: operand tree then formal id.  */
	  tree operand = stream_read_tree (ib, data_in);
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
					 operation);
	}
      break;
    case IPA_JF_ANCESTOR:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	tree type = stream_read_tree (ib, data_in);
	int formal_id = streamer_read_uhwi (ib);
	struct bitpack_d bp = streamer_read_bitpack (ib);
	bool agg_preserved = bp_unpack_value (&bp, 1);

	ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved);
	break;
      }
    }

  /* The aggregate part: count, optional by_ref bit, then the items.  */
  count = streamer_read_uhwi (ib);
  vec_alloc (jump_func->agg.items, count);
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item item;
      item.offset = streamer_read_uhwi (ib);
      item.value = stream_read_tree (ib, data_in);
      jump_func->agg.items->quick_push (item);
    }
}
/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.

   Field order must mirror ipa_read_indirect_edge_info.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  streamer_write_hwi (ob, ii->offset);
  /* Four boolean flags packed into one bitpack.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->member_ptr, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  streamer_write_bitpack (&bp);

  /* OBJ_TYPE_REF data is only present for polymorphic calls.  */
  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
    }
}
3818 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
3819 relevant to indirect inlining from IB. */
3821 static void
3822 ipa_read_indirect_edge_info (struct lto_input_block *ib,
3823 struct data_in *data_in ATTRIBUTE_UNUSED,
3824 struct cgraph_edge *cs)
3826 struct cgraph_indirect_call_info *ii = cs->indirect_info;
3827 struct bitpack_d bp;
3829 ii->param_index = (int) streamer_read_hwi (ib);
3830 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
3831 bp = streamer_read_bitpack (ib);
3832 ii->polymorphic = bp_unpack_value (&bp, 1);
3833 ii->agg_contents = bp_unpack_value (&bp, 1);
3834 ii->member_ptr = bp_unpack_value (&bp, 1);
3835 ii->by_ref = bp_unpack_value (&bp, 1);
3836 if (ii->polymorphic)
3838 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
3839 ii->otr_type = stream_read_tree (ib, data_in);
/* Stream out NODE info to OB.

   Layout (mirrored by ipa_read_node_info): node reference, a bitpack with
   one "used" bit per parameter, one controlled-uses counter per parameter,
   then for each direct and indirect callee edge the argument count and the
   per-argument jump functions (indirect edges additionally stream their
   indirect call info).  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
  streamer_write_uhwi (ob, node_ref);

  bp = bitpack_create (ob->main_stream);
  /* Analysis must have run unless the node has no parameters at all.  */
  gcc_assert (info->uses_analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
  streamer_write_bitpack (&bp);
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
      ipa_write_indirect_edge_info (ob, e);
    }
}
/* Stream in NODE info from IB.

   Must mirror ipa_write_node_info.  */

static void
ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
		    struct data_in *data_in)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  ipa_initialize_node_params (node);

  bp = streamer_read_bitpack (ib);
  /* Only mark analysis as done when there are parameters whose bits were
     actually streamed.  */
  if (ipa_get_param_count (info) != 0)
    info->uses_analysis_done = true;
  info->node_enqueued = false;
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);

      if (!count)
	continue;
      vec_safe_grow_cleared (args->jump_functions, count);

      for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				data_in);
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);

      if (count)
	{
	  vec_safe_grow_cleared (args->jump_functions, count);
	  for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	    ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				    data_in);
	}
      /* Indirect edges always carry their indirect call info, even when
	 they have no arguments.  */
      ipa_read_indirect_edge_info (ib, data_in, e);
    }
}
3938 /* Write jump functions for nodes in SET. */
3940 void
3941 ipa_prop_write_jump_functions (void)
3943 struct cgraph_node *node;
3944 struct output_block *ob;
3945 unsigned int count = 0;
3946 lto_symtab_encoder_iterator lsei;
3947 lto_symtab_encoder_t encoder;
3950 if (!ipa_node_params_vector.exists ())
3951 return;
3953 ob = create_output_block (LTO_section_jump_functions);
3954 encoder = ob->decl_state->symtab_node_encoder;
3955 ob->cgraph_node = NULL;
3956 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
3957 lsei_next_function_in_partition (&lsei))
3959 node = lsei_cgraph_node (lsei);
3960 if (cgraph_function_with_gimple_body_p (node)
3961 && IPA_NODE_REF (node) != NULL)
3962 count++;
3965 streamer_write_uhwi (ob, count);
3967 /* Process all of the functions. */
3968 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
3969 lsei_next_function_in_partition (&lsei))
3971 node = lsei_cgraph_node (lsei);
3972 if (cgraph_function_with_gimple_body_p (node)
3973 && IPA_NODE_REF (node) != NULL)
3974 ipa_write_node_info (ob, node);
3976 streamer_write_char_stream (ob->main_stream, 0);
3977 produce_asm (ob, NULL);
3978 destroy_output_block (ob);
/* Read section in file FILE_DATA of length LEN with data DATA.

   DATA begins with an lto_function_header; the main stream (node count
   followed by per-node info) starts after the header and the CFG part, and
   the string table follows the main stream.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
		       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  struct lto_input_block ib_main;
  unsigned int i;
  unsigned int count;

  LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
			header->main_size);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = cgraph (lto_symtab_encoder_deref (encoder, index));
      gcc_assert (node->symbol.definition);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
4022 /* Read ipcp jump functions. */
4024 void
4025 ipa_prop_read_jump_functions (void)
4027 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4028 struct lto_file_decl_data *file_data;
4029 unsigned int j = 0;
4031 ipa_check_create_node_params ();
4032 ipa_check_create_edge_args ();
4033 ipa_register_cgraph_hooks ();
4035 while ((file_data = file_data_vec[j++]))
4037 size_t len;
4038 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4040 if (data)
4041 ipa_prop_read_section (file_data, data, len);
/* After merging units, we can get mismatch in argument counts.
   Also decl merging might've rendered parameter lists obsolete.
   Also compute called_with_variable_arg info.

   Re-creates parameter descriptors for every defined function so the IPA
   summaries match the merged decls.  */

void
ipa_update_after_lto_read (void)
{
  struct cgraph_node *node;

  /* Make sure the summary vectors exist before (re)populating them.  */
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();

  FOR_EACH_DEFINED_FUNCTION (node)
    ipa_initialize_node_params (node);
}
/* Stream out the aggregate value replacement chain of NODE to OB.
   Field order must mirror read_agg_replacement_chain.  */

void
write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  unsigned int count = 0;
  lto_symtab_encoder_t encoder;
  struct ipa_agg_replacement_value *aggvals, *av;

  aggvals = ipa_get_agg_replacements_for_node (node);
  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
  streamer_write_uhwi (ob, node_ref);

  /* Entry count first so the reader knows how many records follow.  */
  for (av = aggvals; av; av = av->next)
    count++;
  streamer_write_uhwi (ob, count);

  for (av = aggvals; av; av = av->next)
    {
      struct bitpack_d bp;

      streamer_write_uhwi (ob, av->offset);
      streamer_write_uhwi (ob, av->index);
      stream_write_tree (ob, av->value, true);

      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, av->by_ref, 1);
      streamer_write_bitpack (&bp);
    }
}
4092 /* Stream in the aggregate value replacement chain for NODE from IB. */
4094 static void
4095 read_agg_replacement_chain (struct lto_input_block *ib,
4096 struct cgraph_node *node,
4097 struct data_in *data_in)
4099 struct ipa_agg_replacement_value *aggvals = NULL;
4100 unsigned int count, i;
4102 count = streamer_read_uhwi (ib);
4103 for (i = 0; i <count; i++)
4105 struct ipa_agg_replacement_value *av;
4106 struct bitpack_d bp;
4108 av = ggc_alloc_ipa_agg_replacement_value ();
4109 av->offset = streamer_read_uhwi (ib);
4110 av->index = streamer_read_uhwi (ib);
4111 av->value = stream_read_tree (ib, data_in);
4112 bp = streamer_read_bitpack (ib);
4113 av->by_ref = bp_unpack_value (&bp, 1);
4114 av->next = aggvals;
4115 aggvals = av;
4117 ipa_set_node_agg_value_chain (node, aggvals);
4120 /* Write all aggregate replacement for nodes in set. */
4122 void
4123 ipa_prop_write_all_agg_replacement (void)
4125 struct cgraph_node *node;
4126 struct output_block *ob;
4127 unsigned int count = 0;
4128 lto_symtab_encoder_iterator lsei;
4129 lto_symtab_encoder_t encoder;
4131 if (!ipa_node_agg_replacements)
4132 return;
4134 ob = create_output_block (LTO_section_ipcp_transform);
4135 encoder = ob->decl_state->symtab_node_encoder;
4136 ob->cgraph_node = NULL;
4137 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4138 lsei_next_function_in_partition (&lsei))
4140 node = lsei_cgraph_node (lsei);
4141 if (cgraph_function_with_gimple_body_p (node)
4142 && ipa_get_agg_replacements_for_node (node) != NULL)
4143 count++;
4146 streamer_write_uhwi (ob, count);
4148 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4149 lsei_next_function_in_partition (&lsei))
4151 node = lsei_cgraph_node (lsei);
4152 if (cgraph_function_with_gimple_body_p (node)
4153 && ipa_get_agg_replacements_for_node (node) != NULL)
4154 write_agg_replacement_chain (ob, node);
4156 streamer_write_char_stream (ob->main_stream, 0);
4157 produce_asm (ob, NULL);
4158 destroy_output_block (ob);
4161 /* Read replacements section in file FILE_DATA of length LEN with data
4162 DATA. */
4164 static void
4165 read_replacements_section (struct lto_file_decl_data *file_data,
4166 const char *data,
4167 size_t len)
4169 const struct lto_function_header *header =
4170 (const struct lto_function_header *) data;
4171 const int cfg_offset = sizeof (struct lto_function_header);
4172 const int main_offset = cfg_offset + header->cfg_size;
4173 const int string_offset = main_offset + header->main_size;
4174 struct data_in *data_in;
4175 struct lto_input_block ib_main;
4176 unsigned int i;
4177 unsigned int count;
4179 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4180 header->main_size);
4182 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
4183 header->string_size, vNULL);
4184 count = streamer_read_uhwi (&ib_main);
4186 for (i = 0; i < count; i++)
4188 unsigned int index;
4189 struct cgraph_node *node;
4190 lto_symtab_encoder_t encoder;
4192 index = streamer_read_uhwi (&ib_main);
4193 encoder = file_data->symtab_node_encoder;
4194 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4195 gcc_assert (node->symbol.definition);
4196 read_agg_replacement_chain (&ib_main, node, data_in);
4198 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4199 len);
4200 lto_data_in_delete (data_in);
4203 /* Read IPA-CP aggregate replacements. */
4205 void
4206 ipa_prop_read_all_agg_replacement (void)
4208 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4209 struct lto_file_decl_data *file_data;
4210 unsigned int j = 0;
4212 while ((file_data = file_data_vec[j++]))
4214 size_t len;
4215 const char *data = lto_get_section_data (file_data,
4216 LTO_section_ipcp_transform,
4217 NULL, &len);
4218 if (data)
4219 read_replacements_section (file_data, data, len);
/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
   NODE.

   Builds a mapping from original parameter indices to the clone's compacted
   indices (skipped parameters map to -1) and rewrites each value's index
   through it.  No-op when the clone skips no arguments.  */

static void
adjust_agg_replacement_values (struct cgraph_node *node,
			       struct ipa_agg_replacement_value *aggval)
{
  struct ipa_agg_replacement_value *v;
  int i, c = 0, d = 0, *adj;

  if (!node->clone.combined_args_to_skip)
    return;

  /* C becomes one past the largest index referenced by any value, i.e. the
     size of the remap table we need.  */
  for (v = aggval; v; v = v->next)
    {
      gcc_assert (v->index >= 0);
      if (c < v->index)
	c = v->index;
    }
  c++;

  /* ADJ[i] is the new index of original parameter I; D counts how many
     parameters have been skipped so far.  */
  adj = XALLOCAVEC (int, c);
  for (i = 0; i < c; i++)
    if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
      {
	adj[i] = -1;
	d++;
      }
    else
      adj[i] = i - d;

  for (v = aggval; v; v = v->next)
    v->index = adj[v->index];
}
4259 /* Function body transformation phase. */
4261 unsigned int
4262 ipcp_transform_function (struct cgraph_node *node)
4264 vec<ipa_param_descriptor_t> descriptors = vNULL;
4265 struct param_analysis_info *parms_ainfo;
4266 struct ipa_agg_replacement_value *aggval;
4267 gimple_stmt_iterator gsi;
4268 basic_block bb;
4269 int param_count;
4270 bool cfg_changed = false, something_changed = false;
4272 gcc_checking_assert (cfun);
4273 gcc_checking_assert (current_function_decl);
4275 if (dump_file)
4276 fprintf (dump_file, "Modification phase of node %s/%i\n",
4277 cgraph_node_name (node), node->symbol.order);
4279 aggval = ipa_get_agg_replacements_for_node (node);
4280 if (!aggval)
4281 return 0;
4282 param_count = count_formal_params (node->symbol.decl);
4283 if (param_count == 0)
4284 return 0;
4285 adjust_agg_replacement_values (node, aggval);
4286 if (dump_file)
4287 ipa_dump_agg_replacement_values (dump_file, aggval);
4288 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
4289 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
4290 descriptors.safe_grow_cleared (param_count);
4291 ipa_populate_param_decls (node, descriptors);
4293 FOR_EACH_BB (bb)
4294 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4296 struct ipa_agg_replacement_value *v;
4297 gimple stmt = gsi_stmt (gsi);
4298 tree rhs, val, t;
4299 HOST_WIDE_INT offset;
4300 int index;
4301 bool by_ref, vce;
4303 if (!gimple_assign_load_p (stmt))
4304 continue;
4305 rhs = gimple_assign_rhs1 (stmt);
4306 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
4307 continue;
4309 vce = false;
4310 t = rhs;
4311 while (handled_component_p (t))
4313 /* V_C_E can do things like convert an array of integers to one
4314 bigger integer and similar things we do not handle below. */
4315 if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
4317 vce = true;
4318 break;
4320 t = TREE_OPERAND (t, 0);
4322 if (vce)
4323 continue;
4325 if (!ipa_load_from_parm_agg_1 (descriptors, parms_ainfo, stmt,
4326 rhs, &index, &offset, &by_ref))
4327 continue;
4328 for (v = aggval; v; v = v->next)
4329 if (v->index == index
4330 && v->offset == offset)
4331 break;
4332 if (!v || v->by_ref != by_ref)
4333 continue;
4335 gcc_checking_assert (is_gimple_ip_invariant (v->value));
4336 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
4338 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
4339 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
4340 else if (TYPE_SIZE (TREE_TYPE (rhs))
4341 == TYPE_SIZE (TREE_TYPE (v->value)))
4342 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
4343 else
4345 if (dump_file)
4347 fprintf (dump_file, " const ");
4348 print_generic_expr (dump_file, v->value, 0);
4349 fprintf (dump_file, " can't be converted to type of ");
4350 print_generic_expr (dump_file, rhs, 0);
4351 fprintf (dump_file, "\n");
4353 continue;
4356 else
4357 val = v->value;
4359 if (dump_file && (dump_flags & TDF_DETAILS))
4361 fprintf (dump_file, "Modifying stmt:\n ");
4362 print_gimple_stmt (dump_file, stmt, 0, 0);
4364 gimple_assign_set_rhs_from_tree (&gsi, val);
4365 update_stmt (stmt);
4367 if (dump_file && (dump_flags & TDF_DETAILS))
4369 fprintf (dump_file, "into:\n ");
4370 print_gimple_stmt (dump_file, stmt, 0, 0);
4371 fprintf (dump_file, "\n");
4374 something_changed = true;
4375 if (maybe_clean_eh_stmt (stmt)
4376 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4377 cfg_changed = true;
4380 (*ipa_node_agg_replacements)[node->uid] = NULL;
4381 free_parms_ainfo (parms_ainfo, param_count);
4382 descriptors.release ();
4384 if (!something_changed)
4385 return 0;
4386 else if (cfg_changed)
4387 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
4388 else
4389 return TODO_update_ssa_only_virtuals;