Remove unused variable and field
[official-gcc.git] / gcc / ipa-prop.c
blobb06f640b3f8727a7651b522b37cf66632c59def9
1 /* Interprocedural analyses.
2 Copyright (C) 2005-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tree.h"
24 #include "langhooks.h"
25 #include "ggc.h"
26 #include "target.h"
27 #include "cgraph.h"
28 #include "ipa-prop.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-inline.h"
32 #include "ipa-inline.h"
33 #include "gimple.h"
34 #include "flags.h"
35 #include "diagnostic.h"
36 #include "gimple-pretty-print.h"
37 #include "lto-streamer.h"
38 #include "data-streamer.h"
39 #include "tree-streamer.h"
40 #include "params.h"
/* Intermediate information about a parameter that is only useful during the
   run of ipa_analyze_node and is not kept afterwards.  */

struct param_analysis_info
{
  /* Cached negative results of the modification walks: once a walk finds the
     parameter (or the memory it references / points to) modified, the
     corresponding flag is set so subsequent queries bail out early.  */
  bool parm_modified, ref_modified, pt_modified;
  /* Statements already visited by the respective alias walks, so the same
     statement is never examined twice.  */
  bitmap parm_visited_statements, pt_visited_statements;
};
/* Vector where the parameter infos are actually stored.  */
vec<ipa_node_params_t> ipa_node_params_vector;
/* Vector of known aggregate values in cloned nodes.  */
vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
/* Vector where the call-site argument infos (jump functions) are actually
   stored, indexed by call graph edge uid.  */
vec<ipa_edge_args_t, va_gc> *ipa_edge_args_vector;
/* Holders of ipa cgraph hooks, kept so the hooks can be unregistered when
   the IPA structures are freed: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};
/* Allocation pool for reference descriptions.  Created lazily on first use
   in ipa_set_jf_constant.  */

static alloc_pool ipa_refdesc_pool;
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->symbol.decl);
  struct cl_optimization *os;

  /* No per-function optimization attributes: nothing forbids the analysis.  */
  if (!fs_opts)
    return false;
  os = TREE_OPTIMIZATION (fs_opts);
  /* Analysis is forbidden when the function is compiled without optimization
     or with IPA-CP explicitly disabled.  */
  return !os->x_optimize || !os->x_flag_ipa_cp;
}
96 /* Return index of the formal whose tree is PTREE in function which corresponds
97 to INFO. */
99 static int
100 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor_t> descriptors, tree ptree)
102 int i, count;
104 count = descriptors.length ();
105 for (i = 0; i < count; i++)
106 if (descriptors[i].decl == ptree)
107 return i;
109 return -1;
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO, or -1 if it is not among its parameters.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  DESCRIPTORS must already be sized to the parameter count (see
   ipa_alloc_node_params).  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor_t> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->symbol.decl;
  /* The DECL_ARGUMENTS chain is only meaningful when a gimple body exists.  */
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm));
      param_num++;
    }
}
145 /* Return how many formal parameters FNDECL has. */
147 static inline int
148 count_formal_params (tree fndecl)
150 tree parm;
151 int count = 0;
152 gcc_assert (gimple_has_body_p (fndecl));
154 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
155 count++;
157 return count;
160 /* Return the declaration of Ith formal parameter of the function corresponding
161 to INFO. Note there is no setter function as this array is built just once
162 using ipa_initialize_node_params. */
164 void
165 ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
167 fprintf (file, "param #%i", i);
168 if (info->descriptors[i].decl)
170 fprintf (file, " ");
171 print_generic_expr (file, info->descriptors[i].decl, 0);
/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  Does nothing if the descriptors already
   exist or PARAM_COUNT is zero.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}
/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  Idempotent: does nothing when descriptors already exist.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->symbol.decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}
/* Print the jump functions associated with call graph edge CS to file F.
   For each actual argument, the jump function type is printed followed by
   its type-specific payload, and then any aggregate jump functions that
   describe data passed in memory.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, " param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_KNOWN_TYPE)
	{
	  fprintf (f, "KNOWN TYPE: base ");
	  print_generic_expr (f, jump_func->value.known_type.base_type, 0);
	  fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
		   jump_func->value.known_type.offset);
	  print_generic_expr (f, jump_func->value.known_type.component_type, 0);
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val, 0);
	  /* For addresses of CONST_DECLs, also print the constant value.  */
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
				  0);
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   tree_code_name[(int)
				  jump_func->value.pass_through.operation]);
	  /* A NOP_EXPR pass-through has no extra operand to print.  */
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f,
				  jump_func->value.pass_through.operand, 0);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  print_generic_expr (f, jump_func->value.ancestor.type, 0);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      /* Aggregate jump functions are orthogonal to the scalar ones above
	 and are printed for any type.  */
      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, " Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      /* A TYPE_P value denotes a clobber of that many bits rather
		 than a known constant.  */
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_low_cst (TYPE_SIZE (item->value), 1));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value, 0);
		}
	      fprintf (f, "\n");
	    }
	}
    }
}
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  Covers both direct callees and indirect call sites.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, " Jump functions of caller %s/%i:\n", cgraph_node_name (node),
	   node->symbol.order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      /* NOTE(review): the xstrdup results below are never freed — a small
	 leak in dump code; harmless but worth confirming upstream.  */
      fprintf (f, " callsite %s/%i -> %s/%i : \n",
	       xstrdup (cgraph_node_name (node)), node->symbol.order,
	       xstrdup (cgraph_node_name (cs->callee)),
	       cs->callee->symbol.order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, " indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by_value");
      else
	fprintf (f, " indirect %s callsite, calling param %i",
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}
364 /* Set JFUNC to be a known type jump function. */
366 static void
367 ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
368 tree base_type, tree component_type)
370 jfunc->type = IPA_JF_KNOWN_TYPE;
371 jfunc->value.known_type.offset = offset,
372 jfunc->value.known_type.base_type = base_type;
373 jfunc->value.known_type.component_type = component_type;
376 /* Set JFUNC to be a constant jmp function. */
378 static void
379 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
380 struct cgraph_edge *cs)
382 constant = unshare_expr (constant);
383 if (constant && EXPR_P (constant))
384 SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
385 jfunc->type = IPA_JF_CONST;
386 jfunc->value.constant.value = unshare_expr_without_location (constant);
388 if (TREE_CODE (constant) == ADDR_EXPR
389 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
391 struct ipa_cst_ref_desc *rdesc;
392 if (!ipa_refdesc_pool)
393 ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
394 sizeof (struct ipa_cst_ref_desc), 32);
396 rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
397 rdesc->cs = cs;
398 rdesc->next_duplicate = NULL;
399 rdesc->refcount = 1;
400 jfunc->value.constant.rdesc = rdesc;
402 else
403 jfunc->value.constant.rdesc = NULL;
/* Set JFUNC to be a simple pass-through jump function: formal FORMAL_ID of
   the caller is passed unchanged.  AGG_PRESERVED records whether aggregate
   contents reachable through the value are also known to be unchanged.  */
static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  /* NOP_EXPR marks the "no arithmetic applied" case.  */
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}
/* Set JFUNC to be an arithmetic pass through jump function: the argument is
   the caller's formal FORMAL_ID combined with constant OPERAND using
   OPERATION.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  /* Arithmetic on the value never guarantees aggregate preservation.  */
  jfunc->value.pass_through.agg_preserved = false;
}
/* Set JFUNC to be an ancestor jump function: the argument is the address at
   OFFSET within the object passed as the caller's formal FORMAL_ID, viewed
   as TYPE.  AGG_PRESERVED records whether the pointed-to data is known to be
   unchanged.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     tree type, int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.type = type;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}
/* Extract the actual BINFO being described by JFUNC which must be a known type
   jump function.  Return NULL_TREE if the base type has no BINFO.  */

tree
ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
{
  tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
  if (!base_binfo)
    return NULL_TREE;
  return get_binfo_at_offset (base_binfo,
			      jfunc->value.known_type.offset,
			      jfunc->value.known_type.component_type);
}
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors is set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructor of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  /* Calls are safely ignorable per the reasoning above.  */
  if (is_gimple_call (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  /* With strict aliasing a VMT pointer store must have pointer
	     type; anything else cannot be one.  */
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  /* A store to a non-virtual field is not a VMT pointer store.  */
	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  /* Conservatively assume everything else may store the VMT pointer.  */
  return true;
}
/* If STMT can be proved to be an assignment to the virtual method table
   pointer of the object described by TCI and the type associated with the new
   table identified, return the type.  Otherwise return NULL_TREE.  */

static tree
extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
{
  HOST_WIDE_INT offset, size, max_size;
  tree lhs, rhs, base;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  /* Require a store of an address into a virtual field.  */
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
      || TREE_CODE (rhs) != ADDR_EXPR)
    return NULL_TREE;
  /* The stored address must be that of a virtual table variable.  */
  rhs = get_base_address (TREE_OPERAND (rhs, 0));
  if (!rhs
      || TREE_CODE (rhs) != VAR_DECL
      || !DECL_VIRTUAL_P (rhs))
    return NULL_TREE;

  /* The store must be a pointer-sized write at exactly the offset we are
     tracking.  */
  base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
  if (offset != tci->offset
      || size != POINTER_SIZE
      || max_size != POINTER_SIZE)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      /* Both accesses must go through the same pointer with the same
	 constant offset.  */
      if (TREE_CODE (tci->object) != MEM_REF
	  || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
	  || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
				  TREE_OPERAND (base, 1)))
	return NULL_TREE;
    }
  else if (tci->object != base)
    return NULL_TREE;

  /* The type the new virtual table belongs to.  */
  return DECL_CONTEXT (rhs);
}
/* Callback of walk_aliased_vdefs and a helper function for
   detect_type_change to check whether a particular statement may modify
   the virtual table pointer, and if possible also determine the new type of
   the (sub-)object.  It stores its result into DATA, which points to a
   type_change_info structure.  Returns true to terminate the walk once a
   potential VMT store has been found.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct type_change_info *tci = (struct type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tree type;
      type = extr_type_from_vtbl_ptr_store (stmt, tci);
      /* Conflicting answers from different stores invalidate the derived
	 type, though a change is still recorded.  */
      if (tci->type_maybe_changed
	  && type != tci->known_current_type)
	tci->multiple_types_encountered = true;
      tci->known_current_type = type;
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* Like detect_type_change but with extra argument COMP_TYPE which will become
   the component type part of new JFUNC if dynamic type change is detected and
   the new base type is identified.  */

static bool
detect_type_change_1 (tree arg, tree base, tree comp_type, gimple call,
		      struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  struct type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));
  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call))
    return false;

  /* Restrict the alias oracle query to the pointer-sized slot holding the
     VMT pointer.  */
  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.known_current_type = NULL_TREE;
  tci.type_maybe_changed = false;
  tci.multiple_types_encountered = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL);
  if (!tci.type_maybe_changed)
    return false;

  /* Only a single, identified type at offset zero yields a useful known
     type jump function; otherwise the result is unknown.  */
  if (!tci.known_current_type
      || tci.multiple_types_encountered
      || offset != 0)
    jfunc->type = IPA_JF_UNKNOWN;
  else
    ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);

  return true;
}
/* Detect whether the dynamic type of ARG has changed (before callsite CALL) by
   looking for assignments to its virtual table pointer.  If it is, return true
   and fill in the jump function JFUNC with relevant type information or set it
   to unknown.  ARG is the object itself (not a pointer to it, unless
   dereferenced).  BASE is the base of the memory access as returned by
   get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, gimple call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  return detect_type_change_1 (arg, base, TREE_TYPE (arg), call, jfunc, offset);
}
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, gimple call, struct ipa_jump_func *jfunc)
{
  tree comp_type;

  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  /* Only pointers to RECORD_TYPEs can carry a virtual table pointer.  */
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != RECORD_TYPE)
    return false;

  comp_type = TREE_TYPE (TREE_TYPE (arg));
  /* Build a zero-offset dereference of ARG to serve as both access and
     base.  */
  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_1 (arg, arg, comp_type, call, jfunc, 0);
}
697 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
698 boolean variable pointed to by DATA. */
700 static bool
701 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
702 void *data)
704 bool *b = (bool *) data;
705 *b = true;
706 return true;
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  PARM_AINFO is a pointer to a structure containing temporary
   information about the parameter; it may be NULL, in which case no caching
   takes place.  */

static bool
parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
			      gimple stmt, tree parm_load)
{
  bool modified = false;
  bitmap *visited_stmts;
  ao_ref refd;

  /* Cached negative answer from a previous walk.  */
  if (parm_ainfo && parm_ainfo->parm_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  /* We can cache visited statements only when parm_ainfo is available and when
     we are looking at a naked load of the whole parameter.  */
  if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
    visited_stmts = NULL;
  else
    visited_stmts = &parm_ainfo->parm_visited_statements;
  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
		      visited_stmts);
  if (parm_ainfo && modified)
    parm_ainfo->parm_modified = true;
  return !modified;
}
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  DESCRIPTORS describe the parameters;
   PARMS_AINFO may be NULL, disabling caching of modification results.  */

static int
load_from_unmodified_param (vec<ipa_param_descriptor_t> descriptors,
			    struct param_analysis_info *parms_ainfo,
			    gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
					: NULL, stmt, op1))
    return -1;

  return index;
}
/* Return true if memory reference REF loads data that are known to be
   unmodified in this function before reaching statement STMT.  PARM_AINFO, if
   non-NULL, is a pointer to a structure containing temporary information about
   PARM.  */

static bool
parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
			   gimple stmt, tree ref)
{
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (gimple_vuse (stmt));
  /* Cached negative answer from a previous walk.  */
  if (parm_ainfo && parm_ainfo->ref_modified)
    return false;

  ao_ref_init (&refd, ref);
  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
		      NULL);
  if (parm_ainfo && modified)
    parm_ainfo->ref_modified = true;
  return !modified;
}
792 /* Return true if the data pointed to by PARM is known to be unmodified in this
793 function before reaching call statement CALL into which it is passed.
794 PARM_AINFO is a pointer to a structure containing temporary information
795 about PARM. */
797 static bool
798 parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
799 gimple call, tree parm)
801 bool modified = false;
802 ao_ref refd;
804 /* It's unnecessary to calculate anything about memory contnets for a const
805 function because it is not goin to use it. But do not cache the result
806 either. Also, no such calculations for non-pointers. */
807 if (!gimple_vuse (call)
808 || !POINTER_TYPE_P (TREE_TYPE (parm)))
809 return false;
811 if (parm_ainfo->pt_modified)
812 return false;
814 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
815 walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
816 parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
817 if (modified)
818 parm_ainfo->pt_modified = true;
819 return !modified;
822 /* Return true if we can prove that OP is a memory reference loading unmodified
823 data from an aggregate passed as a parameter and if the aggregate is passed
824 by reference, that the alias type of the load corresponds to the type of the
825 formal parameter (so that we can rely on this type for TBAA in callers).
826 INFO and PARMS_AINFO describe parameters of the current function (but the
827 latter can be NULL), STMT is the load statement. If function returns true,
828 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
829 within the aggregate and whether it is a load from a value passed by
830 reference respectively. */
832 static bool
833 ipa_load_from_parm_agg_1 (vec<ipa_param_descriptor_t> descriptors,
834 struct param_analysis_info *parms_ainfo, gimple stmt,
835 tree op, int *index_p, HOST_WIDE_INT *offset_p,
836 bool *by_ref_p)
838 int index;
839 HOST_WIDE_INT size, max_size;
840 tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);
842 if (max_size == -1 || max_size != size || *offset_p < 0)
843 return false;
845 if (DECL_P (base))
847 int index = ipa_get_param_decl_index_1 (descriptors, base);
848 if (index >= 0
849 && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
850 : NULL, stmt, op))
852 *index_p = index;
853 *by_ref_p = false;
854 return true;
856 return false;
859 if (TREE_CODE (base) != MEM_REF
860 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
861 || !integer_zerop (TREE_OPERAND (base, 1)))
862 return false;
864 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
866 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
867 index = ipa_get_param_decl_index_1 (descriptors, parm);
869 else
871 /* This branch catches situations where a pointer parameter is not a
872 gimple register, for example:
874 void hip7(S*) (struct S * p)
876 void (*<T2e4>) (struct S *) D.1867;
877 struct S * p.1;
879 <bb 2>:
880 p.1_1 = p;
881 D.1867_2 = p.1_1->f;
882 D.1867_2 ();
883 gdp = &p;
886 gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
887 index = load_from_unmodified_param (descriptors, parms_ainfo, def);
890 if (index >= 0
891 && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
892 stmt, op))
894 *index_p = index;
895 *by_ref_p = true;
896 return true;
898 return false;
/* Just like the previous function, just without the param_analysis_info
   pointer, for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
			tree op, int *index_p, HOST_WIDE_INT *offset_p,
			bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (info->descriptors, NULL, stmt, op, index_p,
				   offset_p, by_ref_p);
}
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

   foo (int a)
   {
     int a.0;

     a.0_2 = a;
     bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function. E.g.

   foo (int a)
   {
     int D.2064;

     D.2064_4 = a.1(D) + 4;
     bar (D.2064_4);

   This case can also occur in combination of the previous one, e.g.:

   foo (int a, int z)
   {
     int a.0;
     int D.2064;

     a.0_3 = a;
     D.2064_4 = a.0_3 + 4;
     foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

   B::foo() (struct B * const this)
   {
     struct A * D.1845;

     D.1845_2 = &this_1(D)->D.1748;
     A::bar (D.1845_2);

   INFO is the structure describing individual parameters access different
   stages of IPA optimizations.  PARMS_AINFO contains the information that is
   only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_node_params *info,
				  struct param_analysis_info *parms_ainfo,
				  struct ipa_jump_func *jfunc,
				  gimple call, gimple stmt, tree name)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      /* Case 2 (possibly combined with case 1): RHS1 traces back to a
	 parameter either directly (default definition) or through an
	 unmodified load.  */
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (info->descriptors, parms_ainfo,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      /* Case 1: STMT itself is the load from the parameter.  */
      index = load_from_unmodified_param (info->descriptors, parms_ainfo, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
	{
	  /* Arithmetic pass-through requires an IPA-invariant second
	     operand and (except for comparisons) compatible types.  */
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  ipa_set_jf_arith_pass_through (jfunc, index, op2,
					 gimple_assign_rhs_code (stmt));
	}
      else if (gimple_assign_single_p (stmt)
	       && !detect_type_change_ssa (tc_ssa, call, jfunc))
	{
	  bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
						     call, tc_ssa);
	  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	}
      return;
    }

  /* Case 3: the argument is &<something inside a parameter object>.  */
  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).low * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed only in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0
      && !detect_type_change (op1, base, call, jfunc, offset))
    ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index,
			 parm_ref_data_pass_through_p (&parms_ainfo[index],
						       call, ssa));
}
1045 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1046 it looks like:
1048 iftmp.1_3 = &obj_2(D)->D.1762;
1050 The base of the MEM_REF must be a default definition SSA NAME of a
1051 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
1052 whole MEM_REF expression is returned and the offset calculated from any
1053 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1054 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1056 static tree
1057 get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
1059 HOST_WIDE_INT size, max_size;
1060 tree expr, parm, obj;
1062 if (!gimple_assign_single_p (assign))
1063 return NULL_TREE;
1064 expr = gimple_assign_rhs1 (assign);
1066 if (TREE_CODE (expr) != ADDR_EXPR)
1067 return NULL_TREE;
1068 expr = TREE_OPERAND (expr, 0);
1069 obj = expr;
1070 expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
/* Require a fixed-size MEM_REF at a non-negative bit offset.  */
1072 if (TREE_CODE (expr) != MEM_REF
1073 /* If this is a varying address, punt. */
1074 || max_size == -1
1075 || max_size != size
1076 || *offset < 0)
1077 return NULL_TREE;
/* The base must be the unmodified (default-definition) SSA name of a
   PARM_DECL.  */
1078 parm = TREE_OPERAND (expr, 0);
1079 if (TREE_CODE (parm) != SSA_NAME
1080 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1081 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1082 return NULL_TREE;
1084 *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
1085 *obj_p = obj;
1086 return expr;
1090 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1091 statement PHI, try to find out whether NAME is in fact a
1092 multiple-inheritance typecast from a descendant into an ancestor of a formal
1093 parameter and thus can be described by an ancestor jump function and if so,
1094 write the appropriate function into JFUNC.
1096 Essentially we want to match the following pattern:
1098 if (obj_2(D) != 0B)
1099 goto <bb 3>;
1100 else
1101 goto <bb 4>;
1103 <bb 3>:
1104 iftmp.1_3 = &obj_2(D)->D.1762;
1106 <bb 4>:
1107 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1108 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1109 return D.1879_6; */
1111 static void
1112 compute_complex_ancestor_jump_func (struct ipa_node_params *info,
1113 struct param_analysis_info *parms_ainfo,
1114 struct ipa_jump_func *jfunc,
1115 gimple call, gimple phi)
1117 HOST_WIDE_INT offset;
1118 gimple assign, cond;
1119 basic_block phi_bb, assign_bb, cond_bb;
1120 tree tmp, parm, expr, obj;
1121 int index, i;
1123 if (gimple_phi_num_args (phi) != 2)
1124 return;
/* One PHI argument must be the null pointer constant, the other the
   non-default-def SSA temporary holding the ancestor address.  */
1126 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1127 tmp = PHI_ARG_DEF (phi, 0);
1128 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1129 tmp = PHI_ARG_DEF (phi, 1);
1130 else
1131 return;
1132 if (TREE_CODE (tmp) != SSA_NAME
1133 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1134 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1135 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1136 return;
1138 assign = SSA_NAME_DEF_STMT (tmp);
1139 assign_bb = gimple_bb (assign);
1140 if (!single_pred_p (assign_bb))
1141 return;
1142 expr = get_ancestor_addr_info (assign, &obj, &offset);
1143 if (!expr)
1144 return;
1145 parm = TREE_OPERAND (expr, 0);
1146 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1147 gcc_assert (index >= 0);
/* The assignment must be guarded by a "parm != 0" test.  */
1149 cond_bb = single_pred (assign_bb);
1150 cond = last_stmt (cond_bb);
1151 if (!cond
1152 || gimple_code (cond) != GIMPLE_COND
1153 || gimple_cond_code (cond) != NE_EXPR
1154 || gimple_cond_lhs (cond) != parm
1155 || !integer_zerop (gimple_cond_rhs (cond)))
1156 return;
/* Both PHI predecessors must be exactly the guard block and the
   address-taking block, matching the diamond in the pattern above.  */
1158 phi_bb = gimple_bb (phi);
1159 for (i = 0; i < 2; i++)
1161 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1162 if (pred != assign_bb && pred != cond_bb)
1163 return;
1166 if (!detect_type_change (obj, expr, call, jfunc, offset))
1167 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index,
1168 parm_ref_data_pass_through_p (&parms_ainfo[index],
1169 call, parm));
1172 /* Given OP which is passed as an actual argument to a called function,
1173 determine if it is possible to construct a KNOWN_TYPE jump function for it
1174 and if so, create one and store it to JFUNC. */
1176 static void
1177 compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
1178 gimple call)
1180 HOST_WIDE_INT offset, size, max_size;
1181 tree base;
/* Only interesting when devirtualizing and OP is the address of a
   record.  */
1183 if (!flag_devirtualize
1184 || TREE_CODE (op) != ADDR_EXPR
1185 || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE)
1186 return;
/* The address must refer to a whole, fixed-size, automatic record so that
   its static type is also its dynamic type.  */
1188 op = TREE_OPERAND (op, 0);
1189 base = get_ref_base_and_extent (op, &offset, &size, &max_size);
1190 if (!DECL_P (base)
1191 || max_size == -1
1192 || max_size != size
1193 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
1194 || is_global_var (base))
1195 return;
/* Give up when the base type carries no virtual table info or when the
   dynamic type may change between here and the call.  */
1197 if (!TYPE_BINFO (TREE_TYPE (base))
1198 || detect_type_change (op, base, call, jfunc, offset))
1199 return;
1201 ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base), TREE_TYPE (op));
1204 /* Inspect the given TYPE and return true iff it has the same structure (the
1205 same number of fields of the same types) as a C++ member pointer. If
1206 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1207 corresponding fields there. */
1209 static bool
1210 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1212 tree fld;
1214 if (TREE_CODE (type) != RECORD_TYPE)
1215 return false;
/* First field: a pointer to a method with a constant offset.  */
1217 fld = TYPE_FIELDS (type);
1218 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1219 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1220 || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
1221 return false;
1223 if (method_ptr)
1224 *method_ptr = fld;
/* Second field: the delta, which must be of integral type.  The original
   code tested INTEGRAL_TYPE_P (fld) which, applied to a FIELD_DECL rather
   than its type, is always false and so validated nothing; check the
   field's type instead.  */
1226 fld = DECL_CHAIN (fld);
1227 if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
1228 || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
1229 return false;
1230 if (delta)
1231 *delta = fld;
/* There must be no further fields.  */
1233 if (DECL_CHAIN (fld))
1234 return false;
1236 return true;
1239 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1240 return the rhs of its defining statement. Otherwise return RHS as it
1241 is. */
1243 static inline tree
1244 get_ssa_def_if_simple_copy (tree rhs)
/* Follow chains of single-rhs copies until hitting a default definition
   or a non-copy statement.  */
1246 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1248 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1250 if (gimple_assign_single_p (def_stmt))
1251 rhs = gimple_assign_rhs1 (def_stmt);
1252 else
1253 break;
1255 return rhs;
1258 /* Simple linked list, describing known contents of an aggregate before a
1259 call.  The list is kept sorted by increasing offset. */
1261 struct ipa_known_agg_contents_list
1263 /* Offset and size of the described part of the aggregate. */
1264 HOST_WIDE_INT offset, size;
1265 /* Known constant value or NULL if the contents is known to be unknown. */
1266 tree constant;
1267 /* Pointer to the next structure in the list. */
1268 struct ipa_known_agg_contents_list *next;
1271 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1272 in ARG is filled in with constant values. ARG can either be an aggregate
1273 expression or a pointer to an aggregate. JFUNC is the jump function into
1274 which the constants are subsequently stored. */
1276 static void
1277 determine_known_aggregate_parts (gimple call, tree arg,
1278 struct ipa_jump_func *jfunc)
1280 struct ipa_known_agg_contents_list *list = NULL;
1281 int item_count = 0, const_count = 0;
1282 HOST_WIDE_INT arg_offset, arg_size;
1283 gimple_stmt_iterator gsi;
1284 tree arg_base;
1285 bool check_ref, by_ref;
1286 ao_ref r;
1288 /* The function operates in three stages. First, we prepare check_ref, r,
1289 arg_base and arg_offset based on what is actually passed as an actual
1290 argument. */
1292 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1294 by_ref = true;
1295 if (TREE_CODE (arg) == SSA_NAME)
1297 tree type_size;
1298 if (!host_integerp (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg))), 1))
1299 return;
1300 check_ref = true;
1301 arg_base = arg;
1302 arg_offset = 0;
1303 type_size = TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)));
1304 arg_size = tree_low_cst (type_size, 1);
1305 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1307 else if (TREE_CODE (arg) == ADDR_EXPR)
1309 HOST_WIDE_INT arg_max_size;
1311 arg = TREE_OPERAND (arg, 0);
1312 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1313 &arg_max_size);
1314 if (arg_max_size == -1
1315 || arg_max_size != arg_size
1316 || arg_offset < 0)
1317 return;
1318 if (DECL_P (arg_base))
1320 tree size;
1321 check_ref = false;
1322 size = build_int_cst (integer_type_node, arg_size);
1323 ao_ref_init_from_ptr_and_size (&r, arg_base, size);
1325 else
1326 return;
1328 else
1329 return;
1331 else
1333 HOST_WIDE_INT arg_max_size;
1335 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1337 by_ref = false;
1338 check_ref = false;
1339 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1340 &arg_max_size);
1341 if (arg_max_size == -1
1342 || arg_max_size != arg_size
1343 || arg_offset < 0)
1344 return;
1346 ao_ref_init (&r, arg);
1349 /* Second stage walks back the BB, looks at individual statements and as long
1350 as it is confident of how the statements affect contents of the
1351 aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
1352 describing it. */
1353 gsi = gsi_for_stmt (call);
1354 gsi_prev (&gsi);
1355 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1357 struct ipa_known_agg_contents_list *n, **p;
1358 gimple stmt = gsi_stmt (gsi);
1359 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1360 tree lhs, rhs, lhs_base;
1361 bool partial_overlap;
1363 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1364 continue;
1365 if (!gimple_assign_single_p (stmt))
1366 break;
1368 lhs = gimple_assign_lhs (stmt);
1369 rhs = gimple_assign_rhs1 (stmt);
1370 if (!is_gimple_reg_type (rhs)
1371 || TREE_CODE (lhs) == BIT_FIELD_REF
1372 || contains_bitfld_component_ref_p (lhs))
1373 break;
1375 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1376 &lhs_max_size);
1377 if (lhs_max_size == -1
1378 || lhs_max_size != lhs_size
1379 || (lhs_offset < arg_offset
1380 && lhs_offset + lhs_size > arg_offset)
1381 || (lhs_offset < arg_offset + arg_size
1382 && lhs_offset + lhs_size > arg_offset + arg_size))
1383 break;
1385 if (check_ref)
1387 if (TREE_CODE (lhs_base) != MEM_REF
1388 || TREE_OPERAND (lhs_base, 0) != arg_base
1389 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1390 break;
1392 else if (lhs_base != arg_base)
1394 if (DECL_P (lhs_base))
1395 continue;
1396 else
1397 break;
1400 if (lhs_offset + lhs_size < arg_offset
1401 || lhs_offset >= (arg_offset + arg_size))
1402 continue;
1404 partial_overlap = false;
1405 p = &list;
1406 while (*p && (*p)->offset < lhs_offset)
1408 if ((*p)->offset + (*p)->size > lhs_offset)
1410 partial_overlap = true;
1411 break;
1413 p = &(*p)->next;
1415 if (partial_overlap)
1416 break;
1417 if (*p && (*p)->offset < lhs_offset + lhs_size)
1419 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1420 /* We already know this value is subsequently overwritten with
1421 something else. */
1422 continue;
1423 else
1424 /* Otherwise this is a partial overlap which we cannot
1425 represent. */
1426 break;
1429 rhs = get_ssa_def_if_simple_copy (rhs);
1430 n = XALLOCA (struct ipa_known_agg_contents_list);
1431 n->size = lhs_size;
1432 n->offset = lhs_offset;
1433 if (is_gimple_ip_invariant (rhs))
1435 n->constant = rhs;
1436 const_count++;
1438 else
1439 n->constant = NULL_TREE;
1440 n->next = *p;
1441 *p = n;
1443 item_count++;
1444 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1445 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1446 break;
1449 /* Third stage just goes over the list and creates an appropriate vector of
1450 ipa_agg_jf_item structures out of it, of sourse only if there are
1451 any known constants to begin with. */
1453 if (const_count)
1455 jfunc->agg.by_ref = by_ref;
1456 vec_alloc (jfunc->agg.items, const_count);
1457 while (list)
1459 if (list->constant)
1461 struct ipa_agg_jf_item item;
1462 item.offset = list->offset - arg_offset;
1463 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1464 item.value = unshare_expr_without_location (list->constant);
1465 jfunc->agg.items->quick_push (item);
1467 list = list->next;
1472 /* Compute jump function for all arguments of callsite CS and insert the
1473 information in the jump_functions array in the ipa_edge_args corresponding
1474 to this callsite. */
1476 static void
1477 ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
1478 struct cgraph_edge *cs)
1480 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1481 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1482 gimple call = cs->call_stmt;
1483 int n, arg_num = gimple_call_num_args (call);
/* Nothing to do for argument-less calls or if jump functions were already
   computed for this edge.  */
1485 if (arg_num == 0 || args->jump_functions)
1486 return;
1487 vec_safe_grow_cleared (args->jump_functions, arg_num);
1489 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1490 return;
1492 for (n = 0; n < arg_num; n++)
1494 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1495 tree arg = gimple_call_arg (call, n);
1497 if (is_gimple_ip_invariant (arg))
1498 ipa_set_jf_constant (jfunc, arg, cs);
1499 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1500 && TREE_CODE (arg) == PARM_DECL)
1502 int index = ipa_get_param_decl_index (info, arg);
1504 gcc_assert (index >=0);
1505 /* Aggregate passed by value, check for pass-through, otherwise we
1506 will attempt to fill in aggregate contents later in this
1507 for cycle. */
1508 if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
1510 ipa_set_jf_simple_pass_through (jfunc, index, false);
1511 continue;
1514 else if (TREE_CODE (arg) == SSA_NAME)
1516 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1518 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1519 if (index >= 0
1520 && !detect_type_change_ssa (arg, call, jfunc))
1522 bool agg_p;
1523 agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
1524 call, arg);
1525 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1528 else
1530 gimple stmt = SSA_NAME_DEF_STMT (arg);
1531 if (is_gimple_assign (stmt))
1532 compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
1533 call, stmt, arg);
1534 else if (gimple_code (stmt) == GIMPLE_PHI)
1535 compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
1536 call, stmt);
1539 else
1540 compute_known_type_jump_func (arg, jfunc, call);
/* Unless the computed jump function already guarantees the aggregate
   contents are preserved, additionally try to record known constants
   stored into the (pointed-to) aggregate before the call.  */
1542 if ((jfunc->type != IPA_JF_PASS_THROUGH
1543 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1544 && (jfunc->type != IPA_JF_ANCESTOR
1545 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1546 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1547 || (POINTER_TYPE_P (TREE_TYPE (arg)))))
1548 determine_known_aggregate_parts (call, arg, jfunc);
1552 /* Compute jump functions for all edges - both direct and indirect - outgoing
1553 from NODE. Also count the actual arguments in the process. */
1555 static void
1556 ipa_compute_jump_functions (struct cgraph_node *node,
1557 struct param_analysis_info *parms_ainfo)
1559 struct cgraph_edge *cs;
1561 for (cs = node->callees; cs; cs = cs->next_callee)
1563 struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
1564 NULL);
1565 /* We do not need to bother analyzing calls to unknown
1566 functions unless they may become known during lto/whopr. */
1567 if (!callee->symbol.definition && !flag_lto)
1568 continue;
1569 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
/* Indirect edges are always analyzed; their eventual target is unknown by
   definition.  */
1572 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
1573 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
1576 /* If STMT looks like a statement loading a value from a member pointer formal
1577 parameter, return that parameter and store the offset of the field to
1578 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1579 might be clobbered). If USE_DELTA, then we look for a use of the delta
1580 field rather than the pfn. */
1582 static tree
1583 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
1584 HOST_WIDE_INT *offset_p)
1586 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1588 if (!gimple_assign_single_p (stmt))
1589 return NULL_TREE;
/* The load may be either a COMPONENT_REF (rec.field) or a plain MEM_REF
   at the field's byte offset; remember which form we saw.  */
1591 rhs = gimple_assign_rhs1 (stmt);
1592 if (TREE_CODE (rhs) == COMPONENT_REF)
1594 ref_field = TREE_OPERAND (rhs, 1);
1595 rhs = TREE_OPERAND (rhs, 0);
1597 else
1598 ref_field = NULL_TREE;
1599 if (TREE_CODE (rhs) != MEM_REF)
1600 return NULL_TREE;
1601 rec = TREE_OPERAND (rhs, 0);
1602 if (TREE_CODE (rec) != ADDR_EXPR)
1603 return NULL_TREE;
1604 rec = TREE_OPERAND (rec, 0);
1605 if (TREE_CODE (rec) != PARM_DECL
1606 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1607 return NULL_TREE;
1608 ref_offset = TREE_OPERAND (rhs, 1);
1610 if (use_delta)
1611 fld = delta_field;
1612 else
1613 fld = ptr_field;
1614 if (offset_p)
1615 *offset_p = int_bit_position (fld);
/* Verify the access really refers to the requested field: for a
   COMPONENT_REF the MEM_REF offset must be zero and the field must match;
   otherwise the MEM_REF offset must equal the field's byte position.  */
1617 if (ref_field)
1619 if (integer_nonzerop (ref_offset))
1620 return NULL_TREE;
1621 return ref_field == fld ? rec : NULL_TREE;
1623 else
1624 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1625 : NULL_TREE;
1628 /* Returns true iff T is an SSA_NAME defined by a statement (i.e. not a
1629 default definition of a parameter or variable). */
1630 static bool
1631 ipa_is_ssa_with_stmt_def (tree t)
1633 if (TREE_CODE (t) == SSA_NAME
1634 && !SSA_NAME_IS_DEFAULT_DEF (t))
1635 return true;
1636 else
1637 return false;
1640 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1641 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1642 indirect call graph edge. */
1644 static struct cgraph_edge *
1645 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
1647 struct cgraph_edge *cs;
1649 cs = cgraph_edge (node, stmt);
1650 cs->indirect_info->param_index = param_index;
/* Reset all other indirect-call info fields; callers refine them (offset,
   agg_contents, polymorphic, ...) after this returns.  */
1651 cs->indirect_info->offset = 0;
1652 cs->indirect_info->polymorphic = 0;
1653 cs->indirect_info->agg_contents = 0;
1654 cs->indirect_info->member_ptr = 0;
1655 return cs;
1658 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1659 (described by INFO). PARMS_AINFO is a pointer to a vector containing
1660 intermediate information about each formal parameter. Currently it checks
1661 whether the call calls a pointer that is a formal parameter and if so, the
1662 parameter is marked with the called flag and an indirect call graph edge
1663 describing the call is created. This is very simple for ordinary pointers
1664 represented in SSA but not-so-nice when it comes to member pointers. The
1665 ugly part of this function does nothing more than trying to match the
1666 pattern of such a call. An example of such a pattern is the gimple dump
1667 below, the call is on the last line:
1669 <bb 2>:
1670 f$__delta_5 = f.__delta;
1671 f$__pfn_24 = f.__pfn;
1674 <bb 2>:
1675 f$__delta_5 = MEM[(struct *)&f];
1676 f$__pfn_24 = MEM[(struct *)&f + 4B];
1678 and a few lines below:
1680 <bb 5>
1681 D.2496_3 = (int) f$__pfn_24;
1682 D.2497_4 = D.2496_3 & 1;
1683 if (D.2497_4 != 0)
1684 goto <bb 3>;
1685 else
1686 goto <bb 4>;
1688 <bb 6>:
1689 D.2500_7 = (unsigned int) f$__delta_5;
1690 D.2501_8 = &S + D.2500_7;
1691 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1692 D.2503_10 = *D.2502_9;
1693 D.2504_12 = f$__pfn_24 + -1;
1694 D.2505_13 = (unsigned int) D.2504_12;
1695 D.2506_14 = D.2503_10 + D.2505_13;
1696 D.2507_15 = *D.2506_14;
1697 iftmp.11_16 = (String:: *) D.2507_15;
1699 <bb 7>:
1700 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1701 D.2500_19 = (unsigned int) f$__delta_5;
1702 D.2508_20 = &S + D.2500_19;
1703 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1705 Such patterns are results of simple calls to a member pointer:
1707 int doprinting (int (MyString::* f)(int) const)
1709 MyString S ("somestring");
1711 return (S.*f)(4);
1714 Moreover, the function also looks for called pointers loaded from aggregates
1715 passed by value or reference. */
1717 static void
1718 ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1719 struct ipa_node_params *info,
1720 struct param_analysis_info *parms_ainfo,
1721 gimple call, tree target)
1723 gimple def;
1724 tree n1, n2;
1725 gimple d1, d2;
1726 tree rec, rec2, cond;
1727 gimple branch;
1728 int index;
1729 basic_block bb, virt_bb, join;
1730 HOST_WIDE_INT offset;
1731 bool by_ref;
/* Easy case: the call target is directly an unmodified pointer
   parameter.  */
1733 if (SSA_NAME_IS_DEFAULT_DEF (target))
1735 tree var = SSA_NAME_VAR (target);
1736 index = ipa_get_param_decl_index (info, var);
1737 if (index >= 0)
1738 ipa_note_param_call (node, index, call);
1739 return;
/* Next: the target is loaded from an aggregate passed (by value or
   reference) in a parameter.  */
1742 def = SSA_NAME_DEF_STMT (target);
1743 if (gimple_assign_single_p (def)
1744 && ipa_load_from_parm_agg_1 (info->descriptors, parms_ainfo, def,
1745 gimple_assign_rhs1 (def), &index, &offset,
1746 &by_ref))
1748 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1749 cs->indirect_info->offset = offset;
1750 cs->indirect_info->agg_contents = 1;
1751 cs->indirect_info->by_ref = by_ref;
1752 return;
1755 /* Now we need to try to match the complex pattern of calling a member
1756 pointer. */
1757 if (gimple_code (def) != GIMPLE_PHI
1758 || gimple_phi_num_args (def) != 2
1759 || !POINTER_TYPE_P (TREE_TYPE (target))
1760 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1761 return;
1763 /* First, we need to check whether one of these is a load from a member
1764 pointer that is a parameter to this function. */
1765 n1 = PHI_ARG_DEF (def, 0);
1766 n2 = PHI_ARG_DEF (def, 1);
1767 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1768 return;
1769 d1 = SSA_NAME_DEF_STMT (n1);
1770 d2 = SSA_NAME_DEF_STMT (n2);
1772 join = gimple_bb (def);
1773 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
1775 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
1776 return;
1778 bb = EDGE_PRED (join, 0)->src;
1779 virt_bb = gimple_bb (d2);
1781 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
1783 bb = EDGE_PRED (join, 1)->src;
1784 virt_bb = gimple_bb (d1);
1786 else
1787 return;
1789 /* Second, we need to check that the basic blocks are laid out in the way
1790 corresponding to the pattern. */
1792 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1793 || single_pred (virt_bb) != bb
1794 || single_succ (virt_bb) != join)
1795 return;
1797 /* Third, let's see that the branching is done depending on the least
1798 significant bit of the pfn. */
1800 branch = last_stmt (bb);
1801 if (!branch || gimple_code (branch) != GIMPLE_COND)
1802 return;
1804 if ((gimple_cond_code (branch) != NE_EXPR
1805 && gimple_cond_code (branch) != EQ_EXPR)
1806 || !integer_zerop (gimple_cond_rhs (branch)))
1807 return;
1809 cond = gimple_cond_lhs (branch);
1810 if (!ipa_is_ssa_with_stmt_def (cond))
1811 return;
/* The condition operand must come from "pfn & 1", possibly through a
   conversion.  */
1813 def = SSA_NAME_DEF_STMT (cond);
1814 if (!is_gimple_assign (def)
1815 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1816 || !integer_onep (gimple_assign_rhs2 (def)))
1817 return;
1819 cond = gimple_assign_rhs1 (def);
1820 if (!ipa_is_ssa_with_stmt_def (cond))
1821 return;
1823 def = SSA_NAME_DEF_STMT (cond);
1825 if (is_gimple_assign (def)
1826 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
1828 cond = gimple_assign_rhs1 (def);
1829 if (!ipa_is_ssa_with_stmt_def (cond))
1830 return;
1831 def = SSA_NAME_DEF_STMT (cond);
/* Depending on the target's member-pointer representation, the tested
   value is either the pfn or the delta field; it must be loaded from the
   same parameter record as before.  */
1834 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1835 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1836 == ptrmemfunc_vbit_in_delta),
1837 NULL);
1838 if (rec != rec2)
1839 return;
1841 index = ipa_get_param_decl_index (info, rec);
1842 if (index >= 0
1843 && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
1845 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1846 cs->indirect_info->offset = offset;
1847 cs->indirect_info->agg_contents = 1;
1848 cs->indirect_info->member_ptr = 1;
1851 return;
1854 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1855 object referenced in the expression is a formal parameter of the caller
1856 (described by INFO), create a call note for the statement. */
1858 static void
1859 ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1860 struct ipa_node_params *info, gimple call,
1861 tree target)
1863 struct cgraph_edge *cs;
1864 struct cgraph_indirect_call_info *ii;
1865 struct ipa_jump_func jfunc;
1866 tree obj = OBJ_TYPE_REF_OBJECT (target);
1867 int index;
1868 HOST_WIDE_INT anc_offset;
1870 if (!flag_devirtualize)
1871 return;
1873 if (TREE_CODE (obj) != SSA_NAME)
1874 return;
/* OBJ is either an unmodified pointer parameter itself, or the address of
   an ancestor sub-object within one (computed via
   get_ancestor_addr_info); in both cases the dynamic type must be stable
   up to the call.  */
1876 if (SSA_NAME_IS_DEFAULT_DEF (obj))
1878 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
1879 return;
1881 anc_offset = 0;
1882 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
1883 gcc_assert (index >= 0);
1884 if (detect_type_change_ssa (obj, call, &jfunc))
1885 return;
1887 else
1889 gimple stmt = SSA_NAME_DEF_STMT (obj);
1890 tree expr;
1892 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
1893 if (!expr)
1894 return;
1895 index = ipa_get_param_decl_index (info,
1896 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
1897 gcc_assert (index >= 0);
1898 if (detect_type_change (obj, expr, call, &jfunc, anc_offset))
1899 return;
/* Record a polymorphic indirect call on parameter INDEX with the OTR
   token and type taken from the OBJ_TYPE_REF.  */
1902 cs = ipa_note_param_call (node, index, call);
1903 ii = cs->indirect_info;
1904 ii->offset = anc_offset;
1905 ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
1906 ii->otr_type = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (target)));
1907 ii->polymorphic = 1;
1910 /* Analyze a call statement CALL whether and how it utilizes formal parameters
1911 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
1912 containing intermediate information about each formal parameter. */
1914 static void
1915 ipa_analyze_call_uses (struct cgraph_node *node,
1916 struct ipa_node_params *info,
1917 struct param_analysis_info *parms_ainfo, gimple call)
1919 tree target = gimple_call_fn (call);
/* TARGET is NULL for internal calls; nothing to analyze then.  Dispatch
   to the indirect-pointer or virtual-call analyzer based on its form.  */
1921 if (!target)
1922 return;
1923 if (TREE_CODE (target) == SSA_NAME)
1924 ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
1925 else if (TREE_CODE (target) == OBJ_TYPE_REF)
1926 ipa_analyze_virtual_call_uses (node, info, call, target);
1930 /* Analyze the call statement STMT with respect to formal parameters (described
1931 in INFO) of caller given by NODE. Currently it only checks whether formal
1932 parameters are called. PARMS_AINFO is a pointer to a vector containing
1933 intermediate information about each formal parameter. */
1935 static void
1936 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
1937 struct param_analysis_info *parms_ainfo, gimple stmt)
/* Non-call statements are handled separately by the load/store walker in
   ipa_analyze_params_uses.  */
1939 if (is_gimple_call (stmt))
1940 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
1943 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
1944 If OP is a parameter declaration, mark it as used in the info structure
1945 passed in DATA.  Always returns false so the walk continues. */
1947 static bool
1948 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
1949 tree op, void *data)
1951 struct ipa_node_params *info = (struct ipa_node_params *) data;
1953 op = get_base_address (op);
1954 if (op
1955 && TREE_CODE (op) == PARM_DECL)
1957 int index = ipa_get_param_decl_index (info, op);
1958 gcc_assert (index >= 0);
1959 ipa_set_param_used (info, index, true);
1962 return false;
1965 /* Scan the function body of NODE and inspect the uses of formal parameters.
1966 Store the findings in various structures of the associated ipa_node_params
1967 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
1968 vector containing intermediate information about each formal parameter. */
1970 static void
1971 ipa_analyze_params_uses (struct cgraph_node *node,
1972 struct param_analysis_info *parms_ainfo)
1974 tree decl = node->symbol.decl;
1975 basic_block bb;
1976 struct function *func;
1977 gimple_stmt_iterator gsi;
1978 struct ipa_node_params *info = IPA_NODE_REF (node);
1979 int i;
1981 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
1982 return;
1984 info->uses_analysis_done = 1;
/* When analysis is forbidden (e.g. by function attributes), conservatively
   mark every parameter as used with undescribed uses.  */
1985 if (ipa_func_spec_opts_forbid_analysis_p (node))
1987 for (i = 0; i < ipa_get_param_count (info); i++)
1989 ipa_set_param_used (info, i, true);
1990 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
1992 return;
1995 for (i = 0; i < ipa_get_param_count (info); i++)
1997 tree parm = ipa_get_param (info, i);
1998 int controlled_uses = 0;
2000 /* For SSA regs see if parameter is used. For non-SSA we compute
2001 the flag during modification analysis. */
2002 if (is_gimple_reg (parm))
2004 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->symbol.decl),
2005 parm);
2006 if (ddef && !has_zero_uses (ddef))
2008 imm_use_iterator imm_iter;
2009 use_operand_p use_p;
2011 ipa_set_param_used (info, i, true);
/* Count the uses; any use other than as a call argument makes the
   parameter's uses undescribable.  */
2012 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2013 if (!is_gimple_call (USE_STMT (use_p)))
2015 controlled_uses = IPA_UNDESCRIBED_USE;
2016 break;
2018 else
2019 controlled_uses++;
2021 else
2022 controlled_uses = 0;
2024 else
2025 controlled_uses = IPA_UNDESCRIBED_USE;
2026 ipa_set_controlled_uses (info, i, controlled_uses);
/* Walk all statements and PHIs, marking non-SSA parameter references and
   analyzing calls.  */
2029 func = DECL_STRUCT_FUNCTION (decl);
2030 FOR_EACH_BB_FN (bb, func)
2032 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2034 gimple stmt = gsi_stmt (gsi);
2036 if (is_gimple_debug (stmt))
2037 continue;
2039 ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
2040 walk_stmt_load_store_addr_ops (stmt, info,
2041 visit_ref_for_mod_analysis,
2042 visit_ref_for_mod_analysis,
2043 visit_ref_for_mod_analysis);
2045 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2046 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
2047 visit_ref_for_mod_analysis,
2048 visit_ref_for_mod_analysis,
2049 visit_ref_for_mod_analysis);
2053 /* Free stuff in PARMS_AINFO, assume there are PARAM_COUNT parameters.
   Only the visited-statement bitmaps are heap-allocated; the array itself
   is owned by the caller. */
2055 static void
2056 free_parms_ainfo (struct param_analysis_info *parms_ainfo, int param_count)
2058 int i;
2060 for (i = 0; i < param_count; i++)
2062 if (parms_ainfo[i].parm_visited_statements)
2063 BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
2064 if (parms_ainfo[i].pt_visited_statements)
2065 BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
2069 /* Initialize the array describing properties of formal parameters
2070 of NODE, analyze their uses and compute jump functions associated
2071 with actual arguments of calls from within NODE. */
2073 void
2074 ipa_analyze_node (struct cgraph_node *node)
2076 struct ipa_node_params *info;
2077 struct param_analysis_info *parms_ainfo;
2078 int param_count;
2080 ipa_check_create_node_params ();
2081 ipa_check_create_edge_args ();
2082 info = IPA_NODE_REF (node);
2083 push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
2084 ipa_initialize_node_params (node);
/* Scratch per-parameter info lives on the stack for the duration of this
   analysis and is zero-initialized.  */
2086 param_count = ipa_get_param_count (info);
2087 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
2088 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
2090 ipa_analyze_params_uses (node, parms_ainfo);
2091 ipa_compute_jump_functions (node, parms_ainfo);
2093 free_parms_ainfo (parms_ainfo, param_count);
2094 pop_cfun ();
2097 /* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF
2098 attempt a type-based devirtualization. If successful, return the
2099 target function declaration, otherwise return NULL. */
2101 tree
2102 ipa_intraprocedural_devirtualization (gimple call)
2104 tree binfo, token, fndecl;
2105 struct ipa_jump_func jfunc;
2106 tree otr = gimple_call_fn (call);
/* Try to determine the static type of the object the OBJ_TYPE_REF is
   invoked on; without a KNOWN_TYPE jump function we cannot resolve the
   virtual table entry.  */
2108 jfunc.type = IPA_JF_UNKNOWN;
2109 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
2110 call);
2111 if (jfunc.type != IPA_JF_KNOWN_TYPE)
2112 return NULL_TREE;
2113 binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
2114 if (!binfo)
2115 return NULL_TREE;
/* Look the method up in the binfo's virtual table by the OTR token.  */
2116 token = OBJ_TYPE_REF_TOKEN (otr);
2117 fndecl = gimple_get_virt_method_for_binfo (tree_low_cst (token, 1),
2118 binfo);
2119 return fndecl;
2122 /* Update the jump function DST when the call graph edge corresponding to SRC
2123 is being inlined, knowing that DST is of type ancestor and src of known
2124 type. */
2126 static void
2127 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2128 struct ipa_jump_func *dst)
2130 HOST_WIDE_INT combined_offset;
2131 tree combined_type;
/* The resulting known-type function describes the ancestor sub-object:
   offsets add up, and the component type comes from the ancestor jf.  */
2133 combined_offset = ipa_get_jf_known_type_offset (src)
2134 + ipa_get_jf_ancestor_offset (dst);
2135 combined_type = ipa_get_jf_ancestor_type (dst);
2137 ipa_set_jf_known_type (dst, combined_offset,
2138 ipa_get_jf_known_type_base_type (src),
2139 combined_type);
2142 /* Update the jump functions associated with call graph edge E when the call
2143 graph edge CS is being inlined, assuming that E->caller is already (possibly
2144 indirectly) inlined into CS->callee and that E has not been inlined. */
2146 static void
2147 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2148 struct cgraph_edge *e)
2150 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2151 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2152 int count = ipa_get_cs_argument_count (args);
2153 int i;
2155 for (i = 0; i < count; i++)
2157 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
/* Only ancestor and pass-through functions refer to formal parameters
   of the now-inlined callee and thus need composing with the jump
   functions of CS; everything else is left untouched.  */
2159 if (dst->type == IPA_JF_ANCESTOR)
2161 struct ipa_jump_func *src;
2162 int dst_fid = dst->value.ancestor.formal_id;
2164 /* Variable number of arguments can cause havoc if we try to access
2165 one that does not exist in the inlined edge. So make sure we
2166 don't. */
2167 if (dst_fid >= ipa_get_cs_argument_count (top))
2169 dst->type = IPA_JF_UNKNOWN;
2170 continue;
2173 src = ipa_get_ith_jump_func (top, dst_fid);
2175 if (src->agg.items
2176 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2178 struct ipa_agg_jf_item *item;
2179 int j;
2181 /* Currently we do not produce clobber aggregate jump functions,
2182 replace with merging when we do. */
2183 gcc_assert (!dst->agg.items);
/* Aggregate items are re-based by the ancestor offset.  */
2185 dst->agg.items = vec_safe_copy (src->agg.items);
2186 dst->agg.by_ref = src->agg.by_ref;
2187 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2188 item->offset -= dst->value.ancestor.offset;
2191 if (src->type == IPA_JF_KNOWN_TYPE)
2192 combine_known_type_and_ancestor_jfs (src, dst);
2193 else if (src->type == IPA_JF_PASS_THROUGH
2194 && src->value.pass_through.operation == NOP_EXPR)
2196 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2197 dst->value.ancestor.agg_preserved &=
2198 src->value.pass_through.agg_preserved;
2200 else if (src->type == IPA_JF_ANCESTOR)
2202 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2203 dst->value.ancestor.offset += src->value.ancestor.offset;
2204 dst->value.ancestor.agg_preserved &=
2205 src->value.ancestor.agg_preserved;
2207 else
2208 dst->type = IPA_JF_UNKNOWN;
2210 else if (dst->type == IPA_JF_PASS_THROUGH)
2212 struct ipa_jump_func *src;
2213 /* We must check range due to calls with variable number of arguments
2214 and we cannot combine jump functions with operations. */
2215 if (dst->value.pass_through.operation == NOP_EXPR
2216 && (dst->value.pass_through.formal_id
2217 < ipa_get_cs_argument_count (top)))
2219 bool agg_p;
2220 int dst_fid = dst->value.pass_through.formal_id;
2221 src = ipa_get_ith_jump_func (top, dst_fid);
/* agg_p must be saved before DST's value is overwritten by SRC's.  */
2222 agg_p = dst->value.pass_through.agg_preserved;
2224 dst->type = src->type;
2225 dst->value = src->value;
2227 if (src->agg.items
2228 && (agg_p || !src->agg.by_ref))
2230 /* Currently we do not produce clobber aggregate jump
2231 functions, replace with merging when we do. */
2232 gcc_assert (!dst->agg.items);
2234 dst->agg.by_ref = src->agg.by_ref;
2235 dst->agg.items = vec_safe_copy (src->agg.items);
2238 if (!agg_p)
2240 if (dst->type == IPA_JF_PASS_THROUGH)
2241 dst->value.pass_through.agg_preserved = false;
2242 else if (dst->type == IPA_JF_ANCESTOR)
2243 dst->value.ancestor.agg_preserved = false;
2246 else
2247 dst->type = IPA_JF_UNKNOWN;
2252 /* If TARGET is an addr_expr of a function declaration, make it the destination
2253 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2255 struct cgraph_edge *
2256 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
2258 struct cgraph_node *callee;
2259 struct inline_edge_summary *es = inline_edge_summary (ie);
2260 bool unreachable = false;
2262 if (TREE_CODE (target) == ADDR_EXPR)
2263 target = TREE_OPERAND (target, 0);
2264 if (TREE_CODE (target) != FUNCTION_DECL)
2266 target = canonicalize_constructor_val (target, NULL);
2267 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2269 if (ie->indirect_info->member_ptr)
2270 /* Member pointer call that goes through a VMT lookup. */
2271 return NULL;
/* The analyzed target is provably not a function; redirect the call
   to builtin_unreachable so the optimizers can exploit that.  */
2273 if (dump_file)
2274 fprintf (dump_file, "ipa-prop: Discovered direct call to non-function"
2275 " in %s/%i, making it unreachable.\n",
2276 cgraph_node_name (ie->caller), ie->caller->symbol.order);
2277 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2278 callee = cgraph_get_create_node (target);
2279 unreachable = true;
2281 else
2282 callee = cgraph_get_node (target);
2284 else
2285 callee = cgraph_get_node (target);
2287 /* Because may-edges are not explicitly represented and vtable may be external,
2288 we may create the first reference to the object in the unit. */
2289 if (!callee || callee->global.inlined_to)
2292 /* We are better to ensure we can refer to it.
2293 In the case of static functions we are out of luck, since we already
2294 removed its body. In the case of public functions we may or may
2295 not introduce the reference. */
2296 if (!canonicalize_constructor_val (target, NULL)
2297 || !TREE_PUBLIC (target))
2299 if (dump_file)
2300 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2301 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2302 xstrdup (cgraph_node_name (ie->caller)),
2303 ie->caller->symbol.order,
2304 xstrdup (cgraph_node_name (ie->callee)),
2305 ie->callee->symbol.order);
2306 return NULL;
2308 callee = cgraph_get_create_real_symbol_node (target);
2310 ipa_check_create_node_params ();
2312 /* We can not make edges to inline clones. It is bug that someone removed
2313 the cgraph node too early. */
2314 gcc_assert (!callee->global.inlined_to);
2316 if (dump_file && !unreachable)
2318 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
2319 "(%s/%i -> %s/%i), for stmt ",
2320 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2321 xstrdup (cgraph_node_name (ie->caller)),
2322 ie->caller->symbol.order,
2323 xstrdup (cgraph_node_name (callee)),
2324 callee->symbol.order);
2325 if (ie->call_stmt)
2326 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2327 else
2328 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
/* Redirecting makes the call cheaper; update the size/time estimates
   by the indirect-vs-direct call cost difference.  */
2330 ie = cgraph_make_edge_direct (ie, callee);
2331 es = inline_edge_summary (ie);
2332 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2333 - eni_size_weights.call_cost);
2334 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2335 - eni_time_weights.call_cost);
2337 return ie;
2340 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2341 return NULL if there is not any. BY_REF specifies whether the value has to
2342 be passed by reference or by value. */
2344 tree
2345 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2346 HOST_WIDE_INT offset, bool by_ref)
2348 struct ipa_agg_jf_item *item;
2349 int i;
/* A by-value description cannot satisfy a by-reference request and
   vice versa.  */
2351 if (by_ref != agg->by_ref)
2352 return NULL;
/* Linear scan; the item vector is expected to be small.  */
2354 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2355 if (item->offset == offset)
2357 /* Currently we do not have clobber values, return NULL for them once
2358 we do. */
2359 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2360 return item->value;
2362 return NULL;
2365 /* Remove a reference to SYMBOL from the list of references of a node given by
2366 reference description RDESC. */
2368 static void
2369 remove_described_reference (symtab_node symbol, struct ipa_cst_ref_desc *rdesc)
2371 struct ipa_ref *to_del;
2372 struct cgraph_edge *origin;
/* The reference lives on the caller of the edge recorded in RDESC; it
   must exist, otherwise the refcounting got out of sync.  */
2374 origin = rdesc->cs;
2375 to_del = ipa_find_reference ((symtab_node) origin->caller, symbol,
2376 origin->call_stmt, origin->lto_stmt_uid);
2377 gcc_assert (to_del);
2378 ipa_remove_reference (to_del);
2379 if (dump_file)
2380 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2381 xstrdup (cgraph_node_name (origin->caller)),
2382 origin->caller->symbol.order, xstrdup (symtab_node_name (symbol)));
2385 /* If JFUNC has a reference description with refcount different from
2386 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2387 NULL. JFUNC must be a constant jump function. */
2389 static struct ipa_cst_ref_desc *
2390 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2392 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2393 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2394 return rdesc;
2395 else
2396 return NULL;
2399 /* Try to find a destination for indirect edge IE that corresponds to a simple
2400 call or a call of a member function pointer and where the destination is a
2401 pointer formal parameter described by jump function JFUNC. If it can be
2402 determined, return the newly direct edge, otherwise return NULL.
2403 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2405 static struct cgraph_edge *
2406 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2407 struct ipa_jump_func *jfunc,
2408 struct ipa_node_params *new_root_info)
2410 struct cgraph_edge *cs;
2411 tree target;
2412 bool agg_contents = ie->indirect_info->agg_contents;
2413 bool speculative = ie->speculative;
2414 struct ipa_cst_ref_desc *rdesc;
/* The target either comes from aggregate contents passed in a
   parameter or from the scalar value of the parameter itself.  */
2416 if (ie->indirect_info->agg_contents)
2417 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2418 ie->indirect_info->offset,
2419 ie->indirect_info->by_ref);
2420 else
2421 target = ipa_value_from_jfunc (new_root_info, jfunc);
2422 if (!target)
2423 return NULL;
2424 cs = ipa_make_edge_direct_to_target (ie, target);
2426 /* FIXME: speculative edges can be handled. */
2427 if (cs && !agg_contents && !speculative
2428 && jfunc->type == IPA_JF_CONST
2429 && (rdesc = jfunc_rdesc_usable (jfunc))
2430 && --rdesc->refcount == 0)
2431 remove_described_reference ((symtab_node) cs->callee, rdesc);
2433 return cs;
2436 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2437 call based on a formal parameter which is described by jump function JFUNC
2438 and if it can be determined, make it direct and return the direct edge.
2439 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
2440 are relative to. */
2442 static struct cgraph_edge *
2443 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2444 struct ipa_jump_func *jfunc,
2445 struct ipa_node_params *new_root_info)
2447 tree binfo, target;
2449 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
2451 if (!binfo)
2452 return NULL;
/* The jump function may produce a constant (e.g. an address of a
   variable with a known vtable) rather than a BINFO; extract one.  */
2454 if (TREE_CODE (binfo) != TREE_BINFO)
2456 binfo = gimple_extract_devirt_binfo_from_cst (binfo);
2457 if (!binfo)
2458 return NULL;
/* Walk to the sub-binfo at the call's offset and look the virtual
   method up by its OBJ_TYPE_REF token.  */
2461 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
2462 ie->indirect_info->otr_type);
2463 if (binfo)
2464 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2465 binfo);
2466 else
2467 return NULL;
2469 if (target)
2470 return ipa_make_edge_direct_to_target (ie, target);
2471 else
2472 return NULL;
2475 /* Update the param called notes associated with NODE when CS is being inlined,
2476 assuming NODE is (potentially indirectly) inlined into CS->callee.
2477 Moreover, if the callee is discovered to be constant, create a new cgraph
2478 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2479 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
2481 static bool
2482 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2483 struct cgraph_node *node,
2484 vec<cgraph_edge_p> *new_edges)
2486 struct ipa_edge_args *top;
2487 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
2488 struct ipa_node_params *new_root_info;
2489 bool res = false;
2491 ipa_check_create_edge_args ();
2492 top = IPA_EDGE_REF (cs);
/* Lattices of an inline clone are relative to the root of the inline
   tree, not to the clone itself.  */
2493 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
2494 ? cs->caller->global.inlined_to
2495 : cs->caller);
/* next_ie is fetched up front because devirtualization below may
   remove IE from the indirect call list.  */
2497 for (ie = node->indirect_calls; ie; ie = next_ie)
2499 struct cgraph_indirect_call_info *ici = ie->indirect_info;
2500 struct ipa_jump_func *jfunc;
2501 int param_index;
2503 next_ie = ie->next_callee;
2505 if (ici->param_index == -1)
2506 continue;
2508 /* We must check range due to calls with variable number of arguments: */
2509 if (ici->param_index >= ipa_get_cs_argument_count (top))
2511 ici->param_index = -1;
2512 continue;
2515 param_index = ici->param_index;
2516 jfunc = ipa_get_ith_jump_func (top, param_index);
2518 if (!flag_indirect_inlining)
2519 new_direct_edge = NULL;
2520 else if (ici->polymorphic)
2521 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
2522 new_root_info);
2523 else
2524 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
2525 new_root_info);
2526 /* If speculation was removed, then we need to do nothing. */
2527 if (new_direct_edge && new_direct_edge != ie)
2529 new_direct_edge->indirect_inlining_edge = 1;
/* Making the edge direct may have reallocated the args vector;
   refresh TOP before the next iteration.  */
2530 top = IPA_EDGE_REF (cs);
2531 res = true;
2533 else if (new_direct_edge)
2535 new_direct_edge->indirect_inlining_edge = 1;
2536 if (new_direct_edge->call_stmt)
2537 new_direct_edge->call_stmt_cannot_inline_p
2538 = !gimple_check_call_matching_types (
2539 new_direct_edge->call_stmt,
2540 new_direct_edge->callee->symbol.decl, false);
2541 if (new_edges)
2543 new_edges->safe_push (new_direct_edge);
2544 res = true;
2546 top = IPA_EDGE_REF (cs);
2548 else if (jfunc->type == IPA_JF_PASS_THROUGH
2549 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2551 if (ici->agg_contents
2552 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
2553 ici->param_index = -1;
2554 else
2555 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
2557 else if (jfunc->type == IPA_JF_ANCESTOR)
2559 if (ici->agg_contents
2560 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
2561 ici->param_index = -1;
2562 else
2564 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
2565 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
2568 else
2569 /* Either we can find a destination for this edge now or never. */
2570 ici->param_index = -1;
2573 return res;
2576 /* Recursively traverse subtree of NODE (including node) made of inlined
2577 cgraph_edges when CS has been inlined and invoke
2578 update_indirect_edges_after_inlining on all nodes and
2579 update_jump_functions_after_inlining on all non-inlined edges that lead out
2580 of this subtree. Newly discovered indirect edges will be added to
2581 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
2582 created. */
2584 static bool
2585 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
2586 struct cgraph_node *node,
2587 vec<cgraph_edge_p> *new_edges)
2589 struct cgraph_edge *e;
2590 bool res;
2592 res = update_indirect_edges_after_inlining (cs, node, new_edges);
/* Recurse into already-inlined callees; for edges leaving the inline
   tree only the jump functions need updating.  */
2594 for (e = node->callees; e; e = e->next_callee)
2595 if (!e->inline_failed)
2596 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
2597 else
2598 update_jump_functions_after_inlining (cs, e);
2599 for (e = node->indirect_calls; e; e = e->next_callee)
2600 update_jump_functions_after_inlining (cs, e);
2602 return res;
2605 /* Combine two controlled uses counts as done during inlining.  An unknown
   count on either side makes the result unknown; otherwise one use (the
   inlined call itself) disappears, hence the minus one. */
2607 static int
2608 combine_controlled_uses_counters (int c, int d)
2610 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
2611 return IPA_UNDESCRIBED_USE;
2612 else
2613 return c + d - 1;
2616 /* Propagate number of controlled users from CS->callee to the new root of the
2617 tree of inlined nodes. */
2619 static void
2620 propagate_controlled_uses (struct cgraph_edge *cs)
2622 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
2623 struct cgraph_node *new_root = cs->caller->global.inlined_to
2624 ? cs->caller->global.inlined_to : cs->caller;
2625 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
2626 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
2627 int count, i;
2629 count = MIN (ipa_get_cs_argument_count (args),
2630 ipa_get_param_count (old_root_info));
2631 for (i = 0; i < count; i++)
2633 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2634 struct ipa_cst_ref_desc *rdesc;
2636 if (jf->type == IPA_JF_PASS_THROUGH)
2638 int src_idx, c, d;
2639 src_idx = ipa_get_jf_pass_through_formal_id (jf);
2640 c = ipa_get_controlled_uses (new_root_info, src_idx);
2641 d = ipa_get_controlled_uses (old_root_info, i);
2643 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
2644 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
2645 c = combine_controlled_uses_counters (c, d);
2646 ipa_set_controlled_uses (new_root_info, src_idx, c);
/* If the combined count drops to zero in an IPA-CP clone, the
   cloning-created reference to the known function value can go.  */
2647 if (c == 0 && new_root_info->ipcp_orig_node)
2649 struct cgraph_node *n;
2650 struct ipa_ref *ref;
2651 tree t = new_root_info->known_vals[src_idx];
2653 if (t && TREE_CODE (t) == ADDR_EXPR
2654 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
2655 && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
2656 && (ref = ipa_find_reference ((symtab_node) new_root,
2657 (symtab_node) n, NULL, 0)))
2659 if (dump_file)
2660 fprintf (dump_file, "ipa-prop: Removing cloning-created "
2661 "reference from %s/%i to %s/%i.\n",
2662 xstrdup (cgraph_node_name (new_root)),
2663 new_root->symbol.order,
2664 xstrdup (cgraph_node_name (n)), n->symbol.order);
2665 ipa_remove_reference (ref);
2669 else if (jf->type == IPA_JF_CONST
2670 && (rdesc = jfunc_rdesc_usable (jf)))
2672 int d = ipa_get_controlled_uses (old_root_info, i);
2673 int c = rdesc->refcount;
2674 rdesc->refcount = combine_controlled_uses_counters (c, d);
2675 if (rdesc->refcount == 0)
2677 tree cst = ipa_get_jf_constant (jf);
2678 struct cgraph_node *n;
2679 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
2680 && TREE_CODE (TREE_OPERAND (cst, 0))
2681 == FUNCTION_DECL);
2682 n = cgraph_get_node (TREE_OPERAND (cst, 0));
2683 if (n)
2685 struct cgraph_node *clone;
2686 remove_described_reference ((symtab_node) n, rdesc);
/* Walk up the chain of inline/IPA-CP clones and drop the
   references that cloning created in each of them.  */
2688 clone = cs->caller;
2689 while (clone->global.inlined_to
2690 && clone != rdesc->cs->caller
2691 && IPA_NODE_REF (clone)->ipcp_orig_node)
2693 struct ipa_ref *ref;
2694 ref = ipa_find_reference ((symtab_node) clone,
2695 (symtab_node) n, NULL, 0);
2696 if (ref)
2698 if (dump_file)
2699 fprintf (dump_file, "ipa-prop: Removing "
2700 "cloning-created reference "
2701 "from %s/%i to %s/%i.\n",
2702 xstrdup (cgraph_node_name (clone)),
2703 clone->symbol.order,
2704 xstrdup (cgraph_node_name (n)),
2705 n->symbol.order);
2706 ipa_remove_reference (ref);
2708 clone = clone->callers->caller;
/* Arguments beyond the callee's formal parameters cannot be described
   by controlled-uses counters; mark them undescribed.  */
2715 for (i = ipa_get_param_count (old_root_info);
2716 i < ipa_get_cs_argument_count (args);
2717 i++)
2719 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2721 if (jf->type == IPA_JF_CONST)
2723 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
2724 if (rdesc)
2725 rdesc->refcount = IPA_UNDESCRIBED_USE;
2727 else if (jf->type == IPA_JF_PASS_THROUGH)
2728 ipa_set_controlled_uses (new_root_info,
2729 jf->value.pass_through.formal_id,
2730 IPA_UNDESCRIBED_USE);
2734 /* Update jump functions and call note functions on inlining the call site CS.
2735 CS is expected to lead to a node already cloned by
2736 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
2737 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
2738 created. */
2740 bool
2741 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
2742 vec<cgraph_edge_p> *new_edges)
2744 bool changed;
2745 /* Do nothing if the preparation phase has not been carried out yet
2746 (i.e. during early inlining). */
2747 if (!ipa_node_params_vector.exists ())
2748 return false;
2749 gcc_assert (ipa_edge_args_vector);
2751 propagate_controlled_uses (cs);
2752 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
2754 return changed;
2757 /* Frees all dynamically allocated structures that the argument info points
2758 to. */
2760 void
2761 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
2763 vec_free (args->jump_functions);
/* Zeroing makes repeated freeing and reuse of the slot safe.  */
2764 memset (args, 0, sizeof (*args));
2767 /* Free all ipa_edge structures. */
2769 void
2770 ipa_free_all_edge_args (void)
2772 int i;
2773 struct ipa_edge_args *args;
2775 if (!ipa_edge_args_vector)
2776 return;
2778 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
2779 ipa_free_edge_args_substructures (args);
2781 vec_free (ipa_edge_args_vector);
2784 /* Frees all dynamically allocated structures that the param info points
2785 to. */
2787 void
2788 ipa_free_node_params_substructures (struct ipa_node_params *info)
2790 info->descriptors.release ();
2791 free (info->lattices);
2792 /* Lattice values and their sources are deallocated with their allocation
2793 pool. */
2794 info->known_vals.release ();
2795 memset (info, 0, sizeof (*info));
2798 /* Free all ipa_node_params structures. */
2800 void
2801 ipa_free_all_node_params (void)
2803 int i;
2804 struct ipa_node_params *info;
2806 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
2807 ipa_free_node_params_substructures (info);
2809 ipa_node_params_vector.release ();
2812 /* Set the aggregate replacements of NODE to be AGGVALS. */
2814 void
2815 ipa_set_node_agg_value_chain (struct cgraph_node *node,
2816 struct ipa_agg_replacement_value *aggvals)
/* The replacement vector is indexed by cgraph uid; grow it on demand so
   the assignment below is always in bounds.  */
2818 if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
2819 vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);
2821 (*ipa_node_agg_replacements)[node->uid] = aggvals;
2824 /* Hook that is called by cgraph.c when an edge is removed. */
2826 static void
2827 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
2829 /* During IPA-CP updating we can be called on not-yet analyze clones. */
2830 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
2831 return;
2832 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
2835 /* Hook that is called by cgraph.c when a node is removed. */
2837 static void
2838 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2840 /* During IPA-CP updating we can be called on not-yet analyze clones. */
2841 if (ipa_node_params_vector.length () > (unsigned)node->uid)
2842 ipa_free_node_params_substructures (IPA_NODE_REF (node));
/* Also drop the aggregate replacement chain recorded for the node.  */
2843 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
2844 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
2847 /* Hook that is called by cgraph.c when an edge is duplicated. */
2849 static void
2850 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
2851 __attribute__((unused)) void *data)
2853 struct ipa_edge_args *old_args, *new_args;
2854 unsigned int i;
2856 ipa_check_create_edge_args ();
2858 old_args = IPA_EDGE_REF (src);
2859 new_args = IPA_EDGE_REF (dst);
2861 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
2863 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
2865 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
2866 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
/* After the shallow vec_safe_copy above, dst_jf->agg.items still
   aliases the source vector; copying from dst_jf here deep-copies it
   so the two edges no longer share storage.  */
2868 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
2870 if (src_jf->type == IPA_JF_CONST)
2872 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
2874 if (!src_rdesc)
2875 dst_jf->value.constant.rdesc = NULL;
2876 else if (src_rdesc->cs == src)
2878 struct ipa_cst_ref_desc *dst_rdesc;
2879 gcc_checking_assert (ipa_refdesc_pool);
2880 dst_rdesc
2881 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
2882 dst_rdesc->cs = dst;
2883 dst_rdesc->refcount = src_rdesc->refcount;
/* Duplicates created for inline clones are chained so they can be
   found from the original descriptor later.  */
2884 if (dst->caller->global.inlined_to)
2886 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
2887 src_rdesc->next_duplicate = dst_rdesc;
2889 dst_jf->value.constant.rdesc = dst_rdesc;
2891 else
2893 struct ipa_cst_ref_desc *dst_rdesc;
2894 /* This can happen during inlining, when a JFUNC can refer to a
2895 reference taken in a function up in the tree of inline clones.
2896 We need to find the duplicate that refers to our tree of
2897 inline clones. */
2899 gcc_assert (dst->caller->global.inlined_to);
2900 for (dst_rdesc = src_rdesc->next_duplicate;
2901 dst_rdesc;
2902 dst_rdesc = dst_rdesc->next_duplicate)
2903 if (dst_rdesc->cs->caller->global.inlined_to
2904 == dst->caller->global.inlined_to)
2905 break;
2906 gcc_assert (dst_rdesc);
2907 dst_jf->value.constant.rdesc = dst_rdesc;
2913 /* Hook that is called by cgraph.c when a node is duplicated. */
2915 static void
2916 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
2917 ATTRIBUTE_UNUSED void *data)
2919 struct ipa_node_params *old_info, *new_info;
2920 struct ipa_agg_replacement_value *old_av, *new_av;
2922 ipa_check_create_node_params ();
2923 old_info = IPA_NODE_REF (src);
2924 new_info = IPA_NODE_REF (dst);
2926 new_info->descriptors = old_info->descriptors.copy ();
/* Lattices are not copied; they are recomputed when needed.  */
2927 new_info->lattices = NULL;
2928 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
2930 new_info->uses_analysis_done = old_info->uses_analysis_done;
2931 new_info->node_enqueued = old_info->node_enqueued;
2933 old_av = ipa_get_agg_replacements_for_node (src);
2934 if (!old_av)
2935 return;
/* Duplicate the aggregate replacement chain; note that the copy ends
   up in reverse order, which is harmless for a lookup list.  */
2937 new_av = NULL;
2938 while (old_av)
2940 struct ipa_agg_replacement_value *v;
2942 v = ggc_alloc_ipa_agg_replacement_value ();
2943 memcpy (v, old_av, sizeof (*v));
2944 v->next = new_av;
2945 new_av = v;
2946 old_av = old_av->next;
2948 ipa_set_node_agg_value_chain (dst, new_av);
2952 /* Analyze newly added function into callgraph. */
2954 static void
2955 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2957 ipa_analyze_node (node);
2960 /* Register our cgraph hooks if they are not already there. */
2962 void
2963 ipa_register_cgraph_hooks (void)
2965 if (!edge_removal_hook_holder)
2966 edge_removal_hook_holder =
2967 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
2968 if (!node_removal_hook_holder)
2969 node_removal_hook_holder =
2970 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
2971 if (!edge_duplication_hook_holder)
2972 edge_duplication_hook_holder =
2973 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
2974 if (!node_duplication_hook_holder)
2975 node_duplication_hook_holder =
2976 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
/* NOTE(review): unlike the hooks above, the insertion hook is added
   unconditionally -- presumably registration is never repeated while it
   is live; confirm against callers.  */
2977 function_insertion_hook_holder =
2978 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
2981 /* Unregister our cgraph hooks if they are not already there. */
2983 static void
2984 ipa_unregister_cgraph_hooks (void)
2986 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
2987 edge_removal_hook_holder = NULL;
2988 cgraph_remove_node_removal_hook (node_removal_hook_holder);
2989 node_removal_hook_holder = NULL;
2990 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
2991 edge_duplication_hook_holder = NULL;
2992 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
2993 node_duplication_hook_holder = NULL;
2994 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
2995 function_insertion_hook_holder = NULL;
2998 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2999 longer needed after ipa-cp. */
3001 void
3002 ipa_free_all_structures_after_ipa_cp (void)
/* When not optimizing the data is still needed (e.g. by indirect
   inlining run later), so only free when !optimize.  */
3004 if (!optimize)
3006 ipa_free_all_edge_args ();
3007 ipa_free_all_node_params ();
3008 free_alloc_pool (ipcp_sources_pool);
3009 free_alloc_pool (ipcp_values_pool);
3010 free_alloc_pool (ipcp_agg_lattice_pool);
3011 ipa_unregister_cgraph_hooks ();
3012 if (ipa_refdesc_pool)
3013 free_alloc_pool (ipa_refdesc_pool);
3017 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3018 longer needed after indirect inlining. */
3020 void
3021 ipa_free_all_structures_after_iinln (void)
3023 ipa_free_all_edge_args ();
3024 ipa_free_all_node_params ();
3025 ipa_unregister_cgraph_hooks ();
/* The IPA-CP pools may not exist if IPA-CP did not run; check first.  */
3026 if (ipcp_sources_pool)
3027 free_alloc_pool (ipcp_sources_pool);
3028 if (ipcp_values_pool)
3029 free_alloc_pool (ipcp_values_pool);
3030 if (ipcp_agg_lattice_pool)
3031 free_alloc_pool (ipcp_agg_lattice_pool);
3032 if (ipa_refdesc_pool)
3033 free_alloc_pool (ipa_refdesc_pool);
3036 /* Print the parameter descriptors (name, usedness and controlled-uses
3037 count) of function NODE to F. */
3039 void
3040 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3042 int i, count;
3043 tree temp;
3044 struct ipa_node_params *info;
3046 if (!node->symbol.definition)
3047 return;
3048 info = IPA_NODE_REF (node);
3049 fprintf (f, " function %s/%i parameter descriptors:\n",
3050 cgraph_node_name (node), node->symbol.order);
3051 count = ipa_get_param_count (info);
3052 for (i = 0; i < count; i++)
3054 int c;
3056 temp = ipa_get_param (info, i);
3057 if (TREE_CODE (temp) == PARM_DECL)
3058 fprintf (f, " param %d : %s", i,
3059 (DECL_NAME (temp)
3060 ? (*lang_hooks.decl_printable_name) (temp, 2)
3061 : "(unnamed)"));
3062 if (ipa_is_param_used (info, i))
3063 fprintf (f, " used");
3064 c = ipa_get_controlled_uses (info, i);
3065 if (c == IPA_UNDESCRIBED_USE)
3066 fprintf (f, " undescribed_use");
3067 else
3068 fprintf (f, " controlled_uses=%i", c);
3069 fprintf (f, "\n");
3073 /* Print ipa_tree_map data structures of all functions in the
3074 callgraph to F. */
3076 void
3077 ipa_print_all_params (FILE * f)
3079 struct cgraph_node *node;
3081 fprintf (f, "\nFunction parameters:\n");
3082 FOR_EACH_FUNCTION (node)
3083 ipa_print_node_params (f, node);
3086 /* Return a heap allocated vector containing formal parameters of FNDECL.
   The caller owns the vector and must release it. */
3088 vec<tree>
3089 ipa_get_vector_of_formal_parms (tree fndecl)
3091 vec<tree> args;
3092 int count;
3093 tree parm;
/* DECL_ARGUMENTS is not available in WPA mode, hence the assert.  */
3095 gcc_assert (!flag_wpa);
3096 count = count_formal_params (fndecl);
3097 args.create (count);
3098 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3099 args.quick_push (parm);
3101 return args;
3104 /* Return a heap allocated vector containing types of formal parameters of
3105 function type FNTYPE.  The caller owns the vector and must release it. */
3107 static inline vec<tree>
3108 get_vector_of_formal_parm_types (tree fntype)
3110 vec<tree> types;
3111 int count = 0;
3112 tree t;
/* First pass counts the types so the vector can be sized exactly.  */
3114 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3115 count++;
3117 types.create (count);
3118 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3119 types.quick_push (TREE_VALUE (t));
3121 return types;
3124 /* Modify the function declaration FNDECL and its type according to the plan in
3125 ADJUSTMENTS. It also sets base fields of individual adjustments structures
3126 to reflect the actual parameters being modified which are determined by the
3127 base_index field. */
3129 void
3130 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
3131 const char *synth_parm_prefix)
3133 vec<tree> oparms, otypes;
3134 tree orig_type, new_type = NULL;
3135 tree old_arg_types, t, new_arg_types = NULL;
3136 tree parm, *link = &DECL_ARGUMENTS (fndecl);
3137 int i, len = adjustments.length ();
3138 tree new_reversed = NULL;
3139 bool care_for_types, last_parm_void;
/* Default name prefix for synthesized replacement parameters.  */
3141 if (!synth_parm_prefix)
3142 synth_parm_prefix = "SYNTH";
3144 oparms = ipa_get_vector_of_formal_parms (fndecl);
3145 orig_type = TREE_TYPE (fndecl);
3146 old_arg_types = TYPE_ARG_TYPES (orig_type);
3148 /* The following test is an ugly hack, some functions simply don't have any
3149 arguments in their type. This is probably a bug but well... */
3150 care_for_types = (old_arg_types != NULL_TREE);
3151 if (care_for_types)
/* A trailing void in the arg-type list means the function is not
   variadic; the type list then has one more entry than there are
   PARM_DECLs.  */
3153 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3154 == void_type_node);
3155 otypes = get_vector_of_formal_parm_types (orig_type);
3156 if (last_parm_void)
3157 gcc_assert (oparms.length () + 1 == otypes.length ());
3158 else
3159 gcc_assert (oparms.length () == otypes.length ());
3161 else
3163 last_parm_void = false;
3164 otypes.create (0);
/* Walk the adjustment plan, rebuilding both DECL_ARGUMENTS (via LINK)
   and the new arg-type list.  Removed parameters are simply not
   re-linked.  */
3167 for (i = 0; i < len; i++)
3169 struct ipa_parm_adjustment *adj;
3170 gcc_assert (link);
3172 adj = &adjustments[i];
3173 parm = oparms[adj->base_index];
3174 adj->base = parm;
3176 if (adj->copy_param)
/* Parameter kept as-is: reuse the original PARM_DECL and type.  */
3178 if (care_for_types)
3179 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3180 new_arg_types);
3181 *link = parm;
3182 link = &DECL_CHAIN (parm);
3184 else if (!adj->remove_param)
/* Parameter replaced by a synthesized one (e.g. a scalar part of an
   aggregate); build a fresh artificial PARM_DECL for it.  */
3186 tree new_parm;
3187 tree ptype;
3189 if (adj->by_ref)
3190 ptype = build_pointer_type (adj->type);
3191 else
3192 ptype = adj->type;
3194 if (care_for_types)
3195 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3197 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3198 ptype);
3199 DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);
3201 DECL_ARTIFICIAL (new_parm) = 1;
3202 DECL_ARG_TYPE (new_parm) = ptype;
3203 DECL_CONTEXT (new_parm) = fndecl;
3204 TREE_USED (new_parm) = 1;
3205 DECL_IGNORED_P (new_parm) = 1;
3206 layout_decl (new_parm, 0);
3208 adj->base = parm;
3209 adj->reduction = new_parm;
3211 *link = new_parm;
3213 link = &DECL_CHAIN (new_parm);
/* Terminate the rebuilt DECL_ARGUMENTS chain.  */
3217 *link = NULL_TREE;
3219 if (care_for_types)
/* The type list was built in reverse; restore order and re-append the
   terminating void entry if the original had one.  */
3221 new_reversed = nreverse (new_arg_types);
3222 if (last_parm_void)
3224 if (new_reversed)
3225 TREE_CHAIN (new_arg_types) = void_list_node;
3226 else
3227 new_reversed = void_list_node;
3231 /* Use copy_node to preserve as much as possible from original type
3232 (debug info, attribute lists etc.)
3233 Exception is METHOD_TYPEs must have THIS argument.
3234 When we are asked to remove it, we need to build new FUNCTION_TYPE
3235 instead. */
3236 if (TREE_CODE (orig_type) != METHOD_TYPE
3237 || (adjustments[0].copy_param
3238 && adjustments[0].base_index == 0))
3240 new_type = build_distinct_type_copy (orig_type);
3241 TYPE_ARG_TYPES (new_type) = new_reversed;
3243 else
/* THIS was removed: demote METHOD_TYPE to FUNCTION_TYPE and drop the
   vtable index.  */
3245 new_type
3246 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3247 new_reversed));
3248 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3249 DECL_VINDEX (fndecl) = NULL_TREE;
3252 /* When signature changes, we need to clear builtin info. */
3253 if (DECL_BUILT_IN (fndecl))
3255 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3256 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3259 /* This is a new type, not a copy of an old type. Need to reassociate
3260 variants. We can handle everything except the main variant lazily. */
3261 t = TYPE_MAIN_VARIANT (orig_type);
3262 if (orig_type != t)
3264 TYPE_MAIN_VARIANT (new_type) = t;
3265 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
3266 TYPE_NEXT_VARIANT (t) = new_type;
3268 else
3270 TYPE_MAIN_VARIANT (new_type) = new_type;
3271 TYPE_NEXT_VARIANT (new_type) = NULL;
3274 TREE_TYPE (fndecl) = new_type;
3275 DECL_VIRTUAL_P (fndecl) = 0;
3276 otypes.release ();
3277 oparms.release ();
3280 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
3281 If this is a directly recursive call, CS must be NULL. Otherwise it must
3282 contain the corresponding call graph edge. */
3284 void
3285 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
3286 ipa_parm_adjustment_vec adjustments)
3288 struct cgraph_node *current_node = cgraph_get_node (current_function_decl);
3289 vec<tree> vargs;
3290 vec<tree, va_gc> **debug_args = NULL;
3291 gimple new_stmt;
3292 gimple_stmt_iterator gsi, prev_gsi;
3293 tree callee_decl;
3294 int i, len;
3296 len = adjustments.length ();
3297 vargs.create (len);
3298 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->symbol.decl;
/* The old call statement is about to be replaced; drop references it
   created so they can be re-recorded for the new statement(s) below.  */
3299 ipa_remove_stmt_references ((symtab_node) current_node, stmt);
/* Remember the statement just before STMT so that everything inserted
   in between can be re-scanned for references afterwards.  */
3301 gsi = gsi_for_stmt (stmt);
3302 prev_gsi = gsi;
3303 gsi_prev (&prev_gsi);
3304 for (i = 0; i < len; i++)
3306 struct ipa_parm_adjustment *adj;
3308 adj = &adjustments[i];
3310 if (adj->copy_param)
/* Argument passed through unchanged.  */
3312 tree arg = gimple_call_arg (stmt, adj->base_index);
3314 vargs.quick_push (arg);
3316 else if (!adj->remove_param)
3318 tree expr, base, off;
3319 location_t loc;
3320 unsigned int deref_align;
3321 bool deref_base = false;
3323 /* We create a new parameter out of the value of the old one, we can
3324 do the following kind of transformations:
3326 - A scalar passed by reference is converted to a scalar passed by
3327 value. (adj->by_ref is false and the type of the original
3328 actual argument is a pointer to a scalar).
3330 - A part of an aggregate is passed instead of the whole aggregate.
3331 The part can be passed either by value or by reference, this is
3332 determined by value of adj->by_ref. Moreover, the code below
3333 handles both situations when the original aggregate is passed by
3334 value (its type is not a pointer) and when it is passed by
3335 reference (it is a pointer to an aggregate).
3337 When the new argument is passed by reference (adj->by_ref is true)
3338 it must be a part of an aggregate and therefore we form it by
3339 simply taking the address of a reference inside the original
3340 aggregate. */
3342 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3343 base = gimple_call_arg (stmt, adj->base_index);
3344 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3345 : EXPR_LOCATION (base);
/* Split BASE + OFF so that a MEM_REF can be built: either the
   argument is already a pointer, or we decompose its address.  */
3347 if (TREE_CODE (base) != ADDR_EXPR
3348 && POINTER_TYPE_P (TREE_TYPE (base)))
3349 off = build_int_cst (adj->alias_ptr_type,
3350 adj->offset / BITS_PER_UNIT);
3351 else
3353 HOST_WIDE_INT base_offset;
3354 tree prev_base;
3355 bool addrof;
3357 if (TREE_CODE (base) == ADDR_EXPR)
3359 base = TREE_OPERAND (base, 0);
3360 addrof = true;
3362 else
3363 addrof = false;
3364 prev_base = base;
3365 base = get_addr_base_and_unit_offset (base, &base_offset);
3366 /* Aggregate arguments can have non-invariant addresses. */
3367 if (!base)
3369 base = build_fold_addr_expr (prev_base);
3370 off = build_int_cst (adj->alias_ptr_type,
3371 adj->offset / BITS_PER_UNIT);
3373 else if (TREE_CODE (base) == MEM_REF)
/* Fold the constant offset into the existing MEM_REF offset.  */
3375 if (!addrof)
3377 deref_base = true;
3378 deref_align = TYPE_ALIGN (TREE_TYPE (base));
3380 off = build_int_cst (adj->alias_ptr_type,
3381 base_offset
3382 + adj->offset / BITS_PER_UNIT);
3383 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
3384 off);
3385 base = TREE_OPERAND (base, 0);
3387 else
3389 off = build_int_cst (adj->alias_ptr_type,
3390 base_offset
3391 + adj->offset / BITS_PER_UNIT);
3392 base = build_fold_addr_expr (base);
3396 if (!adj->by_ref)
/* Passing by value: load the piece through a MEM_REF whose type
   alignment is lowered if the actual access may be misaligned.  */
3398 tree type = adj->type;
3399 unsigned int align;
3400 unsigned HOST_WIDE_INT misalign;
3402 if (deref_base)
3404 align = deref_align;
3405 misalign = 0;
3407 else
3409 get_pointer_alignment_1 (base, &align, &misalign);
3410 if (TYPE_ALIGN (type) > align)
3411 align = TYPE_ALIGN (type);
3413 misalign += (tree_to_double_int (off)
3414 .sext (TYPE_PRECISION (TREE_TYPE (off))).low
3415 * BITS_PER_UNIT);
3416 misalign = misalign & (align - 1);
3417 if (misalign != 0)
3418 align = (misalign & -misalign);
3419 if (align < TYPE_ALIGN (type))
3420 type = build_aligned_type (type, align);
3421 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
3423 else
/* Passing by reference: take the address of the piece.  */
3425 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
3426 expr = build_fold_addr_expr (expr);
3429 expr = force_gimple_operand_gsi (&gsi, expr,
3430 adj->by_ref
3431 || is_gimple_reg_type (adj->type),
3432 NULL, true, GSI_SAME_STMT);
3433 vargs.quick_push (expr);
/* For non-copied arguments emit debug bind statements so the removed
   or transformed parameter stays visible in debug info.  */
3435 if (!adj->copy_param && MAY_HAVE_DEBUG_STMTS)
3437 unsigned int ix;
3438 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
3439 gimple def_temp;
3441 arg = gimple_call_arg (stmt, adj->base_index);
3442 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
3444 if (!fold_convertible_p (TREE_TYPE (origin), arg))
3445 continue;
3446 arg = fold_convert_loc (gimple_location (stmt),
3447 TREE_TYPE (origin), arg);
3449 if (debug_args == NULL)
3450 debug_args = decl_debug_args_insert (callee_decl);
/* Debug args are stored as (origin, debug-expr) pairs; reuse an
   existing DEBUG_EXPR_DECL for this origin if there is one.  */
3451 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
3452 if (ddecl == origin)
3454 ddecl = (**debug_args)[ix + 1];
3455 break;
3457 if (ddecl == NULL)
3459 ddecl = make_node (DEBUG_EXPR_DECL);
3460 DECL_ARTIFICIAL (ddecl) = 1;
3461 TREE_TYPE (ddecl) = TREE_TYPE (origin);
3462 DECL_MODE (ddecl) = DECL_MODE (origin);
3464 vec_safe_push (*debug_args, origin);
3465 vec_safe_push (*debug_args, ddecl);
3467 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
3468 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
3472 if (dump_file && (dump_flags & TDF_DETAILS))
3474 fprintf (dump_file, "replacing stmt:");
3475 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
/* Build the replacement call and carry over LHS, location, static chain
   and call flags from the original statement.  */
3478 new_stmt = gimple_build_call_vec (callee_decl, vargs);
3479 vargs.release ();
3480 if (gimple_call_lhs (stmt))
3481 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3483 gimple_set_block (new_stmt, gimple_block (stmt));
3484 if (gimple_has_location (stmt))
3485 gimple_set_location (new_stmt, gimple_location (stmt));
3486 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3487 gimple_call_copy_flags (new_stmt, stmt);
3489 if (dump_file && (dump_flags & TDF_DETAILS))
3491 fprintf (dump_file, "with stmt:");
3492 print_gimple_stmt (dump_file, new_stmt, 0, 0);
3493 fprintf (dump_file, "\n");
3495 gsi_replace (&gsi, new_stmt, true);
3496 if (cs)
3497 cgraph_set_call_stmt (cs, new_stmt);
/* Re-record references for the new call and everything inserted before
   it, walking back to the statement originally preceding STMT.  */
3500 ipa_record_stmt_references (current_node, gsi_stmt (gsi));
3501 gsi_prev (&gsi);
3503 while ((gsi_end_p (prev_gsi) && !gsi_end_p (gsi))
3504 || (!gsi_end_p (prev_gsi) && gsi_stmt (gsi) == gsi_stmt (prev_gsi)));
3506 update_ssa (TODO_update_ssa);
3507 free_dominance_info (CDI_DOMINATORS);
3510 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
3512 static bool
3513 index_in_adjustments_multiple_times_p (int base_index,
3514 ipa_parm_adjustment_vec adjustments)
3516 int i, len = adjustments.length ();
3517 bool one = false;
3519 for (i = 0; i < len; i++)
3521 struct ipa_parm_adjustment *adj;
3522 adj = &adjustments[i];
3524 if (adj->base_index == base_index)
3526 if (one)
3527 return true;
3528 else
3529 one = true;
3532 return false;
3536 /* Return adjustments that should have the same effect on function parameters
3537 and call arguments as if they were first changed according to adjustments in
3538 INNER and then by adjustments in OUTER. */
3540 ipa_parm_adjustment_vec
3541 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
3542 ipa_parm_adjustment_vec outer)
3544 int i, outlen = outer.length ();
3545 int inlen = inner.length ();
3546 int removals = 0;
3547 ipa_parm_adjustment_vec adjustments, tmp;
/* TMP holds the surviving (non-removed) inner adjustments; OUTER's
   base_index values refer to positions within this filtered vector.  */
3549 tmp.create (inlen);
3550 for (i = 0; i < inlen; i++)
3552 struct ipa_parm_adjustment *n;
3553 n = &inner[i];
3555 if (n->remove_param)
3556 removals++;
3557 else
3558 tmp.quick_push (*n);
/* Worst case: every outer adjustment survives plus all inner removals
   are re-emitted at the end.  */
3561 adjustments.create (outlen + removals);
3562 for (i = 0; i < outlen; i++)
3564 struct ipa_parm_adjustment r;
3565 struct ipa_parm_adjustment *out = &outer[i];
3566 struct ipa_parm_adjustment *in = &tmp[out->base_index];
3568 memset (&r, 0, sizeof (r));
3569 gcc_assert (!in->remove_param);
3570 if (out->remove_param)
/* Only emit the removal once per original parameter; if the inner
   index appears several times, the others keep it alive.  */
3572 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
3574 r.remove_param = true;
3575 adjustments.quick_push (r);
3577 continue;
3580 r.base_index = in->base_index;
3581 r.type = out->type;
3583 /* FIXME: Create nonlocal value too. */
/* Compose the two adjustments: offsets add up unless one side is a
   plain copy.  */
3585 if (in->copy_param && out->copy_param)
3586 r.copy_param = true;
3587 else if (in->copy_param)
3588 r.offset = out->offset;
3589 else if (out->copy_param)
3590 r.offset = in->offset;
3591 else
3592 r.offset = in->offset + out->offset;
3593 adjustments.quick_push (r);
/* Re-emit removals recorded by the inner adjustments.  */
3596 for (i = 0; i < inlen; i++)
3598 struct ipa_parm_adjustment *n = &inner[i];
3600 if (n->remove_param)
3601 adjustments.quick_push (*n);
3604 tmp.release ();
3605 return adjustments;
3608 /* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
3609 friendly way, assuming they are meant to be applied to FNDECL. */
3611 void
3612 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
3613 tree fndecl)
3615 int i, len = adjustments.length ();
3616 bool first = true;
3617 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
3619 fprintf (file, "IPA param adjustments: ");
3620 for (i = 0; i < len; i++)
3622 struct ipa_parm_adjustment *adj;
3623 adj = &adjustments[i];
/* Separate entries after the first one.  */
3625 if (!first)
3626 fprintf (file, " ");
3627 else
3628 first = false;
3630 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
3631 print_generic_expr (file, parms[adj->base_index], 0);
/* Optional fields are only printed when set.  */
3632 if (adj->base)
3634 fprintf (file, ", base: ");
3635 print_generic_expr (file, adj->base, 0);
3637 if (adj->reduction)
3639 fprintf (file, ", reduction: ");
3640 print_generic_expr (file, adj->reduction, 0);
3642 if (adj->new_ssa_base)
3644 fprintf (file, ", new_ssa_base: ");
3645 print_generic_expr (file, adj->new_ssa_base, 0);
3648 if (adj->copy_param)
3649 fprintf (file, ", copy_param");
3650 else if (adj->remove_param)
3651 fprintf (file, ", remove_param");
3652 else
3653 fprintf (file, ", offset %li", (long) adj->offset);
3654 if (adj->by_ref)
3655 fprintf (file, ", by_ref");
3656 print_node_brief (file, ", type: ", adj->type, 0);
3657 fprintf (file, "\n");
/* Release the heap vector obtained above.  */
3659 parms.release ();
3662 /* Dump the AV linked list. */
3664 void
3665 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
3667 bool comma = false;
3668 fprintf (f, " Aggregate replacements:");
/* Print each replacement as index[offset]=value, comma separated.  */
3669 for (; av; av = av->next)
3671 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
3672 av->index, av->offset);
3673 print_generic_expr (f, av->value, 0);
3674 comma = true;
3676 fprintf (f, "\n");
3679 /* Stream out jump function JUMP_FUNC to OB. */
3681 static void
3682 ipa_write_jump_function (struct output_block *ob,
3683 struct ipa_jump_func *jump_func)
3685 struct ipa_agg_jf_item *item;
3686 struct bitpack_d bp;
3687 int i, count;
/* The layout written here must stay in sync with the reader in
   ipa_read_jump_function.  */
3689 streamer_write_uhwi (ob, jump_func->type);
3690 switch (jump_func->type)
3692 case IPA_JF_UNKNOWN:
3693 break;
3694 case IPA_JF_KNOWN_TYPE:
3695 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
3696 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
3697 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
3698 break;
3699 case IPA_JF_CONST:
/* Constants with expression locations cannot be merged across
   translation units; the analysis must have stripped them.  */
3700 gcc_assert (
3701 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
3702 stream_write_tree (ob, jump_func->value.constant.value, true);
3703 break;
3704 case IPA_JF_PASS_THROUGH:
3705 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
/* Simple (NOP_EXPR) pass-through has no operand but carries the
   agg_preserved flag; arithmetic pass-through streams the operand.  */
3706 if (jump_func->value.pass_through.operation == NOP_EXPR)
3708 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
3709 bp = bitpack_create (ob->main_stream);
3710 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
3711 streamer_write_bitpack (&bp);
3713 else
3715 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
3716 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
3718 break;
3719 case IPA_JF_ANCESTOR:
3720 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
3721 stream_write_tree (ob, jump_func->value.ancestor.type, true);
3722 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
3723 bp = bitpack_create (ob->main_stream);
3724 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
3725 streamer_write_bitpack (&bp);
3726 break;
/* The aggregate part is streamed for every jump function type.  */
3729 count = vec_safe_length (jump_func->agg.items);
3730 streamer_write_uhwi (ob, count);
3731 if (count)
3733 bp = bitpack_create (ob->main_stream);
3734 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
3735 streamer_write_bitpack (&bp);
3738 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
3740 streamer_write_uhwi (ob, item->offset);
3741 stream_write_tree (ob, item->value, true);
3745 /* Read in jump function JUMP_FUNC from IB. */
3747 static void
3748 ipa_read_jump_function (struct lto_input_block *ib,
3749 struct ipa_jump_func *jump_func,
3750 struct cgraph_edge *cs,
3751 struct data_in *data_in)
3753 enum jump_func_type jftype;
3754 enum tree_code operation;
3755 int i, count;
/* The read order here mirrors ipa_write_jump_function exactly.  */
3757 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
3758 switch (jftype)
3760 case IPA_JF_UNKNOWN:
3761 jump_func->type = IPA_JF_UNKNOWN;
3762 break;
3763 case IPA_JF_KNOWN_TYPE:
3765 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
3766 tree base_type = stream_read_tree (ib, data_in);
3767 tree component_type = stream_read_tree (ib, data_in);
3769 ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
3770 break;
3772 case IPA_JF_CONST:
3773 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
3774 break;
3775 case IPA_JF_PASS_THROUGH:
3776 operation = (enum tree_code) streamer_read_uhwi (ib);
/* NOP_EXPR encodes a simple pass-through (formal id + agg_preserved
   flag); any other code carries an operand for the arithmetic form.  */
3777 if (operation == NOP_EXPR)
3779 int formal_id = streamer_read_uhwi (ib);
3780 struct bitpack_d bp = streamer_read_bitpack (ib);
3781 bool agg_preserved = bp_unpack_value (&bp, 1);
3782 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
3784 else
3786 tree operand = stream_read_tree (ib, data_in);
3787 int formal_id = streamer_read_uhwi (ib);
3788 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
3789 operation);
3791 break;
3792 case IPA_JF_ANCESTOR:
3794 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
3795 tree type = stream_read_tree (ib, data_in);
3796 int formal_id = streamer_read_uhwi (ib);
3797 struct bitpack_d bp = streamer_read_bitpack (ib);
3798 bool agg_preserved = bp_unpack_value (&bp, 1);
3800 ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved);
3801 break;
/* Read the aggregate jump function items written for every type.  */
3805 count = streamer_read_uhwi (ib);
3806 vec_alloc (jump_func->agg.items, count);
3807 if (count)
3809 struct bitpack_d bp = streamer_read_bitpack (ib);
3810 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
3812 for (i = 0; i < count; i++)
3814 struct ipa_agg_jf_item item;
3815 item.offset = streamer_read_uhwi (ib);
3816 item.value = stream_read_tree (ib, data_in);
3817 jump_func->agg.items->quick_push (item);
3821 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
3822 relevant to indirect inlining to OB. */
3824 static void
3825 ipa_write_indirect_edge_info (struct output_block *ob,
3826 struct cgraph_edge *cs)
3828 struct cgraph_indirect_call_info *ii = cs->indirect_info;
3829 struct bitpack_d bp;
3831 streamer_write_hwi (ob, ii->param_index);
3832 streamer_write_hwi (ob, ii->offset);
3833 bp = bitpack_create (ob->main_stream);
3834 bp_pack_value (&bp, ii->polymorphic, 1);
3835 bp_pack_value (&bp, ii->agg_contents, 1);
3836 bp_pack_value (&bp, ii->member_ptr, 1);
3837 bp_pack_value (&bp, ii->by_ref, 1);
3838 streamer_write_bitpack (&bp);
3840 if (ii->polymorphic)
3842 streamer_write_hwi (ob, ii->otr_token);
3843 stream_write_tree (ob, ii->otr_type, true);
3847 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
3848 relevant to indirect inlining from IB. */
3850 static void
3851 ipa_read_indirect_edge_info (struct lto_input_block *ib,
3852 struct data_in *data_in ATTRIBUTE_UNUSED,
3853 struct cgraph_edge *cs)
3855 struct cgraph_indirect_call_info *ii = cs->indirect_info;
3856 struct bitpack_d bp;
/* Read order mirrors ipa_write_indirect_edge_info.  */
3858 ii->param_index = (int) streamer_read_hwi (ib);
3859 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
3860 bp = streamer_read_bitpack (ib);
3861 ii->polymorphic = bp_unpack_value (&bp, 1);
3862 ii->agg_contents = bp_unpack_value (&bp, 1);
3863 ii->member_ptr = bp_unpack_value (&bp, 1);
3864 ii->by_ref = bp_unpack_value (&bp, 1);
/* Virtual-call details are only streamed for polymorphic calls.  */
3865 if (ii->polymorphic)
3867 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
3868 ii->otr_type = stream_read_tree (ib, data_in);
3872 /* Stream out NODE info to OB. */
3874 static void
3875 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
3877 int node_ref;
3878 lto_symtab_encoder_t encoder;
3879 struct ipa_node_params *info = IPA_NODE_REF (node);
3880 int j;
3881 struct cgraph_edge *e;
3882 struct bitpack_d bp;
/* Identify the node by its encoder reference.  */
3884 encoder = ob->decl_state->symtab_node_encoder;
3885 node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
3886 streamer_write_uhwi (ob, node_ref);
3888 streamer_write_uhwi (ob, ipa_get_param_count (info));
3889 for (j = 0; j < ipa_get_param_count (info); j++)
3890 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
3891 bp = bitpack_create (ob->main_stream);
/* Analysis must be complete and the node must not be an IPCP clone or
   still queued; clones are not streamed.  */
3892 gcc_assert (info->uses_analysis_done
3893 || ipa_get_param_count (info) == 0);
3894 gcc_assert (!info->node_enqueued);
3895 gcc_assert (!info->ipcp_orig_node);
3896 for (j = 0; j < ipa_get_param_count (info); j++)
3897 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
3898 streamer_write_bitpack (&bp);
3899 for (j = 0; j < ipa_get_param_count (info); j++)
3900 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
/* Jump functions for direct callees, then for indirect calls (the
   latter additionally carry indirect edge info).  */
3901 for (e = node->callees; e; e = e->next_callee)
3903 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3905 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
3906 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
3907 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
3909 for (e = node->indirect_calls; e; e = e->next_callee)
3911 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3913 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
3914 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
3915 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
3916 ipa_write_indirect_edge_info (ob, e);
3920 /* Stream in NODE info from IB. */
3922 static void
3923 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
3924 struct data_in *data_in)
3926 struct ipa_node_params *info = IPA_NODE_REF (node);
3927 int k;
3928 struct cgraph_edge *e;
3929 struct bitpack_d bp;
/* Read order mirrors ipa_write_node_info.  */
3931 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
3933 for (k = 0; k < ipa_get_param_count (info); k++)
3934 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
3936 bp = streamer_read_bitpack (ib);
3937 if (ipa_get_param_count (info) != 0)
3938 info->uses_analysis_done = true;
3939 info->node_enqueued = false;
3940 for (k = 0; k < ipa_get_param_count (info); k++)
3941 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
3942 for (k = 0; k < ipa_get_param_count (info); k++)
3943 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
/* Jump functions for direct callees, then indirect calls (which also
   carry indirect edge info).  */
3944 for (e = node->callees; e; e = e->next_callee)
3946 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3947 int count = streamer_read_uhwi (ib);
3949 if (!count)
3950 continue;
3951 vec_safe_grow_cleared (args->jump_functions, count);
3953 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
3954 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
3955 data_in);
3957 for (e = node->indirect_calls; e; e = e->next_callee)
3959 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3960 int count = streamer_read_uhwi (ib);
3962 if (count)
3964 vec_safe_grow_cleared (args->jump_functions, count);
3965 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
3966 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
3967 data_in);
/* Indirect edge info is present even when there are no arguments.  */
3969 ipa_read_indirect_edge_info (ib, data_in, e);
3973 /* Write jump functions for nodes in SET. */
3975 void
3976 ipa_prop_write_jump_functions (void)
3978 struct cgraph_node *node;
3979 struct output_block *ob;
3980 unsigned int count = 0;
3981 lto_symtab_encoder_iterator lsei;
3982 lto_symtab_encoder_t encoder;
/* Nothing to stream if IPA analysis never ran.  */
3985 if (!ipa_node_params_vector.exists ())
3986 return;
3988 ob = create_output_block (LTO_section_jump_functions);
3989 encoder = ob->decl_state->symtab_node_encoder;
3990 ob->cgraph_node = NULL;
/* First pass: count nodes with analysis data so the count can be
   written before the per-node records.  */
3991 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
3992 lsei_next_function_in_partition (&lsei))
3994 node = lsei_cgraph_node (lsei);
3995 if (cgraph_function_with_gimple_body_p (node)
3996 && IPA_NODE_REF (node) != NULL)
3997 count++;
4000 streamer_write_uhwi (ob, count);
4002 /* Process all of the functions. */
4003 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4004 lsei_next_function_in_partition (&lsei))
4006 node = lsei_cgraph_node (lsei);
4007 if (cgraph_function_with_gimple_body_p (node)
4008 && IPA_NODE_REF (node) != NULL)
4009 ipa_write_node_info (ob, node);
4011 streamer_write_char_stream (ob->main_stream, 0);
4012 produce_asm (ob, NULL);
4013 destroy_output_block (ob);
4016 /* Read section in file FILE_DATA of length LEN with data DATA. */
4018 static void
4019 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4020 size_t len)
4022 const struct lto_function_header *header =
4023 (const struct lto_function_header *) data;
/* The section layout is: header, cfg, main stream, string table.  */
4024 const int cfg_offset = sizeof (struct lto_function_header);
4025 const int main_offset = cfg_offset + header->cfg_size;
4026 const int string_offset = main_offset + header->main_size;
4027 struct data_in *data_in;
4028 struct lto_input_block ib_main;
4029 unsigned int i;
4030 unsigned int count;
4032 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4033 header->main_size);
4035 data_in =
4036 lto_data_in_create (file_data, (const char *) data + string_offset,
4037 header->string_size, vNULL);
4038 count = streamer_read_uhwi (&ib_main);
/* Read per-node records, each prefixed by its encoder index.  */
4040 for (i = 0; i < count; i++)
4042 unsigned int index;
4043 struct cgraph_node *node;
4044 lto_symtab_encoder_t encoder;
4046 index = streamer_read_uhwi (&ib_main);
4047 encoder = file_data->symtab_node_encoder;
4048 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4049 gcc_assert (node->symbol.definition);
4050 ipa_read_node_info (&ib_main, node, data_in);
4052 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4053 len);
4054 lto_data_in_delete (data_in);
4057 /* Read ipcp jump functions. */
4059 void
4060 ipa_prop_read_jump_functions (void)
4062 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4063 struct lto_file_decl_data *file_data;
4064 unsigned int j = 0;
/* Make sure the summary vectors and cgraph hooks exist before any
   per-file data is read into them.  */
4066 ipa_check_create_node_params ();
4067 ipa_check_create_edge_args ();
4068 ipa_register_cgraph_hooks ();
/* FILE_DATA_VEC is NULL-terminated; read every file that provides a
   jump-functions section.  */
4070 while ((file_data = file_data_vec[j++]))
4072 size_t len;
4073 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4075 if (data)
4076 ipa_prop_read_section (file_data, data, len);
4080 /* After merging units, we can get mismatch in argument counts.
4081 Also decl merging might've rendered parameter lists obsolete.
4082 Also compute called_with_variable_arg info. */
4084 void
4085 ipa_update_after_lto_read (void)
/* (Re)create the per-node and per-edge IPA summary vectors if they do
   not exist yet.  */
4087 ipa_check_create_node_params ();
4088 ipa_check_create_edge_args ();
/* Stream out the aggregate value replacement chain for NODE to OB.  */
4091 void
4092 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
4094 int node_ref;
4095 unsigned int count = 0;
4096 lto_symtab_encoder_t encoder;
4097 struct ipa_agg_replacement_value *aggvals, *av;
4099 aggvals = ipa_get_agg_replacements_for_node (node);
/* Identify the node by its encoder reference.  */
4100 encoder = ob->decl_state->symtab_node_encoder;
4101 node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
4102 streamer_write_uhwi (ob, node_ref);
/* Write the chain length first so the reader can loop.  */
4104 for (av = aggvals; av; av = av->next)
4105 count++;
4106 streamer_write_uhwi (ob, count);
/* The field order must match read_agg_replacement_chain.  */
4108 for (av = aggvals; av; av = av->next)
4110 struct bitpack_d bp;
4112 streamer_write_uhwi (ob, av->offset);
4113 streamer_write_uhwi (ob, av->index);
4114 stream_write_tree (ob, av->value, true);
4116 bp = bitpack_create (ob->main_stream);
4117 bp_pack_value (&bp, av->by_ref, 1);
4118 streamer_write_bitpack (&bp);
4122 /* Stream in the aggregate value replacement chain for NODE from IB. */
4124 static void
4125 read_agg_replacement_chain (struct lto_input_block *ib,
4126 struct cgraph_node *node,
4127 struct data_in *data_in)
4129 struct ipa_agg_replacement_value *aggvals = NULL;
4130 unsigned int count, i;
/* Read order mirrors write_agg_replacement_chain; entries are prepended
   so the in-memory chain ends up reversed relative to the stream.  */
4132 count = streamer_read_uhwi (ib);
4133 for (i = 0; i <count; i++)
4135 struct ipa_agg_replacement_value *av;
4136 struct bitpack_d bp;
4138 av = ggc_alloc_ipa_agg_replacement_value ();
4139 av->offset = streamer_read_uhwi (ib);
4140 av->index = streamer_read_uhwi (ib);
4141 av->value = stream_read_tree (ib, data_in);
4142 bp = streamer_read_bitpack (ib);
4143 av->by_ref = bp_unpack_value (&bp, 1);
4144 av->next = aggvals;
4145 aggvals = av;
4147 ipa_set_node_agg_value_chain (node, aggvals);
4150 /* Write all aggregate replacement for nodes in set. */
4152 void
4153 ipa_prop_write_all_agg_replacement (void)
4155 struct cgraph_node *node;
4156 struct output_block *ob;
4157 unsigned int count = 0;
4158 lto_symtab_encoder_iterator lsei;
4159 lto_symtab_encoder_t encoder;
/* Nothing to stream if IPCP recorded no replacements.  */
4161 if (!ipa_node_agg_replacements)
4162 return;
4164 ob = create_output_block (LTO_section_ipcp_transform);
4165 encoder = ob->decl_state->symtab_node_encoder;
4166 ob->cgraph_node = NULL;
/* First pass: count qualifying nodes so the count precedes the
   per-node records.  */
4167 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4168 lsei_next_function_in_partition (&lsei))
4170 node = lsei_cgraph_node (lsei);
4171 if (cgraph_function_with_gimple_body_p (node)
4172 && ipa_get_agg_replacements_for_node (node) != NULL)
4173 count++;
4176 streamer_write_uhwi (ob, count);
/* Second pass: write each node's replacement chain.  */
4178 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4179 lsei_next_function_in_partition (&lsei))
4181 node = lsei_cgraph_node (lsei);
4182 if (cgraph_function_with_gimple_body_p (node)
4183 && ipa_get_agg_replacements_for_node (node) != NULL)
4184 write_agg_replacement_chain (ob, node);
4186 streamer_write_char_stream (ob->main_stream, 0);
4187 produce_asm (ob, NULL);
4188 destroy_output_block (ob);
4191 /* Read replacements section in file FILE_DATA of length LEN with data
4192 DATA. */
4194 static void
4195 read_replacements_section (struct lto_file_decl_data *file_data,
4196 const char *data,
4197 size_t len)
4199 const struct lto_function_header *header =
4200 (const struct lto_function_header *) data;
/* The section layout is: header, cfg, main stream, string table.  */
4201 const int cfg_offset = sizeof (struct lto_function_header);
4202 const int main_offset = cfg_offset + header->cfg_size;
4203 const int string_offset = main_offset + header->main_size;
4204 struct data_in *data_in;
4205 struct lto_input_block ib_main;
4206 unsigned int i;
4207 unsigned int count;
4209 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4210 header->main_size);
4212 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
4213 header->string_size, vNULL);
4214 count = streamer_read_uhwi (&ib_main);
/* Read per-node replacement chains, each prefixed by the node's
   encoder index.  */
4216 for (i = 0; i < count; i++)
4218 unsigned int index;
4219 struct cgraph_node *node;
4220 lto_symtab_encoder_t encoder;
4222 index = streamer_read_uhwi (&ib_main);
4223 encoder = file_data->symtab_node_encoder;
4224 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4225 gcc_assert (node->symbol.definition);
4226 read_agg_replacement_chain (&ib_main, node, data_in);
/* NOTE(review): the section name passed here is LTO_section_jump_functions
   although this data came from LTO_section_ipcp_transform — looks like a
   copy-paste slip; confirm against lto_free_section_data semantics.  */
4228 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4229 len);
4230 lto_data_in_delete (data_in);
4233 /* Read IPA-CP aggregate replacements. */
4235 void
4236 ipa_prop_read_all_agg_replacement (void)
4238 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4239 struct lto_file_decl_data *file_data;
4240 unsigned int j = 0;
4242 while ((file_data = file_data_vec[j++]))
4244 size_t len;
4245 const char *data = lto_get_section_data (file_data,
4246 LTO_section_ipcp_transform,
4247 NULL, &len);
4248 if (data)
4249 read_replacements_section (file_data, data, len);
4253 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4254 NODE. */
4256 static void
4257 adjust_agg_replacement_values (struct cgraph_node *node,
4258 struct ipa_agg_replacement_value *aggval)
4260 struct ipa_agg_replacement_value *v;
4261 int i, c = 0, d = 0, *adj;
4263 if (!node->clone.combined_args_to_skip)
4264 return;
4266 for (v = aggval; v; v = v->next)
4268 gcc_assert (v->index >= 0);
4269 if (c < v->index)
4270 c = v->index;
4272 c++;
4274 adj = XALLOCAVEC (int, c);
4275 for (i = 0; i < c; i++)
4276 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
4278 adj[i] = -1;
4279 d++;
4281 else
4282 adj[i] = i - d;
4284 for (v = aggval; v; v = v->next)
4285 v->index = adj[v->index];
/* Function body transformation phase.  Replace loads from aggregate
   parameters of NODE with the constant values recorded for it by IPA-CP
   (the node's aggregate replacement chain).  Returns a TODO_* mask for the
   pass manager: 0 when nothing changed, otherwise TODO_update_ssa_only_virtuals
   possibly combined with TODO_cleanup_cfg when EH edge purging altered the
   CFG.  Must run with cfun/current_function_decl set to NODE's body.  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor_t> descriptors = vNULL;
  struct param_analysis_info *parms_ainfo;
  struct ipa_agg_replacement_value *aggval;
  gimple_stmt_iterator gsi;
  basic_block bb;
  int param_count;
  bool cfg_changed = false, something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s/%i\n",
	     cgraph_node_name (node), node->symbol.order);

  /* No recorded aggregate replacements means there is nothing to do.  */
  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->symbol.decl);
  if (param_count == 0)
    return 0;
  /* Replacement indices were recorded against the original parameter list;
     remap them to this clone's surviving parameters.  */
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);
  /* Per-parameter scratch state for the load analysis below; zeroed so the
     lazily-built bitmaps start out unallocated.  */
  parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
  memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
  descriptors.safe_grow_cleared (param_count);
  ipa_populate_param_decls (node, descriptors);

  /* Scan every statement, looking for scalar loads from parameter-backed
     aggregates that match a recorded (index, offset) replacement.  */
  FOR_EACH_BB (bb)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
	struct ipa_agg_replacement_value *v;
	gimple stmt = gsi_stmt (gsi);
	tree rhs, val, t;
	HOST_WIDE_INT offset;
	int index;
	bool by_ref, vce;

	/* Only assignments whose RHS is a memory load are candidates.  */
	if (!gimple_assign_load_p (stmt))
	  continue;
	rhs = gimple_assign_rhs1 (stmt);
	/* Replacements are scalar constants; skip aggregate-typed loads.  */
	if (!is_gimple_reg_type (TREE_TYPE (rhs)))
	  continue;

	vce = false;
	t = rhs;
	while (handled_component_p (t))
	  {
	    /* V_C_E can do things like convert an array of integers to one
	       bigger integer and similar things we do not handle below.  */
	    if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
	      {
		vce = true;
		break;
	      }
	    t = TREE_OPERAND (t, 0);
	  }
	if (vce)
	  continue;

	/* Determine which parameter (INDEX) and byte OFFSET this load reads,
	   and whether the parameter is passed by reference.  */
	if (!ipa_load_from_parm_agg_1 (descriptors, parms_ainfo, stmt,
				       rhs, &index, &offset, &by_ref))
	  continue;
	/* Look for a replacement recorded at exactly this index/offset.  */
	for (v = aggval; v; v = v->next)
	  if (v->index == index
	      && v->offset == offset)
	    break;
	/* The by-ref flag must agree too, or the replacement describes a
	   different access.  */
	if (!v || v->by_ref != by_ref)
	  continue;

	gcc_checking_assert (is_gimple_ip_invariant (v->value));
	if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
	  {
	    /* Try a value-preserving conversion first, then a same-size
	       bit reinterpretation; otherwise give up on this load.  */
	    if (fold_convertible_p (TREE_TYPE (rhs), v->value))
	      val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
	    else if (TYPE_SIZE (TREE_TYPE (rhs))
		     == TYPE_SIZE (TREE_TYPE (v->value)))
	      val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
	    else
	      {
		if (dump_file)
		  {
		    fprintf (dump_file, "    const ");
		    print_generic_expr (dump_file, v->value, 0);
		    fprintf (dump_file, " can't be converted to type of ");
		    print_generic_expr (dump_file, rhs, 0);
		    fprintf (dump_file, "\n");
		  }
		continue;
	      }
	  }
	else
	  val = v->value;

	if (dump_file && (dump_flags & TDF_DETAILS))
	  {
	    fprintf (dump_file, "Modifying stmt:\n  ");
	    print_gimple_stmt (dump_file, stmt, 0, 0);
	  }
	/* Replace the memory load with the known constant.  */
	gimple_assign_set_rhs_from_tree (&gsi, val);
	update_stmt (stmt);

	if (dump_file && (dump_flags & TDF_DETAILS))
	  {
	    fprintf (dump_file, "into:\n  ");
	    print_gimple_stmt (dump_file, stmt, 0, 0);
	    fprintf (dump_file, "\n");
	  }

	something_changed = true;
	/* Dropping a load can make an EH landing pad unreachable; purging
	   dead EH edges changes the CFG and must be reported to the pass
	   manager.  */
	if (maybe_clean_eh_stmt (stmt)
	    && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
	  cfg_changed = true;
      }

  /* The replacements have been consumed; clear this node's slot.
     NOTE(review): indexed by node->uid — assumes the vector is sized for all
     uids; confirm against where ipa_node_agg_replacements is grown.  */
  (*ipa_node_agg_replacements)[node->uid] = NULL;
  free_parms_ainfo (parms_ainfo, param_count);
  descriptors.release ();
  if (!something_changed)
    return 0;
  else if (cfg_changed)
    return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
  else
    return TODO_update_ssa_only_virtuals;
}