2013-09-18 Marc Glisse <marc.glisse@inria.fr>
[official-gcc.git] / gcc / ipa-prop.c
blobc09ec2f016681dc3f07faff1b7623f3484bbcc1e
1 /* Interprocedural analyses.
2 Copyright (C) 2005-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tree.h"
24 #include "langhooks.h"
25 #include "ggc.h"
26 #include "target.h"
27 #include "cgraph.h"
28 #include "ipa-prop.h"
29 #include "tree-ssa.h"
30 #include "tree-pass.h"
31 #include "tree-inline.h"
32 #include "ipa-inline.h"
33 #include "gimple.h"
34 #include "flags.h"
35 #include "diagnostic.h"
36 #include "gimple-pretty-print.h"
37 #include "lto-streamer.h"
38 #include "data-streamer.h"
39 #include "tree-streamer.h"
40 #include "params.h"
/* Intermediate information about a parameter that is only useful during the
   run of ipa_analyze_node and is not kept afterwards.  */
struct param_analysis_info
{
  /* Set to true once a walk has proved the parameter (parm_modified), the
     memory it references (ref_modified), or the data it points to at the
     call (pt_modified) may be modified; used as negative-result caches.  */
  bool parm_modified, ref_modified, pt_modified;
  /* Statements already visited by walk_aliased_vdefs, so that repeated
     queries about the same parameter do not re-walk the whole function.  */
  bitmap parm_visited_statements, pt_visited_statements;
};
/* Vector where the parameter infos are actually stored.  */
vec<ipa_node_params_t> ipa_node_params_vector;
/* Vector of known aggregate values in cloned nodes.  */
vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
/* Vector where the per-call-site argument infos are actually stored.  */
vec<ipa_edge_args_t, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks, registered on initialization so the IPA
   summaries can be kept in sync with call graph changes:  */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */
static alloc_pool ipa_refdesc_pool;
81 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
82 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
84 static bool
85 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
87 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->symbol.decl);
88 struct cl_optimization *os;
90 if (!fs_opts)
91 return false;
92 os = TREE_OPTIMIZATION (fs_opts);
93 return !os->x_optimize || !os->x_flag_ipa_cp;
96 /* Return index of the formal whose tree is PTREE in function which corresponds
97 to INFO. */
99 static int
100 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor_t> descriptors, tree ptree)
102 int i, count;
104 count = descriptors.length ();
105 for (i = 0; i < count; i++)
106 if (descriptors[i].decl == ptree)
107 return i;
109 return -1;
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO, or -1 if PTREE is not a formal of that function.  */
int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
121 /* Populate the param_decl field in parameter DESCRIPTORS that correspond to
122 NODE. */
124 static void
125 ipa_populate_param_decls (struct cgraph_node *node,
126 vec<ipa_param_descriptor_t> &descriptors)
128 tree fndecl;
129 tree fnargs;
130 tree parm;
131 int param_num;
133 fndecl = node->symbol.decl;
134 gcc_assert (gimple_has_body_p (fndecl));
135 fnargs = DECL_ARGUMENTS (fndecl);
136 param_num = 0;
137 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
139 descriptors[param_num].decl = parm;
140 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm));
141 param_num++;
145 /* Return how many formal parameters FNDECL has. */
147 static inline int
148 count_formal_params (tree fndecl)
150 tree parm;
151 int count = 0;
152 gcc_assert (gimple_has_body_p (fndecl));
154 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
155 count++;
157 return count;
/* Dump a textual representation of the formal parameter number I of the
   function corresponding to INFO into FILE: "param #I" followed by the
   parameter declaration, if one is recorded in the descriptors.  */
void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}
/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  A no-op when the descriptor vector
   already exists or when PARAM_COUNT is zero.  */
void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}
/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  Safe to call repeatedly; later calls are no-ops.  */
void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->symbol.decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}
/* Print the jump functions associated with call graph edge CS to file F.
   One line is emitted per argument, then any aggregate jump function
   entries are listed under it.  */
static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, " param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_KNOWN_TYPE)
	{
	  fprintf (f, "KNOWN TYPE: base ");
	  print_generic_expr (f, jump_func->value.known_type.base_type, 0);
	  fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
		   jump_func->value.known_type.offset);
	  print_generic_expr (f, jump_func->value.known_type.component_type, 0);
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val, 0);
	  /* For addresses of constants, also show what they point to.  */
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
				  0);
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   tree_code_name[(int)
				  jump_func->value.pass_through.operation]);
	  /* NOP_EXPR means a simple pass-through with no operand.  */
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f,
				  jump_func->value.pass_through.operand, 0);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  if (jump_func->value.pass_through.type_preserved)
	    fprintf (f, ", type_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  print_generic_expr (f, jump_func->value.ancestor.type, 0);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  if (jump_func->value.ancestor.type_preserved)
	    fprintf (f, ", type_preserved");
	  fprintf (f, "\n");
	}

      /* Aggregate jump function entries, if any, apply in addition to the
	 scalar jump function printed above.  */
      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, " Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      /* A TYPE_P value marks a known clobber rather than a known
		 constant at the given offset.  */
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_low_cst (TYPE_SIZE (item->value), 1));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value, 0);
		}
	      fprintf (f, "\n");
	    }
	}
    }
}
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  Direct callees are printed first, then indirect calls.  */
void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, " Jump functions of caller %s/%i:\n", cgraph_node_name (node),
	   node->symbol.order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      /* NOTE(review): the xstrdup results below are never freed; this is
	 dump-only code but still a small leak.  */
      fprintf (f, " callsite %s/%i -> %s/%i : \n",
	       xstrdup (cgraph_node_name (node)), node->symbol.order,
	       xstrdup (cgraph_node_name (cs->callee)),
	       cs->callee->symbol.order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      /* agg_contents means the target is loaded from an aggregate passed as
	 (or pointed to by) a parameter.  */
      if (ii->agg_contents)
	fprintf (f, " indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by_value");
      else
	fprintf (f, " indirect %s callsite, calling param %i",
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
354 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
356 void
357 ipa_print_all_jump_functions (FILE *f)
359 struct cgraph_node *node;
361 fprintf (f, "\nJump functions:\n");
362 FOR_EACH_FUNCTION (node)
364 ipa_print_node_jump_functions (f, node);
368 /* Set JFUNC to be a known type jump function. */
370 static void
371 ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
372 tree base_type, tree component_type)
374 gcc_assert (TREE_CODE (component_type) == RECORD_TYPE
375 && TYPE_BINFO (component_type));
376 jfunc->type = IPA_JF_KNOWN_TYPE;
377 jfunc->value.known_type.offset = offset,
378 jfunc->value.known_type.base_type = base_type;
379 jfunc->value.known_type.component_type = component_type;
382 /* Set JFUNC to be a copy of another jmp (to be used by jump function
383 combination code). The two functions will share their rdesc. */
385 static void
386 ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
387 struct ipa_jump_func *src)
390 gcc_checking_assert (src->type == IPA_JF_CONST);
391 dst->type = IPA_JF_CONST;
392 dst->value.constant = src->value.constant;
/* Set JFUNC to be a constant jump function with value CONSTANT.  CS is the
   edge taking the constant; when CONSTANT is the address of a function, a
   reference description is allocated and attached so that references can be
   tracked as edges are cloned and removed.  */
static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  /* NOTE(review): unshare_expr_without_location also unshares and drops
     locations, which makes the two statements above look redundant —
     confirm before simplifying.  */
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;
      /* The pool is created lazily on first use.  */
      if (!ipa_refdesc_pool)
	ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
					sizeof (struct ipa_cst_ref_desc), 32);

      rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
425 /* Set JFUNC to be a simple pass-through jump function. */
426 static void
427 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
428 bool agg_preserved, bool type_preserved)
430 jfunc->type = IPA_JF_PASS_THROUGH;
431 jfunc->value.pass_through.operand = NULL_TREE;
432 jfunc->value.pass_through.formal_id = formal_id;
433 jfunc->value.pass_through.operation = NOP_EXPR;
434 jfunc->value.pass_through.agg_preserved = agg_preserved;
435 jfunc->value.pass_through.type_preserved = type_preserved;
438 /* Set JFUNC to be an arithmetic pass through jump function. */
440 static void
441 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
442 tree operand, enum tree_code operation)
444 jfunc->type = IPA_JF_PASS_THROUGH;
445 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
446 jfunc->value.pass_through.formal_id = formal_id;
447 jfunc->value.pass_through.operation = operation;
448 jfunc->value.pass_through.agg_preserved = false;
449 jfunc->value.pass_through.type_preserved = false;
452 /* Set JFUNC to be an ancestor jump function. */
454 static void
455 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
456 tree type, int formal_id, bool agg_preserved,
457 bool type_preserved)
459 jfunc->type = IPA_JF_ANCESTOR;
460 jfunc->value.ancestor.formal_id = formal_id;
461 jfunc->value.ancestor.offset = offset;
462 jfunc->value.ancestor.type = type;
463 jfunc->value.ancestor.agg_preserved = agg_preserved;
464 jfunc->value.ancestor.type_preserved = type_preserved;
/* Extract the actual BINFO being described by JFUNC which must be a known type
   jump function.  Returns NULL_TREE if the base type has no binfo.  */
tree
ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
{
  tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
  if (!base_binfo)
    return NULL_TREE;
  return get_binfo_at_offset (base_binfo,
			      jfunc->value.known_type.offset,
			      jfunc->value.known_type.component_type);
}
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */
struct type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructor of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can
   try to derive the new type.  That is enough and we can stop, we will never
   see the calls into constructors of sub-objects in this code.  Therefore we
   can safely ignore all call statements that we traverse.  */
static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  /* Calls are ignored per the reasoning in the comment above.  */
  if (is_gimple_call (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      /* Aggregate stores may overwrite an embedded VMT pointer, so they
	 cannot be filtered out; only scalar stores are examined further.  */
      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  /* Under strict aliasing, a non-pointer scalar store cannot
	     legally change a VMT pointer.  */
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  /* A store to a named, non-virtual field is not a VMT store.  */
	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  /* Anything not proven harmless is conservatively assumed to be a
     potential VMT pointer store.  */
  return true;
}
/* If STMT can be proved to be an assignment to the virtual method table
   pointer of the object tracked by TCI and the type associated with the new
   table can be identified, return that type.  Otherwise return NULL_TREE.  */
static tree
extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
{
  HOST_WIDE_INT offset, size, max_size;
  tree lhs, rhs, base;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  /* A VMT store looks like vfield = &vtable: a store of an address into a
     virtual COMPONENT_REF field.  */
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
      || TREE_CODE (rhs) != ADDR_EXPR)
    return NULL_TREE;
  rhs = get_base_address (TREE_OPERAND (rhs, 0));
  if (!rhs
      || TREE_CODE (rhs) != VAR_DECL
      || !DECL_VIRTUAL_P (rhs))
    return NULL_TREE;

  /* The store must be exactly at the tracked offset and of pointer size.  */
  base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
  if (offset != tci->offset
      || size != POINTER_SIZE
      || max_size != POINTER_SIZE)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      /* Both accesses must go through the same pointer with the same
	 constant offset to be known to refer to the same object.  */
      if (TREE_CODE (tci->object) != MEM_REF
	  || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
	  || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
				  TREE_OPERAND (base, 1)))
	return NULL_TREE;
    }
  else if (tci->object != base)
    return NULL_TREE;

  /* The type whose vtable is being stored is the context of the vtable
     declaration.  */
  return DECL_CONTEXT (rhs);
}
/* Callback of walk_aliased_vdefs and a helper function for
   detect_type_change to check whether a particular statement may modify
   the virtual table pointer, and if possible also determine the new type of
   the (sub-)object.  It stores its result into DATA, which points to a
   type_change_info structure.  Returning true terminates the walk.  */
static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct type_change_info *tci = (struct type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tree type;
      type = extr_type_from_vtbl_ptr_store (stmt, tci);
      /* Different types on different paths make the extracted type
	 unusable; record that fact.  */
      if (tci->type_maybe_changed
	  && type != tci->known_current_type)
	tci->multiple_types_encountered = true;
      tci->known_current_type = type;
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */
static bool
detect_type_change (tree arg, tree base, tree comp_type, gimple call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  struct type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));
  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (comp_type)
      || !BINFO_VTABLE (TYPE_BINFO (comp_type)))
    return false;

  /* Narrow the reference to the pointer-sized slot holding the VMT pointer
     so the alias walk only reports stores that could clobber it.  */
  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.known_current_type = NULL_TREE;
  tci.type_maybe_changed = false;
  tci.multiple_types_encountered = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL);
  if (!tci.type_maybe_changed)
    return false;

  /* A single consistent new type at offset zero yields a known-type jump
     function; anything else degrades to unknown.  */
  if (!tci.known_current_type
      || tci.multiple_types_encountered
      || offset != 0)
    jfunc->type = IPA_JF_UNKNOWN;
  else
    ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);

  return true;
}
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */
static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gimple call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  /* Build a MEM_REF representing *arg with zero offset to serve as both the
     access expression and its base.  */
  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change (arg, arg, comp_type, call, jfunc, 0);
}
711 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
712 boolean variable pointed to by DATA. */
714 static bool
715 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
716 void *data)
718 bool *b = (bool *) data;
719 *b = true;
720 return true;
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  PARM_AINFO is a pointer to a structure containing temporary
   information about the parameter; it may be NULL, in which case nothing is
   cached.  */
static bool
parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
			      gimple stmt, tree parm_load)
{
  bool modified = false;
  bitmap *visited_stmts;
  ao_ref refd;

  /* A previously detected modification is a cached negative answer.  */
  if (parm_ainfo && parm_ainfo->parm_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  /* We can cache visited statements only when parm_ainfo is available and when
     we are looking at a naked load of the whole parameter.  */
  if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
    visited_stmts = NULL;
  else
    visited_stmts = &parm_ainfo->parm_visited_statements;
  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
		      visited_stmts);
  if (parm_ainfo && modified)
    parm_ainfo->parm_modified = true;
  return !modified;
}
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  PARMS_AINFO may be NULL, in which case no
   caching of the preservation analysis happens.  */
static int
load_from_unmodified_param (vec<ipa_param_descriptor_t> descriptors,
			    struct param_analysis_info *parms_ainfo,
			    gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
					: NULL, stmt, op1))
    return -1;

  return index;
}
/* Return true if memory reference REF loads data that are known to be
   unmodified in this function before reaching statement STMT.  PARM_AINFO, if
   non-NULL, is a pointer to a structure containing temporary information about
   PARM.  */
static bool
parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
			   gimple stmt, tree ref)
{
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (gimple_vuse (stmt));
  /* A previously detected modification is a cached negative answer.  */
  if (parm_ainfo && parm_ainfo->ref_modified)
    return false;

  ao_ref_init (&refd, ref);
  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
		      NULL);
  if (parm_ainfo && modified)
    parm_ainfo->ref_modified = true;
  return !modified;
}
806 /* Return true if the data pointed to by PARM is known to be unmodified in this
807 function before reaching call statement CALL into which it is passed.
808 PARM_AINFO is a pointer to a structure containing temporary information
809 about PARM. */
811 static bool
812 parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
813 gimple call, tree parm)
815 bool modified = false;
816 ao_ref refd;
818 /* It's unnecessary to calculate anything about memory contnets for a const
819 function because it is not goin to use it. But do not cache the result
820 either. Also, no such calculations for non-pointers. */
821 if (!gimple_vuse (call)
822 || !POINTER_TYPE_P (TREE_TYPE (parm)))
823 return false;
825 if (parm_ainfo->pt_modified)
826 return false;
828 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
829 walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
830 parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
831 if (modified)
832 parm_ainfo->pt_modified = true;
833 return !modified;
836 /* Return true if we can prove that OP is a memory reference loading unmodified
837 data from an aggregate passed as a parameter and if the aggregate is passed
838 by reference, that the alias type of the load corresponds to the type of the
839 formal parameter (so that we can rely on this type for TBAA in callers).
840 INFO and PARMS_AINFO describe parameters of the current function (but the
841 latter can be NULL), STMT is the load statement. If function returns true,
842 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
843 within the aggregate and whether it is a load from a value passed by
844 reference respectively. */
846 static bool
847 ipa_load_from_parm_agg_1 (vec<ipa_param_descriptor_t> descriptors,
848 struct param_analysis_info *parms_ainfo, gimple stmt,
849 tree op, int *index_p, HOST_WIDE_INT *offset_p,
850 bool *by_ref_p)
852 int index;
853 HOST_WIDE_INT size, max_size;
854 tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);
856 if (max_size == -1 || max_size != size || *offset_p < 0)
857 return false;
859 if (DECL_P (base))
861 int index = ipa_get_param_decl_index_1 (descriptors, base);
862 if (index >= 0
863 && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
864 : NULL, stmt, op))
866 *index_p = index;
867 *by_ref_p = false;
868 return true;
870 return false;
873 if (TREE_CODE (base) != MEM_REF
874 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
875 || !integer_zerop (TREE_OPERAND (base, 1)))
876 return false;
878 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
880 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
881 index = ipa_get_param_decl_index_1 (descriptors, parm);
883 else
885 /* This branch catches situations where a pointer parameter is not a
886 gimple register, for example:
888 void hip7(S*) (struct S * p)
890 void (*<T2e4>) (struct S *) D.1867;
891 struct S * p.1;
893 <bb 2>:
894 p.1_1 = p;
895 D.1867_2 = p.1_1->f;
896 D.1867_2 ();
897 gdp = &p;
900 gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
901 index = load_from_unmodified_param (descriptors, parms_ainfo, def);
904 if (index >= 0
905 && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
906 stmt, op))
908 *index_p = index;
909 *by_ref_p = true;
910 return true;
912 return false;
/* Just like ipa_load_from_parm_agg_1, just without the param_analysis_info
   pointer, for users outside of this file (no caching of the preservation
   analysis).  */
bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
			tree op, int *index_p, HOST_WIDE_INT *offset_p,
			bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (info->descriptors, NULL, stmt, op, index_p,
				   offset_p, by_ref_p);
}
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

   foo (int a)
   {
     int a.0;

     a.0_2 = a;
     bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

   foo (int a)
   {
     int D.2064;

     D.2064_4 = a.1(D) + 4;
     bar (D.2064_4);

   This case can also occur in combination of the previous one, e.g.:

   foo (int a, int z)
   {
     int a.0;
     int D.2064;

     a.0_3 = a;
     D.2064_4 = a.0_3 + 4;
     foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

   B::foo() (struct B * const this)
   {
     struct A * D.1845;

     D.1845_2 = &this_1(D)->D.1748;
     A::bar (D.1845_2);

   INFO is the structure describing individual parameters access different
   stages of IPA optimizations.  PARMS_AINFO contains the information that is
   only needed for intraprocedural analysis.  */
static void
compute_complex_assign_jump_func (struct ipa_node_params *info,
				  struct param_analysis_info *parms_ainfo,
				  struct ipa_jump_func *jfunc,
				  gimple call, gimple stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      /* Either the argument is the parameter's default definition directly,
	 or it was loaded from the (unmodified) parameter earlier.  */
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (info->descriptors, parms_ainfo,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (info->descriptors, parms_ainfo, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
	{
	  /* Case 2: arithmetic pass-through.  The second operand must be an
	     interprocedural invariant and, except for comparisons, the types
	     must match up.  */
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  ipa_set_jf_arith_pass_through (jfunc, index, op2,
					 gimple_assign_rhs_code (stmt));
	}
      else if (gimple_assign_single_p (stmt))
	{
	  /* Case 1: simple pass-through.  */
	  bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
						     call, tc_ssa);
	  bool type_p = false;

	  if (param_type && POINTER_TYPE_P (param_type))
	    type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
					      call, jfunc);
	  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
	}
      return;
    }

  /* Case 3: ancestor jump function — the argument must be the address of a
     record component within a parameter.  */
  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).low * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    {
      bool type_p = !detect_type_change (op1, base, TREE_TYPE (param_type),
					 call, jfunc, offset);
      if (type_p || jfunc->type == IPA_JF_UNKNOWN)
	ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index,
			     parm_ref_data_pass_through_p (&parms_ainfo[index],
							   call, ssa), type_p);
    }
}
1069 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1070 it looks like:
1072 iftmp.1_3 = &obj_2(D)->D.1762;
1074 The base of the MEM_REF must be a default definition SSA NAME of a
1075 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
1076 whole MEM_REF expression is returned and the offset calculated from any
1077 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1078 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1080 static tree
1081 get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
1083 HOST_WIDE_INT size, max_size;
1084 tree expr, parm, obj;
1086 if (!gimple_assign_single_p (assign))
1087 return NULL_TREE;
1088 expr = gimple_assign_rhs1 (assign);
1090 if (TREE_CODE (expr) != ADDR_EXPR)
1091 return NULL_TREE;
/* Strip the ADDR_EXPR; OBJ keeps the whole referenced object.  */
1092 expr = TREE_OPERAND (expr, 0);
1093 obj = expr;
1094 expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
1096 if (TREE_CODE (expr) != MEM_REF
1097 /* If this is a varying address, punt. */
1098 || max_size == -1
1099 || max_size != size
1100 || *offset < 0)
1101 return NULL_TREE;
/* The base of the MEM_REF must be an unmodified (default-definition)
   PARM_DECL SSA name.  */
1102 parm = TREE_OPERAND (expr, 0);
1103 if (TREE_CODE (parm) != SSA_NAME
1104 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1105 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1106 return NULL_TREE;
/* Fold the MEM_REF's own byte offset into the accumulated bit offset.  */
1108 *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
1109 *obj_p = obj;
1110 return expr;
1114 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1115 statement PHI, try to find out whether NAME is in fact a
1116 multiple-inheritance typecast from a descendant into an ancestor of a formal
1117 parameter and thus can be described by an ancestor jump function and if so,
1118 write the appropriate function into JFUNC.
1120 Essentially we want to match the following pattern:
1122 if (obj_2(D) != 0B)
1123 goto <bb 3>;
1124 else
1125 goto <bb 4>;
1127 <bb 3>:
1128 iftmp.1_3 = &obj_2(D)->D.1762;
1130 <bb 4>:
1131 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1132 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1133 return D.1879_6; */
1135 static void
1136 compute_complex_ancestor_jump_func (struct ipa_node_params *info,
1137 struct param_analysis_info *parms_ainfo,
1138 struct ipa_jump_func *jfunc,
1139 gimple call, gimple phi, tree param_type)
1141 HOST_WIDE_INT offset;
1142 gimple assign, cond;
1143 basic_block phi_bb, assign_bb, cond_bb;
1144 tree tmp, parm, expr, obj;
1145 int index, i;
/* The PHI must merge exactly two values, one of which is a zero
   constant; TMP is the other, non-NULL, one.  */
1147 if (gimple_phi_num_args (phi) != 2)
1148 return;
1150 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1151 tmp = PHI_ARG_DEF (phi, 0);
1152 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1153 tmp = PHI_ARG_DEF (phi, 1);
1154 else
1155 return;
1156 if (TREE_CODE (tmp) != SSA_NAME
1157 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1158 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1159 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1160 return;
/* The non-NULL value must come from an ancestor-address computation
   based on a formal parameter.  */
1162 assign = SSA_NAME_DEF_STMT (tmp);
1163 assign_bb = gimple_bb (assign);
1164 if (!single_pred_p (assign_bb))
1165 return;
1166 expr = get_ancestor_addr_info (assign, &obj, &offset);
1167 if (!expr)
1168 return;
1169 parm = TREE_OPERAND (expr, 0);
1170 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1171 gcc_assert (index >= 0);
/* The guarding condition must be exactly "parm != 0".  */
1173 cond_bb = single_pred (assign_bb);
1174 cond = last_stmt (cond_bb);
1175 if (!cond
1176 || gimple_code (cond) != GIMPLE_COND
1177 || gimple_cond_code (cond) != NE_EXPR
1178 || gimple_cond_lhs (cond) != parm
1179 || !integer_zerop (gimple_cond_rhs (cond)))
1180 return;
/* Both PHI predecessors must belong to the matched diamond.  */
1182 phi_bb = gimple_bb (phi);
1183 for (i = 0; i < 2; i++)
1185 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1186 if (pred != assign_bb && pred != cond_bb)
1187 return;
1190 bool type_p = false;
1191 if (param_type && POINTER_TYPE_P (param_type))
1192 type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
1193 call, jfunc, offset);
1194 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1195 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index,
1196 parm_ref_data_pass_through_p (&parms_ainfo[index],
1197 call, parm), type_p);
1200 /* Given OP which is passed as an actual argument to a called function,
1201 determine if it is possible to construct a KNOWN_TYPE jump function for it
1202 and if so, create one and store it to JFUNC.
1203 EXPECTED_TYPE represents a type the argument should be in */
1205 static void
1206 compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
1207 gimple call, tree expected_type)
1209 HOST_WIDE_INT offset, size, max_size;
1210 tree base;
/* Only useful with -fdevirtualize, for addresses of records, and when
   EXPECTED_TYPE is a polymorphic record (has a vtable).  */
1212 if (!flag_devirtualize
1213 || TREE_CODE (op) != ADDR_EXPR
1214 || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE
1215 /* Be sure expected_type is polymorphic. */
1216 || !expected_type
1217 || TREE_CODE (expected_type) != RECORD_TYPE
1218 || !TYPE_BINFO (expected_type)
1219 || !BINFO_VTABLE (TYPE_BINFO (expected_type)))
1220 return;
/* The address must refer to a well-defined part of a local record whose
   type is therefore known statically.  */
1222 op = TREE_OPERAND (op, 0);
1223 base = get_ref_base_and_extent (op, &offset, &size, &max_size);
1224 if (!DECL_P (base)
1225 || max_size == -1
1226 || max_size != size
1227 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
1228 || is_global_var (base))
1229 return;
/* Give up if the dynamic type may have changed between the definition
   and the call.  */
1231 if (detect_type_change (op, base, expected_type, call, jfunc, offset))
1232 return;
1234 ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
1235 expected_type);
1238 /* Inspect the given TYPE and return true iff it has the same structure (the
1239 same number of fields of the same types) as a C++ member pointer. If
1240 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1241 corresponding fields there. */
1243 static bool
1244 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1246 tree fld;
1248 if (TREE_CODE (type) != RECORD_TYPE)
1249 return false;
1251 fld = TYPE_FIELDS (type);
1252 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1253 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1254 || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
1255 return false;
1257 if (method_ptr)
1258 *method_ptr = fld;
1260 fld = DECL_CHAIN (fld);
1261 if (!fld || INTEGRAL_TYPE_P (fld)
1262 || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
1263 return false;
1264 if (delta)
1265 *delta = fld;
1267 if (DECL_CHAIN (fld))
1268 return false;
1270 return true;
1273 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1274 return the rhs of its defining statement. Otherwise return RHS as it
1275 is. */
1277 static inline tree
1278 get_ssa_def_if_simple_copy (tree rhs)
1280 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1282 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1284 if (gimple_assign_single_p (def_stmt))
1285 rhs = gimple_assign_rhs1 (def_stmt);
1286 else
1287 break;
1289 return rhs;
1292 /* Simple linked list, describing known contents of an aggregate before a
1293 call. */
1295 struct ipa_known_agg_contents_list
1297 /* Offset and size of the described part of the aggregate. */
1298 HOST_WIDE_INT offset, size;
1299 /* Known constant value or NULL if the contents is known to be unknown. */
1300 tree constant;
1301 /* Pointer to the next structure in the list. */
1302 struct ipa_known_agg_contents_list *next;
1305 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1306 in ARG is filled in with constant values. ARG can either be an aggregate
1307 expression or a pointer to an aggregate. JFUNC is the jump function into
1308 which the constants are subsequently stored. */
1310 static void
1311 determine_known_aggregate_parts (gimple call, tree arg,
1312 struct ipa_jump_func *jfunc)
1314 struct ipa_known_agg_contents_list *list = NULL;
1315 int item_count = 0, const_count = 0;
1316 HOST_WIDE_INT arg_offset, arg_size;
1317 gimple_stmt_iterator gsi;
1318 tree arg_base;
1319 bool check_ref, by_ref;
1320 ao_ref r;
1322 /* The function operates in three stages. First, we prepare check_ref, r,
1323 arg_base and arg_offset based on what is actually passed as an actual
1324 argument. */
1326 if (POINTER_TYPE_P (TREE_TYPE (arg)))
/* The aggregate is passed by reference.  */
1328 by_ref = true;
1329 if (TREE_CODE (arg) == SSA_NAME)
/* A plain pointer SSA name - the extent is the pointed-to type's size.  */
1331 tree type_size;
1332 if (!host_integerp (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg))), 1))
1333 return;
1334 check_ref = true;
1335 arg_base = arg;
1336 arg_offset = 0;
1337 type_size = TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)));
1338 arg_size = tree_low_cst (type_size, 1);
1339 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1341 else if (TREE_CODE (arg) == ADDR_EXPR)
/* The address of a decl or a part of one - only a DECL_P base is
   handled, anything else is punted on.  */
1343 HOST_WIDE_INT arg_max_size;
1345 arg = TREE_OPERAND (arg, 0);
1346 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1347 &arg_max_size);
1348 if (arg_max_size == -1
1349 || arg_max_size != arg_size
1350 || arg_offset < 0)
1351 return;
1352 if (DECL_P (arg_base))
1354 tree size;
1355 check_ref = false;
1356 size = build_int_cst (integer_type_node, arg_size);
1357 ao_ref_init_from_ptr_and_size (&r, arg_base, size);
1359 else
1360 return;
1362 else
1363 return;
1365 else
/* The aggregate is passed by value.  */
1367 HOST_WIDE_INT arg_max_size;
1369 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1371 by_ref = false;
1372 check_ref = false;
1373 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1374 &arg_max_size);
1375 if (arg_max_size == -1
1376 || arg_max_size != arg_size
1377 || arg_offset < 0)
1378 return;
1380 ao_ref_init (&r, arg);
1383 /* Second stage walks back the BB, looks at individual statements and as long
1384 as it is confident of how the statements affect contents of the
1385 aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
1386 describing it. */
1387 gsi = gsi_for_stmt (call);
1388 gsi_prev (&gsi);
1389 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1391 struct ipa_known_agg_contents_list *n, **p;
1392 gimple stmt = gsi_stmt (gsi);
1393 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1394 tree lhs, rhs, lhs_base;
1395 bool partial_overlap;
/* Statements that cannot clobber the tracked memory are irrelevant;
   any clobber that is not a simple scalar store stops the walk.  */
1397 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1398 continue;
1399 if (!gimple_assign_single_p (stmt))
1400 break;
1402 lhs = gimple_assign_lhs (stmt);
1403 rhs = gimple_assign_rhs1 (stmt);
1404 if (!is_gimple_reg_type (rhs)
1405 || TREE_CODE (lhs) == BIT_FIELD_REF
1406 || contains_bitfld_component_ref_p (lhs))
1407 break;
/* Stores straddling the boundary of the tracked region cannot be
   represented.  */
1409 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1410 &lhs_max_size);
1411 if (lhs_max_size == -1
1412 || lhs_max_size != lhs_size
1413 || (lhs_offset < arg_offset
1414 && lhs_offset + lhs_size > arg_offset)
1415 || (lhs_offset < arg_offset + arg_size
1416 && lhs_offset + lhs_size > arg_offset + arg_size))
1417 break;
1419 if (check_ref)
/* By-reference case: only stores through the very same pointer at
   offset zero are understood.  */
1421 if (TREE_CODE (lhs_base) != MEM_REF
1422 || TREE_OPERAND (lhs_base, 0) != arg_base
1423 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1424 break;
1426 else if (lhs_base != arg_base)
1428 if (DECL_P (lhs_base))
1429 continue;
1430 else
1431 break;
/* Ignore stores entirely outside the tracked region.  */
1434 if (lhs_offset + lhs_size < arg_offset
1435 || lhs_offset >= (arg_offset + arg_size))
1436 continue;
/* Find the insertion point in the offset-sorted list, refusing any
   partial overlap with already-recorded entries.  */
1438 partial_overlap = false;
1439 p = &list;
1440 while (*p && (*p)->offset < lhs_offset)
1442 if ((*p)->offset + (*p)->size > lhs_offset)
1444 partial_overlap = true;
1445 break;
1447 p = &(*p)->next;
1449 if (partial_overlap)
1450 break;
1451 if (*p && (*p)->offset < lhs_offset + lhs_size)
1453 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1454 /* We already know this value is subsequently overwritten with
1455 something else. */
1456 continue;
1457 else
1458 /* Otherwise this is a partial overlap which we cannot
1459 represent. */
1460 break;
/* Record the store; non-invariant values are kept as NULL markers so
   that later (earlier in the BB) stores to the same spot are known to
   be dead.  */
1463 rhs = get_ssa_def_if_simple_copy (rhs);
1464 n = XALLOCA (struct ipa_known_agg_contents_list);
1465 n->size = lhs_size;
1466 n->offset = lhs_offset;
1467 if (is_gimple_ip_invariant (rhs))
1469 n->constant = rhs;
1470 const_count++;
1472 else
1473 n->constant = NULL_TREE;
1474 n->next = *p;
1475 *p = n;
1477 item_count++;
1478 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1479 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1480 break;
1483 /* Third stage just goes over the list and creates an appropriate vector of
1484 ipa_agg_jf_item structures out of it, of course only if there are
1485 any known constants to begin with. */
1487 if (const_count)
1489 jfunc->agg.by_ref = by_ref;
1490 vec_alloc (jfunc->agg.items, const_count);
1491 while (list)
1493 if (list->constant)
1495 struct ipa_agg_jf_item item;
1496 item.offset = list->offset - arg_offset;
1497 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1498 item.value = unshare_expr_without_location (list->constant);
1499 jfunc->agg.items->quick_push (item);
1501 list = list->next;
1506 static tree
1507 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1509 int n;
1510 tree type = (e->callee
1511 ? TREE_TYPE (e->callee->symbol.decl)
1512 : gimple_call_fntype (e->call_stmt));
1513 tree t = TYPE_ARG_TYPES (type);
1515 for (n = 0; n < i; n++)
1517 if (!t)
1518 break;
1519 t = TREE_CHAIN (t);
1521 if (t)
1522 return TREE_VALUE (t);
1523 if (!e->callee)
1524 return NULL;
1525 t = DECL_ARGUMENTS (e->callee->symbol.decl);
1526 for (n = 0; n < i; n++)
1528 if (!t)
1529 return NULL;
1530 t = TREE_CHAIN (t);
1532 if (t)
1533 return TREE_TYPE (t);
1534 return NULL;
1537 /* Compute jump function for all arguments of callsite CS and insert the
1538 information in the jump_functions array in the ipa_edge_args corresponding
1539 to this callsite. */
1541 static void
1542 ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
1543 struct cgraph_edge *cs)
1545 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1546 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1547 gimple call = cs->call_stmt;
1548 int n, arg_num = gimple_call_num_args (call);
/* Nothing to do for argument-less calls or when jump functions have
   already been computed for this edge.  */
1550 if (arg_num == 0 || args->jump_functions)
1551 return;
1552 vec_safe_grow_cleared (args->jump_functions, arg_num);
/* The vector is left allocated but all entries stay IPA_JF_UNKNOWN for
   internal calls and for callers we must not analyze.  */
1554 if (gimple_call_internal_p (call))
1555 return;
1556 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1557 return;
1559 for (n = 0; n < arg_num; n++)
1561 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1562 tree arg = gimple_call_arg (call, n);
1563 tree param_type = ipa_get_callee_param_type (cs, n);
/* Dispatch on the form of the actual argument: constant, aggregate
   PARM_DECL, SSA name, or something only usable for known-type info.  */
1565 if (is_gimple_ip_invariant (arg))
1566 ipa_set_jf_constant (jfunc, arg, cs);
1567 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1568 && TREE_CODE (arg) == PARM_DECL)
1570 int index = ipa_get_param_decl_index (info, arg);
1572 gcc_assert (index >=0);
1573 /* Aggregate passed by value, check for pass-through, otherwise we
1574 will attempt to fill in aggregate contents later in this
1575 for cycle. */
1576 if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
1578 ipa_set_jf_simple_pass_through (jfunc, index, false, false);
1579 continue;
1582 else if (TREE_CODE (arg) == SSA_NAME)
1584 if (SSA_NAME_IS_DEFAULT_DEF (arg))
/* The parameter value is passed along unchanged.  */
1586 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1587 if (index >= 0)
1589 bool agg_p, type_p;
1590 agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
1591 call, arg);
1592 if (param_type && POINTER_TYPE_P (param_type))
1593 type_p = !detect_type_change_ssa (arg, TREE_TYPE (param_type),
1594 call, jfunc);
1595 else
1596 type_p = false;
1597 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1598 ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
1599 type_p);
1602 else
/* A derived value - try the complex assign and ancestor matchers.  */
1604 gimple stmt = SSA_NAME_DEF_STMT (arg);
1605 if (is_gimple_assign (stmt))
1606 compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
1607 call, stmt, arg, param_type);
1608 else if (gimple_code (stmt) == GIMPLE_PHI)
1609 compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
1610 call, stmt, param_type);
1613 else
1614 compute_known_type_jump_func (arg, jfunc, call,
1615 param_type
1616 && POINTER_TYPE_P (param_type)
1617 ? TREE_TYPE (param_type)
1618 : NULL);
/* Unless the jump function already guarantees the aggregate contents
   are preserved, try to record known constant parts of the aggregate
   the argument refers to.  */
1620 if ((jfunc->type != IPA_JF_PASS_THROUGH
1621 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1622 && (jfunc->type != IPA_JF_ANCESTOR
1623 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1624 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1625 || (POINTER_TYPE_P (TREE_TYPE (arg)))))
1626 determine_known_aggregate_parts (call, arg, jfunc);
1630 /* Compute jump functions for all edges - both direct and indirect - outgoing
1631 from NODE. Also count the actual arguments in the process. */
1633 static void
1634 ipa_compute_jump_functions (struct cgraph_node *node,
1635 struct param_analysis_info *parms_ainfo)
1637 struct cgraph_edge *cs;
1639 for (cs = node->callees; cs; cs = cs->next_callee)
1641 struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
1642 NULL);
1643 /* We do not need to bother analyzing calls to unknown
1644 functions unless they may become known during lto/whopr. */
1645 if (!callee->symbol.definition && !flag_lto)
1646 continue;
1647 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
1650 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
1651 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
1654 /* If STMT looks like a statement loading a value from a member pointer formal
1655 parameter, return that parameter and store the offset of the field to
1656 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1657 might be clobbered). If USE_DELTA, then we look for a use of the delta
1658 field rather than the pfn. */
1660 static tree
1661 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
1662 HOST_WIDE_INT *offset_p)
1664 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1666 if (!gimple_assign_single_p (stmt))
1667 return NULL_TREE;
/* The load may be in COMPONENT_REF form (rec.field) or in MEM_REF form
   (MEM[&rec + offset]); remember which field was named, if any.  */
1669 rhs = gimple_assign_rhs1 (stmt);
1670 if (TREE_CODE (rhs) == COMPONENT_REF)
1672 ref_field = TREE_OPERAND (rhs, 1);
1673 rhs = TREE_OPERAND (rhs, 0);
1675 else
1676 ref_field = NULL_TREE;
1677 if (TREE_CODE (rhs) != MEM_REF)
1678 return NULL_TREE;
/* The loaded object must be a PARM_DECL with the structure of a C++
   member pointer.  */
1679 rec = TREE_OPERAND (rhs, 0);
1680 if (TREE_CODE (rec) != ADDR_EXPR)
1681 return NULL_TREE;
1682 rec = TREE_OPERAND (rec, 0);
1683 if (TREE_CODE (rec) != PARM_DECL
1684 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1685 return NULL_TREE;
1686 ref_offset = TREE_OPERAND (rhs, 1);
1688 if (use_delta)
1689 fld = delta_field;
1690 else
1691 fld = ptr_field;
1692 if (offset_p)
1693 *offset_p = int_bit_position (fld);
/* Finally check that the requested field is indeed the one loaded,
   either by field identity or by byte position.  */
1695 if (ref_field)
1697 if (integer_nonzerop (ref_offset))
1698 return NULL_TREE;
1699 return ref_field == fld ? rec : NULL_TREE;
1701 else
1702 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1703 : NULL_TREE;
1706 /* Returns true iff T is an SSA_NAME defined by a statement. */
1708 static bool
1709 ipa_is_ssa_with_stmt_def (tree t)
1711 if (TREE_CODE (t) == SSA_NAME
1712 && !SSA_NAME_IS_DEFAULT_DEF (t))
1713 return true;
1714 else
1715 return false;
1718 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1719 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1720 indirect call graph edge. */
1722 static struct cgraph_edge *
1723 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
1725 struct cgraph_edge *cs;
1727 cs = cgraph_edge (node, stmt);
1728 cs->indirect_info->param_index = param_index;
1729 cs->indirect_info->offset = 0;
1730 cs->indirect_info->polymorphic = 0;
1731 cs->indirect_info->agg_contents = 0;
1732 cs->indirect_info->member_ptr = 0;
1733 return cs;
1736 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1737 (described by INFO). PARMS_AINFO is a pointer to a vector containing
1738 intermediate information about each formal parameter. Currently it checks
1739 whether the call calls a pointer that is a formal parameter and if so, the
1740 parameter is marked with the called flag and an indirect call graph edge
1741 describing the call is created. This is very simple for ordinary pointers
1742 represented in SSA but not-so-nice when it comes to member pointers. The
1743 ugly part of this function does nothing more than trying to match the
1744 pattern of such a call. An example of such a pattern is the gimple dump
1745 below, the call is on the last line:
1747 <bb 2>:
1748 f$__delta_5 = f.__delta;
1749 f$__pfn_24 = f.__pfn;
1752 <bb 2>:
1753 f$__delta_5 = MEM[(struct *)&f];
1754 f$__pfn_24 = MEM[(struct *)&f + 4B];
1756 and a few lines below:
1758 <bb 5>
1759 D.2496_3 = (int) f$__pfn_24;
1760 D.2497_4 = D.2496_3 & 1;
1761 if (D.2497_4 != 0)
1762 goto <bb 3>;
1763 else
1764 goto <bb 4>;
1766 <bb 6>:
1767 D.2500_7 = (unsigned int) f$__delta_5;
1768 D.2501_8 = &S + D.2500_7;
1769 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1770 D.2503_10 = *D.2502_9;
1771 D.2504_12 = f$__pfn_24 + -1;
1772 D.2505_13 = (unsigned int) D.2504_12;
1773 D.2506_14 = D.2503_10 + D.2505_13;
1774 D.2507_15 = *D.2506_14;
1775 iftmp.11_16 = (String:: *) D.2507_15;
1777 <bb 7>:
1778 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1779 D.2500_19 = (unsigned int) f$__delta_5;
1780 D.2508_20 = &S + D.2500_19;
1781 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1783 Such patterns are results of simple calls to a member pointer:
1785 int doprinting (int (MyString::* f)(int) const)
1787 MyString S ("somestring");
1789 return (S.*f)(4);
1792 Moreover, the function also looks for called pointers loaded from aggregates
1793 passed by value or reference. */
1795 static void
1796 ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1797 struct ipa_node_params *info,
1798 struct param_analysis_info *parms_ainfo,
1799 gimple call, tree target)
1801 gimple def;
1802 tree n1, n2;
1803 gimple d1, d2;
1804 tree rec, rec2, cond;
1805 gimple branch;
1806 int index;
1807 basic_block bb, virt_bb, join;
1808 HOST_WIDE_INT offset;
1809 bool by_ref;
/* Easy case: the called pointer is the value of a formal parameter.  */
1811 if (SSA_NAME_IS_DEFAULT_DEF (target))
1813 tree var = SSA_NAME_VAR (target);
1814 index = ipa_get_param_decl_index (info, var);
1815 if (index >= 0)
1816 ipa_note_param_call (node, index, call);
1817 return;
/* Next case: the called pointer is loaded from an aggregate that is a
   parameter or pointed to by one.  */
1820 def = SSA_NAME_DEF_STMT (target);
1821 if (gimple_assign_single_p (def)
1822 && ipa_load_from_parm_agg_1 (info->descriptors, parms_ainfo, def,
1823 gimple_assign_rhs1 (def), &index, &offset,
1824 &by_ref))
1826 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1827 cs->indirect_info->offset = offset;
1828 cs->indirect_info->agg_contents = 1;
1829 cs->indirect_info->by_ref = by_ref;
1830 return;
1833 /* Now we need to try to match the complex pattern of calling a member
1834 pointer. */
1835 if (gimple_code (def) != GIMPLE_PHI
1836 || gimple_phi_num_args (def) != 2
1837 || !POINTER_TYPE_P (TREE_TYPE (target))
1838 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1839 return;
1841 /* First, we need to check whether one of these is a load from a member
1842 pointer that is a parameter to this function. */
1843 n1 = PHI_ARG_DEF (def, 0);
1844 n2 = PHI_ARG_DEF (def, 1);
1845 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1846 return;
1847 d1 = SSA_NAME_DEF_STMT (n1);
1848 d2 = SSA_NAME_DEF_STMT (n2);
/* Identify which PHI operand is the direct pfn load (REC) and which
   basic block computes the virtual-call branch (VIRT_BB).  */
1850 join = gimple_bb (def);
1851 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
1853 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
1854 return;
1856 bb = EDGE_PRED (join, 0)->src;
1857 virt_bb = gimple_bb (d2);
1859 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
1861 bb = EDGE_PRED (join, 1)->src;
1862 virt_bb = gimple_bb (d1);
1864 else
1865 return;
1867 /* Second, we need to check that the basic blocks are laid out in the way
1868 corresponding to the pattern. */
1870 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1871 || single_pred (virt_bb) != bb
1872 || single_succ (virt_bb) != join)
1873 return;
1875 /* Third, let's see that the branching is done depending on the least
1876 significant bit of the pfn. */
1878 branch = last_stmt (bb);
1879 if (!branch || gimple_code (branch) != GIMPLE_COND)
1880 return;
1882 if ((gimple_cond_code (branch) != NE_EXPR
1883 && gimple_cond_code (branch) != EQ_EXPR)
1884 || !integer_zerop (gimple_cond_rhs (branch)))
1885 return;
/* The condition operand must be (pfn & 1), possibly through
   conversions, computed from the same member pointer.  */
1887 cond = gimple_cond_lhs (branch);
1888 if (!ipa_is_ssa_with_stmt_def (cond))
1889 return;
1891 def = SSA_NAME_DEF_STMT (cond);
1892 if (!is_gimple_assign (def)
1893 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1894 || !integer_onep (gimple_assign_rhs2 (def)))
1895 return;
1897 cond = gimple_assign_rhs1 (def);
1898 if (!ipa_is_ssa_with_stmt_def (cond))
1899 return;
1901 def = SSA_NAME_DEF_STMT (cond);
/* Skip over a possible conversion of the pfn value.  */
1903 if (is_gimple_assign (def)
1904 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
1906 cond = gimple_assign_rhs1 (def);
1907 if (!ipa_is_ssa_with_stmt_def (cond))
1908 return;
1909 def = SSA_NAME_DEF_STMT (cond);
/* The tested value must be loaded from the same member-pointer
   parameter; which field depends on the target's vbit convention.  */
1912 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1913 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1914 == ptrmemfunc_vbit_in_delta),
1915 NULL);
1916 if (rec != rec2)
1917 return;
/* Pattern matched - record the indirect call if the member pointer
   parameter is not modified before the call.  */
1919 index = ipa_get_param_decl_index (info, rec);
1920 if (index >= 0
1921 && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
1923 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1924 cs->indirect_info->offset = offset;
1925 cs->indirect_info->agg_contents = 1;
1926 cs->indirect_info->member_ptr = 1;
1929 return;
1932 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1933 object referenced in the expression is a formal parameter of the caller
1934 (described by INFO), create a call note for the statement. */
1936 static void
1937 ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1938 struct ipa_node_params *info, gimple call,
1939 tree target)
1941 struct cgraph_edge *cs;
1942 struct cgraph_indirect_call_info *ii;
1943 struct ipa_jump_func jfunc;
1944 tree obj = OBJ_TYPE_REF_OBJECT (target);
1945 int index;
1946 HOST_WIDE_INT anc_offset;
1948 if (!flag_devirtualize)
1949 return;
1951 if (TREE_CODE (obj) != SSA_NAME)
1952 return;
1954 if (SSA_NAME_IS_DEFAULT_DEF (obj))
/* The object is directly the value of a formal parameter.  */
1956 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
1957 return;
1959 anc_offset = 0;
1960 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
1961 gcc_assert (index >= 0);
1962 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
1963 call, &jfunc))
1964 return;
1966 else
/* The object may be an ancestor (base sub-object) of a parameter;
   ANC_OFFSET then records its position within it.  */
1968 gimple stmt = SSA_NAME_DEF_STMT (obj);
1969 tree expr;
1971 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
1972 if (!expr)
1973 return;
1974 index = ipa_get_param_decl_index (info,
1975 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
1976 gcc_assert (index >= 0);
1977 if (detect_type_change (obj, expr, obj_type_ref_class (target),
1978 call, &jfunc, anc_offset))
1979 return;
/* Record the polymorphic indirect call on the edge.  */
1982 cs = ipa_note_param_call (node, index, call);
1983 ii = cs->indirect_info;
1984 ii->offset = anc_offset;
1985 ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
1986 ii->otr_type = obj_type_ref_class (target);
1987 ii->polymorphic = 1;
1990 /* Analyze a call statement CALL whether and how it utilizes formal parameters
1991 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
1992 containing intermediate information about each formal parameter. */
1994 static void
1995 ipa_analyze_call_uses (struct cgraph_node *node,
1996 struct ipa_node_params *info,
1997 struct param_analysis_info *parms_ainfo, gimple call)
1999 tree target = gimple_call_fn (call);
2001 if (!target)
2002 return;
2003 if (TREE_CODE (target) == SSA_NAME)
2004 ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
2005 else if (virtual_method_call_p (target))
2006 ipa_analyze_virtual_call_uses (node, info, call, target);
2010 /* Analyze the call statement STMT with respect to formal parameters (described
2011 in INFO) of caller given by NODE. Currently it only checks whether formal
2012 parameters are called. PARMS_AINFO is a pointer to a vector containing
2013 intermediate information about each formal parameter. */
2015 static void
2016 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
2017 struct param_analysis_info *parms_ainfo, gimple stmt)
2019 if (is_gimple_call (stmt))
2020 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
2023 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2024 If OP is a parameter declaration, mark it as used in the info structure
2025 passed in DATA. */
2027 static bool
2028 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
2029 tree op, void *data)
2031 struct ipa_node_params *info = (struct ipa_node_params *) data;
2033 op = get_base_address (op);
2034 if (op
2035 && TREE_CODE (op) == PARM_DECL)
2037 int index = ipa_get_param_decl_index (info, op);
2038 gcc_assert (index >= 0);
2039 ipa_set_param_used (info, index, true);
2042 return false;
2045 /* Scan the function body of NODE and inspect the uses of formal parameters.
2046 Store the findings in various structures of the associated ipa_node_params
2047 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
2048 vector containing intermediate information about each formal parameter. */
2050 static void
2051 ipa_analyze_params_uses (struct cgraph_node *node,
2052 struct param_analysis_info *parms_ainfo)
2054 tree decl = node->symbol.decl;
2055 basic_block bb;
2056 struct function *func;
2057 gimple_stmt_iterator gsi;
2058 struct ipa_node_params *info = IPA_NODE_REF (node);
2059 int i;
2061 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
2062 return;
2064 info->uses_analysis_done = 1;
/* When specialization is forbidden, conservatively mark every parameter
   as used with an undescribed number of uses.  */
2065 if (ipa_func_spec_opts_forbid_analysis_p (node))
2067 for (i = 0; i < ipa_get_param_count (info); i++)
2069 ipa_set_param_used (info, i, true);
2070 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2072 return;
2075 for (i = 0; i < ipa_get_param_count (info); i++)
2077 tree parm = ipa_get_param (info, i);
2078 int controlled_uses = 0;
2080 /* For SSA regs see if parameter is used. For non-SSA we compute
2081 the flag during modification analysis. */
2082 if (is_gimple_reg (parm))
2084 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->symbol.decl),
2085 parm);
2086 if (ddef && !has_zero_uses (ddef))
/* Count uses that are call arguments; any other use makes the
   number of uses undescribable.  */
2088 imm_use_iterator imm_iter;
2089 use_operand_p use_p;
2091 ipa_set_param_used (info, i, true);
2092 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2093 if (!is_gimple_call (USE_STMT (use_p)))
2095 controlled_uses = IPA_UNDESCRIBED_USE;
2096 break;
2098 else
2099 controlled_uses++;
2101 else
2102 controlled_uses = 0;
2104 else
2105 controlled_uses = IPA_UNDESCRIBED_USE;
2106 ipa_set_controlled_uses (info, i, controlled_uses);
/* Walk all statements and PHIs, marking parameters referenced by loads,
   stores or address-takings as used.  */
2109 func = DECL_STRUCT_FUNCTION (decl);
2110 FOR_EACH_BB_FN (bb, func)
2112 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2114 gimple stmt = gsi_stmt (gsi);
2116 if (is_gimple_debug (stmt))
2117 continue;
2119 ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
2120 walk_stmt_load_store_addr_ops (stmt, info,
2121 visit_ref_for_mod_analysis,
2122 visit_ref_for_mod_analysis,
2123 visit_ref_for_mod_analysis);
2125 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2126 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
2127 visit_ref_for_mod_analysis,
2128 visit_ref_for_mod_analysis,
2129 visit_ref_for_mod_analysis);
2133 /* Free stuff in PARMS_AINFO, assume there are PARAM_COUNT parameters. */
2135 static void
2136 free_parms_ainfo (struct param_analysis_info *parms_ainfo, int param_count)
2138 int i;
2140 for (i = 0; i < param_count; i++)
2142 if (parms_ainfo[i].parm_visited_statements)
2143 BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
2144 if (parms_ainfo[i].pt_visited_statements)
2145 BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
2149 /* Initialize the array describing properties of formal parameters
2150 of NODE, analyze their uses and compute jump functions associated
2151 with actual arguments of calls from within NODE. */
2153 void
2154 ipa_analyze_node (struct cgraph_node *node)
2156 struct ipa_node_params *info;
2157 struct param_analysis_info *parms_ainfo;
2158 int param_count;
2160 ipa_check_create_node_params ();
2161 ipa_check_create_edge_args ();
2162 info = IPA_NODE_REF (node);
2163 push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
2164 ipa_initialize_node_params (node);
2166 param_count = ipa_get_param_count (info);
2167 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
2168 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
2170 ipa_analyze_params_uses (node, parms_ainfo);
2171 ipa_compute_jump_functions (node, parms_ainfo);
2173 free_parms_ainfo (parms_ainfo, param_count);
2174 pop_cfun ();
2177 /* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF
2178 attempt a type-based devirtualization. If successful, return the
2179 target function declaration, otherwise return NULL. */
2181 tree
2182 ipa_intraprocedural_devirtualization (gimple call)
2184 tree binfo, token, fndecl;
2185 struct ipa_jump_func jfunc;
2186 tree otr = gimple_call_fn (call);
2188 jfunc.type = IPA_JF_UNKNOWN;
2189 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
2190 call, obj_type_ref_class (otr));
2191 if (jfunc.type != IPA_JF_KNOWN_TYPE)
2192 return NULL_TREE;
2193 binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
2194 if (!binfo)
2195 return NULL_TREE;
2196 token = OBJ_TYPE_REF_TOKEN (otr);
2197 fndecl = gimple_get_virt_method_for_binfo (tree_low_cst (token, 1),
2198 binfo);
2199 return fndecl;
2202 /* Update the jump function DST when the call graph edge corresponding to SRC
2203 is being inlined, knowing that DST is of type ancestor and src of known
2204 type. */
2206 static void
2207 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2208 struct ipa_jump_func *dst)
2210 HOST_WIDE_INT combined_offset;
2211 tree combined_type;
2213 if (!ipa_get_jf_ancestor_type_preserved (dst))
2215 dst->type = IPA_JF_UNKNOWN;
2216 return;
2219 combined_offset = ipa_get_jf_known_type_offset (src)
2220 + ipa_get_jf_ancestor_offset (dst);
2221 combined_type = ipa_get_jf_ancestor_type (dst);
2223 ipa_set_jf_known_type (dst, combined_offset,
2224 ipa_get_jf_known_type_base_type (src),
2225 combined_type);
2228 /* Update the jump functions associated with call graph edge E when the call
2229 graph edge CS is being inlined, assuming that E->caller is already (possibly
2230 indirectly) inlined into CS->callee and that E has not been inlined. */
2232 static void
2233 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2234 struct cgraph_edge *e)
2236 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2237 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2238 int count = ipa_get_cs_argument_count (args);
2239 int i;
2241 for (i = 0; i < count; i++)
2243 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2245 if (dst->type == IPA_JF_ANCESTOR)
2247 struct ipa_jump_func *src;
2248 int dst_fid = dst->value.ancestor.formal_id;
2250 /* Variable number of arguments can cause havoc if we try to access
2251 one that does not exist in the inlined edge. So make sure we
2252 don't. */
2253 if (dst_fid >= ipa_get_cs_argument_count (top))
2255 dst->type = IPA_JF_UNKNOWN;
2256 continue;
2259 src = ipa_get_ith_jump_func (top, dst_fid);
2261 if (src->agg.items
2262 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2264 struct ipa_agg_jf_item *item;
2265 int j;
2267 /* Currently we do not produce clobber aggregate jump functions,
2268 replace with merging when we do. */
2269 gcc_assert (!dst->agg.items);
2271 dst->agg.items = vec_safe_copy (src->agg.items);
2272 dst->agg.by_ref = src->agg.by_ref;
2273 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2274 item->offset -= dst->value.ancestor.offset;
2277 if (src->type == IPA_JF_KNOWN_TYPE)
2278 combine_known_type_and_ancestor_jfs (src, dst);
2279 else if (src->type == IPA_JF_PASS_THROUGH
2280 && src->value.pass_through.operation == NOP_EXPR)
2282 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2283 dst->value.ancestor.agg_preserved &=
2284 src->value.pass_through.agg_preserved;
2285 dst->value.ancestor.type_preserved &=
2286 src->value.pass_through.type_preserved;
2288 else if (src->type == IPA_JF_ANCESTOR)
2290 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2291 dst->value.ancestor.offset += src->value.ancestor.offset;
2292 dst->value.ancestor.agg_preserved &=
2293 src->value.ancestor.agg_preserved;
2294 dst->value.ancestor.type_preserved &=
2295 src->value.ancestor.type_preserved;
2297 else
2298 dst->type = IPA_JF_UNKNOWN;
2300 else if (dst->type == IPA_JF_PASS_THROUGH)
2302 struct ipa_jump_func *src;
2303 /* We must check range due to calls with variable number of arguments
2304 and we cannot combine jump functions with operations. */
2305 if (dst->value.pass_through.operation == NOP_EXPR
2306 && (dst->value.pass_through.formal_id
2307 < ipa_get_cs_argument_count (top)))
2309 int dst_fid = dst->value.pass_through.formal_id;
2310 src = ipa_get_ith_jump_func (top, dst_fid);
2311 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2313 switch (src->type)
2315 case IPA_JF_UNKNOWN:
2316 dst->type = IPA_JF_UNKNOWN;
2317 break;
2318 case IPA_JF_KNOWN_TYPE:
2319 ipa_set_jf_known_type (dst,
2320 ipa_get_jf_known_type_offset (src),
2321 ipa_get_jf_known_type_base_type (src),
2322 ipa_get_jf_known_type_base_type (src));
2323 break;
2324 case IPA_JF_CONST:
2325 ipa_set_jf_cst_copy (dst, src);
2326 break;
2328 case IPA_JF_PASS_THROUGH:
2330 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2331 enum tree_code operation;
2332 operation = ipa_get_jf_pass_through_operation (src);
2334 if (operation == NOP_EXPR)
2336 bool agg_p, type_p;
2337 agg_p = dst_agg_p
2338 && ipa_get_jf_pass_through_agg_preserved (src);
2339 type_p = ipa_get_jf_pass_through_type_preserved (src)
2340 && ipa_get_jf_pass_through_type_preserved (dst);
2341 ipa_set_jf_simple_pass_through (dst, formal_id,
2342 agg_p, type_p);
2344 else
2346 tree operand = ipa_get_jf_pass_through_operand (src);
2347 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2348 operation);
2350 break;
2352 case IPA_JF_ANCESTOR:
2354 bool agg_p, type_p;
2355 agg_p = dst_agg_p
2356 && ipa_get_jf_ancestor_agg_preserved (src);
2357 type_p = ipa_get_jf_ancestor_type_preserved (src)
2358 && ipa_get_jf_pass_through_type_preserved (dst);
2359 ipa_set_ancestor_jf (dst,
2360 ipa_get_jf_ancestor_offset (src),
2361 ipa_get_jf_ancestor_type (src),
2362 ipa_get_jf_ancestor_formal_id (src),
2363 agg_p, type_p);
2364 break;
2366 default:
2367 gcc_unreachable ();
2370 if (src->agg.items
2371 && (dst_agg_p || !src->agg.by_ref))
2373 /* Currently we do not produce clobber aggregate jump
2374 functions, replace with merging when we do. */
2375 gcc_assert (!dst->agg.items);
2377 dst->agg.by_ref = src->agg.by_ref;
2378 dst->agg.items = vec_safe_copy (src->agg.items);
2381 else
2382 dst->type = IPA_JF_UNKNOWN;
2387 /* If TARGET is an addr_expr of a function declaration, make it the destination
2388 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2390 struct cgraph_edge *
2391 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
2393 struct cgraph_node *callee;
2394 struct inline_edge_summary *es = inline_edge_summary (ie);
2395 bool unreachable = false;
/* Strip the address-of operator so TARGET can become a FUNCTION_DECL.  */
2397 if (TREE_CODE (target) == ADDR_EXPR)
2398 target = TREE_OPERAND (target, 0);
2399 if (TREE_CODE (target) != FUNCTION_DECL)
2401 target = canonicalize_constructor_val (target, NULL)
2402 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2404 if (ie->indirect_info->member_ptr)
2405 /* Member pointer call that goes through a VMT lookup.  */
2406 return NULL;
/* The discovered target is provably not a function; redirect the call to
   BUILT_IN_UNREACHABLE so the path can be eliminated.  */
2408 if (dump_file)
2409 fprintf (dump_file, "ipa-prop: Discovered direct call to non-function"
2410 " in %s/%i, making it unreachable.\n",
2411 cgraph_node_name (ie->caller), ie->caller->symbol.order);
2412 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2413 callee = cgraph_get_create_node (target);
2414 unreachable = true;
2416 else
2417 callee = cgraph_get_node (target);
2419 else
2420 callee = cgraph_get_node (target);
2422 /* Because may-edges are not explicitly represented and vtable may be external,
2423 we may create the first reference to the object in the unit.  */
2424 if (!callee || callee->global.inlined_to)
2427 /* We are better to ensure we can refer to it.
2428 In the case of static functions we are out of luck, since we already
2429 removed its body.  In the case of public functions we may or may
2430 not introduce the reference.  */
2431 if (!canonicalize_constructor_val (target, NULL)
2432 || !TREE_PUBLIC (target))
2434 if (dump_file)
2435 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2436 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2437 xstrdup (cgraph_node_name (ie->caller)),
2438 ie->caller->symbol.order,
2439 xstrdup (cgraph_node_name (ie->callee)),
2440 ie->callee->symbol.order);
2441 return NULL;
2443 callee = cgraph_get_create_real_symbol_node (target);
2445 ipa_check_create_node_params ();
2447 /* We can not make edges to inline clones.  It is bug that someone removed
2448 the cgraph node too early.  */
2449 gcc_assert (!callee->global.inlined_to);
2451 if (dump_file && !unreachable)
2453 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
2454 "(%s/%i -> %s/%i), for stmt ",
2455 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2456 xstrdup (cgraph_node_name (ie->caller)),
2457 ie->caller->symbol.order,
2458 xstrdup (cgraph_node_name (callee)),
2459 callee->symbol.order);
2460 if (ie->call_stmt)
2461 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2462 else
2463 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
/* Turn the edge direct and account for the cheaper direct-call cost in the
   inline summary.  */
2465 ie = cgraph_make_edge_direct (ie, callee);
2466 es = inline_edge_summary (ie);
2467 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2468 - eni_size_weights.call_cost);
2469 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2470 - eni_time_weights.call_cost);
2472 return ie;
2475 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2476 return NULL if there is not any. BY_REF specifies whether the value has to
2477 be passed by reference or by value. */
2479 tree
2480 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2481 HOST_WIDE_INT offset, bool by_ref)
2483 struct ipa_agg_jf_item *item;
2484 int i;
2486 if (by_ref != agg->by_ref)
2487 return NULL;
2489 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2490 if (item->offset == offset)
2492 /* Currently we do not have clobber values, return NULL for them once
2493 we do. */
2494 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2495 return item->value;
2497 return NULL;
2500 /* Remove a reference to SYMBOL from the list of references of a node given by
2501 reference description RDESC. Return true if the reference has been
2502 successfully found and removed. */
2504 static bool
2505 remove_described_reference (symtab_node symbol, struct ipa_cst_ref_desc *rdesc)
2507 struct ipa_ref *to_del;
2508 struct cgraph_edge *origin;
2510 origin = rdesc->cs;
2511 if (!origin)
2512 return false;
2513 to_del = ipa_find_reference ((symtab_node) origin->caller, symbol,
2514 origin->call_stmt, origin->lto_stmt_uid);
2515 if (!to_del)
2516 return false;
2518 ipa_remove_reference (to_del);
2519 if (dump_file)
2520 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2521 xstrdup (cgraph_node_name (origin->caller)),
2522 origin->caller->symbol.order, xstrdup (symtab_node_name (symbol)));
2523 return true;
2526 /* If JFUNC has a reference description with refcount different from
2527 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2528 NULL. JFUNC must be a constant jump function. */
2530 static struct ipa_cst_ref_desc *
2531 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2533 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2534 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2535 return rdesc;
2536 else
2537 return NULL;
2540 /* If the value of constant jump function JFUNC is an address of a function
2541 declaration, return the associated call graph node. Otherwise return
2542 NULL. */
2544 static cgraph_node *
2545 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2547 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2548 tree cst = ipa_get_jf_constant (jfunc);
2549 if (TREE_CODE (cst) != ADDR_EXPR
2550 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2551 return NULL;
2553 return cgraph_get_node (TREE_OPERAND (cst, 0));
2557 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2558 refcount and if it hits zero, remove reference to SYMBOL from the caller of
2559 the edge specified in the rdesc. Return false if either the symbol or the
2560 reference could not be found, otherwise return true. */
2562 static bool
2563 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2565 struct ipa_cst_ref_desc *rdesc;
2566 if (jfunc->type == IPA_JF_CONST
2567 && (rdesc = jfunc_rdesc_usable (jfunc))
2568 && --rdesc->refcount == 0)
2570 symtab_node symbol = (symtab_node) cgraph_node_for_jfunc (jfunc);
2571 if (!symbol)
2572 return false;
2574 return remove_described_reference (symbol, rdesc);
2576 return true;
2579 /* Try to find a destination for indirect edge IE that corresponds to a simple
2580 call or a call of a member function pointer and where the destination is a
2581 pointer formal parameter described by jump function JFUNC. If it can be
2582 determined, return the newly direct edge, otherwise return NULL.
2583 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2585 static struct cgraph_edge *
2586 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2587 struct ipa_jump_func *jfunc,
2588 struct ipa_node_params *new_root_info)
2590 struct cgraph_edge *cs;
2591 tree target;
2592 bool agg_contents = ie->indirect_info->agg_contents;
2594 if (ie->indirect_info->agg_contents)
2595 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2596 ie->indirect_info->offset,
2597 ie->indirect_info->by_ref);
2598 else
2599 target = ipa_value_from_jfunc (new_root_info, jfunc);
2600 if (!target)
2601 return NULL;
2602 cs = ipa_make_edge_direct_to_target (ie, target);
2604 if (cs && !agg_contents)
2606 bool ok;
2607 gcc_checking_assert (cs->callee
2608 && (cs != ie
2609 || jfunc->type != IPA_JF_CONST
2610 || !cgraph_node_for_jfunc (jfunc)
2611 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2612 ok = try_decrement_rdesc_refcount (jfunc);
2613 gcc_checking_assert (ok);
2616 return cs;
2619 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2620 call based on a formal parameter which is described by jump function JFUNC
2621 and if it can be determined, make it direct and return the direct edge.
2622 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
2623 are relative to. */
2625 static struct cgraph_edge *
2626 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2627 struct ipa_jump_func *jfunc,
2628 struct ipa_node_params *new_root_info)
2630 tree binfo, target;
2632 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
2634 if (!binfo)
2635 return NULL;
2637 if (TREE_CODE (binfo) != TREE_BINFO)
2639 binfo = gimple_extract_devirt_binfo_from_cst
2640 (binfo, ie->indirect_info->otr_type);
2641 if (!binfo)
2642 return NULL;
2645 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
2646 ie->indirect_info->otr_type);
2647 if (binfo)
2648 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2649 binfo);
2650 else
2651 return NULL;
2653 if (target)
2654 return ipa_make_edge_direct_to_target (ie, target);
2655 else
2656 return NULL;
2659 /* Update the param called notes associated with NODE when CS is being inlined,
2660 assuming NODE is (potentially indirectly) inlined into CS->callee.
2661 Moreover, if the callee is discovered to be constant, create a new cgraph
2662 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2663 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
2665 static bool
2666 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2667 struct cgraph_node *node,
2668 vec<cgraph_edge_p> *new_edges)
2670 struct ipa_edge_args *top;
2671 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
2672 struct ipa_node_params *new_root_info;
2673 bool res = false;
2675 ipa_check_create_edge_args ();
2676 top = IPA_EDGE_REF (cs);
/* Lattices of the new root are what the jump functions of CS are relative
   to after the inlining.  */
2677 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
2678 ? cs->caller->global.inlined_to
2679 : cs->caller);
/* NEXT_IE is fetched up front because making an edge direct can unlink IE
   from the indirect-call list.  */
2681 for (ie = node->indirect_calls; ie; ie = next_ie)
2683 struct cgraph_indirect_call_info *ici = ie->indirect_info;
2684 struct ipa_jump_func *jfunc;
2685 int param_index;
2687 next_ie = ie->next_callee;
2689 if (ici->param_index == -1)
2690 continue;
2692 /* We must check range due to calls with variable number of arguments: */
2693 if (ici->param_index >= ipa_get_cs_argument_count (top))
2695 ici->param_index = -1;
2696 continue;
2699 param_index = ici->param_index;
2700 jfunc = ipa_get_ith_jump_func (top, param_index);
2702 if (!flag_indirect_inlining)
2703 new_direct_edge = NULL;
2704 else if (ici->polymorphic)
2705 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
2706 new_root_info);
2707 else
2708 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
2709 new_root_info);
2710 /* If speculation was removed, then we need to do nothing.  */
2711 if (new_direct_edge && new_direct_edge != ie)
2713 new_direct_edge->indirect_inlining_edge = 1;
/* Making the edge direct may have reallocated the edge-args vector, so
   refresh TOP.  */
2714 top = IPA_EDGE_REF (cs);
2715 res = true;
2717 else if (new_direct_edge)
2719 new_direct_edge->indirect_inlining_edge = 1;
2720 if (new_direct_edge->call_stmt)
2721 new_direct_edge->call_stmt_cannot_inline_p
2722 = !gimple_check_call_matching_types (
2723 new_direct_edge->call_stmt,
2724 new_direct_edge->callee->symbol.decl, false);
2725 if (new_edges)
2727 new_edges->safe_push (new_direct_edge);
2728 res = true;
2730 top = IPA_EDGE_REF (cs);
/* No direct target found; translate the described parameter through the
   jump function so later propagation can still use it.  */
2732 else if (jfunc->type == IPA_JF_PASS_THROUGH
2733 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2735 if (ici->agg_contents
2736 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
2737 ici->param_index = -1;
2738 else
2739 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
2741 else if (jfunc->type == IPA_JF_ANCESTOR)
2743 if (ici->agg_contents
2744 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
2745 ici->param_index = -1;
2746 else
2748 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
2749 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
2752 else
2753 /* Either we can find a destination for this edge now or never.  */
2754 ici->param_index = -1;
2757 return res;
2760 /* Recursively traverse subtree of NODE (including node) made of inlined
2761 cgraph_edges when CS has been inlined and invoke
2762 update_indirect_edges_after_inlining on all nodes and
2763 update_jump_functions_after_inlining on all non-inlined edges that lead out
2764 of this subtree. Newly discovered indirect edges will be added to
2765 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
2766 created. */
2768 static bool
2769 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
2770 struct cgraph_node *node,
2771 vec<cgraph_edge_p> *new_edges)
2773 struct cgraph_edge *e;
2774 bool res;
2776 res = update_indirect_edges_after_inlining (cs, node, new_edges);
2778 for (e = node->callees; e; e = e->next_callee)
2779 if (!e->inline_failed)
2780 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
2781 else
2782 update_jump_functions_after_inlining (cs, e);
2783 for (e = node->indirect_calls; e; e = e->next_callee)
2784 update_jump_functions_after_inlining (cs, e);
2786 return res;
2789 /* Combine two controlled uses counts as done during inlining. */
2791 static int
2792 combine_controlled_uses_counters (int c, int d)
2794 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
2795 return IPA_UNDESCRIBED_USE;
2796 else
2797 return c + d - 1;
2800 /* Propagate number of controlled users from CS->callee to the new root of the
2801 tree of inlined nodes. */
2803 static void
2804 propagate_controlled_uses (struct cgraph_edge *cs)
2806 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
2807 struct cgraph_node *new_root = cs->caller->global.inlined_to
2808 ? cs->caller->global.inlined_to : cs->caller;
2809 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
2810 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
2811 int count, i;
/* Only arguments that have a matching formal parameter can be combined.  */
2813 count = MIN (ipa_get_cs_argument_count (args),
2814 ipa_get_param_count (old_root_info));
2815 for (i = 0; i < count; i++)
2817 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2818 struct ipa_cst_ref_desc *rdesc;
2820 if (jf->type == IPA_JF_PASS_THROUGH)
/* Fold the callee's controlled-uses count of parameter I into the count
   of the caller parameter it passes through.  */
2822 int src_idx, c, d;
2823 src_idx = ipa_get_jf_pass_through_formal_id (jf);
2824 c = ipa_get_controlled_uses (new_root_info, src_idx);
2825 d = ipa_get_controlled_uses (old_root_info, i);
2827 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
2828 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
2829 c = combine_controlled_uses_counters (c, d);
2830 ipa_set_controlled_uses (new_root_info, src_idx, c);
/* If no controlled uses remain in an IPA-CP clone and the known value is
   a function address, the reference created for the clone is now dead.  */
2831 if (c == 0 && new_root_info->ipcp_orig_node)
2833 struct cgraph_node *n;
2834 struct ipa_ref *ref;
2835 tree t = new_root_info->known_vals[src_idx];
2837 if (t && TREE_CODE (t) == ADDR_EXPR
2838 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
2839 && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
2840 && (ref = ipa_find_reference ((symtab_node) new_root,
2841 (symtab_node) n, NULL, 0)))
2843 if (dump_file)
2844 fprintf (dump_file, "ipa-prop: Removing cloning-created "
2845 "reference from %s/%i to %s/%i.\n",
2846 xstrdup (cgraph_node_name (new_root)),
2847 new_root->symbol.order,
2848 xstrdup (cgraph_node_name (n)), n->symbol.order);
2849 ipa_remove_reference (ref);
2853 else if (jf->type == IPA_JF_CONST
2854 && (rdesc = jfunc_rdesc_usable (jf)))
/* For constants, the refcount in the reference description plays the role
   of the controlled-uses count.  */
2856 int d = ipa_get_controlled_uses (old_root_info, i);
2857 int c = rdesc->refcount;
2858 rdesc->refcount = combine_controlled_uses_counters (c, d);
2859 if (rdesc->refcount == 0)
2861 tree cst = ipa_get_jf_constant (jf);
2862 struct cgraph_node *n;
2863 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
2864 && TREE_CODE (TREE_OPERAND (cst, 0))
2865 == FUNCTION_DECL);
2866 n = cgraph_get_node (TREE_OPERAND (cst, 0));
2867 if (n)
2869 struct cgraph_node *clone;
2870 bool ok;
2871 ok = remove_described_reference ((symtab_node) n, rdesc);
2872 gcc_checking_assert (ok);
/* Walk up the chain of inline/IPA-CP clones and drop the copies of the
   reference that cloning created along the way.  */
2874 clone = cs->caller;
2875 while (clone->global.inlined_to
2876 && clone != rdesc->cs->caller
2877 && IPA_NODE_REF (clone)->ipcp_orig_node)
2879 struct ipa_ref *ref;
2880 ref = ipa_find_reference ((symtab_node) clone,
2881 (symtab_node) n, NULL, 0);
2882 if (ref)
2884 if (dump_file)
2885 fprintf (dump_file, "ipa-prop: Removing "
2886 "cloning-created reference "
2887 "from %s/%i to %s/%i.\n",
2888 xstrdup (cgraph_node_name (clone)),
2889 clone->symbol.order,
2890 xstrdup (cgraph_node_name (n)),
2891 n->symbol.order);
2892 ipa_remove_reference (ref);
2894 clone = clone->callers->caller;
/* Arguments beyond the formal parameter count cannot be described; mark
   whatever they touch as undescribed.  */
2901 for (i = ipa_get_param_count (old_root_info);
2902 i < ipa_get_cs_argument_count (args);
2903 i++)
2905 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2907 if (jf->type == IPA_JF_CONST)
2909 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
2910 if (rdesc)
2911 rdesc->refcount = IPA_UNDESCRIBED_USE;
2913 else if (jf->type == IPA_JF_PASS_THROUGH)
2914 ipa_set_controlled_uses (new_root_info,
2915 jf->value.pass_through.formal_id,
2916 IPA_UNDESCRIBED_USE);
2920 /* Update jump functions and call note functions on inlining the call site CS.
2921 CS is expected to lead to a node already cloned by
2922 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
2923 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
2924 created. */
2926 bool
2927 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
2928 vec<cgraph_edge_p> *new_edges)
2930 bool changed;
2931 /* Do nothing if the preparation phase has not been carried out yet
2932 (i.e. during early inlining). */
2933 if (!ipa_node_params_vector.exists ())
2934 return false;
2935 gcc_assert (ipa_edge_args_vector);
2937 propagate_controlled_uses (cs);
2938 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
2940 return changed;
2943 /* Frees all dynamically allocated structures that the argument info points
2944 to. */
2946 void
2947 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
2949 vec_free (args->jump_functions);
2950 memset (args, 0, sizeof (*args));
2953 /* Free all ipa_edge structures. */
2955 void
2956 ipa_free_all_edge_args (void)
2958 int i;
2959 struct ipa_edge_args *args;
2961 if (!ipa_edge_args_vector)
2962 return;
2964 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
2965 ipa_free_edge_args_substructures (args);
2967 vec_free (ipa_edge_args_vector);
2970 /* Frees all dynamically allocated structures that the param info points
2971 to. */
2973 void
2974 ipa_free_node_params_substructures (struct ipa_node_params *info)
2976 info->descriptors.release ();
2977 free (info->lattices);
2978 /* Lattice values and their sources are deallocated with their alocation
2979 pool. */
2980 info->known_vals.release ();
2981 memset (info, 0, sizeof (*info));
2984 /* Free all ipa_node_params structures. */
2986 void
2987 ipa_free_all_node_params (void)
2989 int i;
2990 struct ipa_node_params *info;
2992 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
2993 ipa_free_node_params_substructures (info);
2995 ipa_node_params_vector.release ();
2998 /* Set the aggregate replacements of NODE to be AGGVALS. */
3000 void
3001 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3002 struct ipa_agg_replacement_value *aggvals)
3004 if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
3005 vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);
3007 (*ipa_node_agg_replacements)[node->uid] = aggvals;
3010 /* Hook that is called by cgraph.c when an edge is removed. */
3012 static void
3013 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3015 struct ipa_edge_args *args;
3017 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3018 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3019 return;
3021 args = IPA_EDGE_REF (cs);
3022 if (args->jump_functions)
3024 struct ipa_jump_func *jf;
3025 int i;
3026 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3028 struct ipa_cst_ref_desc *rdesc;
3029 try_decrement_rdesc_refcount (jf);
3030 if (jf->type == IPA_JF_CONST
3031 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3032 && rdesc->cs == cs)
3033 rdesc->cs = NULL;
3037 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3040 /* Hook that is called by cgraph.c when a node is removed. */
3042 static void
3043 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3045 /* During IPA-CP updating we can be called on not-yet analyze clones. */
3046 if (ipa_node_params_vector.length () > (unsigned)node->uid)
3047 ipa_free_node_params_substructures (IPA_NODE_REF (node));
3048 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3049 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
3052 /* Hook that is called by cgraph.c when an edge is duplicated. */
3054 static void
3055 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3056 __attribute__((unused)) void *data)
3058 struct ipa_edge_args *old_args, *new_args;
3059 unsigned int i;
3061 ipa_check_create_edge_args ();
3063 old_args = IPA_EDGE_REF (src);
3064 new_args = IPA_EDGE_REF (dst);
3066 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3068 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3070 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3071 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
/* The vec_safe_copy above was shallow, so dst_jf->agg.items still aliases
   the source's aggregate vector; make our own copy of it.  */
3073 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3075 if (src_jf->type == IPA_JF_CONST)
3077 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3079 if (!src_rdesc)
3080 dst_jf->value.constant.rdesc = NULL;
/* Same caller: the duplicated call statement carries its own copy of the
   reference, so clone the reference and give DST a fresh description.  */
3081 else if (src->caller == dst->caller)
3083 struct ipa_ref *ref;
3084 symtab_node n = (symtab_node) cgraph_node_for_jfunc (src_jf);
3085 gcc_checking_assert (n);
3086 ref = ipa_find_reference ((symtab_node) src->caller, n,
3087 src->call_stmt, src->lto_stmt_uid);
3088 gcc_checking_assert (ref);
3089 ipa_clone_ref (ref, (symtab_node) dst->caller, ref->stmt);
3091 gcc_checking_assert (ipa_refdesc_pool);
3092 struct ipa_cst_ref_desc *dst_rdesc
3093 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3094 dst_rdesc->cs = dst;
3095 dst_rdesc->refcount = src_rdesc->refcount;
3096 dst_rdesc->next_duplicate = NULL;
3097 dst_jf->value.constant.rdesc = dst_rdesc;
/* SRC owns its description: chain DST's new description into SRC's list
   of duplicates so later duplications can find it.  */
3099 else if (src_rdesc->cs == src)
3101 struct ipa_cst_ref_desc *dst_rdesc;
3102 gcc_checking_assert (ipa_refdesc_pool);
3103 dst_rdesc
3104 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3105 dst_rdesc->cs = dst;
3106 dst_rdesc->refcount = src_rdesc->refcount;
3107 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3108 src_rdesc->next_duplicate = dst_rdesc;
3109 dst_jf->value.constant.rdesc = dst_rdesc;
3111 else
3113 struct ipa_cst_ref_desc *dst_rdesc;
3114 /* This can happen during inlining, when a JFUNC can refer to a
3115 reference taken in a function up in the tree of inline clones.
3116 We need to find the duplicate that refers to our tree of
3117 inline clones.  */
3119 gcc_assert (dst->caller->global.inlined_to);
3120 for (dst_rdesc = src_rdesc->next_duplicate;
3121 dst_rdesc;
3122 dst_rdesc = dst_rdesc->next_duplicate)
3124 struct cgraph_node *top;
3125 top = dst_rdesc->cs->caller->global.inlined_to
3126 ? dst_rdesc->cs->caller->global.inlined_to
3127 : dst_rdesc->cs->caller;
3128 if (dst->caller->global.inlined_to == top)
3129 break;
3131 gcc_assert (dst_rdesc);
3132 dst_jf->value.constant.rdesc = dst_rdesc;
3138 /* Hook that is called by cgraph.c when a node is duplicated. */
/* Copies SRC's ipa_node_params into DST (taking an independent copy of the
   parameter descriptor vector) and, if SRC has an aggregate replacement
   value chain, duplicates that chain onto DST as well.  */
3140 static void
3141 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
3142 ATTRIBUTE_UNUSED void *data)
3144 struct ipa_node_params *old_info, *new_info;
3145 struct ipa_agg_replacement_value *old_av, *new_av;
3147 ipa_check_create_node_params ();
3148 old_info = IPA_NODE_REF (src);
3149 new_info = IPA_NODE_REF (dst);
/* Descriptors are copied by value so DST can be modified independently;
   lattices are scratch data and start out empty on the new node.  */
3151 new_info->descriptors = old_info->descriptors.copy ();
3152 new_info->lattices = NULL;
3153 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3155 new_info->uses_analysis_done = old_info->uses_analysis_done;
3156 new_info->node_enqueued = old_info->node_enqueued;
3158 old_av = ipa_get_agg_replacements_for_node (src);
3159 if (!old_av)
3160 return;
/* Deep-copy the aggregate replacement list.  Elements are pushed onto the
   front of NEW_AV, so the duplicated chain is in reverse order of the
   original.  */
3162 new_av = NULL;
3163 while (old_av)
3165 struct ipa_agg_replacement_value *v;
3167 v = ggc_alloc_ipa_agg_replacement_value ();
3168 memcpy (v, old_av, sizeof (*v));
3169 v->next = new_av;
3170 new_av = v;
3171 old_av = old_av->next;
3173 ipa_set_node_agg_value_chain (dst, new_av);
3177 /* Analyze newly added function into callgraph. */
/* Registered as the cgraph function-insertion hook (see
   ipa_register_cgraph_hooks below); simply runs the full IPA analysis on
   the new NODE.  DATA is unused.  */
3179 static void
3180 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3182 ipa_analyze_node (node);
3185 /* Register our cgraph hooks if they are not already there. */
3187 void
3188 ipa_register_cgraph_hooks (void)
3190 if (!edge_removal_hook_holder)
3191 edge_removal_hook_holder =
3192 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3193 if (!node_removal_hook_holder)
3194 node_removal_hook_holder =
3195 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
3196 if (!edge_duplication_hook_holder)
3197 edge_duplication_hook_holder =
3198 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3199 if (!node_duplication_hook_holder)
3200 node_duplication_hook_holder =
3201 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
/* NOTE(review): unlike the four hooks above, the function-insertion hook
   is registered unconditionally, so a second call would overwrite (and
   leak) the previous holder — confirm callers never register twice.  */
3202 function_insertion_hook_holder =
3203 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
3206 /* Unregister our cgraph hooks if they are not already there. */
/* Removes all five hooks registered by ipa_register_cgraph_hooks and
   clears the holders so a later re-registration starts clean.  */
3208 static void
3209 ipa_unregister_cgraph_hooks (void)
3211 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
3212 edge_removal_hook_holder = NULL;
3213 cgraph_remove_node_removal_hook (node_removal_hook_holder);
3214 node_removal_hook_holder = NULL;
3215 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
3216 edge_duplication_hook_holder = NULL;
3217 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
3218 node_duplication_hook_holder = NULL;
3219 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
3220 function_insertion_hook_holder = NULL;
3223 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3224 longer needed after ipa-cp. */
/* NOTE(review): the body is guarded by !optimize; the extraction has lost
   the braces, so the exact extent of the guarded region should be checked
   against the original file before relying on it.  */
3226 void
3227 ipa_free_all_structures_after_ipa_cp (void)
3229 if (!optimize)
3231 ipa_free_all_edge_args ();
3232 ipa_free_all_node_params ();
3233 free_alloc_pool (ipcp_sources_pool);
3234 free_alloc_pool (ipcp_values_pool);
3235 free_alloc_pool (ipcp_agg_lattice_pool);
3236 ipa_unregister_cgraph_hooks ();
/* ipa_refdesc_pool is created lazily, hence the NULL check before free.  */
3237 if (ipa_refdesc_pool)
3238 free_alloc_pool (ipa_refdesc_pool);
3242 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3243 longer needed after indirect inlining. */
/* Unconditional counterpart of ipa_free_all_structures_after_ipa_cp; here
   each ipcp pool is checked for NULL because ipa-cp may never have run and
   allocated them.  */
3245 void
3246 ipa_free_all_structures_after_iinln (void)
3248 ipa_free_all_edge_args ();
3249 ipa_free_all_node_params ();
3250 ipa_unregister_cgraph_hooks ();
3251 if (ipcp_sources_pool)
3252 free_alloc_pool (ipcp_sources_pool);
3253 if (ipcp_values_pool)
3254 free_alloc_pool (ipcp_values_pool);
3255 if (ipcp_agg_lattice_pool)
3256 free_alloc_pool (ipcp_agg_lattice_pool);
3257 if (ipa_refdesc_pool)
3258 free_alloc_pool (ipa_refdesc_pool);
3261 /* Print ipa_tree_map data structures of all functions in the
3262 callgraph to F. */
/* NOTE(review): despite the comment above, this prints the parameter
   descriptors of a single NODE to F; the all-functions variant is
   ipa_print_all_params below.  Nodes without a definition are skipped.  */
3264 void
3265 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3267 int i, count;
3268 struct ipa_node_params *info;
3270 if (!node->symbol.definition)
3271 return;
3272 info = IPA_NODE_REF (node);
3273 fprintf (f, " function %s/%i parameter descriptors:\n",
3274 cgraph_node_name (node), node->symbol.order);
3275 count = ipa_get_param_count (info);
3276 for (i = 0; i < count; i++)
3278 int c;
3280 ipa_dump_param (f, info, i);
3281 if (ipa_is_param_used (info, i))
3282 fprintf (f, " used");
3283 c = ipa_get_controlled_uses (info, i);
3284 if (c == IPA_UNDESCRIBED_USE)
3285 fprintf (f, " undescribed_use");
3286 else
3287 fprintf (f, " controlled_uses=%i", c);
3288 fprintf (f, "\n");
3292 /* Print ipa_tree_map data structures of all functions in the
3293 callgraph to F. */
/* Iterates every function in the callgraph and delegates to
   ipa_print_node_params.  */
3295 void
3296 ipa_print_all_params (FILE * f)
3298 struct cgraph_node *node;
3300 fprintf (f, "\nFunction parameters:\n");
3301 FOR_EACH_FUNCTION (node)
3302 ipa_print_node_params (f, node);
3305 /* Return a heap allocated vector containing formal parameters of FNDECL. */
/* The caller owns the returned vec and must call release () on it (as the
   dump routines in this file do).  Not usable in WPA mode, where bodies
   and DECL_ARGUMENTS are unavailable — hence the assert.  */
3307 vec<tree>
3308 ipa_get_vector_of_formal_parms (tree fndecl)
3310 vec<tree> args;
3311 int count;
3312 tree parm;
3314 gcc_assert (!flag_wpa);
3315 count = count_formal_params (fndecl);
3316 args.create (count);
3317 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3318 args.quick_push (parm);
3320 return args;
3323 /* Return a heap allocated vector containing types of formal parameters of
3324 function type FNTYPE. */
/* First pass counts the TYPE_ARG_TYPES list so the vec can be sized
   exactly; second pass fills it.  Caller must release the result.  Note
   that a trailing void_type_node (varargs terminator) is included, which
   is why callers compare lengths against oparms.length () + 1.  */
3326 static inline vec<tree>
3327 get_vector_of_formal_parm_types (tree fntype)
3329 vec<tree> types;
3330 int count = 0;
3331 tree t;
3333 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3334 count++;
3336 types.create (count);
3337 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3338 types.quick_push (TREE_VALUE (t));
3340 return types;
3343 /* Modify the function declaration FNDECL and its type according to the plan in
3344 ADJUSTMENTS. It also sets base fields of individual adjustments structures
3345 to reflect the actual parameters being modified which are determined by the
3346 base_index field. */
/* Overview of the visible steps: (1) snapshot the old PARM_DECL chain and
   arg-type list; (2) walk ADJUSTMENTS building a new PARM_DECL chain and
   (when the type carries arg types) a new TYPE_ARG_TYPES list — copied
   params reuse the old decl, non-removed ones get a synthesized decl named
   with SYNTH_PARM_PREFIX; (3) build the new function type, preserving the
   original type via copy where possible; (4) clear builtin info and
   reassociate type variants.  */
3348 void
3349 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
3350 const char *synth_parm_prefix)
3352 vec<tree> oparms, otypes;
3353 tree orig_type, new_type = NULL;
3354 tree old_arg_types, t, new_arg_types = NULL;
3355 tree parm, *link = &DECL_ARGUMENTS (fndecl);
3356 int i, len = adjustments.length ();
3357 tree new_reversed = NULL;
3358 bool care_for_types, last_parm_void;
3360 if (!synth_parm_prefix)
3361 synth_parm_prefix = "SYNTH";
3363 oparms = ipa_get_vector_of_formal_parms (fndecl);
3364 orig_type = TREE_TYPE (fndecl);
3365 old_arg_types = TYPE_ARG_TYPES (orig_type);
3367 /* The following test is an ugly hack, some functions simply don't have any
3368 arguments in their type. This is probably a bug but well... */
3369 care_for_types = (old_arg_types != NULL_TREE);
3370 if (care_for_types)
3372 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3373 == void_type_node);
3374 otypes = get_vector_of_formal_parm_types (orig_type);
3375 if (last_parm_void)
3376 gcc_assert (oparms.length () + 1 == otypes.length ());
3377 else
3378 gcc_assert (oparms.length () == otypes.length ());
3380 else
3382 last_parm_void = false;
3383 otypes.create (0);
/* Main loop: rebuild DECL_ARGUMENTS through LINK, collecting the new
   arg-type list in reverse (tree_cons prepends).  */
3386 for (i = 0; i < len; i++)
3388 struct ipa_parm_adjustment *adj;
3389 gcc_assert (link);
3391 adj = &adjustments[i];
3392 parm = oparms[adj->base_index];
3393 adj->base = parm;
3395 if (adj->copy_param)
3397 if (care_for_types)
3398 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3399 new_arg_types);
3400 *link = parm;
3401 link = &DECL_CHAIN (parm);
3403 else if (!adj->remove_param)
3405 tree new_parm;
3406 tree ptype;
/* by_ref adjustments pass a pointer to the (part of the) original
   argument rather than its value.  */
3408 if (adj->by_ref)
3409 ptype = build_pointer_type (adj->type);
3410 else
3411 ptype = adj->type;
3413 if (care_for_types)
3414 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3416 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3417 ptype);
3418 DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);
3420 DECL_ARTIFICIAL (new_parm) = 1;
3421 DECL_ARG_TYPE (new_parm) = ptype;
3422 DECL_CONTEXT (new_parm) = fndecl;
3423 TREE_USED (new_parm) = 1;
3424 DECL_IGNORED_P (new_parm) = 1;
3425 layout_decl (new_parm, 0);
3427 adj->base = parm;
3428 adj->reduction = new_parm;
3430 *link = new_parm;
3432 link = &DECL_CHAIN (new_parm);
3436 *link = NULL_TREE;
3438 if (care_for_types)
/* Un-reverse the collected type list and restore the trailing
   void_list_node if the original had one (i.e. was not varargs).  */
3440 new_reversed = nreverse (new_arg_types);
3441 if (last_parm_void)
3443 if (new_reversed)
3444 TREE_CHAIN (new_arg_types) = void_list_node;
3445 else
3446 new_reversed = void_list_node;
3450 /* Use copy_node to preserve as much as possible from original type
3451 (debug info, attribute lists etc.)
3452 Exception is METHOD_TYPEs must have THIS argument.
3453 When we are asked to remove it, we need to build new FUNCTION_TYPE
3454 instead. */
3455 if (TREE_CODE (orig_type) != METHOD_TYPE
3456 || (adjustments[0].copy_param
3457 && adjustments[0].base_index == 0))
3459 new_type = build_distinct_type_copy (orig_type);
3460 TYPE_ARG_TYPES (new_type) = new_reversed;
3462 else
3464 new_type
3465 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3466 new_reversed))
3467 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3468 DECL_VINDEX (fndecl) = NULL_TREE;
3471 /* When signature changes, we need to clear builtin info. */
3472 if (DECL_BUILT_IN (fndecl))
3474 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3475 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3478 /* This is a new type, not a copy of an old type. Need to reassociate
3479 variants. We can handle everything except the main variant lazily. */
3480 t = TYPE_MAIN_VARIANT (orig_type);
3481 if (orig_type != t)
3483 TYPE_MAIN_VARIANT (new_type) = t;
3484 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
3485 TYPE_NEXT_VARIANT (t) = new_type;
3487 else
3489 TYPE_MAIN_VARIANT (new_type) = new_type;
3490 TYPE_NEXT_VARIANT (new_type) = NULL;
3493 TREE_TYPE (fndecl) = new_type;
3494 DECL_VIRTUAL_P (fndecl) = 0;
3495 otypes.release ();
3496 oparms.release ();
3499 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
3500 If this is a directly recursive call, CS must be NULL. Otherwise it must
3501 contain the corresponding call graph edge. */
/* Builds a replacement GIMPLE call with the adjusted argument vector,
   emits debug binds for removed/transformed arguments, replaces STMT in
   place, re-records statement references for any statements materialized
   by operand gimplification, and finally updates SSA form.  */
3503 void
3504 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
3505 ipa_parm_adjustment_vec adjustments)
3507 struct cgraph_node *current_node = cgraph_get_node (current_function_decl);
3508 vec<tree> vargs;
3509 vec<tree, va_gc> **debug_args = NULL;
3510 gimple new_stmt;
3511 gimple_stmt_iterator gsi, prev_gsi;
3512 tree callee_decl;
3513 int i, len;
3515 len = adjustments.length ();
3516 vargs.create (len);
3517 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->symbol.decl;
3518 ipa_remove_stmt_references ((symtab_node) current_node, stmt);
/* PREV_GSI marks the statement before STMT so that, after replacement,
   we can walk back over everything force_gimple_operand_gsi inserted.  */
3520 gsi = gsi_for_stmt (stmt);
3521 prev_gsi = gsi;
3522 gsi_prev (&prev_gsi);
3523 for (i = 0; i < len; i++)
3525 struct ipa_parm_adjustment *adj;
3527 adj = &adjustments[i];
3529 if (adj->copy_param)
3531 tree arg = gimple_call_arg (stmt, adj->base_index);
3533 vargs.quick_push (arg);
3535 else if (!adj->remove_param)
3537 tree expr, base, off;
3538 location_t loc;
3539 unsigned int deref_align = 0;
3540 bool deref_base = false;
3542 /* We create a new parameter out of the value of the old one, we can
3543 do the following kind of transformations:
3545 - A scalar passed by reference is converted to a scalar passed by
3546 value. (adj->by_ref is false and the type of the original
3547 actual argument is a pointer to a scalar).
3549 - A part of an aggregate is passed instead of the whole aggregate.
3550 The part can be passed either by value or by reference, this is
3551 determined by value of adj->by_ref. Moreover, the code below
3552 handles both situations when the original aggregate is passed by
3553 value (its type is not a pointer) and when it is passed by
3554 reference (it is a pointer to an aggregate).
3556 When the new argument is passed by reference (adj->by_ref is true)
3557 it must be a part of an aggregate and therefore we form it by
3558 simply taking the address of a reference inside the original
3559 aggregate. */
3561 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3562 base = gimple_call_arg (stmt, adj->base_index);
3563 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3564 : EXPR_LOCATION (base);
3566 if (TREE_CODE (base) != ADDR_EXPR
3567 && POINTER_TYPE_P (TREE_TYPE (base)))
3568 off = build_int_cst (adj->alias_ptr_type,
3569 adj->offset / BITS_PER_UNIT);
3570 else
3572 HOST_WIDE_INT base_offset;
3573 tree prev_base;
3574 bool addrof;
3576 if (TREE_CODE (base) == ADDR_EXPR)
3578 base = TREE_OPERAND (base, 0);
3579 addrof = true;
3581 else
3582 addrof = false;
3583 prev_base = base;
3584 base = get_addr_base_and_unit_offset (base, &base_offset);
3585 /* Aggregate arguments can have non-invariant addresses. */
3586 if (!base)
3588 base = build_fold_addr_expr (prev_base);
3589 off = build_int_cst (adj->alias_ptr_type,
3590 adj->offset / BITS_PER_UNIT);
3592 else if (TREE_CODE (base) == MEM_REF)
/* Fold the MEM_REF's own offset into OFF and strip it; if the
   MEM_REF came from an actual dereference (not an ADDR_EXPR we
   peeled above), remember its type alignment for later.  */
3594 if (!addrof)
3596 deref_base = true;
3597 deref_align = TYPE_ALIGN (TREE_TYPE (base));
3599 off = build_int_cst (adj->alias_ptr_type,
3600 base_offset
3601 + adj->offset / BITS_PER_UNIT);
3602 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
3603 off);
3604 base = TREE_OPERAND (base, 0);
3606 else
3608 off = build_int_cst (adj->alias_ptr_type,
3609 base_offset
3610 + adj->offset / BITS_PER_UNIT);
3611 base = build_fold_addr_expr (base);
3615 if (!adj->by_ref)
3617 tree type = adj->type;
3618 unsigned int align;
3619 unsigned HOST_WIDE_INT misalign;
3621 if (deref_base)
3623 align = deref_align;
3624 misalign = 0;
3626 else
3628 get_pointer_alignment_1 (base, &align, &misalign);
3629 if (TYPE_ALIGN (type) > align)
3630 align = TYPE_ALIGN (type);
/* Fold OFF into the misalignment and reduce ALIGN to what the
   combined offset actually guarantees; build an aligned type
   variant if that is weaker than TYPE's natural alignment.  */
3632 misalign += (tree_to_double_int (off)
3633 .sext (TYPE_PRECISION (TREE_TYPE (off))).low
3634 * BITS_PER_UNIT);
3635 misalign = misalign & (align - 1);
3636 if (misalign != 0)
3637 align = (misalign & -misalign);
3638 if (align < TYPE_ALIGN (type))
3639 type = build_aligned_type (type, align);
3640 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
3642 else
3644 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
3645 expr = build_fold_addr_expr (expr);
3648 expr = force_gimple_operand_gsi (&gsi, expr,
3649 adj->by_ref
3650 || is_gimple_reg_type (adj->type),
3651 NULL, true, GSI_SAME_STMT);
3652 vargs.quick_push (expr);
/* Emit a debug bind so the value of a removed or transformed argument
   remains visible to the debugger via a DEBUG_EXPR_DECL recorded in the
   callee's debug-args vector.  */
3654 if (!adj->copy_param && MAY_HAVE_DEBUG_STMTS)
3656 unsigned int ix;
3657 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
3658 gimple def_temp;
3660 arg = gimple_call_arg (stmt, adj->base_index);
3661 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
3663 if (!fold_convertible_p (TREE_TYPE (origin), arg))
3664 continue;
3665 arg = fold_convert_loc (gimple_location (stmt),
3666 TREE_TYPE (origin), arg);
3668 if (debug_args == NULL)
3669 debug_args = decl_debug_args_insert (callee_decl);
3670 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
3671 if (ddecl == origin)
3673 ddecl = (**debug_args)[ix + 1];
3674 break;
3676 if (ddecl == NULL)
3678 ddecl = make_node (DEBUG_EXPR_DECL);
3679 DECL_ARTIFICIAL (ddecl) = 1;
3680 TREE_TYPE (ddecl) = TREE_TYPE (origin);
3681 DECL_MODE (ddecl) = DECL_MODE (origin);
3683 vec_safe_push (*debug_args, origin);
3684 vec_safe_push (*debug_args, ddecl);
3686 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
3687 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
3691 if (dump_file && (dump_flags & TDF_DETAILS))
3693 fprintf (dump_file, "replacing stmt:");
3694 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
/* Build the new call, carry over lhs, block, location, static chain and
   call flags, then swap it in for the old statement.  */
3697 new_stmt = gimple_build_call_vec (callee_decl, vargs);
3698 vargs.release ();
3699 if (gimple_call_lhs (stmt))
3700 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3702 gimple_set_block (new_stmt, gimple_block (stmt));
3703 if (gimple_has_location (stmt))
3704 gimple_set_location (new_stmt, gimple_location (stmt));
3705 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3706 gimple_call_copy_flags (new_stmt, stmt);
3708 if (dump_file && (dump_flags & TDF_DETAILS))
3710 fprintf (dump_file, "with stmt:");
3711 print_gimple_stmt (dump_file, new_stmt, 0, 0);
3712 fprintf (dump_file, "\n");
3714 gsi_replace (&gsi, new_stmt, true);
3715 if (cs)
3716 cgraph_set_call_stmt (cs, new_stmt);
/* Walk backwards over every statement inserted between PREV_GSI and the
   replacement, recording their references for the symbol table.  */
3719 ipa_record_stmt_references (current_node, gsi_stmt (gsi));
3720 gsi_prev (&gsi);
3722 while ((gsi_end_p (prev_gsi) && !gsi_end_p (gsi))
3723 || (!gsi_end_p (prev_gsi) && gsi_stmt (gsi) == gsi_stmt (prev_gsi)));
3725 update_ssa (TODO_update_ssa);
3726 free_dominance_info (CDI_DOMINATORS);
3729 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
/* Linear scan counting matches; returns true as soon as a second
   adjustment with the same base_index is found.  */
3731 static bool
3732 index_in_adjustments_multiple_times_p (int base_index,
3733 ipa_parm_adjustment_vec adjustments)
3735 int i, len = adjustments.length ();
3736 bool one = false;
3738 for (i = 0; i < len; i++)
3740 struct ipa_parm_adjustment *adj;
3741 adj = &adjustments[i];
3743 if (adj->base_index == base_index)
3745 if (one)
3746 return true;
3747 else
3748 one = true;
3751 return false;
3755 /* Return adjustments that should have the same effect on function parameters
3756 and call arguments as if they were first changed according to adjustments in
3757 INNER and then by adjustments in OUTER. */
/* TMP holds INNER minus its removals so that OUTER's base_index values
   (which refer to the post-INNER parameter layout) can be looked up in
   it.  INNER's removals are re-appended at the end so the combined vector
   still removes those original parameters.  */
3759 ipa_parm_adjustment_vec
3760 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
3761 ipa_parm_adjustment_vec outer)
3763 int i, outlen = outer.length ();
3764 int inlen = inner.length ();
3765 int removals = 0;
3766 ipa_parm_adjustment_vec adjustments, tmp;
3768 tmp.create (inlen);
3769 for (i = 0; i < inlen; i++)
3771 struct ipa_parm_adjustment *n;
3772 n = &inner[i];
3774 if (n->remove_param)
3775 removals++;
3776 else
3777 tmp.quick_push (*n);
3780 adjustments.create (outlen + removals);
3781 for (i = 0; i < outlen; i++)
3783 struct ipa_parm_adjustment r;
3784 struct ipa_parm_adjustment *out = &outer[i];
3785 struct ipa_parm_adjustment *in = &tmp[out->base_index];
3787 memset (&r, 0, sizeof (r));
3788 gcc_assert (!in->remove_param);
3789 if (out->remove_param)
/* Only emit the removal if this is the last surviving reference to
   the original parameter; otherwise just drop this adjustment.  */
3791 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
3793 r.remove_param = true;
3794 adjustments.quick_push (r);
3796 continue;
3799 r.base_index = in->base_index;
3800 r.type = out->type;
3802 /* FIXME: Create nonlocal value too. */
/* Offsets compose additively; a copy on either side contributes zero.  */
3804 if (in->copy_param && out->copy_param)
3805 r.copy_param = true;
3806 else if (in->copy_param)
3807 r.offset = out->offset;
3808 else if (out->copy_param)
3809 r.offset = in->offset;
3810 else
3811 r.offset = in->offset + out->offset;
3812 adjustments.quick_push (r);
3815 for (i = 0; i < inlen; i++)
3817 struct ipa_parm_adjustment *n = &inner[i];
3819 if (n->remove_param)
3820 adjustments.quick_push (*n);
3823 tmp.release ();
3824 return adjustments;
3827 /* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
3828 friendly way, assuming they are meant to be applied to FNDECL. */
3830 void
3831 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
3832 tree fndecl)
3834 int i, len = adjustments.length ();
3835 bool first = true;
3836 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
3838 fprintf (file, "IPA param adjustments: ");
3839 for (i = 0; i < len; i++)
3841 struct ipa_parm_adjustment *adj;
3842 adj = &adjustments[i];
3844 if (!first)
3845 fprintf (file, " ");
3846 else
3847 first = false;
3849 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
3850 print_generic_expr (file, parms[adj->base_index], 0);
3851 if (adj->base)
3853 fprintf (file, ", base: ");
3854 print_generic_expr (file, adj->base, 0);
3856 if (adj->reduction)
3858 fprintf (file, ", reduction: ");
3859 print_generic_expr (file, adj->reduction, 0);
3861 if (adj->new_ssa_base)
3863 fprintf (file, ", new_ssa_base: ");
3864 print_generic_expr (file, adj->new_ssa_base, 0);
3867 if (adj->copy_param)
3868 fprintf (file, ", copy_param");
3869 else if (adj->remove_param)
3870 fprintf (file, ", remove_param");
3871 else
3872 fprintf (file, ", offset %li", (long) adj->offset);
3873 if (adj->by_ref)
3874 fprintf (file, ", by_ref");
3875 print_node_brief (file, ", type: ", adj->type, 0);
3876 fprintf (file, "\n");
/* The formal-parameter vec was heap allocated above; release it.  */
3878 parms.release ();
3881 /* Dump the AV linked list. */
/* Prints each aggregate replacement as "index[offset]=value", comma
   separated, on a single line.  */
3883 void
3884 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
3886 bool comma = false;
3887 fprintf (f, " Aggregate replacements:");
3888 for (; av; av = av->next)
3890 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
3891 av->index, av->offset);
3892 print_generic_expr (f, av->value, 0);
3893 comma = true;
3895 fprintf (f, "\n");
3898 /* Stream out jump function JUMP_FUNC to OB. */
/* Writes the type tag, then the type-specific payload, then the aggregate
   jump-function items.  The format must stay in sync with
   ipa_read_jump_function below.  */
3900 static void
3901 ipa_write_jump_function (struct output_block *ob,
3902 struct ipa_jump_func *jump_func)
3904 struct ipa_agg_jf_item *item;
3905 struct bitpack_d bp;
3906 int i, count;
3908 streamer_write_uhwi (ob, jump_func->type);
3909 switch (jump_func->type)
3911 case IPA_JF_UNKNOWN:
3912 break;
3913 case IPA_JF_KNOWN_TYPE:
3914 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
3915 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
3916 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
3917 break;
3918 case IPA_JF_CONST:
/* Constants with locations cannot be streamed here; the reader would
   not reconstruct them.  */
3919 gcc_assert (
3920 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
3921 stream_write_tree (ob, jump_func->value.constant.value, true);
3922 break;
3923 case IPA_JF_PASS_THROUGH:
3924 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
/* NOP_EXPR means a simple pass-through with preservation bits; any
   other operation carries an extra operand instead.  */
3925 if (jump_func->value.pass_through.operation == NOP_EXPR)
3927 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
3928 bp = bitpack_create (ob->main_stream);
3929 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
3930 bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
3931 streamer_write_bitpack (&bp);
3933 else
3935 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
3936 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
3938 break;
3939 case IPA_JF_ANCESTOR:
3940 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
3941 stream_write_tree (ob, jump_func->value.ancestor.type, true);
3942 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
3943 bp = bitpack_create (ob->main_stream);
3944 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
3945 bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
3946 streamer_write_bitpack (&bp);
3947 break;
/* Aggregate items: count, then (only when non-empty) the by_ref bit,
   then each (offset, value) pair.  */
3950 count = vec_safe_length (jump_func->agg.items);
3951 streamer_write_uhwi (ob, count);
3952 if (count)
3954 bp = bitpack_create (ob->main_stream);
3955 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
3956 streamer_write_bitpack (&bp);
3959 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
3961 streamer_write_uhwi (ob, item->offset);
3962 stream_write_tree (ob, item->value, true);
3966 /* Read in jump function JUMP_FUNC from IB. */
/* Mirror of ipa_write_jump_function: reads the type tag, the
   type-specific payload, then the aggregate items.  CS is the edge the
   jump function belongs to (needed for constant reference descriptors);
   DATA_IN supplies the tree streamer context.  */
3968 static void
3969 ipa_read_jump_function (struct lto_input_block *ib,
3970 struct ipa_jump_func *jump_func,
3971 struct cgraph_edge *cs,
3972 struct data_in *data_in)
3974 enum jump_func_type jftype;
3975 enum tree_code operation;
3976 int i, count;
3978 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
3979 switch (jftype)
3981 case IPA_JF_UNKNOWN:
3982 jump_func->type = IPA_JF_UNKNOWN;
3983 break;
3984 case IPA_JF_KNOWN_TYPE:
3986 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
3987 tree base_type = stream_read_tree (ib, data_in);
3988 tree component_type = stream_read_tree (ib, data_in);
3990 ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
3991 break;
3993 case IPA_JF_CONST:
3994 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
3995 break;
3996 case IPA_JF_PASS_THROUGH:
3997 operation = (enum tree_code) streamer_read_uhwi (ib);
3998 if (operation == NOP_EXPR)
4000 int formal_id = streamer_read_uhwi (ib);
4001 struct bitpack_d bp = streamer_read_bitpack (ib);
4002 bool agg_preserved = bp_unpack_value (&bp, 1);
4003 bool type_preserved = bp_unpack_value (&bp, 1);
4004 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
4005 type_preserved);
4007 else
4009 tree operand = stream_read_tree (ib, data_in);
4010 int formal_id = streamer_read_uhwi (ib);
4011 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4012 operation);
4014 break;
4015 case IPA_JF_ANCESTOR:
4017 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4018 tree type = stream_read_tree (ib, data_in);
4019 int formal_id = streamer_read_uhwi (ib);
4020 struct bitpack_d bp = streamer_read_bitpack (ib);
4021 bool agg_preserved = bp_unpack_value (&bp, 1);
4022 bool type_preserved = bp_unpack_value (&bp, 1);
4024 ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
4025 type_preserved);
4026 break;
/* Aggregate items, in the same layout the writer produced.  */
4030 count = streamer_read_uhwi (ib);
4031 vec_alloc (jump_func->agg.items, count);
4032 if (count)
4034 struct bitpack_d bp = streamer_read_bitpack (ib);
4035 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4037 for (i = 0; i < count; i++)
4039 struct ipa_agg_jf_item item;
4040 item.offset = streamer_read_uhwi (ib);
4041 item.value = stream_read_tree (ib, data_in);
4042 jump_func->agg.items->quick_push (item);
4046 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4047 relevant to indirect inlining to OB. */
/* Layout: param_index, offset, a 4-flag bitpack, and — only for
   polymorphic calls — the OTR token and type.  Keep in sync with
   ipa_read_indirect_edge_info.  */
4049 static void
4050 ipa_write_indirect_edge_info (struct output_block *ob,
4051 struct cgraph_edge *cs)
4053 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4054 struct bitpack_d bp;
4056 streamer_write_hwi (ob, ii->param_index);
4057 streamer_write_hwi (ob, ii->offset);
4058 bp = bitpack_create (ob->main_stream);
4059 bp_pack_value (&bp, ii->polymorphic, 1);
4060 bp_pack_value (&bp, ii->agg_contents, 1);
4061 bp_pack_value (&bp, ii->member_ptr, 1);
4062 bp_pack_value (&bp, ii->by_ref, 1);
4063 streamer_write_bitpack (&bp);
4065 if (ii->polymorphic)
4067 streamer_write_hwi (ob, ii->otr_token);
4068 stream_write_tree (ob, ii->otr_type, true);
4072 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4073 relevant to indirect inlining from IB. */
/* Exact mirror of ipa_write_indirect_edge_info.  NOTE(review): DATA_IN is
   marked ATTRIBUTE_UNUSED but is in fact used by stream_read_tree on the
   polymorphic path — the attribute is stale.  */
4075 static void
4076 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4077 struct data_in *data_in ATTRIBUTE_UNUSED,
4078 struct cgraph_edge *cs)
4080 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4081 struct bitpack_d bp;
4083 ii->param_index = (int) streamer_read_hwi (ib);
4084 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4085 bp = streamer_read_bitpack (ib);
4086 ii->polymorphic = bp_unpack_value (&bp, 1);
4087 ii->agg_contents = bp_unpack_value (&bp, 1);
4088 ii->member_ptr = bp_unpack_value (&bp, 1);
4089 ii->by_ref = bp_unpack_value (&bp, 1);
4090 if (ii->polymorphic)
4092 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4093 ii->otr_type = stream_read_tree (ib, data_in);
4097 /* Stream out NODE info to OB. */
/* Writes: node reference, parameter count, per-parameter move costs, a
   bitpack of per-parameter "used" flags, per-parameter controlled-uses
   counts, then jump functions for every direct callee edge and jump
   functions plus indirect-call info for every indirect edge.  Must stay
   in sync with ipa_read_node_info.  */
4099 static void
4100 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4102 int node_ref;
4103 lto_symtab_encoder_t encoder;
4104 struct ipa_node_params *info = IPA_NODE_REF (node);
4105 int j;
4106 struct cgraph_edge *e;
4107 struct bitpack_d bp;
4109 encoder = ob->decl_state->symtab_node_encoder;
4110 node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
4111 streamer_write_uhwi (ob, node_ref);
4113 streamer_write_uhwi (ob, ipa_get_param_count (info));
4114 for (j = 0; j < ipa_get_param_count (info); j++)
4115 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4116 bp = bitpack_create (ob->main_stream);
/* Only fully analyzed, non-clone, non-queued nodes may be streamed.  */
4117 gcc_assert (info->uses_analysis_done
4118 || ipa_get_param_count (info) == 0);
4119 gcc_assert (!info->node_enqueued);
4120 gcc_assert (!info->ipcp_orig_node);
4121 for (j = 0; j < ipa_get_param_count (info); j++)
4122 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4123 streamer_write_bitpack (&bp);
4124 for (j = 0; j < ipa_get_param_count (info); j++)
4125 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4126 for (e = node->callees; e; e = e->next_callee)
4128 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4130 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4131 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4132 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4134 for (e = node->indirect_calls; e; e = e->next_callee)
4136 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4138 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4139 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4140 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4141 ipa_write_indirect_edge_info (ob, e);
4145 /* Stream in NODE info from IB. */
/* Mirror of ipa_write_node_info; allocates the node's parameter info from
   the streamed count, then restores per-parameter data and per-edge jump
   functions (plus indirect-call info for indirect edges).  */
4147 static void
4148 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4149 struct data_in *data_in)
4151 struct ipa_node_params *info = IPA_NODE_REF (node);
4152 int k;
4153 struct cgraph_edge *e;
4154 struct bitpack_d bp;
4156 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4158 for (k = 0; k < ipa_get_param_count (info); k++)
4159 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4161 bp = streamer_read_bitpack (ib);
/* The writer asserts uses_analysis_done (or zero params), so restore it
   here without streaming it explicitly.  */
4162 if (ipa_get_param_count (info) != 0)
4163 info->uses_analysis_done = true;
4164 info->node_enqueued = false;
4165 for (k = 0; k < ipa_get_param_count (info); k++)
4166 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4167 for (k = 0; k < ipa_get_param_count (info); k++)
4168 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4169 for (e = node->callees; e; e = e->next_callee)
4171 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4172 int count = streamer_read_uhwi (ib);
4174 if (!count)
4175 continue;
4176 vec_safe_grow_cleared (args->jump_functions, count);
4178 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4179 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4180 data_in);
4182 for (e = node->indirect_calls; e; e = e->next_callee)
4184 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4185 int count = streamer_read_uhwi (ib);
4187 if (count)
4189 vec_safe_grow_cleared (args->jump_functions, count);
4190 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4191 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4192 data_in);
/* Indirect edges always carry indirect-call info, even with zero
   arguments.  */
4194 ipa_read_indirect_edge_info (ib, data_in, e);
4198 /* Write jump functions for nodes in SET. */
/* Emits the LTO jump-functions section: a count of qualifying nodes in
   this partition followed by one ipa_write_node_info record per node.
   Bails out early if IPA analysis never ran.  */
4200 void
4201 ipa_prop_write_jump_functions (void)
4203 struct cgraph_node *node;
4204 struct output_block *ob;
4205 unsigned int count = 0;
4206 lto_symtab_encoder_iterator lsei;
4207 lto_symtab_encoder_t encoder;
4210 if (!ipa_node_params_vector.exists ())
4211 return;
4213 ob = create_output_block (LTO_section_jump_functions);
4214 encoder = ob->decl_state->symtab_node_encoder;
4215 ob->cgraph_node = NULL;
/* First pass only counts, so the count can precede the records.  */
4216 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4217 lsei_next_function_in_partition (&lsei))
4219 node = lsei_cgraph_node (lsei);
4220 if (cgraph_function_with_gimple_body_p (node)
4221 && IPA_NODE_REF (node) != NULL)
4222 count++;
4225 streamer_write_uhwi (ob, count);
4227 /* Process all of the functions. */
4228 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4229 lsei_next_function_in_partition (&lsei))
4231 node = lsei_cgraph_node (lsei);
4232 if (cgraph_function_with_gimple_body_p (node)
4233 && IPA_NODE_REF (node) != NULL)
4234 ipa_write_node_info (ob, node);
4236 streamer_write_char_stream (ob->main_stream, 0);
4237 produce_asm (ob, NULL);
4238 destroy_output_block (ob);
4241 /* Read section in file FILE_DATA of length LEN with data DATA. */
/* Parses one jump-functions section: decodes the LTO function header to
   locate the main and string streams, reads the node count, then one
   ipa_read_node_info record per node, resolving node references through
   the file's symtab encoder.  */
4243 static void
4244 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4245 size_t len)
4247 const struct lto_function_header *header =
4248 (const struct lto_function_header *) data;
4249 const int cfg_offset = sizeof (struct lto_function_header);
4250 const int main_offset = cfg_offset + header->cfg_size;
4251 const int string_offset = main_offset + header->main_size;
4252 struct data_in *data_in;
4253 struct lto_input_block ib_main;
4254 unsigned int i;
4255 unsigned int count;
4257 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4258 header->main_size);
4260 data_in =
4261 lto_data_in_create (file_data, (const char *) data + string_offset,
4262 header->string_size, vNULL);
4263 count = streamer_read_uhwi (&ib_main);
4265 for (i = 0; i < count; i++)
4267 unsigned int index;
4268 struct cgraph_node *node;
4269 lto_symtab_encoder_t encoder;
4271 index = streamer_read_uhwi (&ib_main);
4272 encoder = file_data->symtab_node_encoder;
4273 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4274 gcc_assert (node->symbol.definition);
4275 ipa_read_node_info (&ib_main, node, data_in);
4277 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4278 len);
4279 lto_data_in_delete (data_in);
4282 /* Read ipcp jump functions. */
4284 void
4285 ipa_prop_read_jump_functions (void)
4287 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4288 struct lto_file_decl_data *file_data;
4289 unsigned int j = 0;
4291 ipa_check_create_node_params ();
4292 ipa_check_create_edge_args ();
4293 ipa_register_cgraph_hooks ();
4295 while ((file_data = file_data_vec[j++]))
4297 size_t len;
4298 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4300 if (data)
4301 ipa_prop_read_section (file_data, data, len);
/* After merging units, we can get mismatch in argument counts.
   Also decl merging might've rendered parameter lists obsolete.
   Also compute called_with_variable_arg info.  */

void
ipa_update_after_lto_read (void)
{
  /* Re-validate the per-node and per-edge summary vectors; merging may
     have created nodes and edges that have no entries yet.  */
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
}
4316 void
4317 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
4319 int node_ref;
4320 unsigned int count = 0;
4321 lto_symtab_encoder_t encoder;
4322 struct ipa_agg_replacement_value *aggvals, *av;
4324 aggvals = ipa_get_agg_replacements_for_node (node);
4325 encoder = ob->decl_state->symtab_node_encoder;
4326 node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
4327 streamer_write_uhwi (ob, node_ref);
4329 for (av = aggvals; av; av = av->next)
4330 count++;
4331 streamer_write_uhwi (ob, count);
4333 for (av = aggvals; av; av = av->next)
4335 struct bitpack_d bp;
4337 streamer_write_uhwi (ob, av->offset);
4338 streamer_write_uhwi (ob, av->index);
4339 stream_write_tree (ob, av->value, true);
4341 bp = bitpack_create (ob->main_stream);
4342 bp_pack_value (&bp, av->by_ref, 1);
4343 streamer_write_bitpack (&bp);
4347 /* Stream in the aggregate value replacement chain for NODE from IB. */
4349 static void
4350 read_agg_replacement_chain (struct lto_input_block *ib,
4351 struct cgraph_node *node,
4352 struct data_in *data_in)
4354 struct ipa_agg_replacement_value *aggvals = NULL;
4355 unsigned int count, i;
4357 count = streamer_read_uhwi (ib);
4358 for (i = 0; i <count; i++)
4360 struct ipa_agg_replacement_value *av;
4361 struct bitpack_d bp;
4363 av = ggc_alloc_ipa_agg_replacement_value ();
4364 av->offset = streamer_read_uhwi (ib);
4365 av->index = streamer_read_uhwi (ib);
4366 av->value = stream_read_tree (ib, data_in);
4367 bp = streamer_read_bitpack (ib);
4368 av->by_ref = bp_unpack_value (&bp, 1);
4369 av->next = aggvals;
4370 aggvals = av;
4372 ipa_set_node_agg_value_chain (node, aggvals);
4375 /* Write all aggregate replacement for nodes in set. */
4377 void
4378 ipa_prop_write_all_agg_replacement (void)
4380 struct cgraph_node *node;
4381 struct output_block *ob;
4382 unsigned int count = 0;
4383 lto_symtab_encoder_iterator lsei;
4384 lto_symtab_encoder_t encoder;
4386 if (!ipa_node_agg_replacements)
4387 return;
4389 ob = create_output_block (LTO_section_ipcp_transform);
4390 encoder = ob->decl_state->symtab_node_encoder;
4391 ob->cgraph_node = NULL;
4392 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4393 lsei_next_function_in_partition (&lsei))
4395 node = lsei_cgraph_node (lsei);
4396 if (cgraph_function_with_gimple_body_p (node)
4397 && ipa_get_agg_replacements_for_node (node) != NULL)
4398 count++;
4401 streamer_write_uhwi (ob, count);
4403 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4404 lsei_next_function_in_partition (&lsei))
4406 node = lsei_cgraph_node (lsei);
4407 if (cgraph_function_with_gimple_body_p (node)
4408 && ipa_get_agg_replacements_for_node (node) != NULL)
4409 write_agg_replacement_chain (ob, node);
4411 streamer_write_char_stream (ob->main_stream, 0);
4412 produce_asm (ob, NULL);
4413 destroy_output_block (ob);
4416 /* Read replacements section in file FILE_DATA of length LEN with data
4417 DATA. */
4419 static void
4420 read_replacements_section (struct lto_file_decl_data *file_data,
4421 const char *data,
4422 size_t len)
4424 const struct lto_function_header *header =
4425 (const struct lto_function_header *) data;
4426 const int cfg_offset = sizeof (struct lto_function_header);
4427 const int main_offset = cfg_offset + header->cfg_size;
4428 const int string_offset = main_offset + header->main_size;
4429 struct data_in *data_in;
4430 struct lto_input_block ib_main;
4431 unsigned int i;
4432 unsigned int count;
4434 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4435 header->main_size);
4437 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
4438 header->string_size, vNULL);
4439 count = streamer_read_uhwi (&ib_main);
4441 for (i = 0; i < count; i++)
4443 unsigned int index;
4444 struct cgraph_node *node;
4445 lto_symtab_encoder_t encoder;
4447 index = streamer_read_uhwi (&ib_main);
4448 encoder = file_data->symtab_node_encoder;
4449 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4450 gcc_assert (node->symbol.definition);
4451 read_agg_replacement_chain (&ib_main, node, data_in);
4453 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4454 len);
4455 lto_data_in_delete (data_in);
4458 /* Read IPA-CP aggregate replacements. */
4460 void
4461 ipa_prop_read_all_agg_replacement (void)
4463 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4464 struct lto_file_decl_data *file_data;
4465 unsigned int j = 0;
4467 while ((file_data = file_data_vec[j++]))
4469 size_t len;
4470 const char *data = lto_get_section_data (file_data,
4471 LTO_section_ipcp_transform,
4472 NULL, &len);
4473 if (data)
4474 read_replacements_section (file_data, data, len);
4478 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4479 NODE. */
4481 static void
4482 adjust_agg_replacement_values (struct cgraph_node *node,
4483 struct ipa_agg_replacement_value *aggval)
4485 struct ipa_agg_replacement_value *v;
4486 int i, c = 0, d = 0, *adj;
4488 if (!node->clone.combined_args_to_skip)
4489 return;
4491 for (v = aggval; v; v = v->next)
4493 gcc_assert (v->index >= 0);
4494 if (c < v->index)
4495 c = v->index;
4497 c++;
4499 adj = XALLOCAVEC (int, c);
4500 for (i = 0; i < c; i++)
4501 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
4503 adj[i] = -1;
4504 d++;
4506 else
4507 adj[i] = i - d;
4509 for (v = aggval; v; v = v->next)
4510 v->index = adj[v->index];
4514 /* Function body transformation phase. */
4516 unsigned int
4517 ipcp_transform_function (struct cgraph_node *node)
4519 vec<ipa_param_descriptor_t> descriptors = vNULL;
4520 struct param_analysis_info *parms_ainfo;
4521 struct ipa_agg_replacement_value *aggval;
4522 gimple_stmt_iterator gsi;
4523 basic_block bb;
4524 int param_count;
4525 bool cfg_changed = false, something_changed = false;
4527 gcc_checking_assert (cfun);
4528 gcc_checking_assert (current_function_decl);
4530 if (dump_file)
4531 fprintf (dump_file, "Modification phase of node %s/%i\n",
4532 cgraph_node_name (node), node->symbol.order);
4534 aggval = ipa_get_agg_replacements_for_node (node);
4535 if (!aggval)
4536 return 0;
4537 param_count = count_formal_params (node->symbol.decl);
4538 if (param_count == 0)
4539 return 0;
4540 adjust_agg_replacement_values (node, aggval);
4541 if (dump_file)
4542 ipa_dump_agg_replacement_values (dump_file, aggval);
4543 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
4544 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
4545 descriptors.safe_grow_cleared (param_count);
4546 ipa_populate_param_decls (node, descriptors);
4548 FOR_EACH_BB (bb)
4549 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4551 struct ipa_agg_replacement_value *v;
4552 gimple stmt = gsi_stmt (gsi);
4553 tree rhs, val, t;
4554 HOST_WIDE_INT offset;
4555 int index;
4556 bool by_ref, vce;
4558 if (!gimple_assign_load_p (stmt))
4559 continue;
4560 rhs = gimple_assign_rhs1 (stmt);
4561 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
4562 continue;
4564 vce = false;
4565 t = rhs;
4566 while (handled_component_p (t))
4568 /* V_C_E can do things like convert an array of integers to one
4569 bigger integer and similar things we do not handle below. */
4570 if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
4572 vce = true;
4573 break;
4575 t = TREE_OPERAND (t, 0);
4577 if (vce)
4578 continue;
4580 if (!ipa_load_from_parm_agg_1 (descriptors, parms_ainfo, stmt,
4581 rhs, &index, &offset, &by_ref))
4582 continue;
4583 for (v = aggval; v; v = v->next)
4584 if (v->index == index
4585 && v->offset == offset)
4586 break;
4587 if (!v || v->by_ref != by_ref)
4588 continue;
4590 gcc_checking_assert (is_gimple_ip_invariant (v->value));
4591 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
4593 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
4594 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
4595 else if (TYPE_SIZE (TREE_TYPE (rhs))
4596 == TYPE_SIZE (TREE_TYPE (v->value)))
4597 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
4598 else
4600 if (dump_file)
4602 fprintf (dump_file, " const ");
4603 print_generic_expr (dump_file, v->value, 0);
4604 fprintf (dump_file, " can't be converted to type of ");
4605 print_generic_expr (dump_file, rhs, 0);
4606 fprintf (dump_file, "\n");
4608 continue;
4611 else
4612 val = v->value;
4614 if (dump_file && (dump_flags & TDF_DETAILS))
4616 fprintf (dump_file, "Modifying stmt:\n ");
4617 print_gimple_stmt (dump_file, stmt, 0, 0);
4619 gimple_assign_set_rhs_from_tree (&gsi, val);
4620 update_stmt (stmt);
4622 if (dump_file && (dump_flags & TDF_DETAILS))
4624 fprintf (dump_file, "into:\n ");
4625 print_gimple_stmt (dump_file, stmt, 0, 0);
4626 fprintf (dump_file, "\n");
4629 something_changed = true;
4630 if (maybe_clean_eh_stmt (stmt)
4631 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4632 cfg_changed = true;
4635 (*ipa_node_agg_replacements)[node->uid] = NULL;
4636 free_parms_ainfo (parms_ainfo, param_count);
4637 descriptors.release ();
4639 if (!something_changed)
4640 return 0;
4641 else if (cfg_changed)
4642 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
4643 else
4644 return TODO_update_ssa_only_virtuals;