/* Interprocedural analyses.
   Copyright (C) 2005-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "langhooks.h"
#include "ggc.h"
#include "target.h"
#include "cgraph.h"
#include "ipa-prop.h"
#include "tree-ssa.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "gimple.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "params.h"
/* Intermediate information about a parameter that is only useful during the
   run of ipa_analyze_node and is not kept afterwards.  */

struct param_analysis_info
{
  bool parm_modified, ref_modified, pt_modified;
  bitmap parm_visited_statements, pt_visited_statements;
};

/* Vector where the parameter infos are actually stored.  */
vec<ipa_node_params_t> ipa_node_params_vector;
/* Vector of known aggregate values in cloned nodes.  */
vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
/* Vector where the edge argument infos are actually stored.  */
vec<ipa_edge_args_t, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static alloc_pool ipa_refdesc_pool;
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->symbol.decl);
  struct cl_optimization *os;

  if (!fs_opts)
    return false;
  os = TREE_OPTIMIZATION (fs_opts);
  return !os->x_optimize || !os->x_flag_ipa_cp;
}
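
/* For instance, a definition along the lines of the hypothetical

     int __attribute__ ((optimize (0))) f (int x) { return x; }

   carries a DECL_FUNCTION_SPECIFIC_OPTIMIZATION node whose x_optimize is
   zero, so the predicate above returns true and IPA-CP leaves the function
   alone.  The same is expected of optimize ("no-ipa-cp"), which clears
   x_flag_ipa_cp instead.  */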
/* Return index of the formal whose tree is PTREE among the parameter
   DESCRIPTORS.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor_t> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor_t> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->symbol.decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm));
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

static inline int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;

  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}
/* Dump the index and, if available, the param decl of the Ith formal
   parameter of the function corresponding to INFO to FILE.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}
/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->symbol.decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_KNOWN_TYPE)
	{
	  fprintf (f, "KNOWN TYPE: base ");
	  print_generic_expr (f, jump_func->value.known_type.base_type, 0);
	  fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
		   jump_func->value.known_type.offset);
	  print_generic_expr (f, jump_func->value.known_type.component_type, 0);
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val, 0);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
				  0);
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   tree_code_name[(int)
				  jump_func->value.pass_through.operation]);
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f,
				  jump_func->value.pass_through.operand, 0);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  if (jump_func->value.pass_through.type_preserved)
	    fprintf (f, ", type_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  print_generic_expr (f, jump_func->value.ancestor.type, 0);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  if (jump_func->value.ancestor.type_preserved)
	    fprintf (f, ", type_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_low_cst (TYPE_SIZE (item->value), 1));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value, 0);
		}
	      fprintf (f, "\n");
	    }
	}
    }
}
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s/%i:\n", cgraph_node_name (node),
	   node->symbol.order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
	       xstrdup (cgraph_node_name (node)), node->symbol.order,
	       xstrdup (cgraph_node_name (cs->callee)),
	       cs->callee->symbol.order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i",
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}
/* Set JFUNC to be a known type jump function.  */

static void
ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		       tree base_type, tree component_type)
{
  gcc_assert (TREE_CODE (component_type) == RECORD_TYPE
	      && TYPE_BINFO (component_type));
  jfunc->type = IPA_JF_KNOWN_TYPE;
  jfunc->value.known_type.offset = offset;
  jfunc->value.known_type.base_type = base_type;
  jfunc->value.known_type.component_type = component_type;
}
/* Set JFUNC to be a copy of another jump function (to be used by jump function
   combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;
      if (!ipa_refdesc_pool)
	ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
					sizeof (struct ipa_cst_ref_desc), 32);

      rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
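
/* As an illustration (hypothetical input), in

     extern void bar (void (*) (void));
     static void foo (void) {}
     void baz (void) { bar (foo); }

   the argument of the call to BAR is lowered to the ADDR_EXPR &foo of a
   FUNCTION_DECL, so the constant jump function built above also gets a
   reference description with a refcount of one, recording that the call
   statement in BAZ takes a reference on FOO.  */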
/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved, bool type_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
  jfunc->value.pass_through.type_preserved = type_preserved;
}

/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
  jfunc->value.pass_through.type_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     tree type, int formal_id, bool agg_preserved,
		     bool type_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.type = type;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
  jfunc->value.ancestor.type_preserved = type_preserved;
}
/* Extract the actual BINFO being described by JFUNC which must be a known type
   jump function.  */

tree
ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
{
  tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);

  if (!base_binfo)
    return NULL_TREE;
  return get_binfo_at_offset (base_binfo,
			      jfunc->value.known_type.offset,
			      jfunc->value.known_type.component_type);
}
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */
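
/* A sketch of the assumed constructor shape, in C++ terms (hypothetical
   example):

     struct B : A
     {
       B () : A ()	// 1) ancestor constructors run first
       {
	 // 2) the compiler stores the address of B's vtable into the VMT
	 //    pointers of this object and its ancestor sub-objects here
	 do_stuff ();	// 3) user code, may call virtual functions
       }
     };
*/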
static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
/* If STMT can be proved to be an assignment to the virtual method table
   pointer of the object described by TCI and the type associated with the new
   table identified, return the type.  Otherwise return NULL_TREE.  */

static tree
extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
{
  HOST_WIDE_INT offset, size, max_size;
  tree lhs, rhs, base;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
      || TREE_CODE (rhs) != ADDR_EXPR)
    return NULL_TREE;
  rhs = get_base_address (TREE_OPERAND (rhs, 0));
  if (!rhs
      || TREE_CODE (rhs) != VAR_DECL
      || !DECL_VIRTUAL_P (rhs))
    return NULL_TREE;

  base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
  if (offset != tci->offset
      || size != POINTER_SIZE
      || max_size != POINTER_SIZE)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (TREE_CODE (tci->object) != MEM_REF
	  || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
	  || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
				  TREE_OPERAND (base, 1)))
	return NULL_TREE;
    }
  else if (tci->object != base)
    return NULL_TREE;

  return DECL_CONTEXT (rhs);
}
/* Callback of walk_aliased_vdefs and a helper function for
   detect_type_change to check whether a particular statement may modify
   the virtual table pointer, and if possible also determine the new type of
   the (sub-)object.  It stores its result into DATA, which points to a
   type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct type_change_info *tci = (struct type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tree type;
      type = extr_type_from_vtbl_ptr_store (stmt, tci);
      if (tci->type_maybe_changed
	  && type != tci->known_current_type)
	tci->multiple_types_encountered = true;
      tci->known_current_type = type;
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it has, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gimple call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  struct type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));
  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (comp_type)
      || !BINFO_VTABLE (TYPE_BINFO (comp_type)))
    return false;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.known_current_type = NULL_TREE;
  tci.type_maybe_changed = false;
  tci.multiple_types_encountered = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL);
  if (!tci.type_maybe_changed)
    return false;

  if (!tci.known_current_type
      || tci.multiple_types_encountered
      || offset != 0)
    jfunc->type = IPA_JF_UNKNOWN;
  else
    ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);

  return true;
}
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gimple call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change (arg, arg, comp_type, call, jfunc, 0);
}
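
/* For example, given a THIS pointer parameter of a method, the wrapper above
   builds a reference that would print roughly as

     MEM[(void *)this_1(D)]

   and asks detect_type_change whether the virtual table pointer at offset
   zero of the pointed-to object may have been rewritten before CALL (the
   exact dump syntax is version dependent).  */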
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  PARM_AINFO is a pointer to a structure containing temporary
   information about the parameter.  */

static bool
parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
			      gimple stmt, tree parm_load)
{
  bool modified = false;
  bitmap *visited_stmts;
  ao_ref refd;

  if (parm_ainfo && parm_ainfo->parm_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  /* We can cache visited statements only when parm_ainfo is available and when
     we are looking at a naked load of the whole parameter.  */
  if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
    visited_stmts = NULL;
  else
    visited_stmts = &parm_ainfo->parm_visited_statements;
  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
		      visited_stmts);
  if (parm_ainfo && modified)
    parm_ainfo->parm_modified = true;
  return !modified;
}
/* If STMT is an assignment that loads a value from a parameter declaration
   that has not been modified, return the index of the parameter in
   ipa_node_params.  Otherwise return -1.  */

static int
load_from_unmodified_param (vec<ipa_param_descriptor_t> descriptors,
			    struct param_analysis_info *parms_ainfo,
			    gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
					: NULL, stmt, op1))
    return -1;

  return index;
}
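
/* E.g. an assignment such as

     a.0_2 = a;

   where A is a PARM_DECL that is not a gimple register and is provably not
   written to before this statement, yields the index of A; anything else
   yields -1.  */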
/* Return true if memory reference REF loads data that are known to be
   unmodified in this function before reaching statement STMT.  PARM_AINFO, if
   non-NULL, is a pointer to a structure containing temporary information about
   PARM.  */

static bool
parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
			   gimple stmt, tree ref)
{
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (gimple_vuse (stmt));
  if (parm_ainfo && parm_ainfo->ref_modified)
    return false;

  ao_ref_init (&refd, ref);
  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
		      NULL);
  if (parm_ainfo && modified)
    parm_ainfo->ref_modified = true;
  return !modified;
}
/* Return true if the data pointed to by PARM is known to be unmodified in this
   function before reaching call statement CALL into which it is passed.
   PARM_AINFO is a pointer to a structure containing temporary information
   about PARM.  */

static bool
parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
			      gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm)))
    return false;

  if (parm_ainfo->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
		      parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
  if (modified)
    parm_ainfo->pt_modified = true;
  return !modified;
}
/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

static bool
ipa_load_from_parm_agg_1 (vec<ipa_param_descriptor_t> descriptors,
			  struct param_analysis_info *parms_ainfo, gimple stmt,
			  tree op, int *index_p, HOST_WIDE_INT *offset_p,
			  bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
					   : NULL, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	   void (*<T2e4>) (struct S *) D.1867;
	   struct S * p.1;

	   <bb 2>:
	   p.1_1 = p;
	   D.1867_2 = p.1_1->f;
	   D.1867_2 ();
	   gdp = &p;
	 */

      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (descriptors, parms_ainfo, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
				    stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      return true;
    }
  return false;
}
/* Just like the previous function, just without the param_analysis_info
   pointer, for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
			tree op, int *index_p, HOST_WIDE_INT *offset_p,
			bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (info->descriptors, NULL, stmt, op, index_p,
				   offset_p, by_ref_p);
}
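
/* For instance (hypothetical GIMPLE), for the load

     _5 = p_1(D)->fld;

   where P is a pointer parameter of the current function that is never
   written through before the statement, these helpers set *INDEX_P to the
   index of P, *OFFSET_P to the bit offset of FLD and *BY_REF_P to true.  */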
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
	int a.0;

	a.0_2 = a;
	bar (a.0_2);
      }

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
	int D.2064;

	D.2064_4 = a.1(D) + 4;
	bar (D.2064_4);
      }

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
	int a.0;
	int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);
      }

   3) The passed value is an address of an object within another one (which is
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);
     }

   INFO is the structure describing individual parameters as accessed at
   different stages of IPA optimizations.  PARMS_AINFO contains the information
   that is only needed for intraprocedural analysis.  */
static void
compute_complex_assign_jump_func (struct ipa_node_params *info,
				  struct param_analysis_info *parms_ainfo,
				  struct ipa_jump_func *jfunc,
				  gimple call, gimple stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (info->descriptors, parms_ainfo,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (info->descriptors, parms_ainfo, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
	{
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  ipa_set_jf_arith_pass_through (jfunc, index, op2,
					 gimple_assign_rhs_code (stmt));
	}
      else if (gimple_assign_single_p (stmt))
	{
	  bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
						     call, tc_ssa);
	  bool type_p = false;

	  if (param_type && POINTER_TYPE_P (param_type))
	    type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
					      call, jfunc);
	  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).low * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    {
      bool type_p = !detect_type_change (op1, base, TREE_TYPE (param_type),
					 call, jfunc, offset);
      if (type_p || jfunc->type == IPA_JF_UNKNOWN)
	ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index,
			     parm_ref_data_pass_through_p (&parms_ainfo[index],
							   call, ssa), type_p);
    }
}
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

     <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

     <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_node_params *info,
				    struct param_analysis_info *parms_ainfo,
				    struct ipa_jump_func *jfunc,
				    gimple call, gimple phi, tree param_type)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  gcc_assert (index >= 0);

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  bool type_p = false;
  if (param_type && POINTER_TYPE_P (param_type))
    type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
				  call, jfunc, offset);
  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
    ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index,
			 parm_ref_data_pass_through_p (&parms_ainfo[index],
						       call, parm), type_p);
}
/* Given OP which is passed as an actual argument to a called function,
   determine if it is possible to construct a KNOWN_TYPE jump function for it
   and if so, create one and store it to JFUNC.
   EXPECTED_TYPE represents a type the argument should be in.  */

static void
compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
			      gimple call, tree expected_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  if (!flag_devirtualize
      || TREE_CODE (op) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE
      /* Be sure expected_type is polymorphic.  */
      || !expected_type
      || TREE_CODE (expected_type) != RECORD_TYPE
      || !TYPE_BINFO (expected_type)
      || !BINFO_VTABLE (TYPE_BINFO (expected_type)))
    return;

  op = TREE_OPERAND (op, 0);
  base = get_ref_base_and_extent (op, &offset, &size, &max_size);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
      || is_global_var (base))
    return;

  if (detect_type_change (op, base, expected_type, call, jfunc, offset))
    return;

  ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
			 expected_type);
}
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
      || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
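
/* The shape checked for above corresponds to the way pointers to member
   functions are commonly lowered, roughly (hypothetical sketch):

     struct
     {
       void (T::*__pfn) ();	// pointer to method, or vbit-encoded index
       ptrdiff_t __delta;	// this-pointer adjustment
     };
*/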
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}
/* Simple linked list, describing known contents of an aggregate before a
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is not known to be a
     constant.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  JFUNC is the jump function into
   which the constants are subsequently stored.  */
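
/* For example (hypothetical source), when analyzing the call in

     struct S { int a; float f; };

     struct S s;
     s.a = 1;
     s.f = 2.0f;
     consume (&s);

   the backward walk below should record that the aggregate passed to CONSUME
   holds the constant 1 at offset 0 and 2.0f at the offset of F, which the
   third stage then turns into ipa_agg_jf_item entries.  */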
static void
determine_known_aggregate_parts (gimple call, tree arg,
				 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (TREE_TYPE (arg)))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!host_integerp (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg))), 1))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)));
	  arg_size = tree_low_cst (type_size, 1);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  HOST_WIDE_INT arg_max_size;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					      &arg_max_size);
	  if (arg_max_size == -1
	      || arg_max_size != arg_size
	      || arg_offset < 0)
	    return;
	  if (DECL_P (arg_base))
	    {
	      tree size;
	      check_ref = false;
	      size = build_int_cst (integer_type_node, arg_size);
	      ao_ref_init_from_ptr_and_size (&r, arg_base, size);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					  &arg_max_size);
      if (arg_max_size == -1
	  || arg_max_size != arg_size
	  || arg_offset < 0)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
     describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;
      bool partial_overlap;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
	continue;
      if (!gimple_assign_single_p (stmt))
	break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
	  || TREE_CODE (lhs) == BIT_FIELD_REF
	  || contains_bitfld_component_ref_p (lhs))
	break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
					  &lhs_max_size);
      if (lhs_max_size == -1
	  || lhs_max_size != lhs_size
	  || (lhs_offset < arg_offset
	      && lhs_offset + lhs_size > arg_offset)
	  || (lhs_offset < arg_offset + arg_size
	      && lhs_offset + lhs_size > arg_offset + arg_size))
	break;

      if (check_ref)
	{
	  if (TREE_CODE (lhs_base) != MEM_REF
	      || TREE_OPERAND (lhs_base, 0) != arg_base
	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	    break;
	}
      else if (lhs_base != arg_base)
	{
	  if (DECL_P (lhs_base))
	    continue;
	  else
	    break;
	}

      if (lhs_offset + lhs_size < arg_offset
	  || lhs_offset >= (arg_offset + arg_size))
	continue;

      partial_overlap = false;
      p = &list;
      while (*p && (*p)->offset < lhs_offset)
	{
	  if ((*p)->offset + (*p)->size > lhs_offset)
	    {
	      partial_overlap = true;
	      break;
	    }
	  p = &(*p)->next;
	}
      if (partial_overlap)
	break;
      if (*p && (*p)->offset < lhs_offset + lhs_size)
	{
	  if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	    /* We already know this value is subsequently overwritten with
	       something else.  */
	    continue;
	  else
	    /* Otherwise this is a partial overlap which we cannot
	       represent.  */
	    break;
	}

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
	{
	  n->constant = rhs;
	  const_count++;
	}
      else
	n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      vec_alloc (jfunc->agg.items, const_count);
      while (list)
	{
	  if (list->constant)
	    {
	      struct ipa_agg_jf_item item;
	      item.offset = list->offset - arg_offset;
	      gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	      item.value = unshare_expr_without_location (list->constant);
	      jfunc->agg.items->quick_push (item);
	    }
	  list = list->next;
	}
    }
}
/* Return the Ith param type of the callee of call graph edge E, or NULL if it
   cannot be determined.  */

static tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->symbol.decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->symbol.decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
/* Compute jump functions for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gimple call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);

  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);

      if (is_gimple_ip_invariant (arg))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >= 0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p, type_p;
		  agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
							call, arg);
		  if (param_type && POINTER_TYPE_P (param_type))
		    type_p = !detect_type_change_ssa (arg,
						      TREE_TYPE (param_type),
						      call, jfunc);
		  else
		    type_p = false;
		  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
		    ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
						    type_p);
		}
	    }
	  else
	    {
	      gimple stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
						    call, stmt, param_type);
	    }
	}
      else
	compute_known_type_jump_func (arg, jfunc, call,
				      param_type
				      && POINTER_TYPE_P (param_type)
				      ? TREE_TYPE (param_type)
				      : NULL);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (TREE_TYPE (arg))))
	determine_known_aggregate_parts (call, arg, jfunc);
    }
}
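
/* To illustrate (hypothetical caller), for a call such as

     void caller (int i, struct S *p)
     {
       callee (7, i, p);
     }

   the loop above would typically build a constant jump function for the
   first argument, a simple pass-through for the second and, if the memory
   pointed to by P is provably unmodified before the call, a pass-through
   with agg_preserved set for the third.  */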
/* Compute jump functions for all edges - both direct and indirect - outgoing
   from NODE.  Also count the actual arguments in the process.  */

static void
ipa_compute_jump_functions (struct cgraph_node *node,
			    struct param_analysis_info *parms_ainfo)
{
  struct cgraph_edge *cs;

  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
								  NULL);
      /* We do not need to bother analyzing calls to unknown
	 functions unless they may become known during lto/whopr.  */
      if (!callee->symbol.definition && !flag_lto)
	continue;
      ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
}
/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}
/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}
/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
{
  struct cgraph_edge *cs;

  cs = cgraph_edge (node, stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->offset = 0;
  cs->indirect_info->polymorphic = 0;
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  return cs;
}
/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

     <bb 2>:
       f$__delta_5 = f.__delta;
       f$__pfn_24 = f.__pfn;

   or

     <bb 2>:
       f$__delta_5 = MEM[(struct  *)&f];
       f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and a few lines below:

     <bb 5>
       D.2496_3 = (int) f$__pfn_24;
       D.2497_4 = D.2496_3 & 1;
       if (D.2497_4 != 0)
	 goto <bb 3>;
       else
	 goto <bb 4>;

     <bb 6>:
       D.2500_7 = (unsigned int) f$__delta_5;
       D.2501_8 = &S + D.2500_7;
       D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
       D.2503_10 = *D.2502_9;
       D.2504_12 = f$__pfn_24 + -1;
       D.2505_13 = (unsigned int) D.2504_12;
       D.2506_14 = D.2503_10 + D.2505_13;
       D.2507_15 = *D.2506_14;
       iftmp.11_16 = (String:: *) D.2507_15;

     <bb 7>:
       # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
       D.2500_19 = (unsigned int) f$__delta_5;
       D.2508_20 = &S + D.2500_19;
       D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */
1793 static void
1794 ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1795 struct ipa_node_params *info,
1796 struct param_analysis_info *parms_ainfo,
1797 gimple call, tree target)
1799 gimple def;
1800 tree n1, n2;
1801 gimple d1, d2;
1802 tree rec, rec2, cond;
1803 gimple branch;
1804 int index;
1805 basic_block bb, virt_bb, join;
1806 HOST_WIDE_INT offset;
1807 bool by_ref;
1809 if (SSA_NAME_IS_DEFAULT_DEF (target))
1811 tree var = SSA_NAME_VAR (target);
1812 index = ipa_get_param_decl_index (info, var);
1813 if (index >= 0)
1814 ipa_note_param_call (node, index, call);
1815 return;
1818 def = SSA_NAME_DEF_STMT (target);
1819 if (gimple_assign_single_p (def)
1820 && ipa_load_from_parm_agg_1 (info->descriptors, parms_ainfo, def,
1821 gimple_assign_rhs1 (def), &index, &offset,
1822 &by_ref))
1824 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1825 cs->indirect_info->offset = offset;
1826 cs->indirect_info->agg_contents = 1;
1827 cs->indirect_info->by_ref = by_ref;
1828 return;
1831 /* Now we need to try to match the complex pattern of calling a member
1832 pointer. */
1833 if (gimple_code (def) != GIMPLE_PHI
1834 || gimple_phi_num_args (def) != 2
1835 || !POINTER_TYPE_P (TREE_TYPE (target))
1836 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1837 return;
1839 /* First, we need to check whether one of these is a load from a member
1840 pointer that is a parameter to this function. */
1841 n1 = PHI_ARG_DEF (def, 0);
1842 n2 = PHI_ARG_DEF (def, 1);
1843 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1844 return;
1845 d1 = SSA_NAME_DEF_STMT (n1);
1846 d2 = SSA_NAME_DEF_STMT (n2);
1848 join = gimple_bb (def);
1849 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
1851 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
1852 return;
1854 bb = EDGE_PRED (join, 0)->src;
1855 virt_bb = gimple_bb (d2);
1857 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
1859 bb = EDGE_PRED (join, 1)->src;
1860 virt_bb = gimple_bb (d1);
1862 else
1863 return;
1865 /* Second, we need to check that the basic blocks are laid out in the way
1866 corresponding to the pattern. */
1868 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1869 || single_pred (virt_bb) != bb
1870 || single_succ (virt_bb) != join)
1871 return;
1873 /* Third, let's see that the branching is done depending on the least
1874 significant bit of the pfn. */
1876 branch = last_stmt (bb);
1877 if (!branch || gimple_code (branch) != GIMPLE_COND)
1878 return;
1880 if ((gimple_cond_code (branch) != NE_EXPR
1881 && gimple_cond_code (branch) != EQ_EXPR)
1882 || !integer_zerop (gimple_cond_rhs (branch)))
1883 return;
1885 cond = gimple_cond_lhs (branch);
1886 if (!ipa_is_ssa_with_stmt_def (cond))
1887 return;
1889 def = SSA_NAME_DEF_STMT (cond);
1890 if (!is_gimple_assign (def)
1891 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1892 || !integer_onep (gimple_assign_rhs2 (def)))
1893 return;
1895 cond = gimple_assign_rhs1 (def);
1896 if (!ipa_is_ssa_with_stmt_def (cond))
1897 return;
1899 def = SSA_NAME_DEF_STMT (cond);
1901 if (is_gimple_assign (def)
1902 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
1904 cond = gimple_assign_rhs1 (def);
1905 if (!ipa_is_ssa_with_stmt_def (cond))
1906 return;
1907 def = SSA_NAME_DEF_STMT (cond);
1910 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1911 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1912 == ptrmemfunc_vbit_in_delta),
1913 NULL);
1914 if (rec != rec2)
1915 return;
1917 index = ipa_get_param_decl_index (info, rec);
1918 if (index >= 0
1919 && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
1921 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1922 cs->indirect_info->offset = offset;
1923 cs->indirect_info->agg_contents = 1;
1924 cs->indirect_info->member_ptr = 1;
1927 return;
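/* For illustration, a sketch (not part of GCC): on targets using the
   Itanium C++ ABI the front end lowers a pointer to member function
   roughly to

     struct ptrmemfunc
     {
       void (*__pfn) (void);   // function address or vtable index
       ptrdiff_t __delta;      // offset added to the object pointer
     };

   A call through such a pointer tests the least significant bit of
   __pfn (or of __delta, depending on TARGET_PTRMEMFUNC_VBIT_LOCATION);
   if it is set, the real function address is loaded from the vtable,
   otherwise __pfn is called directly.  That is the branchy pattern
   matched above.  */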
1930 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1931 object referenced in the expression is a formal parameter of the caller
1932 (described by INFO), create a call note for the statement. */
1934 static void
1935 ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1936 struct ipa_node_params *info, gimple call,
1937 tree target)
1939 struct cgraph_edge *cs;
1940 struct cgraph_indirect_call_info *ii;
1941 struct ipa_jump_func jfunc;
1942 tree obj = OBJ_TYPE_REF_OBJECT (target);
1943 int index;
1944 HOST_WIDE_INT anc_offset;
1946 if (!flag_devirtualize)
1947 return;
1949 if (TREE_CODE (obj) != SSA_NAME)
1950 return;
1952 if (SSA_NAME_IS_DEFAULT_DEF (obj))
1954 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
1955 return;
1957 anc_offset = 0;
1958 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
1959 gcc_assert (index >= 0);
1960 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
1961 call, &jfunc))
1962 return;
1964 else
1966 gimple stmt = SSA_NAME_DEF_STMT (obj);
1967 tree expr;
1969 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
1970 if (!expr)
1971 return;
1972 index = ipa_get_param_decl_index (info,
1973 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
1974 gcc_assert (index >= 0);
1975 if (detect_type_change (obj, expr, obj_type_ref_class (target),
1976 call, &jfunc, anc_offset))
1977 return;
1980 cs = ipa_note_param_call (node, index, call);
1981 ii = cs->indirect_info;
1982 ii->offset = anc_offset;
1983 ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
1984 ii->otr_type = obj_type_ref_class (target);
1985 ii->polymorphic = 1;
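/* For illustration, an assumed example (not from the GCC sources): the
   kind of statement analyzed above is a virtual call whose object is a
   formal parameter, e.g.

     struct A
     {
       virtual int foo ();
     };

     int
     call_foo (A *a)
     {
       return a->foo ();  // OBJ_TYPE_REF whose object is PARM_DECL a
     }

   If interprocedural analysis later proves the dynamic type of *a, the
   polymorphic indirect edge recorded here can become a direct call.  */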
1988 /* Analyze call statement CALL, determining whether and how it utilizes formal
1989 parameters of the caller (described by INFO). PARMS_AINFO is a pointer to a
1990 vector containing intermediate information about each formal parameter. */
1992 static void
1993 ipa_analyze_call_uses (struct cgraph_node *node,
1994 struct ipa_node_params *info,
1995 struct param_analysis_info *parms_ainfo, gimple call)
1997 tree target = gimple_call_fn (call);
1999 if (!target)
2000 return;
2001 if (TREE_CODE (target) == SSA_NAME)
2002 ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
2003 else if (virtual_method_call_p (target))
2004 ipa_analyze_virtual_call_uses (node, info, call, target);
2008 /* Analyze the call statement STMT with respect to formal parameters (described
2009 in INFO) of the caller given by NODE. Currently it only checks whether formal
2010 parameters are used as call targets. PARMS_AINFO is a pointer to a vector
2011 containing intermediate information about each formal parameter. */
2013 static void
2014 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
2015 struct param_analysis_info *parms_ainfo, gimple stmt)
2017 if (is_gimple_call (stmt))
2018 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
2021 /* Callback of walk_stmt_load_store_addr_ops, used for loads, stores and
2022 address operands alike. If OP is a parameter declaration, mark it as used
2023 in the info structure passed in DATA. */
2025 static bool
2026 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
2027 tree op, void *data)
2029 struct ipa_node_params *info = (struct ipa_node_params *) data;
2031 op = get_base_address (op);
2032 if (op
2033 && TREE_CODE (op) == PARM_DECL)
2035 int index = ipa_get_param_decl_index (info, op);
2036 gcc_assert (index >= 0);
2037 ipa_set_param_used (info, index, true);
2040 return false;
2043 /* Scan the function body of NODE and inspect the uses of formal parameters.
2044 Store the findings in various structures of the associated ipa_node_params
2045 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
2046 vector containing intermediate information about each formal parameter. */
2048 static void
2049 ipa_analyze_params_uses (struct cgraph_node *node,
2050 struct param_analysis_info *parms_ainfo)
2052 tree decl = node->symbol.decl;
2053 basic_block bb;
2054 struct function *func;
2055 gimple_stmt_iterator gsi;
2056 struct ipa_node_params *info = IPA_NODE_REF (node);
2057 int i;
2059 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
2060 return;
2062 info->uses_analysis_done = 1;
2063 if (ipa_func_spec_opts_forbid_analysis_p (node))
2065 for (i = 0; i < ipa_get_param_count (info); i++)
2067 ipa_set_param_used (info, i, true);
2068 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2070 return;
2073 for (i = 0; i < ipa_get_param_count (info); i++)
2075 tree parm = ipa_get_param (info, i);
2076 int controlled_uses = 0;
2078 /* For SSA regs see if parameter is used. For non-SSA we compute
2079 the flag during modification analysis. */
2080 if (is_gimple_reg (parm))
2082 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->symbol.decl),
2083 parm);
2084 if (ddef && !has_zero_uses (ddef))
2086 imm_use_iterator imm_iter;
2087 use_operand_p use_p;
2089 ipa_set_param_used (info, i, true);
2090 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2091 if (!is_gimple_call (USE_STMT (use_p)))
2093 controlled_uses = IPA_UNDESCRIBED_USE;
2094 break;
2096 else
2097 controlled_uses++;
2099 else
2100 controlled_uses = 0;
2102 else
2103 controlled_uses = IPA_UNDESCRIBED_USE;
2104 ipa_set_controlled_uses (info, i, controlled_uses);
2107 func = DECL_STRUCT_FUNCTION (decl);
2108 FOR_EACH_BB_FN (bb, func)
2110 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2112 gimple stmt = gsi_stmt (gsi);
2114 if (is_gimple_debug (stmt))
2115 continue;
2117 ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
2118 walk_stmt_load_store_addr_ops (stmt, info,
2119 visit_ref_for_mod_analysis,
2120 visit_ref_for_mod_analysis,
2121 visit_ref_for_mod_analysis);
2123 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2124 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
2125 visit_ref_for_mod_analysis,
2126 visit_ref_for_mod_analysis,
2127 visit_ref_for_mod_analysis);
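/* For illustration, an assumed example of the controlled-uses counting
   above:

     extern void consume (void *);

     static void
     f (void *p)
     {
       consume (p);
       consume (p);
     }

   The default definition of p has exactly two uses, both of them call
   arguments, so the loop above records controlled_uses == 2 for p.  Had
   p appeared in any non-call statement, the count would instead have
   been IPA_UNDESCRIBED_USE.  */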
2131 /* Free stuff in PARMS_AINFO, assume there are PARAM_COUNT parameters. */
2133 static void
2134 free_parms_ainfo (struct param_analysis_info *parms_ainfo, int param_count)
2136 int i;
2138 for (i = 0; i < param_count; i++)
2140 if (parms_ainfo[i].parm_visited_statements)
2141 BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
2142 if (parms_ainfo[i].pt_visited_statements)
2143 BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
2147 /* Initialize the array describing properties of formal parameters
2148 of NODE, analyze their uses and compute jump functions associated
2149 with actual arguments of calls from within NODE. */
2151 void
2152 ipa_analyze_node (struct cgraph_node *node)
2154 struct ipa_node_params *info;
2155 struct param_analysis_info *parms_ainfo;
2156 int param_count;
2158 ipa_check_create_node_params ();
2159 ipa_check_create_edge_args ();
2160 info = IPA_NODE_REF (node);
2161 push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
2162 ipa_initialize_node_params (node);
2164 param_count = ipa_get_param_count (info);
2165 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
2166 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
2168 ipa_analyze_params_uses (node, parms_ainfo);
2169 ipa_compute_jump_functions (node, parms_ainfo);
2171 free_parms_ainfo (parms_ainfo, param_count);
2172 pop_cfun ();
2175 /* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF,
2176 attempt a type-based devirtualization. If successful, return the
2177 target function declaration, otherwise return NULL. */
2179 tree
2180 ipa_intraprocedural_devirtualization (gimple call)
2182 tree binfo, token, fndecl;
2183 struct ipa_jump_func jfunc;
2184 tree otr = gimple_call_fn (call);
2186 jfunc.type = IPA_JF_UNKNOWN;
2187 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
2188 call, obj_type_ref_class (otr));
2189 if (jfunc.type != IPA_JF_KNOWN_TYPE)
2190 return NULL_TREE;
2191 binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
2192 if (!binfo)
2193 return NULL_TREE;
2194 token = OBJ_TYPE_REF_TOKEN (otr);
2195 fndecl = gimple_get_virt_method_for_binfo (tree_low_cst (token, 1),
2196 binfo);
2197 return fndecl;
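/* For illustration, an assumed example: in

     struct B
     {
       virtual int get () { return 1; }
     };

     int
     use_b (void)
     {
       B b;
       B *p = &b;
       return p->get ();
     }

   the dynamic type of *p is known from the construction of b, so
   compute_known_type_jump_func above can produce a known type jump
   function and the virtual method is looked up in B's BINFO without any
   interprocedural propagation.  */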
2200 /* Update the jump function DST when the call graph edge corresponding to SRC
2201 is being inlined, knowing that DST is of type ancestor and SRC of known
2202 type. */
2204 static void
2205 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2206 struct ipa_jump_func *dst)
2208 HOST_WIDE_INT combined_offset;
2209 tree combined_type;
2211 if (!ipa_get_jf_ancestor_type_preserved (dst))
2213 dst->type = IPA_JF_UNKNOWN;
2214 return;
2217 combined_offset = ipa_get_jf_known_type_offset (src)
2218 + ipa_get_jf_ancestor_offset (dst);
2219 combined_type = ipa_get_jf_ancestor_type (dst);
2221 ipa_set_jf_known_type (dst, combined_offset,
2222 ipa_get_jf_known_type_base_type (src),
2223 combined_type);
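/* For illustration, a worked example of the combination above: if SRC
   says the argument is a known object of type D at offset 32 (in bits)
   within its storage, and DST converts its input to an ancestor at
   offset 64 within that object, the result is a known type jump
   function describing the ancestor type at offset 32 + 64 = 96, with
   the base type taken from SRC.  */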
2226 /* Update the jump functions associated with call graph edge E when the call
2227 graph edge CS is being inlined, assuming that E->caller is already (possibly
2228 indirectly) inlined into CS->callee and that E has not been inlined. */
2230 static void
2231 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2232 struct cgraph_edge *e)
2234 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2235 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2236 int count = ipa_get_cs_argument_count (args);
2237 int i;
2239 for (i = 0; i < count; i++)
2241 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2243 if (dst->type == IPA_JF_ANCESTOR)
2245 struct ipa_jump_func *src;
2246 int dst_fid = dst->value.ancestor.formal_id;
2248 /* Variable number of arguments can cause havoc if we try to access
2249 one that does not exist in the inlined edge. So make sure we
2250 don't. */
2251 if (dst_fid >= ipa_get_cs_argument_count (top))
2253 dst->type = IPA_JF_UNKNOWN;
2254 continue;
2257 src = ipa_get_ith_jump_func (top, dst_fid);
2259 if (src->agg.items
2260 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2262 struct ipa_agg_jf_item *item;
2263 int j;
2265 /* Currently we do not produce clobber aggregate jump functions;
2266 replace this with merging when we do. */
2267 gcc_assert (!dst->agg.items);
2269 dst->agg.items = vec_safe_copy (src->agg.items);
2270 dst->agg.by_ref = src->agg.by_ref;
2271 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2272 item->offset -= dst->value.ancestor.offset;
2275 if (src->type == IPA_JF_KNOWN_TYPE)
2276 combine_known_type_and_ancestor_jfs (src, dst);
2277 else if (src->type == IPA_JF_PASS_THROUGH
2278 && src->value.pass_through.operation == NOP_EXPR)
2280 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2281 dst->value.ancestor.agg_preserved &=
2282 src->value.pass_through.agg_preserved;
2283 dst->value.ancestor.type_preserved &=
2284 src->value.pass_through.type_preserved;
2286 else if (src->type == IPA_JF_ANCESTOR)
2288 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2289 dst->value.ancestor.offset += src->value.ancestor.offset;
2290 dst->value.ancestor.agg_preserved &=
2291 src->value.ancestor.agg_preserved;
2292 dst->value.ancestor.type_preserved &=
2293 src->value.ancestor.type_preserved;
2295 else
2296 dst->type = IPA_JF_UNKNOWN;
2298 else if (dst->type == IPA_JF_PASS_THROUGH)
2300 struct ipa_jump_func *src;
2301 /* We must check range due to calls with variable number of arguments
2302 and we cannot combine jump functions with operations. */
2303 if (dst->value.pass_through.operation == NOP_EXPR
2304 && (dst->value.pass_through.formal_id
2305 < ipa_get_cs_argument_count (top)))
2307 int dst_fid = dst->value.pass_through.formal_id;
2308 src = ipa_get_ith_jump_func (top, dst_fid);
2309 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2311 switch (src->type)
2313 case IPA_JF_UNKNOWN:
2314 dst->type = IPA_JF_UNKNOWN;
2315 break;
2316 case IPA_JF_KNOWN_TYPE:
2317 ipa_set_jf_known_type (dst,
2318 ipa_get_jf_known_type_offset (src),
2319 ipa_get_jf_known_type_base_type (src),
2320 ipa_get_jf_known_type_base_type (src));
2321 break;
2322 case IPA_JF_CONST:
2323 ipa_set_jf_cst_copy (dst, src);
2324 break;
2326 case IPA_JF_PASS_THROUGH:
2328 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2329 enum tree_code operation;
2330 operation = ipa_get_jf_pass_through_operation (src);
2332 if (operation == NOP_EXPR)
2334 bool agg_p, type_p;
2335 agg_p = dst_agg_p
2336 && ipa_get_jf_pass_through_agg_preserved (src);
2337 type_p = ipa_get_jf_pass_through_type_preserved (src)
2338 && ipa_get_jf_pass_through_type_preserved (dst);
2339 ipa_set_jf_simple_pass_through (dst, formal_id,
2340 agg_p, type_p);
2342 else
2344 tree operand = ipa_get_jf_pass_through_operand (src);
2345 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2346 operation);
2348 break;
2350 case IPA_JF_ANCESTOR:
2352 bool agg_p, type_p;
2353 agg_p = dst_agg_p
2354 && ipa_get_jf_ancestor_agg_preserved (src);
2355 type_p = ipa_get_jf_ancestor_type_preserved (src)
2356 && ipa_get_jf_pass_through_type_preserved (dst);
2357 ipa_set_ancestor_jf (dst,
2358 ipa_get_jf_ancestor_offset (src),
2359 ipa_get_jf_ancestor_type (src),
2360 ipa_get_jf_ancestor_formal_id (src),
2361 agg_p, type_p);
2362 break;
2364 default:
2365 gcc_unreachable ();
2368 if (src->agg.items
2369 && (dst_agg_p || !src->agg.by_ref))
2371 /* Currently we do not produce clobber aggregate jump
2372 functions; replace this with merging when we do. */
2373 gcc_assert (!dst->agg.items);
2375 dst->agg.by_ref = src->agg.by_ref;
2376 dst->agg.items = vec_safe_copy (src->agg.items);
2379 else
2380 dst->type = IPA_JF_UNKNOWN;
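/* For illustration, an assumed example of the pass-through composition
   above: suppose f (x) calls g (x + 1), giving an arithmetic
   pass-through jump function, and g (y) in turn calls h (y), a simple
   pass-through.  After inlining g into f, the jump function of h's
   argument must be expressed relative to f's parameters, so the simple
   pass-through in DST is combined with the arithmetic pass-through in
   SRC, yielding "x + 1" as the new description of h's argument.  */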
2385 /* If TARGET is an addr_expr of a function declaration, make it the destination
2386 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2388 struct cgraph_edge *
2389 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
2391 struct cgraph_node *callee;
2392 struct inline_edge_summary *es = inline_edge_summary (ie);
2393 bool unreachable = false;
2395 if (TREE_CODE (target) == ADDR_EXPR)
2396 target = TREE_OPERAND (target, 0);
2397 if (TREE_CODE (target) != FUNCTION_DECL)
2399 target = canonicalize_constructor_val (target, NULL);
2400 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2402 if (ie->indirect_info->member_ptr)
2403 /* Member pointer call that goes through a VMT lookup. */
2404 return NULL;
2406 if (dump_file)
2407 fprintf (dump_file, "ipa-prop: Discovered direct call to non-function"
2408 " in %s/%i, making it unreachable.\n",
2409 cgraph_node_name (ie->caller), ie->caller->symbol.order);
2410 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2411 callee = cgraph_get_create_node (target);
2412 unreachable = true;
2414 else
2415 callee = cgraph_get_node (target);
2417 else
2418 callee = cgraph_get_node (target);
2420 /* Because may-edges are not explicitly represented and the vtable may be
2421 external, we may create the first reference to the object in the unit. */
2422 if (!callee || callee->global.inlined_to)
2425 /* We had better make sure we can refer to it.
2426 In the case of static functions we are out of luck, since we already
2427 removed the body. In the case of public functions we may or may
2428 not introduce the reference. */
2429 if (!canonicalize_constructor_val (target, NULL)
2430 || !TREE_PUBLIC (target))
2432 if (dump_file)
2433 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2434 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2435 xstrdup (cgraph_node_name (ie->caller)),
2436 ie->caller->symbol.order,
2437 xstrdup (cgraph_node_name (ie->callee)),
2438 ie->callee->symbol.order);
2439 return NULL;
2441 callee = cgraph_get_create_real_symbol_node (target);
2443 ipa_check_create_node_params ();
2445 /* We cannot make edges to inline clones. It is a bug if someone removed
2446 the cgraph node too early. */
2447 gcc_assert (!callee->global.inlined_to);
2449 if (dump_file && !unreachable)
2451 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
2452 "(%s/%i -> %s/%i), for stmt ",
2453 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2454 xstrdup (cgraph_node_name (ie->caller)),
2455 ie->caller->symbol.order,
2456 xstrdup (cgraph_node_name (callee)),
2457 callee->symbol.order);
2458 if (ie->call_stmt)
2459 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2460 else
2461 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2463 ie = cgraph_make_edge_direct (ie, callee);
2464 es = inline_edge_summary (ie);
2465 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2466 - eni_size_weights.call_cost);
2467 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2468 - eni_time_weights.call_cost);
2470 return ie;
2473 /* Retrieve a value from aggregate jump function AGG for the given OFFSET, or
2474 return NULL if there is none. BY_REF specifies whether the value has to
2475 be passed by reference or by value. */
2477 tree
2478 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2479 HOST_WIDE_INT offset, bool by_ref)
2481 struct ipa_agg_jf_item *item;
2482 int i;
2484 if (by_ref != agg->by_ref)
2485 return NULL;
2487 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2488 if (item->offset == offset)
2490 /* Currently we do not have clobber values; return NULL for them once
2491 we do. */
2492 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2493 return item->value;
2495 return NULL;
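/* For illustration, an assumed example: for a call f (&s) in which the
   caller previously stored known constants into s, the aggregate jump
   function describing the argument might contain items such as
   { offset 0, value 1 } and { offset 32, value 7 } (offsets are in
   bits).  A query with OFFSET == 32 and a BY_REF flag matching the jump
   function then returns the constant 7; any other offset yields NULL.  */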
2498 /* Remove a reference to SYMBOL from the list of references of a node given by
2499 reference description RDESC. Return true if the reference has been
2500 successfully found and removed. */
2502 static bool
2503 remove_described_reference (symtab_node symbol, struct ipa_cst_ref_desc *rdesc)
2505 struct ipa_ref *to_del;
2506 struct cgraph_edge *origin;
2508 origin = rdesc->cs;
2509 to_del = ipa_find_reference ((symtab_node) origin->caller, symbol,
2510 origin->call_stmt, origin->lto_stmt_uid);
2511 if (!to_del)
2512 return false;
2514 ipa_remove_reference (to_del);
2515 if (dump_file)
2516 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2517 xstrdup (cgraph_node_name (origin->caller)),
2518 origin->caller->symbol.order, xstrdup (symtab_node_name (symbol)));
2519 return true;
2522 /* If JFUNC has a reference description with refcount different from
2523 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2524 NULL. JFUNC must be a constant jump function. */
2526 static struct ipa_cst_ref_desc *
2527 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2529 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2530 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2531 return rdesc;
2532 else
2533 return NULL;
2536 /* If the value of constant jump function JFUNC is an address of a function
2537 declaration, return the associated call graph node. Otherwise return
2538 NULL. */
2540 static cgraph_node *
2541 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2543 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2544 tree cst = ipa_get_jf_constant (jfunc);
2545 if (TREE_CODE (cst) != ADDR_EXPR
2546 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2547 return NULL;
2549 return cgraph_get_node (TREE_OPERAND (cst, 0));
2553 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2554 refcount and, if it hits zero, remove the reference to the described symbol
2555 from the caller of the edge specified in the rdesc. Return false if either
2556 the symbol or the reference could not be found, otherwise return true. */
2558 static bool
2559 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2561 struct ipa_cst_ref_desc *rdesc;
2562 if (jfunc->type == IPA_JF_CONST
2563 && (rdesc = jfunc_rdesc_usable (jfunc))
2564 && --rdesc->refcount == 0)
2566 symtab_node symbol = (symtab_node) cgraph_node_for_jfunc (jfunc);
2567 if (!symbol)
2568 return false;
2570 return remove_described_reference (symbol, rdesc);
2572 return true;
2575 /* Try to find a destination for indirect edge IE that corresponds to a simple
2576 call or a call of a member function pointer and where the destination is a
2577 pointer formal parameter described by jump function JFUNC. If it can be
2578 determined, return the newly direct edge, otherwise return NULL.
2579 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2581 static struct cgraph_edge *
2582 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2583 struct ipa_jump_func *jfunc,
2584 struct ipa_node_params *new_root_info)
2586 struct cgraph_edge *cs;
2587 tree target;
2588 bool agg_contents = ie->indirect_info->agg_contents;
2590 if (ie->indirect_info->agg_contents)
2591 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2592 ie->indirect_info->offset,
2593 ie->indirect_info->by_ref);
2594 else
2595 target = ipa_value_from_jfunc (new_root_info, jfunc);
2596 if (!target)
2597 return NULL;
2598 cs = ipa_make_edge_direct_to_target (ie, target);
2600 if (cs && !agg_contents)
2602 bool ok;
2603 gcc_checking_assert (cs->callee
2604 && (jfunc->type != IPA_JF_CONST
2605 || !cgraph_node_for_jfunc (jfunc)
2606 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2607 ok = try_decrement_rdesc_refcount (jfunc);
2608 gcc_checking_assert (ok);
2611 return cs;
2614 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2615 call based on a formal parameter which is described by jump function JFUNC
2616 and if it can be determined, make it direct and return the direct edge.
2617 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
2618 are relative to. */
2620 static struct cgraph_edge *
2621 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2622 struct ipa_jump_func *jfunc,
2623 struct ipa_node_params *new_root_info)
2625 tree binfo, target;
2627 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
2629 if (!binfo)
2630 return NULL;
2632 if (TREE_CODE (binfo) != TREE_BINFO)
2634 binfo = gimple_extract_devirt_binfo_from_cst
2635 (binfo, ie->indirect_info->otr_type);
2636 if (!binfo)
2637 return NULL;
2640 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
2641 ie->indirect_info->otr_type);
2642 if (binfo)
2643 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2644 binfo);
2645 else
2646 return NULL;
2648 if (target)
2649 return ipa_make_edge_direct_to_target (ie, target);
2650 else
2651 return NULL;
2654 /* Update the param called notes associated with NODE when CS is being inlined,
2655 assuming NODE is (potentially indirectly) inlined into CS->callee.
2656 Moreover, if the callee is discovered to be constant, create a new cgraph
2657 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2658 unless NEW_EDGES is NULL. Return true iff new edges were created. */
2660 static bool
2661 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2662 struct cgraph_node *node,
2663 vec<cgraph_edge_p> *new_edges)
2665 struct ipa_edge_args *top;
2666 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
2667 struct ipa_node_params *new_root_info;
2668 bool res = false;
2670 ipa_check_create_edge_args ();
2671 top = IPA_EDGE_REF (cs);
2672 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
2673 ? cs->caller->global.inlined_to
2674 : cs->caller);
2676 for (ie = node->indirect_calls; ie; ie = next_ie)
2678 struct cgraph_indirect_call_info *ici = ie->indirect_info;
2679 struct ipa_jump_func *jfunc;
2680 int param_index;
2682 next_ie = ie->next_callee;
2684 if (ici->param_index == -1)
2685 continue;
2687 /* We must check range due to calls with variable number of arguments: */
2688 if (ici->param_index >= ipa_get_cs_argument_count (top))
2690 ici->param_index = -1;
2691 continue;
2694 param_index = ici->param_index;
2695 jfunc = ipa_get_ith_jump_func (top, param_index);
2697 if (!flag_indirect_inlining)
2698 new_direct_edge = NULL;
2699 else if (ici->polymorphic)
2700 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
2701 new_root_info);
2702 else
2703 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
2704 new_root_info);
2705 /* If speculation was removed, then we need to do nothing. */
2706 if (new_direct_edge && new_direct_edge != ie)
2708 new_direct_edge->indirect_inlining_edge = 1;
2709 top = IPA_EDGE_REF (cs);
2710 res = true;
2712 else if (new_direct_edge)
2714 new_direct_edge->indirect_inlining_edge = 1;
2715 if (new_direct_edge->call_stmt)
2716 new_direct_edge->call_stmt_cannot_inline_p
2717 = !gimple_check_call_matching_types (
2718 new_direct_edge->call_stmt,
2719 new_direct_edge->callee->symbol.decl, false);
2720 if (new_edges)
2722 new_edges->safe_push (new_direct_edge);
2723 res = true;
2725 top = IPA_EDGE_REF (cs);
2727 else if (jfunc->type == IPA_JF_PASS_THROUGH
2728 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2730 if (ici->agg_contents
2731 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
2732 ici->param_index = -1;
2733 else
2734 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
2736 else if (jfunc->type == IPA_JF_ANCESTOR)
2738 if (ici->agg_contents
2739 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
2740 ici->param_index = -1;
2741 else
2743 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
2744 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
2747 else
2748 /* Either we can find a destination for this edge now or never. */
2749 ici->param_index = -1;
2752 return res;
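/* For illustration, an assumed example: in

     static int square (int i) { return i * i; }

     static int
     apply (int (*fn) (int), int v)
     {
       return fn (v);  // indirect edge with param_index == 0
     }

     int
     caller (int v)
     {
       return apply (square, v);
     }

   once apply is inlined into caller, the jump function for fn is the
   constant square, and (with -findirect-inlining in effect) the loop
   above replaces the indirect edge with a direct call to square.  */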
2755 /* Recursively traverse the subtree of NODE (including NODE) made of inlined
2756 cgraph_edges when CS has been inlined and invoke
2757 update_indirect_edges_after_inlining on all nodes and
2758 update_jump_functions_after_inlining on all non-inlined edges that lead out
2759 of this subtree. Newly discovered indirect edges will be added to
2760 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
2761 created. */
2763 static bool
2764 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
2765 struct cgraph_node *node,
2766 vec<cgraph_edge_p> *new_edges)
2768 struct cgraph_edge *e;
2769 bool res;
2771 res = update_indirect_edges_after_inlining (cs, node, new_edges);
2773 for (e = node->callees; e; e = e->next_callee)
2774 if (!e->inline_failed)
2775 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
2776 else
2777 update_jump_functions_after_inlining (cs, e);
2778 for (e = node->indirect_calls; e; e = e->next_callee)
2779 update_jump_functions_after_inlining (cs, e);
2781 return res;
2784 /* Combine two controlled uses counts as done during inlining. */
2786 static int
2787 combine_controlled_uses_counters (int c, int d)
2789 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
2790 return IPA_UNDESCRIBED_USE;
2791 else
2792 return c + d - 1;
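/* For illustration of the arithmetic above: C counts controlled uses in
   the new root, one of which was the call that has just been inlined,
   and D counts the described uses inside the inlined callee.  After
   inlining, the callee's D uses now appear in the root's body while the
   forwarding call itself is gone, hence c + d - 1; e.g. c == 2 and
   d == 1 combine to 2.  */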
2795 /* Propagate the number of controlled users from CS->callee to the new root of the
2796 tree of inlined nodes. */
2798 static void
2799 propagate_controlled_uses (struct cgraph_edge *cs)
2801 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
2802 struct cgraph_node *new_root = cs->caller->global.inlined_to
2803 ? cs->caller->global.inlined_to : cs->caller;
2804 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
2805 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
2806 int count, i;
2808 count = MIN (ipa_get_cs_argument_count (args),
2809 ipa_get_param_count (old_root_info));
2810 for (i = 0; i < count; i++)
2812 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2813 struct ipa_cst_ref_desc *rdesc;
2815 if (jf->type == IPA_JF_PASS_THROUGH)
2817 int src_idx, c, d;
2818 src_idx = ipa_get_jf_pass_through_formal_id (jf);
2819 c = ipa_get_controlled_uses (new_root_info, src_idx);
2820 d = ipa_get_controlled_uses (old_root_info, i);
2822 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
2823 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
2824 c = combine_controlled_uses_counters (c, d);
2825 ipa_set_controlled_uses (new_root_info, src_idx, c);
2826 if (c == 0 && new_root_info->ipcp_orig_node)
2828 struct cgraph_node *n;
2829 struct ipa_ref *ref;
2830 tree t = new_root_info->known_vals[src_idx];
2832 if (t && TREE_CODE (t) == ADDR_EXPR
2833 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
2834 && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
2835 && (ref = ipa_find_reference ((symtab_node) new_root,
2836 (symtab_node) n, NULL, 0)))
2838 if (dump_file)
2839 fprintf (dump_file, "ipa-prop: Removing cloning-created "
2840 "reference from %s/%i to %s/%i.\n",
2841 xstrdup (cgraph_node_name (new_root)),
2842 new_root->symbol.order,
2843 xstrdup (cgraph_node_name (n)), n->symbol.order);
2844 ipa_remove_reference (ref);
2848 else if (jf->type == IPA_JF_CONST
2849 && (rdesc = jfunc_rdesc_usable (jf)))
2851 int d = ipa_get_controlled_uses (old_root_info, i);
2852 int c = rdesc->refcount;
2853 rdesc->refcount = combine_controlled_uses_counters (c, d);
2854 if (rdesc->refcount == 0)
2856 tree cst = ipa_get_jf_constant (jf);
2857 struct cgraph_node *n;
2858 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
2859 && TREE_CODE (TREE_OPERAND (cst, 0))
2860 == FUNCTION_DECL);
2861 n = cgraph_get_node (TREE_OPERAND (cst, 0));
2862 if (n)
2864 struct cgraph_node *clone;
2865 bool ok;
2866 ok = remove_described_reference ((symtab_node) n, rdesc);
2867 gcc_checking_assert (ok);
2869 clone = cs->caller;
2870 while (clone->global.inlined_to
2871 && clone != rdesc->cs->caller
2872 && IPA_NODE_REF (clone)->ipcp_orig_node)
2874 struct ipa_ref *ref;
2875 ref = ipa_find_reference ((symtab_node) clone,
2876 (symtab_node) n, NULL, 0);
2877 if (ref)
2879 if (dump_file)
2880 fprintf (dump_file, "ipa-prop: Removing "
2881 "cloning-created reference "
2882 "from %s/%i to %s/%i.\n",
2883 xstrdup (cgraph_node_name (clone)),
2884 clone->symbol.order,
2885 xstrdup (cgraph_node_name (n)),
2886 n->symbol.order);
2887 ipa_remove_reference (ref);
2889 clone = clone->callers->caller;
2896 for (i = ipa_get_param_count (old_root_info);
2897 i < ipa_get_cs_argument_count (args);
2898 i++)
2900 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2902 if (jf->type == IPA_JF_CONST)
2904 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
2905 if (rdesc)
2906 rdesc->refcount = IPA_UNDESCRIBED_USE;
2908 else if (jf->type == IPA_JF_PASS_THROUGH)
2909 ipa_set_controlled_uses (new_root_info,
2910 jf->value.pass_through.formal_id,
2911 IPA_UNDESCRIBED_USE);
2915 /* Update jump functions and call note functions on inlining the call site CS.
2916 CS is expected to lead to a node already cloned by
2917 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
2918 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
2919 created. */
2921 bool
2922 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
2923 vec<cgraph_edge_p> *new_edges)
2925 bool changed;
2926 /* Do nothing if the preparation phase has not been carried out yet
2927 (i.e. during early inlining). */
2928 if (!ipa_node_params_vector.exists ())
2929 return false;
2930 gcc_assert (ipa_edge_args_vector);
2932 propagate_controlled_uses (cs);
2933 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
2935 return changed;
2938 /* Frees all dynamically allocated structures that the argument info points
2939 to. */
2941 void
2942 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
2944 vec_free (args->jump_functions);
2945 memset (args, 0, sizeof (*args));
2948 /* Free all ipa_edge_args structures. */
2950 void
2951 ipa_free_all_edge_args (void)
2953 int i;
2954 struct ipa_edge_args *args;
2956 if (!ipa_edge_args_vector)
2957 return;
2959 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
2960 ipa_free_edge_args_substructures (args);
2962 vec_free (ipa_edge_args_vector);
2965 /* Frees all dynamically allocated structures that the param info points
2966 to. */
2968 void
2969 ipa_free_node_params_substructures (struct ipa_node_params *info)
2971 info->descriptors.release ();
2972 free (info->lattices);
2973 /* Lattice values and their sources are deallocated with their allocation
2974 pool. */
2975 info->known_vals.release ();
2976 memset (info, 0, sizeof (*info));
2979 /* Free all ipa_node_params structures. */
2981 void
2982 ipa_free_all_node_params (void)
2984 int i;
2985 struct ipa_node_params *info;
2987 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
2988 ipa_free_node_params_substructures (info);
2990 ipa_node_params_vector.release ();
2993 /* Set the aggregate replacements of NODE to be AGGVALS. */
2995 void
2996 ipa_set_node_agg_value_chain (struct cgraph_node *node,
2997 struct ipa_agg_replacement_value *aggvals)
2999 if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
3000 vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);
3002 (*ipa_node_agg_replacements)[node->uid] = aggvals;
3005 /* Hook that is called by cgraph.c when an edge is removed. */
3007 static void
3008 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3010 struct ipa_edge_args *args;
3012 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3013 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3014 return;
3016 args = IPA_EDGE_REF (cs);
3017 if (args->jump_functions)
3019 struct ipa_jump_func *jf;
3020 int i;
3021 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3022 try_decrement_rdesc_refcount (jf);
3025 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3028 /* Hook that is called by cgraph.c when a node is removed. */
3030 static void
3031 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3033 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3034 if (ipa_node_params_vector.length () > (unsigned)node->uid)
3035 ipa_free_node_params_substructures (IPA_NODE_REF (node));
3036 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3037 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
3040 /* Hook that is called by cgraph.c when an edge is duplicated. */
3042 static void
3043 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3044 __attribute__((unused)) void *data)
3046 struct ipa_edge_args *old_args, *new_args;
3047 unsigned int i;
3049 ipa_check_create_edge_args ();
3051 old_args = IPA_EDGE_REF (src);
3052 new_args = IPA_EDGE_REF (dst);
3054 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3056 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3058 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3059 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3061 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3063 if (src_jf->type == IPA_JF_CONST)
3065 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3067 if (!src_rdesc)
3068 dst_jf->value.constant.rdesc = NULL;
3069 else if (src->caller == dst->caller)
3071 struct ipa_ref *ref;
3072 symtab_node n = (symtab_node) cgraph_node_for_jfunc (src_jf);
3073 gcc_checking_assert (n);
3074 ref = ipa_find_reference ((symtab_node) src->caller, n,
3075 src->call_stmt, src->lto_stmt_uid);
3076 gcc_checking_assert (ref);
3077 ipa_clone_ref (ref, (symtab_node) dst->caller, ref->stmt);
3079 gcc_checking_assert (ipa_refdesc_pool);
3080 struct ipa_cst_ref_desc *dst_rdesc
3081 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3082 dst_rdesc->cs = dst;
3083 dst_rdesc->refcount = src_rdesc->refcount;
3084 dst_rdesc->next_duplicate = NULL;
3085 dst_jf->value.constant.rdesc = dst_rdesc;
3087 else if (src_rdesc->cs == src)
3089 struct ipa_cst_ref_desc *dst_rdesc;
3090 gcc_checking_assert (ipa_refdesc_pool);
3091 dst_rdesc
3092 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3093 dst_rdesc->cs = dst;
3094 dst_rdesc->refcount = src_rdesc->refcount;
3095 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3096 src_rdesc->next_duplicate = dst_rdesc;
3097 dst_jf->value.constant.rdesc = dst_rdesc;
3099 else
3101 struct ipa_cst_ref_desc *dst_rdesc;
3102 /* This can happen during inlining, when a JFUNC can refer to a
3103 reference taken in a function up in the tree of inline clones.
3104 We need to find the duplicate that refers to our tree of
3105 inline clones. */
3107 gcc_assert (dst->caller->global.inlined_to);
3108 for (dst_rdesc = src_rdesc->next_duplicate;
3109 dst_rdesc;
3110 dst_rdesc = dst_rdesc->next_duplicate)
3112 struct cgraph_node *top;
3113 top = dst_rdesc->cs->caller->global.inlined_to
3114 ? dst_rdesc->cs->caller->global.inlined_to
3115 : dst_rdesc->cs->caller;
3116 if (dst->caller->global.inlined_to == top)
3117 break;
3119 gcc_assert (dst_rdesc);
3120 dst_jf->value.constant.rdesc = dst_rdesc;
3126 /* Hook that is called by cgraph.c when a node is duplicated. */
3128 static void
3129 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
3130 ATTRIBUTE_UNUSED void *data)
3132 struct ipa_node_params *old_info, *new_info;
3133 struct ipa_agg_replacement_value *old_av, *new_av;
3135 ipa_check_create_node_params ();
3136 old_info = IPA_NODE_REF (src);
3137 new_info = IPA_NODE_REF (dst);
3139 new_info->descriptors = old_info->descriptors.copy ();
3140 new_info->lattices = NULL;
3141 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3143 new_info->uses_analysis_done = old_info->uses_analysis_done;
3144 new_info->node_enqueued = old_info->node_enqueued;
3146 old_av = ipa_get_agg_replacements_for_node (src);
3147 if (!old_av)
3148 return;
3150 new_av = NULL;
3151 while (old_av)
3153 struct ipa_agg_replacement_value *v;
3155 v = ggc_alloc_ipa_agg_replacement_value ();
3156 memcpy (v, old_av, sizeof (*v));
3157 v->next = new_av;
3158 new_av = v;
3159 old_av = old_av->next;
3161 ipa_set_node_agg_value_chain (dst, new_av);
3165 /* Analyze a function newly added to the callgraph. */
3167 static void
3168 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3170 ipa_analyze_node (node);
3173 /* Register our cgraph hooks if they are not already there. */
3175 void
3176 ipa_register_cgraph_hooks (void)
3178 if (!edge_removal_hook_holder)
3179 edge_removal_hook_holder =
3180 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3181 if (!node_removal_hook_holder)
3182 node_removal_hook_holder =
3183 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
3184 if (!edge_duplication_hook_holder)
3185 edge_duplication_hook_holder =
3186 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3187 if (!node_duplication_hook_holder)
3188 node_duplication_hook_holder =
3189 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
3190 function_insertion_hook_holder =
3191 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
3194 /* Unregister our cgraph hooks. */
3196 static void
3197 ipa_unregister_cgraph_hooks (void)
3199 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
3200 edge_removal_hook_holder = NULL;
3201 cgraph_remove_node_removal_hook (node_removal_hook_holder);
3202 node_removal_hook_holder = NULL;
3203 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
3204 edge_duplication_hook_holder = NULL;
3205 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
3206 node_duplication_hook_holder = NULL;
3207 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
3208 function_insertion_hook_holder = NULL;
3211 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3212 longer needed after ipa-cp. */
3214 void
3215 ipa_free_all_structures_after_ipa_cp (void)
3217 if (!optimize)
3219 ipa_free_all_edge_args ();
3220 ipa_free_all_node_params ();
3221 free_alloc_pool (ipcp_sources_pool);
3222 free_alloc_pool (ipcp_values_pool);
3223 free_alloc_pool (ipcp_agg_lattice_pool);
3224 ipa_unregister_cgraph_hooks ();
3225 if (ipa_refdesc_pool)
3226 free_alloc_pool (ipa_refdesc_pool);
3230 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3231 longer needed after indirect inlining. */
3233 void
3234 ipa_free_all_structures_after_iinln (void)
3236 ipa_free_all_edge_args ();
3237 ipa_free_all_node_params ();
3238 ipa_unregister_cgraph_hooks ();
3239 if (ipcp_sources_pool)
3240 free_alloc_pool (ipcp_sources_pool);
3241 if (ipcp_values_pool)
3242 free_alloc_pool (ipcp_values_pool);
3243 if (ipcp_agg_lattice_pool)
3244 free_alloc_pool (ipcp_agg_lattice_pool);
3245 if (ipa_refdesc_pool)
3246 free_alloc_pool (ipa_refdesc_pool);
3249 /* Print the ipa_tree_map data structures of function NODE to F. */
3252 void
3253 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3255 int i, count;
3256 struct ipa_node_params *info;
3258 if (!node->symbol.definition)
3259 return;
3260 info = IPA_NODE_REF (node);
3261 fprintf (f, " function %s/%i parameter descriptors:\n",
3262 cgraph_node_name (node), node->symbol.order);
3263 count = ipa_get_param_count (info);
3264 for (i = 0; i < count; i++)
3266 int c;
3268 ipa_dump_param (f, info, i);
3269 if (ipa_is_param_used (info, i))
3270 fprintf (f, " used");
3271 c = ipa_get_controlled_uses (info, i);
3272 if (c == IPA_UNDESCRIBED_USE)
3273 fprintf (f, " undescribed_use");
3274 else
3275 fprintf (f, " controlled_uses=%i", c);
3276 fprintf (f, "\n");
3280 /* Print ipa_tree_map data structures of all functions in the
3281 callgraph to F. */
3283 void
3284 ipa_print_all_params (FILE * f)
3286 struct cgraph_node *node;
3288 fprintf (f, "\nFunction parameters:\n");
3289 FOR_EACH_FUNCTION (node)
3290 ipa_print_node_params (f, node);
3293 /* Return a heap allocated vector containing formal parameters of FNDECL. */
3295 vec<tree>
3296 ipa_get_vector_of_formal_parms (tree fndecl)
3298 vec<tree> args;
3299 int count;
3300 tree parm;
3302 gcc_assert (!flag_wpa);
3303 count = count_formal_params (fndecl);
3304 args.create (count);
3305 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3306 args.quick_push (parm);
3308 return args;
3311 /* Return a heap allocated vector containing types of formal parameters of
3312 function type FNTYPE. */
3314 static inline vec<tree>
3315 get_vector_of_formal_parm_types (tree fntype)
3317 vec<tree> types;
3318 int count = 0;
3319 tree t;
3321 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3322 count++;
3324 types.create (count);
3325 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3326 types.quick_push (TREE_VALUE (t));
3328 return types;
3331 /* Modify the function declaration FNDECL and its type according to the plan in
3332 ADJUSTMENTS. It also sets base fields of individual adjustment structures
3333 to reflect the actual parameters being modified, which are determined by the
3334 base_index field. */
3336 void
3337 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
3338 const char *synth_parm_prefix)
3340 vec<tree> oparms, otypes;
3341 tree orig_type, new_type = NULL;
3342 tree old_arg_types, t, new_arg_types = NULL;
3343 tree parm, *link = &DECL_ARGUMENTS (fndecl);
3344 int i, len = adjustments.length ();
3345 tree new_reversed = NULL;
3346 bool care_for_types, last_parm_void;
3348 if (!synth_parm_prefix)
3349 synth_parm_prefix = "SYNTH";
3351 oparms = ipa_get_vector_of_formal_parms (fndecl);
3352 orig_type = TREE_TYPE (fndecl);
3353 old_arg_types = TYPE_ARG_TYPES (orig_type);
3355 /* The following test is an ugly hack; some functions simply don't have any
3356 arguments in their type. This is probably a bug but well... */
3357 care_for_types = (old_arg_types != NULL_TREE);
3358 if (care_for_types)
3360 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3361 == void_type_node);
3362 otypes = get_vector_of_formal_parm_types (orig_type);
3363 if (last_parm_void)
3364 gcc_assert (oparms.length () + 1 == otypes.length ());
3365 else
3366 gcc_assert (oparms.length () == otypes.length ());
3368 else
3370 last_parm_void = false;
3371 otypes.create (0);
3374 for (i = 0; i < len; i++)
3376 struct ipa_parm_adjustment *adj;
3377 gcc_assert (link);
3379 adj = &adjustments[i];
3380 parm = oparms[adj->base_index];
3381 adj->base = parm;
3383 if (adj->copy_param)
3385 if (care_for_types)
3386 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3387 new_arg_types);
3388 *link = parm;
3389 link = &DECL_CHAIN (parm);
3391 else if (!adj->remove_param)
3393 tree new_parm;
3394 tree ptype;
3396 if (adj->by_ref)
3397 ptype = build_pointer_type (adj->type);
3398 else
3399 ptype = adj->type;
3401 if (care_for_types)
3402 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3404 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3405 ptype);
3406 DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);
3408 DECL_ARTIFICIAL (new_parm) = 1;
3409 DECL_ARG_TYPE (new_parm) = ptype;
3410 DECL_CONTEXT (new_parm) = fndecl;
3411 TREE_USED (new_parm) = 1;
3412 DECL_IGNORED_P (new_parm) = 1;
3413 layout_decl (new_parm, 0);
3415 adj->base = parm;
3416 adj->reduction = new_parm;
3418 *link = new_parm;
3420 link = &DECL_CHAIN (new_parm);
3424 *link = NULL_TREE;
3426 if (care_for_types)
3428 new_reversed = nreverse (new_arg_types);
3429 if (last_parm_void)
3431 if (new_reversed)
3432 TREE_CHAIN (new_arg_types) = void_list_node;
3433 else
3434 new_reversed = void_list_node;
3438 /* Use copy_node to preserve as much as possible from the original type
3439 (debug info, attribute lists etc.).
3440 The exception is that METHOD_TYPEs must have a THIS argument; when we
3441 are asked to remove it, we need to build a new FUNCTION_TYPE
3442 instead. */
3443 if (TREE_CODE (orig_type) != METHOD_TYPE
3444 || (adjustments[0].copy_param
3445 && adjustments[0].base_index == 0))
3447 new_type = build_distinct_type_copy (orig_type);
3448 TYPE_ARG_TYPES (new_type) = new_reversed;
3450 else
3452 new_type
3453 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3454 new_reversed));
3455 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3456 DECL_VINDEX (fndecl) = NULL_TREE;
3459 /* When signature changes, we need to clear builtin info. */
3460 if (DECL_BUILT_IN (fndecl))
3462 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3463 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3466 /* This is a new type, not a copy of an old type. Need to reassociate
3467 variants. We can handle everything except the main variant lazily. */
3468 t = TYPE_MAIN_VARIANT (orig_type);
3469 if (orig_type != t)
3471 TYPE_MAIN_VARIANT (new_type) = t;
3472 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
3473 TYPE_NEXT_VARIANT (t) = new_type;
3475 else
3477 TYPE_MAIN_VARIANT (new_type) = new_type;
3478 TYPE_NEXT_VARIANT (new_type) = NULL;
3481 TREE_TYPE (fndecl) = new_type;
3482 DECL_VIRTUAL_P (fndecl) = 0;
3483 otypes.release ();
3484 oparms.release ();
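/* For illustration, an assumed example: given the declaration

     int f (struct S *p, int unused);

   an adjustment vector whose first entry turns p into the scalar it
   points to (copy_param and remove_param clear, by_ref false, type int)
   and whose second entry drops unused (remove_param set) makes the
   function above rewrite the declaration roughly to

     int f (int SYNTH_1);

   where SYNTH_1 stands for the artificial PARM_DECL whose name is
   derived from SYNTH_PARM_PREFIX.  */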
3487 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
3488 If this is a directly recursive call, CS must be NULL. Otherwise it must
3489 contain the corresponding call graph edge. */
3491 void
3492 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
3493 ipa_parm_adjustment_vec adjustments)
3495 struct cgraph_node *current_node = cgraph_get_node (current_function_decl);
3496 vec<tree> vargs;
3497 vec<tree, va_gc> **debug_args = NULL;
3498 gimple new_stmt;
3499 gimple_stmt_iterator gsi, prev_gsi;
3500 tree callee_decl;
3501 int i, len;
3503 len = adjustments.length ();
3504 vargs.create (len);
3505 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->symbol.decl;
3506 ipa_remove_stmt_references ((symtab_node) current_node, stmt);
3508 gsi = gsi_for_stmt (stmt);
3509 prev_gsi = gsi;
3510 gsi_prev (&prev_gsi);
3511 for (i = 0; i < len; i++)
3513 struct ipa_parm_adjustment *adj;
3515 adj = &adjustments[i];
3517 if (adj->copy_param)
3519 tree arg = gimple_call_arg (stmt, adj->base_index);
3521 vargs.quick_push (arg);
3523 else if (!adj->remove_param)
3525 tree expr, base, off;
3526 location_t loc;
3527 unsigned int deref_align = 0;
3528 bool deref_base = false;
3530 /* We create a new parameter out of the value of the old one; we can
3531 do the following kinds of transformations:
3533 - A scalar passed by reference is converted to a scalar passed by
3534 value. (adj->by_ref is false and the type of the original
3535 actual argument is a pointer to a scalar).
3537 - A part of an aggregate is passed instead of the whole aggregate.
3538 The part can be passed either by value or by reference, this is
3539 determined by value of adj->by_ref. Moreover, the code below
3540 handles both situations when the original aggregate is passed by
3541 value (its type is not a pointer) and when it is passed by
3542 reference (it is a pointer to an aggregate).
3544 When the new argument is passed by reference (adj->by_ref is true)
3545 it must be a part of an aggregate and therefore we form it by
3546 simply taking the address of a reference inside the original
3547 aggregate. */
3549 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3550 base = gimple_call_arg (stmt, adj->base_index);
3551 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3552 : EXPR_LOCATION (base);
3554 if (TREE_CODE (base) != ADDR_EXPR
3555 && POINTER_TYPE_P (TREE_TYPE (base)))
3556 off = build_int_cst (adj->alias_ptr_type,
3557 adj->offset / BITS_PER_UNIT);
3558 else
3560 HOST_WIDE_INT base_offset;
3561 tree prev_base;
3562 bool addrof;
3564 if (TREE_CODE (base) == ADDR_EXPR)
3566 base = TREE_OPERAND (base, 0);
3567 addrof = true;
3569 else
3570 addrof = false;
3571 prev_base = base;
3572 base = get_addr_base_and_unit_offset (base, &base_offset);
3573 /* Aggregate arguments can have non-invariant addresses. */
3574 if (!base)
3576 base = build_fold_addr_expr (prev_base);
3577 off = build_int_cst (adj->alias_ptr_type,
3578 adj->offset / BITS_PER_UNIT);
3580 else if (TREE_CODE (base) == MEM_REF)
3582 if (!addrof)
3584 deref_base = true;
3585 deref_align = TYPE_ALIGN (TREE_TYPE (base));
3587 off = build_int_cst (adj->alias_ptr_type,
3588 base_offset
3589 + adj->offset / BITS_PER_UNIT);
3590 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
3591 off);
3592 base = TREE_OPERAND (base, 0);
3594 else
3596 off = build_int_cst (adj->alias_ptr_type,
3597 base_offset
3598 + adj->offset / BITS_PER_UNIT);
3599 base = build_fold_addr_expr (base);
3603 if (!adj->by_ref)
3605 tree type = adj->type;
3606 unsigned int align;
3607 unsigned HOST_WIDE_INT misalign;
3609 if (deref_base)
3611 align = deref_align;
3612 misalign = 0;
3614 else
3616 get_pointer_alignment_1 (base, &align, &misalign);
3617 if (TYPE_ALIGN (type) > align)
3618 align = TYPE_ALIGN (type);
3620 misalign += (tree_to_double_int (off)
3621 .sext (TYPE_PRECISION (TREE_TYPE (off))).low
3622 * BITS_PER_UNIT);
3623 misalign = misalign & (align - 1);
3624 if (misalign != 0)
3625 align = (misalign & -misalign);
3626 if (align < TYPE_ALIGN (type))
3627 type = build_aligned_type (type, align);
3628 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
3630 else
3632 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
3633 expr = build_fold_addr_expr (expr);
3636 expr = force_gimple_operand_gsi (&gsi, expr,
3637 adj->by_ref
3638 || is_gimple_reg_type (adj->type),
3639 NULL, true, GSI_SAME_STMT);
3640 vargs.quick_push (expr);
3642 if (!adj->copy_param && MAY_HAVE_DEBUG_STMTS)
3644 unsigned int ix;
3645 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
3646 gimple def_temp;
3648 arg = gimple_call_arg (stmt, adj->base_index);
3649 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
3651 if (!fold_convertible_p (TREE_TYPE (origin), arg))
3652 continue;
3653 arg = fold_convert_loc (gimple_location (stmt),
3654 TREE_TYPE (origin), arg);
3656 if (debug_args == NULL)
3657 debug_args = decl_debug_args_insert (callee_decl);
3658 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
3659 if (ddecl == origin)
3661 ddecl = (**debug_args)[ix + 1];
3662 break;
3664 if (ddecl == NULL)
3666 ddecl = make_node (DEBUG_EXPR_DECL);
3667 DECL_ARTIFICIAL (ddecl) = 1;
3668 TREE_TYPE (ddecl) = TREE_TYPE (origin);
3669 DECL_MODE (ddecl) = DECL_MODE (origin);
3671 vec_safe_push (*debug_args, origin);
3672 vec_safe_push (*debug_args, ddecl);
3674 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
3675 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
3679 if (dump_file && (dump_flags & TDF_DETAILS))
3681 fprintf (dump_file, "replacing stmt:");
3682 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
3685 new_stmt = gimple_build_call_vec (callee_decl, vargs);
3686 vargs.release ();
3687 if (gimple_call_lhs (stmt))
3688 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3690 gimple_set_block (new_stmt, gimple_block (stmt));
3691 if (gimple_has_location (stmt))
3692 gimple_set_location (new_stmt, gimple_location (stmt));
3693 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3694 gimple_call_copy_flags (new_stmt, stmt);
3696 if (dump_file && (dump_flags & TDF_DETAILS))
3698 fprintf (dump_file, "with stmt:");
3699 print_gimple_stmt (dump_file, new_stmt, 0, 0);
3700 fprintf (dump_file, "\n");
3702 gsi_replace (&gsi, new_stmt, true);
3703 if (cs)
3704 cgraph_set_call_stmt (cs, new_stmt);
3707 ipa_record_stmt_references (current_node, gsi_stmt (gsi));
3708 gsi_prev (&gsi);
3710 while ((gsi_end_p (prev_gsi) && !gsi_end_p (gsi))
3711 || (!gsi_end_p (prev_gsi) && gsi_stmt (gsi) == gsi_stmt (prev_gsi)));
3713 update_ssa (TODO_update_ssa);
3714 free_dominance_info (CDI_DOMINATORS);
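/* For illustration, an assumed example continuing the sketch above: a
   call site

     f (&s, 3);

   is rewritten roughly to

     SYNTH_1_tmp = s.field;  // value loaded through the MEM_REF built above
     f (SYNTH_1_tmp);

   The removed argument simply disappears and, when debug info is
   wanted, a debug bind statement is emitted so that the value of the
   eliminated parameter remains visible to the debugger.  */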
3717 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
3719 static bool
3720 index_in_adjustments_multiple_times_p (int base_index,
3721 ipa_parm_adjustment_vec adjustments)
3723 int i, len = adjustments.length ();
3724 bool one = false;
3726 for (i = 0; i < len; i++)
3728 struct ipa_parm_adjustment *adj;
3729 adj = &adjustments[i];
3731 if (adj->base_index == base_index)
3733 if (one)
3734 return true;
3735 else
3736 one = true;
3739 return false;
3743 /* Return adjustments that should have the same effect on function parameters
3744 and call arguments as if they were first changed according to adjustments in
3745 INNER and then by adjustments in OUTER. */
3747 ipa_parm_adjustment_vec
3748 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
3749 ipa_parm_adjustment_vec outer)
3751 int i, outlen = outer.length ();
3752 int inlen = inner.length ();
3753 int removals = 0;
3754 ipa_parm_adjustment_vec adjustments, tmp;
3756 tmp.create (inlen);
3757 for (i = 0; i < inlen; i++)
3759 struct ipa_parm_adjustment *n;
3760 n = &inner[i];
3762 if (n->remove_param)
3763 removals++;
3764 else
3765 tmp.quick_push (*n);
3768 adjustments.create (outlen + removals);
3769 for (i = 0; i < outlen; i++)
3771 struct ipa_parm_adjustment r;
3772 struct ipa_parm_adjustment *out = &outer[i];
3773 struct ipa_parm_adjustment *in = &tmp[out->base_index];
3775 memset (&r, 0, sizeof (r));
3776 gcc_assert (!in->remove_param);
3777 if (out->remove_param)
3779 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
3781 r.remove_param = true;
3782 adjustments.quick_push (r);
3784 continue;
3787 r.base_index = in->base_index;
3788 r.type = out->type;
3790 /* FIXME: Create nonlocal value too. */
3792 if (in->copy_param && out->copy_param)
3793 r.copy_param = true;
3794 else if (in->copy_param)
3795 r.offset = out->offset;
3796 else if (out->copy_param)
3797 r.offset = in->offset;
3798 else
3799 r.offset = in->offset + out->offset;
3800 adjustments.quick_push (r);
3803 for (i = 0; i < inlen; i++)
3805 struct ipa_parm_adjustment *n = &inner[i];
3807 if (n->remove_param)
3808 adjustments.quick_push (*n);
3811 tmp.release ();
3812 return adjustments;
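/* For illustration of the offset arithmetic above: if the INNER
   adjustments replaced original parameter 0 with the piece at bit
   offset 32, and the OUTER adjustments (computed against the already
   modified function) take the piece at offset 64 of that new parameter,
   the combined adjustment refers to original parameter 0 at offset
   32 + 64 = 96.  */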
3815 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a
3816 human-friendly way, assuming they are meant to be applied to FNDECL. */
3818 void
3819 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
3820 tree fndecl)
3822 int i, len = adjustments.length ();
3823 bool first = true;
3824 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
3826 fprintf (file, "IPA param adjustments: ");
3827 for (i = 0; i < len; i++)
3829 struct ipa_parm_adjustment *adj;
3830 adj = &adjustments[i];
3832 if (!first)
3833 fprintf (file, " ");
3834 else
3835 first = false;
3837 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
3838 print_generic_expr (file, parms[adj->base_index], 0);
3839 if (adj->base)
3841 fprintf (file, ", base: ");
3842 print_generic_expr (file, adj->base, 0);
3844 if (adj->reduction)
3846 fprintf (file, ", reduction: ");
3847 print_generic_expr (file, adj->reduction, 0);
3849 if (adj->new_ssa_base)
3851 fprintf (file, ", new_ssa_base: ");
3852 print_generic_expr (file, adj->new_ssa_base, 0);
3855 if (adj->copy_param)
3856 fprintf (file, ", copy_param");
3857 else if (adj->remove_param)
3858 fprintf (file, ", remove_param");
3859 else
3860 fprintf (file, ", offset %li", (long) adj->offset);
3861 if (adj->by_ref)
3862 fprintf (file, ", by_ref");
3863 print_node_brief (file, ", type: ", adj->type, 0);
3864 fprintf (file, "\n");
3866 parms.release ();
3869 /* Dump the linked list of aggregate replacement values AV to F. */
3871 void
3872 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
3874 bool comma = false;
3875 fprintf (f, " Aggregate replacements:");
3876 for (; av; av = av->next)
3878 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
3879 av->index, av->offset);
3880 print_generic_expr (f, av->value, 0);
3881 comma = true;
3883 fprintf (f, "\n");
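/* For example, a chain with two replacements for parameter 1, at bit
   offsets 0 and 64 (values made up for illustration), prints as:

      Aggregate replacements: 1[0]=42, 1[64]=7

   i.e. each element is shown as INDEX[OFFSET]=VALUE.  */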
3886 /* Stream out jump function JUMP_FUNC to OB. */
3888 static void
3889 ipa_write_jump_function (struct output_block *ob,
3890 struct ipa_jump_func *jump_func)
3892 struct ipa_agg_jf_item *item;
3893 struct bitpack_d bp;
3894 int i, count;
3896 streamer_write_uhwi (ob, jump_func->type);
3897 switch (jump_func->type)
3899 case IPA_JF_UNKNOWN:
3900 break;
3901 case IPA_JF_KNOWN_TYPE:
3902 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
3903 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
3904 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
3905 break;
3906 case IPA_JF_CONST:
3907 gcc_assert (
3908 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
3909 stream_write_tree (ob, jump_func->value.constant.value, true);
3910 break;
3911 case IPA_JF_PASS_THROUGH:
3912 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
3913 if (jump_func->value.pass_through.operation == NOP_EXPR)
3915 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
3916 bp = bitpack_create (ob->main_stream);
3917 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
3918 bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
3919 streamer_write_bitpack (&bp);
3921 else
3923 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
3924 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
3926 break;
3927 case IPA_JF_ANCESTOR:
3928 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
3929 stream_write_tree (ob, jump_func->value.ancestor.type, true);
3930 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
3931 bp = bitpack_create (ob->main_stream);
3932 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
3933 bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
3934 streamer_write_bitpack (&bp);
3935 break;
3938 count = vec_safe_length (jump_func->agg.items);
3939 streamer_write_uhwi (ob, count);
3940 if (count)
3942 bp = bitpack_create (ob->main_stream);
3943 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
3944 streamer_write_bitpack (&bp);
3947 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
3949 streamer_write_uhwi (ob, item->offset);
3950 stream_write_tree (ob, item->value, true);
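/* Informal sketch of the record emitted above (not a normative grammar):

      uhwi    type
      ...     type-specific payload (trees, uhwis and bitpacks as above)
      uhwi    count of aggregate items
      bitpack by_ref                    (present only when count != 0)
      count x { uhwi offset; tree value; }

   ipa_read_jump_function below must consume exactly the same sequence.  */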
3954 /* Read in jump function JUMP_FUNC from IB. */
3956 static void
3957 ipa_read_jump_function (struct lto_input_block *ib,
3958 struct ipa_jump_func *jump_func,
3959 struct cgraph_edge *cs,
3960 struct data_in *data_in)
3962 enum jump_func_type jftype;
3963 enum tree_code operation;
3964 int i, count;
3966 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
3967 switch (jftype)
3969 case IPA_JF_UNKNOWN:
3970 jump_func->type = IPA_JF_UNKNOWN;
3971 break;
3972 case IPA_JF_KNOWN_TYPE:
3974 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
3975 tree base_type = stream_read_tree (ib, data_in);
3976 tree component_type = stream_read_tree (ib, data_in);
3978 ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
3979 break;
3981 case IPA_JF_CONST:
3982 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
3983 break;
3984 case IPA_JF_PASS_THROUGH:
3985 operation = (enum tree_code) streamer_read_uhwi (ib);
3986 if (operation == NOP_EXPR)
3988 int formal_id = streamer_read_uhwi (ib);
3989 struct bitpack_d bp = streamer_read_bitpack (ib);
3990 bool agg_preserved = bp_unpack_value (&bp, 1);
3991 bool type_preserved = bp_unpack_value (&bp, 1);
3992 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
3993 type_preserved);
3995 else
3997 tree operand = stream_read_tree (ib, data_in);
3998 int formal_id = streamer_read_uhwi (ib);
3999 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4000 operation);
4002 break;
4003 case IPA_JF_ANCESTOR:
4005 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4006 tree type = stream_read_tree (ib, data_in);
4007 int formal_id = streamer_read_uhwi (ib);
4008 struct bitpack_d bp = streamer_read_bitpack (ib);
4009 bool agg_preserved = bp_unpack_value (&bp, 1);
4010 bool type_preserved = bp_unpack_value (&bp, 1);
4012 ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
4013 type_preserved);
4014 break;
4018 count = streamer_read_uhwi (ib);
4019 vec_alloc (jump_func->agg.items, count);
4020 if (count)
4022 struct bitpack_d bp = streamer_read_bitpack (ib);
4023 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4025 for (i = 0; i < count; i++)
4027 struct ipa_agg_jf_item item;
4028 item.offset = streamer_read_uhwi (ib);
4029 item.value = stream_read_tree (ib, data_in);
4030 jump_func->agg.items->quick_push (item);
4034 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4035 relevant to indirect inlining to OB. */
4037 static void
4038 ipa_write_indirect_edge_info (struct output_block *ob,
4039 struct cgraph_edge *cs)
4041 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4042 struct bitpack_d bp;
4044 streamer_write_hwi (ob, ii->param_index);
4045 streamer_write_hwi (ob, ii->offset);
4046 bp = bitpack_create (ob->main_stream);
4047 bp_pack_value (&bp, ii->polymorphic, 1);
4048 bp_pack_value (&bp, ii->agg_contents, 1);
4049 bp_pack_value (&bp, ii->member_ptr, 1);
4050 bp_pack_value (&bp, ii->by_ref, 1);
4051 streamer_write_bitpack (&bp);
4053 if (ii->polymorphic)
4055 streamer_write_hwi (ob, ii->otr_token);
4056 stream_write_tree (ob, ii->otr_type, true);
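/* The fixed part of this record is thus two HWIs (param_index and
   offset) followed by a four-flag bitpack; the otr_token/otr_type pair
   is appended only for polymorphic calls, and the reader below tests
   the same flag before consuming it.  */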
4060 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4061 relevant to indirect inlining from IB. */
4063 static void
4064 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4065 struct data_in *data_in ATTRIBUTE_UNUSED,
4066 struct cgraph_edge *cs)
4068 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4069 struct bitpack_d bp;
4071 ii->param_index = (int) streamer_read_hwi (ib);
4072 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4073 bp = streamer_read_bitpack (ib);
4074 ii->polymorphic = bp_unpack_value (&bp, 1);
4075 ii->agg_contents = bp_unpack_value (&bp, 1);
4076 ii->member_ptr = bp_unpack_value (&bp, 1);
4077 ii->by_ref = bp_unpack_value (&bp, 1);
4078 if (ii->polymorphic)
4080 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4081 ii->otr_type = stream_read_tree (ib, data_in);
4085 /* Stream out NODE info to OB. */
4087 static void
4088 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4090 int node_ref;
4091 lto_symtab_encoder_t encoder;
4092 struct ipa_node_params *info = IPA_NODE_REF (node);
4093 int j;
4094 struct cgraph_edge *e;
4095 struct bitpack_d bp;
4097 encoder = ob->decl_state->symtab_node_encoder;
4098 node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
4099 streamer_write_uhwi (ob, node_ref);
4101 streamer_write_uhwi (ob, ipa_get_param_count (info));
4102 for (j = 0; j < ipa_get_param_count (info); j++)
4103 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4104 bp = bitpack_create (ob->main_stream);
4105 gcc_assert (info->uses_analysis_done
4106 || ipa_get_param_count (info) == 0);
4107 gcc_assert (!info->node_enqueued);
4108 gcc_assert (!info->ipcp_orig_node);
4109 for (j = 0; j < ipa_get_param_count (info); j++)
4110 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4111 streamer_write_bitpack (&bp);
4112 for (j = 0; j < ipa_get_param_count (info); j++)
4113 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4114 for (e = node->callees; e; e = e->next_callee)
4116 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4118 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4119 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4120 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4122 for (e = node->indirect_calls; e; e = e->next_callee)
4124 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4126 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4127 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4128 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4129 ipa_write_indirect_edge_info (ob, e);
4133 /* Stream in NODE info from IB. */
4135 static void
4136 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4137 struct data_in *data_in)
4139 struct ipa_node_params *info = IPA_NODE_REF (node);
4140 int k;
4141 struct cgraph_edge *e;
4142 struct bitpack_d bp;
4144 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4146 for (k = 0; k < ipa_get_param_count (info); k++)
4147 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4149 bp = streamer_read_bitpack (ib);
4150 if (ipa_get_param_count (info) != 0)
4151 info->uses_analysis_done = true;
4152 info->node_enqueued = false;
4153 for (k = 0; k < ipa_get_param_count (info); k++)
4154 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4155 for (k = 0; k < ipa_get_param_count (info); k++)
4156 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4157 for (e = node->callees; e; e = e->next_callee)
4159 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4160 int count = streamer_read_uhwi (ib);
4162 if (!count)
4163 continue;
4164 vec_safe_grow_cleared (args->jump_functions, count);
4166 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4167 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4168 data_in);
4170 for (e = node->indirect_calls; e; e = e->next_callee)
4172 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4173 int count = streamer_read_uhwi (ib);
4175 if (count)
4177 vec_safe_grow_cleared (args->jump_functions, count);
4178 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4179 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4180 data_in);
4182 ipa_read_indirect_edge_info (ib, data_in, e);
4186 /* Write jump functions for all analyzed nodes in the current LTO partition. */
4188 void
4189 ipa_prop_write_jump_functions (void)
4191 struct cgraph_node *node;
4192 struct output_block *ob;
4193 unsigned int count = 0;
4194 lto_symtab_encoder_iterator lsei;
4195 lto_symtab_encoder_t encoder;
4198 if (!ipa_node_params_vector.exists ())
4199 return;
4201 ob = create_output_block (LTO_section_jump_functions);
4202 encoder = ob->decl_state->symtab_node_encoder;
4203 ob->cgraph_node = NULL;
4204 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4205 lsei_next_function_in_partition (&lsei))
4207 node = lsei_cgraph_node (lsei);
4208 if (cgraph_function_with_gimple_body_p (node)
4209 && IPA_NODE_REF (node) != NULL)
4210 count++;
4213 streamer_write_uhwi (ob, count);
4215 /* Process all of the functions. */
4216 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4217 lsei_next_function_in_partition (&lsei))
4219 node = lsei_cgraph_node (lsei);
4220 if (cgraph_function_with_gimple_body_p (node)
4221 && IPA_NODE_REF (node) != NULL)
4222 ipa_write_node_info (ob, node);
4224 streamer_write_char_stream (ob->main_stream, 0);
4225 produce_asm (ob, NULL);
4226 destroy_output_block (ob);
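/* Schematically, the section produced above is:

      uhwi  count of nodes written
      count x { node record as emitted by ipa_write_node_info }
      char  0

   ipa_prop_read_section below drives its loop off the leading count,
   not off the trailing zero byte.  */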
4229 /* Read a jump-functions section in file FILE_DATA of length LEN with data DATA. */
4231 static void
4232 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4233 size_t len)
4235 const struct lto_function_header *header =
4236 (const struct lto_function_header *) data;
4237 const int cfg_offset = sizeof (struct lto_function_header);
4238 const int main_offset = cfg_offset + header->cfg_size;
4239 const int string_offset = main_offset + header->main_size;
4240 struct data_in *data_in;
4241 struct lto_input_block ib_main;
4242 unsigned int i;
4243 unsigned int count;
4245 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4246 header->main_size);
4248 data_in =
4249 lto_data_in_create (file_data, (const char *) data + string_offset,
4250 header->string_size, vNULL);
4251 count = streamer_read_uhwi (&ib_main);
4253 for (i = 0; i < count; i++)
4255 unsigned int index;
4256 struct cgraph_node *node;
4257 lto_symtab_encoder_t encoder;
4259 index = streamer_read_uhwi (&ib_main);
4260 encoder = file_data->symtab_node_encoder;
4261 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4262 gcc_assert (node->symbol.definition);
4263 ipa_read_node_info (&ib_main, node, data_in);
4265 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4266 len);
4267 lto_data_in_delete (data_in);
4270 /* Read IPA-CP jump functions from all LTO input files. */
4272 void
4273 ipa_prop_read_jump_functions (void)
4275 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4276 struct lto_file_decl_data *file_data;
4277 unsigned int j = 0;
4279 ipa_check_create_node_params ();
4280 ipa_check_create_edge_args ();
4281 ipa_register_cgraph_hooks ();
4283 while ((file_data = file_data_vec[j++]))
4285 size_t len;
4286 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4288 if (data)
4289 ipa_prop_read_section (file_data, data, len);
4293 /* After merging units, we can get a mismatch in argument counts, and
4294 decl merging may have rendered parameter lists obsolete; make sure the
4295 per-node and per-edge IPA vectors exist before they are queried. */
4297 void
4298 ipa_update_after_lto_read (void)
4300 ipa_check_create_node_params ();
4301 ipa_check_create_edge_args ();
4304 void
4305 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
4307 int node_ref;
4308 unsigned int count = 0;
4309 lto_symtab_encoder_t encoder;
4310 struct ipa_agg_replacement_value *aggvals, *av;
4312 aggvals = ipa_get_agg_replacements_for_node (node);
4313 encoder = ob->decl_state->symtab_node_encoder;
4314 node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
4315 streamer_write_uhwi (ob, node_ref);
4317 for (av = aggvals; av; av = av->next)
4318 count++;
4319 streamer_write_uhwi (ob, count);
4321 for (av = aggvals; av; av = av->next)
4323 struct bitpack_d bp;
4325 streamer_write_uhwi (ob, av->offset);
4326 streamer_write_uhwi (ob, av->index);
4327 stream_write_tree (ob, av->value, true);
4329 bp = bitpack_create (ob->main_stream);
4330 bp_pack_value (&bp, av->by_ref, 1);
4331 streamer_write_bitpack (&bp);
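/* Each chain element therefore costs two uhwis (offset and index), one
   tree and a one-bit bitpack in the stream, preceded by the node
   reference and the element count written above.  */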
4335 /* Stream in the aggregate value replacement chain for NODE from IB. */
4337 static void
4338 read_agg_replacement_chain (struct lto_input_block *ib,
4339 struct cgraph_node *node,
4340 struct data_in *data_in)
4342 struct ipa_agg_replacement_value *aggvals = NULL;
4343 unsigned int count, i;
4345 count = streamer_read_uhwi (ib);
4346 for (i = 0; i < count; i++)
4348 struct ipa_agg_replacement_value *av;
4349 struct bitpack_d bp;
4351 av = ggc_alloc_ipa_agg_replacement_value ();
4352 av->offset = streamer_read_uhwi (ib);
4353 av->index = streamer_read_uhwi (ib);
4354 av->value = stream_read_tree (ib, data_in);
4355 bp = streamer_read_bitpack (ib);
4356 av->by_ref = bp_unpack_value (&bp, 1);
4357 av->next = aggvals;
4358 aggvals = av;
4360 ipa_set_node_agg_value_chain (node, aggvals);
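/* Because each value read is pushed at the head of the list, the
   reconstructed chain is in the reverse of the order in which
   write_agg_replacement_chain emitted it; the consumers in this file
   only scan the chain linearly, so the order is immaterial here.  */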
4363 /* Write all aggregate replacement chains for nodes in the current partition. */
4365 void
4366 ipa_prop_write_all_agg_replacement (void)
4368 struct cgraph_node *node;
4369 struct output_block *ob;
4370 unsigned int count = 0;
4371 lto_symtab_encoder_iterator lsei;
4372 lto_symtab_encoder_t encoder;
4374 if (!ipa_node_agg_replacements)
4375 return;
4377 ob = create_output_block (LTO_section_ipcp_transform);
4378 encoder = ob->decl_state->symtab_node_encoder;
4379 ob->cgraph_node = NULL;
4380 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4381 lsei_next_function_in_partition (&lsei))
4383 node = lsei_cgraph_node (lsei);
4384 if (cgraph_function_with_gimple_body_p (node)
4385 && ipa_get_agg_replacements_for_node (node) != NULL)
4386 count++;
4389 streamer_write_uhwi (ob, count);
4391 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4392 lsei_next_function_in_partition (&lsei))
4394 node = lsei_cgraph_node (lsei);
4395 if (cgraph_function_with_gimple_body_p (node)
4396 && ipa_get_agg_replacements_for_node (node) != NULL)
4397 write_agg_replacement_chain (ob, node);
4399 streamer_write_char_stream (ob->main_stream, 0);
4400 produce_asm (ob, NULL);
4401 destroy_output_block (ob);
4404 /* Read replacements section in file FILE_DATA of length LEN with data
4405 DATA. */
4407 static void
4408 read_replacements_section (struct lto_file_decl_data *file_data,
4409 const char *data,
4410 size_t len)
4412 const struct lto_function_header *header =
4413 (const struct lto_function_header *) data;
4414 const int cfg_offset = sizeof (struct lto_function_header);
4415 const int main_offset = cfg_offset + header->cfg_size;
4416 const int string_offset = main_offset + header->main_size;
4417 struct data_in *data_in;
4418 struct lto_input_block ib_main;
4419 unsigned int i;
4420 unsigned int count;
4422 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4423 header->main_size);
4425 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
4426 header->string_size, vNULL);
4427 count = streamer_read_uhwi (&ib_main);
4429 for (i = 0; i < count; i++)
4431 unsigned int index;
4432 struct cgraph_node *node;
4433 lto_symtab_encoder_t encoder;
4435 index = streamer_read_uhwi (&ib_main);
4436 encoder = file_data->symtab_node_encoder;
4437 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4438 gcc_assert (node->symbol.definition);
4439 read_agg_replacement_chain (&ib_main, node, data_in);
4441 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
4442 len);
4443 lto_data_in_delete (data_in);
4446 /* Read IPA-CP aggregate replacements. */
4448 void
4449 ipa_prop_read_all_agg_replacement (void)
4451 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4452 struct lto_file_decl_data *file_data;
4453 unsigned int j = 0;
4455 while ((file_data = file_data_vec[j++]))
4457 size_t len;
4458 const char *data = lto_get_section_data (file_data,
4459 LTO_section_ipcp_transform,
4460 NULL, &len);
4461 if (data)
4462 read_replacements_section (file_data, data, len);
4466 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4467 NODE. */
4469 static void
4470 adjust_agg_replacement_values (struct cgraph_node *node,
4471 struct ipa_agg_replacement_value *aggval)
4473 struct ipa_agg_replacement_value *v;
4474 int i, c = 0, d = 0, *adj;
4476 if (!node->clone.combined_args_to_skip)
4477 return;
4479 for (v = aggval; v; v = v->next)
4481 gcc_assert (v->index >= 0);
4482 if (c < v->index)
4483 c = v->index;
4485 c++;
4487 adj = XALLOCAVEC (int, c);
4488 for (i = 0; i < c; i++)
4489 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
4491 adj[i] = -1;
4492 d++;
4494 else
4495 adj[i] = i - d;
4497 for (v = aggval; v; v = v->next)
4498 v->index = adj[v->index];
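/* A made-up example: if the clone skips original parameter 1 and AGGVAL
   holds entries with indices 0, 2 and 3, then c = 4, the remapping
   array ADJ becomes { 0, -1, 1, 2 }, and the entries are renumbered to
   0, 1 and 2 to match the clone's shorter parameter list.  */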
4502 /* Function body transformation phase: replace loads from aggregate parameters with the constants IPA-CP recorded for them. */
4504 unsigned int
4505 ipcp_transform_function (struct cgraph_node *node)
4507 vec<ipa_param_descriptor_t> descriptors = vNULL;
4508 struct param_analysis_info *parms_ainfo;
4509 struct ipa_agg_replacement_value *aggval;
4510 gimple_stmt_iterator gsi;
4511 basic_block bb;
4512 int param_count;
4513 bool cfg_changed = false, something_changed = false;
4515 gcc_checking_assert (cfun);
4516 gcc_checking_assert (current_function_decl);
4518 if (dump_file)
4519 fprintf (dump_file, "Modification phase of node %s/%i\n",
4520 cgraph_node_name (node), node->symbol.order);
4522 aggval = ipa_get_agg_replacements_for_node (node);
4523 if (!aggval)
4524 return 0;
4525 param_count = count_formal_params (node->symbol.decl);
4526 if (param_count == 0)
4527 return 0;
4528 adjust_agg_replacement_values (node, aggval);
4529 if (dump_file)
4530 ipa_dump_agg_replacement_values (dump_file, aggval);
4531 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
4532 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
4533 descriptors.safe_grow_cleared (param_count);
4534 ipa_populate_param_decls (node, descriptors);
4536 FOR_EACH_BB (bb)
4537 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4539 struct ipa_agg_replacement_value *v;
4540 gimple stmt = gsi_stmt (gsi);
4541 tree rhs, val, t;
4542 HOST_WIDE_INT offset;
4543 int index;
4544 bool by_ref, vce;
4546 if (!gimple_assign_load_p (stmt))
4547 continue;
4548 rhs = gimple_assign_rhs1 (stmt);
4549 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
4550 continue;
4552 vce = false;
4553 t = rhs;
4554 while (handled_component_p (t))
4556 /* A V_C_E can do things like convert an array of integers into one
4557 bigger integer, and similar things that we do not handle below. */
4558 if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
4560 vce = true;
4561 break;
4563 t = TREE_OPERAND (t, 0);
4565 if (vce)
4566 continue;
4568 if (!ipa_load_from_parm_agg_1 (descriptors, parms_ainfo, stmt,
4569 rhs, &index, &offset, &by_ref))
4570 continue;
4571 for (v = aggval; v; v = v->next)
4572 if (v->index == index
4573 && v->offset == offset)
4574 break;
4575 if (!v || v->by_ref != by_ref)
4576 continue;
4578 gcc_checking_assert (is_gimple_ip_invariant (v->value));
4579 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
4581 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
4582 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
4583 else if (TYPE_SIZE (TREE_TYPE (rhs))
4584 == TYPE_SIZE (TREE_TYPE (v->value)))
4585 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
4586 else
4588 if (dump_file)
4590 fprintf (dump_file, " const ");
4591 print_generic_expr (dump_file, v->value, 0);
4592 fprintf (dump_file, " can't be converted to type of ");
4593 print_generic_expr (dump_file, rhs, 0);
4594 fprintf (dump_file, "\n");
4596 continue;
4599 else
4600 val = v->value;
4602 if (dump_file && (dump_flags & TDF_DETAILS))
4604 fprintf (dump_file, "Modifying stmt:\n ");
4605 print_gimple_stmt (dump_file, stmt, 0, 0);
4607 gimple_assign_set_rhs_from_tree (&gsi, val);
4608 update_stmt (stmt);
4610 if (dump_file && (dump_flags & TDF_DETAILS))
4612 fprintf (dump_file, "into:\n ");
4613 print_gimple_stmt (dump_file, stmt, 0, 0);
4614 fprintf (dump_file, "\n");
4617 something_changed = true;
4618 if (maybe_clean_eh_stmt (stmt)
4619 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4620 cfg_changed = true;
4623 (*ipa_node_agg_replacements)[node->uid] = NULL;
4624 free_parms_ainfo (parms_ainfo, param_count);
4625 descriptors.release ();
4627 if (!something_changed)
4628 return 0;
4629 else if (cfg_changed)
4630 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
4631 else
4632 return TODO_update_ssa_only_virtuals;
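/* As a made-up illustration of the transformation above: given an
   aggregate replacement 0[32]=42 for parameter p, a load

      x_1 = p.f;          <-- f at bit offset 32 within p

   is rewritten to

      x_1 = 42;

   provided the types match or the constant can be folded with a
   NOP_EXPR or VIEW_CONVERT_EXPR as checked above.  */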