/* Interprocedural analyses.
   Copyright (C) 2005-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "langhooks.h"
#include "ggc.h"
#include "target.h"
#include "cgraph.h"
#include "ipa-prop.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "gimple.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "params.h"
/* Intermediate information about a parameter that is only useful during the
   run of ipa_analyze_node and is not kept afterwards.  */

struct param_analysis_info
{
  bool parm_modified, ref_modified, pt_modified;
  bitmap parm_visited_statements, pt_visited_statements;
};
/* Vector where the parameter infos are actually stored.  */
vec<ipa_node_params_t> ipa_node_params_vector;
/* Vector of known aggregate values in cloned nodes.  */
vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
/* Vector where the edge argument infos are actually stored.  */
vec<ipa_edge_args_t, va_gc> *ipa_edge_args_vector;
/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};
/* Allocation pool for reference descriptions.  */

static alloc_pool ipa_refdesc_pool;
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->symbol.decl);
  struct cl_optimization *os;

  if (!fs_opts)
    return false;
  os = TREE_OPTIMIZATION (fs_opts);
  return !os->x_optimize || !os->x_flag_ipa_cp;
}
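
/* For example (an assumed illustration, not from the original source): for a
   function declared with __attribute__((optimize ("O0"))), the associated
   cl_optimization has x_optimize == 0, so the predicate above returns true
   and IPA-CP analysis is skipped for that node.  */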
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor_t> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor_t> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->symbol.decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm));
      param_num++;
    }
}
/* Return how many formal parameters FNDECL has.  */

static inline int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}
/* Dump the textual representation of the Ith formal parameter of the function
   corresponding to INFO to FILE.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}
/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}
/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->symbol.decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_KNOWN_TYPE)
	{
	  fprintf (f, "KNOWN TYPE: base ");
	  print_generic_expr (f, jump_func->value.known_type.base_type, 0);
	  fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
		   jump_func->value.known_type.offset);
	  print_generic_expr (f, jump_func->value.known_type.component_type, 0);
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val, 0);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
				  0);
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   tree_code_name[(int)
				  jump_func->value.pass_through.operation]);
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f,
				  jump_func->value.pass_through.operand, 0);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  if (jump_func->value.pass_through.type_preserved)
	    fprintf (f, ", type_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  print_generic_expr (f, jump_func->value.ancestor.type, 0);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  if (jump_func->value.ancestor.type_preserved)
	    fprintf (f, ", type_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_low_cst (TYPE_SIZE (item->value), 1));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value, 0);
		}
	      fprintf (f, "\n");
	    }
	}
    }
}
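
/* For illustration, a dump line emitted by the function above for a simple
   pass-through might read (format derived from the fprintf calls, exact
   spacing assumed):

       param 0: PASS THROUGH: 0, op nop_expr, agg_preserved  */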
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s/%i:\n", cgraph_node_name (node),
	   node->symbol.order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
	       xstrdup (cgraph_node_name (node)), node->symbol.order,
	       xstrdup (cgraph_node_name (cs->callee)),
	       cs->callee->symbol.order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by_value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i",
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}
/* Set JFUNC to be a known type jump function.  */

static void
ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		       tree base_type, tree component_type)
{
  gcc_assert (TREE_CODE (component_type) == RECORD_TYPE
	      && TYPE_BINFO (component_type));
  jfunc->type = IPA_JF_KNOWN_TYPE;
  jfunc->value.known_type.offset = offset;
  jfunc->value.known_type.base_type = base_type;
  jfunc->value.known_type.component_type = component_type;
}
/* Set DST to be a copy of another jump function SRC (to be used by jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}
/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;
      if (!ipa_refdesc_pool)
	ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
					      sizeof (struct ipa_cst_ref_desc), 32);

      rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
/* Set JFUNC to be a simple pass-through jump function.  */
static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved, bool type_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
  jfunc->value.pass_through.type_preserved = type_preserved;
}
/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
  jfunc->value.pass_through.type_preserved = false;
}
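
/* As an illustration, based on case 2 of the comment before
   compute_complex_assign_jump_func below: for

     D.2064_4 = a.1(D) + 4;
     bar (D.2064_4);

   the arithmetic pass-through jump function records the formal id of A,
   operand 4 and operation PLUS_EXPR.  */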
/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     tree type, int formal_id, bool agg_preserved,
		     bool type_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.type = type;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
  jfunc->value.ancestor.type_preserved = type_preserved;
}
/* Extract the actual BINFO being described by JFUNC which must be a known type
   jump function.  */

tree
ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
{
  tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
  if (!base_binfo)
    return NULL_TREE;
  return get_binfo_at_offset (base_binfo,
			      jfunc->value.known_type.offset,
			      jfunc->value.known_type.component_type);
}
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type of the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */
static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
/* If STMT can be proved to be an assignment to the virtual method table
   pointer of the object described by TCI and the type associated with the new
   table identified, return the type.  Otherwise return NULL_TREE.  */

static tree
extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
{
  HOST_WIDE_INT offset, size, max_size;
  tree lhs, rhs, base;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
      || TREE_CODE (rhs) != ADDR_EXPR)
    return NULL_TREE;
  rhs = get_base_address (TREE_OPERAND (rhs, 0));
  if (!rhs
      || TREE_CODE (rhs) != VAR_DECL
      || !DECL_VIRTUAL_P (rhs))
    return NULL_TREE;

  base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
  if (offset != tci->offset
      || size != POINTER_SIZE
      || max_size != POINTER_SIZE)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (TREE_CODE (tci->object) != MEM_REF
	  || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
	  || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
				  TREE_OPERAND (base, 1)))
	return NULL_TREE;
    }
  else if (tci->object != base)
    return NULL_TREE;

  return DECL_CONTEXT (rhs);
}
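
/* A store matched by the function above typically looks like the following
   gimple (an assumed example, identifiers hypothetical):

     this_2(D)->_vptr.A = &_ZTV1B[2];

   i.e. an assignment to a DECL_VIRTUAL_P component whose RHS takes the
   address of a virtual table variable; DECL_CONTEXT of that table then
   yields the new type.  */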
/* Callback of walk_aliased_vdefs and a helper function for
   detect_type_change to check whether a particular statement may modify
   the virtual table pointer, and if possible also determine the new type of
   the (sub-)object.  It stores its result into DATA, which points to a
   type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct type_change_info *tci = (struct type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tree type;
      type = extr_type_from_vtbl_ptr_store (stmt, tci);
      if (tci->type_maybe_changed
	  && type != tci->known_current_type)
	tci->multiple_types_encountered = true;
      tci->known_current_type = type;
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it has, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gimple call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  struct type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));
  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (comp_type)
      || !BINFO_VTABLE (TYPE_BINFO (comp_type)))
    return false;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.known_current_type = NULL_TREE;
  tci.type_maybe_changed = false;
  tci.multiple_types_encountered = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL);
  if (!tci.type_maybe_changed)
    return false;

  if (!tci.known_current_type
      || tci.multiple_types_encountered
      || offset != 0)
    jfunc->type = IPA_JF_UNKNOWN;
  else
    ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);

  return true;
}
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gimple call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change (arg, arg, comp_type, call, jfunc, 0);
}
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  PARM_AINFO is a pointer to a structure containing temporary
   information about the parameter.  */

static bool
parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
			      gimple stmt, tree parm_load)
{
  bool modified = false;
  bitmap *visited_stmts;
  ao_ref refd;

  if (parm_ainfo && parm_ainfo->parm_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  /* We can cache visited statements only when parm_ainfo is available and when
     we are looking at a naked load of the whole parameter.  */
  if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
    visited_stmts = NULL;
  else
    visited_stmts = &parm_ainfo->parm_visited_statements;
  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
		      visited_stmts);
  if (parm_ainfo && modified)
    parm_ainfo->parm_modified = true;
  return !modified;
}
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (vec<ipa_param_descriptor_t> descriptors,
			    struct param_analysis_info *parms_ainfo,
			    gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
					: NULL, stmt, op1))
    return -1;

  return index;
}
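
/* This corresponds to case 1 of the comment before
   compute_complex_assign_jump_func below: e.g. for

     a.0_2 = a;
     bar (a.0_2);

   the defining statement of a.0_2 is recognized as a load from the
   unmodified PARM_DECL A and A's index is returned.  */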
/* Return true if memory reference REF loads data that are known to be
   unmodified in this function before reaching statement STMT.  PARM_AINFO, if
   non-NULL, is a pointer to a structure containing temporary information about
   PARM.  */

static bool
parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
			   gimple stmt, tree ref)
{
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (gimple_vuse (stmt));
  if (parm_ainfo && parm_ainfo->ref_modified)
    return false;

  ao_ref_init (&refd, ref);
  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
		      NULL);
  if (parm_ainfo && modified)
    parm_ainfo->ref_modified = true;
  return !modified;
}
/* Return true if the data pointed to by PARM is known to be unmodified in this
   function before reaching call statement CALL into which it is passed.
   PARM_AINFO is a pointer to a structure containing temporary information
   about PARM.  */

static bool
parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
			      gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm)))
    return false;

  if (parm_ainfo->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
		      parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
  if (modified)
    parm_ainfo->pt_modified = true;
  return !modified;
}
/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

static bool
ipa_load_from_parm_agg_1 (vec<ipa_param_descriptor_t> descriptors,
			  struct param_analysis_info *parms_ainfo, gimple stmt,
			  tree op, int *index_p, HOST_WIDE_INT *offset_p,
			  bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
					   : NULL, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	   void (*<T2e4>) (struct S *) D.1867;
	   struct S * p.1;

	   <bb 2>:
	   p.1_1 = p;
	   D.1867_2 = p.1_1->f;
	   D.1867_2 ();
	   gdp = &p;  */

      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (descriptors, parms_ainfo, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
				    stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      return true;
    }
  return false;
}
/* Just like the previous function, just without the param_analysis_info
   pointer, for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
			tree op, int *index_p, HOST_WIDE_INT *offset_p,
			bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (info->descriptors, NULL, stmt, op, index_p,
				   offset_p, by_ref_p);
}
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
        int a.0;

        a.0_2 = a;
        bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function. E.g.

      foo (int a)
      {
        int D.2064;

        D.2064_4 = a.1(D) + 4;
        bar (D.2064_4);

   This case can also occur in combination of the previous one, e.g.:

      foo (int a, int z)
      {
        int a.0;
        int D.2064;

        a.0_3 = a;
        D.2064_4 = a.0_3 + 4;
        foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters across different
   stages of IPA optimizations.  PARMS_AINFO contains the information that is
   only needed for intraprocedural analysis.  */
static void
compute_complex_assign_jump_func (struct ipa_node_params *info,
				  struct param_analysis_info *parms_ainfo,
				  struct ipa_jump_func *jfunc,
				  gimple call, gimple stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (info->descriptors, parms_ainfo,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (info->descriptors, parms_ainfo, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
	{
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  ipa_set_jf_arith_pass_through (jfunc, index, op2,
					 gimple_assign_rhs_code (stmt));
	}
      else if (gimple_assign_single_p (stmt))
	{
	  bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
						     call, tc_ssa);
	  bool type_p = false;

	  if (param_type && POINTER_TYPE_P (param_type))
	    type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
					      call, jfunc);
	  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).low * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    {
      bool type_p = !detect_type_change (op1, base, TREE_TYPE (param_type),
					 call, jfunc, offset);
      if (type_p || jfunc->type == IPA_JF_UNKNOWN)
	ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index,
			     parm_ref_data_pass_through_p (&parms_ainfo[index],
							   call, ssa), type_p);
    }
}
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

     <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

     <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_node_params *info,
				    struct param_analysis_info *parms_ainfo,
				    struct ipa_jump_func *jfunc,
				    gimple call, gimple phi, tree param_type)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  gcc_assert (index >= 0);

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  bool type_p = false;
  if (param_type && POINTER_TYPE_P (param_type))
    type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
				  call, jfunc, offset);
  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
    ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index,
			 parm_ref_data_pass_through_p (&parms_ainfo[index],
						       call, parm), type_p);
}
/* Given OP which is passed as an actual argument to a called function,
   determine if it is possible to construct a KNOWN_TYPE jump function for it
   and if so, create one and store it to JFUNC.
   EXPECTED_TYPE represents a type the argument should be in.  */

static void
compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
			      gimple call, tree expected_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  if (!flag_devirtualize
      || TREE_CODE (op) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE
      /* Be sure expected_type is polymorphic.  */
      || !expected_type
      || TREE_CODE (expected_type) != RECORD_TYPE
      || !TYPE_BINFO (expected_type)
      || !BINFO_VTABLE (TYPE_BINFO (expected_type)))
    return;

  op = TREE_OPERAND (op, 0);
  base = get_ref_base_and_extent (op, &offset, &size, &max_size);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
      || is_global_var (base))
    return;

  if (detect_type_change (op, base, expected_type, call, jfunc, offset))
    return;

  ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
			 expected_type);
}
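
/* An assumed illustration: for a polymorphic class A and a call

     A a;        // automatic, non-global variable
     f (&a);

   the function above can produce a KNOWN_TYPE jump function with base type
   A, offset 0 and component type A, unless a dynamic type change between
   the declaration and the call is detected.  */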
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
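
/* Under the Itanium C++ ABI (an assumption; other ABIs may differ), a
   pointer to member function is a two-field record roughly equivalent to

     struct ptrmemfunc { void (T::*__pfn) (void); ptrdiff_t __delta; };

   which is the shape the predicate above tests for.  */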
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}
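
/* E.g. with hypothetical SSA names, given the copy chain

     b_2 = a_1;
     c_3 = b_2;

   get_ssa_def_if_simple_copy (c_3) walks the definitions back and
   returns a_1.  */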
/* Simple linked list, describing known contents of an aggregate before
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  JFUNC is the jump function into
   which the constants are subsequently stored.  */

static void
determine_known_aggregate_parts (gimple call, tree arg,
				 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (TREE_TYPE (arg)))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!host_integerp (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg))), 1))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)));
	  arg_size = tree_low_cst (type_size, 1);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  HOST_WIDE_INT arg_max_size;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					      &arg_max_size);
	  if (arg_max_size == -1
	      || arg_max_size != arg_size
	      || arg_offset < 0)
	    return;
	  if (DECL_P (arg_base))
	    {
	      tree size;
	      check_ref = false;
	      size = build_int_cst (integer_type_node, arg_size);
	      ao_ref_init_from_ptr_and_size (&r, arg_base, size);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					  &arg_max_size);
      if (arg_max_size == -1
	  || arg_max_size != arg_size
	  || arg_offset < 0)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
     describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;
      bool partial_overlap;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
	continue;
      if (!gimple_assign_single_p (stmt))
	break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (rhs)
	  || TREE_CODE (lhs) == BIT_FIELD_REF
	  || contains_bitfld_component_ref_p (lhs))
	break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
					  &lhs_max_size);
      if (lhs_max_size == -1
	  || lhs_max_size != lhs_size
	  || (lhs_offset < arg_offset
	      && lhs_offset + lhs_size > arg_offset)
	  || (lhs_offset < arg_offset + arg_size
	      && lhs_offset + lhs_size > arg_offset + arg_size))
	break;

      if (check_ref)
	{
	  if (TREE_CODE (lhs_base) != MEM_REF
	      || TREE_OPERAND (lhs_base, 0) != arg_base
	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	    break;
	}
      else if (lhs_base != arg_base)
	{
	  if (DECL_P (lhs_base))
	    continue;
	  else
	    break;
	}

      if (lhs_offset + lhs_size < arg_offset
	  || lhs_offset >= (arg_offset + arg_size))
	continue;

      partial_overlap = false;
      p = &list;
      while (*p && (*p)->offset < lhs_offset)
	{
	  if ((*p)->offset + (*p)->size > lhs_offset)
	    {
	      partial_overlap = true;
	      break;
	    }
	  p = &(*p)->next;
	}
      if (partial_overlap)
	break;
      if (*p && (*p)->offset < lhs_offset + lhs_size)
	{
	  if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	    /* We already know this value is subsequently overwritten with
	       something else.  */
	    continue;
	  else
	    /* Otherwise this is a partial overlap which we cannot
	       represent.  */
	    break;
	}

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
	{
	  n->constant = rhs;
	  const_count++;
	}
      else
	n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      vec_alloc (jfunc->agg.items, const_count);
      while (list)
	{
	  if (list->constant)
	    {
	      struct ipa_agg_jf_item item;
	      item.offset = list->offset - arg_offset;
	      gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	      item.value = unshare_expr_without_location (list->constant);
	      jfunc->agg.items->quick_push (item);
	    }
	  list = list->next;
	}
    }
}
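
/* As an assumed illustration (field offsets depend on the actual layout):
   given

     struct S s;
     s.a = 1;
     s.b = 2;
     foo (s);

   the walk above records two constant items for the aggregate S, one per
   assigned field, with offsets relative to the start of the argument.  */

/* Return the type of the Ith formal parameter of the callee of call graph
   edge E, or NULL if it cannot be determined.  */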
static tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->symbol.decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->symbol.decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gimple call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);

  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);

      if (is_gimple_ip_invariant (arg))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >= 0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p, type_p;
		  agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
							call, arg);
		  if (param_type && POINTER_TYPE_P (param_type))
		    type_p = !detect_type_change_ssa (arg, TREE_TYPE (param_type),
						      call, jfunc);
		  else
		    type_p = false;
		  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
		    ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
						    type_p);
		}
	    }
	  else
	    {
	      gimple stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
						    call, stmt, param_type);
	    }
	}
      else
	compute_known_type_jump_func (arg, jfunc, call,
				      param_type
				      && POINTER_TYPE_P (param_type)
				      ? TREE_TYPE (param_type)
				      : NULL);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || (POINTER_TYPE_P (TREE_TYPE (arg)))))
	determine_known_aggregate_parts (call, arg, jfunc);
    }
}
/* Compute jump functions for all edges - both direct and indirect - outgoing
   from NODE.  Also count the actual arguments in the process.  */

static void
ipa_compute_jump_functions (struct cgraph_node *node,
			    struct param_analysis_info *parms_ainfo)
{
  struct cgraph_edge *cs;

  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
								  NULL);
      /* We do not need to bother analyzing calls to unknown
	 functions unless they may become known during lto/whopr.  */
      if (!callee->symbol.definition && !flag_lto)
	continue;
      ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
}
/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}
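
/* The loads recognized above are those appearing in the member pointer call
   pattern shown in the comment before ipa_analyze_indirect_call_uses, e.g.

     f$__pfn_24 = MEM[(struct  *)&f + 4B];

   where F is a PARM_DECL whose type satisfies type_like_member_ptr_p.  */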
/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}
/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
{
  struct cgraph_edge *cs;

  cs = cgraph_edge (node, stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->offset = 0;
  cs->indirect_info->polymorphic = 0;
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  return cs;
}
/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

     <bb 2>:
       f$__delta_5 = f.__delta;
       f$__pfn_24 = f.__pfn;

   or
     <bb 2>:
       f$__delta_5 = MEM[(struct  *)&f];
       f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and a few lines below:

     <bb 5>
       D.2496_3 = (int) f$__pfn_24;
       D.2497_4 = D.2496_3 & 1;
       if (D.2497_4 != 0)
         goto <bb 3>;
       else
         goto <bb 4>;

     <bb 6>:
       D.2500_7 = (unsigned int) f$__delta_5;
       D.2501_8 = &S + D.2500_7;
       D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
       D.2503_10 = *D.2502_9;
       D.2504_12 = f$__pfn_24 + -1;
       D.2505_13 = (unsigned int) D.2504_12;
       D.2506_14 = D.2503_10 + D.2505_13;
       D.2507_15 = *D.2506_14;
       iftmp.11_16 = (String:: *) D.2507_15;

     <bb 7>:
       # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
       D.2500_19 = (unsigned int) f$__delta_5;
       D.2508_20 = &S + D.2500_19;
       D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */

static void
ipa_analyze_indirect_call_uses (struct cgraph_node *node,
				struct ipa_node_params *info,
				struct param_analysis_info *parms_ainfo,
				gimple call, tree target)
{
  gimple def;
  tree n1, n2;
  gimple d1, d2;
  tree rec, rec2, cond;
  gimple branch;
  int index;
  basic_block bb, virt_bb, join;
  HOST_WIDE_INT offset;
  bool by_ref;

  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
	ipa_note_param_call (node, index, call);
      return;
    }

  def = SSA_NAME_DEF_STMT (target);
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg_1 (info->descriptors, parms_ainfo, def,
				   gimple_assign_rhs1 (def), &index, &offset,
				   &by_ref))
    {
      struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  n1 = PHI_ARG_DEF (def, 0);
  n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  d1 = SSA_NAME_DEF_STMT (n1);
  d2 = SSA_NAME_DEF_STMT (n2);

  join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
	return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta),
					     NULL);
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->member_ptr = 1;
    }

  return;
}
1930 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1931 object referenced in the expression is a formal parameter of the caller
1932 (described by INFO), create a call note for the statement. */
1934 static void
1935 ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1936 struct ipa_node_params *info, gimple call,
1937 tree target)
1939 struct cgraph_edge *cs;
1940 struct cgraph_indirect_call_info *ii;
1941 struct ipa_jump_func jfunc;
1942 tree obj = OBJ_TYPE_REF_OBJECT (target);
1943 int index;
1944 HOST_WIDE_INT anc_offset;
1946 if (!flag_devirtualize)
1947 return;
1949 if (TREE_CODE (obj) != SSA_NAME)
1950 return;
1952 if (SSA_NAME_IS_DEFAULT_DEF (obj))
1954 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
1955 return;
1957 anc_offset = 0;
1958 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
1959 gcc_assert (index >= 0);
1960 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
1961 call, &jfunc))
1962 return;
1964 else
1966 gimple stmt = SSA_NAME_DEF_STMT (obj);
1967 tree expr;
1969 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
1970 if (!expr)
1971 return;
1972 index = ipa_get_param_decl_index (info,
1973 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
1974 gcc_assert (index >= 0);
1975 if (detect_type_change (obj, expr, obj_type_ref_class (target),
1976 call, &jfunc, anc_offset))
1977 return;
1980 cs = ipa_note_param_call (node, index, call);
1981 ii = cs->indirect_info;
1982 ii->offset = anc_offset;
1983 ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
1984 ii->otr_type = obj_type_ref_class (target);
1985 ii->polymorphic = 1;
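/* An illustration of the above (hypothetical example, not from this file):
   for

     struct A { virtual int foo (int); };
     int bar (struct A *a) { return a->foo (4); }

   the call is lowered to an OBJ_TYPE_REF whose object is the default SSA
   definition of the parameter a, so the code above records a polymorphic
   indirect call with otr_token taken from the reference and offset 0.  */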
1988 /* Analyze the call statement CALL to determine whether and how it utilizes
1989 formal parameters of the caller (described by INFO). PARMS_AINFO points to
1990 a vector containing intermediate information about each formal parameter. */
1992 static void
1993 ipa_analyze_call_uses (struct cgraph_node *node,
1994 struct ipa_node_params *info,
1995 struct param_analysis_info *parms_ainfo, gimple call)
1997 tree target = gimple_call_fn (call);
1999 if (!target)
2000 return;
2001 if (TREE_CODE (target) == SSA_NAME)
2002 ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
2003 else if (virtual_method_call_p (target))
2004 ipa_analyze_virtual_call_uses (node, info, call, target);
2008 /* Analyze the call statement STMT with respect to formal parameters (described
2009 in INFO) of the caller given by NODE. Currently it only checks whether formal
2010 parameters are called. PARMS_AINFO is a pointer to a vector containing
2011 intermediate information about each formal parameter. */
2013 static void
2014 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
2015 struct param_analysis_info *parms_ainfo, gimple stmt)
2017 if (is_gimple_call (stmt))
2018 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
2021 /* Callback of walk_stmt_load_store_addr_ops (for loads, stores and addresses).
2022 If OP is a parameter declaration, mark it as used in the info structure
2023 passed in DATA. */
2025 static bool
2026 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
2027 tree op, void *data)
2029 struct ipa_node_params *info = (struct ipa_node_params *) data;
2031 op = get_base_address (op);
2032 if (op
2033 && TREE_CODE (op) == PARM_DECL)
2035 int index = ipa_get_param_decl_index (info, op);
2036 gcc_assert (index >= 0);
2037 ipa_set_param_used (info, index, true);
2040 return false;
2043 /* Scan the function body of NODE and inspect the uses of formal parameters.
2044 Store the findings in various fields of the associated ipa_node_params
2045 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
2046 vector containing intermediate information about each formal parameter. */
2048 static void
2049 ipa_analyze_params_uses (struct cgraph_node *node,
2050 struct param_analysis_info *parms_ainfo)
2052 tree decl = node->symbol.decl;
2053 basic_block bb;
2054 struct function *func;
2055 gimple_stmt_iterator gsi;
2056 struct ipa_node_params *info = IPA_NODE_REF (node);
2057 int i;
2059 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
2060 return;
2062 info->uses_analysis_done = 1;
2063 if (ipa_func_spec_opts_forbid_analysis_p (node))
2065 for (i = 0; i < ipa_get_param_count (info); i++)
2067 ipa_set_param_used (info, i, true);
2068 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2070 return;
2073 for (i = 0; i < ipa_get_param_count (info); i++)
2075 tree parm = ipa_get_param (info, i);
2076 int controlled_uses = 0;
2078 /* For SSA regs see if the parameter is used. For non-SSA regs we compute
2079 the flag during modification analysis. */
2080 if (is_gimple_reg (parm))
2082 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->symbol.decl),
2083 parm);
2084 if (ddef && !has_zero_uses (ddef))
2086 imm_use_iterator imm_iter;
2087 use_operand_p use_p;
2089 ipa_set_param_used (info, i, true);
2090 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2091 if (!is_gimple_call (USE_STMT (use_p)))
2093 controlled_uses = IPA_UNDESCRIBED_USE;
2094 break;
2096 else
2097 controlled_uses++;
2099 else
2100 controlled_uses = 0;
2102 else
2103 controlled_uses = IPA_UNDESCRIBED_USE;
2104 ipa_set_controlled_uses (info, i, controlled_uses);
2107 func = DECL_STRUCT_FUNCTION (decl);
2108 FOR_EACH_BB_FN (bb, func)
2110 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2112 gimple stmt = gsi_stmt (gsi);
2114 if (is_gimple_debug (stmt))
2115 continue;
2117 ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
2118 walk_stmt_load_store_addr_ops (stmt, info,
2119 visit_ref_for_mod_analysis,
2120 visit_ref_for_mod_analysis,
2121 visit_ref_for_mod_analysis);
2123 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2124 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
2125 visit_ref_for_mod_analysis,
2126 visit_ref_for_mod_analysis,
2127 visit_ref_for_mod_analysis);
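/* A worked illustration of the controlled-uses count computed above, on a
   hypothetical function: if a parameter's default SSA definition is used
   only as an argument of three calls, its count is 3; a single non-call
   use (a store, a comparison, ...) makes the uses undescribable and the
   count becomes IPA_UNDESCRIBED_USE.  */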
2131 /* Free stuff in PARMS_AINFO, assuming there are PARAM_COUNT parameters. */
2133 static void
2134 free_parms_ainfo (struct param_analysis_info *parms_ainfo, int param_count)
2136 int i;
2138 for (i = 0; i < param_count; i++)
2140 if (parms_ainfo[i].parm_visited_statements)
2141 BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
2142 if (parms_ainfo[i].pt_visited_statements)
2143 BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
2147 /* Initialize the array describing properties of formal parameters
2148 of NODE, analyze their uses and compute jump functions associated
2149 with actual arguments of calls from within NODE. */
2151 void
2152 ipa_analyze_node (struct cgraph_node *node)
2154 struct ipa_node_params *info;
2155 struct param_analysis_info *parms_ainfo;
2156 int param_count;
2158 ipa_check_create_node_params ();
2159 ipa_check_create_edge_args ();
2160 info = IPA_NODE_REF (node);
2161 push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
2162 ipa_initialize_node_params (node);
2164 param_count = ipa_get_param_count (info);
2165 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
2166 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
2168 ipa_analyze_params_uses (node, parms_ainfo);
2169 ipa_compute_jump_functions (node, parms_ainfo);
2171 free_parms_ainfo (parms_ainfo, param_count);
2172 pop_cfun ();
2175 /* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF
2176 attempt a type-based devirtualization. If successful, return the
2177 target function declaration, otherwise return NULL. */
2179 tree
2180 ipa_intraprocedural_devirtualization (gimple call)
2182 tree binfo, token, fndecl;
2183 struct ipa_jump_func jfunc;
2184 tree otr = gimple_call_fn (call);
2186 jfunc.type = IPA_JF_UNKNOWN;
2187 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
2188 call, obj_type_ref_class (otr));
2189 if (jfunc.type != IPA_JF_KNOWN_TYPE)
2190 return NULL_TREE;
2191 binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
2192 if (!binfo)
2193 return NULL_TREE;
2194 token = OBJ_TYPE_REF_TOKEN (otr);
2195 fndecl = gimple_get_virt_method_for_binfo (tree_low_cst (token, 1),
2196 binfo);
2197 return fndecl;
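/* A sketch of when the above succeeds (hypothetical example): if the
   OBJ_TYPE_REF object can be proven to be of exactly some type A, e.g. an
   automatic variable whose address does not escape, the known-type jump
   function yields A's BINFO and gimple_get_virt_method_for_binfo looks the
   method up in A's virtual table, producing a direct callee.  */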
2200 /* Update the jump function DST when the call graph edge corresponding to SRC
2201 is being inlined, knowing that DST is of type ancestor and SRC of known
2202 type. */
2204 static void
2205 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2206 struct ipa_jump_func *dst)
2208 HOST_WIDE_INT combined_offset;
2209 tree combined_type;
2211 if (!ipa_get_jf_ancestor_type_preserved (dst))
2213 dst->type = IPA_JF_UNKNOWN;
2214 return;
2217 combined_offset = ipa_get_jf_known_type_offset (src)
2218 + ipa_get_jf_ancestor_offset (dst);
2219 combined_type = ipa_get_jf_ancestor_type (dst);
2221 ipa_set_jf_known_type (dst, combined_offset,
2222 ipa_get_jf_known_type_base_type (src),
2223 combined_type);
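/* The offset arithmetic above, on hypothetical numbers: if SRC knows the
   object is of some type at offset 32 and DST walks to an ancestor at
   offset 64 within it, the combined jump function describes a known type
   at offset 32 + 64 = 96 in the outermost object, with the ancestor's type
   as the component type.  */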
2226 /* Update the jump functions associated with call graph edge E when the call
2227 graph edge CS is being inlined, assuming that E->caller is already (possibly
2228 indirectly) inlined into CS->callee and that E has not been inlined. */
2230 static void
2231 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2232 struct cgraph_edge *e)
2234 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2235 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2236 int count = ipa_get_cs_argument_count (args);
2237 int i;
2239 for (i = 0; i < count; i++)
2241 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2243 if (dst->type == IPA_JF_ANCESTOR)
2245 struct ipa_jump_func *src;
2246 int dst_fid = dst->value.ancestor.formal_id;
2248 /* A variable number of arguments can cause havoc if we try to access
2249 an argument that does not exist in the inlined edge. So make sure
2250 we don't. */
2251 if (dst_fid >= ipa_get_cs_argument_count (top))
2253 dst->type = IPA_JF_UNKNOWN;
2254 continue;
2257 src = ipa_get_ith_jump_func (top, dst_fid);
2259 if (src->agg.items
2260 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2262 struct ipa_agg_jf_item *item;
2263 int j;
2265 /* Currently we do not produce clobber aggregate jump functions;
2266 replace this with merging when we do. */
2267 gcc_assert (!dst->agg.items);
2269 dst->agg.items = vec_safe_copy (src->agg.items);
2270 dst->agg.by_ref = src->agg.by_ref;
2271 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2272 item->offset -= dst->value.ancestor.offset;
2275 if (src->type == IPA_JF_KNOWN_TYPE)
2276 combine_known_type_and_ancestor_jfs (src, dst);
2277 else if (src->type == IPA_JF_PASS_THROUGH
2278 && src->value.pass_through.operation == NOP_EXPR)
2280 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2281 dst->value.ancestor.agg_preserved &=
2282 src->value.pass_through.agg_preserved;
2283 dst->value.ancestor.type_preserved &=
2284 src->value.pass_through.type_preserved;
2286 else if (src->type == IPA_JF_ANCESTOR)
2288 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2289 dst->value.ancestor.offset += src->value.ancestor.offset;
2290 dst->value.ancestor.agg_preserved &=
2291 src->value.ancestor.agg_preserved;
2292 dst->value.ancestor.type_preserved &=
2293 src->value.ancestor.type_preserved;
2295 else
2296 dst->type = IPA_JF_UNKNOWN;
2298 else if (dst->type == IPA_JF_PASS_THROUGH)
2300 struct ipa_jump_func *src;
2301 /* We must check the range due to calls with a variable number of
2302 arguments, and we cannot combine jump functions with operations. */
2303 if (dst->value.pass_through.operation == NOP_EXPR
2304 && (dst->value.pass_through.formal_id
2305 < ipa_get_cs_argument_count (top)))
2307 int dst_fid = dst->value.pass_through.formal_id;
2308 src = ipa_get_ith_jump_func (top, dst_fid);
2309 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2311 switch (src->type)
2313 case IPA_JF_UNKNOWN:
2314 dst->type = IPA_JF_UNKNOWN;
2315 break;
2316 case IPA_JF_KNOWN_TYPE:
2317 ipa_set_jf_known_type (dst,
2318 ipa_get_jf_known_type_offset (src),
2319 ipa_get_jf_known_type_base_type (src),
2320 ipa_get_jf_known_type_base_type (src));
2321 break;
2322 case IPA_JF_CONST:
2323 ipa_set_jf_cst_copy (dst, src);
2324 break;
2326 case IPA_JF_PASS_THROUGH:
2328 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2329 enum tree_code operation;
2330 operation = ipa_get_jf_pass_through_operation (src);
2332 if (operation == NOP_EXPR)
2334 bool agg_p, type_p;
2335 agg_p = dst_agg_p
2336 && ipa_get_jf_pass_through_agg_preserved (src);
2337 type_p = ipa_get_jf_pass_through_type_preserved (src)
2338 && ipa_get_jf_pass_through_type_preserved (dst);
2339 ipa_set_jf_simple_pass_through (dst, formal_id,
2340 agg_p, type_p);
2342 else
2344 tree operand = ipa_get_jf_pass_through_operand (src);
2345 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2346 operation);
2348 break;
2350 case IPA_JF_ANCESTOR:
2352 bool agg_p, type_p;
2353 agg_p = dst_agg_p
2354 && ipa_get_jf_ancestor_agg_preserved (src);
2355 type_p = ipa_get_jf_ancestor_type_preserved (src)
2356 && ipa_get_jf_pass_through_type_preserved (dst);
2357 ipa_set_ancestor_jf (dst,
2358 ipa_get_jf_ancestor_offset (src),
2359 ipa_get_jf_ancestor_type (src),
2360 ipa_get_jf_ancestor_formal_id (src),
2361 agg_p, type_p);
2362 break;
2364 default:
2365 gcc_unreachable ();
2368 if (src->agg.items
2369 && (dst_agg_p || !src->agg.by_ref))
2371 /* Currently we do not produce clobber aggregate jump
2372 functions; replace this with merging when we do. */
2373 gcc_assert (!dst->agg.items);
2375 dst->agg.by_ref = src->agg.by_ref;
2376 dst->agg.items = vec_safe_copy (src->agg.items);
2379 else
2380 dst->type = IPA_JF_UNKNOWN;
2385 /* If TARGET is an addr_expr of a function declaration, make it the destination
2386 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2388 struct cgraph_edge *
2389 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
2391 struct cgraph_node *callee;
2392 struct inline_edge_summary *es = inline_edge_summary (ie);
2393 bool unreachable = false;
2395 if (TREE_CODE (target) == ADDR_EXPR)
2396 target = TREE_OPERAND (target, 0);
2397 if (TREE_CODE (target) != FUNCTION_DECL)
2399 target = canonicalize_constructor_val (target, NULL);
2400 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2402 if (ie->indirect_info->member_ptr)
2403 /* Member pointer call that goes through a VMT lookup. */
2404 return NULL;
2406 if (dump_file)
2407 fprintf (dump_file, "ipa-prop: Discovered direct call to non-function"
2408 " in %s/%i, making it unreachable.\n",
2409 cgraph_node_name (ie->caller), ie->caller->symbol.order);
2410 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2411 callee = cgraph_get_create_node (target);
2412 unreachable = true;
2414 else
2415 callee = cgraph_get_node (target);
2417 else
2418 callee = cgraph_get_node (target);
2420 /* Because may-edges are not explicitly represented and the vtable may be
2421 external, we may create the first reference to the object in the unit. */
2422 if (!callee || callee->global.inlined_to)
2425 /* We had better make sure we can refer to it.
2426 In the case of static functions we are out of luck, since we have
2427 already removed their bodies. In the case of public functions we may
2428 or may not introduce the reference. */
2429 if (!canonicalize_constructor_val (target, NULL)
2430 || !TREE_PUBLIC (target))
2432 if (dump_file)
2433 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2434 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2435 xstrdup (cgraph_node_name (ie->caller)),
2436 ie->caller->symbol.order,
2437 xstrdup (cgraph_node_name (ie->callee)),
2438 ie->callee->symbol.order);
2439 return NULL;
2441 callee = cgraph_get_create_real_symbol_node (target);
2443 ipa_check_create_node_params ();
2445 /* We cannot make edges to inline clones. It is a bug that someone removed
2446 the cgraph node too early. */
2447 gcc_assert (!callee->global.inlined_to);
2449 if (dump_file && !unreachable)
2451 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
2452 "(%s/%i -> %s/%i), for stmt ",
2453 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2454 xstrdup (cgraph_node_name (ie->caller)),
2455 ie->caller->symbol.order,
2456 xstrdup (cgraph_node_name (callee)),
2457 callee->symbol.order);
2458 if (ie->call_stmt)
2459 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2460 else
2461 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2463 ie = cgraph_make_edge_direct (ie, callee);
2464 es = inline_edge_summary (ie);
2465 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2466 - eni_size_weights.call_cost);
2467 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2468 - eni_time_weights.call_cost);
2470 return ie;
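/* A note on the size/time update above, with hypothetical weights: if
   eni_size_weights.indirect_call_cost were 12 and call_cost were 3, making
   the edge direct would subtract 12 - 3 = 9 from the cached statement size
   estimate rather than recomputing the summary from scratch.  */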
2473 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2474 return NULL if there is none. BY_REF specifies whether the value has to
2475 be passed by reference or by value. */
2477 tree
2478 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2479 HOST_WIDE_INT offset, bool by_ref)
2481 struct ipa_agg_jf_item *item;
2482 int i;
2484 if (by_ref != agg->by_ref)
2485 return NULL;
2487 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2488 if (item->offset == offset)
2490 /* Currently we do not have clobber values; return NULL for them once
2491 we do. */
2492 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2493 return item->value;
2495 return NULL;
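/* Usage sketch (hypothetical offsets): if the analysis recorded that an
   indirect call loads its target from an aggregate passed by reference at
   bit offset 64, then

     ipa_find_agg_cst_for_param (&jfunc->agg, 64, true)

   returns the constant the aggregate jump function describes at that
   offset, or NULL if there is none or the by_ref flags do not match.  */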
2498 /* Remove a reference to SYMBOL from the list of references of a node given by
2499 reference description RDESC. */
2501 static void
2502 remove_described_reference (symtab_node symbol, struct ipa_cst_ref_desc *rdesc)
2504 struct ipa_ref *to_del;
2505 struct cgraph_edge *origin;
2507 origin = rdesc->cs;
2508 to_del = ipa_find_reference ((symtab_node) origin->caller, symbol,
2509 origin->call_stmt, origin->lto_stmt_uid);
2510 gcc_assert (to_del);
2511 ipa_remove_reference (to_del);
2512 if (dump_file)
2513 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2514 xstrdup (cgraph_node_name (origin->caller)),
2515 origin->caller->symbol.order, xstrdup (symtab_node_name (symbol)));
2518 /* If JFUNC has a reference description with refcount different from
2519 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2520 NULL. JFUNC must be a constant jump function. */
2522 static struct ipa_cst_ref_desc *
2523 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2525 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2526 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2527 return rdesc;
2528 else
2529 return NULL;
2532 /* Try to find a destination for indirect edge IE that corresponds to a simple
2533 call or a call of a member function pointer and where the destination is a
2534 pointer formal parameter described by jump function JFUNC. If it can be
2535 determined, return the newly direct edge, otherwise return NULL.
2536 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2538 static struct cgraph_edge *
2539 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2540 struct ipa_jump_func *jfunc,
2541 struct ipa_node_params *new_root_info)
2543 struct cgraph_edge *cs;
2544 tree target;
2545 bool agg_contents = ie->indirect_info->agg_contents;
2546 bool speculative = ie->speculative;
2547 struct ipa_cst_ref_desc *rdesc;
2549 if (ie->indirect_info->agg_contents)
2550 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2551 ie->indirect_info->offset,
2552 ie->indirect_info->by_ref);
2553 else
2554 target = ipa_value_from_jfunc (new_root_info, jfunc);
2555 if (!target)
2556 return NULL;
2557 cs = ipa_make_edge_direct_to_target (ie, target);
2559 /* FIXME: speculative edges can be handled. */
2560 if (cs && !agg_contents && !speculative
2561 && jfunc->type == IPA_JF_CONST
2562 && (rdesc = jfunc_rdesc_usable (jfunc))
2563 && --rdesc->refcount == 0)
2564 remove_described_reference ((symtab_node) cs->callee, rdesc);
2566 return cs;
2569 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2570 call based on a formal parameter which is described by jump function JFUNC
2571 and if it can be determined, make it direct and return the direct edge.
2572 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
2573 are relative to. */
2575 static struct cgraph_edge *
2576 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2577 struct ipa_jump_func *jfunc,
2578 struct ipa_node_params *new_root_info)
2580 tree binfo, target;
2582 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
2584 if (!binfo)
2585 return NULL;
2587 if (TREE_CODE (binfo) != TREE_BINFO)
2589 binfo = gimple_extract_devirt_binfo_from_cst
2590 (binfo, ie->indirect_info->otr_type);
2591 if (!binfo)
2592 return NULL;
2595 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
2596 ie->indirect_info->otr_type);
2597 if (binfo)
2598 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2599 binfo);
2600 else
2601 return NULL;
2603 if (target)
2604 return ipa_make_edge_direct_to_target (ie, target);
2605 else
2606 return NULL;
2609 /* Update the param called notes associated with NODE when CS is being inlined,
2610 assuming NODE is (potentially indirectly) inlined into CS->callee.
2611 Moreover, if the callee is discovered to be constant, create a new cgraph
2612 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2613 unless NEW_EDGES is NULL. Return true iff any new edges were created. */
2615 static bool
2616 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2617 struct cgraph_node *node,
2618 vec<cgraph_edge_p> *new_edges)
2620 struct ipa_edge_args *top;
2621 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
2622 struct ipa_node_params *new_root_info;
2623 bool res = false;
2625 ipa_check_create_edge_args ();
2626 top = IPA_EDGE_REF (cs);
2627 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
2628 ? cs->caller->global.inlined_to
2629 : cs->caller);
2631 for (ie = node->indirect_calls; ie; ie = next_ie)
2633 struct cgraph_indirect_call_info *ici = ie->indirect_info;
2634 struct ipa_jump_func *jfunc;
2635 int param_index;
2637 next_ie = ie->next_callee;
2639 if (ici->param_index == -1)
2640 continue;
2642 /* We must check the range due to calls with a variable number of arguments: */
2643 if (ici->param_index >= ipa_get_cs_argument_count (top))
2645 ici->param_index = -1;
2646 continue;
2649 param_index = ici->param_index;
2650 jfunc = ipa_get_ith_jump_func (top, param_index);
2652 if (!flag_indirect_inlining)
2653 new_direct_edge = NULL;
2654 else if (ici->polymorphic)
2655 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
2656 new_root_info);
2657 else
2658 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
2659 new_root_info);
2660 /* If speculation was removed, then we need to do nothing. */
2661 if (new_direct_edge && new_direct_edge != ie)
2663 new_direct_edge->indirect_inlining_edge = 1;
2664 top = IPA_EDGE_REF (cs);
2665 res = true;
2667 else if (new_direct_edge)
2669 new_direct_edge->indirect_inlining_edge = 1;
2670 if (new_direct_edge->call_stmt)
2671 new_direct_edge->call_stmt_cannot_inline_p
2672 = !gimple_check_call_matching_types (
2673 new_direct_edge->call_stmt,
2674 new_direct_edge->callee->symbol.decl, false);
2675 if (new_edges)
2677 new_edges->safe_push (new_direct_edge);
2678 res = true;
2680 top = IPA_EDGE_REF (cs);
2682 else if (jfunc->type == IPA_JF_PASS_THROUGH
2683 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2685 if (ici->agg_contents
2686 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
2687 ici->param_index = -1;
2688 else
2689 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
2691 else if (jfunc->type == IPA_JF_ANCESTOR)
2693 if (ici->agg_contents
2694 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
2695 ici->param_index = -1;
2696 else
2698 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
2699 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
2702 else
2703 /* Either we can find a destination for this edge now or never. */
2704 ici->param_index = -1;
2707 return res;
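/* A renumbering example for the pass-through case above (hypothetical
   indices): if the inlined callee loaded an indirect call target from its
   parameter 0, and the call site passed the caller's parameter 2 through
   unchanged as that argument, ici->param_index is rewritten from 0 to 2 so
   that the indirect call is described relative to the new root.  */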
2710 /* Recursively traverse subtree of NODE (including node) made of inlined
2711 cgraph_edges when CS has been inlined and invoke
2712 update_indirect_edges_after_inlining on all nodes and
2713 update_jump_functions_after_inlining on all non-inlined edges that lead out
2714 of this subtree. Newly discovered indirect edges will be added to
2715 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges
2716 were created. */
2718 static bool
2719 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
2720 struct cgraph_node *node,
2721 vec<cgraph_edge_p> *new_edges)
2723 struct cgraph_edge *e;
2724 bool res;
2726 res = update_indirect_edges_after_inlining (cs, node, new_edges);
2728 for (e = node->callees; e; e = e->next_callee)
2729 if (!e->inline_failed)
2730 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
2731 else
2732 update_jump_functions_after_inlining (cs, e);
2733 for (e = node->indirect_calls; e; e = e->next_callee)
2734 update_jump_functions_after_inlining (cs, e);
2736 return res;
2739 /* Combine two controlled uses counts as done during inlining. */
2741 static int
2742 combine_controlled_uses_counters (int c, int d)
2744 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
2745 return IPA_UNDESCRIBED_USE;
2746 else
2747 return c + d - 1;
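/* For instance, with hypothetical counts: if the caller has c = 3 controlled
   uses of the value and the inlined callee had d = 2 controlled uses of the
   corresponding parameter, the result is 3 + 2 - 1 = 4, because the argument
   of the now-inlined call site stops being a separate use.  */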
2750 /* Propagate the number of controlled users from CS->callee to the new root of
2751 the tree of inlined nodes. */
2753 static void
2754 propagate_controlled_uses (struct cgraph_edge *cs)
2756 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
2757 struct cgraph_node *new_root = cs->caller->global.inlined_to
2758 ? cs->caller->global.inlined_to : cs->caller;
2759 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
2760 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
2761 int count, i;
2763 count = MIN (ipa_get_cs_argument_count (args),
2764 ipa_get_param_count (old_root_info));
2765 for (i = 0; i < count; i++)
2767 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2768 struct ipa_cst_ref_desc *rdesc;
2770 if (jf->type == IPA_JF_PASS_THROUGH)
2772 int src_idx, c, d;
2773 src_idx = ipa_get_jf_pass_through_formal_id (jf);
2774 c = ipa_get_controlled_uses (new_root_info, src_idx);
2775 d = ipa_get_controlled_uses (old_root_info, i);
2777 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
2778 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
2779 c = combine_controlled_uses_counters (c, d);
2780 ipa_set_controlled_uses (new_root_info, src_idx, c);
2781 if (c == 0 && new_root_info->ipcp_orig_node)
2783 struct cgraph_node *n;
2784 struct ipa_ref *ref;
2785 tree t = new_root_info->known_vals[src_idx];
2787 if (t && TREE_CODE (t) == ADDR_EXPR
2788 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
2789 && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
2790 && (ref = ipa_find_reference ((symtab_node) new_root,
2791 (symtab_node) n, NULL, 0)))
2793 if (dump_file)
2794 fprintf (dump_file, "ipa-prop: Removing cloning-created "
2795 "reference from %s/%i to %s/%i.\n",
2796 xstrdup (cgraph_node_name (new_root)),
2797 new_root->symbol.order,
2798 xstrdup (cgraph_node_name (n)), n->symbol.order);
2799 ipa_remove_reference (ref);
2803 else if (jf->type == IPA_JF_CONST
2804 && (rdesc = jfunc_rdesc_usable (jf)))
2806 int d = ipa_get_controlled_uses (old_root_info, i);
2807 int c = rdesc->refcount;
2808 rdesc->refcount = combine_controlled_uses_counters (c, d);
2809 if (rdesc->refcount == 0)
2811 tree cst = ipa_get_jf_constant (jf);
2812 struct cgraph_node *n;
2813 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
2814 && TREE_CODE (TREE_OPERAND (cst, 0))
2815 == FUNCTION_DECL);
2816 n = cgraph_get_node (TREE_OPERAND (cst, 0));
2817 if (n)
2819 struct cgraph_node *clone;
2820 remove_described_reference ((symtab_node) n, rdesc);
2822 clone = cs->caller;
2823 while (clone->global.inlined_to
2824 && clone != rdesc->cs->caller
2825 && IPA_NODE_REF (clone)->ipcp_orig_node)
2827 struct ipa_ref *ref;
2828 ref = ipa_find_reference ((symtab_node) clone,
2829 (symtab_node) n, NULL, 0);
2830 if (ref)
2832 if (dump_file)
2833 fprintf (dump_file, "ipa-prop: Removing "
2834 "cloning-created reference "
2835 "from %s/%i to %s/%i.\n",
2836 xstrdup (cgraph_node_name (clone)),
2837 clone->symbol.order,
2838 xstrdup (cgraph_node_name (n)),
2839 n->symbol.order);
2840 ipa_remove_reference (ref);
2842 clone = clone->callers->caller;
2849 for (i = ipa_get_param_count (old_root_info);
2850 i < ipa_get_cs_argument_count (args);
2851 i++)
2853 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2855 if (jf->type == IPA_JF_CONST)
2857 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
2858 if (rdesc)
2859 rdesc->refcount = IPA_UNDESCRIBED_USE;
2861 else if (jf->type == IPA_JF_PASS_THROUGH)
2862 ipa_set_controlled_uses (new_root_info,
2863 jf->value.pass_through.formal_id,
2864 IPA_UNDESCRIBED_USE);
2868 /* Update jump functions and call note functions on inlining the call site CS.
2869 CS is expected to lead to a node already cloned by
2870 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
2871 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges
2872 were created. */
2874 bool
2875 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
2876 vec<cgraph_edge_p> *new_edges)
2878 bool changed;
2879 /* Do nothing if the preparation phase has not been carried out yet
2880 (i.e. during early inlining). */
2881 if (!ipa_node_params_vector.exists ())
2882 return false;
2883 gcc_assert (ipa_edge_args_vector);
2885 propagate_controlled_uses (cs);
2886 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
2888 return changed;
2891 /* Frees all dynamically allocated structures that the argument info points
2892 to. */
2894 void
2895 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
2897 vec_free (args->jump_functions);
2898 memset (args, 0, sizeof (*args));
2901 /* Free all ipa_edge_args structures. */
2903 void
2904 ipa_free_all_edge_args (void)
2906 int i;
2907 struct ipa_edge_args *args;
2909 if (!ipa_edge_args_vector)
2910 return;
2912 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
2913 ipa_free_edge_args_substructures (args);
2915 vec_free (ipa_edge_args_vector);
2918 /* Frees all dynamically allocated structures that the param info points
2919 to. */
2921 void
2922 ipa_free_node_params_substructures (struct ipa_node_params *info)
2924 info->descriptors.release ();
2925 free (info->lattices);
2926 /* Lattice values and their sources are deallocated with their allocation
2927 pool. */
2928 info->known_vals.release ();
2929 memset (info, 0, sizeof (*info));
2932 /* Free all ipa_node_params structures. */
2934 void
2935 ipa_free_all_node_params (void)
2937 int i;
2938 struct ipa_node_params *info;
2940 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
2941 ipa_free_node_params_substructures (info);
2943 ipa_node_params_vector.release ();
2946 /* Set the aggregate replacements of NODE to be AGGVALS. */
2948 void
2949 ipa_set_node_agg_value_chain (struct cgraph_node *node,
2950 struct ipa_agg_replacement_value *aggvals)
2952 if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
2953 vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);
2955 (*ipa_node_agg_replacements)[node->uid] = aggvals;
2958 /* Hook that is called by cgraph.c when an edge is removed. */
2960 static void
2961 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
2963 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
2964 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
2965 return;
2966 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
2969 /* Hook that is called by cgraph.c when a node is removed. */
2971 static void
2972 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2974 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
2975 if (ipa_node_params_vector.length () > (unsigned)node->uid)
2976 ipa_free_node_params_substructures (IPA_NODE_REF (node));
2977 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
2978 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
2981 /* Hook that is called by cgraph.c when an edge is duplicated. */
2983 static void
2984 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
2985 __attribute__((unused)) void *data)
2987 struct ipa_edge_args *old_args, *new_args;
2988 unsigned int i;
2990 ipa_check_create_edge_args ();
2992 old_args = IPA_EDGE_REF (src);
2993 new_args = IPA_EDGE_REF (dst);
2995 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
2997 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
2999 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3000 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3002 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3004 if (src_jf->type == IPA_JF_CONST)
3006 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3008 if (!src_rdesc)
3009 dst_jf->value.constant.rdesc = NULL;
3010 else if (src_rdesc->cs == src)
3012 struct ipa_cst_ref_desc *dst_rdesc;
3013 gcc_checking_assert (ipa_refdesc_pool);
3014 dst_rdesc
3015 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3016 dst_rdesc->cs = dst;
3017 dst_rdesc->refcount = src_rdesc->refcount;
3018 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3019 src_rdesc->next_duplicate = dst_rdesc;
3020 dst_jf->value.constant.rdesc = dst_rdesc;
3022 else
3024 struct ipa_cst_ref_desc *dst_rdesc;
3025 /* This can happen during inlining, when a JFUNC can refer to a
3026 reference taken in a function up in the tree of inline clones.
3027 We need to find the duplicate that refers to our tree of
3028 inline clones. */
3030 gcc_assert (dst->caller->global.inlined_to);
3031 for (dst_rdesc = src_rdesc->next_duplicate;
3032 dst_rdesc;
3033 dst_rdesc = dst_rdesc->next_duplicate)
3035 struct cgraph_node *top;
3036 top = dst_rdesc->cs->caller->global.inlined_to
3037 ? dst_rdesc->cs->caller->global.inlined_to
3038 : dst_rdesc->cs->caller;
3039 if (dst->caller->global.inlined_to == top)
3040 break;
3042 gcc_assert (dst_rdesc);
3043 dst_jf->value.constant.rdesc = dst_rdesc;
3049 /* Hook that is called by cgraph.c when a node is duplicated. */
3051 static void
3052 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
3053 ATTRIBUTE_UNUSED void *data)
3055 struct ipa_node_params *old_info, *new_info;
3056 struct ipa_agg_replacement_value *old_av, *new_av;
3058 ipa_check_create_node_params ();
3059 old_info = IPA_NODE_REF (src);
3060 new_info = IPA_NODE_REF (dst);
3062 new_info->descriptors = old_info->descriptors.copy ();
3063 new_info->lattices = NULL;
3064 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3066 new_info->uses_analysis_done = old_info->uses_analysis_done;
3067 new_info->node_enqueued = old_info->node_enqueued;
3069 old_av = ipa_get_agg_replacements_for_node (src);
3070 if (!old_av)
3071 return;
3073 new_av = NULL;
3074 while (old_av)
3076 struct ipa_agg_replacement_value *v;
3078 v = ggc_alloc_ipa_agg_replacement_value ();
3079 memcpy (v, old_av, sizeof (*v));
3080 v->next = new_av;
3081 new_av = v;
3082 old_av = old_av->next;
3084 ipa_set_node_agg_value_chain (dst, new_av);
3088 /* Analyze a function newly added to the callgraph. */
3090 static void
3091 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3093 ipa_analyze_node (node);
3096 /* Register our cgraph hooks if they are not already there. */
3098 void
3099 ipa_register_cgraph_hooks (void)
3101 if (!edge_removal_hook_holder)
3102 edge_removal_hook_holder =
3103 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3104 if (!node_removal_hook_holder)
3105 node_removal_hook_holder =
3106 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
3107 if (!edge_duplication_hook_holder)
3108 edge_duplication_hook_holder =
3109 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3110 if (!node_duplication_hook_holder)
3111 node_duplication_hook_holder =
3112 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
3113 function_insertion_hook_holder =
3114 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
3117 /* Unregister our cgraph hooks. */
3119 static void
3120 ipa_unregister_cgraph_hooks (void)
3122 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
3123 edge_removal_hook_holder = NULL;
3124 cgraph_remove_node_removal_hook (node_removal_hook_holder);
3125 node_removal_hook_holder = NULL;
3126 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
3127 edge_duplication_hook_holder = NULL;
3128 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
3129 node_duplication_hook_holder = NULL;
3130 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
3131 function_insertion_hook_holder = NULL;
3134 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3135 longer needed after ipa-cp. */
3137 void
3138 ipa_free_all_structures_after_ipa_cp (void)
3140 if (!optimize)
3142 ipa_free_all_edge_args ();
3143 ipa_free_all_node_params ();
3144 free_alloc_pool (ipcp_sources_pool);
3145 free_alloc_pool (ipcp_values_pool);
3146 free_alloc_pool (ipcp_agg_lattice_pool);
3147 ipa_unregister_cgraph_hooks ();
3148 if (ipa_refdesc_pool)
3149 free_alloc_pool (ipa_refdesc_pool);
3153 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3154 longer needed after indirect inlining. */
3156 void
3157 ipa_free_all_structures_after_iinln (void)
3159 ipa_free_all_edge_args ();
3160 ipa_free_all_node_params ();
3161 ipa_unregister_cgraph_hooks ();
3162 if (ipcp_sources_pool)
3163 free_alloc_pool (ipcp_sources_pool);
3164 if (ipcp_values_pool)
3165 free_alloc_pool (ipcp_values_pool);
3166 if (ipcp_agg_lattice_pool)
3167 free_alloc_pool (ipcp_agg_lattice_pool);
3168 if (ipa_refdesc_pool)
3169 free_alloc_pool (ipa_refdesc_pool);
3172 /* Print the ipa_tree_map data structures (parameter descriptors) of
3173 function NODE to F. */
3175 void
3176 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3178 int i, count;
3179 struct ipa_node_params *info;
3181 if (!node->symbol.definition)
3182 return;
3183 info = IPA_NODE_REF (node);
3184 fprintf (f, " function %s/%i parameter descriptors:\n",
3185 cgraph_node_name (node), node->symbol.order);
3186 count = ipa_get_param_count (info);
3187 for (i = 0; i < count; i++)
3189 int c;
3191 ipa_dump_param (f, info, i);
3192 if (ipa_is_param_used (info, i))
3193 fprintf (f, " used");
3194 c = ipa_get_controlled_uses (info, i);
3195 if (c == IPA_UNDESCRIBED_USE)
3196 fprintf (f, " undescribed_use");
3197 else
3198 fprintf (f, " controlled_uses=%i", c);
3199 fprintf (f, "\n");
3203 /* Print ipa_tree_map data structures of all functions in the
3204 callgraph to F. */
3206 void
3207 ipa_print_all_params (FILE * f)
3209 struct cgraph_node *node;
3211 fprintf (f, "\nFunction parameters:\n");
3212 FOR_EACH_FUNCTION (node)
3213 ipa_print_node_params (f, node);
3216 /* Return a heap allocated vector containing formal parameters of FNDECL. */
3218 vec<tree>
3219 ipa_get_vector_of_formal_parms (tree fndecl)
3221 vec<tree> args;
3222 int count;
3223 tree parm;
3225 gcc_assert (!flag_wpa);
3226 count = count_formal_params (fndecl);
3227 args.create (count);
3228 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3229 args.quick_push (parm);
3231 return args;
3234 /* Return a heap allocated vector containing types of formal parameters of
3235 function type FNTYPE. */
3237 static inline vec<tree>
3238 get_vector_of_formal_parm_types (tree fntype)
3240 vec<tree> types;
3241 int count = 0;
3242 tree t;
3244 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3245 count++;
3247 types.create (count);
3248 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3249 types.quick_push (TREE_VALUE (t));
3251 return types;
3254 /* Modify the function declaration FNDECL and its type according to the plan in
3255 ADJUSTMENTS. It also sets the base fields of individual adjustment structures
3256 to reflect the actual parameters being modified, which are determined by the
3257 base_index field. */
3259 void
3260 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
3261 const char *synth_parm_prefix)
3263 vec<tree> oparms, otypes;
3264 tree orig_type, new_type = NULL;
3265 tree old_arg_types, t, new_arg_types = NULL;
3266 tree parm, *link = &DECL_ARGUMENTS (fndecl);
3267 int i, len = adjustments.length ();
3268 tree new_reversed = NULL;
3269 bool care_for_types, last_parm_void;
3271 if (!synth_parm_prefix)
3272 synth_parm_prefix = "SYNTH";
3274 oparms = ipa_get_vector_of_formal_parms (fndecl);
3275 orig_type = TREE_TYPE (fndecl);
3276 old_arg_types = TYPE_ARG_TYPES (orig_type);
3278 /* The following test is an ugly hack: some functions simply don't have any
3279 arguments in their type. This is probably a bug but well... */
3280 care_for_types = (old_arg_types != NULL_TREE);
3281 if (care_for_types)
3283 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3284 == void_type_node);
3285 otypes = get_vector_of_formal_parm_types (orig_type);
3286 if (last_parm_void)
3287 gcc_assert (oparms.length () + 1 == otypes.length ());
3288 else
3289 gcc_assert (oparms.length () == otypes.length ());
3291 else
3293 last_parm_void = false;
3294 otypes.create (0);
3297 for (i = 0; i < len; i++)
3299 struct ipa_parm_adjustment *adj;
3300 gcc_assert (link);
3302 adj = &adjustments[i];
3303 parm = oparms[adj->base_index];
3304 adj->base = parm;
3306 if (adj->copy_param)
3308 if (care_for_types)
3309 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3310 new_arg_types);
3311 *link = parm;
3312 link = &DECL_CHAIN (parm);
3314 else if (!adj->remove_param)
3316 tree new_parm;
3317 tree ptype;
3319 if (adj->by_ref)
3320 ptype = build_pointer_type (adj->type);
3321 else
3322 ptype = adj->type;
3324 if (care_for_types)
3325 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3327 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3328 ptype);
3329 DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);
3331 DECL_ARTIFICIAL (new_parm) = 1;
3332 DECL_ARG_TYPE (new_parm) = ptype;
3333 DECL_CONTEXT (new_parm) = fndecl;
3334 TREE_USED (new_parm) = 1;
3335 DECL_IGNORED_P (new_parm) = 1;
3336 layout_decl (new_parm, 0);
3338 adj->base = parm;
3339 adj->reduction = new_parm;
3341 *link = new_parm;
3343 link = &DECL_CHAIN (new_parm);
3347 *link = NULL_TREE;
3349 if (care_for_types)
3351 new_reversed = nreverse (new_arg_types);
3352 if (last_parm_void)
3354 if (new_reversed)
3355 TREE_CHAIN (new_arg_types) = void_list_node;
3356 else
3357 new_reversed = void_list_node;
3361 /* Use build_distinct_type_copy to preserve as much as possible from the
3362 original type (debug info, attribute lists etc.).
3363 The exception is that METHOD_TYPEs must have a THIS argument.
3364 When we are asked to remove it, we need to build a new FUNCTION_TYPE
3365 instead. */
3366 if (TREE_CODE (orig_type) != METHOD_TYPE
3367 || (adjustments[0].copy_param
3368 && adjustments[0].base_index == 0))
3370 new_type = build_distinct_type_copy (orig_type);
3371 TYPE_ARG_TYPES (new_type) = new_reversed;
3373 else
3375 new_type
3376 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3377 new_reversed));
3378 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3379 DECL_VINDEX (fndecl) = NULL_TREE;
3382 /* When signature changes, we need to clear builtin info. */
3383 if (DECL_BUILT_IN (fndecl))
3385 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3386 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3389 /* This is a new type, not a copy of an old type. Need to reassociate
3390 variants. We can handle everything except the main variant lazily. */
3391 t = TYPE_MAIN_VARIANT (orig_type);
3392 if (orig_type != t)
3394 TYPE_MAIN_VARIANT (new_type) = t;
3395 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
3396 TYPE_NEXT_VARIANT (t) = new_type;
3398 else
3400 TYPE_MAIN_VARIANT (new_type) = new_type;
3401 TYPE_NEXT_VARIANT (new_type) = NULL;
3404 TREE_TYPE (fndecl) = new_type;
3405 DECL_VIRTUAL_P (fndecl) = 0;
3406 otypes.release ();
3407 oparms.release ();
3410 /* Modify the actual arguments of the function call STMT as indicated in
3411 ADJUSTMENTS. If this is a directly recursive call, CS must be NULL.
3412 Otherwise it must contain the corresponding call graph edge. */
3414 void
3415 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
3416 ipa_parm_adjustment_vec adjustments)
3418 struct cgraph_node *current_node = cgraph_get_node (current_function_decl);
3419 vec<tree> vargs;
3420 vec<tree, va_gc> **debug_args = NULL;
3421 gimple new_stmt;
3422 gimple_stmt_iterator gsi, prev_gsi;
3423 tree callee_decl;
3424 int i, len;
3426 len = adjustments.length ();
3427 vargs.create (len);
3428 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->symbol.decl;
3429 ipa_remove_stmt_references ((symtab_node) current_node, stmt);
3431 gsi = gsi_for_stmt (stmt);
3432 prev_gsi = gsi;
3433 gsi_prev (&prev_gsi);
3434 for (i = 0; i < len; i++)
3436 struct ipa_parm_adjustment *adj;
3438 adj = &adjustments[i];
3440 if (adj->copy_param)
3442 tree arg = gimple_call_arg (stmt, adj->base_index);
3444 vargs.quick_push (arg);
3446 else if (!adj->remove_param)
3448 tree expr, base, off;
3449 location_t loc;
3450 unsigned int deref_align;
3451 bool deref_base = false;
3453 /* We create a new parameter out of the value of the old one; we can
3454 do the following kinds of transformations:
3456 - A scalar passed by reference is converted to a scalar passed by
3457 value. (adj->by_ref is false and the type of the original
3458 actual argument is a pointer to a scalar).
3460 - A part of an aggregate is passed instead of the whole aggregate.
3461 The part can be passed either by value or by reference, this is
3462 determined by value of adj->by_ref. Moreover, the code below
3463 handles both situations when the original aggregate is passed by
3464 value (its type is not a pointer) and when it is passed by
3465 reference (it is a pointer to an aggregate).
3467 When the new argument is passed by reference (adj->by_ref is true)
3468 it must be a part of an aggregate and therefore we form it by
3469 simply taking the address of a reference inside the original
3470 aggregate. */
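/* A hypothetical instance of the first transformation: if the callee was
   changed from int f (int *p) to int f (int v), a call f (&x) is rewritten
   below so that the new actual argument is the loaded value, built as a
   MEM_REF based on the old argument &x at offset 0.  */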
3472 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3473 base = gimple_call_arg (stmt, adj->base_index);
3474 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3475 : EXPR_LOCATION (base);
3477 if (TREE_CODE (base) != ADDR_EXPR
3478 && POINTER_TYPE_P (TREE_TYPE (base)))
3479 off = build_int_cst (adj->alias_ptr_type,
3480 adj->offset / BITS_PER_UNIT);
3481 else
3483 HOST_WIDE_INT base_offset;
3484 tree prev_base;
3485 bool addrof;
3487 if (TREE_CODE (base) == ADDR_EXPR)
3489 base = TREE_OPERAND (base, 0);
3490 addrof = true;
3492 else
3493 addrof = false;
3494 prev_base = base;
3495 base = get_addr_base_and_unit_offset (base, &base_offset);
3496 /* Aggregate arguments can have non-invariant addresses. */
3497 if (!base)
3499 base = build_fold_addr_expr (prev_base);
3500 off = build_int_cst (adj->alias_ptr_type,
3501 adj->offset / BITS_PER_UNIT);
3503 else if (TREE_CODE (base) == MEM_REF)
3505 if (!addrof)
3507 deref_base = true;
3508 deref_align = TYPE_ALIGN (TREE_TYPE (base));
3510 off = build_int_cst (adj->alias_ptr_type,
3511 base_offset
3512 + adj->offset / BITS_PER_UNIT);
3513 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
3514 off);
3515 base = TREE_OPERAND (base, 0);
3517 else
3519 off = build_int_cst (adj->alias_ptr_type,
3520 base_offset
3521 + adj->offset / BITS_PER_UNIT);
3522 base = build_fold_addr_expr (base);
3526 if (!adj->by_ref)
3528 tree type = adj->type;
3529 unsigned int align;
3530 unsigned HOST_WIDE_INT misalign;
3532 if (deref_base)
3534 align = deref_align;
3535 misalign = 0;
3537 else
3539 get_pointer_alignment_1 (base, &align, &misalign);
3540 if (TYPE_ALIGN (type) > align)
3541 align = TYPE_ALIGN (type);
3543 misalign += (tree_to_double_int (off)
3544 .sext (TYPE_PRECISION (TREE_TYPE (off))).low
3545 * BITS_PER_UNIT);
3546 misalign = misalign & (align - 1);
3547 if (misalign != 0)
3548 align = (misalign & -misalign);
3549 if (align < TYPE_ALIGN (type))
3550 type = build_aligned_type (type, align);
3551 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
3553 else
3555 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
3556 expr = build_fold_addr_expr (expr);
3559 expr = force_gimple_operand_gsi (&gsi, expr,
3560 adj->by_ref
3561 || is_gimple_reg_type (adj->type),
3562 NULL, true, GSI_SAME_STMT);
3563 vargs.quick_push (expr);
3565 if (!adj->copy_param && MAY_HAVE_DEBUG_STMTS)
3567 unsigned int ix;
3568 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
3569 gimple def_temp;
3571 arg = gimple_call_arg (stmt, adj->base_index);
3572 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
3574 if (!fold_convertible_p (TREE_TYPE (origin), arg))
3575 continue;
3576 arg = fold_convert_loc (gimple_location (stmt),
3577 TREE_TYPE (origin), arg);
3579 if (debug_args == NULL)
3580 debug_args = decl_debug_args_insert (callee_decl);
3581 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
3582 if (ddecl == origin)
3584 ddecl = (**debug_args)[ix + 1];
3585 break;
3587 if (ddecl == NULL)
3589 ddecl = make_node (DEBUG_EXPR_DECL);
3590 DECL_ARTIFICIAL (ddecl) = 1;
3591 TREE_TYPE (ddecl) = TREE_TYPE (origin);
3592 DECL_MODE (ddecl) = DECL_MODE (origin);
3594 vec_safe_push (*debug_args, origin);
3595 vec_safe_push (*debug_args, ddecl);
3597 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
3598 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
3602 if (dump_file && (dump_flags & TDF_DETAILS))
3604 fprintf (dump_file, "replacing stmt:");
3605 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
3608 new_stmt = gimple_build_call_vec (callee_decl, vargs);
3609 vargs.release ();
3610 if (gimple_call_lhs (stmt))
3611 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3613 gimple_set_block (new_stmt, gimple_block (stmt));
3614 if (gimple_has_location (stmt))
3615 gimple_set_location (new_stmt, gimple_location (stmt));
3616 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3617 gimple_call_copy_flags (new_stmt, stmt);
3619 if (dump_file && (dump_flags & TDF_DETAILS))
3621 fprintf (dump_file, "with stmt:");
3622 print_gimple_stmt (dump_file, new_stmt, 0, 0);
3623 fprintf (dump_file, "\n");
3625 gsi_replace (&gsi, new_stmt, true);
3626 if (cs)
3627 cgraph_set_call_stmt (cs, new_stmt);
3630 ipa_record_stmt_references (current_node, gsi_stmt (gsi));
3631 gsi_prev (&gsi);
3633 while ((gsi_end_p (prev_gsi) && !gsi_end_p (gsi))
3634 || (!gsi_end_p (prev_gsi) && gsi_stmt (gsi) == gsi_stmt (prev_gsi)));
3636 update_ssa (TODO_update_ssa);
3637 free_dominance_info (CDI_DOMINATORS);
3640 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
3642 static bool
3643 index_in_adjustments_multiple_times_p (int base_index,
3644 ipa_parm_adjustment_vec adjustments)
3646 int i, len = adjustments.length ();
3647 bool one = false;
3649 for (i = 0; i < len; i++)
3651 struct ipa_parm_adjustment *adj;
3652 adj = &adjustments[i];
3654 if (adj->base_index == base_index)
3656 if (one)
3657 return true;
3658 else
3659 one = true;
3662 return false;
3666 /* Return adjustments that should have the same effect on function parameters
3667 and call arguments as if they were first changed according to adjustments in
3668 INNER and then by adjustments in OUTER. */
3670 ipa_parm_adjustment_vec
3671 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
3672 ipa_parm_adjustment_vec outer)
3674 int i, outlen = outer.length ();
3675 int inlen = inner.length ();
3676 int removals = 0;
3677 ipa_parm_adjustment_vec adjustments, tmp;
3679 tmp.create (inlen);
3680 for (i = 0; i < inlen; i++)
3682 struct ipa_parm_adjustment *n;
3683 n = &inner[i];
3685 if (n->remove_param)
3686 removals++;
3687 else
3688 tmp.quick_push (*n);
3691 adjustments.create (outlen + removals);
3692 for (i = 0; i < outlen; i++)
3694 struct ipa_parm_adjustment r;
3695 struct ipa_parm_adjustment *out = &outer[i];
3696 struct ipa_parm_adjustment *in = &tmp[out->base_index];
3698 memset (&r, 0, sizeof (r));
3699 gcc_assert (!in->remove_param);
3700 if (out->remove_param)
3702 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
3704 r.remove_param = true;
3705 adjustments.quick_push (r);
3707 continue;
3710 r.base_index = in->base_index;
3711 r.type = out->type;
3713 /* FIXME: Create nonlocal value too. */
3715 if (in->copy_param && out->copy_param)
3716 r.copy_param = true;
3717 else if (in->copy_param)
3718 r.offset = out->offset;
3719 else if (out->copy_param)
3720 r.offset = in->offset;
3721 else
3722 r.offset = in->offset + out->offset;
3723 adjustments.quick_push (r);
3726 for (i = 0; i < inlen; i++)
3728 struct ipa_parm_adjustment *n = &inner[i];
3730 if (n->remove_param)
3731 adjustments.quick_push (*n);
3734 tmp.release ();
3735 return adjustments;
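/* A worked example (hypothetical adjustment vectors): let INNER turn
   (a, b, c) into (a, c) by removing b, and let OUTER then remove the first
   parameter of the new signature. The combined vector maps the original
   (a, b, c) straight to (c): it removes a, copies c, and carries over the
   removal of b.  */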
3738 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a human-friendly
3739 way, assuming they are meant to be applied to FNDECL. */
3741 void
3742 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
3743 tree fndecl)
3745 int i, len = adjustments.length ();
3746 bool first = true;
3747 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
3749 fprintf (file, "IPA param adjustments: ");
3750 for (i = 0; i < len; i++)
3752 struct ipa_parm_adjustment *adj;
3753 adj = &adjustments[i];
3755 if (!first)
3756 fprintf (file, " ");
3757 else
3758 first = false;
3760 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
3761 print_generic_expr (file, parms[adj->base_index], 0);
3762 if (adj->base)
3764 fprintf (file, ", base: ");
3765 print_generic_expr (file, adj->base, 0);
3767 if (adj->reduction)
3769 fprintf (file, ", reduction: ");
3770 print_generic_expr (file, adj->reduction, 0);
3772 if (adj->new_ssa_base)
3774 fprintf (file, ", new_ssa_base: ");
3775 print_generic_expr (file, adj->new_ssa_base, 0);
3778 if (adj->copy_param)
3779 fprintf (file, ", copy_param");
3780 else if (adj->remove_param)
3781 fprintf (file, ", remove_param");
3782 else
3783 fprintf (file, ", offset %li", (long) adj->offset);
3784 if (adj->by_ref)
3785 fprintf (file, ", by_ref");
3786 print_node_brief (file, ", type: ", adj->type, 0);
3787 fprintf (file, "\n");
3789 parms.release ();
3792 /* Dump the AV linked list. */
3794 void
3795 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
3797 bool comma = false;
3798 fprintf (f, " Aggregate replacements:");
3799 for (; av; av = av->next)
3801 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
3802 av->index, av->offset);
3803 print_generic_expr (f, av->value, 0);
3804 comma = true;
3806 fprintf (f, "\n");
3809 /* Stream out jump function JUMP_FUNC to OB. */
3811 static void
3812 ipa_write_jump_function (struct output_block *ob,
3813 struct ipa_jump_func *jump_func)
3815 struct ipa_agg_jf_item *item;
3816 struct bitpack_d bp;
3817 int i, count;
3819 streamer_write_uhwi (ob, jump_func->type);
3820 switch (jump_func->type)
3822 case IPA_JF_UNKNOWN:
3823 break;
3824 case IPA_JF_KNOWN_TYPE:
3825 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
3826 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
3827 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
3828 break;
3829 case IPA_JF_CONST:
3830 gcc_assert (
3831 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
3832 stream_write_tree (ob, jump_func->value.constant.value, true);
3833 break;
3834 case IPA_JF_PASS_THROUGH:
3835 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
3836 if (jump_func->value.pass_through.operation == NOP_EXPR)
3838 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
3839 bp = bitpack_create (ob->main_stream);
3840 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
3841 bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
3842 streamer_write_bitpack (&bp);
3844 else
3846 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
3847 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
3849 break;
3850 case IPA_JF_ANCESTOR:
3851 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
3852 stream_write_tree (ob, jump_func->value.ancestor.type, true);
3853 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
3854 bp = bitpack_create (ob->main_stream);
3855 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
3856 bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
3857 streamer_write_bitpack (&bp);
3858 break;
3861 count = vec_safe_length (jump_func->agg.items);
3862 streamer_write_uhwi (ob, count);
3863 if (count)
3865 bp = bitpack_create (ob->main_stream);
3866 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
3867 streamer_write_bitpack (&bp);
3870 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
3872 streamer_write_uhwi (ob, item->offset);
3873 stream_write_tree (ob, item->value, true);
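/* Note: the layout written above -- type tag, the per-type payload, the
   aggregate item count, the by_ref bit-pack and finally the items -- must
   be read back in exactly the same order by ipa_read_jump_function below;
   any change here has to be mirrored there.  */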
3877 /* Read in jump function JUMP_FUNC for call graph edge CS from IB, using DATA_IN to look up referenced trees. */
3879 static void
3880 ipa_read_jump_function (struct lto_input_block *ib,
3881 struct ipa_jump_func *jump_func,
3882 struct cgraph_edge *cs,
3883 struct data_in *data_in)
3885 enum jump_func_type jftype;
3886 enum tree_code operation;
3887 int i, count;
3889 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
3890 switch (jftype)
3892 case IPA_JF_UNKNOWN:
3893 jump_func->type = IPA_JF_UNKNOWN;
3894 break;
3895 case IPA_JF_KNOWN_TYPE:
3897 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
3898 tree base_type = stream_read_tree (ib, data_in);
3899 tree component_type = stream_read_tree (ib, data_in);
3901 ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
3902 break;
3904 case IPA_JF_CONST:
3905 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
3906 break;
3907 case IPA_JF_PASS_THROUGH:
3908 operation = (enum tree_code) streamer_read_uhwi (ib);
3909 if (operation == NOP_EXPR)
3911 int formal_id = streamer_read_uhwi (ib);
3912 struct bitpack_d bp = streamer_read_bitpack (ib);
3913 bool agg_preserved = bp_unpack_value (&bp, 1);
3914 bool type_preserved = bp_unpack_value (&bp, 1);
3915 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
3916 type_preserved);
3918 else
3920 tree operand = stream_read_tree (ib, data_in);
3921 int formal_id = streamer_read_uhwi (ib);
3922 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
3923 operation);
3925 break;
3926 case IPA_JF_ANCESTOR:
3928 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
3929 tree type = stream_read_tree (ib, data_in);
3930 int formal_id = streamer_read_uhwi (ib);
3931 struct bitpack_d bp = streamer_read_bitpack (ib);
3932 bool agg_preserved = bp_unpack_value (&bp, 1);
3933 bool type_preserved = bp_unpack_value (&bp, 1);
3935 ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
3936 type_preserved);
3937 break;
3941 count = streamer_read_uhwi (ib);
3942 vec_alloc (jump_func->agg.items, count);
3943 if (count)
3945 struct bitpack_d bp = streamer_read_bitpack (ib);
3946 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
3948 for (i = 0; i < count; i++)
3950 struct ipa_agg_jf_item item;
3951 item.offset = streamer_read_uhwi (ib);
3952 item.value = stream_read_tree (ib, data_in);
3953 jump_func->agg.items->quick_push (item);
3957 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
3958 relevant to indirect inlining to OB. */
3960 static void
3961 ipa_write_indirect_edge_info (struct output_block *ob,
3962 struct cgraph_edge *cs)
3964 struct cgraph_indirect_call_info *ii = cs->indirect_info;
3965 struct bitpack_d bp;
3967 streamer_write_hwi (ob, ii->param_index);
3968 streamer_write_hwi (ob, ii->offset);
3969 bp = bitpack_create (ob->main_stream);
3970 bp_pack_value (&bp, ii->polymorphic, 1);
3971 bp_pack_value (&bp, ii->agg_contents, 1);
3972 bp_pack_value (&bp, ii->member_ptr, 1);
3973 bp_pack_value (&bp, ii->by_ref, 1);
3974 streamer_write_bitpack (&bp);
3976 if (ii->polymorphic)
3978 streamer_write_hwi (ob, ii->otr_token);
3979 stream_write_tree (ob, ii->otr_type, true);
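/* The bit-pack order above (polymorphic, agg_contents, member_ptr, by_ref)
   is part of the LTO stream format and must match the unpacking order in
   ipa_read_indirect_edge_info below.  */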
3983 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
3984 relevant to indirect inlining from IB. */
3986 static void
3987 ipa_read_indirect_edge_info (struct lto_input_block *ib,
3988 struct data_in *data_in,
3989 struct cgraph_edge *cs)
3991 struct cgraph_indirect_call_info *ii = cs->indirect_info;
3992 struct bitpack_d bp;
3994 ii->param_index = (int) streamer_read_hwi (ib);
3995 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
3996 bp = streamer_read_bitpack (ib);
3997 ii->polymorphic = bp_unpack_value (&bp, 1);
3998 ii->agg_contents = bp_unpack_value (&bp, 1);
3999 ii->member_ptr = bp_unpack_value (&bp, 1);
4000 ii->by_ref = bp_unpack_value (&bp, 1);
4001 if (ii->polymorphic)
4003 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4004 ii->otr_type = stream_read_tree (ib, data_in);
4008 /* Stream out NODE info to OB. */
4010 static void
4011 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4013 int node_ref;
4014 lto_symtab_encoder_t encoder;
4015 struct ipa_node_params *info = IPA_NODE_REF (node);
4016 int j;
4017 struct cgraph_edge *e;
4018 struct bitpack_d bp;
4020 encoder = ob->decl_state->symtab_node_encoder;
4021 node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
4022 streamer_write_uhwi (ob, node_ref);
4024 streamer_write_uhwi (ob, ipa_get_param_count (info));
4025 for (j = 0; j < ipa_get_param_count (info); j++)
4026 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4027 bp = bitpack_create (ob->main_stream);
4028 gcc_assert (info->uses_analysis_done
4029 || ipa_get_param_count (info) == 0);
4030 gcc_assert (!info->node_enqueued);
4031 gcc_assert (!info->ipcp_orig_node);
4032 for (j = 0; j < ipa_get_param_count (info); j++)
4033 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4034 streamer_write_bitpack (&bp);
4035 for (j = 0; j < ipa_get_param_count (info); j++)
4036 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4037 for (e = node->callees; e; e = e->next_callee)
4039 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4041 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4042 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4043 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4045 for (e = node->indirect_calls; e; e = e->next_callee)
4047 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4049 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4050 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4051 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4052 ipa_write_indirect_edge_info (ob, e);
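/* The per-node record written above is therefore: node reference, parameter
   count, move costs, a bit-pack of the "param used" flags, the controlled
   use counts, and then argument counts and jump functions for every direct
   and indirect call, with indirect edge info appended for the latter.  */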
4056 /* Stream in NODE info from IB. */
4058 static void
4059 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4060 struct data_in *data_in)
4062 struct ipa_node_params *info = IPA_NODE_REF (node);
4063 int k;
4064 struct cgraph_edge *e;
4065 struct bitpack_d bp;
4067 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4069 for (k = 0; k < ipa_get_param_count (info); k++)
4070 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4072 bp = streamer_read_bitpack (ib);
4073 if (ipa_get_param_count (info) != 0)
4074 info->uses_analysis_done = true;
4075 info->node_enqueued = false;
4076 for (k = 0; k < ipa_get_param_count (info); k++)
4077 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4078 for (k = 0; k < ipa_get_param_count (info); k++)
4079 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4080 for (e = node->callees; e; e = e->next_callee)
4082 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4083 int count = streamer_read_uhwi (ib);
4085 if (!count)
4086 continue;
4087 vec_safe_grow_cleared (args->jump_functions, count);
4089 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4090 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4091 data_in);
4093 for (e = node->indirect_calls; e; e = e->next_callee)
4095 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4096 int count = streamer_read_uhwi (ib);
4098 if (count)
4100 vec_safe_grow_cleared (args->jump_functions, count);
4101 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4102 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4103 data_in);
4105 ipa_read_indirect_edge_info (ib, data_in, e);
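/* Note the asymmetry with the callee loop above: for indirect edges the
   indirect call info is read even when the argument count is zero, because
   ipa_write_node_info streams it out unconditionally.  */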
4109 /* Write jump functions for all analyzed nodes in the current LTO partition. */
4111 void
4112 ipa_prop_write_jump_functions (void)
4114 struct cgraph_node *node;
4115 struct output_block *ob;
4116 unsigned int count = 0;
4117 lto_symtab_encoder_iterator lsei;
4118 lto_symtab_encoder_t encoder;
4121 if (!ipa_node_params_vector.exists ())
4122 return;
4124 ob = create_output_block (LTO_section_jump_functions);
4125 encoder = ob->decl_state->symtab_node_encoder;
4126 ob->cgraph_node = NULL;
4127 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4128 lsei_next_function_in_partition (&lsei))
4130 node = lsei_cgraph_node (lsei);
4131 if (cgraph_function_with_gimple_body_p (node)
4132 && IPA_NODE_REF (node) != NULL)
4133 count++;
4136 streamer_write_uhwi (ob, count);
4138 /* Process all of the functions. */
4139 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4140 lsei_next_function_in_partition (&lsei))
4142 node = lsei_cgraph_node (lsei);
4143 if (cgraph_function_with_gimple_body_p (node)
4144 && IPA_NODE_REF (node) != NULL)
4145 ipa_write_node_info (ob, node);
4147 streamer_write_char_stream (ob->main_stream, 0);
4148 produce_asm (ob, NULL);
4149 destroy_output_block (ob);
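/* The two passes over the partition above are deliberate: the number of
   interesting nodes is streamed first so that the reader knows how many
   per-node records to expect, and a terminating zero byte closes the
   section.  */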
4152 /* Read the jump functions section in file FILE_DATA of length LEN with data DATA. */
4154 static void
4155 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4156 size_t len)
4158 const struct lto_function_header *header =
4159 (const struct lto_function_header *) data;
4160 const int cfg_offset = sizeof (struct lto_function_header);
4161 const int main_offset = cfg_offset + header->cfg_size;
4162 const int string_offset = main_offset + header->main_size;
4163 struct data_in *data_in;
4164 struct lto_input_block ib_main;
4165 unsigned int i;
4166 unsigned int count;
4168 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4169 header->main_size);
4171 data_in =
4172 lto_data_in_create (file_data, (const char *) data + string_offset,
4173 header->string_size, vNULL);
4174 count = streamer_read_uhwi (&ib_main);
4176 for (i = 0; i < count; i++)
4178 unsigned int index;
4179 struct cgraph_node *node;
4180 lto_symtab_encoder_t encoder;
4182 index = streamer_read_uhwi (&ib_main);
4183 encoder = file_data->symtab_node_encoder;
4184 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4185 gcc_assert (node->symbol.definition);
4186 ipa_read_node_info (&ib_main, node, data_in);
4188 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4189 len);
4190 lto_data_in_delete (data_in);
4193 /* Read IPA-CP jump functions. */
4195 void
4196 ipa_prop_read_jump_functions (void)
4198 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4199 struct lto_file_decl_data *file_data;
4200 unsigned int j = 0;
4202 ipa_check_create_node_params ();
4203 ipa_check_create_edge_args ();
4204 ipa_register_cgraph_hooks ();
4206 while ((file_data = file_data_vec[j++]))
4208 size_t len;
4209 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4211 if (data)
4212 ipa_prop_read_section (file_data, data, len);
4216 /* After merging units, argument counts can mismatch and decl merging
4217 may have rendered parameter lists obsolete; re-create the per-node
4218 and per-edge IPA structures so later passes can rely on them. */
4220 void
4221 ipa_update_after_lto_read (void)
4223 ipa_check_create_node_params ();
4224 ipa_check_create_edge_args ();
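/* Stream out the aggregate value replacement chain for NODE to OB.  */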
4227 void
4228 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
4230 int node_ref;
4231 unsigned int count = 0;
4232 lto_symtab_encoder_t encoder;
4233 struct ipa_agg_replacement_value *aggvals, *av;
4235 aggvals = ipa_get_agg_replacements_for_node (node);
4236 encoder = ob->decl_state->symtab_node_encoder;
4237 node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
4238 streamer_write_uhwi (ob, node_ref);
4240 for (av = aggvals; av; av = av->next)
4241 count++;
4242 streamer_write_uhwi (ob, count);
4244 for (av = aggvals; av; av = av->next)
4246 struct bitpack_d bp;
4248 streamer_write_uhwi (ob, av->offset);
4249 streamer_write_uhwi (ob, av->index);
4250 stream_write_tree (ob, av->value, true);
4252 bp = bitpack_create (ob->main_stream);
4253 bp_pack_value (&bp, av->by_ref, 1);
4254 streamer_write_bitpack (&bp);
4258 /* Stream in the aggregate value replacement chain for NODE from IB. */
4260 static void
4261 read_agg_replacement_chain (struct lto_input_block *ib,
4262 struct cgraph_node *node,
4263 struct data_in *data_in)
4265 struct ipa_agg_replacement_value *aggvals = NULL;
4266 unsigned int count, i;
4268 count = streamer_read_uhwi (ib);
4269 for (i = 0; i < count; i++)
4271 struct ipa_agg_replacement_value *av;
4272 struct bitpack_d bp;
4274 av = ggc_alloc_ipa_agg_replacement_value ();
4275 av->offset = streamer_read_uhwi (ib);
4276 av->index = streamer_read_uhwi (ib);
4277 av->value = stream_read_tree (ib, data_in);
4278 bp = streamer_read_bitpack (ib);
4279 av->by_ref = bp_unpack_value (&bp, 1);
4280 av->next = aggvals;
4281 aggvals = av;
4283 ipa_set_node_agg_value_chain (node, aggvals);
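/* Because each value read above is pushed onto the front of the chain, the
   in-memory list ends up in the reverse of the written order; consumers
   search the whole chain by index and offset, so the order is presumably
   irrelevant.  */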
4286 /* Write all aggregate replacement chains for nodes in the current LTO partition. */
4288 void
4289 ipa_prop_write_all_agg_replacement (void)
4291 struct cgraph_node *node;
4292 struct output_block *ob;
4293 unsigned int count = 0;
4294 lto_symtab_encoder_iterator lsei;
4295 lto_symtab_encoder_t encoder;
4297 if (!ipa_node_agg_replacements)
4298 return;
4300 ob = create_output_block (LTO_section_ipcp_transform);
4301 encoder = ob->decl_state->symtab_node_encoder;
4302 ob->cgraph_node = NULL;
4303 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4304 lsei_next_function_in_partition (&lsei))
4306 node = lsei_cgraph_node (lsei);
4307 if (cgraph_function_with_gimple_body_p (node)
4308 && ipa_get_agg_replacements_for_node (node) != NULL)
4309 count++;
4312 streamer_write_uhwi (ob, count);
4314 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4315 lsei_next_function_in_partition (&lsei))
4317 node = lsei_cgraph_node (lsei);
4318 if (cgraph_function_with_gimple_body_p (node)
4319 && ipa_get_agg_replacements_for_node (node) != NULL)
4320 write_agg_replacement_chain (ob, node);
4322 streamer_write_char_stream (ob->main_stream, 0);
4323 produce_asm (ob, NULL);
4324 destroy_output_block (ob);
4327 /* Read replacements section in file FILE_DATA of length LEN with data
4328 DATA. */
4330 static void
4331 read_replacements_section (struct lto_file_decl_data *file_data,
4332 const char *data,
4333 size_t len)
4335 const struct lto_function_header *header =
4336 (const struct lto_function_header *) data;
4337 const int cfg_offset = sizeof (struct lto_function_header);
4338 const int main_offset = cfg_offset + header->cfg_size;
4339 const int string_offset = main_offset + header->main_size;
4340 struct data_in *data_in;
4341 struct lto_input_block ib_main;
4342 unsigned int i;
4343 unsigned int count;
4345 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4346 header->main_size);
4348 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
4349 header->string_size, vNULL);
4350 count = streamer_read_uhwi (&ib_main);
4352 for (i = 0; i < count; i++)
4354 unsigned int index;
4355 struct cgraph_node *node;
4356 lto_symtab_encoder_t encoder;
4358 index = streamer_read_uhwi (&ib_main);
4359 encoder = file_data->symtab_node_encoder;
4360 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4361 gcc_assert (node->symbol.definition);
4362 read_agg_replacement_chain (&ib_main, node, data_in);
4364 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
4365 len);
4366 lto_data_in_delete (data_in);
4369 /* Read IPA-CP aggregate replacements. */
4371 void
4372 ipa_prop_read_all_agg_replacement (void)
4374 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4375 struct lto_file_decl_data *file_data;
4376 unsigned int j = 0;
4378 while ((file_data = file_data_vec[j++]))
4380 size_t len;
4381 const char *data = lto_get_section_data (file_data,
4382 LTO_section_ipcp_transform,
4383 NULL, &len);
4384 if (data)
4385 read_replacements_section (file_data, data, len);
4389 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4390 NODE. */
4392 static void
4393 adjust_agg_replacement_values (struct cgraph_node *node,
4394 struct ipa_agg_replacement_value *aggval)
4396 struct ipa_agg_replacement_value *v;
4397 int i, c = 0, d = 0, *adj;
4399 if (!node->clone.combined_args_to_skip)
4400 return;
4402 for (v = aggval; v; v = v->next)
4404 gcc_assert (v->index >= 0);
4405 if (c < v->index)
4406 c = v->index;
4408 c++;
4410 adj = XALLOCAVEC (int, c);
4411 for (i = 0; i < c; i++)
4412 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
4414 adj[i] = -1;
4415 d++;
4417 else
4418 adj[i] = i - d;
4420 for (v = aggval; v; v = v->next)
4421 v->index = adj[v->index];
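/* A small worked example: with combined_args_to_skip = {1} and replacement
   values for original indices 0 and 2, we get c = 3 and adj = {0, -1, 1},
   so the value for index 2 is renumbered to 1 while index 0 stays put.  */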
4425 /* IPCP transformation phase: replace loads known to come from constant parts of parameter aggregates of NODE with those constants. */
4427 unsigned int
4428 ipcp_transform_function (struct cgraph_node *node)
4430 vec<ipa_param_descriptor_t> descriptors = vNULL;
4431 struct param_analysis_info *parms_ainfo;
4432 struct ipa_agg_replacement_value *aggval;
4433 gimple_stmt_iterator gsi;
4434 basic_block bb;
4435 int param_count;
4436 bool cfg_changed = false, something_changed = false;
4438 gcc_checking_assert (cfun);
4439 gcc_checking_assert (current_function_decl);
4441 if (dump_file)
4442 fprintf (dump_file, "Modification phase of node %s/%i\n",
4443 cgraph_node_name (node), node->symbol.order);
4445 aggval = ipa_get_agg_replacements_for_node (node);
4446 if (!aggval)
4447 return 0;
4448 param_count = count_formal_params (node->symbol.decl);
4449 if (param_count == 0)
4450 return 0;
4451 adjust_agg_replacement_values (node, aggval);
4452 if (dump_file)
4453 ipa_dump_agg_replacement_values (dump_file, aggval);
4454 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
4455 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
4456 descriptors.safe_grow_cleared (param_count);
4457 ipa_populate_param_decls (node, descriptors);
4459 FOR_EACH_BB (bb)
4460 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4462 struct ipa_agg_replacement_value *v;
4463 gimple stmt = gsi_stmt (gsi);
4464 tree rhs, val, t;
4465 HOST_WIDE_INT offset;
4466 int index;
4467 bool by_ref, vce;
4469 if (!gimple_assign_load_p (stmt))
4470 continue;
4471 rhs = gimple_assign_rhs1 (stmt);
4472 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
4473 continue;
4475 vce = false;
4476 t = rhs;
4477 while (handled_component_p (t))
4479 /* V_C_E can do things like convert an array of integers to one
4480 bigger integer and similar things we do not handle below. */
4481 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
4483 vce = true;
4484 break;
4486 t = TREE_OPERAND (t, 0);
4488 if (vce)
4489 continue;
4491 if (!ipa_load_from_parm_agg_1 (descriptors, parms_ainfo, stmt,
4492 rhs, &index, &offset, &by_ref))
4493 continue;
4494 for (v = aggval; v; v = v->next)
4495 if (v->index == index
4496 && v->offset == offset)
4497 break;
4498 if (!v || v->by_ref != by_ref)
4499 continue;
4501 gcc_checking_assert (is_gimple_ip_invariant (v->value));
4502 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
4504 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
4505 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
4506 else if (TYPE_SIZE (TREE_TYPE (rhs))
4507 == TYPE_SIZE (TREE_TYPE (v->value)))
4508 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
4509 else
4511 if (dump_file)
4513 fprintf (dump_file, " const ");
4514 print_generic_expr (dump_file, v->value, 0);
4515 fprintf (dump_file, " can't be converted to type of ");
4516 print_generic_expr (dump_file, rhs, 0);
4517 fprintf (dump_file, "\n");
4519 continue;
4522 else
4523 val = v->value;
4525 if (dump_file && (dump_flags & TDF_DETAILS))
4527 fprintf (dump_file, "Modifying stmt:\n ");
4528 print_gimple_stmt (dump_file, stmt, 0, 0);
4530 gimple_assign_set_rhs_from_tree (&gsi, val);
4531 update_stmt (stmt);
4533 if (dump_file && (dump_flags & TDF_DETAILS))
4535 fprintf (dump_file, "into:\n ");
4536 print_gimple_stmt (dump_file, stmt, 0, 0);
4537 fprintf (dump_file, "\n");
4540 something_changed = true;
4541 if (maybe_clean_eh_stmt (stmt)
4542 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4543 cfg_changed = true;
4546 (*ipa_node_agg_replacements)[node->uid] = NULL;
4547 free_parms_ainfo (parms_ainfo, param_count);
4548 descriptors.release ();
4550 if (!something_changed)
4551 return 0;
4552 else if (cfg_changed)
4553 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
4554 else
4555 return TODO_update_ssa_only_virtuals;
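/* For example (hypothetical), if the replacement chain records that word 0
   of the first parameter's pointed-to aggregate is the constant 4, a load
   such as "x_1 = p_2(D)->f" matched by ipa_load_from_parm_agg_1 above is
   rewritten into "x_1 = 4", possibly via a NOP_EXPR or VIEW_CONVERT_EXPR
   when the types differ but are convertible.  */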