2013-11-21 Edward Smith-Rowland <3dw4rd@verizon.net>
[official-gcc.git] / gcc / ipa-prop.c
blobd122dd555392080b1e970d29f740b441375ae044
1 /* Interprocedural analyses.
2 Copyright (C) 2005-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tree.h"
24 #include "gimple.h"
25 #include "expr.h"
26 #include "stor-layout.h"
27 #include "print-tree.h"
28 #include "gimplify.h"
29 #include "gimple-iterator.h"
30 #include "gimplify-me.h"
31 #include "gimple-walk.h"
32 #include "langhooks.h"
33 #include "ggc.h"
34 #include "target.h"
35 #include "ipa-prop.h"
36 #include "bitmap.h"
37 #include "gimple-ssa.h"
38 #include "tree-cfg.h"
39 #include "tree-phinodes.h"
40 #include "ssa-iterators.h"
41 #include "tree-into-ssa.h"
42 #include "tree-dfa.h"
43 #include "tree-pass.h"
44 #include "tree-inline.h"
45 #include "ipa-inline.h"
46 #include "flags.h"
47 #include "diagnostic.h"
48 #include "gimple-pretty-print.h"
49 #include "lto-streamer.h"
50 #include "data-streamer.h"
51 #include "tree-streamer.h"
52 #include "params.h"
53 #include "ipa-utils.h"
55 /* Intermediate information about a parameter that is only useful during the
56 run of ipa_analyze_node and is not kept afterwards. */
58 struct param_analysis_info
/* Caching flags: once a walk proves the parameter itself, a load from it, or
   the memory it points to has been modified, the corresponding flag is set so
   later queries can bail out without re-walking the virtual defs.  */
60 bool parm_modified, ref_modified, pt_modified;
/* Bitmaps of statements already visited by walk_aliased_vdefs for the
   parameter-load and points-to queries respectively, so repeated walks do not
   re-scan the same statements.  */
61 bitmap parm_visited_statements, pt_visited_statements;
64 /* Vector where the parameter infos are actually stored. */
65 vec<ipa_node_params_t> ipa_node_params_vector;
66 /* Vector of known aggregate values in cloned nodes. */
67 vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
68 /* Vector where the edge argument (jump function) infos are actually stored. */
69 vec<ipa_edge_args_t, va_gc> *ipa_edge_args_vector;
71 /* Holders of ipa cgraph hooks (callbacks registered for call graph events
   such as edge/node removal, duplication and function insertion): */
72 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
73 static struct cgraph_node_hook_list *node_removal_hook_holder;
74 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
75 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
76 static struct cgraph_node_hook_list *function_insertion_hook_holder;
78 /* Description of a reference to an IPA constant. */
79 struct ipa_cst_ref_desc
81 /* Edge that corresponds to the statement which took the reference. */
82 struct cgraph_edge *cs;
83 /* Linked list of duplicates created when call graph edges are cloned. */
84 struct ipa_cst_ref_desc *next_duplicate;
85 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
86 is out of control. */
87 int refcount;
90 /* Allocation pool for reference descriptions. */
92 static alloc_pool ipa_refdesc_pool;
94 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
95 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
97 static bool
98 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
100 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
101 struct cl_optimization *os;
103 if (!fs_opts)
104 return false;
105 os = TREE_OPTIMIZATION (fs_opts);
106 return !os->x_optimize || !os->x_flag_ipa_cp;
109 /* Return index of the formal whose tree is PTREE in function which corresponds
110 to INFO. */
112 static int
113 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor_t> descriptors, tree ptree)
115 int i, count;
117 count = descriptors.length ();
118 for (i = 0; i < count; i++)
119 if (descriptors[i].decl == ptree)
120 return i;
122 return -1;
125 /* Return index of the formal whose tree is PTREE in function which corresponds
126 to INFO. */
129 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
131 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
134 /* Populate the param_decl field in parameter DESCRIPTORS that correspond to
135 NODE. */
137 static void
138 ipa_populate_param_decls (struct cgraph_node *node,
139 vec<ipa_param_descriptor_t> &descriptors)
141 tree fndecl;
142 tree fnargs;
143 tree parm;
144 int param_num;
146 fndecl = node->decl;
147 gcc_assert (gimple_has_body_p (fndecl));
148 fnargs = DECL_ARGUMENTS (fndecl);
149 param_num = 0;
150 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
152 descriptors[param_num].decl = parm;
153 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm));
154 param_num++;
158 /* Return how many formal parameters FNDECL has. */
160 static inline int
161 count_formal_params (tree fndecl)
163 tree parm;
164 int count = 0;
165 gcc_assert (gimple_has_body_p (fndecl));
167 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
168 count++;
170 return count;
173 /* Return the declaration of Ith formal parameter of the function corresponding
174 to INFO. Note there is no setter function as this array is built just once
175 using ipa_initialize_node_params. */
177 void
178 ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
180 fprintf (file, "param #%i", i);
181 if (info->descriptors[i].decl)
183 fprintf (file, " ");
184 print_generic_expr (file, info->descriptors[i].decl, 0);
188 /* Initialize the ipa_node_params structure associated with NODE
189 to hold PARAM_COUNT parameters. */
191 void
192 ipa_alloc_node_params (struct cgraph_node *node, int param_count)
194 struct ipa_node_params *info = IPA_NODE_REF (node);
196 if (!info->descriptors.exists () && param_count)
197 info->descriptors.safe_grow_cleared (param_count);
200 /* Initialize the ipa_node_params structure associated with NODE by counting
201 the function parameters, creating the descriptors and populating their
202 param_decls. */
204 void
205 ipa_initialize_node_params (struct cgraph_node *node)
207 struct ipa_node_params *info = IPA_NODE_REF (node);
209 if (!info->descriptors.exists ())
211 ipa_alloc_node_params (node, count_formal_params (node->decl));
212 ipa_populate_param_decls (node, info->descriptors);
216 /* Print the jump functions associated with call graph edge CS to file F. */
218 static void
219 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
221 int i, count;
223 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
/* One entry per actual argument of the call.  */
224 for (i = 0; i < count; i++)
226 struct ipa_jump_func *jump_func;
227 enum jump_func_type type;
229 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
230 type = jump_func->type;
/* Dispatch on the jump function kind and dump its payload.  */
232 fprintf (f, " param %d: ", i);
233 if (type == IPA_JF_UNKNOWN)
234 fprintf (f, "UNKNOWN\n");
235 else if (type == IPA_JF_KNOWN_TYPE)
237 fprintf (f, "KNOWN TYPE: base ");
238 print_generic_expr (f, jump_func->value.known_type.base_type, 0);
239 fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
240 jump_func->value.known_type.offset);
241 print_generic_expr (f, jump_func->value.known_type.component_type, 0);
242 fprintf (f, "\n");
244 else if (type == IPA_JF_CONST)
246 tree val = jump_func->value.constant.value;
247 fprintf (f, "CONST: ");
248 print_generic_expr (f, val, 0);
/* For an address of a constant, also show what it points to.  */
249 if (TREE_CODE (val) == ADDR_EXPR
250 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
252 fprintf (f, " -> ");
253 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
256 fprintf (f, "\n");
258 else if (type == IPA_JF_PASS_THROUGH)
260 fprintf (f, "PASS THROUGH: ");
261 fprintf (f, "%d, op %s",
262 jump_func->value.pass_through.formal_id,
263 get_tree_code_name(jump_func->value.pass_through.operation))*
264 if (jump_func->value.pass_through.operation != NOP_EXPR)
266 fprintf (f, " ");
267 print_generic_expr (f,
268 jump_func->value.pass_through.operand, 0);
270 if (jump_func->value.pass_through.agg_preserved)
271 fprintf (f, ", agg_preserved");
272 if (jump_func->value.pass_through.type_preserved)
273 fprintf (f, ", type_preserved");
274 fprintf (f, "\n");
276 else if (type == IPA_JF_ANCESTOR)
278 fprintf (f, "ANCESTOR: ");
279 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
280 jump_func->value.ancestor.formal_id,
281 jump_func->value.ancestor.offset);
282 print_generic_expr (f, jump_func->value.ancestor.type, 0);
283 if (jump_func->value.ancestor.agg_preserved)
284 fprintf (f, ", agg_preserved");
285 if (jump_func->value.ancestor.type_preserved)
286 fprintf (f, ", type_preserved");
287 fprintf (f, "\n");
/* Independently of the kind above, dump any aggregate jump function items
   (known constants or clobbers at offsets within a passed aggregate).  */
290 if (jump_func->agg.items)
292 struct ipa_agg_jf_item *item;
293 int j;
295 fprintf (f, " Aggregate passed by %s:\n",
296 jump_func->agg.by_ref ? "reference" : "value");
297 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
299 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
300 item->offset);
301 if (TYPE_P (item->value))
302 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
303 tree_to_uhwi (TYPE_SIZE (item->value)));
304 else
306 fprintf (f, "cst: ");
307 print_generic_expr (f, item->value, 0);
309 fprintf (f, "\n");
316 /* Print the jump functions of all arguments on all call graph edges going from
317 NODE to file F. */
319 void
320 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
322 struct cgraph_edge *cs;
324 fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
325 node->order);
/* First the direct callees...  */
326 for (cs = node->callees; cs; cs = cs->next_callee)
328 if (!ipa_edge_args_info_available_for_edge_p (cs))
329 continue;
/* NOTE(review): the xstrdup'd names below appear to be leaked — this is
   dump-only code, but verify whether plain node->name () would suffice.  */
331 fprintf (f, " callsite %s/%i -> %s/%i : \n",
332 xstrdup (node->name ()), node->order,
333 xstrdup (cs->callee->name ()),
334 cs->callee->order);
335 ipa_print_node_jump_functions_for_edge (f, cs);
/* ...then the indirect call sites, with a summary of how the callee is
   determined (from an aggregate, member pointer, or a plain parameter).  */
338 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
340 struct cgraph_indirect_call_info *ii;
341 if (!ipa_edge_args_info_available_for_edge_p (cs))
342 continue;
344 ii = cs->indirect_info;
345 if (ii->agg_contents)
346 fprintf (f, " indirect %s callsite, calling param %i, "
347 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
348 ii->member_ptr ? "member ptr" : "aggregate",
349 ii->param_index, ii->offset,
350 ii->by_ref ? "by reference" : "by_value");
351 else
352 fprintf (f, " indirect %s callsite, calling param %i",
353 ii->polymorphic ? "polymorphic" : "simple", ii->param_index);
355 if (cs->call_stmt)
357 fprintf (f, ", for stmt ");
358 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
360 else
361 fprintf (f, "\n");
362 ipa_print_node_jump_functions_for_edge (f, cs);
366 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
368 void
369 ipa_print_all_jump_functions (FILE *f)
371 struct cgraph_node *node;
373 fprintf (f, "\nJump functions:\n");
374 FOR_EACH_FUNCTION (node)
376 ipa_print_node_jump_functions (f, node);
380 /* Set JFUNC to be a known type jump function. */
382 static void
383 ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
384 tree base_type, tree component_type)
386 gcc_assert (TREE_CODE (component_type) == RECORD_TYPE
387 && TYPE_BINFO (component_type));
388 jfunc->type = IPA_JF_KNOWN_TYPE;
389 jfunc->value.known_type.offset = offset,
390 jfunc->value.known_type.base_type = base_type;
391 jfunc->value.known_type.component_type = component_type;
392 gcc_assert (component_type);
395 /* Set JFUNC to be a copy of another jmp (to be used by jump function
396 combination code). The two functions will share their rdesc. */
398 static void
399 ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
400 struct ipa_jump_func *src)
403 gcc_checking_assert (src->type == IPA_JF_CONST);
404 dst->type = IPA_JF_CONST;
405 dst->value.constant = src->value.constant;
408 /* Set JFUNC to be a constant jmp function. */
410 static void
411 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
412 struct cgraph_edge *cs)
/* Work on a private copy stripped of location information so the recorded
   value does not keep source locations alive.  */
414 constant = unshare_expr (constant);
415 if (constant && EXPR_P (constant))
416 SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
417 jfunc->type = IPA_JF_CONST;
418 jfunc->value.constant.value = unshare_expr_without_location (constant);
/* Addresses of functions get a reference description so that the IPA
   reference they represent can be tracked (and released) as edges are
   redirected or removed; the descriptor starts with refcount 1.  */
420 if (TREE_CODE (constant) == ADDR_EXPR
421 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
423 struct ipa_cst_ref_desc *rdesc;
/* Lazily create the allocation pool on first use.  */
424 if (!ipa_refdesc_pool)
425 ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
426 sizeof (struct ipa_cst_ref_desc), 32);
428 rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
429 rdesc->cs = cs;
430 rdesc->next_duplicate = NULL;
431 rdesc->refcount = 1;
432 jfunc->value.constant.rdesc = rdesc;
434 else
435 jfunc->value.constant.rdesc = NULL;
438 /* Set JFUNC to be a simple pass-through jump function. */
439 static void
440 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
441 bool agg_preserved, bool type_preserved)
443 jfunc->type = IPA_JF_PASS_THROUGH;
444 jfunc->value.pass_through.operand = NULL_TREE;
445 jfunc->value.pass_through.formal_id = formal_id;
446 jfunc->value.pass_through.operation = NOP_EXPR;
447 jfunc->value.pass_through.agg_preserved = agg_preserved;
448 jfunc->value.pass_through.type_preserved = type_preserved;
451 /* Set JFUNC to be an arithmetic pass through jump function. */
453 static void
454 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
455 tree operand, enum tree_code operation)
457 jfunc->type = IPA_JF_PASS_THROUGH;
458 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
459 jfunc->value.pass_through.formal_id = formal_id;
460 jfunc->value.pass_through.operation = operation;
461 jfunc->value.pass_through.agg_preserved = false;
462 jfunc->value.pass_through.type_preserved = false;
465 /* Set JFUNC to be an ancestor jump function. */
467 static void
468 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
469 tree type, int formal_id, bool agg_preserved,
470 bool type_preserved)
472 jfunc->type = IPA_JF_ANCESTOR;
473 jfunc->value.ancestor.formal_id = formal_id;
474 jfunc->value.ancestor.offset = offset;
475 jfunc->value.ancestor.type = type;
476 jfunc->value.ancestor.agg_preserved = agg_preserved;
477 jfunc->value.ancestor.type_preserved = type_preserved;
480 /* Extract the acual BINFO being described by JFUNC which must be a known type
481 jump function. */
483 tree
484 ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
486 tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
487 if (!base_binfo)
488 return NULL_TREE;
489 return get_binfo_at_offset (base_binfo,
490 jfunc->value.known_type.offset,
491 jfunc->value.known_type.component_type);
494 /* Aggregated walk state passed between detect_type_change and its
495 walk_aliased_vdefs callback check_stmt_for_type_change. */
497 struct type_change_info
499 /* Offset into the object where there is the virtual method pointer we are
500 looking for. */
501 HOST_WIDE_INT offset;
502 /* The declaration or SSA_NAME pointer of the base that we are checking for
503 type change. */
504 tree object;
505 /* If we actually can tell the type that the object has changed to, it is
506 stored in this field. Otherwise it remains NULL_TREE. */
507 tree known_current_type;
508 /* Set to true if dynamic type change has been detected. */
509 bool type_maybe_changed;
510 /* Set to true if multiple types have been encountered. known_current_type
511 must be disregarded in that case. */
512 bool multiple_types_encountered;
515 /* Return true if STMT can modify a virtual method table pointer.
517 This function makes special assumptions about both constructors and
518 destructors which are all the functions that are allowed to alter the VMT
519 pointers. It assumes that destructors begin with assignment into all VMT
520 pointers and that constructors essentially look in the following way:
522 1) The very first thing they do is that they call constructors of ancestor
523 sub-objects that have them.
525 2) Then VMT pointers of this and all its ancestors is set to new values
526 corresponding to the type corresponding to the constructor.
528 3) Only afterwards, other stuff such as constructor of member sub-objects
529 and the code written by the user is run. Only this may include calling
530 virtual functions, directly or indirectly.
532 There is no way to call a constructor of an ancestor sub-object in any
533 other way.
535 This means that we do not have to care whether constructors get the correct
536 type information because they will always change it (in fact, if we define
537 the type to be given by the VMT pointer, it is undefined).
539 The most important fact to derive from the above is that if, for some
540 statement in the section 3, we try to detect whether the dynamic type has
541 changed, we can safely ignore all calls as we examine the function body
542 backwards until we reach statements in section 2 because these calls cannot
543 be ancestor constructors or destructors (if the input is not bogus) and so
544 do not change the dynamic type (this holds true only for automatically
545 allocated objects but at the moment we devirtualize only these). We then
546 must detect that statements in section 2 change the dynamic type and can try
547 to derive the new type. That is enough and we can stop, we will never see
548 the calls into constructors of sub-objects in this code. Therefore we can
549 safely ignore all call statements that we traverse.
552 static bool
553 stmt_may_be_vtbl_ptr_store (gimple stmt)
/* Calls are safe to ignore — see the big comment above.  */
555 if (is_gimple_call (stmt))
556 return false;
557 else if (is_gimple_assign (stmt))
559 tree lhs = gimple_assign_lhs (stmt);
/* For non-aggregate stores, with strict aliasing a VMT pointer can only be
   written through a pointer-typed lvalue; a store to a non-virtual
   COMPONENT_REF field also cannot touch the VMT pointer.  */
561 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
563 if (flag_strict_aliasing
564 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
565 return false;
567 if (TREE_CODE (lhs) == COMPONENT_REF
568 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
569 return false;
570 /* In the future we might want to use get_base_ref_and_offset to find
571 if there is a field corresponding to the offset and if so, proceed
572 almost like if it was a component ref. */
/* Anything else (including aggregate copies) is conservatively assumed to
   possibly store to a VMT pointer.  */
575 return true;
578 /* If STMT can be proved to be an assignment to the virtual method table
579 pointer of ANALYZED_OBJ and the type associated with the new table
580 identified, return the type. Otherwise return NULL_TREE. */
582 static tree
583 extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
585 HOST_WIDE_INT offset, size, max_size;
586 tree lhs, rhs, base;
588 if (!gimple_assign_single_p (stmt))
589 return NULL_TREE;
/* Only stores of the form object->vptr_field = &vtable are of interest.  */
591 lhs = gimple_assign_lhs (stmt);
592 rhs = gimple_assign_rhs1 (stmt);
593 if (TREE_CODE (lhs) != COMPONENT_REF
594 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
595 || TREE_CODE (rhs) != ADDR_EXPR)
596 return NULL_TREE;
597 rhs = get_base_address (TREE_OPERAND (rhs, 0));
598 if (!rhs
599 || TREE_CODE (rhs) != VAR_DECL
600 || !DECL_VIRTUAL_P (rhs))
601 return NULL_TREE;
/* The store must be a pointer-sized write at exactly the offset being
   tracked in TCI.  */
603 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
604 if (offset != tci->offset
605 || size != POINTER_SIZE
606 || max_size != POINTER_SIZE)
607 return NULL_TREE;
/* The base of the store must match the tracked object, comparing both the
   pointer and the constant offset when the base is a MEM_REF.  */
608 if (TREE_CODE (base) == MEM_REF)
610 if (TREE_CODE (tci->object) != MEM_REF
611 || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
612 || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
613 TREE_OPERAND (base, 1)))
614 return NULL_TREE;
616 else if (tci->object != base)
617 return NULL_TREE;
/* The type the new vtable belongs to is the context of the vtable decl.  */
619 return DECL_CONTEXT (rhs);
622 /* Callback of walk_aliased_vdefs and a helper function for
623 detect_type_change to check whether a particular statement may modify
624 the virtual table pointer, and if possible also determine the new type of
625 the (sub-)object. It stores its result into DATA, which points to a
626 type_change_info structure. */
628 static bool
629 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
631 gimple stmt = SSA_NAME_DEF_STMT (vdef);
632 struct type_change_info *tci = (struct type_change_info *) data;
634 if (stmt_may_be_vtbl_ptr_store (stmt))
636 tree type;
637 type = extr_type_from_vtbl_ptr_store (stmt, tci);
638 if (tci->type_maybe_changed
639 && type != tci->known_current_type)
640 tci->multiple_types_encountered = true;
641 tci->known_current_type = type;
642 tci->type_maybe_changed = true;
643 return true;
645 else
646 return false;
651 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
652 callsite CALL) by looking for assignments to its virtual table pointer. If
653 it is, return true and fill in the jump function JFUNC with relevant type
654 information or set it to unknown. ARG is the object itself (not a pointer
655 to it, unless dereferenced). BASE is the base of the memory access as
656 returned by get_ref_base_and_extent, as is the offset. */
658 static bool
659 detect_type_change (tree arg, tree base, tree comp_type, gimple call,
660 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
662 struct type_change_info tci;
663 ao_ref ao;
665 gcc_checking_assert (DECL_P (arg)
666 || TREE_CODE (arg) == MEM_REF
667 || handled_component_p (arg));
668 /* Const calls cannot call virtual methods through VMT and so type changes do
669 not matter. */
670 if (!flag_devirtualize || !gimple_vuse (call)
671 /* Be sure expected_type is polymorphic. */
672 || !comp_type
673 || TREE_CODE (comp_type) != RECORD_TYPE
674 || !TYPE_BINFO (comp_type)
675 || !BINFO_VTABLE (TYPE_BINFO (comp_type)))
676 return false;
/* Describe a pointer-sized access at OFFSET within BASE — the location of
   the virtual table pointer — for the alias walk.  */
678 ao_ref_init (&ao, arg);
679 ao.base = base;
680 ao.offset = offset;
681 ao.size = POINTER_SIZE;
682 ao.max_size = ao.size;
684 tci.offset = offset;
685 tci.object = get_base_address (arg);
686 tci.known_current_type = NULL_TREE;
687 tci.type_maybe_changed = false;
688 tci.multiple_types_encountered = false;
/* Walk virtual definitions backwards from the call looking for VMT pointer
   stores; check_stmt_for_type_change fills in TCI.  */
690 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
691 &tci, NULL);
692 if (!tci.type_maybe_changed)
693 return false;
/* Only a single, unambiguous type at offset zero yields a known-type jump
   function; otherwise the result is unknown.  */
695 if (!tci.known_current_type
696 || tci.multiple_types_encountered
697 || offset != 0)
698 jfunc->type = IPA_JF_UNKNOWN;
699 else
700 ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);
702 return true;
705 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
706 SSA name (its dereference will become the base and the offset is assumed to
707 be zero). */
709 static bool
710 detect_type_change_ssa (tree arg, tree comp_type,
711 gimple call, struct ipa_jump_func *jfunc)
713 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
714 if (!flag_devirtualize
715 || !POINTER_TYPE_P (TREE_TYPE (arg)))
716 return false;
718 arg = build2 (MEM_REF, ptr_type_node, arg,
719 build_int_cst (ptr_type_node, 0));
721 return detect_type_change (arg, arg, comp_type, call, jfunc, 0);
724 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
725 boolean variable pointed to by DATA. */
727 static bool
728 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
729 void *data)
731 bool *b = (bool *) data;
732 *b = true;
733 return true;
736 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
737 a value known not to be modified in this function before reaching the
738 statement STMT. PARM_AINFO is a pointer to a structure containing temporary
739 information about the parameter. */
741 static bool
742 parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
743 gimple stmt, tree parm_load)
745 bool modified = false;
746 bitmap *visited_stmts;
747 ao_ref refd;
/* A previously cached "modified" verdict short-circuits the walk.  */
749 if (parm_ainfo && parm_ainfo->parm_modified)
750 return false;
752 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
753 ao_ref_init (&refd, parm_load);
754 /* We can cache visited statements only when parm_ainfo is available and when
755 we are looking at a naked load of the whole parameter. */
756 if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
757 visited_stmts = NULL;
758 else
759 visited_stmts = &parm_ainfo->parm_visited_statements;
760 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
761 visited_stmts);
/* Cache a positive "modified" result for subsequent queries.  */
762 if (parm_ainfo && modified)
763 parm_ainfo->parm_modified = true;
764 return !modified;
767 /* If STMT is an assignment that loads a value from an parameter declaration,
768 return the index of the parameter in ipa_node_params which has not been
769 modified. Otherwise return -1. */
771 static int
772 load_from_unmodified_param (vec<ipa_param_descriptor_t> descriptors,
773 struct param_analysis_info *parms_ainfo,
774 gimple stmt)
776 int index;
777 tree op1;
779 if (!gimple_assign_single_p (stmt))
780 return -1;
782 op1 = gimple_assign_rhs1 (stmt);
783 if (TREE_CODE (op1) != PARM_DECL)
784 return -1;
786 index = ipa_get_param_decl_index_1 (descriptors, op1);
787 if (index < 0
788 || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
789 : NULL, stmt, op1))
790 return -1;
792 return index;
795 /* Return true if memory reference REF loads data that are known to be
796 unmodified in this function before reaching statement STMT. PARM_AINFO, if
797 non-NULL, is a pointer to a structure containing temporary information about
798 PARM. */
800 static bool
801 parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
802 gimple stmt, tree ref)
804 bool modified = false;
805 ao_ref refd;
807 gcc_checking_assert (gimple_vuse (stmt));
808 if (parm_ainfo && parm_ainfo->ref_modified)
809 return false;
811 ao_ref_init (&refd, ref);
812 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
813 NULL);
814 if (parm_ainfo && modified)
815 parm_ainfo->ref_modified = true;
816 return !modified;
819 /* Return true if the data pointed to by PARM is known to be unmodified in this
820 function before reaching call statement CALL into which it is passed.
821 PARM_AINFO is a pointer to a structure containing temporary information
822 about PARM. */
824 static bool
825 parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
826 gimple call, tree parm)
828 bool modified = false;
829 ao_ref refd;
831 /* It's unnecessary to calculate anything about memory contnets for a const
832 function because it is not goin to use it. But do not cache the result
833 either. Also, no such calculations for non-pointers. */
834 if (!gimple_vuse (call)
835 || !POINTER_TYPE_P (TREE_TYPE (parm)))
836 return false;
838 if (parm_ainfo->pt_modified)
839 return false;
841 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
842 walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
843 parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
844 if (modified)
845 parm_ainfo->pt_modified = true;
846 return !modified;
849 /* Return true if we can prove that OP is a memory reference loading unmodified
850 data from an aggregate passed as a parameter and if the aggregate is passed
851 by reference, that the alias type of the load corresponds to the type of the
852 formal parameter (so that we can rely on this type for TBAA in callers).
853 INFO and PARMS_AINFO describe parameters of the current function (but the
854 latter can be NULL), STMT is the load statement. If function returns true,
855 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
856 within the aggregate and whether it is a load from a value passed by
857 reference respectively. */
859 static bool
860 ipa_load_from_parm_agg_1 (vec<ipa_param_descriptor_t> descriptors,
861 struct param_analysis_info *parms_ainfo, gimple stmt,
862 tree op, int *index_p, HOST_WIDE_INT *offset_p,
863 HOST_WIDE_INT *size_p, bool *by_ref_p)
865 int index;
866 HOST_WIDE_INT size, max_size;
867 tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);
/* Reject variable-sized or variably-offset accesses.  */
869 if (max_size == -1 || max_size != size || *offset_p < 0)
870 return false;
/* Case 1: the aggregate is a parameter passed by value (base is a decl).
   NOTE(review): the inner INDEX below shadows the outer declaration above;
   harmless here but -Wshadow would flag it.  */
872 if (DECL_P (base))
874 int index = ipa_get_param_decl_index_1 (descriptors, base);
875 if (index >= 0
876 && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
877 : NULL, stmt, op))
879 *index_p = index;
880 *by_ref_p = false;
881 if (size_p)
882 *size_p = size;
883 return true;
885 return false;
/* Case 2: the aggregate is passed by reference; the base must be a
   zero-offset MEM_REF through an SSA pointer.  */
888 if (TREE_CODE (base) != MEM_REF
889 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
890 || !integer_zerop (TREE_OPERAND (base, 1)))
891 return false;
893 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
895 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
896 index = ipa_get_param_decl_index_1 (descriptors, parm);
898 else
900 /* This branch catches situations where a pointer parameter is not a
901 gimple register, for example:
903 void hip7(S*) (struct S * p)
905 void (*<T2e4>) (struct S *) D.1867;
906 struct S * p.1;
908 <bb 2>:
909 p.1_1 = p;
910 D.1867_2 = p.1_1->f;
911 D.1867_2 ();
912 gdp = &p;
915 gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
916 index = load_from_unmodified_param (descriptors, parms_ainfo, def);
/* The pointed-to data must also be unmodified up to STMT.  */
919 if (index >= 0
920 && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
921 stmt, op))
923 *index_p = index;
924 *by_ref_p = true;
925 if (size_p)
926 *size_p = size;
927 return true;
929 return false;
932 /* Just like the previous function, just without the param_analysis_info
933 pointer, for users outside of this file. */
935 bool
936 ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
937 tree op, int *index_p, HOST_WIDE_INT *offset_p,
938 bool *by_ref_p)
940 return ipa_load_from_parm_agg_1 (info->descriptors, NULL, stmt, op, index_p,
941 offset_p, NULL, by_ref_p);
944 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
945 of an assignment statement STMT, try to determine whether we are actually
946 handling any of the following cases and construct an appropriate jump
947 function into JFUNC if so:
949 1) The passed value is loaded from a formal parameter which is not a gimple
950 register (most probably because it is addressable, the value has to be
951 scalar) and we can guarantee the value has not changed. This case can
952 therefore be described by a simple pass-through jump function. For example:
954 foo (int a)
956 int a.0;
958 a.0_2 = a;
959 bar (a.0_2);
961 2) The passed value can be described by a simple arithmetic pass-through
962 jump function. E.g.
964 foo (int a)
966 int D.2064;
968 D.2064_4 = a.1(D) + 4;
969 bar (D.2064_4);
971 This case can also occur in combination of the previous one, e.g.:
973 foo (int a, int z)
975 int a.0;
976 int D.2064;
978 a.0_3 = a;
979 D.2064_4 = a.0_3 + 4;
980 foo (D.2064_4);
982 3) The passed value is an address of an object within another one (which
983 also passed by reference). Such situations are described by an ancestor
984 jump function and describe situations such as:
986 B::foo() (struct B * const this)
988 struct A * D.1845;
990 D.1845_2 = &this_1(D)->D.1748;
991 A::bar (D.1845_2);
993 INFO is the structure describing individual parameters access different
994 stages of IPA optimizations. PARMS_AINFO contains the information that is
995 only needed for intraprocedural analysis. */
997 static void
998 compute_complex_assign_jump_func (struct ipa_node_params *info,
999 struct param_analysis_info *parms_ainfo,
1000 struct ipa_jump_func *jfunc,
1001 gimple call, gimple stmt, tree name,
1002 tree param_type)
1004 HOST_WIDE_INT offset, size, max_size;
1005 tree op1, tc_ssa, base, ssa;
1006 int index;
1008 op1 = gimple_assign_rhs1 (stmt);
/* Cases 1 and 2: determine whether the RHS is (possibly loaded from) an
   unmodified formal parameter; TC_SSA is the SSA name to use for the type
   change detection below.  */
1010 if (TREE_CODE (op1) == SSA_NAME)
1012 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1013 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1014 else
1015 index = load_from_unmodified_param (info->descriptors, parms_ainfo,
1016 SSA_NAME_DEF_STMT (op1));
1017 tc_ssa = op1;
1019 else
1021 index = load_from_unmodified_param (info->descriptors, parms_ainfo, stmt);
1022 tc_ssa = gimple_assign_lhs (stmt);
/* A non-negative index identifies a formal; build an arithmetic or simple
   pass-through jump function and return.  */
1025 if (index >= 0)
1027 tree op2 = gimple_assign_rhs2 (stmt);
1029 if (op2)
1031 if (!is_gimple_ip_invariant (op2)
1032 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
1033 && !useless_type_conversion_p (TREE_TYPE (name),
1034 TREE_TYPE (op1))))
1035 return;
1037 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1038 gimple_assign_rhs_code (stmt));
1040 else if (gimple_assign_single_p (stmt))
1042 bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
1043 call, tc_ssa);
1044 bool type_p = false;
1046 if (param_type && POINTER_TYPE_P (param_type))
1047 type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
1048 call, jfunc);
1049 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1050 ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
1052 return;
/* Case 3: the RHS must be &base->field within a record reachable through a
   default-definition SSA pointer, yielding an ancestor jump function.  */
1055 if (TREE_CODE (op1) != ADDR_EXPR)
1056 return;
1057 op1 = TREE_OPERAND (op1, 0);
1058 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
1059 return;
1060 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
1061 if (TREE_CODE (base) != MEM_REF
1062 /* If this is a varying address, punt. */
1063 || max_size == -1
1064 || max_size != size)
1065 return;
1066 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
1067 ssa = TREE_OPERAND (base, 0);
1068 if (TREE_CODE (ssa) != SSA_NAME
1069 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
1070 || offset < 0)
1071 return;
1073 /* Dynamic types are changed in constructors and destructors. */
1074 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
1075 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
1077 bool type_p = !detect_type_change (op1, base, TREE_TYPE (param_type),
1078 call, jfunc, offset);
1079 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1080 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index,
1081 parm_ref_data_pass_through_p (&parms_ainfo[index],
1082 call, ssa), type_p);
1086 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1087 it looks like:
1089 iftmp.1_3 = &obj_2(D)->D.1762;
1091 The base of the MEM_REF must be a default definition SSA NAME of a
1092 parameter. Return NULL_TREE if it looks otherwise. If case of success, the
1093 whole MEM_REF expression is returned and the offset calculated from any
1094 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1095 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1097 static tree
1098 get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
1100 HOST_WIDE_INT size, max_size;
1101 tree expr, parm, obj;
1103 if (!gimple_assign_single_p (assign))
1104 return NULL_TREE;
1105 expr = gimple_assign_rhs1 (assign);
1107 if (TREE_CODE (expr) != ADDR_EXPR)
1108 return NULL_TREE;
1109 expr = TREE_OPERAND (expr, 0);
1110 obj = expr;
1111 expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
1113 if (TREE_CODE (expr) != MEM_REF
1114 /* If this is a varying address, punt. */
1115 || max_size == -1
1116 || max_size != size
1117 || *offset < 0)
1118 return NULL_TREE;
1119 parm = TREE_OPERAND (expr, 0);
1120 if (TREE_CODE (parm) != SSA_NAME
1121 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1122 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1123 return NULL_TREE;
1125 *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
1126 *obj_p = obj;
1127 return expr;
1131 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1132 statement PHI, try to find out whether NAME is in fact a
1133 multiple-inheritance typecast from a descendant into an ancestor of a formal
1134 parameter and thus can be described by an ancestor jump function and if so,
1135 write the appropriate function into JFUNC.
1137 Essentially we want to match the following pattern:
1139 if (obj_2(D) != 0B)
1140 goto <bb 3>;
1141 else
1142 goto <bb 4>;
1144 <bb 3>:
1145 iftmp.1_3 = &obj_2(D)->D.1762;
1147 <bb 4>:
1148 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1149 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1150 return D.1879_6; */
1152 static void
1153 compute_complex_ancestor_jump_func (struct ipa_node_params *info,
1154 struct param_analysis_info *parms_ainfo,
1155 struct ipa_jump_func *jfunc,
1156 gimple call, gimple phi, tree param_type)
1158 HOST_WIDE_INT offset;
1159 gimple assign, cond;
1160 basic_block phi_bb, assign_bb, cond_bb;
1161 tree tmp, parm, expr, obj;
1162 int index, i;
1164 if (gimple_phi_num_args (phi) != 2)
1165 return;
1167 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1168 tmp = PHI_ARG_DEF (phi, 0);
1169 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1170 tmp = PHI_ARG_DEF (phi, 1);
1171 else
1172 return;
1173 if (TREE_CODE (tmp) != SSA_NAME
1174 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1175 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1176 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1177 return;
1179 assign = SSA_NAME_DEF_STMT (tmp);
1180 assign_bb = gimple_bb (assign);
1181 if (!single_pred_p (assign_bb))
1182 return;
1183 expr = get_ancestor_addr_info (assign, &obj, &offset);
1184 if (!expr)
1185 return;
1186 parm = TREE_OPERAND (expr, 0);
1187 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1188 gcc_assert (index >= 0);
1190 cond_bb = single_pred (assign_bb);
1191 cond = last_stmt (cond_bb);
1192 if (!cond
1193 || gimple_code (cond) != GIMPLE_COND
1194 || gimple_cond_code (cond) != NE_EXPR
1195 || gimple_cond_lhs (cond) != parm
1196 || !integer_zerop (gimple_cond_rhs (cond)))
1197 return;
1199 phi_bb = gimple_bb (phi);
1200 for (i = 0; i < 2; i++)
1202 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1203 if (pred != assign_bb && pred != cond_bb)
1204 return;
1207 bool type_p = false;
1208 if (param_type && POINTER_TYPE_P (param_type))
1209 type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
1210 call, jfunc, offset);
1211 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1212 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index,
1213 parm_ref_data_pass_through_p (&parms_ainfo[index],
1214 call, parm), type_p);
1217 /* Given OP which is passed as an actual argument to a called function,
1218 determine if it is possible to construct a KNOWN_TYPE jump function for it
1219 and if so, create one and store it to JFUNC.
1220 EXPECTED_TYPE represents a type the argument should be in */
1222 static void
1223 compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
1224 gimple call, tree expected_type)
1226 HOST_WIDE_INT offset, size, max_size;
1227 tree base;
1229 if (!flag_devirtualize
1230 || TREE_CODE (op) != ADDR_EXPR
1231 || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE
1232 /* Be sure expected_type is polymorphic. */
1233 || !expected_type
1234 || TREE_CODE (expected_type) != RECORD_TYPE
1235 || !TYPE_BINFO (expected_type)
1236 || !BINFO_VTABLE (TYPE_BINFO (expected_type)))
1237 return;
1239 op = TREE_OPERAND (op, 0);
1240 base = get_ref_base_and_extent (op, &offset, &size, &max_size);
1241 if (!DECL_P (base)
1242 || max_size == -1
1243 || max_size != size
1244 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
1245 || is_global_var (base))
1246 return;
1248 if (detect_type_change (op, base, expected_type, call, jfunc, offset))
1249 return;
1251 ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
1252 expected_type);
1255 /* Inspect the given TYPE and return true iff it has the same structure (the
1256 same number of fields of the same types) as a C++ member pointer. If
1257 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1258 corresponding fields there. */
1260 static bool
1261 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1263 tree fld;
1265 if (TREE_CODE (type) != RECORD_TYPE)
1266 return false;
1268 fld = TYPE_FIELDS (type);
1269 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1270 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1271 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1272 return false;
1274 if (method_ptr)
1275 *method_ptr = fld;
1277 fld = DECL_CHAIN (fld);
1278 if (!fld || INTEGRAL_TYPE_P (fld)
1279 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1280 return false;
1281 if (delta)
1282 *delta = fld;
1284 if (DECL_CHAIN (fld))
1285 return false;
1287 return true;
1290 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1291 return the rhs of its defining statement. Otherwise return RHS as it
1292 is. */
1294 static inline tree
1295 get_ssa_def_if_simple_copy (tree rhs)
1297 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1299 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1301 if (gimple_assign_single_p (def_stmt))
1302 rhs = gimple_assign_rhs1 (def_stmt);
1303 else
1304 break;
1306 return rhs;
1309 /* Simple linked list, describing known contents of an aggregate before a
1310 call. */
1312 struct ipa_known_agg_contents_list
1314 /* Offset and size of the described part of the aggregate. */
1315 HOST_WIDE_INT offset, size;
1316 /* Known constant value or NULL if the contents is known to be unknown. */
1317 tree constant;
1318 /* Pointer to the next structure in the list. */
1319 struct ipa_known_agg_contents_list *next;
1322 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1323 in ARG is filled in with constant values. ARG can either be an aggregate
1324 expression or a pointer to an aggregate. JFUNC is the jump function into
1325 which the constants are subsequently stored. */
1327 static void
1328 determine_known_aggregate_parts (gimple call, tree arg,
1329 struct ipa_jump_func *jfunc)
1331 struct ipa_known_agg_contents_list *list = NULL;
1332 int item_count = 0, const_count = 0;
1333 HOST_WIDE_INT arg_offset, arg_size;
1334 gimple_stmt_iterator gsi;
1335 tree arg_base;
1336 bool check_ref, by_ref;
1337 ao_ref r;
1339 /* The function operates in three stages. First, we prepare check_ref, r,
1340 arg_base and arg_offset based on what is actually passed as an actual
1341 argument. */
1343 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1345 by_ref = true;
1346 if (TREE_CODE (arg) == SSA_NAME)
1348 tree type_size;
1349 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)))))
1350 return;
1351 check_ref = true;
1352 arg_base = arg;
1353 arg_offset = 0;
1354 type_size = TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)));
1355 arg_size = tree_to_uhwi (type_size);
1356 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1358 else if (TREE_CODE (arg) == ADDR_EXPR)
1360 HOST_WIDE_INT arg_max_size;
1362 arg = TREE_OPERAND (arg, 0);
1363 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1364 &arg_max_size);
1365 if (arg_max_size == -1
1366 || arg_max_size != arg_size
1367 || arg_offset < 0)
1368 return;
1369 if (DECL_P (arg_base))
1371 tree size;
1372 check_ref = false;
1373 size = build_int_cst (integer_type_node, arg_size);
1374 ao_ref_init_from_ptr_and_size (&r, arg_base, size);
1376 else
1377 return;
1379 else
1380 return;
1382 else
1384 HOST_WIDE_INT arg_max_size;
1386 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1388 by_ref = false;
1389 check_ref = false;
1390 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1391 &arg_max_size);
1392 if (arg_max_size == -1
1393 || arg_max_size != arg_size
1394 || arg_offset < 0)
1395 return;
1397 ao_ref_init (&r, arg);
1400 /* Second stage walks back the BB, looks at individual statements and as long
1401 as it is confident of how the statements affect contents of the
1402 aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
1403 describing it. */
1404 gsi = gsi_for_stmt (call);
1405 gsi_prev (&gsi);
1406 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1408 struct ipa_known_agg_contents_list *n, **p;
1409 gimple stmt = gsi_stmt (gsi);
1410 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1411 tree lhs, rhs, lhs_base;
1412 bool partial_overlap;
1414 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1415 continue;
1416 if (!gimple_assign_single_p (stmt))
1417 break;
1419 lhs = gimple_assign_lhs (stmt);
1420 rhs = gimple_assign_rhs1 (stmt);
1421 if (!is_gimple_reg_type (rhs)
1422 || TREE_CODE (lhs) == BIT_FIELD_REF
1423 || contains_bitfld_component_ref_p (lhs))
1424 break;
1426 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1427 &lhs_max_size);
1428 if (lhs_max_size == -1
1429 || lhs_max_size != lhs_size
1430 || (lhs_offset < arg_offset
1431 && lhs_offset + lhs_size > arg_offset)
1432 || (lhs_offset < arg_offset + arg_size
1433 && lhs_offset + lhs_size > arg_offset + arg_size))
1434 break;
1436 if (check_ref)
1438 if (TREE_CODE (lhs_base) != MEM_REF
1439 || TREE_OPERAND (lhs_base, 0) != arg_base
1440 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1441 break;
1443 else if (lhs_base != arg_base)
1445 if (DECL_P (lhs_base))
1446 continue;
1447 else
1448 break;
1451 if (lhs_offset + lhs_size < arg_offset
1452 || lhs_offset >= (arg_offset + arg_size))
1453 continue;
1455 partial_overlap = false;
1456 p = &list;
1457 while (*p && (*p)->offset < lhs_offset)
1459 if ((*p)->offset + (*p)->size > lhs_offset)
1461 partial_overlap = true;
1462 break;
1464 p = &(*p)->next;
1466 if (partial_overlap)
1467 break;
1468 if (*p && (*p)->offset < lhs_offset + lhs_size)
1470 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1471 /* We already know this value is subsequently overwritten with
1472 something else. */
1473 continue;
1474 else
1475 /* Otherwise this is a partial overlap which we cannot
1476 represent. */
1477 break;
1480 rhs = get_ssa_def_if_simple_copy (rhs);
1481 n = XALLOCA (struct ipa_known_agg_contents_list);
1482 n->size = lhs_size;
1483 n->offset = lhs_offset;
1484 if (is_gimple_ip_invariant (rhs))
1486 n->constant = rhs;
1487 const_count++;
1489 else
1490 n->constant = NULL_TREE;
1491 n->next = *p;
1492 *p = n;
1494 item_count++;
1495 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1496 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1497 break;
1500 /* Third stage just goes over the list and creates an appropriate vector of
1501 ipa_agg_jf_item structures out of it, of sourse only if there are
1502 any known constants to begin with. */
1504 if (const_count)
1506 jfunc->agg.by_ref = by_ref;
1507 vec_alloc (jfunc->agg.items, const_count);
1508 while (list)
1510 if (list->constant)
1512 struct ipa_agg_jf_item item;
1513 item.offset = list->offset - arg_offset;
1514 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1515 item.value = unshare_expr_without_location (list->constant);
1516 jfunc->agg.items->quick_push (item);
1518 list = list->next;
1523 static tree
1524 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1526 int n;
1527 tree type = (e->callee
1528 ? TREE_TYPE (e->callee->decl)
1529 : gimple_call_fntype (e->call_stmt));
1530 tree t = TYPE_ARG_TYPES (type);
1532 for (n = 0; n < i; n++)
1534 if (!t)
1535 break;
1536 t = TREE_CHAIN (t);
1538 if (t)
1539 return TREE_VALUE (t);
1540 if (!e->callee)
1541 return NULL;
1542 t = DECL_ARGUMENTS (e->callee->decl);
1543 for (n = 0; n < i; n++)
1545 if (!t)
1546 return NULL;
1547 t = TREE_CHAIN (t);
1549 if (t)
1550 return TREE_TYPE (t);
1551 return NULL;
1554 /* Compute jump function for all arguments of callsite CS and insert the
1555 information in the jump_functions array in the ipa_edge_args corresponding
1556 to this callsite. */
1558 static void
1559 ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
1560 struct cgraph_edge *cs)
1562 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1563 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1564 gimple call = cs->call_stmt;
1565 int n, arg_num = gimple_call_num_args (call);
1567 if (arg_num == 0 || args->jump_functions)
1568 return;
1569 vec_safe_grow_cleared (args->jump_functions, arg_num);
1571 if (gimple_call_internal_p (call))
1572 return;
1573 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1574 return;
1576 for (n = 0; n < arg_num; n++)
1578 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1579 tree arg = gimple_call_arg (call, n);
1580 tree param_type = ipa_get_callee_param_type (cs, n);
1582 if (is_gimple_ip_invariant (arg))
1583 ipa_set_jf_constant (jfunc, arg, cs);
1584 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1585 && TREE_CODE (arg) == PARM_DECL)
1587 int index = ipa_get_param_decl_index (info, arg);
1589 gcc_assert (index >=0);
1590 /* Aggregate passed by value, check for pass-through, otherwise we
1591 will attempt to fill in aggregate contents later in this
1592 for cycle. */
1593 if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
1595 ipa_set_jf_simple_pass_through (jfunc, index, false, false);
1596 continue;
1599 else if (TREE_CODE (arg) == SSA_NAME)
1601 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1603 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1604 if (index >= 0)
1606 bool agg_p, type_p;
1607 agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
1608 call, arg);
1609 if (param_type && POINTER_TYPE_P (param_type))
1610 type_p = !detect_type_change_ssa (arg, TREE_TYPE (param_type),
1611 call, jfunc);
1612 else
1613 type_p = false;
1614 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1615 ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
1616 type_p);
1619 else
1621 gimple stmt = SSA_NAME_DEF_STMT (arg);
1622 if (is_gimple_assign (stmt))
1623 compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
1624 call, stmt, arg, param_type);
1625 else if (gimple_code (stmt) == GIMPLE_PHI)
1626 compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
1627 call, stmt, param_type);
1630 else
1631 compute_known_type_jump_func (arg, jfunc, call,
1632 param_type
1633 && POINTER_TYPE_P (param_type)
1634 ? TREE_TYPE (param_type)
1635 : NULL);
1637 if ((jfunc->type != IPA_JF_PASS_THROUGH
1638 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1639 && (jfunc->type != IPA_JF_ANCESTOR
1640 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1641 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1642 || (POINTER_TYPE_P (TREE_TYPE (arg)))))
1643 determine_known_aggregate_parts (call, arg, jfunc);
1647 /* Compute jump functions for all edges - both direct and indirect - outgoing
1648 from NODE. Also count the actual arguments in the process. */
1650 static void
1651 ipa_compute_jump_functions (struct cgraph_node *node,
1652 struct param_analysis_info *parms_ainfo)
1654 struct cgraph_edge *cs;
1656 for (cs = node->callees; cs; cs = cs->next_callee)
1658 struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
1659 NULL);
1660 /* We do not need to bother analyzing calls to unknown
1661 functions unless they may become known during lto/whopr. */
1662 if (!callee->definition && !flag_lto)
1663 continue;
1664 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
1667 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
1668 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
1671 /* If STMT looks like a statement loading a value from a member pointer formal
1672 parameter, return that parameter and store the offset of the field to
1673 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1674 might be clobbered). If USE_DELTA, then we look for a use of the delta
1675 field rather than the pfn. */
1677 static tree
1678 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
1679 HOST_WIDE_INT *offset_p)
1681 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1683 if (!gimple_assign_single_p (stmt))
1684 return NULL_TREE;
1686 rhs = gimple_assign_rhs1 (stmt);
1687 if (TREE_CODE (rhs) == COMPONENT_REF)
1689 ref_field = TREE_OPERAND (rhs, 1);
1690 rhs = TREE_OPERAND (rhs, 0);
1692 else
1693 ref_field = NULL_TREE;
1694 if (TREE_CODE (rhs) != MEM_REF)
1695 return NULL_TREE;
1696 rec = TREE_OPERAND (rhs, 0);
1697 if (TREE_CODE (rec) != ADDR_EXPR)
1698 return NULL_TREE;
1699 rec = TREE_OPERAND (rec, 0);
1700 if (TREE_CODE (rec) != PARM_DECL
1701 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1702 return NULL_TREE;
1703 ref_offset = TREE_OPERAND (rhs, 1);
1705 if (use_delta)
1706 fld = delta_field;
1707 else
1708 fld = ptr_field;
1709 if (offset_p)
1710 *offset_p = int_bit_position (fld);
1712 if (ref_field)
1714 if (integer_nonzerop (ref_offset))
1715 return NULL_TREE;
1716 return ref_field == fld ? rec : NULL_TREE;
1718 else
1719 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1720 : NULL_TREE;
1723 /* Returns true iff T is an SSA_NAME defined by a statement. */
1725 static bool
1726 ipa_is_ssa_with_stmt_def (tree t)
1728 if (TREE_CODE (t) == SSA_NAME
1729 && !SSA_NAME_IS_DEFAULT_DEF (t))
1730 return true;
1731 else
1732 return false;
1735 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1736 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1737 indirect call graph edge. */
1739 static struct cgraph_edge *
1740 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
1742 struct cgraph_edge *cs;
1744 cs = cgraph_edge (node, stmt);
1745 cs->indirect_info->param_index = param_index;
1746 cs->indirect_info->agg_contents = 0;
1747 cs->indirect_info->member_ptr = 0;
1748 return cs;
1751 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1752 (described by INFO). PARMS_AINFO is a pointer to a vector containing
1753 intermediate information about each formal parameter. Currently it checks
1754 whether the call calls a pointer that is a formal parameter and if so, the
1755 parameter is marked with the called flag and an indirect call graph edge
1756 describing the call is created. This is very simple for ordinary pointers
1757 represented in SSA but not-so-nice when it comes to member pointers. The
1758 ugly part of this function does nothing more than trying to match the
1759 pattern of such a call. An example of such a pattern is the gimple dump
1760 below, the call is on the last line:
1762 <bb 2>:
1763 f$__delta_5 = f.__delta;
1764 f$__pfn_24 = f.__pfn;
1767 <bb 2>:
1768 f$__delta_5 = MEM[(struct *)&f];
1769 f$__pfn_24 = MEM[(struct *)&f + 4B];
1771 and a few lines below:
1773 <bb 5>
1774 D.2496_3 = (int) f$__pfn_24;
1775 D.2497_4 = D.2496_3 & 1;
1776 if (D.2497_4 != 0)
1777 goto <bb 3>;
1778 else
1779 goto <bb 4>;
1781 <bb 6>:
1782 D.2500_7 = (unsigned int) f$__delta_5;
1783 D.2501_8 = &S + D.2500_7;
1784 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1785 D.2503_10 = *D.2502_9;
1786 D.2504_12 = f$__pfn_24 + -1;
1787 D.2505_13 = (unsigned int) D.2504_12;
1788 D.2506_14 = D.2503_10 + D.2505_13;
1789 D.2507_15 = *D.2506_14;
1790 iftmp.11_16 = (String:: *) D.2507_15;
1792 <bb 7>:
1793 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1794 D.2500_19 = (unsigned int) f$__delta_5;
1795 D.2508_20 = &S + D.2500_19;
1796 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1798 Such patterns are results of simple calls to a member pointer:
1800 int doprinting (int (MyString::* f)(int) const)
1802 MyString S ("somestring");
1804 return (S.*f)(4);
1807 Moreover, the function also looks for called pointers loaded from aggregates
1808 passed by value or reference. */
1810 static void
1811 ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1812 struct ipa_node_params *info,
1813 struct param_analysis_info *parms_ainfo,
1814 gimple call, tree target)
1816 gimple def;
1817 tree n1, n2;
1818 gimple d1, d2;
1819 tree rec, rec2, cond;
1820 gimple branch;
1821 int index;
1822 basic_block bb, virt_bb, join;
1823 HOST_WIDE_INT offset;
1824 bool by_ref;
1826 if (SSA_NAME_IS_DEFAULT_DEF (target))
1828 tree var = SSA_NAME_VAR (target);
1829 index = ipa_get_param_decl_index (info, var);
1830 if (index >= 0)
1831 ipa_note_param_call (node, index, call);
1832 return;
1835 def = SSA_NAME_DEF_STMT (target);
1836 if (gimple_assign_single_p (def)
1837 && ipa_load_from_parm_agg_1 (info->descriptors, parms_ainfo, def,
1838 gimple_assign_rhs1 (def), &index, &offset,
1839 NULL, &by_ref))
1841 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1842 if (cs->indirect_info->offset != offset)
1843 cs->indirect_info->outer_type = NULL;
1844 cs->indirect_info->offset = offset;
1845 cs->indirect_info->agg_contents = 1;
1846 cs->indirect_info->by_ref = by_ref;
1847 return;
1850 /* Now we need to try to match the complex pattern of calling a member
1851 pointer. */
1852 if (gimple_code (def) != GIMPLE_PHI
1853 || gimple_phi_num_args (def) != 2
1854 || !POINTER_TYPE_P (TREE_TYPE (target))
1855 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1856 return;
1858 /* First, we need to check whether one of these is a load from a member
1859 pointer that is a parameter to this function. */
1860 n1 = PHI_ARG_DEF (def, 0);
1861 n2 = PHI_ARG_DEF (def, 1);
1862 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1863 return;
1864 d1 = SSA_NAME_DEF_STMT (n1);
1865 d2 = SSA_NAME_DEF_STMT (n2);
1867 join = gimple_bb (def);
1868 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
1870 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
1871 return;
1873 bb = EDGE_PRED (join, 0)->src;
1874 virt_bb = gimple_bb (d2);
1876 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
1878 bb = EDGE_PRED (join, 1)->src;
1879 virt_bb = gimple_bb (d1);
1881 else
1882 return;
1884 /* Second, we need to check that the basic blocks are laid out in the way
1885 corresponding to the pattern. */
1887 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1888 || single_pred (virt_bb) != bb
1889 || single_succ (virt_bb) != join)
1890 return;
1892 /* Third, let's see that the branching is done depending on the least
1893 significant bit of the pfn. */
1895 branch = last_stmt (bb);
1896 if (!branch || gimple_code (branch) != GIMPLE_COND)
1897 return;
1899 if ((gimple_cond_code (branch) != NE_EXPR
1900 && gimple_cond_code (branch) != EQ_EXPR)
1901 || !integer_zerop (gimple_cond_rhs (branch)))
1902 return;
1904 cond = gimple_cond_lhs (branch);
1905 if (!ipa_is_ssa_with_stmt_def (cond))
1906 return;
1908 def = SSA_NAME_DEF_STMT (cond);
1909 if (!is_gimple_assign (def)
1910 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1911 || !integer_onep (gimple_assign_rhs2 (def)))
1912 return;
1914 cond = gimple_assign_rhs1 (def);
1915 if (!ipa_is_ssa_with_stmt_def (cond))
1916 return;
1918 def = SSA_NAME_DEF_STMT (cond);
1920 if (is_gimple_assign (def)
1921 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
1923 cond = gimple_assign_rhs1 (def);
1924 if (!ipa_is_ssa_with_stmt_def (cond))
1925 return;
1926 def = SSA_NAME_DEF_STMT (cond);
1929 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1930 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1931 == ptrmemfunc_vbit_in_delta),
1932 NULL);
1933 if (rec != rec2)
1934 return;
1936 index = ipa_get_param_decl_index (info, rec);
1937 if (index >= 0
1938 && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
1940 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1941 if (cs->indirect_info->offset != offset)
1942 cs->indirect_info->outer_type = NULL;
1943 cs->indirect_info->offset = offset;
1944 cs->indirect_info->agg_contents = 1;
1945 cs->indirect_info->member_ptr = 1;
1948 return;
1951 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1952 object referenced in the expression is a formal parameter of the caller
1953 (described by INFO), create a call note for the statement. */
1955 static void
1956 ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1957 struct ipa_node_params *info, gimple call,
1958 tree target)
1960 struct cgraph_edge *cs;
1961 struct cgraph_indirect_call_info *ii;
1962 struct ipa_jump_func jfunc;
1963 tree obj = OBJ_TYPE_REF_OBJECT (target);
1964 int index;
1965 HOST_WIDE_INT anc_offset;
1967 if (!flag_devirtualize)
1968 return;
1970 if (TREE_CODE (obj) != SSA_NAME)
1971 return;
1973 if (SSA_NAME_IS_DEFAULT_DEF (obj))
1975 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
1976 return;
1978 anc_offset = 0;
1979 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
1980 gcc_assert (index >= 0);
1981 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
1982 call, &jfunc))
1983 return;
1985 else
1987 gimple stmt = SSA_NAME_DEF_STMT (obj);
1988 tree expr;
1990 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
1991 if (!expr)
1992 return;
1993 index = ipa_get_param_decl_index (info,
1994 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
1995 gcc_assert (index >= 0);
1996 if (detect_type_change (obj, expr, obj_type_ref_class (target),
1997 call, &jfunc, anc_offset))
1998 return;
2001 cs = ipa_note_param_call (node, index, call);
2002 ii = cs->indirect_info;
2003 ii->offset = anc_offset;
2004 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2005 ii->otr_type = obj_type_ref_class (target);
2006 ii->polymorphic = 1;
2009 /* Analyze a call statement CALL whether and how it utilizes formal parameters
2010 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
2011 containing intermediate information about each formal parameter. */
2013 static void
2014 ipa_analyze_call_uses (struct cgraph_node *node,
2015 struct ipa_node_params *info,
2016 struct param_analysis_info *parms_ainfo, gimple call)
2018 tree target = gimple_call_fn (call);
2020 if (!target)
2021 return;
2022 if (TREE_CODE (target) == SSA_NAME)
2023 ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
2024 else if (virtual_method_call_p (target))
2025 ipa_analyze_virtual_call_uses (node, info, call, target);
2029 /* Analyze the call statement STMT with respect to formal parameters (described
2030 in INFO) of caller given by NODE. Currently it only checks whether formal
2031 parameters are called. PARMS_AINFO is a pointer to a vector containing
2032 intermediate information about each formal parameter. */
2034 static void
2035 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
2036 struct param_analysis_info *parms_ainfo, gimple stmt)
2038 if (is_gimple_call (stmt))
2039 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
2042 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2043 If OP is a parameter declaration, mark it as used in the info structure
2044 passed in DATA. */
2046 static bool
2047 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
2048 tree op, void *data)
2050 struct ipa_node_params *info = (struct ipa_node_params *) data;
2052 op = get_base_address (op);
2053 if (op
2054 && TREE_CODE (op) == PARM_DECL)
2056 int index = ipa_get_param_decl_index (info, op);
2057 gcc_assert (index >= 0);
2058 ipa_set_param_used (info, index, true);
2061 return false;
2064 /* Scan the function body of NODE and inspect the uses of formal parameters.
2065 Store the findings in various structures of the associated ipa_node_params
2066 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
2067 vector containing intermediate information about each formal parameter. */
2069 static void
2070 ipa_analyze_params_uses (struct cgraph_node *node,
2071 struct param_analysis_info *parms_ainfo)
2073 tree decl = node->decl;
2074 basic_block bb;
2075 struct function *func;
2076 gimple_stmt_iterator gsi;
2077 struct ipa_node_params *info = IPA_NODE_REF (node);
2078 int i;
/* Nothing to do for parameter-less functions or when this analysis has
   already been performed for NODE.  */
2080 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
2081 return;
2083 info->uses_analysis_done = 1;
/* When function specialization is not allowed for NODE, conservatively mark
   every parameter as used with an undescribed number of uses and bail out.  */
2084 if (ipa_func_spec_opts_forbid_analysis_p (node))
2086 for (i = 0; i < ipa_get_param_count (info); i++)
2088 ipa_set_param_used (info, i, true);
2089 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2091 return;
/* First pass: determine, per parameter, whether it is used and how many of
   its uses are "controlled" (i.e. appear directly as call arguments).  */
2094 for (i = 0; i < ipa_get_param_count (info); i++)
2096 tree parm = ipa_get_param (info, i);
2097 int controlled_uses = 0;
2099 /* For SSA regs see if parameter is used. For non-SSA we compute
2100 the flag during modification analysis. */
2101 if (is_gimple_reg (parm))
2103 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2104 parm);
2105 if (ddef && !has_zero_uses (ddef))
2107 imm_use_iterator imm_iter;
2108 use_operand_p use_p;
2110 ipa_set_param_used (info, i, true);
/* Count immediate uses of the default definition; any use that is not a
   call statement makes the count undescribable.  */
2111 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2112 if (!is_gimple_call (USE_STMT (use_p)))
2114 controlled_uses = IPA_UNDESCRIBED_USE;
2115 break;
2117 else
2118 controlled_uses++;
2120 else
2121 controlled_uses = 0;
2123 else
2124 controlled_uses = IPA_UNDESCRIBED_USE;
2125 ipa_set_controlled_uses (info, i, controlled_uses);
/* Second pass: walk every non-debug statement and every PHI node, analyzing
   call arguments and load/store/address references to the parameters.  */
2128 func = DECL_STRUCT_FUNCTION (decl);
2129 FOR_EACH_BB_FN (bb, func)
2131 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2133 gimple stmt = gsi_stmt (gsi);
2135 if (is_gimple_debug (stmt))
2136 continue;
2138 ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
2139 walk_stmt_load_store_addr_ops (stmt, info,
2140 visit_ref_for_mod_analysis,
2141 visit_ref_for_mod_analysis,
2142 visit_ref_for_mod_analysis);
2144 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2145 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
2146 visit_ref_for_mod_analysis,
2147 visit_ref_for_mod_analysis,
2148 visit_ref_for_mod_analysis);
2152 /* Free stuff in PARMS_AINFO, assume there are PARAM_COUNT parameters. */
2154 static void
2155 free_parms_ainfo (struct param_analysis_info *parms_ainfo, int param_count)
2157 int i;
2159 for (i = 0; i < param_count; i++)
2161 if (parms_ainfo[i].parm_visited_statements)
2162 BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
2163 if (parms_ainfo[i].pt_visited_statements)
2164 BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
2168 /* Initialize the array describing properties of of formal parameters
2169 of NODE, analyze their uses and compute jump functions associated
2170 with actual arguments of calls from within NODE. */
2172 void
2173 ipa_analyze_node (struct cgraph_node *node)
2175 struct ipa_node_params *info;
2176 struct param_analysis_info *parms_ainfo;
2177 int param_count;
2179 ipa_check_create_node_params ();
2180 ipa_check_create_edge_args ();
2181 info = IPA_NODE_REF (node);
2182 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
2183 ipa_initialize_node_params (node);
2185 param_count = ipa_get_param_count (info);
2186 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
2187 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
2189 ipa_analyze_params_uses (node, parms_ainfo);
2190 ipa_compute_jump_functions (node, parms_ainfo);
2192 free_parms_ainfo (parms_ainfo, param_count);
2193 pop_cfun ();
2196 /* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF
2197 attempt a type-based devirtualization. If successful, return the
2198 target function declaration, otherwise return NULL. */
2200 tree
2201 ipa_intraprocedural_devirtualization (gimple call)
2203 tree binfo, token, fndecl;
2204 struct ipa_jump_func jfunc;
2205 tree otr = gimple_call_fn (call);
2207 jfunc.type = IPA_JF_UNKNOWN;
2208 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
2209 call, obj_type_ref_class (otr));
2210 if (jfunc.type != IPA_JF_KNOWN_TYPE)
2211 return NULL_TREE;
2212 binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
2213 if (!binfo)
2214 return NULL_TREE;
2215 token = OBJ_TYPE_REF_TOKEN (otr);
2216 fndecl = gimple_get_virt_method_for_binfo (tree_to_uhwi (token),
2217 binfo);
2218 #ifdef ENABLE_CHECKING
2219 if (fndecl)
2220 gcc_assert (possible_polymorphic_call_target_p
2221 (otr, cgraph_get_node (fndecl)));
2222 #endif
2223 return fndecl;
2226 /* Update the jump function DST when the call graph edge corresponding to SRC is
2227 is being inlined, knowing that DST is of type ancestor and src of known
2228 type. */
2230 static void
2231 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2232 struct ipa_jump_func *dst)
2234 HOST_WIDE_INT combined_offset;
2235 tree combined_type;
2237 if (!ipa_get_jf_ancestor_type_preserved (dst))
2239 dst->type = IPA_JF_UNKNOWN;
2240 return;
2243 combined_offset = ipa_get_jf_known_type_offset (src)
2244 + ipa_get_jf_ancestor_offset (dst);
2245 combined_type = ipa_get_jf_ancestor_type (dst);
2247 ipa_set_jf_known_type (dst, combined_offset,
2248 ipa_get_jf_known_type_base_type (src),
2249 combined_type);
2252 /* Update the jump functions associated with call graph edge E when the call
2253 graph edge CS is being inlined, assuming that E->caller is already (possibly
2254 indirectly) inlined into CS->callee and that E has not been inlined. */
2256 static void
2257 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2258 struct cgraph_edge *e)
2260 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2261 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2262 int count = ipa_get_cs_argument_count (args);
2263 int i;
/* Compose each jump function of E with the corresponding jump function of
   the inlined edge CS.  */
2265 for (i = 0; i < count; i++)
2267 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2269 if (dst->type == IPA_JF_ANCESTOR)
2271 struct ipa_jump_func *src;
2272 int dst_fid = dst->value.ancestor.formal_id;
2274 /* Variable number of arguments can cause havoc if we try to access
2275 one that does not exist in the inlined edge. So make sure we
2276 don't. */
2277 if (dst_fid >= ipa_get_cs_argument_count (top))
2279 dst->type = IPA_JF_UNKNOWN;
2280 continue;
2283 src = ipa_get_ith_jump_func (top, dst_fid);
/* Take over aggregate values known at the inlined call site, shifting their
   offsets by the ancestor adjustment.  */
2285 if (src->agg.items
2286 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2288 struct ipa_agg_jf_item *item;
2289 int j;
2291 /* Currently we do not produce clobber aggregate jump functions,
2292 replace with merging when we do. */
2293 gcc_assert (!dst->agg.items);
2295 dst->agg.items = vec_safe_copy (src->agg.items);
2296 dst->agg.by_ref = src->agg.by_ref;
2297 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2298 item->offset -= dst->value.ancestor.offset;
/* Combine the ancestor DST with whatever SRC describes: a known type, a
   simple pass-through (re-point the formal id), another ancestor (add the
   offsets), or give up.  */
2301 if (src->type == IPA_JF_KNOWN_TYPE)
2302 combine_known_type_and_ancestor_jfs (src, dst);
2303 else if (src->type == IPA_JF_PASS_THROUGH
2304 && src->value.pass_through.operation == NOP_EXPR)
2306 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2307 dst->value.ancestor.agg_preserved &=
2308 src->value.pass_through.agg_preserved;
2309 dst->value.ancestor.type_preserved &=
2310 src->value.pass_through.type_preserved;
2312 else if (src->type == IPA_JF_ANCESTOR)
2314 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2315 dst->value.ancestor.offset += src->value.ancestor.offset;
2316 dst->value.ancestor.agg_preserved &=
2317 src->value.ancestor.agg_preserved;
2318 dst->value.ancestor.type_preserved &=
2319 src->value.ancestor.type_preserved;
2321 else
2322 dst->type = IPA_JF_UNKNOWN;
2324 else if (dst->type == IPA_JF_PASS_THROUGH)
2326 struct ipa_jump_func *src;
2327 /* We must check range due to calls with variable number of arguments
2328 and we cannot combine jump functions with operations. */
2329 if (dst->value.pass_through.operation == NOP_EXPR
2330 && (dst->value.pass_through.formal_id
2331 < ipa_get_cs_argument_count (top)))
2333 int dst_fid = dst->value.pass_through.formal_id;
2334 src = ipa_get_ith_jump_func (top, dst_fid);
2335 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
/* A simple pass-through composed with SRC simply becomes SRC, with
   preservation flags intersected where applicable.  */
2337 switch (src->type)
2339 case IPA_JF_UNKNOWN:
2340 dst->type = IPA_JF_UNKNOWN;
2341 break;
2342 case IPA_JF_KNOWN_TYPE:
2343 ipa_set_jf_known_type (dst,
2344 ipa_get_jf_known_type_offset (src),
2345 ipa_get_jf_known_type_base_type (src),
2346 ipa_get_jf_known_type_base_type (src));
2347 break;
2348 case IPA_JF_CONST:
2349 ipa_set_jf_cst_copy (dst, src);
2350 break;
2352 case IPA_JF_PASS_THROUGH:
2354 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2355 enum tree_code operation;
2356 operation = ipa_get_jf_pass_through_operation (src);
2358 if (operation == NOP_EXPR)
2360 bool agg_p, type_p;
2361 agg_p = dst_agg_p
2362 && ipa_get_jf_pass_through_agg_preserved (src);
2363 type_p = ipa_get_jf_pass_through_type_preserved (src)
2364 && ipa_get_jf_pass_through_type_preserved (dst);
2365 ipa_set_jf_simple_pass_through (dst, formal_id,
2366 agg_p, type_p);
2368 else
2370 tree operand = ipa_get_jf_pass_through_operand (src);
2371 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2372 operation);
2374 break;
2376 case IPA_JF_ANCESTOR:
2378 bool agg_p, type_p;
2379 agg_p = dst_agg_p
2380 && ipa_get_jf_ancestor_agg_preserved (src);
2381 type_p = ipa_get_jf_ancestor_type_preserved (src)
2382 && ipa_get_jf_pass_through_type_preserved (dst);
2383 ipa_set_ancestor_jf (dst,
2384 ipa_get_jf_ancestor_offset (src),
2385 ipa_get_jf_ancestor_type (src),
2386 ipa_get_jf_ancestor_formal_id (src),
2387 agg_p, type_p);
2388 break;
2390 default:
2391 gcc_unreachable ();
/* Take over aggregate values known at the inlined call site, no offset
   adjustment is needed for a pass-through.  */
2394 if (src->agg.items
2395 && (dst_agg_p || !src->agg.by_ref))
2397 /* Currently we do not produce clobber aggregate jump
2398 functions, replace with merging when we do. */
2399 gcc_assert (!dst->agg.items);
2401 dst->agg.by_ref = src->agg.by_ref;
2402 dst->agg.items = vec_safe_copy (src->agg.items);
2405 else
2406 dst->type = IPA_JF_UNKNOWN;
2411 /* If TARGET is an addr_expr of a function declaration, make it the destination
2412 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2414 struct cgraph_edge *
2415 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
2417 struct cgraph_node *callee;
2418 struct inline_edge_summary *es = inline_edge_summary (ie);
2419 bool unreachable = false;
/* Strip the address operator and try to canonicalize TARGET into a
   FUNCTION_DECL.  */
2421 if (TREE_CODE (target) == ADDR_EXPR)
2422 target = TREE_OPERAND (target, 0);
2423 if (TREE_CODE (target) != FUNCTION_DECL)
2425 target = canonicalize_constructor_val (target, NULL);
2426 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2428 if (ie->indirect_info->member_ptr)
2429 /* Member pointer call that goes through a VMT lookup. */
2430 return NULL;
/* The propagated value is provably not a function; redirect the call to
   __builtin_unreachable instead of giving up.  */
2432 if (dump_file)
2433 fprintf (dump_file, "ipa-prop: Discovered direct call to non-function"
2434 " in %s/%i, making it unreachable.\n",
2435 ie->caller->name (), ie->caller->order);
2436 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2437 callee = cgraph_get_create_node (target);
2438 unreachable = true;
2440 else
2441 callee = cgraph_get_node (target);
2443 else
2444 callee = cgraph_get_node (target);
2446 /* Because may-edges are not explicitely represented and vtable may be external,
2447 we may create the first reference to the object in the unit. */
2448 if (!callee || callee->global.inlined_to)
2451 /* We are better to ensure we can refer to it.
2452 In the case of static functions we are out of luck, since we already
2453 removed its body. In the case of public functions we may or may
2454 not introduce the reference. */
2455 if (!canonicalize_constructor_val (target, NULL)
2456 || !TREE_PUBLIC (target))
2458 if (dump_file)
2459 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2460 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2461 xstrdup (ie->caller->name ()),
2462 ie->caller->order,
2463 xstrdup (ie->callee->name ()),
2464 ie->callee->order);
2465 return NULL;
2467 callee = cgraph_get_create_node (target);
2469 ipa_check_create_node_params ();
2471 /* We can not make edges to inline clones. It is bug that someone removed
2472 the cgraph node too early. */
2473 gcc_assert (!callee->global.inlined_to);
2475 if (dump_file && !unreachable)
2477 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
2478 "(%s/%i -> %s/%i), for stmt ",
2479 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2480 xstrdup (ie->caller->name ()),
2481 ie->caller->order,
2482 xstrdup (callee->name ()),
2483 callee->order);
2484 if (ie->call_stmt)
2485 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2486 else
2487 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
/* Redirect the edge and compensate its size/time summary: a direct call is
   cheaper than an indirect one.  */
2489 ie = cgraph_make_edge_direct (ie, callee);
2490 es = inline_edge_summary (ie);
2491 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2492 - eni_size_weights.call_cost);
2493 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2494 - eni_time_weights.call_cost);
2496 return ie;
2499 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2500 return NULL if there is not any. BY_REF specifies whether the value has to
2501 be passed by reference or by value. */
2503 tree
2504 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2505 HOST_WIDE_INT offset, bool by_ref)
2507 struct ipa_agg_jf_item *item;
2508 int i;
2510 if (by_ref != agg->by_ref)
2511 return NULL;
2513 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2514 if (item->offset == offset)
2516 /* Currently we do not have clobber values, return NULL for them once
2517 we do. */
2518 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2519 return item->value;
2521 return NULL;
2524 /* Remove a reference to SYMBOL from the list of references of a node given by
2525 reference description RDESC. Return true if the reference has been
2526 successfully found and removed. */
2528 static bool
2529 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2531 struct ipa_ref *to_del;
2532 struct cgraph_edge *origin;
2534 origin = rdesc->cs;
2535 if (!origin)
2536 return false;
2537 to_del = ipa_find_reference (origin->caller, symbol,
2538 origin->call_stmt, origin->lto_stmt_uid);
2539 if (!to_del)
2540 return false;
2542 ipa_remove_reference (to_del);
2543 if (dump_file)
2544 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2545 xstrdup (origin->caller->name ()),
2546 origin->caller->order, xstrdup (symbol->name ()));
2547 return true;
2550 /* If JFUNC has a reference description with refcount different from
2551 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2552 NULL. JFUNC must be a constant jump function. */
2554 static struct ipa_cst_ref_desc *
2555 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2557 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2558 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2559 return rdesc;
2560 else
2561 return NULL;
2564 /* If the value of constant jump function JFUNC is an address of a function
2565 declaration, return the associated call graph node. Otherwise return
2566 NULL. */
2568 static cgraph_node *
2569 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2571 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2572 tree cst = ipa_get_jf_constant (jfunc);
2573 if (TREE_CODE (cst) != ADDR_EXPR
2574 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2575 return NULL;
2577 return cgraph_get_node (TREE_OPERAND (cst, 0));
2581 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2582 refcount and if it hits zero, remove reference to SYMBOL from the caller of
2583 the edge specified in the rdesc. Return false if either the symbol or the
2584 reference could not be found, otherwise return true. */
2586 static bool
2587 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2589 struct ipa_cst_ref_desc *rdesc;
2590 if (jfunc->type == IPA_JF_CONST
2591 && (rdesc = jfunc_rdesc_usable (jfunc))
2592 && --rdesc->refcount == 0)
2594 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
2595 if (!symbol)
2596 return false;
2598 return remove_described_reference (symbol, rdesc);
2600 return true;
2603 /* Try to find a destination for indirect edge IE that corresponds to a simple
2604 call or a call of a member function pointer and where the destination is a
2605 pointer formal parameter described by jump function JFUNC. If it can be
2606 determined, return the newly direct edge, otherwise return NULL.
2607 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2609 static struct cgraph_edge *
2610 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2611 struct ipa_jump_func *jfunc,
2612 struct ipa_node_params *new_root_info)
2614 struct cgraph_edge *cs;
2615 tree target;
2616 bool agg_contents = ie->indirect_info->agg_contents;
2618 if (ie->indirect_info->agg_contents)
2619 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2620 ie->indirect_info->offset,
2621 ie->indirect_info->by_ref);
2622 else
2623 target = ipa_value_from_jfunc (new_root_info, jfunc);
2624 if (!target)
2625 return NULL;
2626 cs = ipa_make_edge_direct_to_target (ie, target);
2628 if (cs && !agg_contents)
2630 bool ok;
2631 gcc_checking_assert (cs->callee
2632 && (cs != ie
2633 || jfunc->type != IPA_JF_CONST
2634 || !cgraph_node_for_jfunc (jfunc)
2635 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2636 ok = try_decrement_rdesc_refcount (jfunc);
2637 gcc_checking_assert (ok);
2640 return cs;
2643 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2644 call based on a formal parameter which is described by jump function JFUNC
2645 and if it can be determined, make it direct and return the direct edge.
2646 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
2647 are relative to. */
2649 static struct cgraph_edge *
2650 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2651 struct ipa_jump_func *jfunc,
2652 struct ipa_node_params *new_root_info)
2654 tree binfo, target;
2656 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
2658 if (!binfo)
2659 return NULL;
2661 if (TREE_CODE (binfo) != TREE_BINFO)
2663 binfo = gimple_extract_devirt_binfo_from_cst
2664 (binfo, ie->indirect_info->otr_type);
2665 if (!binfo)
2666 return NULL;
2669 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
2670 ie->indirect_info->otr_type);
2671 if (binfo)
2672 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2673 binfo);
2674 else
2675 return NULL;
2677 if (target)
2679 #ifdef ENABLE_CHECKING
2680 gcc_assert (possible_polymorphic_call_target_p
2681 (ie, cgraph_get_node (target)));
2682 #endif
2683 return ipa_make_edge_direct_to_target (ie, target);
2685 else
2686 return NULL;
2689 /* Update the param called notes associated with NODE when CS is being inlined,
2690 assuming NODE is (potentially indirectly) inlined into CS->callee.
2691 Moreover, if the callee is discovered to be constant, create a new cgraph
2692 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2693 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
2695 static bool
2696 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2697 struct cgraph_node *node,
2698 vec<cgraph_edge_p> *new_edges)
2700 struct ipa_edge_args *top;
2701 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
2702 struct ipa_node_params *new_root_info;
2703 bool res = false;
2705 ipa_check_create_edge_args ();
2706 top = IPA_EDGE_REF (cs);
/* Lattices in the jump functions are relative to the root of the tree of
   inline clones CS belongs to.  */
2707 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
2708 ? cs->caller->global.inlined_to
2709 : cs->caller);
/* NEXT_IE is fetched up front because converting IE to a direct edge may
   unlink it from the list.  */
2711 for (ie = node->indirect_calls; ie; ie = next_ie)
2713 struct cgraph_indirect_call_info *ici = ie->indirect_info;
2714 struct ipa_jump_func *jfunc;
2715 int param_index;
2717 next_ie = ie->next_callee;
2719 if (ici->param_index == -1)
2720 continue;
2722 /* We must check range due to calls with variable number of arguments: */
2723 if (ici->param_index >= ipa_get_cs_argument_count (top))
2725 ici->param_index = -1;
2726 continue;
2729 param_index = ici->param_index;
2730 jfunc = ipa_get_ith_jump_func (top, param_index);
2732 if (!flag_indirect_inlining)
2733 new_direct_edge = NULL;
2734 else if (ici->polymorphic)
2735 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
2736 new_root_info);
2737 else
2738 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
2739 new_root_info);
2740 /* If speculation was removed, then we need to do nothing. */
2741 if (new_direct_edge && new_direct_edge != ie)
2743 new_direct_edge->indirect_inlining_edge = 1;
2744 top = IPA_EDGE_REF (cs);
2745 res = true;
2747 else if (new_direct_edge)
2749 new_direct_edge->indirect_inlining_edge = 1;
2750 if (new_direct_edge->call_stmt)
2751 new_direct_edge->call_stmt_cannot_inline_p
2752 = !gimple_check_call_matching_types (
2753 new_direct_edge->call_stmt,
2754 new_direct_edge->callee->decl, false);
2755 if (new_edges)
2757 new_edges->safe_push (new_direct_edge);
2758 res = true;
/* Re-fetch TOP: creating edges above may have reallocated the edge args
   vector and invalidated the old pointer.  */
2760 top = IPA_EDGE_REF (cs);
/* No direct edge was made; at least try to re-point the parameter index of
   the indirect call through pass-through and ancestor jump functions.  */
2762 else if (jfunc->type == IPA_JF_PASS_THROUGH
2763 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2765 if (ici->agg_contents
2766 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
2767 ici->param_index = -1;
2768 else
2769 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
2771 else if (jfunc->type == IPA_JF_ANCESTOR)
2773 if (ici->agg_contents
2774 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
2775 ici->param_index = -1;
2776 else
2778 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
2779 if (ipa_get_jf_ancestor_offset (jfunc))
2780 ici->outer_type = NULL;
2781 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
2784 else
2785 /* Either we can find a destination for this edge now or never. */
2786 ici->param_index = -1;
2789 return res;
2792 /* Recursively traverse subtree of NODE (including node) made of inlined
2793 cgraph_edges when CS has been inlined and invoke
2794 update_indirect_edges_after_inlining on all nodes and
2795 update_jump_functions_after_inlining on all non-inlined edges that lead out
2796 of this subtree. Newly discovered indirect edges will be added to
2797 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
2798 created. */
2800 static bool
2801 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
2802 struct cgraph_node *node,
2803 vec<cgraph_edge_p> *new_edges)
2805 struct cgraph_edge *e;
2806 bool res;
2808 res = update_indirect_edges_after_inlining (cs, node, new_edges);
2810 for (e = node->callees; e; e = e->next_callee)
2811 if (!e->inline_failed)
2812 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
2813 else
2814 update_jump_functions_after_inlining (cs, e);
2815 for (e = node->indirect_calls; e; e = e->next_callee)
2816 update_jump_functions_after_inlining (cs, e);
2818 return res;
2821 /* Combine two controlled uses counts as done during inlining. */
2823 static int
2824 combine_controlled_uses_counters (int c, int d)
2826 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
2827 return IPA_UNDESCRIBED_USE;
2828 else
2829 return c + d - 1;
2832 /* Propagate number of controlled users from CS->caleee to the new root of the
2833 tree of inlined nodes. */
2835 static void
2836 propagate_controlled_uses (struct cgraph_edge *cs)
2838 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
2839 struct cgraph_node *new_root = cs->caller->global.inlined_to
2840 ? cs->caller->global.inlined_to : cs->caller;
2841 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
2842 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
2843 int count, i;
/* Only arguments that map onto formal parameters of the callee can carry
   controlled-use information.  */
2845 count = MIN (ipa_get_cs_argument_count (args),
2846 ipa_get_param_count (old_root_info));
2847 for (i = 0; i < count; i++)
2849 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2850 struct ipa_cst_ref_desc *rdesc;
2852 if (jf->type == IPA_JF_PASS_THROUGH)
2854 int src_idx, c, d;
2855 src_idx = ipa_get_jf_pass_through_formal_id (jf);
2856 c = ipa_get_controlled_uses (new_root_info, src_idx);
2857 d = ipa_get_controlled_uses (old_root_info, i);
2859 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
2860 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
2861 c = combine_controlled_uses_counters (c, d);
2862 ipa_set_controlled_uses (new_root_info, src_idx, c);
/* If the combined count dropped to zero in an IPA-CP clone, a reference
   created during cloning is no longer needed and can be removed.  */
2863 if (c == 0 && new_root_info->ipcp_orig_node)
2865 struct cgraph_node *n;
2866 struct ipa_ref *ref;
2867 tree t = new_root_info->known_vals[src_idx];
2869 if (t && TREE_CODE (t) == ADDR_EXPR
2870 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
2871 && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
2872 && (ref = ipa_find_reference (new_root,
2873 n, NULL, 0)))
2875 if (dump_file)
2876 fprintf (dump_file, "ipa-prop: Removing cloning-created "
2877 "reference from %s/%i to %s/%i.\n",
2878 xstrdup (new_root->name ()),
2879 new_root->order,
2880 xstrdup (n->name ()), n->order);
2881 ipa_remove_reference (ref);
2885 else if (jf->type == IPA_JF_CONST
2886 && (rdesc = jfunc_rdesc_usable (jf)))
2888 int d = ipa_get_controlled_uses (old_root_info, i);
2889 int c = rdesc->refcount;
2890 rdesc->refcount = combine_controlled_uses_counters (c, d);
/* All described uses of the constant are gone; remove the reference and
   also any cloning-created references in inline clones in between.  */
2891 if (rdesc->refcount == 0)
2893 tree cst = ipa_get_jf_constant (jf);
2894 struct cgraph_node *n;
2895 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
2896 && TREE_CODE (TREE_OPERAND (cst, 0))
2897 == FUNCTION_DECL);
2898 n = cgraph_get_node (TREE_OPERAND (cst, 0));
2899 if (n)
2901 struct cgraph_node *clone;
2902 bool ok;
2903 ok = remove_described_reference (n, rdesc);
2904 gcc_checking_assert (ok);
/* Walk up the chain of inline clones towards the caller of the edge the
   rdesc is associated with, dropping cloning-created references.  */
2906 clone = cs->caller;
2907 while (clone->global.inlined_to
2908 && clone != rdesc->cs->caller
2909 && IPA_NODE_REF (clone)->ipcp_orig_node)
2911 struct ipa_ref *ref;
2912 ref = ipa_find_reference (clone,
2913 n, NULL, 0);
2914 if (ref)
2916 if (dump_file)
2917 fprintf (dump_file, "ipa-prop: Removing "
2918 "cloning-created reference "
2919 "from %s/%i to %s/%i.\n",
2920 xstrdup (clone->name ()),
2921 clone->order,
2922 xstrdup (n->name ()),
2923 n->order);
2924 ipa_remove_reference (ref);
2926 clone = clone->callers->caller;
/* Arguments in excess of the callee's formal parameters cannot be matched;
   mark whatever they describe as having undescribed uses.  */
2933 for (i = ipa_get_param_count (old_root_info);
2934 i < ipa_get_cs_argument_count (args);
2935 i++)
2937 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2939 if (jf->type == IPA_JF_CONST)
2941 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
2942 if (rdesc)
2943 rdesc->refcount = IPA_UNDESCRIBED_USE;
2945 else if (jf->type == IPA_JF_PASS_THROUGH)
2946 ipa_set_controlled_uses (new_root_info,
2947 jf->value.pass_through.formal_id,
2948 IPA_UNDESCRIBED_USE);
2952 /* Update jump functions and call note functions on inlining the call site CS.
2953 CS is expected to lead to a node already cloned by
2954 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
2955 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were +
2956 created. */
2958 bool
2959 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
2960 vec<cgraph_edge_p> *new_edges)
2962 bool changed;
2963 /* Do nothing if the preparation phase has not been carried out yet
2964 (i.e. during early inlining). */
2965 if (!ipa_node_params_vector.exists ())
2966 return false;
2967 gcc_assert (ipa_edge_args_vector);
2969 propagate_controlled_uses (cs);
2970 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
2972 return changed;
2975 /* Frees all dynamically allocated structures that the argument info points
2976 to. */
2978 void
2979 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
2981 vec_free (args->jump_functions);
2982 memset (args, 0, sizeof (*args));
2985 /* Free all ipa_edge structures. */
2987 void
2988 ipa_free_all_edge_args (void)
2990 int i;
2991 struct ipa_edge_args *args;
2993 if (!ipa_edge_args_vector)
2994 return;
2996 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
2997 ipa_free_edge_args_substructures (args);
2999 vec_free (ipa_edge_args_vector);
3002 /* Frees all dynamically allocated structures that the param info points
3003 to. */
3005 void
3006 ipa_free_node_params_substructures (struct ipa_node_params *info)
3008 info->descriptors.release ();
3009 free (info->lattices);
3010 /* Lattice values and their sources are deallocated with their alocation
3011 pool. */
3012 info->known_vals.release ();
3013 memset (info, 0, sizeof (*info));
3016 /* Free all ipa_node_params structures. */
3018 void
3019 ipa_free_all_node_params (void)
3021 int i;
3022 struct ipa_node_params *info;
3024 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
3025 ipa_free_node_params_substructures (info);
3027 ipa_node_params_vector.release ();
3030 /* Set the aggregate replacements of NODE to be AGGVALS. */
3032 void
3033 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3034 struct ipa_agg_replacement_value *aggvals)
3036 if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
3037 vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);
3039 (*ipa_node_agg_replacements)[node->uid] = aggvals;
3042 /* Hook that is called by cgraph.c when an edge is removed. */
3044 static void
3045 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3047 struct ipa_edge_args *args;
3049 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3050 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3051 return;
3053 args = IPA_EDGE_REF (cs);
3054 if (args->jump_functions)
3056 struct ipa_jump_func *jf;
3057 int i;
3058 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3060 struct ipa_cst_ref_desc *rdesc;
3061 try_decrement_rdesc_refcount (jf);
3062 if (jf->type == IPA_JF_CONST
3063 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3064 && rdesc->cs == cs)
3065 rdesc->cs = NULL;
3069 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3072 /* Hook that is called by cgraph.c when a node is removed. */
3074 static void
3075 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3077 /* During IPA-CP updating we can be called on not-yet analyze clones. */
3078 if (ipa_node_params_vector.length () > (unsigned)node->uid)
3079 ipa_free_node_params_substructures (IPA_NODE_REF (node));
3080 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3081 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
3084 /* Hook that is called by cgraph.c when an edge is duplicated. */
3086 static void
3087 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3088 __attribute__((unused)) void *data)
3090 struct ipa_edge_args *old_args, *new_args;
3091 unsigned int i;
3093 ipa_check_create_edge_args ();
3095 old_args = IPA_EDGE_REF (src);
3096 new_args = IPA_EDGE_REF (dst);
3098 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
/* The shallow copy above shares aggregate item vectors and reference
   descriptions; give each copied jump function its own.  */
3100 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3102 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3103 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3105 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3107 if (src_jf->type == IPA_JF_CONST)
3109 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3111 if (!src_rdesc)
3112 dst_jf->value.constant.rdesc = NULL;
/* The edge is being redirected within the same caller (e.g. speculation):
   clone the IPA reference and create a fresh description for DST.  */
3113 else if (src->caller == dst->caller)
3115 struct ipa_ref *ref;
3116 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3117 gcc_checking_assert (n);
3118 ref = ipa_find_reference (src->caller, n,
3119 src->call_stmt, src->lto_stmt_uid);
3120 gcc_checking_assert (ref);
3121 ipa_clone_ref (ref, dst->caller, ref->stmt);
3123 gcc_checking_assert (ipa_refdesc_pool);
3124 struct ipa_cst_ref_desc *dst_rdesc
3125 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3126 dst_rdesc->cs = dst;
3127 dst_rdesc->refcount = src_rdesc->refcount;
3128 dst_rdesc->next_duplicate = NULL;
3129 dst_jf->value.constant.rdesc = dst_rdesc;
/* SRC owns its description: make a new one for DST and chain it on the
   duplicate list so inline-clone copies can find it later.  */
3131 else if (src_rdesc->cs == src)
3133 struct ipa_cst_ref_desc *dst_rdesc;
3134 gcc_checking_assert (ipa_refdesc_pool);
3135 dst_rdesc
3136 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3137 dst_rdesc->cs = dst;
3138 dst_rdesc->refcount = src_rdesc->refcount;
3139 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3140 src_rdesc->next_duplicate = dst_rdesc;
3141 dst_jf->value.constant.rdesc = dst_rdesc;
3143 else
3145 struct ipa_cst_ref_desc *dst_rdesc;
3146 /* This can happen during inlining, when a JFUNC can refer to a
3147 reference taken in a function up in the tree of inline clones.
3148 We need to find the duplicate that refers to our tree of
3149 inline clones. */
3151 gcc_assert (dst->caller->global.inlined_to);
3152 for (dst_rdesc = src_rdesc->next_duplicate;
3153 dst_rdesc;
3154 dst_rdesc = dst_rdesc->next_duplicate)
3156 struct cgraph_node *top;
3157 top = dst_rdesc->cs->caller->global.inlined_to
3158 ? dst_rdesc->cs->caller->global.inlined_to
3159 : dst_rdesc->cs->caller;
3160 if (dst->caller->global.inlined_to == top)
3161 break;
3163 gcc_assert (dst_rdesc);
3164 dst_jf->value.constant.rdesc = dst_rdesc;
3170 /* Hook that is called by cgraph.c when a node is duplicated. */
3172 static void
3173 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
3174 ATTRIBUTE_UNUSED void *data)
3176 struct ipa_node_params *old_info, *new_info;
3177 struct ipa_agg_replacement_value *old_av, *new_av;
3179 ipa_check_create_node_params ();
3180 old_info = IPA_NODE_REF (src);
3181 new_info = IPA_NODE_REF (dst);
3183 new_info->descriptors = old_info->descriptors.copy ();
3184 new_info->lattices = NULL;
3185 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3187 new_info->uses_analysis_done = old_info->uses_analysis_done;
3188 new_info->node_enqueued = old_info->node_enqueued;
3190 old_av = ipa_get_agg_replacements_for_node (src);
3191 if (!old_av)
3192 return;
3194 new_av = NULL;
3195 while (old_av)
3197 struct ipa_agg_replacement_value *v;
3199 v = ggc_alloc_ipa_agg_replacement_value ();
3200 memcpy (v, old_av, sizeof (*v));
3201 v->next = new_av;
3202 new_av = v;
3203 old_av = old_av->next;
3205 ipa_set_node_agg_value_chain (dst, new_av);
/* Analyze newly added function into callgraph.  Callback for the cgraph
   function-insertion hook registered by ipa_register_cgraph_hooks; DATA is
   the hook's closure argument and is unused here.  */

static void
ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  ipa_analyze_node (node);
}
3217 /* Register our cgraph hooks if they are not already there. */
3219 void
3220 ipa_register_cgraph_hooks (void)
3222 if (!edge_removal_hook_holder)
3223 edge_removal_hook_holder =
3224 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3225 if (!node_removal_hook_holder)
3226 node_removal_hook_holder =
3227 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
3228 if (!edge_duplication_hook_holder)
3229 edge_duplication_hook_holder =
3230 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3231 if (!node_duplication_hook_holder)
3232 node_duplication_hook_holder =
3233 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
3234 function_insertion_hook_holder =
3235 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
/* Unregister our cgraph hooks.  Each holder is cleared so that a later
   ipa_register_cgraph_hooks call can register the hooks again.  */

static void
ipa_unregister_cgraph_hooks (void)
{
  cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
  edge_removal_hook_holder = NULL;
  cgraph_remove_node_removal_hook (node_removal_hook_holder);
  node_removal_hook_holder = NULL;
  cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
  edge_duplication_hook_holder = NULL;
  cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
  node_duplication_hook_holder = NULL;
  cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
  function_insertion_hook_holder = NULL;
}
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after ipa-cp.  */

void
ipa_free_all_structures_after_ipa_cp (void)
{
  /* When optimizing, the structures are kept alive for later passes and are
     released by ipa_free_all_structures_after_iinln instead -- presumably
     after indirect inlining has consumed them; TODO(review): confirm
     against the pass manager.  */
  if (!optimize)
    {
      ipa_free_all_edge_args ();
      ipa_free_all_node_params ();
      free_alloc_pool (ipcp_sources_pool);
      free_alloc_pool (ipcp_values_pool);
      free_alloc_pool (ipcp_agg_lattice_pool);
      ipa_unregister_cgraph_hooks ();
      /* The rdesc pool may never have been created, hence the guard.  */
      if (ipa_refdesc_pool)
	free_alloc_pool (ipa_refdesc_pool);
    }
}
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after indirect inlining.  */

void
ipa_free_all_structures_after_iinln (void)
{
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
  /* Each pool is allocated lazily, so may still be NULL here.
     NOTE(review): ipa_free_all_structures_after_ipa_cp frees the first
     three pools without these guards -- verify whether that path can run
     before the pools exist.  */
  if (ipcp_sources_pool)
    free_alloc_pool (ipcp_sources_pool);
  if (ipcp_values_pool)
    free_alloc_pool (ipcp_values_pool);
  if (ipcp_agg_lattice_pool)
    free_alloc_pool (ipcp_agg_lattice_pool);
  if (ipa_refdesc_pool)
    free_alloc_pool (ipa_refdesc_pool);
}
3293 /* Print ipa_tree_map data structures of all functions in the
3294 callgraph to F. */
3296 void
3297 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3299 int i, count;
3300 struct ipa_node_params *info;
3302 if (!node->definition)
3303 return;
3304 info = IPA_NODE_REF (node);
3305 fprintf (f, " function %s/%i parameter descriptors:\n",
3306 node->name (), node->order);
3307 count = ipa_get_param_count (info);
3308 for (i = 0; i < count; i++)
3310 int c;
3312 ipa_dump_param (f, info, i);
3313 if (ipa_is_param_used (info, i))
3314 fprintf (f, " used");
3315 c = ipa_get_controlled_uses (info, i);
3316 if (c == IPA_UNDESCRIBED_USE)
3317 fprintf (f, " undescribed_use");
3318 else
3319 fprintf (f, " controlled_uses=%i", c);
3320 fprintf (f, "\n");
3324 /* Print ipa_tree_map data structures of all functions in the
3325 callgraph to F. */
3327 void
3328 ipa_print_all_params (FILE * f)
3330 struct cgraph_node *node;
3332 fprintf (f, "\nFunction parameters:\n");
3333 FOR_EACH_FUNCTION (node)
3334 ipa_print_node_params (f, node);
3337 /* Return a heap allocated vector containing formal parameters of FNDECL. */
3339 vec<tree>
3340 ipa_get_vector_of_formal_parms (tree fndecl)
3342 vec<tree> args;
3343 int count;
3344 tree parm;
3346 gcc_assert (!flag_wpa);
3347 count = count_formal_params (fndecl);
3348 args.create (count);
3349 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3350 args.quick_push (parm);
3352 return args;
3355 /* Return a heap allocated vector containing types of formal parameters of
3356 function type FNTYPE. */
3358 static inline vec<tree>
3359 get_vector_of_formal_parm_types (tree fntype)
3361 vec<tree> types;
3362 int count = 0;
3363 tree t;
3365 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3366 count++;
3368 types.create (count);
3369 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3370 types.quick_push (TREE_VALUE (t));
3372 return types;
/* Modify the function declaration FNDECL and its type according to the plan
   in ADJUSTMENTS.  It also sets base fields of individual adjustments
   structures to reflect the actual parameters being modified which are
   determined by the base_index field.  SYNTH_PARM_PREFIX (or "SYNTH" if
   NULL) names the artificial PARM_DECLs created for new parameters.  */

void
ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
			      const char *synth_parm_prefix)
{
  vec<tree> oparms, otypes;
  tree orig_type, new_type = NULL;
  tree old_arg_types, t, new_arg_types = NULL;
  tree parm, *link = &DECL_ARGUMENTS (fndecl);
  int i, len = adjustments.length ();
  tree new_reversed = NULL;
  bool care_for_types, last_parm_void;

  if (!synth_parm_prefix)
    synth_parm_prefix = "SYNTH";

  oparms = ipa_get_vector_of_formal_parms (fndecl);
  orig_type = TREE_TYPE (fndecl);
  old_arg_types = TYPE_ARG_TYPES (orig_type);

  /* The following test is an ugly hack, some functions simply don't have any
     arguments in their type.  This is probably a bug but well... */
  care_for_types = (old_arg_types != NULL_TREE);
  if (care_for_types)
    {
      /* A trailing void_type_node marks a non-variadic argument list.  */
      last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
			== void_type_node);
      otypes = get_vector_of_formal_parm_types (orig_type);
      if (last_parm_void)
	gcc_assert (oparms.length () + 1 == otypes.length ());
      else
	gcc_assert (oparms.length () == otypes.length ());
    }
  else
    {
      last_parm_void = false;
      otypes.create (0);
    }

  /* Rebuild DECL_ARGUMENTS (via LINK) and the TYPE_ARG_TYPES list
     (accumulated in reverse in NEW_ARG_TYPES) in one walk.  */
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      gcc_assert (link);

      adj = &adjustments[i];
      parm = oparms[adj->base_index];
      adj->base = parm;

      if (adj->copy_param)
	{
	  /* Parameter kept as-is: re-link the original PARM_DECL.  */
	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
				       new_arg_types);
	  *link = parm;
	  link = &DECL_CHAIN (parm);
	}
      else if (!adj->remove_param)
	{
	  /* New synthetic parameter replacing (part of) the original.  */
	  tree new_parm;
	  tree ptype;

	  if (adj->by_ref)
	    ptype = build_pointer_type (adj->type);
	  else
	    ptype = adj->type;

	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);

	  new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
				 ptype);
	  DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);

	  DECL_ARTIFICIAL (new_parm) = 1;
	  DECL_ARG_TYPE (new_parm) = ptype;
	  DECL_CONTEXT (new_parm) = fndecl;
	  TREE_USED (new_parm) = 1;
	  DECL_IGNORED_P (new_parm) = 1;
	  layout_decl (new_parm, 0);

	  adj->base = parm;
	  adj->reduction = new_parm;

	  *link = new_parm;

	  link = &DECL_CHAIN (new_parm);
	}
      /* Removed parameters are simply skipped.  */
    }

  *link = NULL_TREE;

  if (care_for_types)
    {
      new_reversed = nreverse (new_arg_types);
      if (last_parm_void)
	{
	  /* Re-attach the terminating void, or use it alone if every
	     parameter was removed.  */
	  if (new_reversed)
	    TREE_CHAIN (new_arg_types) = void_list_node;
	  else
	    new_reversed = void_list_node;
	}
    }

  /* Use copy_node to preserve as much as possible from original type
     (debug info, attribute lists etc.)
     Exception is METHOD_TYPEs must have THIS argument.
     When we are asked to remove it, we need to build new FUNCTION_TYPE
     instead.  */
  if (TREE_CODE (orig_type) != METHOD_TYPE
      || (adjustments[0].copy_param
	  && adjustments[0].base_index == 0))
    {
      new_type = build_distinct_type_copy (orig_type);
      TYPE_ARG_TYPES (new_type) = new_reversed;
    }
  else
    {
      new_type
	= build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
							 new_reversed));
      TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
      DECL_VINDEX (fndecl) = NULL_TREE;
    }

  /* When signature changes, we need to clear builtin info.  */
  if (DECL_BUILT_IN (fndecl))
    {
      DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
      DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
    }

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (orig_type);
  if (orig_type != t)
    {
      TYPE_MAIN_VARIANT (new_type) = t;
      TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_type;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_type) = new_type;
      TYPE_NEXT_VARIANT (new_type) = NULL;
    }

  TREE_TYPE (fndecl) = new_type;
  DECL_VIRTUAL_P (fndecl) = 0;
  otypes.release ();
  oparms.release ();
}
/* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
   If this is a directly recursive call, CS must be NULL.  Otherwise it must
   contain the corresponding call graph edge.  STMT is the call being
   rewritten; it is replaced by a newly built call statement.  */

void
ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
			   ipa_parm_adjustment_vec adjustments)
{
  struct cgraph_node *current_node = cgraph_get_node (current_function_decl);
  vec<tree> vargs;
  vec<tree, va_gc> **debug_args = NULL;
  gimple new_stmt;
  gimple_stmt_iterator gsi, prev_gsi;
  tree callee_decl;
  int i, len;

  len = adjustments.length ();
  vargs.create (len);
  callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
  /* References of the old statement are dropped now and re-recorded for
     the replacement (and any helper statements) at the end.  */
  ipa_remove_stmt_references (current_node, stmt);

  /* PREV_GSI remembers the statement before STMT so that statements
     inserted by force_gimple_operand_gsi below can be found again.  */
  gsi = gsi_for_stmt (stmt);
  prev_gsi = gsi;
  gsi_prev (&prev_gsi);
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;

      adj = &adjustments[i];

      if (adj->copy_param)
	{
	  /* Argument passed through unchanged.  */
	  tree arg = gimple_call_arg (stmt, adj->base_index);

	  vargs.quick_push (arg);
	}
      else if (!adj->remove_param)
	{
	  tree expr, base, off;
	  location_t loc;
	  unsigned int deref_align = 0;
	  bool deref_base = false;

	  /* We create a new parameter out of the value of the old one, we can
	     do the following kind of transformations:

	     - A scalar passed by reference is converted to a scalar passed by
	       value.  (adj->by_ref is false and the type of the original
	       actual argument is a pointer to a scalar).

	     - A part of an aggregate is passed instead of the whole aggregate.
	       The part can be passed either by value or by reference, this is
	       determined by value of adj->by_ref.  Moreover, the code below
	       handles both situations when the original aggregate is passed by
	       value (its type is not a pointer) and when it is passed by
	       reference (it is a pointer to an aggregate).

	     When the new argument is passed by reference (adj->by_ref is true)
	     it must be a part of an aggregate and therefore we form it by
	     simply taking the address of a reference inside the original
	     aggregate.  */

	  gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
	  base = gimple_call_arg (stmt, adj->base_index);
	  loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
			      : EXPR_LOCATION (base);

	  if (TREE_CODE (base) != ADDR_EXPR
	      && POINTER_TYPE_P (TREE_TYPE (base)))
	    /* Original argument is already a pointer: just offset it.  */
	    off = build_int_cst (adj->alias_ptr_type,
				 adj->offset / BITS_PER_UNIT);
	  else
	    {
	      HOST_WIDE_INT base_offset;
	      tree prev_base;
	      bool addrof;

	      if (TREE_CODE (base) == ADDR_EXPR)
		{
		  base = TREE_OPERAND (base, 0);
		  addrof = true;
		}
	      else
		addrof = false;
	      prev_base = base;
	      base = get_addr_base_and_unit_offset (base, &base_offset);
	      /* Aggregate arguments can have non-invariant addresses.  */
	      if (!base)
		{
		  base = build_fold_addr_expr (prev_base);
		  off = build_int_cst (adj->alias_ptr_type,
				       adj->offset / BITS_PER_UNIT);
		}
	      else if (TREE_CODE (base) == MEM_REF)
		{
		  if (!addrof)
		    {
		      /* Remember the alignment of the dereferenced base
			 for the !by_ref path below.  */
		      deref_base = true;
		      deref_align = TYPE_ALIGN (TREE_TYPE (base));
		    }
		  /* Fold the MEM_REF's own offset into OFF.  */
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
					 off);
		  base = TREE_OPERAND (base, 0);
		}
	      else
		{
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  base = build_fold_addr_expr (base);
		}
	    }

	  if (!adj->by_ref)
	    {
	      tree type = adj->type;
	      unsigned int align;
	      unsigned HOST_WIDE_INT misalign;

	      /* Compute the alignment actually guaranteed at BASE + OFF
		 and weaken TYPE's alignment if needed so the new MEM_REF
		 does not over-promise.  */
	      if (deref_base)
		{
		  align = deref_align;
		  misalign = 0;
		}
	      else
		{
		  get_pointer_alignment_1 (base, &align, &misalign);
		  if (TYPE_ALIGN (type) > align)
		    align = TYPE_ALIGN (type);
		}
	      misalign += (tree_to_double_int (off)
			   .sext (TYPE_PRECISION (TREE_TYPE (off))).low
			   * BITS_PER_UNIT);
	      misalign = misalign & (align - 1);
	      if (misalign != 0)
		align = (misalign & -misalign);
	      if (align < TYPE_ALIGN (type))
		type = build_aligned_type (type, align);
	      expr = fold_build2_loc (loc, MEM_REF, type, base, off);
	    }
	  else
	    {
	      expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
	      expr = build_fold_addr_expr (expr);
	    }

	  /* Gimplify the expression; helper statements land before GSI.  */
	  expr = force_gimple_operand_gsi (&gsi, expr,
					   adj->by_ref
					   || is_gimple_reg_type (adj->type),
					   NULL, true, GSI_SAME_STMT);
	  vargs.quick_push (expr);
	}
      /* For every argument that is not passed through unchanged, emit a
	 debug bind so the old parameter value stays visible to debuggers.  */
      if (!adj->copy_param && MAY_HAVE_DEBUG_STMTS)
	{
	  unsigned int ix;
	  tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
	  gimple def_temp;

	  arg = gimple_call_arg (stmt, adj->base_index);
	  if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
	    {
	      if (!fold_convertible_p (TREE_TYPE (origin), arg))
		continue;
	      arg = fold_convert_loc (gimple_location (stmt),
				      TREE_TYPE (origin), arg);
	    }
	  if (debug_args == NULL)
	    debug_args = decl_debug_args_insert (callee_decl);
	  /* The debug-args vector stores (origin, debug decl) pairs.  */
	  for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
	    if (ddecl == origin)
	      {
		ddecl = (**debug_args)[ix + 1];
		break;
	      }
	  if (ddecl == NULL)
	    {
	      ddecl = make_node (DEBUG_EXPR_DECL);
	      DECL_ARTIFICIAL (ddecl) = 1;
	      TREE_TYPE (ddecl) = TREE_TYPE (origin);
	      DECL_MODE (ddecl) = DECL_MODE (origin);

	      vec_safe_push (*debug_args, origin);
	      vec_safe_push (*debug_args, ddecl);
	    }
	  def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "replacing stmt:");
      print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
    }

  new_stmt = gimple_build_call_vec (callee_decl, vargs);
  vargs.release ();
  if (gimple_call_lhs (stmt))
    gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));

  gimple_set_block (new_stmt, gimple_block (stmt));
  if (gimple_has_location (stmt))
    gimple_set_location (new_stmt, gimple_location (stmt));
  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
  gimple_call_copy_flags (new_stmt, stmt);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "with stmt:");
      print_gimple_stmt (dump_file, new_stmt, 0, 0);
      fprintf (dump_file, "\n");
    }
  gsi_replace (&gsi, new_stmt, true);
  if (cs)
    cgraph_set_call_stmt (cs, new_stmt);
  /* Walk backwards from the new call re-recording references for it and
     the statements inserted above, stopping at PREV_GSI.
     NOTE(review): the `==' in the second disjunct looks suspicious -- with
     `==' the loop stops at the first inserted statement (leaving its
     references unrecorded) yet re-records PREV_GSI when nothing was
     inserted; `!=' would match the stated intent.  Verify against the
     upstream history before changing.  */
  do
    {
      ipa_record_stmt_references (current_node, gsi_stmt (gsi));
      gsi_prev (&gsi);
    }
  while ((gsi_end_p (prev_gsi) && !gsi_end_p (gsi))
	 || (!gsi_end_p (prev_gsi) && gsi_stmt (gsi) == gsi_stmt (prev_gsi)));

  update_ssa (TODO_update_ssa);
  free_dominance_info (CDI_DOMINATORS);
}
3761 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
3763 static bool
3764 index_in_adjustments_multiple_times_p (int base_index,
3765 ipa_parm_adjustment_vec adjustments)
3767 int i, len = adjustments.length ();
3768 bool one = false;
3770 for (i = 0; i < len; i++)
3772 struct ipa_parm_adjustment *adj;
3773 adj = &adjustments[i];
3775 if (adj->base_index == base_index)
3777 if (one)
3778 return true;
3779 else
3780 one = true;
3783 return false;
/* Return adjustments that should have the same effect on function parameters
   and call arguments as if they were first changed according to adjustments in
   INNER and then by adjustments in OUTER.  OUTER's base_index values refer to
   the parameter list produced by INNER, so they are translated back to the
   original indices via TMP.  The returned vector is newly allocated.  */

ipa_parm_adjustment_vec
ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
			 ipa_parm_adjustment_vec outer)
{
  int i, outlen = outer.length ();
  int inlen = inner.length ();
  int removals = 0;
  ipa_parm_adjustment_vec adjustments, tmp;

  /* TMP holds the non-removed inner adjustments; its index space is the
     one OUTER's base_index values were computed against.  */
  tmp.create (inlen);
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n;
      n = &inner[i];

      if (n->remove_param)
	removals++;
      else
	tmp.quick_push (*n);
    }

  /* Room for every outer adjustment plus the inner removals re-added at
     the end.  */
  adjustments.create (outlen + removals);
  for (i = 0; i < outlen; i++)
    {
      struct ipa_parm_adjustment r;
      struct ipa_parm_adjustment *out = &outer[i];
      struct ipa_parm_adjustment *in = &tmp[out->base_index];

      memset (&r, 0, sizeof (r));
      gcc_assert (!in->remove_param);
      if (out->remove_param)
	{
	  /* Only emit a removal if this is the sole reference to the
	     original parameter; otherwise other combined entries still
	     use it.  */
	  if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
	    {
	      r.remove_param = true;
	      adjustments.quick_push (r);
	    }
	  continue;
	}

      /* Translate back to the original parameter index.  */
      r.base_index = in->base_index;
      r.type = out->type;

      /* FIXME:  Create nonlocal value too.  */

      if (in->copy_param && out->copy_param)
	r.copy_param = true;
      else if (in->copy_param)
	r.offset = out->offset;
      else if (out->copy_param)
	r.offset = in->offset;
      else
	/* Both stages replaced a piece of the parameter; offsets add up.  */
	r.offset = in->offset + out->offset;
      adjustments.quick_push (r);
    }

  /* Parameters removed by INNER stay removed in the combination.  */
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n = &inner[i];

      if (n->remove_param)
	adjustments.quick_push (*n);
    }

  tmp.release ();
  return adjustments;
}
/* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
   friendly way, assuming they are meant to be applied to FNDECL.  One entry
   is printed per line; FILE is the output stream.  */

void
ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
			    tree fndecl)
{
  int i, len = adjustments.length ();
  bool first = true;
  vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);

  fprintf (file, "IPA param adjustments: ");
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      adj = &adjustments[i];

      /* Continuation lines after the first are indented under the
	 header.  */
      if (!first)
	fprintf (file, " ");
      else
	first = false;

      fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
      print_generic_expr (file, parms[adj->base_index], 0);
      if (adj->base)
	{
	  fprintf (file, ", base: ");
	  print_generic_expr (file, adj->base, 0);
	}
      if (adj->reduction)
	{
	  fprintf (file, ", reduction: ");
	  print_generic_expr (file, adj->reduction, 0);
	}
      if (adj->new_ssa_base)
	{
	  fprintf (file, ", new_ssa_base: ");
	  print_generic_expr (file, adj->new_ssa_base, 0);
	}

      if (adj->copy_param)
	fprintf (file, ", copy_param");
      else if (adj->remove_param)
	fprintf (file, ", remove_param");
      else
	fprintf (file, ", offset %li", (long) adj->offset);
      if (adj->by_ref)
	fprintf (file, ", by_ref");
      print_node_brief (file, ", type: ", adj->type, 0);
      fprintf (file, "\n");
    }
  parms.release ();
}
3913 /* Dump the AV linked list. */
3915 void
3916 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
3918 bool comma = false;
3919 fprintf (f, " Aggregate replacements:");
3920 for (; av; av = av->next)
3922 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
3923 av->index, av->offset);
3924 print_generic_expr (f, av->value, 0);
3925 comma = true;
3927 fprintf (f, "\n");
/* Stream out jump function JUMP_FUNC to OB.  The layout here is the LTO
   wire format and must stay in sync with ipa_read_jump_function: a type
   tag, type-specific payload, then the aggregate jump function items.  */

static void
ipa_write_jump_function (struct output_block *ob,
			 struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;

  streamer_write_uhwi (ob, jump_func->type);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_KNOWN_TYPE:
      streamer_write_uhwi (ob, jump_func->value.known_type.offset);
      stream_write_tree (ob, jump_func->value.known_type.base_type, true);
      stream_write_tree (ob, jump_func->value.known_type.component_type, true);
      break;
    case IPA_JF_CONST:
      /* Constants with locations cannot be streamed here.  */
      gcc_assert (
	  EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
      stream_write_tree (ob, jump_func->value.constant.value, true);
      break;
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      if (jump_func->value.pass_through.operation == NOP_EXPR)
	{
	  /* Simple pass-through: formal id plus two preservation flags.  */
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
	  bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
	  streamer_write_bitpack (&bp);
	}
      else
	{
	  /* Arithmetic pass-through: the second operand and formal id.  */
	  stream_write_tree (ob, jump_func->value.pass_through.operand, true);
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	}
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      stream_write_tree (ob, jump_func->value.ancestor.type, true);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
      streamer_write_bitpack (&bp);
      break;
    }

  /* Aggregate items: count, by_ref flag (only when non-empty), then the
     (offset, value) pairs.  */
  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);
  if (count)
    {
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->agg.by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
    {
      streamer_write_uhwi (ob, item->offset);
      stream_write_tree (ob, item->value, true);
    }
}
/* Read in jump function JUMP_FUNC from IB.  Must mirror
   ipa_write_jump_function field for field.  CS is the edge the jump
   function belongs to (needed for constant rdesc bookkeeping) and DATA_IN
   supplies tree streaming context.  */

static void
ipa_read_jump_function (struct lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct cgraph_edge *cs,
			struct data_in *data_in)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;

  jftype = (enum jump_func_type) streamer_read_uhwi (ib);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      jump_func->type = IPA_JF_UNKNOWN;
      break;
    case IPA_JF_KNOWN_TYPE:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	tree base_type = stream_read_tree (ib, data_in);
	tree component_type = stream_read_tree (ib, data_in);

	ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
	break;
      }
    case IPA_JF_CONST:
      ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
      break;
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
	{
	  /* Simple pass-through: formal id plus two flags in a bitpack.  */
	  int formal_id = streamer_read_uhwi (ib);
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool agg_preserved = bp_unpack_value (&bp, 1);
	  bool type_preserved = bp_unpack_value (&bp, 1);
	  ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
					  type_preserved);
	}
      else
	{
	  /* Arithmetic pass-through: operand tree then formal id.  */
	  tree operand = stream_read_tree (ib, data_in);
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
					 operation);
	}
      break;
    case IPA_JF_ANCESTOR:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	tree type = stream_read_tree (ib, data_in);
	int formal_id = streamer_read_uhwi (ib);
	struct bitpack_d bp = streamer_read_bitpack (ib);
	bool agg_preserved = bp_unpack_value (&bp, 1);
	bool type_preserved = bp_unpack_value (&bp, 1);

	ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
			     type_preserved);
	break;
      }
    }

  /* Aggregate items: count, then (when non-empty) the by_ref flag and the
     (offset, value) pairs, matching the writer.  */
  count = streamer_read_uhwi (ib);
  vec_alloc (jump_func->agg.items, count);
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);

      for (i = 0; i < count; i++)
	{
	  struct ipa_agg_jf_item item;
	  item.offset = streamer_read_uhwi (ib);
	  item.value = stream_read_tree (ib, data_in);
	  jump_func->agg.items->quick_push (item);
	}
    }
}
/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  The layout (two HWIs, a bitpack of
   six flags, optional polymorphic extras) must stay in sync with
   ipa_read_indirect_edge_info.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  streamer_write_hwi (ob, ii->offset);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->member_ptr, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  bp_pack_value (&bp, ii->maybe_in_construction, 1);
  bp_pack_value (&bp, ii->maybe_derived_type, 1);
  streamer_write_bitpack (&bp);

  /* Virtual-call specific data is only present for polymorphic calls.  */
  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
      stream_write_tree (ob, ii->outer_type, true);
    }
}
4107 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4108 relevant to indirect inlining from IB. */
4110 static void
4111 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4112 struct data_in *data_in ATTRIBUTE_UNUSED,
4113 struct cgraph_edge *cs)
4115 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4116 struct bitpack_d bp;
4118 ii->param_index = (int) streamer_read_hwi (ib);
4119 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4120 bp = streamer_read_bitpack (ib);
4121 ii->polymorphic = bp_unpack_value (&bp, 1);
4122 ii->agg_contents = bp_unpack_value (&bp, 1);
4123 ii->member_ptr = bp_unpack_value (&bp, 1);
4124 ii->by_ref = bp_unpack_value (&bp, 1);
4125 ii->maybe_in_construction = bp_unpack_value (&bp, 1);
4126 ii->maybe_derived_type = bp_unpack_value (&bp, 1);
4127 if (ii->polymorphic)
4129 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4130 ii->otr_type = stream_read_tree (ib, data_in);
4131 ii->outer_type = stream_read_tree (ib, data_in);
/* Stream out NODE info to OB: the node reference, per-parameter move costs,
   used bits and controlled-uses counts, then the jump functions of all
   direct and indirect call edges.  Must stay in sync with
   ipa_read_node_info.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, ipa_get_param_count (info));
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
  bp = bitpack_create (ob->main_stream);
  /* Only fully analyzed, non-clone nodes may be streamed.  */
  gcc_assert (info->uses_analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
  streamer_write_bitpack (&bp);
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
      /* Indirect edges additionally carry their call-target info.  */
      ipa_write_indirect_edge_info (ob, e);
    }
}
/* Stream in NODE info from IB, mirroring ipa_write_node_info.  DATA_IN
   supplies the tree streaming context.  */

static void
ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
		    struct data_in *data_in)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  ipa_alloc_node_params (node, streamer_read_uhwi (ib));

  for (k = 0; k < ipa_get_param_count (info); k++)
    info->descriptors[k].move_cost = streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);
  /* The writer asserts uses_analysis_done for any node with parameters,
     so it can be reinstated here.  */
  if (ipa_get_param_count (info) != 0)
    info->uses_analysis_done = true;
  info->node_enqueued = false;
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);

      if (!count)
	continue;
      vec_safe_grow_cleared (args->jump_functions, count);

      for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				data_in);
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);

      if (count)
	{
	  vec_safe_grow_cleared (args->jump_functions, count);
	  for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	    ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				    data_in);
	}
      /* Indirect-edge info is present even for zero-argument calls.  */
      ipa_read_indirect_edge_info (ib, data_in, e);
    }
}
/* Write jump functions for nodes in SET.  Emits the LTO_section_jump_functions
   section: a count of analyzed nodes followed by one ipa_write_node_info
   record per node.  Does nothing when no IPA analysis data exists.  */

void
ipa_prop_write_jump_functions (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  if (!ipa_node_params_vector.exists ())
    return;

  ob = create_output_block (LTO_section_jump_functions);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->cgraph_node = NULL;
  /* First pass only counts the nodes that will be streamed, so the reader
     knows how many records to expect.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (cgraph_function_with_gimple_body_p (node)
	  && IPA_NODE_REF (node) != NULL)
	count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (cgraph_function_with_gimple_body_p (node)
	  && IPA_NODE_REF (node) != NULL)
	ipa_write_node_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
/* Read section in file FILE_DATA of length LEN with data DATA.  Parses one
   LTO_section_jump_functions section produced by
   ipa_prop_write_jump_functions and populates the node info of each
   referenced cgraph node.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
		       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  /* The section is laid out as header, cfg part, main stream, strings.  */
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  struct lto_input_block ib_main;
  unsigned int i;
  unsigned int count;

  LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
			header->main_size);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = cgraph (lto_symtab_encoder_deref (encoder, index));
      gcc_assert (node->definition);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
4320 /* Read ipcp jump functions. */
4322 void
4323 ipa_prop_read_jump_functions (void)
4325 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4326 struct lto_file_decl_data *file_data;
4327 unsigned int j = 0;
4329 ipa_check_create_node_params ();
4330 ipa_check_create_edge_args ();
4331 ipa_register_cgraph_hooks ();
4333 while ((file_data = file_data_vec[j++]))
4335 size_t len;
4336 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4338 if (data)
4339 ipa_prop_read_section (file_data, data, len);
/* After merging units, we can get mismatch in argument counts.
   Also decl merging might've rendered parameter lists obsolete.
   Also compute called_with_variable_arg info.  */

void
ipa_update_after_lto_read (void)
{
  /* (Re-)create the per-node and per-edge IPA info vectors so that they
     cover all symbols present after LTO symbol merging.  */
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
}
4354 void
4355 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
4357 int node_ref;
4358 unsigned int count = 0;
4359 lto_symtab_encoder_t encoder;
4360 struct ipa_agg_replacement_value *aggvals, *av;
4362 aggvals = ipa_get_agg_replacements_for_node (node);
4363 encoder = ob->decl_state->symtab_node_encoder;
4364 node_ref = lto_symtab_encoder_encode (encoder, node);
4365 streamer_write_uhwi (ob, node_ref);
4367 for (av = aggvals; av; av = av->next)
4368 count++;
4369 streamer_write_uhwi (ob, count);
4371 for (av = aggvals; av; av = av->next)
4373 struct bitpack_d bp;
4375 streamer_write_uhwi (ob, av->offset);
4376 streamer_write_uhwi (ob, av->index);
4377 stream_write_tree (ob, av->value, true);
4379 bp = bitpack_create (ob->main_stream);
4380 bp_pack_value (&bp, av->by_ref, 1);
4381 streamer_write_bitpack (&bp);
4385 /* Stream in the aggregate value replacement chain for NODE from IB. */
4387 static void
4388 read_agg_replacement_chain (struct lto_input_block *ib,
4389 struct cgraph_node *node,
4390 struct data_in *data_in)
4392 struct ipa_agg_replacement_value *aggvals = NULL;
4393 unsigned int count, i;
4395 count = streamer_read_uhwi (ib);
4396 for (i = 0; i <count; i++)
4398 struct ipa_agg_replacement_value *av;
4399 struct bitpack_d bp;
4401 av = ggc_alloc_ipa_agg_replacement_value ();
4402 av->offset = streamer_read_uhwi (ib);
4403 av->index = streamer_read_uhwi (ib);
4404 av->value = stream_read_tree (ib, data_in);
4405 bp = streamer_read_bitpack (ib);
4406 av->by_ref = bp_unpack_value (&bp, 1);
4407 av->next = aggvals;
4408 aggvals = av;
4410 ipa_set_node_agg_value_chain (node, aggvals);
4413 /* Write all aggregate replacement for nodes in set. */
4415 void
4416 ipa_prop_write_all_agg_replacement (void)
4418 struct cgraph_node *node;
4419 struct output_block *ob;
4420 unsigned int count = 0;
4421 lto_symtab_encoder_iterator lsei;
4422 lto_symtab_encoder_t encoder;
4424 if (!ipa_node_agg_replacements)
4425 return;
4427 ob = create_output_block (LTO_section_ipcp_transform);
4428 encoder = ob->decl_state->symtab_node_encoder;
4429 ob->cgraph_node = NULL;
4430 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4431 lsei_next_function_in_partition (&lsei))
4433 node = lsei_cgraph_node (lsei);
4434 if (cgraph_function_with_gimple_body_p (node)
4435 && ipa_get_agg_replacements_for_node (node) != NULL)
4436 count++;
4439 streamer_write_uhwi (ob, count);
4441 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4442 lsei_next_function_in_partition (&lsei))
4444 node = lsei_cgraph_node (lsei);
4445 if (cgraph_function_with_gimple_body_p (node)
4446 && ipa_get_agg_replacements_for_node (node) != NULL)
4447 write_agg_replacement_chain (ob, node);
4449 streamer_write_char_stream (ob->main_stream, 0);
4450 produce_asm (ob, NULL);
4451 destroy_output_block (ob);
4454 /* Read replacements section in file FILE_DATA of length LEN with data
4455 DATA. */
4457 static void
4458 read_replacements_section (struct lto_file_decl_data *file_data,
4459 const char *data,
4460 size_t len)
4462 const struct lto_function_header *header =
4463 (const struct lto_function_header *) data;
4464 const int cfg_offset = sizeof (struct lto_function_header);
4465 const int main_offset = cfg_offset + header->cfg_size;
4466 const int string_offset = main_offset + header->main_size;
4467 struct data_in *data_in;
4468 struct lto_input_block ib_main;
4469 unsigned int i;
4470 unsigned int count;
4472 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4473 header->main_size);
4475 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
4476 header->string_size, vNULL);
4477 count = streamer_read_uhwi (&ib_main);
4479 for (i = 0; i < count; i++)
4481 unsigned int index;
4482 struct cgraph_node *node;
4483 lto_symtab_encoder_t encoder;
4485 index = streamer_read_uhwi (&ib_main);
4486 encoder = file_data->symtab_node_encoder;
4487 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4488 gcc_assert (node->definition);
4489 read_agg_replacement_chain (&ib_main, node, data_in);
4491 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4492 len);
4493 lto_data_in_delete (data_in);
4496 /* Read IPA-CP aggregate replacements. */
4498 void
4499 ipa_prop_read_all_agg_replacement (void)
4501 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4502 struct lto_file_decl_data *file_data;
4503 unsigned int j = 0;
4505 while ((file_data = file_data_vec[j++]))
4507 size_t len;
4508 const char *data = lto_get_section_data (file_data,
4509 LTO_section_ipcp_transform,
4510 NULL, &len);
4511 if (data)
4512 read_replacements_section (file_data, data, len);
4516 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4517 NODE. */
4519 static void
4520 adjust_agg_replacement_values (struct cgraph_node *node,
4521 struct ipa_agg_replacement_value *aggval)
4523 struct ipa_agg_replacement_value *v;
4524 int i, c = 0, d = 0, *adj;
4526 if (!node->clone.combined_args_to_skip)
4527 return;
4529 for (v = aggval; v; v = v->next)
4531 gcc_assert (v->index >= 0);
4532 if (c < v->index)
4533 c = v->index;
4535 c++;
4537 adj = XALLOCAVEC (int, c);
4538 for (i = 0; i < c; i++)
4539 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
4541 adj[i] = -1;
4542 d++;
4544 else
4545 adj[i] = i - d;
4547 for (v = aggval; v; v = v->next)
4548 v->index = adj[v->index];
4552 /* Function body transformation phase. */
4554 unsigned int
4555 ipcp_transform_function (struct cgraph_node *node)
4557 vec<ipa_param_descriptor_t> descriptors = vNULL;
4558 struct param_analysis_info *parms_ainfo;
4559 struct ipa_agg_replacement_value *aggval;
4560 gimple_stmt_iterator gsi;
4561 basic_block bb;
4562 int param_count;
4563 bool cfg_changed = false, something_changed = false;
4565 gcc_checking_assert (cfun);
4566 gcc_checking_assert (current_function_decl);
4568 if (dump_file)
4569 fprintf (dump_file, "Modification phase of node %s/%i\n",
4570 node->name (), node->order);
4572 aggval = ipa_get_agg_replacements_for_node (node);
4573 if (!aggval)
4574 return 0;
4575 param_count = count_formal_params (node->decl);
4576 if (param_count == 0)
4577 return 0;
4578 adjust_agg_replacement_values (node, aggval);
4579 if (dump_file)
4580 ipa_dump_agg_replacement_values (dump_file, aggval);
4581 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
4582 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
4583 descriptors.safe_grow_cleared (param_count);
4584 ipa_populate_param_decls (node, descriptors);
4586 FOR_EACH_BB (bb)
4587 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4589 struct ipa_agg_replacement_value *v;
4590 gimple stmt = gsi_stmt (gsi);
4591 tree rhs, val, t;
4592 HOST_WIDE_INT offset, size;
4593 int index;
4594 bool by_ref, vce;
4596 if (!gimple_assign_load_p (stmt))
4597 continue;
4598 rhs = gimple_assign_rhs1 (stmt);
4599 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
4600 continue;
4602 vce = false;
4603 t = rhs;
4604 while (handled_component_p (t))
4606 /* V_C_E can do things like convert an array of integers to one
4607 bigger integer and similar things we do not handle below. */
4608 if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
4610 vce = true;
4611 break;
4613 t = TREE_OPERAND (t, 0);
4615 if (vce)
4616 continue;
4618 if (!ipa_load_from_parm_agg_1 (descriptors, parms_ainfo, stmt,
4619 rhs, &index, &offset, &size, &by_ref))
4620 continue;
4621 for (v = aggval; v; v = v->next)
4622 if (v->index == index
4623 && v->offset == offset)
4624 break;
4625 if (!v
4626 || v->by_ref != by_ref
4627 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
4628 continue;
4630 gcc_checking_assert (is_gimple_ip_invariant (v->value));
4631 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
4633 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
4634 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
4635 else if (TYPE_SIZE (TREE_TYPE (rhs))
4636 == TYPE_SIZE (TREE_TYPE (v->value)))
4637 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
4638 else
4640 if (dump_file)
4642 fprintf (dump_file, " const ");
4643 print_generic_expr (dump_file, v->value, 0);
4644 fprintf (dump_file, " can't be converted to type of ");
4645 print_generic_expr (dump_file, rhs, 0);
4646 fprintf (dump_file, "\n");
4648 continue;
4651 else
4652 val = v->value;
4654 if (dump_file && (dump_flags & TDF_DETAILS))
4656 fprintf (dump_file, "Modifying stmt:\n ");
4657 print_gimple_stmt (dump_file, stmt, 0, 0);
4659 gimple_assign_set_rhs_from_tree (&gsi, val);
4660 update_stmt (stmt);
4662 if (dump_file && (dump_flags & TDF_DETAILS))
4664 fprintf (dump_file, "into:\n ");
4665 print_gimple_stmt (dump_file, stmt, 0, 0);
4666 fprintf (dump_file, "\n");
4669 something_changed = true;
4670 if (maybe_clean_eh_stmt (stmt)
4671 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4672 cfg_changed = true;
4675 (*ipa_node_agg_replacements)[node->uid] = NULL;
4676 free_parms_ainfo (parms_ainfo, param_count);
4677 descriptors.release ();
4679 if (!something_changed)
4680 return 0;
4681 else if (cfg_changed)
4682 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
4683 else
4684 return TODO_update_ssa_only_virtuals;