/* [official-gcc.git] gcc/ipa-prop.c
   blob eb464e4d51ea2a516448a342fbaa7a8616d4fb25
   ChangeLog excerpt: Makefile.in (C_COMMON_OBJS): Depend on c-cilkplus.o.  */
/* Interprocedural analyses.
   Copyright (C) 2005-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "langhooks.h"
#include "ggc.h"
#include "target.h"
#include "ipa-prop.h"
#include "bitmap.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "params.h"
#include "ipa-utils.h"
52 /* Intermediate information about a parameter that is only useful during the
53 run of ipa_analyze_node and is not kept afterwards. */
55 struct param_analysis_info
57 bool parm_modified, ref_modified, pt_modified;
58 bitmap parm_visited_statements, pt_visited_statements;
61 /* Vector where the parameter infos are actually stored. */
62 vec<ipa_node_params_t> ipa_node_params_vector;
63 /* Vector of known aggregate values in cloned nodes. */
64 vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
65 /* Vector where the parameter infos are actually stored. */
66 vec<ipa_edge_args_t, va_gc> *ipa_edge_args_vector;
/* Holders of ipa cgraph hooks, registered when IPA structures are built and
   unregistered when they are freed: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};
87 /* Allocation pool for reference descriptions. */
89 static alloc_pool ipa_refdesc_pool;
91 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
92 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
94 static bool
95 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
97 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
98 struct cl_optimization *os;
100 if (!fs_opts)
101 return false;
102 os = TREE_OPTIMIZATION (fs_opts);
103 return !os->x_optimize || !os->x_flag_ipa_cp;
106 /* Return index of the formal whose tree is PTREE in function which corresponds
107 to INFO. */
109 static int
110 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor_t> descriptors, tree ptree)
112 int i, count;
114 count = descriptors.length ();
115 for (i = 0; i < count; i++)
116 if (descriptors[i].decl == ptree)
117 return i;
119 return -1;
122 /* Return index of the formal whose tree is PTREE in function which corresponds
123 to INFO. */
126 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
128 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
131 /* Populate the param_decl field in parameter DESCRIPTORS that correspond to
132 NODE. */
134 static void
135 ipa_populate_param_decls (struct cgraph_node *node,
136 vec<ipa_param_descriptor_t> &descriptors)
138 tree fndecl;
139 tree fnargs;
140 tree parm;
141 int param_num;
143 fndecl = node->decl;
144 gcc_assert (gimple_has_body_p (fndecl));
145 fnargs = DECL_ARGUMENTS (fndecl);
146 param_num = 0;
147 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
149 descriptors[param_num].decl = parm;
150 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm));
151 param_num++;
155 /* Return how many formal parameters FNDECL has. */
157 static inline int
158 count_formal_params (tree fndecl)
160 tree parm;
161 int count = 0;
162 gcc_assert (gimple_has_body_p (fndecl));
164 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
165 count++;
167 return count;
170 /* Return the declaration of Ith formal parameter of the function corresponding
171 to INFO. Note there is no setter function as this array is built just once
172 using ipa_initialize_node_params. */
174 void
175 ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
177 fprintf (file, "param #%i", i);
178 if (info->descriptors[i].decl)
180 fprintf (file, " ");
181 print_generic_expr (file, info->descriptors[i].decl, 0);
185 /* Initialize the ipa_node_params structure associated with NODE
186 to hold PARAM_COUNT parameters. */
188 void
189 ipa_alloc_node_params (struct cgraph_node *node, int param_count)
191 struct ipa_node_params *info = IPA_NODE_REF (node);
193 if (!info->descriptors.exists () && param_count)
194 info->descriptors.safe_grow_cleared (param_count);
197 /* Initialize the ipa_node_params structure associated with NODE by counting
198 the function parameters, creating the descriptors and populating their
199 param_decls. */
201 void
202 ipa_initialize_node_params (struct cgraph_node *node)
204 struct ipa_node_params *info = IPA_NODE_REF (node);
206 if (!info->descriptors.exists ())
208 ipa_alloc_node_params (node, count_formal_params (node->decl));
209 ipa_populate_param_decls (node, info->descriptors);
213 /* Print the jump functions associated with call graph edge CS to file F. */
215 static void
216 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
218 int i, count;
220 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
221 for (i = 0; i < count; i++)
223 struct ipa_jump_func *jump_func;
224 enum jump_func_type type;
226 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
227 type = jump_func->type;
229 fprintf (f, " param %d: ", i);
230 if (type == IPA_JF_UNKNOWN)
231 fprintf (f, "UNKNOWN\n");
232 else if (type == IPA_JF_KNOWN_TYPE)
234 fprintf (f, "KNOWN TYPE: base ");
235 print_generic_expr (f, jump_func->value.known_type.base_type, 0);
236 fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
237 jump_func->value.known_type.offset);
238 print_generic_expr (f, jump_func->value.known_type.component_type, 0);
239 fprintf (f, "\n");
241 else if (type == IPA_JF_CONST)
243 tree val = jump_func->value.constant.value;
244 fprintf (f, "CONST: ");
245 print_generic_expr (f, val, 0);
246 if (TREE_CODE (val) == ADDR_EXPR
247 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
249 fprintf (f, " -> ");
250 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
253 fprintf (f, "\n");
255 else if (type == IPA_JF_PASS_THROUGH)
257 fprintf (f, "PASS THROUGH: ");
258 fprintf (f, "%d, op %s",
259 jump_func->value.pass_through.formal_id,
260 get_tree_code_name(jump_func->value.pass_through.operation));
261 if (jump_func->value.pass_through.operation != NOP_EXPR)
263 fprintf (f, " ");
264 print_generic_expr (f,
265 jump_func->value.pass_through.operand, 0);
267 if (jump_func->value.pass_through.agg_preserved)
268 fprintf (f, ", agg_preserved");
269 if (jump_func->value.pass_through.type_preserved)
270 fprintf (f, ", type_preserved");
271 fprintf (f, "\n");
273 else if (type == IPA_JF_ANCESTOR)
275 fprintf (f, "ANCESTOR: ");
276 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
277 jump_func->value.ancestor.formal_id,
278 jump_func->value.ancestor.offset);
279 print_generic_expr (f, jump_func->value.ancestor.type, 0);
280 if (jump_func->value.ancestor.agg_preserved)
281 fprintf (f, ", agg_preserved");
282 if (jump_func->value.ancestor.type_preserved)
283 fprintf (f, ", type_preserved");
284 fprintf (f, "\n");
287 if (jump_func->agg.items)
289 struct ipa_agg_jf_item *item;
290 int j;
292 fprintf (f, " Aggregate passed by %s:\n",
293 jump_func->agg.by_ref ? "reference" : "value");
294 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
296 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
297 item->offset);
298 if (TYPE_P (item->value))
299 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
300 tree_low_cst (TYPE_SIZE (item->value), 1));
301 else
303 fprintf (f, "cst: ");
304 print_generic_expr (f, item->value, 0);
306 fprintf (f, "\n");
313 /* Print the jump functions of all arguments on all call graph edges going from
314 NODE to file F. */
316 void
317 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
319 struct cgraph_edge *cs;
321 fprintf (f, " Jump functions of caller %s/%i:\n", cgraph_node_name (node),
322 node->order);
323 for (cs = node->callees; cs; cs = cs->next_callee)
325 if (!ipa_edge_args_info_available_for_edge_p (cs))
326 continue;
328 fprintf (f, " callsite %s/%i -> %s/%i : \n",
329 xstrdup (cgraph_node_name (node)), node->order,
330 xstrdup (cgraph_node_name (cs->callee)),
331 cs->callee->order);
332 ipa_print_node_jump_functions_for_edge (f, cs);
335 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
337 struct cgraph_indirect_call_info *ii;
338 if (!ipa_edge_args_info_available_for_edge_p (cs))
339 continue;
341 ii = cs->indirect_info;
342 if (ii->agg_contents)
343 fprintf (f, " indirect %s callsite, calling param %i, "
344 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
345 ii->member_ptr ? "member ptr" : "aggregate",
346 ii->param_index, ii->offset,
347 ii->by_ref ? "by reference" : "by_value");
348 else
349 fprintf (f, " indirect %s callsite, calling param %i",
350 ii->polymorphic ? "polymorphic" : "simple", ii->param_index);
352 if (cs->call_stmt)
354 fprintf (f, ", for stmt ");
355 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
357 else
358 fprintf (f, "\n");
359 ipa_print_node_jump_functions_for_edge (f, cs);
363 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
365 void
366 ipa_print_all_jump_functions (FILE *f)
368 struct cgraph_node *node;
370 fprintf (f, "\nJump functions:\n");
371 FOR_EACH_FUNCTION (node)
373 ipa_print_node_jump_functions (f, node);
377 /* Set JFUNC to be a known type jump function. */
379 static void
380 ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
381 tree base_type, tree component_type)
383 gcc_assert (TREE_CODE (component_type) == RECORD_TYPE
384 && TYPE_BINFO (component_type));
385 jfunc->type = IPA_JF_KNOWN_TYPE;
386 jfunc->value.known_type.offset = offset,
387 jfunc->value.known_type.base_type = base_type;
388 jfunc->value.known_type.component_type = component_type;
391 /* Set JFUNC to be a copy of another jmp (to be used by jump function
392 combination code). The two functions will share their rdesc. */
394 static void
395 ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
396 struct ipa_jump_func *src)
399 gcc_checking_assert (src->type == IPA_JF_CONST);
400 dst->type = IPA_JF_CONST;
401 dst->value.constant = src->value.constant;
404 /* Set JFUNC to be a constant jmp function. */
406 static void
407 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
408 struct cgraph_edge *cs)
410 constant = unshare_expr (constant);
411 if (constant && EXPR_P (constant))
412 SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
413 jfunc->type = IPA_JF_CONST;
414 jfunc->value.constant.value = unshare_expr_without_location (constant);
416 if (TREE_CODE (constant) == ADDR_EXPR
417 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
419 struct ipa_cst_ref_desc *rdesc;
420 if (!ipa_refdesc_pool)
421 ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
422 sizeof (struct ipa_cst_ref_desc), 32);
424 rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
425 rdesc->cs = cs;
426 rdesc->next_duplicate = NULL;
427 rdesc->refcount = 1;
428 jfunc->value.constant.rdesc = rdesc;
430 else
431 jfunc->value.constant.rdesc = NULL;
434 /* Set JFUNC to be a simple pass-through jump function. */
435 static void
436 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
437 bool agg_preserved, bool type_preserved)
439 jfunc->type = IPA_JF_PASS_THROUGH;
440 jfunc->value.pass_through.operand = NULL_TREE;
441 jfunc->value.pass_through.formal_id = formal_id;
442 jfunc->value.pass_through.operation = NOP_EXPR;
443 jfunc->value.pass_through.agg_preserved = agg_preserved;
444 jfunc->value.pass_through.type_preserved = type_preserved;
447 /* Set JFUNC to be an arithmetic pass through jump function. */
449 static void
450 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
451 tree operand, enum tree_code operation)
453 jfunc->type = IPA_JF_PASS_THROUGH;
454 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
455 jfunc->value.pass_through.formal_id = formal_id;
456 jfunc->value.pass_through.operation = operation;
457 jfunc->value.pass_through.agg_preserved = false;
458 jfunc->value.pass_through.type_preserved = false;
461 /* Set JFUNC to be an ancestor jump function. */
463 static void
464 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
465 tree type, int formal_id, bool agg_preserved,
466 bool type_preserved)
468 jfunc->type = IPA_JF_ANCESTOR;
469 jfunc->value.ancestor.formal_id = formal_id;
470 jfunc->value.ancestor.offset = offset;
471 jfunc->value.ancestor.type = type;
472 jfunc->value.ancestor.agg_preserved = agg_preserved;
473 jfunc->value.ancestor.type_preserved = type_preserved;
476 /* Extract the acual BINFO being described by JFUNC which must be a known type
477 jump function. */
479 tree
480 ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
482 tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
483 if (!base_binfo)
484 return NULL_TREE;
485 return get_binfo_at_offset (base_binfo,
486 jfunc->value.known_type.offset,
487 jfunc->value.known_type.component_type);
490 /* Structure to be passed in between detect_type_change and
491 check_stmt_for_type_change. */
493 struct type_change_info
495 /* Offset into the object where there is the virtual method pointer we are
496 looking for. */
497 HOST_WIDE_INT offset;
498 /* The declaration or SSA_NAME pointer of the base that we are checking for
499 type change. */
500 tree object;
501 /* If we actually can tell the type that the object has changed to, it is
502 stored in this field. Otherwise it remains NULL_TREE. */
503 tree known_current_type;
504 /* Set to true if dynamic type change has been detected. */
505 bool type_maybe_changed;
506 /* Set to true if multiple types have been encountered. known_current_type
507 must be disregarded in that case. */
508 bool multiple_types_encountered;
511 /* Return true if STMT can modify a virtual method table pointer.
513 This function makes special assumptions about both constructors and
514 destructors which are all the functions that are allowed to alter the VMT
515 pointers. It assumes that destructors begin with assignment into all VMT
516 pointers and that constructors essentially look in the following way:
518 1) The very first thing they do is that they call constructors of ancestor
519 sub-objects that have them.
521 2) Then VMT pointers of this and all its ancestors is set to new values
522 corresponding to the type corresponding to the constructor.
524 3) Only afterwards, other stuff such as constructor of member sub-objects
525 and the code written by the user is run. Only this may include calling
526 virtual functions, directly or indirectly.
528 There is no way to call a constructor of an ancestor sub-object in any
529 other way.
531 This means that we do not have to care whether constructors get the correct
532 type information because they will always change it (in fact, if we define
533 the type to be given by the VMT pointer, it is undefined).
535 The most important fact to derive from the above is that if, for some
536 statement in the section 3, we try to detect whether the dynamic type has
537 changed, we can safely ignore all calls as we examine the function body
538 backwards until we reach statements in section 2 because these calls cannot
539 be ancestor constructors or destructors (if the input is not bogus) and so
540 do not change the dynamic type (this holds true only for automatically
541 allocated objects but at the moment we devirtualize only these). We then
542 must detect that statements in section 2 change the dynamic type and can try
543 to derive the new type. That is enough and we can stop, we will never see
544 the calls into constructors of sub-objects in this code. Therefore we can
545 safely ignore all call statements that we traverse.
548 static bool
549 stmt_may_be_vtbl_ptr_store (gimple stmt)
551 if (is_gimple_call (stmt))
552 return false;
553 else if (is_gimple_assign (stmt))
555 tree lhs = gimple_assign_lhs (stmt);
557 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
559 if (flag_strict_aliasing
560 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
561 return false;
563 if (TREE_CODE (lhs) == COMPONENT_REF
564 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
565 return false;
566 /* In the future we might want to use get_base_ref_and_offset to find
567 if there is a field corresponding to the offset and if so, proceed
568 almost like if it was a component ref. */
571 return true;
574 /* If STMT can be proved to be an assignment to the virtual method table
575 pointer of ANALYZED_OBJ and the type associated with the new table
576 identified, return the type. Otherwise return NULL_TREE. */
578 static tree
579 extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
581 HOST_WIDE_INT offset, size, max_size;
582 tree lhs, rhs, base;
584 if (!gimple_assign_single_p (stmt))
585 return NULL_TREE;
587 lhs = gimple_assign_lhs (stmt);
588 rhs = gimple_assign_rhs1 (stmt);
589 if (TREE_CODE (lhs) != COMPONENT_REF
590 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
591 || TREE_CODE (rhs) != ADDR_EXPR)
592 return NULL_TREE;
593 rhs = get_base_address (TREE_OPERAND (rhs, 0));
594 if (!rhs
595 || TREE_CODE (rhs) != VAR_DECL
596 || !DECL_VIRTUAL_P (rhs))
597 return NULL_TREE;
599 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
600 if (offset != tci->offset
601 || size != POINTER_SIZE
602 || max_size != POINTER_SIZE)
603 return NULL_TREE;
604 if (TREE_CODE (base) == MEM_REF)
606 if (TREE_CODE (tci->object) != MEM_REF
607 || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
608 || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
609 TREE_OPERAND (base, 1)))
610 return NULL_TREE;
612 else if (tci->object != base)
613 return NULL_TREE;
615 return DECL_CONTEXT (rhs);
618 /* Callback of walk_aliased_vdefs and a helper function for
619 detect_type_change to check whether a particular statement may modify
620 the virtual table pointer, and if possible also determine the new type of
621 the (sub-)object. It stores its result into DATA, which points to a
622 type_change_info structure. */
624 static bool
625 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
627 gimple stmt = SSA_NAME_DEF_STMT (vdef);
628 struct type_change_info *tci = (struct type_change_info *) data;
630 if (stmt_may_be_vtbl_ptr_store (stmt))
632 tree type;
633 type = extr_type_from_vtbl_ptr_store (stmt, tci);
634 if (tci->type_maybe_changed
635 && type != tci->known_current_type)
636 tci->multiple_types_encountered = true;
637 tci->known_current_type = type;
638 tci->type_maybe_changed = true;
639 return true;
641 else
642 return false;
647 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
648 callsite CALL) by looking for assignments to its virtual table pointer. If
649 it is, return true and fill in the jump function JFUNC with relevant type
650 information or set it to unknown. ARG is the object itself (not a pointer
651 to it, unless dereferenced). BASE is the base of the memory access as
652 returned by get_ref_base_and_extent, as is the offset. */
654 static bool
655 detect_type_change (tree arg, tree base, tree comp_type, gimple call,
656 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
658 struct type_change_info tci;
659 ao_ref ao;
661 gcc_checking_assert (DECL_P (arg)
662 || TREE_CODE (arg) == MEM_REF
663 || handled_component_p (arg));
664 /* Const calls cannot call virtual methods through VMT and so type changes do
665 not matter. */
666 if (!flag_devirtualize || !gimple_vuse (call)
667 /* Be sure expected_type is polymorphic. */
668 || !comp_type
669 || TREE_CODE (comp_type) != RECORD_TYPE
670 || !TYPE_BINFO (comp_type)
671 || !BINFO_VTABLE (TYPE_BINFO (comp_type)))
672 return false;
674 ao_ref_init (&ao, arg);
675 ao.base = base;
676 ao.offset = offset;
677 ao.size = POINTER_SIZE;
678 ao.max_size = ao.size;
680 tci.offset = offset;
681 tci.object = get_base_address (arg);
682 tci.known_current_type = NULL_TREE;
683 tci.type_maybe_changed = false;
684 tci.multiple_types_encountered = false;
686 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
687 &tci, NULL);
688 if (!tci.type_maybe_changed)
689 return false;
691 if (!tci.known_current_type
692 || tci.multiple_types_encountered
693 || offset != 0)
694 jfunc->type = IPA_JF_UNKNOWN;
695 else
696 ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);
698 return true;
701 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
702 SSA name (its dereference will become the base and the offset is assumed to
703 be zero). */
705 static bool
706 detect_type_change_ssa (tree arg, tree comp_type,
707 gimple call, struct ipa_jump_func *jfunc)
709 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
710 if (!flag_devirtualize
711 || !POINTER_TYPE_P (TREE_TYPE (arg)))
712 return false;
714 arg = build2 (MEM_REF, ptr_type_node, arg,
715 build_int_cst (ptr_type_node, 0));
717 return detect_type_change (arg, arg, comp_type, call, jfunc, 0);
720 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
721 boolean variable pointed to by DATA. */
723 static bool
724 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
725 void *data)
727 bool *b = (bool *) data;
728 *b = true;
729 return true;
732 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
733 a value known not to be modified in this function before reaching the
734 statement STMT. PARM_AINFO is a pointer to a structure containing temporary
735 information about the parameter. */
737 static bool
738 parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
739 gimple stmt, tree parm_load)
741 bool modified = false;
742 bitmap *visited_stmts;
743 ao_ref refd;
745 if (parm_ainfo && parm_ainfo->parm_modified)
746 return false;
748 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
749 ao_ref_init (&refd, parm_load);
750 /* We can cache visited statements only when parm_ainfo is available and when
751 we are looking at a naked load of the whole parameter. */
752 if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
753 visited_stmts = NULL;
754 else
755 visited_stmts = &parm_ainfo->parm_visited_statements;
756 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
757 visited_stmts);
758 if (parm_ainfo && modified)
759 parm_ainfo->parm_modified = true;
760 return !modified;
763 /* If STMT is an assignment that loads a value from an parameter declaration,
764 return the index of the parameter in ipa_node_params which has not been
765 modified. Otherwise return -1. */
767 static int
768 load_from_unmodified_param (vec<ipa_param_descriptor_t> descriptors,
769 struct param_analysis_info *parms_ainfo,
770 gimple stmt)
772 int index;
773 tree op1;
775 if (!gimple_assign_single_p (stmt))
776 return -1;
778 op1 = gimple_assign_rhs1 (stmt);
779 if (TREE_CODE (op1) != PARM_DECL)
780 return -1;
782 index = ipa_get_param_decl_index_1 (descriptors, op1);
783 if (index < 0
784 || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
785 : NULL, stmt, op1))
786 return -1;
788 return index;
791 /* Return true if memory reference REF loads data that are known to be
792 unmodified in this function before reaching statement STMT. PARM_AINFO, if
793 non-NULL, is a pointer to a structure containing temporary information about
794 PARM. */
796 static bool
797 parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
798 gimple stmt, tree ref)
800 bool modified = false;
801 ao_ref refd;
803 gcc_checking_assert (gimple_vuse (stmt));
804 if (parm_ainfo && parm_ainfo->ref_modified)
805 return false;
807 ao_ref_init (&refd, ref);
808 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
809 NULL);
810 if (parm_ainfo && modified)
811 parm_ainfo->ref_modified = true;
812 return !modified;
815 /* Return true if the data pointed to by PARM is known to be unmodified in this
816 function before reaching call statement CALL into which it is passed.
817 PARM_AINFO is a pointer to a structure containing temporary information
818 about PARM. */
820 static bool
821 parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
822 gimple call, tree parm)
824 bool modified = false;
825 ao_ref refd;
827 /* It's unnecessary to calculate anything about memory contnets for a const
828 function because it is not goin to use it. But do not cache the result
829 either. Also, no such calculations for non-pointers. */
830 if (!gimple_vuse (call)
831 || !POINTER_TYPE_P (TREE_TYPE (parm)))
832 return false;
834 if (parm_ainfo->pt_modified)
835 return false;
837 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
838 walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
839 parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
840 if (modified)
841 parm_ainfo->pt_modified = true;
842 return !modified;
845 /* Return true if we can prove that OP is a memory reference loading unmodified
846 data from an aggregate passed as a parameter and if the aggregate is passed
847 by reference, that the alias type of the load corresponds to the type of the
848 formal parameter (so that we can rely on this type for TBAA in callers).
849 INFO and PARMS_AINFO describe parameters of the current function (but the
850 latter can be NULL), STMT is the load statement. If function returns true,
851 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
852 within the aggregate and whether it is a load from a value passed by
853 reference respectively. */
855 static bool
856 ipa_load_from_parm_agg_1 (vec<ipa_param_descriptor_t> descriptors,
857 struct param_analysis_info *parms_ainfo, gimple stmt,
858 tree op, int *index_p, HOST_WIDE_INT *offset_p,
859 HOST_WIDE_INT *size_p, bool *by_ref_p)
861 int index;
862 HOST_WIDE_INT size, max_size;
863 tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);
865 if (max_size == -1 || max_size != size || *offset_p < 0)
866 return false;
868 if (DECL_P (base))
870 int index = ipa_get_param_decl_index_1 (descriptors, base);
871 if (index >= 0
872 && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
873 : NULL, stmt, op))
875 *index_p = index;
876 *by_ref_p = false;
877 if (size_p)
878 *size_p = size;
879 return true;
881 return false;
884 if (TREE_CODE (base) != MEM_REF
885 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
886 || !integer_zerop (TREE_OPERAND (base, 1)))
887 return false;
889 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
891 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
892 index = ipa_get_param_decl_index_1 (descriptors, parm);
894 else
896 /* This branch catches situations where a pointer parameter is not a
897 gimple register, for example:
899 void hip7(S*) (struct S * p)
901 void (*<T2e4>) (struct S *) D.1867;
902 struct S * p.1;
904 <bb 2>:
905 p.1_1 = p;
906 D.1867_2 = p.1_1->f;
907 D.1867_2 ();
908 gdp = &p;
911 gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
912 index = load_from_unmodified_param (descriptors, parms_ainfo, def);
915 if (index >= 0
916 && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
917 stmt, op))
919 *index_p = index;
920 *by_ref_p = true;
921 if (size_p)
922 *size_p = size;
923 return true;
925 return false;
928 /* Just like the previous function, just without the param_analysis_info
929 pointer, for users outside of this file. */
931 bool
932 ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
933 tree op, int *index_p, HOST_WIDE_INT *offset_p,
934 bool *by_ref_p)
936 return ipa_load_from_parm_agg_1 (info->descriptors, NULL, stmt, op, index_p,
937 offset_p, NULL, by_ref_p);
940 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
941 of an assignment statement STMT, try to determine whether we are actually
942 handling any of the following cases and construct an appropriate jump
943 function into JFUNC if so:
945 1) The passed value is loaded from a formal parameter which is not a gimple
946 register (most probably because it is addressable, the value has to be
947 scalar) and we can guarantee the value has not changed. This case can
948 therefore be described by a simple pass-through jump function. For example:
950 foo (int a)
952 int a.0;
954 a.0_2 = a;
955 bar (a.0_2);
957 2) The passed value can be described by a simple arithmetic pass-through
958 jump function. E.g.
960 foo (int a)
962 int D.2064;
964 D.2064_4 = a.1(D) + 4;
965 bar (D.2064_4);
967 This case can also occur in combination of the previous one, e.g.:
969 foo (int a, int z)
971 int a.0;
972 int D.2064;
974 a.0_3 = a;
975 D.2064_4 = a.0_3 + 4;
976 foo (D.2064_4);
978 3) The passed value is an address of an object within another one (which
979 also passed by reference). Such situations are described by an ancestor
980 jump function and describe situations such as:
982 B::foo() (struct B * const this)
984 struct A * D.1845;
986 D.1845_2 = &this_1(D)->D.1748;
987 A::bar (D.1845_2);
989 INFO is the structure describing individual parameters access different
990 stages of IPA optimizations. PARMS_AINFO contains the information that is
991 only needed for intraprocedural analysis. */
static void
compute_complex_assign_jump_func (struct ipa_node_params *info,
				  struct param_analysis_info *parms_ainfo,
				  struct ipa_jump_func *jfunc,
				  gimple call, gimple stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  /* Try to find the parameter the RHS value ultimately comes from, either
     directly (default-def SSA name) or through a load from an unmodified
     parameter.  */
  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (info->descriptors, parms_ainfo,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (info->descriptors, parms_ainfo, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
	{
	  /* A binary operation: only describable as an arithmetic
	     pass-through when the second operand is an IPA invariant and,
	     for non-comparisons, the result type matches the operand
	     type.  */
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  ipa_set_jf_arith_pass_through (jfunc, index, op2,
					 gimple_assign_rhs_code (stmt));
	}
      else if (gimple_assign_single_p (stmt))
	{
	  /* A plain copy: a simple pass-through jump function.  */
	  bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
						     call, tc_ssa);
	  bool type_p = false;

	  if (param_type && POINTER_TYPE_P (param_type))
	    type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
					      call, jfunc);
	  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
	}
      return;
    }

  /* Otherwise, see whether the RHS is the address of a record component of
     a parameter passed by reference, i.e. an ancestor jump function.  */
  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).low * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    {
      bool type_p = !detect_type_change (op1, base, TREE_TYPE (param_type),
					 call, jfunc, offset);
      if (type_p || jfunc->type == IPA_JF_UNKNOWN)
	ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index,
			     parm_ref_data_pass_through_p (&parms_ainfo[index],
							   call, ssa), type_p);
    }
}
1082 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1083 it looks like:
1085 iftmp.1_3 = &obj_2(D)->D.1762;
1087 The base of the MEM_REF must be a default definition SSA NAME of a
1088 parameter. Return NULL_TREE if it looks otherwise. If case of success, the
1089 whole MEM_REF expression is returned and the offset calculated from any
1090 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1091 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  /* Strip the ADDR_EXPR; OBJ keeps the whole referenced object.  */
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  /* The base of the MEM_REF must be a default-definition SSA name of a
     PARM_DECL.  */
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  /* Add the constant offset of the MEM_REF itself to the offset accumulated
     from handled components.  */
  *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
1127 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1128 statement PHI, try to find out whether NAME is in fact a
1129 multiple-inheritance typecast from a descendant into an ancestor of a formal
1130 parameter and thus can be described by an ancestor jump function and if so,
1131 write the appropriate function into JFUNC.
1133 Essentially we want to match the following pattern:
1135 if (obj_2(D) != 0B)
1136 goto <bb 3>;
1137 else
1138 goto <bb 4>;
1140 <bb 3>:
1141 iftmp.1_3 = &obj_2(D)->D.1762;
1143 <bb 4>:
1144 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1145 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1146 return D.1879_6; */
static void
compute_complex_ancestor_jump_func (struct ipa_node_params *info,
				    struct param_analysis_info *parms_ainfo,
				    struct ipa_jump_func *jfunc,
				    gimple call, gimple phi, tree param_type)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  /* One PHI argument must be the literal zero, the other the adjusted
     pointer (TMP).  */
  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  /* TMP must be defined by taking the address of an ancestor component of a
     parameter, in a block with a single predecessor.  */
  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  gcc_assert (index >= 0);

  /* The guarding condition must be exactly "parm != 0".  */
  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  /* Both predecessors of the PHI block must be the condition block or the
     assignment block for the pattern to match.  */
  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  bool type_p = false;
  if (param_type && POINTER_TYPE_P (param_type))
    type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
				  call, jfunc, offset);
  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
    ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index,
			 parm_ref_data_pass_through_p (&parms_ainfo[index],
						       call, parm), type_p);
}
1213 /* Given OP which is passed as an actual argument to a called function,
1214 determine if it is possible to construct a KNOWN_TYPE jump function for it
1215 and if so, create one and store it to JFUNC.
1216 EXPECTED_TYPE represents a type the argument should be in */
static void
compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
			      gimple call, tree expected_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  if (!flag_devirtualize
      || TREE_CODE (op) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE
      /* Be sure expected_type is polymorphic.  */
      || !expected_type
      || TREE_CODE (expected_type) != RECORD_TYPE
      || !TYPE_BINFO (expected_type)
      || !BINFO_VTABLE (TYPE_BINFO (expected_type)))
    return;

  /* The address must refer to a complete, known-size component of a local
     automatic record variable.  */
  op = TREE_OPERAND (op, 0);
  base = get_ref_base_and_extent (op, &offset, &size, &max_size);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
      || is_global_var (base))
    return;

  /* Do not create the jump function if the dynamic type may have changed
     between the point the object was created and the call.  */
  if (detect_type_change (op, base, expected_type, call, jfunc, offset))
    return;

  ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
			 expected_type);
}
1251 /* Inspect the given TYPE and return true iff it has the same structure (the
1252 same number of fields of the same types) as a C++ member pointer. If
1253 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1254 corresponding fields there. */
1256 static bool
1257 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1259 tree fld;
1261 if (TREE_CODE (type) != RECORD_TYPE)
1262 return false;
1264 fld = TYPE_FIELDS (type);
1265 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1266 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1267 || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
1268 return false;
1270 if (method_ptr)
1271 *method_ptr = fld;
1273 fld = DECL_CHAIN (fld);
1274 if (!fld || INTEGRAL_TYPE_P (fld)
1275 || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
1276 return false;
1277 if (delta)
1278 *delta = fld;
1280 if (DECL_CHAIN (fld))
1281 return false;
1283 return true;
1286 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1287 return the rhs of its defining statement. Otherwise return RHS as it
1288 is. */
1290 static inline tree
1291 get_ssa_def_if_simple_copy (tree rhs)
1293 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1295 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1297 if (gimple_assign_single_p (def_stmt))
1298 rhs = gimple_assign_rhs1 (def_stmt);
1299 else
1300 break;
1302 return rhs;
/* Simple linked list, describing known contents of an aggregate before
   call.  Nodes are kept sorted by increasing OFFSET.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size (in bits) of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
1318 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1319 in ARG is filled in with constant values. ARG can either be an aggregate
1320 expression or a pointer to an aggregate. JFUNC is the jump function into
1321 which the constants are subsequently stored. */
static void
determine_known_aggregate_parts (gimple call, tree arg,
				 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (TREE_TYPE (arg)))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  /* Pointer in an SSA name: track the whole pointed-to object.  */
	  tree type_size;
	  if (!host_integerp (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg))), 1))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)));
	  arg_size = tree_low_cst (type_size, 1);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  /* Address of (a part of) a declaration: track that region.  */
	  HOST_WIDE_INT arg_max_size;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					      &arg_max_size);
	  if (arg_max_size == -1
	      || arg_max_size != arg_size
	      || arg_offset < 0)
	    return;
	  if (DECL_P (arg_base))
	    {
	      tree size;
	      check_ref = false;
	      size = build_int_cst (integer_type_node, arg_size);
	      ao_ref_init_from_ptr_and_size (&r, arg_base, size);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      /* Aggregate passed by value.  */
      HOST_WIDE_INT arg_max_size;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					  &arg_max_size);
      if (arg_max_size == -1
	  || arg_max_size != arg_size
	  || arg_offset < 0)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as
     long as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
     describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;
      bool partial_overlap;

      /* Statements that cannot affect the tracked region are skipped;
	 any other statement we cannot analyze stops the walk.  */
      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
	continue;
      if (!gimple_assign_single_p (stmt))
	break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (rhs)
	  || TREE_CODE (lhs) == BIT_FIELD_REF
	  || contains_bitfld_component_ref_p (lhs))
	break;

      /* Give up on stores of unknown size or ones straddling the boundary
	 of the tracked region.  */
      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
					  &lhs_max_size);
      if (lhs_max_size == -1
	  || lhs_max_size != lhs_size
	  || (lhs_offset < arg_offset
	      && lhs_offset + lhs_size > arg_offset)
	  || (lhs_offset < arg_offset + arg_size
	      && lhs_offset + lhs_size > arg_offset + arg_size))
	break;

      if (check_ref)
	{
	  /* The store must go directly through the tracked pointer.  */
	  if (TREE_CODE (lhs_base) != MEM_REF
	      || TREE_OPERAND (lhs_base, 0) != arg_base
	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	    break;
	}
      else if (lhs_base != arg_base)
	{
	  /* A store to a different declaration cannot interfere.  */
	  if (DECL_P (lhs_base))
	    continue;
	  else
	    break;
	}

      if (lhs_offset + lhs_size < arg_offset
	  || lhs_offset >= (arg_offset + arg_size))
	continue;

      /* Find the insertion point in the offset-sorted list, checking for
	 overlaps with already recorded items.  */
      partial_overlap = false;
      p = &list;
      while (*p && (*p)->offset < lhs_offset)
	{
	  if ((*p)->offset + (*p)->size > lhs_offset)
	    {
	      partial_overlap = true;
	      break;
	    }
	  p = &(*p)->next;
	}
      if (partial_overlap)
	break;
      if (*p && (*p)->offset < lhs_offset + lhs_size)
	{
	  if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	    /* We already know this value is subsequently overwritten with
	       something else.  */
	    continue;
	  else
	    /* Otherwise this is a partial overlap which we cannot
	       represent.  */
	    break;
	}

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
	{
	  n->constant = rhs;
	  const_count++;
	}
      else
	n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      vec_alloc (jfunc->agg.items, const_count);
      while (list)
	{
	  if (list->constant)
	    {
	      struct ipa_agg_jf_item item;
	      /* Offsets are stored relative to the beginning of ARG.  */
	      item.offset = list->offset - arg_offset;
	      gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	      item.value = unshare_expr_without_location (list->constant);
	      jfunc->agg.items->quick_push (item);
	    }
	  list = list->next;
	}
    }
}
1519 static tree
1520 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1522 int n;
1523 tree type = (e->callee
1524 ? TREE_TYPE (e->callee->decl)
1525 : gimple_call_fntype (e->call_stmt));
1526 tree t = TYPE_ARG_TYPES (type);
1528 for (n = 0; n < i; n++)
1530 if (!t)
1531 break;
1532 t = TREE_CHAIN (t);
1534 if (t)
1535 return TREE_VALUE (t);
1536 if (!e->callee)
1537 return NULL;
1538 t = DECL_ARGUMENTS (e->callee->decl);
1539 for (n = 0; n < i; n++)
1541 if (!t)
1542 return NULL;
1543 t = TREE_CHAIN (t);
1545 if (t)
1546 return TREE_TYPE (t);
1547 return NULL;
1550 /* Compute jump function for all arguments of callsite CS and insert the
1551 information in the jump_functions array in the ipa_edge_args corresponding
1552 to this callsite. */
static void
ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gimple call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);

      if (is_gimple_ip_invariant (arg))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >=0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      /* A parameter passed through unchanged.  */
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p, type_p;
		  agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
							call, arg);
		  if (param_type && POINTER_TYPE_P (param_type))
		    type_p = !detect_type_change_ssa (arg,
						      TREE_TYPE (param_type),
						      call, jfunc);
		  else
		    type_p = false;
		  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
		    ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
						    type_p);
		}
	    }
	  else
	    {
	      /* Try the more complex patterns: arithmetic pass-through and
		 ancestor jump functions.  */
	      gimple stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
						    call, stmt, param_type);
	    }
	}
      else
	compute_known_type_jump_func (arg, jfunc, call,
				      param_type
				      && POINTER_TYPE_P (param_type)
				      ? TREE_TYPE (param_type)
				      : NULL);

      /* If the above did not establish that aggregate contents are preserved,
	 try to describe any known constant contents of the aggregate.  */
      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || (POINTER_TYPE_P (TREE_TYPE (arg)))))
	determine_known_aggregate_parts (call, arg, jfunc);
    }
}
1643 /* Compute jump functions for all edges - both direct and indirect - outgoing
1644 from NODE. Also count the actual arguments in the process. */
static void
ipa_compute_jump_functions (struct cgraph_node *node,
			    struct param_analysis_info *parms_ainfo)
{
  struct cgraph_edge *cs;

  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
								  NULL);
      /* We do not need to bother analyzing calls to unknown
	 functions unless they may become known during lto/whopr.  */
      if (!callee->definition && !flag_lto)
	continue;
      ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
    }

  /* Indirect edges are always analyzed; their target is unknown by
     definition.  */
  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
}
1667 /* If STMT looks like a statement loading a value from a member pointer formal
1668 parameter, return that parameter and store the offset of the field to
1669 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1670 might be clobbered). If USE_DELTA, then we look for a use of the delta
1671 field rather than the pfn. */
static tree
ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  /* The load may be either COMPONENT_REF (MEM_REF ...) or a bare
     MEM_REF with a constant offset selecting the field.  */
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  /* Select which member-pointer field the caller is interested in.  */
  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      /* COMPONENT_REF form: the MEM_REF offset must be zero and the
	 referenced field must be the one we look for.  */
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    /* Bare MEM_REF form: the offset must match the field's position.  */
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}
1719 /* Returns true iff T is an SSA_NAME defined by a statement. */
1721 static bool
1722 ipa_is_ssa_with_stmt_def (tree t)
1724 if (TREE_CODE (t) == SSA_NAME
1725 && !SSA_NAME_IS_DEFAULT_DEF (t))
1726 return true;
1727 else
1728 return false;
1731 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1732 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1733 indirect call graph edge. */
static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
{
  struct cgraph_edge *cs;

  cs = cgraph_edge (node, stmt);
  /* Record the parameter and reset all other indirect-call info fields to
     their defaults; callers fill in the specifics afterwards.  */
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->offset = 0;
  cs->indirect_info->polymorphic = 0;
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  return cs;
}
1749 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1750 (described by INFO). PARMS_AINFO is a pointer to a vector containing
1751 intermediate information about each formal parameter. Currently it checks
1752 whether the call calls a pointer that is a formal parameter and if so, the
1753 parameter is marked with the called flag and an indirect call graph edge
1754 describing the call is created. This is very simple for ordinary pointers
1755 represented in SSA but not-so-nice when it comes to member pointers. The
1756 ugly part of this function does nothing more than trying to match the
1757 pattern of such a call. An example of such a pattern is the gimple dump
1758 below, the call is on the last line:
1760 <bb 2>:
1761 f$__delta_5 = f.__delta;
1762 f$__pfn_24 = f.__pfn;
1765 <bb 2>:
1766 f$__delta_5 = MEM[(struct *)&f];
1767 f$__pfn_24 = MEM[(struct *)&f + 4B];
1769 and a few lines below:
1771 <bb 5>
1772 D.2496_3 = (int) f$__pfn_24;
1773 D.2497_4 = D.2496_3 & 1;
1774 if (D.2497_4 != 0)
1775 goto <bb 3>;
1776 else
1777 goto <bb 4>;
1779 <bb 6>:
1780 D.2500_7 = (unsigned int) f$__delta_5;
1781 D.2501_8 = &S + D.2500_7;
1782 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1783 D.2503_10 = *D.2502_9;
1784 D.2504_12 = f$__pfn_24 + -1;
1785 D.2505_13 = (unsigned int) D.2504_12;
1786 D.2506_14 = D.2503_10 + D.2505_13;
1787 D.2507_15 = *D.2506_14;
1788 iftmp.11_16 = (String:: *) D.2507_15;
1790 <bb 7>:
1791 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1792 D.2500_19 = (unsigned int) f$__delta_5;
1793 D.2508_20 = &S + D.2500_19;
1794 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1796 Such patterns are results of simple calls to a member pointer:
1798 int doprinting (int (MyString::* f)(int) const)
1800 MyString S ("somestring");
1802 return (S.*f)(4);
1805 Moreover, the function also looks for called pointers loaded from aggregates
1806 passed by value or reference. */
static void
ipa_analyze_indirect_call_uses (struct cgraph_node *node,
				struct ipa_node_params *info,
				struct param_analysis_info *parms_ainfo,
				gimple call, tree target)
{
  gimple def;
  tree n1, n2;
  gimple d1, d2;
  tree rec, rec2, cond;
  gimple branch;
  int index;
  basic_block bb, virt_bb, join;
  HOST_WIDE_INT offset;
  bool by_ref;

  /* The simple case: the called SSA name is directly a parameter.  */
  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
	ipa_note_param_call (node, index, call);
      return;
    }

  /* The called pointer may be loaded from an aggregate passed by value or
     reference.  */
  def = SSA_NAME_DEF_STMT (target);
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg_1 (info->descriptors, parms_ainfo, def,
				   gimple_assign_rhs1 (def), &index, &offset,
				   NULL, &by_ref))
    {
      struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  n1 = PHI_ARG_DEF (def, 0);
  n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  d1 = SSA_NAME_DEF_STMT (n1);
  d2 = SSA_NAME_DEF_STMT (n2);

  join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      /* Exactly one PHI argument may be such a load.  */
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
	return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  /* The compared value must be "pfn & 1" (or "delta & 1", depending on
     where the target keeps the virtualness bit).  */
  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  /* Skip over an optional conversion of the loaded field.  */
  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  /* The tested field must be loaded from the same member pointer
     parameter.  */
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta),
					     NULL);
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->member_ptr = 1;
    }

  return;
}
1945 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1946 object referenced in the expression is a formal parameter of the caller
1947 (described by INFO), create a call note for the statement. */
static void
ipa_analyze_virtual_call_uses (struct cgraph_node *node,
			       struct ipa_node_params *info, gimple call,
			       tree target)
{
  struct cgraph_edge *cs;
  struct cgraph_indirect_call_info *ii;
  struct ipa_jump_func jfunc;
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      /* The object is a parameter used directly.  */
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
	return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      /* Punt if the dynamic type may have changed before the call.  */
      if (detect_type_change_ssa (obj, obj_type_ref_class (target),
				  call, &jfunc))
	return;
    }
  else
    {
      /* The object may be an ancestor component of a parameter.  */
      gimple stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
	return;
      index = ipa_get_param_decl_index (info,
					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (obj, expr, obj_type_ref_class (target),
			      call, &jfunc, anc_offset))
	return;
    }

  /* Record a polymorphic indirect call on the parameter.  */
  cs = ipa_note_param_call (node, index, call);
  ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
  ii->otr_type = obj_type_ref_class (target);
  ii->polymorphic = 1;
}
2003 /* Analyze a call statement CALL whether and how it utilizes formal parameters
2004 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
2005 containing intermediate information about each formal parameter. */
2007 static void
2008 ipa_analyze_call_uses (struct cgraph_node *node,
2009 struct ipa_node_params *info,
2010 struct param_analysis_info *parms_ainfo, gimple call)
2012 tree target = gimple_call_fn (call);
2014 if (!target)
2015 return;
2016 if (TREE_CODE (target) == SSA_NAME)
2017 ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
2018 else if (virtual_method_call_p (target))
2019 ipa_analyze_virtual_call_uses (node, info, call, target);
2023 /* Analyze the call statement STMT with respect to formal parameters (described
2024 in INFO) of caller given by NODE. Currently it only checks whether formal
2025 parameters are called. PARMS_AINFO is a pointer to a vector containing
2026 intermediate information about each formal parameter. */
2028 static void
2029 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
2030 struct param_analysis_info *parms_ainfo, gimple stmt)
2032 if (is_gimple_call (stmt))
2033 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
2036 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2037 If OP is a parameter declaration, mark it as used in the info structure
2038 passed in DATA. */
2040 static bool
2041 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
2042 tree op, void *data)
2044 struct ipa_node_params *info = (struct ipa_node_params *) data;
2046 op = get_base_address (op);
2047 if (op
2048 && TREE_CODE (op) == PARM_DECL)
2050 int index = ipa_get_param_decl_index (info, op);
2051 gcc_assert (index >= 0);
2052 ipa_set_param_used (info, index, true);
2055 return false;
2058 /* Scan the function body of NODE and inspect the uses of formal parameters.
2059 Store the findings in various structures of the associated ipa_node_params
2060 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
2061 vector containing intermediate information about each formal parameter. */
static void
ipa_analyze_params_uses (struct cgraph_node *node,
			 struct param_analysis_info *parms_ainfo)
{
  tree decl = node->decl;
  basic_block bb;
  struct function *func;
  gimple_stmt_iterator gsi;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int i;

  if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
    return;

  info->uses_analysis_done = 1;
  if (ipa_func_spec_opts_forbid_analysis_p (node))
    {
      /* Analysis is forbidden; conservatively mark everything used and
	 undescribed.  */
      for (i = 0; i < ipa_get_param_count (info); i++)
	{
	  ipa_set_param_used (info, i, true);
	  ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
	}
      return;
    }

  for (i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      int controlled_uses = 0;

      /* For SSA regs see if parameter is used.  For non-SSA we compute
	 the flag during modification analysis.  */
      if (is_gimple_reg (parm))
	{
	  tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
				       parm);
	  if (ddef && !has_zero_uses (ddef))
	    {
	      imm_use_iterator imm_iter;
	      use_operand_p use_p;

	      ipa_set_param_used (info, i, true);
	      /* Count uses that appear as call arguments; any other use
		 makes the parameter's uses undescribable.  */
	      FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
		if (!is_gimple_call (USE_STMT (use_p)))
		  {
		    controlled_uses = IPA_UNDESCRIBED_USE;
		    break;
		  }
		else
		  controlled_uses++;
	    }
	  else
	    controlled_uses = 0;
	}
      else
	controlled_uses = IPA_UNDESCRIBED_USE;
      ipa_set_controlled_uses (info, i, controlled_uses);
    }

  /* Walk every statement and PHI to find uses of the parameters and to
     discover indirect calls through them.  */
  func = DECL_STRUCT_FUNCTION (decl);
  FOR_EACH_BB_FN (bb, func)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);

	  if (is_gimple_debug (stmt))
	    continue;

	  ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
	  walk_stmt_load_store_addr_ops (stmt, info,
					 visit_ref_for_mod_analysis,
					 visit_ref_for_mod_analysis,
					 visit_ref_for_mod_analysis);
	}
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
				       visit_ref_for_mod_analysis,
				       visit_ref_for_mod_analysis,
				       visit_ref_for_mod_analysis);
    }
}
2146 /* Free stuff in PARMS_AINFO, assume there are PARAM_COUNT parameters. */
2148 static void
2149 free_parms_ainfo (struct param_analysis_info *parms_ainfo, int param_count)
2151 int i;
2153 for (i = 0; i < param_count; i++)
2155 if (parms_ainfo[i].parm_visited_statements)
2156 BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
2157 if (parms_ainfo[i].pt_visited_statements)
2158 BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
/* Initialize the array describing properties of formal parameters
   of NODE, analyze their uses and compute jump functions associated
   with actual arguments of calls from within NODE.  */

void
ipa_analyze_node (struct cgraph_node *node)
{
  struct ipa_node_params *info;
  struct param_analysis_info *parms_ainfo;
  int param_count;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = IPA_NODE_REF (node);
  /* The analyses below operate on cfun, so make NODE's function current.  */
  push_cfun (DECL_STRUCT_FUNCTION (node->decl));
  ipa_initialize_node_params (node);

  /* Zero-initialized scratch space, one entry per formal parameter, shared
     by the uses analysis and jump-function computation; freed below.  */
  param_count = ipa_get_param_count (info);
  parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
  memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);

  ipa_analyze_params_uses (node, parms_ainfo);
  ipa_compute_jump_functions (node, parms_ainfo);

  free_parms_ainfo (parms_ainfo, param_count);
  pop_cfun ();
}
/* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF
   attempt a type-based devirtualization.  If successful, return the
   target function declaration, otherwise return NULL.  */

tree
ipa_intraprocedural_devirtualization (gimple call)
{
  tree binfo, token, fndecl;
  struct ipa_jump_func jfunc;
  tree otr = gimple_call_fn (call);

  /* Try to determine the dynamic type of the object the OBJ_TYPE_REF is
     invoked on; give up unless the type is positively known.  */
  jfunc.type = IPA_JF_UNKNOWN;
  compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
				call, obj_type_ref_class (otr));
  if (jfunc.type != IPA_JF_KNOWN_TYPE)
    return NULL_TREE;
  binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
  if (!binfo)
    return NULL_TREE;
  /* The OBJ_TYPE_REF token is the vtable slot index of the called method.  */
  token = OBJ_TYPE_REF_TOKEN (otr);
  fndecl = gimple_get_virt_method_for_binfo (tree_low_cst (token, 1),
					     binfo);
#ifdef ENABLE_CHECKING
  if (fndecl)
    gcc_assert (possible_polymorphic_call_target_p
		  (otr, cgraph_get_node (fndecl)));
#endif
  return fndecl;
}
2220 /* Update the jump function DST when the call graph edge corresponding to SRC is
2221 is being inlined, knowing that DST is of type ancestor and src of known
2222 type. */
2224 static void
2225 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2226 struct ipa_jump_func *dst)
2228 HOST_WIDE_INT combined_offset;
2229 tree combined_type;
2231 if (!ipa_get_jf_ancestor_type_preserved (dst))
2233 dst->type = IPA_JF_UNKNOWN;
2234 return;
2237 combined_offset = ipa_get_jf_known_type_offset (src)
2238 + ipa_get_jf_ancestor_offset (dst);
2239 combined_type = ipa_get_jf_ancestor_type (dst);
2241 ipa_set_jf_known_type (dst, combined_offset,
2242 ipa_get_jf_known_type_base_type (src),
2243 combined_type);
/* Update the jump functions associated with call graph edge E when the call
   graph edge CS is being inlined, assuming that E->caller is already (possibly
   indirectly) inlined into CS->callee and that E has not been inlined.  */

static void
update_jump_functions_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_edge *e)
{
  struct ipa_edge_args *top = IPA_EDGE_REF (cs);
  struct ipa_edge_args *args = IPA_EDGE_REF (e);
  int count = ipa_get_cs_argument_count (args);
  int i;

  /* For each argument of E, compose its jump function (relative to the
     inlined callee's parameters) with the corresponding jump function of CS
     (relative to the surviving caller's parameters).  */
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);

      if (dst->type == IPA_JF_ANCESTOR)
	{
	  struct ipa_jump_func *src;
	  int dst_fid = dst->value.ancestor.formal_id;

	  /* Variable number of arguments can cause havoc if we try to access
	     one that does not exist in the inlined edge.  So make sure we
	     don't.  */
	  if (dst_fid >= ipa_get_cs_argument_count (top))
	    {
	      dst->type = IPA_JF_UNKNOWN;
	      continue;
	    }

	  src = ipa_get_ith_jump_func (top, dst_fid);

	  if (src->agg.items
	      && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
	    {
	      struct ipa_agg_jf_item *item;
	      int j;

	      /* Currently we do not produce clobber aggregate jump functions,
		 replace with merging when we do.  */
	      gcc_assert (!dst->agg.items);

	      /* Re-base aggregate item offsets by the ancestor offset.  */
	      dst->agg.items = vec_safe_copy (src->agg.items);
	      dst->agg.by_ref = src->agg.by_ref;
	      FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
		item->offset -= dst->value.ancestor.offset;
	    }

	  if (src->type == IPA_JF_KNOWN_TYPE)
	    combine_known_type_and_ancestor_jfs (src, dst);
	  else if (src->type == IPA_JF_PASS_THROUGH
		   && src->value.pass_through.operation == NOP_EXPR)
	    {
	      /* Ancestor of a simple pass-through: just retarget the formal
		 id; preservation flags must hold on both legs.  */
	      dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
	      dst->value.ancestor.agg_preserved &=
		src->value.pass_through.agg_preserved;
	      dst->value.ancestor.type_preserved &=
		src->value.pass_through.type_preserved;
	    }
	  else if (src->type == IPA_JF_ANCESTOR)
	    {
	      /* Ancestor of an ancestor: offsets add up, flags AND.  */
	      dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
	      dst->value.ancestor.offset += src->value.ancestor.offset;
	      dst->value.ancestor.agg_preserved &=
		src->value.ancestor.agg_preserved;
	      dst->value.ancestor.type_preserved &=
		src->value.ancestor.type_preserved;
	    }
	  else
	    dst->type = IPA_JF_UNKNOWN;
	}
      else if (dst->type == IPA_JF_PASS_THROUGH)
	{
	  struct ipa_jump_func *src;
	  /* We must check range due to calls with variable number of arguments
	     and we cannot combine jump functions with operations.  */
	  if (dst->value.pass_through.operation == NOP_EXPR
	      && (dst->value.pass_through.formal_id
		  < ipa_get_cs_argument_count (top)))
	    {
	      int dst_fid = dst->value.pass_through.formal_id;
	      src = ipa_get_ith_jump_func (top, dst_fid);
	      bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);

	      /* A simple pass-through composed with SRC is essentially a copy
		 of SRC with preservation flags weakened by DST's.  */
	      switch (src->type)
		{
		case IPA_JF_UNKNOWN:
		  dst->type = IPA_JF_UNKNOWN;
		  break;
		case IPA_JF_KNOWN_TYPE:
		  ipa_set_jf_known_type (dst,
					 ipa_get_jf_known_type_offset (src),
					 ipa_get_jf_known_type_base_type (src),
					 ipa_get_jf_known_type_base_type (src));
		  break;
		case IPA_JF_CONST:
		  ipa_set_jf_cst_copy (dst, src);
		  break;

		case IPA_JF_PASS_THROUGH:
		  {
		    int formal_id = ipa_get_jf_pass_through_formal_id (src);
		    enum tree_code operation;
		    operation = ipa_get_jf_pass_through_operation (src);

		    if (operation == NOP_EXPR)
		      {
			bool agg_p, type_p;
			agg_p = dst_agg_p
			  && ipa_get_jf_pass_through_agg_preserved (src);
			type_p = ipa_get_jf_pass_through_type_preserved (src)
			  && ipa_get_jf_pass_through_type_preserved (dst);
			ipa_set_jf_simple_pass_through (dst, formal_id,
							agg_p, type_p);
		      }
		    else
		      {
			tree operand = ipa_get_jf_pass_through_operand (src);
			ipa_set_jf_arith_pass_through (dst, formal_id, operand,
						       operation);
		      }
		    break;
		  }
		case IPA_JF_ANCESTOR:
		  {
		    bool agg_p, type_p;
		    agg_p = dst_agg_p
		      && ipa_get_jf_ancestor_agg_preserved (src);
		    type_p = ipa_get_jf_ancestor_type_preserved (src)
		      && ipa_get_jf_pass_through_type_preserved (dst);
		    ipa_set_ancestor_jf (dst,
					 ipa_get_jf_ancestor_offset (src),
					 ipa_get_jf_ancestor_type (src),
					 ipa_get_jf_ancestor_formal_id (src),
					 agg_p, type_p);
		    break;
		  }
		default:
		  gcc_unreachable ();
		}

	      if (src->agg.items
		  && (dst_agg_p || !src->agg.by_ref))
		{
		  /* Currently we do not produce clobber aggregate jump
		     functions, replace with merging when we do.  */
		  gcc_assert (!dst->agg.items);

		  dst->agg.by_ref = src->agg.by_ref;
		  dst->agg.items = vec_safe_copy (src->agg.items);
		}
	    }
	  else
	    dst->type = IPA_JF_UNKNOWN;
	}
    }
}
/* If TARGET is an addr_expr of a function declaration, make it the destination
   of an indirect edge IE and return the edge.  Otherwise, return NULL.  */

struct cgraph_edge *
ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
{
  struct cgraph_node *callee;
  struct inline_edge_summary *es = inline_edge_summary (ie);
  bool unreachable = false;

  if (TREE_CODE (target) == ADDR_EXPR)
    target = TREE_OPERAND (target, 0);
  if (TREE_CODE (target) != FUNCTION_DECL)
    {
      target = canonicalize_constructor_val (target, NULL);
      if (!target || TREE_CODE (target) != FUNCTION_DECL)
	{
	  if (ie->indirect_info->member_ptr)
	    /* Member pointer call that goes through a VMT lookup.  */
	    return NULL;

	  /* The "target" is provably not a function; redirect the call to
	     __builtin_unreachable so the path can be optimized away.  */
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered direct call to non-function"
		     " in %s/%i, making it unreachable.\n",
		     cgraph_node_name (ie->caller), ie->caller->order);
	  target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
	  callee = cgraph_get_create_node (target);
	  unreachable = true;
	}
      else
	callee = cgraph_get_node (target);
    }
  else
    callee = cgraph_get_node (target);

  /* Because may-edges are not explicitely represented and vtable may be external,
     we may create the first reference to the object in the unit.  */
  if (!callee || callee->global.inlined_to)
    {

      /* We are better to ensure we can refer to it.
	 In the case of static functions we are out of luck, since we already
	 removed its body.  In the case of public functions we may or may
	 not introduce the reference.  */
      if (!canonicalize_constructor_val (target, NULL)
	  || !TREE_PUBLIC (target))
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a known target "
		     "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
		     xstrdup (cgraph_node_name (ie->caller)),
		     ie->caller->order,
		     xstrdup (cgraph_node_name (ie->callee)),
		     ie->callee->order);
	  return NULL;
	}
      callee = cgraph_get_create_node (target);
    }
  ipa_check_create_node_params ();

  /* We can not make edges to inline clones.  It is bug that someone removed
     the cgraph node too early.  */
  gcc_assert (!callee->global.inlined_to);

  if (dump_file && !unreachable)
    {
      fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
	       "(%s/%i -> %s/%i), for stmt ",
	       ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
	       xstrdup (cgraph_node_name (ie->caller)),
	       ie->caller->order,
	       xstrdup (cgraph_node_name (callee)),
	       callee->order);
      if (ie->call_stmt)
	print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
      else
	fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
    }
  ie = cgraph_make_edge_direct (ie, callee);
  es = inline_edge_summary (ie);
  /* A direct call is cheaper than the indirect one it replaces; adjust the
     inliner's size/time estimates for the edge accordingly.  */
  es->call_stmt_size -= (eni_size_weights.indirect_call_cost
			 - eni_size_weights.call_cost);
  es->call_stmt_time -= (eni_time_weights.indirect_call_cost
			 - eni_time_weights.call_cost);

  return ie;
}
2493 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2494 return NULL if there is not any. BY_REF specifies whether the value has to
2495 be passed by reference or by value. */
2497 tree
2498 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2499 HOST_WIDE_INT offset, bool by_ref)
2501 struct ipa_agg_jf_item *item;
2502 int i;
2504 if (by_ref != agg->by_ref)
2505 return NULL;
2507 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2508 if (item->offset == offset)
2510 /* Currently we do not have clobber values, return NULL for them once
2511 we do. */
2512 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2513 return item->value;
2515 return NULL;
2518 /* Remove a reference to SYMBOL from the list of references of a node given by
2519 reference description RDESC. Return true if the reference has been
2520 successfully found and removed. */
2522 static bool
2523 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2525 struct ipa_ref *to_del;
2526 struct cgraph_edge *origin;
2528 origin = rdesc->cs;
2529 if (!origin)
2530 return false;
2531 to_del = ipa_find_reference (origin->caller, symbol,
2532 origin->call_stmt, origin->lto_stmt_uid);
2533 if (!to_del)
2534 return false;
2536 ipa_remove_reference (to_del);
2537 if (dump_file)
2538 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2539 xstrdup (cgraph_node_name (origin->caller)),
2540 origin->caller->order, xstrdup (symtab_node_name (symbol)));
2541 return true;
2544 /* If JFUNC has a reference description with refcount different from
2545 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2546 NULL. JFUNC must be a constant jump function. */
2548 static struct ipa_cst_ref_desc *
2549 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2551 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2552 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2553 return rdesc;
2554 else
2555 return NULL;
2558 /* If the value of constant jump function JFUNC is an address of a function
2559 declaration, return the associated call graph node. Otherwise return
2560 NULL. */
2562 static cgraph_node *
2563 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2565 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2566 tree cst = ipa_get_jf_constant (jfunc);
2567 if (TREE_CODE (cst) != ADDR_EXPR
2568 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2569 return NULL;
2571 return cgraph_get_node (TREE_OPERAND (cst, 0));
2575 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2576 refcount and if it hits zero, remove reference to SYMBOL from the caller of
2577 the edge specified in the rdesc. Return false if either the symbol or the
2578 reference could not be found, otherwise return true. */
2580 static bool
2581 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2583 struct ipa_cst_ref_desc *rdesc;
2584 if (jfunc->type == IPA_JF_CONST
2585 && (rdesc = jfunc_rdesc_usable (jfunc))
2586 && --rdesc->refcount == 0)
2588 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
2589 if (!symbol)
2590 return false;
2592 return remove_described_reference (symbol, rdesc);
2594 return true;
/* Try to find a destination for indirect edge IE that corresponds to a simple
   call or a call of a member function pointer and where the destination is a
   pointer formal parameter described by jump function JFUNC.  If it can be
   determined, return the newly direct edge, otherwise return NULL.
   NEW_ROOT_INFO is the node info that JFUNC lattices are relative to.  */

static struct cgraph_edge *
try_make_edge_direct_simple_call (struct cgraph_edge *ie,
				  struct ipa_jump_func *jfunc,
				  struct ipa_node_params *new_root_info)
{
  struct cgraph_edge *cs;
  tree target;
  /* Remember the flag before ipa_make_edge_direct_to_target possibly
     changes the indirect_info.  */
  bool agg_contents = ie->indirect_info->agg_contents;

  if (ie->indirect_info->agg_contents)
    /* The callee is loaded from an aggregate passed in the parameter.  */
    target = ipa_find_agg_cst_for_param (&jfunc->agg,
					 ie->indirect_info->offset,
					 ie->indirect_info->by_ref);
  else
    target = ipa_value_from_jfunc (new_root_info, jfunc);
  if (!target)
    return NULL;
  cs = ipa_make_edge_direct_to_target (ie, target);

  if (cs && !agg_contents)
    {
      bool ok;
      gcc_checking_assert (cs->callee
			   && (cs != ie
			       || jfunc->type != IPA_JF_CONST
			       || !cgraph_node_for_jfunc (jfunc)
			       || cs->callee == cgraph_node_for_jfunc (jfunc)));
      /* The value was passed directly, so one described use of the constant
	 has just been turned into a direct call.  */
      ok = try_decrement_rdesc_refcount (jfunc);
      gcc_checking_assert (ok);
    }

  return cs;
}
/* Try to find a destination for indirect edge IE that corresponds to a virtual
   call based on a formal parameter which is described by jump function JFUNC
   and if it can be determined, make it direct and return the direct edge.
   Otherwise, return NULL.  NEW_ROOT_INFO is the node info that JFUNC lattices
   are relative to.  */

static struct cgraph_edge *
try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
				   struct ipa_jump_func *jfunc,
				   struct ipa_node_params *new_root_info)
{
  tree binfo, target;

  binfo = ipa_value_from_jfunc (new_root_info, jfunc);

  if (!binfo)
    return NULL;

  if (TREE_CODE (binfo) != TREE_BINFO)
    {
      /* The jump function yielded a constant (e.g. a vtable address) rather
	 than a BINFO; try to derive the BINFO from that constant.  */
      binfo = gimple_extract_devirt_binfo_from_cst
		 (binfo, ie->indirect_info->otr_type);
      if (!binfo)
	return NULL;
    }

  /* Walk down to the BINFO of the sub-object at the call's offset and look
     up the virtual method in its vtable.  */
  binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
			       ie->indirect_info->otr_type);
  if (binfo)
    target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
					       binfo);
  else
    return NULL;

  if (target)
    {
#ifdef ENABLE_CHECKING
      gcc_assert (possible_polymorphic_call_target_p
		    (ie, cgraph_get_node (target)));
#endif
      return ipa_make_edge_direct_to_target (ie, target);
    }
  else
    return NULL;
}
/* Update the param called notes associated with NODE when CS is being inlined,
   assuming NODE is (potentially indirectly) inlined into CS->callee.
   Moreover, if the callee is discovered to be constant, create a new cgraph
   edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
   unless NEW_EDGES is NULL.  Return true iff a new edge(s) were created.  */

static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_node *node,
				      vec<cgraph_edge_p> *new_edges)
{
  struct ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  struct ipa_node_params *new_root_info;
  bool res = false;

  ipa_check_create_edge_args ();
  top = IPA_EDGE_REF (cs);
  /* Jump functions of CS are relative to the eventual root of the inline
     tree, not necessarily to the immediate caller.  */
  new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
				? cs->caller->global.inlined_to
				: cs->caller);

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      struct cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;
      int param_index;

      /* Save the successor first; IE may be turned into a direct edge.  */
      next_ie = ie->next_callee;

      if (ici->param_index == -1)
	continue;

      /* We must check range due to calls with variable number of arguments:  */
      if (ici->param_index >= ipa_get_cs_argument_count (top))
	{
	  ici->param_index = -1;
	  continue;
	}

      param_index = ici->param_index;
      jfunc = ipa_get_ith_jump_func (top, param_index);

      if (!flag_indirect_inlining)
	new_direct_edge = NULL;
      else if (ici->polymorphic)
	new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
							     new_root_info);
      else
	new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
							    new_root_info);
      /* If speculation was removed, then we need to do nothing.  */
      if (new_direct_edge && new_direct_edge != ie)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  top = IPA_EDGE_REF (cs);
	  res = true;
	}
      else if (new_direct_edge)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  if (new_direct_edge->call_stmt)
	    new_direct_edge->call_stmt_cannot_inline_p
	      = !gimple_check_call_matching_types (
		  new_direct_edge->call_stmt,
		  new_direct_edge->callee->decl, false);
	  if (new_edges)
	    {
	      new_edges->safe_push (new_direct_edge);
	      res = true;
	    }
	  /* Making the edge direct may have reallocated the args vector;
	     refresh TOP.  */
	  top = IPA_EDGE_REF (cs);
	}
      else if (jfunc->type == IPA_JF_PASS_THROUGH
	       && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
	{
	  /* The parameter is passed through unchanged; retarget the note to
	     the caller's formal, unless aggregate contents were needed but
	     not preserved.  */
	  if (ici->agg_contents
	      && !ipa_get_jf_pass_through_agg_preserved (jfunc))
	    ici->param_index = -1;
	  else
	    ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
	}
      else if (jfunc->type == IPA_JF_ANCESTOR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_ancestor_agg_preserved (jfunc))
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
	      ici->offset += ipa_get_jf_ancestor_offset (jfunc);
	    }
	}
      else
	/* Either we can find a destination for this edge now or never.  */
	ici->param_index = -1;
    }

  return res;
}
2784 /* Recursively traverse subtree of NODE (including node) made of inlined
2785 cgraph_edges when CS has been inlined and invoke
2786 update_indirect_edges_after_inlining on all nodes and
2787 update_jump_functions_after_inlining on all non-inlined edges that lead out
2788 of this subtree. Newly discovered indirect edges will be added to
2789 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
2790 created. */
2792 static bool
2793 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
2794 struct cgraph_node *node,
2795 vec<cgraph_edge_p> *new_edges)
2797 struct cgraph_edge *e;
2798 bool res;
2800 res = update_indirect_edges_after_inlining (cs, node, new_edges);
2802 for (e = node->callees; e; e = e->next_callee)
2803 if (!e->inline_failed)
2804 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
2805 else
2806 update_jump_functions_after_inlining (cs, e);
2807 for (e = node->indirect_calls; e; e = e->next_callee)
2808 update_jump_functions_after_inlining (cs, e);
2810 return res;
2813 /* Combine two controlled uses counts as done during inlining. */
2815 static int
2816 combine_controlled_uses_counters (int c, int d)
2818 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
2819 return IPA_UNDESCRIBED_USE;
2820 else
2821 return c + d - 1;
/* Propagate number of controlled users from CS->callee to the new root of the
   tree of inlined nodes.  */

static void
propagate_controlled_uses (struct cgraph_edge *cs)
{
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  struct cgraph_node *new_root = cs->caller->global.inlined_to
    ? cs->caller->global.inlined_to : cs->caller;
  struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
  struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
  int count, i;

  /* Only arguments that map onto formal parameters of the callee carry
     controlled-uses information.  */
  count = MIN (ipa_get_cs_argument_count (args),
	       ipa_get_param_count (old_root_info));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
      struct ipa_cst_ref_desc *rdesc;

      if (jf->type == IPA_JF_PASS_THROUGH)
	{
	  int src_idx, c, d;
	  src_idx = ipa_get_jf_pass_through_formal_id (jf);
	  c = ipa_get_controlled_uses (new_root_info, src_idx);
	  d = ipa_get_controlled_uses (old_root_info, i);

	  gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
			       == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
	  c = combine_controlled_uses_counters (c, d);
	  ipa_set_controlled_uses (new_root_info, src_idx, c);
	  if (c == 0 && new_root_info->ipcp_orig_node)
	    {
	      /* All controlled uses are gone and the new root is an IPA-CP
		 clone; a reference created for the cloned constant may now
		 be removable.  */
	      struct cgraph_node *n;
	      struct ipa_ref *ref;
	      tree t = new_root_info->known_vals[src_idx];

	      if (t && TREE_CODE (t) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
		  && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
		  && (ref = ipa_find_reference (new_root,
						n, NULL, 0)))
		{
		  if (dump_file)
		    fprintf (dump_file, "ipa-prop: Removing cloning-created "
			     "reference from %s/%i to %s/%i.\n",
			     xstrdup (cgraph_node_name (new_root)),
			     new_root->order,
			     xstrdup (cgraph_node_name (n)), n->order);
		  ipa_remove_reference (ref);
		}
	    }
	}
      else if (jf->type == IPA_JF_CONST
	       && (rdesc = jfunc_rdesc_usable (jf)))
	{
	  int d = ipa_get_controlled_uses (old_root_info, i);
	  int c = rdesc->refcount;
	  rdesc->refcount = combine_controlled_uses_counters (c, d);
	  if (rdesc->refcount == 0)
	    {
	      tree cst = ipa_get_jf_constant (jf);
	      struct cgraph_node *n;
	      gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
				   && TREE_CODE (TREE_OPERAND (cst, 0))
				   == FUNCTION_DECL);
	      n = cgraph_get_node (TREE_OPERAND (cst, 0));
	      if (n)
		{
		  struct cgraph_node *clone;
		  bool ok;
		  ok = remove_described_reference (n, rdesc);
		  gcc_checking_assert (ok);

		  /* Walk up the chain of IPA-CP clones between the caller
		     and the edge the rdesc describes, removing the
		     cloning-created references along the way.  */
		  clone = cs->caller;
		  while (clone->global.inlined_to
			 && clone != rdesc->cs->caller
			 && IPA_NODE_REF (clone)->ipcp_orig_node)
		    {
		      struct ipa_ref *ref;
		      ref = ipa_find_reference (clone,
						n, NULL, 0);
		      if (ref)
			{
			  if (dump_file)
			    fprintf (dump_file, "ipa-prop: Removing "
				     "cloning-created reference "
				     "from %s/%i to %s/%i.\n",
				     xstrdup (cgraph_node_name (clone)),
				     clone->order,
				     xstrdup (cgraph_node_name (n)),
				     n->order);
			  ipa_remove_reference (ref);
			}
		      clone = clone->callers->caller;
		    }
		}
	    }
	}
    }

  /* Arguments in excess of the callee's formal parameters (varargs) cannot
     have described uses; mark them as such.  */
  for (i = ipa_get_param_count (old_root_info);
       i < ipa_get_cs_argument_count (args);
       i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);

      if (jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
	  if (rdesc)
	    rdesc->refcount = IPA_UNDESCRIBED_USE;
	}
      else if (jf->type == IPA_JF_PASS_THROUGH)
	ipa_set_controlled_uses (new_root_info,
				 jf->value.pass_through.formal_id,
				 IPA_UNDESCRIBED_USE);
    }
}
2944 /* Update jump functions and call note functions on inlining the call site CS.
2945 CS is expected to lead to a node already cloned by
2946 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
2947 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were +
2948 created. */
2950 bool
2951 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
2952 vec<cgraph_edge_p> *new_edges)
2954 bool changed;
2955 /* Do nothing if the preparation phase has not been carried out yet
2956 (i.e. during early inlining). */
2957 if (!ipa_node_params_vector.exists ())
2958 return false;
2959 gcc_assert (ipa_edge_args_vector);
2961 propagate_controlled_uses (cs);
2962 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
2964 return changed;
/* Frees all dynamically allocated structures that the argument info points
   to.  */

void
ipa_free_edge_args_substructures (struct ipa_edge_args *args)
{
  vec_free (args->jump_functions);
  /* Clear the whole structure so stale pointers cannot be reused.  */
  memset (args, 0, sizeof (*args));
}
2977 /* Free all ipa_edge structures. */
2979 void
2980 ipa_free_all_edge_args (void)
2982 int i;
2983 struct ipa_edge_args *args;
2985 if (!ipa_edge_args_vector)
2986 return;
2988 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
2989 ipa_free_edge_args_substructures (args);
2991 vec_free (ipa_edge_args_vector);
/* Frees all dynamically allocated structures that the param info points
   to.  */

void
ipa_free_node_params_substructures (struct ipa_node_params *info)
{
  info->descriptors.release ();
  free (info->lattices);
  /* Lattice values and their sources are deallocated with their allocation
     pool.  */
  info->known_vals.release ();
  /* Clear the whole structure so stale pointers cannot be reused.  */
  memset (info, 0, sizeof (*info));
}
3008 /* Free all ipa_node_params structures. */
3010 void
3011 ipa_free_all_node_params (void)
3013 int i;
3014 struct ipa_node_params *info;
3016 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
3017 ipa_free_node_params_substructures (info);
3019 ipa_node_params_vector.release ();
/* Set the aggregate replacements of NODE to be AGGVALS.  */

void
ipa_set_node_agg_value_chain (struct cgraph_node *node,
			      struct ipa_agg_replacement_value *aggvals)
{
  /* The vector is indexed by node uid; grow it to cover every currently
     existing node before storing.  */
  if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
    vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);

  (*ipa_node_agg_replacements)[node->uid] = aggvals;
}
/* Hook that is called by cgraph.c when an edge is removed.  */

static void
ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
{
  struct ipa_edge_args *args;

  /* During IPA-CP updating we can be called on not-yet analyzed clones.  */
  if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
    return;

  args = IPA_EDGE_REF (cs);
  if (args->jump_functions)
    {
      struct ipa_jump_func *jf;
      int i;
      FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
	{
	  struct ipa_cst_ref_desc *rdesc;
	  /* Drop the described use this edge stood for, and make sure no
	     rdesc keeps a dangling pointer to the removed edge.  */
	  try_decrement_rdesc_refcount (jf);
	  if (jf->type == IPA_JF_CONST
	      && (rdesc = ipa_get_jf_constant_rdesc (jf))
	      && rdesc->cs == cs)
	    rdesc->cs = NULL;
	}
    }

  ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
}
/* Hook that is called by cgraph.c when a node is removed.  */

static void
ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  /* During IPA-CP updating we can be called on not-yet analyzed clones.  */
  if (ipa_node_params_vector.length () > (unsigned)node->uid)
    ipa_free_node_params_substructures (IPA_NODE_REF (node));
  if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
    (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
}
/* Hook that is called by cgraph.c when an edge is duplicated.  */

static void
ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
			   __attribute__((unused)) void *data)
{
  struct ipa_edge_args *old_args, *new_args;
  unsigned int i;

  ipa_check_create_edge_args ();

  old_args = IPA_EDGE_REF (src);
  new_args = IPA_EDGE_REF (dst);

  new_args->jump_functions = vec_safe_copy (old_args->jump_functions);

  for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
    {
      struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
      struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);

      /* The aggregate items vector was shared by the shallow copy above;
	 make the new jump function own its copy.  */
      dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);

      if (src_jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);

	  if (!src_rdesc)
	    dst_jf->value.constant.rdesc = NULL;
	  else if (src->caller == dst->caller)
	    {
	      /* Duplication within one caller (e.g. speculation): clone the
		 underlying IPA reference and give DST its own rdesc.  */
	      struct ipa_ref *ref;
	      symtab_node *n = cgraph_node_for_jfunc (src_jf);
	      gcc_checking_assert (n);
	      ref = ipa_find_reference (src->caller, n,
					src->call_stmt, src->lto_stmt_uid);
	      gcc_checking_assert (ref);
	      ipa_clone_ref (ref, dst->caller, ref->stmt);

	      gcc_checking_assert (ipa_refdesc_pool);
	      struct ipa_cst_ref_desc *dst_rdesc
		= (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = NULL;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else if (src_rdesc->cs == src)
	    {
	      /* SRC is the edge its own rdesc describes; create a fresh
		 rdesc for DST and chain it into the duplicate list.  */
	      struct ipa_cst_ref_desc *dst_rdesc;
	      gcc_checking_assert (ipa_refdesc_pool);
	      dst_rdesc
		= (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
	      src_rdesc->next_duplicate = dst_rdesc;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else
	    {
	      struct ipa_cst_ref_desc *dst_rdesc;
	      /* This can happen during inlining, when a JFUNC can refer to a
		 reference taken in a function up in the tree of inline clones.
		 We need to find the duplicate that refers to our tree of
		 inline clones.  */

	      gcc_assert (dst->caller->global.inlined_to);
	      for (dst_rdesc = src_rdesc->next_duplicate;
		   dst_rdesc;
		   dst_rdesc = dst_rdesc->next_duplicate)
		{
		  struct cgraph_node *top;
		  top = dst_rdesc->cs->caller->global.inlined_to
		    ? dst_rdesc->cs->caller->global.inlined_to
		    : dst_rdesc->cs->caller;
		  if (dst->caller->global.inlined_to == top)
		    break;
		}
	      gcc_assert (dst_rdesc);
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	}
    }
}
/* Hook that is called by cgraph.c when a node is duplicated.  */

static void
ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
			   ATTRIBUTE_UNUSED void *data)
{
  struct ipa_node_params *old_info, *new_info;
  struct ipa_agg_replacement_value *old_av, *new_av;

  ipa_check_create_node_params ();
  old_info = IPA_NODE_REF (src);
  new_info = IPA_NODE_REF (dst);

  new_info->descriptors = old_info->descriptors.copy ();
  /* Lattices are only meaningful during propagation; the clone starts
     without them.  */
  new_info->lattices = NULL;
  new_info->ipcp_orig_node = old_info->ipcp_orig_node;

  new_info->uses_analysis_done = old_info->uses_analysis_done;
  new_info->node_enqueued = old_info->node_enqueued;

  old_av = ipa_get_agg_replacements_for_node (src);
  if (!old_av)
    return;

  /* Deep-copy the aggregate replacement chain (the copy ends up in reverse
     order, which does not matter for lookups).  */
  new_av = NULL;
  while (old_av)
    {
      struct ipa_agg_replacement_value *v;

      v = ggc_alloc_ipa_agg_replacement_value ();
      memcpy (v, old_av, sizeof (*v));
      v->next = new_av;
      new_av = v;
      old_av = old_av->next;
    }
  ipa_set_node_agg_value_chain (dst, new_av);
}
3201 /* Analyze newly added function into callgraph. */
3203 static void
3204 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3206 ipa_analyze_node (node);
3209 /* Register our cgraph hooks if they are not already there. */
3211 void
3212 ipa_register_cgraph_hooks (void)
3214 if (!edge_removal_hook_holder)
3215 edge_removal_hook_holder =
3216 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3217 if (!node_removal_hook_holder)
3218 node_removal_hook_holder =
3219 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
3220 if (!edge_duplication_hook_holder)
3221 edge_duplication_hook_holder =
3222 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3223 if (!node_duplication_hook_holder)
3224 node_duplication_hook_holder =
3225 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
3226 function_insertion_hook_holder =
3227 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
3230 /* Unregister our cgraph hooks if they are not already there. */
3232 static void
3233 ipa_unregister_cgraph_hooks (void)
3235 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
3236 edge_removal_hook_holder = NULL;
3237 cgraph_remove_node_removal_hook (node_removal_hook_holder);
3238 node_removal_hook_holder = NULL;
3239 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
3240 edge_duplication_hook_holder = NULL;
3241 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
3242 node_duplication_hook_holder = NULL;
3243 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
3244 function_insertion_hook_holder = NULL;
3247 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3248 longer needed after ipa-cp. */
3250 void
3251 ipa_free_all_structures_after_ipa_cp (void)
3253 if (!optimize)
3255 ipa_free_all_edge_args ();
3256 ipa_free_all_node_params ();
3257 free_alloc_pool (ipcp_sources_pool);
3258 free_alloc_pool (ipcp_values_pool);
3259 free_alloc_pool (ipcp_agg_lattice_pool);
3260 ipa_unregister_cgraph_hooks ();
3261 if (ipa_refdesc_pool)
3262 free_alloc_pool (ipa_refdesc_pool);
3266 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3267 longer needed after indirect inlining. */
3269 void
3270 ipa_free_all_structures_after_iinln (void)
3272 ipa_free_all_edge_args ();
3273 ipa_free_all_node_params ();
3274 ipa_unregister_cgraph_hooks ();
3275 if (ipcp_sources_pool)
3276 free_alloc_pool (ipcp_sources_pool);
3277 if (ipcp_values_pool)
3278 free_alloc_pool (ipcp_values_pool);
3279 if (ipcp_agg_lattice_pool)
3280 free_alloc_pool (ipcp_agg_lattice_pool);
3281 if (ipa_refdesc_pool)
3282 free_alloc_pool (ipa_refdesc_pool);
3285 /* Print ipa_tree_map data structures of all functions in the
3286 callgraph to F. */
3288 void
3289 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3291 int i, count;
3292 struct ipa_node_params *info;
3294 if (!node->definition)
3295 return;
3296 info = IPA_NODE_REF (node);
3297 fprintf (f, " function %s/%i parameter descriptors:\n",
3298 cgraph_node_name (node), node->order);
3299 count = ipa_get_param_count (info);
3300 for (i = 0; i < count; i++)
3302 int c;
3304 ipa_dump_param (f, info, i);
3305 if (ipa_is_param_used (info, i))
3306 fprintf (f, " used");
3307 c = ipa_get_controlled_uses (info, i);
3308 if (c == IPA_UNDESCRIBED_USE)
3309 fprintf (f, " undescribed_use");
3310 else
3311 fprintf (f, " controlled_uses=%i", c);
3312 fprintf (f, "\n");
3316 /* Print ipa_tree_map data structures of all functions in the
3317 callgraph to F. */
3319 void
3320 ipa_print_all_params (FILE * f)
3322 struct cgraph_node *node;
3324 fprintf (f, "\nFunction parameters:\n");
3325 FOR_EACH_FUNCTION (node)
3326 ipa_print_node_params (f, node);
3329 /* Return a heap allocated vector containing formal parameters of FNDECL. */
3331 vec<tree>
3332 ipa_get_vector_of_formal_parms (tree fndecl)
3334 vec<tree> args;
3335 int count;
3336 tree parm;
3338 gcc_assert (!flag_wpa);
3339 count = count_formal_params (fndecl);
3340 args.create (count);
3341 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3342 args.quick_push (parm);
3344 return args;
3347 /* Return a heap allocated vector containing types of formal parameters of
3348 function type FNTYPE. */
3350 static inline vec<tree>
3351 get_vector_of_formal_parm_types (tree fntype)
3353 vec<tree> types;
3354 int count = 0;
3355 tree t;
3357 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3358 count++;
3360 types.create (count);
3361 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3362 types.quick_push (TREE_VALUE (t));
3364 return types;
3367 /* Modify the function declaration FNDECL and its type according to the plan in
3368 ADJUSTMENTS. It also sets base fields of individual adjustments structures
3369 to reflect the actual parameters being modified which are determined by the
3370 base_index field. */
3372 void
3373 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
3374 const char *synth_parm_prefix)
3376 vec<tree> oparms, otypes;
3377 tree orig_type, new_type = NULL;
3378 tree old_arg_types, t, new_arg_types = NULL;
3379 tree parm, *link = &DECL_ARGUMENTS (fndecl);
3380 int i, len = adjustments.length ();
3381 tree new_reversed = NULL;
3382 bool care_for_types, last_parm_void;
3384 if (!synth_parm_prefix)
3385 synth_parm_prefix = "SYNTH";
3387 oparms = ipa_get_vector_of_formal_parms (fndecl);
3388 orig_type = TREE_TYPE (fndecl);
3389 old_arg_types = TYPE_ARG_TYPES (orig_type);
3391 /* The following test is an ugly hack, some functions simply don't have any
3392 arguments in their type. This is probably a bug but well... */
3393 care_for_types = (old_arg_types != NULL_TREE);
3394 if (care_for_types)
3396 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3397 == void_type_node);
3398 otypes = get_vector_of_formal_parm_types (orig_type);
3399 if (last_parm_void)
3400 gcc_assert (oparms.length () + 1 == otypes.length ());
3401 else
3402 gcc_assert (oparms.length () == otypes.length ());
3404 else
3406 last_parm_void = false;
3407 otypes.create (0);
3410 for (i = 0; i < len; i++)
3412 struct ipa_parm_adjustment *adj;
3413 gcc_assert (link);
3415 adj = &adjustments[i];
3416 parm = oparms[adj->base_index];
3417 adj->base = parm;
3419 if (adj->copy_param)
3421 if (care_for_types)
3422 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3423 new_arg_types);
3424 *link = parm;
3425 link = &DECL_CHAIN (parm);
3427 else if (!adj->remove_param)
3429 tree new_parm;
3430 tree ptype;
3432 if (adj->by_ref)
3433 ptype = build_pointer_type (adj->type);
3434 else
3435 ptype = adj->type;
3437 if (care_for_types)
3438 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3440 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3441 ptype);
3442 DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);
3444 DECL_ARTIFICIAL (new_parm) = 1;
3445 DECL_ARG_TYPE (new_parm) = ptype;
3446 DECL_CONTEXT (new_parm) = fndecl;
3447 TREE_USED (new_parm) = 1;
3448 DECL_IGNORED_P (new_parm) = 1;
3449 layout_decl (new_parm, 0);
3451 adj->base = parm;
3452 adj->reduction = new_parm;
3454 *link = new_parm;
3456 link = &DECL_CHAIN (new_parm);
3460 *link = NULL_TREE;
3462 if (care_for_types)
3464 new_reversed = nreverse (new_arg_types);
3465 if (last_parm_void)
3467 if (new_reversed)
3468 TREE_CHAIN (new_arg_types) = void_list_node;
3469 else
3470 new_reversed = void_list_node;
3474 /* Use copy_node to preserve as much as possible from original type
3475 (debug info, attribute lists etc.)
3476 Exception is METHOD_TYPEs must have THIS argument.
3477 When we are asked to remove it, we need to build new FUNCTION_TYPE
3478 instead. */
3479 if (TREE_CODE (orig_type) != METHOD_TYPE
3480 || (adjustments[0].copy_param
3481 && adjustments[0].base_index == 0))
3483 new_type = build_distinct_type_copy (orig_type);
3484 TYPE_ARG_TYPES (new_type) = new_reversed;
3486 else
3488 new_type
3489 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3490 new_reversed));
3491 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3492 DECL_VINDEX (fndecl) = NULL_TREE;
3495 /* When signature changes, we need to clear builtin info. */
3496 if (DECL_BUILT_IN (fndecl))
3498 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3499 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3502 /* This is a new type, not a copy of an old type. Need to reassociate
3503 variants. We can handle everything except the main variant lazily. */
3504 t = TYPE_MAIN_VARIANT (orig_type);
3505 if (orig_type != t)
3507 TYPE_MAIN_VARIANT (new_type) = t;
3508 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
3509 TYPE_NEXT_VARIANT (t) = new_type;
3511 else
3513 TYPE_MAIN_VARIANT (new_type) = new_type;
3514 TYPE_NEXT_VARIANT (new_type) = NULL;
3517 TREE_TYPE (fndecl) = new_type;
3518 DECL_VIRTUAL_P (fndecl) = 0;
3519 otypes.release ();
3520 oparms.release ();
3523 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
3524 If this is a directly recursive call, CS must be NULL. Otherwise it must
3525 contain the corresponding call graph edge. */
3527 void
3528 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
3529 ipa_parm_adjustment_vec adjustments)
3531 struct cgraph_node *current_node = cgraph_get_node (current_function_decl);
3532 vec<tree> vargs;
3533 vec<tree, va_gc> **debug_args = NULL;
3534 gimple new_stmt;
3535 gimple_stmt_iterator gsi, prev_gsi;
3536 tree callee_decl;
3537 int i, len;
3539 len = adjustments.length ();
3540 vargs.create (len);
3541 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
3542 ipa_remove_stmt_references (current_node, stmt);
3544 gsi = gsi_for_stmt (stmt);
3545 prev_gsi = gsi;
3546 gsi_prev (&prev_gsi);
3547 for (i = 0; i < len; i++)
3549 struct ipa_parm_adjustment *adj;
3551 adj = &adjustments[i];
3553 if (adj->copy_param)
3555 tree arg = gimple_call_arg (stmt, adj->base_index);
3557 vargs.quick_push (arg);
3559 else if (!adj->remove_param)
3561 tree expr, base, off;
3562 location_t loc;
3563 unsigned int deref_align = 0;
3564 bool deref_base = false;
3566 /* We create a new parameter out of the value of the old one, we can
3567 do the following kind of transformations:
3569 - A scalar passed by reference is converted to a scalar passed by
3570 value. (adj->by_ref is false and the type of the original
3571 actual argument is a pointer to a scalar).
3573 - A part of an aggregate is passed instead of the whole aggregate.
3574 The part can be passed either by value or by reference, this is
3575 determined by value of adj->by_ref. Moreover, the code below
3576 handles both situations when the original aggregate is passed by
3577 value (its type is not a pointer) and when it is passed by
3578 reference (it is a pointer to an aggregate).
3580 When the new argument is passed by reference (adj->by_ref is true)
3581 it must be a part of an aggregate and therefore we form it by
3582 simply taking the address of a reference inside the original
3583 aggregate. */
3585 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3586 base = gimple_call_arg (stmt, adj->base_index);
3587 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3588 : EXPR_LOCATION (base);
3590 if (TREE_CODE (base) != ADDR_EXPR
3591 && POINTER_TYPE_P (TREE_TYPE (base)))
3592 off = build_int_cst (adj->alias_ptr_type,
3593 adj->offset / BITS_PER_UNIT);
3594 else
3596 HOST_WIDE_INT base_offset;
3597 tree prev_base;
3598 bool addrof;
3600 if (TREE_CODE (base) == ADDR_EXPR)
3602 base = TREE_OPERAND (base, 0);
3603 addrof = true;
3605 else
3606 addrof = false;
3607 prev_base = base;
3608 base = get_addr_base_and_unit_offset (base, &base_offset);
3609 /* Aggregate arguments can have non-invariant addresses. */
3610 if (!base)
3612 base = build_fold_addr_expr (prev_base);
3613 off = build_int_cst (adj->alias_ptr_type,
3614 adj->offset / BITS_PER_UNIT);
3616 else if (TREE_CODE (base) == MEM_REF)
3618 if (!addrof)
3620 deref_base = true;
3621 deref_align = TYPE_ALIGN (TREE_TYPE (base));
3623 off = build_int_cst (adj->alias_ptr_type,
3624 base_offset
3625 + adj->offset / BITS_PER_UNIT);
3626 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
3627 off);
3628 base = TREE_OPERAND (base, 0);
3630 else
3632 off = build_int_cst (adj->alias_ptr_type,
3633 base_offset
3634 + adj->offset / BITS_PER_UNIT);
3635 base = build_fold_addr_expr (base);
3639 if (!adj->by_ref)
3641 tree type = adj->type;
3642 unsigned int align;
3643 unsigned HOST_WIDE_INT misalign;
3645 if (deref_base)
3647 align = deref_align;
3648 misalign = 0;
3650 else
3652 get_pointer_alignment_1 (base, &align, &misalign);
3653 if (TYPE_ALIGN (type) > align)
3654 align = TYPE_ALIGN (type);
3656 misalign += (tree_to_double_int (off)
3657 .sext (TYPE_PRECISION (TREE_TYPE (off))).low
3658 * BITS_PER_UNIT);
3659 misalign = misalign & (align - 1);
3660 if (misalign != 0)
3661 align = (misalign & -misalign);
3662 if (align < TYPE_ALIGN (type))
3663 type = build_aligned_type (type, align);
3664 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
3666 else
3668 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
3669 expr = build_fold_addr_expr (expr);
3672 expr = force_gimple_operand_gsi (&gsi, expr,
3673 adj->by_ref
3674 || is_gimple_reg_type (adj->type),
3675 NULL, true, GSI_SAME_STMT);
3676 vargs.quick_push (expr);
3678 if (!adj->copy_param && MAY_HAVE_DEBUG_STMTS)
3680 unsigned int ix;
3681 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
3682 gimple def_temp;
3684 arg = gimple_call_arg (stmt, adj->base_index);
3685 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
3687 if (!fold_convertible_p (TREE_TYPE (origin), arg))
3688 continue;
3689 arg = fold_convert_loc (gimple_location (stmt),
3690 TREE_TYPE (origin), arg);
3692 if (debug_args == NULL)
3693 debug_args = decl_debug_args_insert (callee_decl);
3694 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
3695 if (ddecl == origin)
3697 ddecl = (**debug_args)[ix + 1];
3698 break;
3700 if (ddecl == NULL)
3702 ddecl = make_node (DEBUG_EXPR_DECL);
3703 DECL_ARTIFICIAL (ddecl) = 1;
3704 TREE_TYPE (ddecl) = TREE_TYPE (origin);
3705 DECL_MODE (ddecl) = DECL_MODE (origin);
3707 vec_safe_push (*debug_args, origin);
3708 vec_safe_push (*debug_args, ddecl);
3710 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
3711 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
3715 if (dump_file && (dump_flags & TDF_DETAILS))
3717 fprintf (dump_file, "replacing stmt:");
3718 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
3721 new_stmt = gimple_build_call_vec (callee_decl, vargs);
3722 vargs.release ();
3723 if (gimple_call_lhs (stmt))
3724 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3726 gimple_set_block (new_stmt, gimple_block (stmt));
3727 if (gimple_has_location (stmt))
3728 gimple_set_location (new_stmt, gimple_location (stmt));
3729 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3730 gimple_call_copy_flags (new_stmt, stmt);
3732 if (dump_file && (dump_flags & TDF_DETAILS))
3734 fprintf (dump_file, "with stmt:");
3735 print_gimple_stmt (dump_file, new_stmt, 0, 0);
3736 fprintf (dump_file, "\n");
3738 gsi_replace (&gsi, new_stmt, true);
3739 if (cs)
3740 cgraph_set_call_stmt (cs, new_stmt);
3743 ipa_record_stmt_references (current_node, gsi_stmt (gsi));
3744 gsi_prev (&gsi);
3746 while ((gsi_end_p (prev_gsi) && !gsi_end_p (gsi))
3747 || (!gsi_end_p (prev_gsi) && gsi_stmt (gsi) == gsi_stmt (prev_gsi)));
3749 update_ssa (TODO_update_ssa);
3750 free_dominance_info (CDI_DOMINATORS);
3753 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
3755 static bool
3756 index_in_adjustments_multiple_times_p (int base_index,
3757 ipa_parm_adjustment_vec adjustments)
3759 int i, len = adjustments.length ();
3760 bool one = false;
3762 for (i = 0; i < len; i++)
3764 struct ipa_parm_adjustment *adj;
3765 adj = &adjustments[i];
3767 if (adj->base_index == base_index)
3769 if (one)
3770 return true;
3771 else
3772 one = true;
3775 return false;
3779 /* Return adjustments that should have the same effect on function parameters
3780 and call arguments as if they were first changed according to adjustments in
3781 INNER and then by adjustments in OUTER. */
3783 ipa_parm_adjustment_vec
3784 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
3785 ipa_parm_adjustment_vec outer)
3787 int i, outlen = outer.length ();
3788 int inlen = inner.length ();
3789 int removals = 0;
3790 ipa_parm_adjustment_vec adjustments, tmp;
3792 tmp.create (inlen);
3793 for (i = 0; i < inlen; i++)
3795 struct ipa_parm_adjustment *n;
3796 n = &inner[i];
3798 if (n->remove_param)
3799 removals++;
3800 else
3801 tmp.quick_push (*n);
3804 adjustments.create (outlen + removals);
3805 for (i = 0; i < outlen; i++)
3807 struct ipa_parm_adjustment r;
3808 struct ipa_parm_adjustment *out = &outer[i];
3809 struct ipa_parm_adjustment *in = &tmp[out->base_index];
3811 memset (&r, 0, sizeof (r));
3812 gcc_assert (!in->remove_param);
3813 if (out->remove_param)
3815 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
3817 r.remove_param = true;
3818 adjustments.quick_push (r);
3820 continue;
3823 r.base_index = in->base_index;
3824 r.type = out->type;
3826 /* FIXME: Create nonlocal value too. */
3828 if (in->copy_param && out->copy_param)
3829 r.copy_param = true;
3830 else if (in->copy_param)
3831 r.offset = out->offset;
3832 else if (out->copy_param)
3833 r.offset = in->offset;
3834 else
3835 r.offset = in->offset + out->offset;
3836 adjustments.quick_push (r);
3839 for (i = 0; i < inlen; i++)
3841 struct ipa_parm_adjustment *n = &inner[i];
3843 if (n->remove_param)
3844 adjustments.quick_push (*n);
3847 tmp.release ();
3848 return adjustments;
3851 /* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
3852 friendly way, assuming they are meant to be applied to FNDECL. */
3854 void
3855 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
3856 tree fndecl)
3858 int i, len = adjustments.length ();
3859 bool first = true;
3860 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
3862 fprintf (file, "IPA param adjustments: ");
3863 for (i = 0; i < len; i++)
3865 struct ipa_parm_adjustment *adj;
3866 adj = &adjustments[i];
3868 if (!first)
3869 fprintf (file, " ");
3870 else
3871 first = false;
3873 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
3874 print_generic_expr (file, parms[adj->base_index], 0);
3875 if (adj->base)
3877 fprintf (file, ", base: ");
3878 print_generic_expr (file, adj->base, 0);
3880 if (adj->reduction)
3882 fprintf (file, ", reduction: ");
3883 print_generic_expr (file, adj->reduction, 0);
3885 if (adj->new_ssa_base)
3887 fprintf (file, ", new_ssa_base: ");
3888 print_generic_expr (file, adj->new_ssa_base, 0);
3891 if (adj->copy_param)
3892 fprintf (file, ", copy_param");
3893 else if (adj->remove_param)
3894 fprintf (file, ", remove_param");
3895 else
3896 fprintf (file, ", offset %li", (long) adj->offset);
3897 if (adj->by_ref)
3898 fprintf (file, ", by_ref");
3899 print_node_brief (file, ", type: ", adj->type, 0);
3900 fprintf (file, "\n");
3902 parms.release ();
3905 /* Dump the AV linked list. */
3907 void
3908 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
3910 bool comma = false;
3911 fprintf (f, " Aggregate replacements:");
3912 for (; av; av = av->next)
3914 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
3915 av->index, av->offset);
3916 print_generic_expr (f, av->value, 0);
3917 comma = true;
3919 fprintf (f, "\n");
3922 /* Stream out jump function JUMP_FUNC to OB. */
3924 static void
3925 ipa_write_jump_function (struct output_block *ob,
3926 struct ipa_jump_func *jump_func)
3928 struct ipa_agg_jf_item *item;
3929 struct bitpack_d bp;
3930 int i, count;
3932 streamer_write_uhwi (ob, jump_func->type);
3933 switch (jump_func->type)
3935 case IPA_JF_UNKNOWN:
3936 break;
3937 case IPA_JF_KNOWN_TYPE:
3938 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
3939 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
3940 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
3941 break;
3942 case IPA_JF_CONST:
3943 gcc_assert (
3944 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
3945 stream_write_tree (ob, jump_func->value.constant.value, true);
3946 break;
3947 case IPA_JF_PASS_THROUGH:
3948 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
3949 if (jump_func->value.pass_through.operation == NOP_EXPR)
3951 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
3952 bp = bitpack_create (ob->main_stream);
3953 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
3954 bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
3955 streamer_write_bitpack (&bp);
3957 else
3959 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
3960 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
3962 break;
3963 case IPA_JF_ANCESTOR:
3964 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
3965 stream_write_tree (ob, jump_func->value.ancestor.type, true);
3966 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
3967 bp = bitpack_create (ob->main_stream);
3968 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
3969 bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
3970 streamer_write_bitpack (&bp);
3971 break;
3974 count = vec_safe_length (jump_func->agg.items);
3975 streamer_write_uhwi (ob, count);
3976 if (count)
3978 bp = bitpack_create (ob->main_stream);
3979 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
3980 streamer_write_bitpack (&bp);
3983 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
3985 streamer_write_uhwi (ob, item->offset);
3986 stream_write_tree (ob, item->value, true);
3990 /* Read in jump function JUMP_FUNC from IB. */
3992 static void
3993 ipa_read_jump_function (struct lto_input_block *ib,
3994 struct ipa_jump_func *jump_func,
3995 struct cgraph_edge *cs,
3996 struct data_in *data_in)
3998 enum jump_func_type jftype;
3999 enum tree_code operation;
4000 int i, count;
4002 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4003 switch (jftype)
4005 case IPA_JF_UNKNOWN:
4006 jump_func->type = IPA_JF_UNKNOWN;
4007 break;
4008 case IPA_JF_KNOWN_TYPE:
4010 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4011 tree base_type = stream_read_tree (ib, data_in);
4012 tree component_type = stream_read_tree (ib, data_in);
4014 ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
4015 break;
4017 case IPA_JF_CONST:
4018 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4019 break;
4020 case IPA_JF_PASS_THROUGH:
4021 operation = (enum tree_code) streamer_read_uhwi (ib);
4022 if (operation == NOP_EXPR)
4024 int formal_id = streamer_read_uhwi (ib);
4025 struct bitpack_d bp = streamer_read_bitpack (ib);
4026 bool agg_preserved = bp_unpack_value (&bp, 1);
4027 bool type_preserved = bp_unpack_value (&bp, 1);
4028 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
4029 type_preserved);
4031 else
4033 tree operand = stream_read_tree (ib, data_in);
4034 int formal_id = streamer_read_uhwi (ib);
4035 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4036 operation);
4038 break;
4039 case IPA_JF_ANCESTOR:
4041 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4042 tree type = stream_read_tree (ib, data_in);
4043 int formal_id = streamer_read_uhwi (ib);
4044 struct bitpack_d bp = streamer_read_bitpack (ib);
4045 bool agg_preserved = bp_unpack_value (&bp, 1);
4046 bool type_preserved = bp_unpack_value (&bp, 1);
4048 ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
4049 type_preserved);
4050 break;
4054 count = streamer_read_uhwi (ib);
4055 vec_alloc (jump_func->agg.items, count);
4056 if (count)
4058 struct bitpack_d bp = streamer_read_bitpack (ib);
4059 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4061 for (i = 0; i < count; i++)
4063 struct ipa_agg_jf_item item;
4064 item.offset = streamer_read_uhwi (ib);
4065 item.value = stream_read_tree (ib, data_in);
4066 jump_func->agg.items->quick_push (item);
4070 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4071 relevant to indirect inlining to OB. */
4073 static void
4074 ipa_write_indirect_edge_info (struct output_block *ob,
4075 struct cgraph_edge *cs)
4077 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4078 struct bitpack_d bp;
4080 streamer_write_hwi (ob, ii->param_index);
4081 streamer_write_hwi (ob, ii->offset);
4082 bp = bitpack_create (ob->main_stream);
4083 bp_pack_value (&bp, ii->polymorphic, 1);
4084 bp_pack_value (&bp, ii->agg_contents, 1);
4085 bp_pack_value (&bp, ii->member_ptr, 1);
4086 bp_pack_value (&bp, ii->by_ref, 1);
4087 streamer_write_bitpack (&bp);
4089 if (ii->polymorphic)
4091 streamer_write_hwi (ob, ii->otr_token);
4092 stream_write_tree (ob, ii->otr_type, true);
4096 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4097 relevant to indirect inlining from IB. */
4099 static void
4100 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4101 struct data_in *data_in ATTRIBUTE_UNUSED,
4102 struct cgraph_edge *cs)
4104 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4105 struct bitpack_d bp;
4107 ii->param_index = (int) streamer_read_hwi (ib);
4108 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4109 bp = streamer_read_bitpack (ib);
4110 ii->polymorphic = bp_unpack_value (&bp, 1);
4111 ii->agg_contents = bp_unpack_value (&bp, 1);
4112 ii->member_ptr = bp_unpack_value (&bp, 1);
4113 ii->by_ref = bp_unpack_value (&bp, 1);
4114 if (ii->polymorphic)
4116 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4117 ii->otr_type = stream_read_tree (ib, data_in);
4121 /* Stream out NODE info to OB. */
4123 static void
4124 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4126 int node_ref;
4127 lto_symtab_encoder_t encoder;
4128 struct ipa_node_params *info = IPA_NODE_REF (node);
4129 int j;
4130 struct cgraph_edge *e;
4131 struct bitpack_d bp;
4133 encoder = ob->decl_state->symtab_node_encoder;
4134 node_ref = lto_symtab_encoder_encode (encoder, node);
4135 streamer_write_uhwi (ob, node_ref);
4137 streamer_write_uhwi (ob, ipa_get_param_count (info));
4138 for (j = 0; j < ipa_get_param_count (info); j++)
4139 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4140 bp = bitpack_create (ob->main_stream);
4141 gcc_assert (info->uses_analysis_done
4142 || ipa_get_param_count (info) == 0);
4143 gcc_assert (!info->node_enqueued);
4144 gcc_assert (!info->ipcp_orig_node);
4145 for (j = 0; j < ipa_get_param_count (info); j++)
4146 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4147 streamer_write_bitpack (&bp);
4148 for (j = 0; j < ipa_get_param_count (info); j++)
4149 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4150 for (e = node->callees; e; e = e->next_callee)
4152 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4154 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4155 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4156 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4158 for (e = node->indirect_calls; e; e = e->next_callee)
4160 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4162 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4163 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4164 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4165 ipa_write_indirect_edge_info (ob, e);
4169 /* Stream in NODE info from IB. */
4171 static void
4172 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4173 struct data_in *data_in)
4175 struct ipa_node_params *info = IPA_NODE_REF (node);
4176 int k;
4177 struct cgraph_edge *e;
4178 struct bitpack_d bp;
4180 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4182 for (k = 0; k < ipa_get_param_count (info); k++)
4183 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4185 bp = streamer_read_bitpack (ib);
4186 if (ipa_get_param_count (info) != 0)
4187 info->uses_analysis_done = true;
4188 info->node_enqueued = false;
4189 for (k = 0; k < ipa_get_param_count (info); k++)
4190 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4191 for (k = 0; k < ipa_get_param_count (info); k++)
4192 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4193 for (e = node->callees; e; e = e->next_callee)
4195 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4196 int count = streamer_read_uhwi (ib);
4198 if (!count)
4199 continue;
4200 vec_safe_grow_cleared (args->jump_functions, count);
4202 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4203 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4204 data_in);
4206 for (e = node->indirect_calls; e; e = e->next_callee)
4208 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4209 int count = streamer_read_uhwi (ib);
4211 if (count)
4213 vec_safe_grow_cleared (args->jump_functions, count);
4214 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4215 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4216 data_in);
4218 ipa_read_indirect_edge_info (ib, data_in, e);
4222 /* Write jump functions for nodes in SET. */
4224 void
4225 ipa_prop_write_jump_functions (void)
4227 struct cgraph_node *node;
4228 struct output_block *ob;
4229 unsigned int count = 0;
4230 lto_symtab_encoder_iterator lsei;
4231 lto_symtab_encoder_t encoder;
4234 if (!ipa_node_params_vector.exists ())
4235 return;
4237 ob = create_output_block (LTO_section_jump_functions);
4238 encoder = ob->decl_state->symtab_node_encoder;
4239 ob->cgraph_node = NULL;
4240 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4241 lsei_next_function_in_partition (&lsei))
4243 node = lsei_cgraph_node (lsei);
4244 if (cgraph_function_with_gimple_body_p (node)
4245 && IPA_NODE_REF (node) != NULL)
4246 count++;
4249 streamer_write_uhwi (ob, count);
4251 /* Process all of the functions. */
4252 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4253 lsei_next_function_in_partition (&lsei))
4255 node = lsei_cgraph_node (lsei);
4256 if (cgraph_function_with_gimple_body_p (node)
4257 && IPA_NODE_REF (node) != NULL)
4258 ipa_write_node_info (ob, node);
4260 streamer_write_char_stream (ob->main_stream, 0);
4261 produce_asm (ob, NULL);
4262 destroy_output_block (ob);
4265 /* Read section in file FILE_DATA of length LEN with data DATA. */
4267 static void
4268 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4269 size_t len)
4271 const struct lto_function_header *header =
4272 (const struct lto_function_header *) data;
4273 const int cfg_offset = sizeof (struct lto_function_header);
4274 const int main_offset = cfg_offset + header->cfg_size;
4275 const int string_offset = main_offset + header->main_size;
4276 struct data_in *data_in;
4277 struct lto_input_block ib_main;
4278 unsigned int i;
4279 unsigned int count;
4281 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4282 header->main_size);
4284 data_in =
4285 lto_data_in_create (file_data, (const char *) data + string_offset,
4286 header->string_size, vNULL);
4287 count = streamer_read_uhwi (&ib_main);
4289 for (i = 0; i < count; i++)
4291 unsigned int index;
4292 struct cgraph_node *node;
4293 lto_symtab_encoder_t encoder;
4295 index = streamer_read_uhwi (&ib_main);
4296 encoder = file_data->symtab_node_encoder;
4297 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4298 gcc_assert (node->definition);
4299 ipa_read_node_info (&ib_main, node, data_in);
4301 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4302 len);
4303 lto_data_in_delete (data_in);
4306 /* Read ipcp jump functions. */
4308 void
4309 ipa_prop_read_jump_functions (void)
4311 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4312 struct lto_file_decl_data *file_data;
4313 unsigned int j = 0;
4315 ipa_check_create_node_params ();
4316 ipa_check_create_edge_args ();
4317 ipa_register_cgraph_hooks ();
4319 while ((file_data = file_data_vec[j++]))
4321 size_t len;
4322 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4324 if (data)
4325 ipa_prop_read_section (file_data, data, len);
/* After merging units, we can get mismatch in argument counts.
   Also decl merging might've rendered parameter lists obsolete.
   Also compute called_with_variable_arg info.  */

void
ipa_update_after_lto_read (void)
{
  /* Re-validate the per-node and per-edge summary vectors so they are
     allocated and sized for the merged symbol table.  */
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
}
4340 void
4341 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
4343 int node_ref;
4344 unsigned int count = 0;
4345 lto_symtab_encoder_t encoder;
4346 struct ipa_agg_replacement_value *aggvals, *av;
4348 aggvals = ipa_get_agg_replacements_for_node (node);
4349 encoder = ob->decl_state->symtab_node_encoder;
4350 node_ref = lto_symtab_encoder_encode (encoder, node);
4351 streamer_write_uhwi (ob, node_ref);
4353 for (av = aggvals; av; av = av->next)
4354 count++;
4355 streamer_write_uhwi (ob, count);
4357 for (av = aggvals; av; av = av->next)
4359 struct bitpack_d bp;
4361 streamer_write_uhwi (ob, av->offset);
4362 streamer_write_uhwi (ob, av->index);
4363 stream_write_tree (ob, av->value, true);
4365 bp = bitpack_create (ob->main_stream);
4366 bp_pack_value (&bp, av->by_ref, 1);
4367 streamer_write_bitpack (&bp);
4371 /* Stream in the aggregate value replacement chain for NODE from IB. */
4373 static void
4374 read_agg_replacement_chain (struct lto_input_block *ib,
4375 struct cgraph_node *node,
4376 struct data_in *data_in)
4378 struct ipa_agg_replacement_value *aggvals = NULL;
4379 unsigned int count, i;
4381 count = streamer_read_uhwi (ib);
4382 for (i = 0; i <count; i++)
4384 struct ipa_agg_replacement_value *av;
4385 struct bitpack_d bp;
4387 av = ggc_alloc_ipa_agg_replacement_value ();
4388 av->offset = streamer_read_uhwi (ib);
4389 av->index = streamer_read_uhwi (ib);
4390 av->value = stream_read_tree (ib, data_in);
4391 bp = streamer_read_bitpack (ib);
4392 av->by_ref = bp_unpack_value (&bp, 1);
4393 av->next = aggvals;
4394 aggvals = av;
4396 ipa_set_node_agg_value_chain (node, aggvals);
4399 /* Write all aggregate replacement for nodes in set. */
4401 void
4402 ipa_prop_write_all_agg_replacement (void)
4404 struct cgraph_node *node;
4405 struct output_block *ob;
4406 unsigned int count = 0;
4407 lto_symtab_encoder_iterator lsei;
4408 lto_symtab_encoder_t encoder;
4410 if (!ipa_node_agg_replacements)
4411 return;
4413 ob = create_output_block (LTO_section_ipcp_transform);
4414 encoder = ob->decl_state->symtab_node_encoder;
4415 ob->cgraph_node = NULL;
4416 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4417 lsei_next_function_in_partition (&lsei))
4419 node = lsei_cgraph_node (lsei);
4420 if (cgraph_function_with_gimple_body_p (node)
4421 && ipa_get_agg_replacements_for_node (node) != NULL)
4422 count++;
4425 streamer_write_uhwi (ob, count);
4427 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4428 lsei_next_function_in_partition (&lsei))
4430 node = lsei_cgraph_node (lsei);
4431 if (cgraph_function_with_gimple_body_p (node)
4432 && ipa_get_agg_replacements_for_node (node) != NULL)
4433 write_agg_replacement_chain (ob, node);
4435 streamer_write_char_stream (ob->main_stream, 0);
4436 produce_asm (ob, NULL);
4437 destroy_output_block (ob);
4440 /* Read replacements section in file FILE_DATA of length LEN with data
4441 DATA. */
4443 static void
4444 read_replacements_section (struct lto_file_decl_data *file_data,
4445 const char *data,
4446 size_t len)
4448 const struct lto_function_header *header =
4449 (const struct lto_function_header *) data;
4450 const int cfg_offset = sizeof (struct lto_function_header);
4451 const int main_offset = cfg_offset + header->cfg_size;
4452 const int string_offset = main_offset + header->main_size;
4453 struct data_in *data_in;
4454 struct lto_input_block ib_main;
4455 unsigned int i;
4456 unsigned int count;
4458 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4459 header->main_size);
4461 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
4462 header->string_size, vNULL);
4463 count = streamer_read_uhwi (&ib_main);
4465 for (i = 0; i < count; i++)
4467 unsigned int index;
4468 struct cgraph_node *node;
4469 lto_symtab_encoder_t encoder;
4471 index = streamer_read_uhwi (&ib_main);
4472 encoder = file_data->symtab_node_encoder;
4473 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4474 gcc_assert (node->definition);
4475 read_agg_replacement_chain (&ib_main, node, data_in);
4477 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4478 len);
4479 lto_data_in_delete (data_in);
4482 /* Read IPA-CP aggregate replacements. */
4484 void
4485 ipa_prop_read_all_agg_replacement (void)
4487 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4488 struct lto_file_decl_data *file_data;
4489 unsigned int j = 0;
4491 while ((file_data = file_data_vec[j++]))
4493 size_t len;
4494 const char *data = lto_get_section_data (file_data,
4495 LTO_section_ipcp_transform,
4496 NULL, &len);
4497 if (data)
4498 read_replacements_section (file_data, data, len);
4502 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4503 NODE. */
4505 static void
4506 adjust_agg_replacement_values (struct cgraph_node *node,
4507 struct ipa_agg_replacement_value *aggval)
4509 struct ipa_agg_replacement_value *v;
4510 int i, c = 0, d = 0, *adj;
4512 if (!node->clone.combined_args_to_skip)
4513 return;
4515 for (v = aggval; v; v = v->next)
4517 gcc_assert (v->index >= 0);
4518 if (c < v->index)
4519 c = v->index;
4521 c++;
4523 adj = XALLOCAVEC (int, c);
4524 for (i = 0; i < c; i++)
4525 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
4527 adj[i] = -1;
4528 d++;
4530 else
4531 adj[i] = i - d;
4533 for (v = aggval; v; v = v->next)
4534 v->index = adj[v->index];
4538 /* Function body transformation phase. */
4540 unsigned int
4541 ipcp_transform_function (struct cgraph_node *node)
4543 vec<ipa_param_descriptor_t> descriptors = vNULL;
4544 struct param_analysis_info *parms_ainfo;
4545 struct ipa_agg_replacement_value *aggval;
4546 gimple_stmt_iterator gsi;
4547 basic_block bb;
4548 int param_count;
4549 bool cfg_changed = false, something_changed = false;
4551 gcc_checking_assert (cfun);
4552 gcc_checking_assert (current_function_decl);
4554 if (dump_file)
4555 fprintf (dump_file, "Modification phase of node %s/%i\n",
4556 cgraph_node_name (node), node->order);
4558 aggval = ipa_get_agg_replacements_for_node (node);
4559 if (!aggval)
4560 return 0;
4561 param_count = count_formal_params (node->decl);
4562 if (param_count == 0)
4563 return 0;
4564 adjust_agg_replacement_values (node, aggval);
4565 if (dump_file)
4566 ipa_dump_agg_replacement_values (dump_file, aggval);
4567 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
4568 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
4569 descriptors.safe_grow_cleared (param_count);
4570 ipa_populate_param_decls (node, descriptors);
4572 FOR_EACH_BB (bb)
4573 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4575 struct ipa_agg_replacement_value *v;
4576 gimple stmt = gsi_stmt (gsi);
4577 tree rhs, val, t;
4578 HOST_WIDE_INT offset, size;
4579 int index;
4580 bool by_ref, vce;
4582 if (!gimple_assign_load_p (stmt))
4583 continue;
4584 rhs = gimple_assign_rhs1 (stmt);
4585 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
4586 continue;
4588 vce = false;
4589 t = rhs;
4590 while (handled_component_p (t))
4592 /* V_C_E can do things like convert an array of integers to one
4593 bigger integer and similar things we do not handle below. */
4594 if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
4596 vce = true;
4597 break;
4599 t = TREE_OPERAND (t, 0);
4601 if (vce)
4602 continue;
4604 if (!ipa_load_from_parm_agg_1 (descriptors, parms_ainfo, stmt,
4605 rhs, &index, &offset, &size, &by_ref))
4606 continue;
4607 for (v = aggval; v; v = v->next)
4608 if (v->index == index
4609 && v->offset == offset)
4610 break;
4611 if (!v
4612 || v->by_ref != by_ref
4613 || tree_low_cst (TYPE_SIZE (TREE_TYPE (v->value)), 0) != size)
4614 continue;
4616 gcc_checking_assert (is_gimple_ip_invariant (v->value));
4617 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
4619 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
4620 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
4621 else if (TYPE_SIZE (TREE_TYPE (rhs))
4622 == TYPE_SIZE (TREE_TYPE (v->value)))
4623 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
4624 else
4626 if (dump_file)
4628 fprintf (dump_file, " const ");
4629 print_generic_expr (dump_file, v->value, 0);
4630 fprintf (dump_file, " can't be converted to type of ");
4631 print_generic_expr (dump_file, rhs, 0);
4632 fprintf (dump_file, "\n");
4634 continue;
4637 else
4638 val = v->value;
4640 if (dump_file && (dump_flags & TDF_DETAILS))
4642 fprintf (dump_file, "Modifying stmt:\n ");
4643 print_gimple_stmt (dump_file, stmt, 0, 0);
4645 gimple_assign_set_rhs_from_tree (&gsi, val);
4646 update_stmt (stmt);
4648 if (dump_file && (dump_flags & TDF_DETAILS))
4650 fprintf (dump_file, "into:\n ");
4651 print_gimple_stmt (dump_file, stmt, 0, 0);
4652 fprintf (dump_file, "\n");
4655 something_changed = true;
4656 if (maybe_clean_eh_stmt (stmt)
4657 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4658 cfg_changed = true;
4661 (*ipa_node_agg_replacements)[node->uid] = NULL;
4662 free_parms_ainfo (parms_ainfo, param_count);
4663 descriptors.release ();
4665 if (!something_changed)
4666 return 0;
4667 else if (cfg_changed)
4668 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
4669 else
4670 return TODO_update_ssa_only_virtuals;