/* Interprocedural analyses.
   Copyright (C) 2005-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "expr.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "langhooks.h"
#include "target.h"
#include "ipa-prop.h"
#include "bitmap.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "params.h"
#include "l-ipo.h"
#include "ipa-utils.h"
#include "stringpool.h"
#include "tree-ssanames.h"

/* Intermediate information about a parameter that is only useful during the
   run of ipa_analyze_node and is not kept afterwards.  */

struct param_analysis_info
{
  bool parm_modified, ref_modified, pt_modified;
  bitmap parm_visited_statements, pt_visited_statements;
};

/* Vector where the parameter infos are actually stored.  */
vec<ipa_node_params> ipa_node_params_vector;
/* Vector of known aggregate values in cloned nodes.  */
vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
/* Vector where the per-edge argument infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static alloc_pool ipa_refdesc_pool;

/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
  struct cl_optimization *os;

  if (!fs_opts)
    return false;
  os = TREE_OPTIMIZATION (fs_opts);
  return !os->x_optimize || !os->x_flag_ipa_cp;
}
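
/* Illustrative note (example, not from the original source): a declaration
   such as

     __attribute__ ((optimize ("O0"))) int f (int x);

   gives F a DECL_FUNCTION_SPECIFIC_OPTIMIZATION node whose x_optimize is
   zero, so the predicate above returns true and IPA-CP skips the function.  */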

/* Return index of the formal whose tree is PTREE among the parameter
   DESCRIPTORS.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}

/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
                          vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm));
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

static inline int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Dump the I-th formal parameter of the function corresponding to INFO to
   FILE.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}

/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}

/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
        fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_KNOWN_TYPE)
        {
          fprintf (f, "KNOWN TYPE: base ");
          print_generic_expr (f, jump_func->value.known_type.base_type, 0);
          fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
                   jump_func->value.known_type.offset);
          print_generic_expr (f, jump_func->value.known_type.component_type, 0);
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_CONST)
        {
          tree val = jump_func->value.constant.value;
          fprintf (f, "CONST: ");
          print_generic_expr (f, val, 0);
          if (TREE_CODE (val) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
            {
              fprintf (f, " -> ");
              print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
                                  0);
            }
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_PASS_THROUGH)
        {
          fprintf (f, "PASS THROUGH: ");
          fprintf (f, "%d, op %s",
                   jump_func->value.pass_through.formal_id,
                   get_tree_code_name(jump_func->value.pass_through.operation));
          if (jump_func->value.pass_through.operation != NOP_EXPR)
            {
              fprintf (f, " ");
              print_generic_expr (f,
                                  jump_func->value.pass_through.operand, 0);
            }
          if (jump_func->value.pass_through.agg_preserved)
            fprintf (f, ", agg_preserved");
          if (jump_func->value.pass_through.type_preserved)
            fprintf (f, ", type_preserved");
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_ANCESTOR)
        {
          fprintf (f, "ANCESTOR: ");
          fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
                   jump_func->value.ancestor.formal_id,
                   jump_func->value.ancestor.offset);
          print_generic_expr (f, jump_func->value.ancestor.type, 0);
          if (jump_func->value.ancestor.agg_preserved)
            fprintf (f, ", agg_preserved");
          if (jump_func->value.ancestor.type_preserved)
            fprintf (f, ", type_preserved");
          fprintf (f, "\n");
        }

      if (jump_func->agg.items)
        {
          struct ipa_agg_jf_item *item;
          int j;

          fprintf (f, "         Aggregate passed by %s:\n",
                   jump_func->agg.by_ref ? "reference" : "value");
          FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
            {
              fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
                       item->offset);
              if (TYPE_P (item->value))
                fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
                         tree_to_uhwi (TYPE_SIZE (item->value)));
              else
                {
                  fprintf (f, "cst: ");
                  print_generic_expr (f, item->value, 0);
                }
              fprintf (f, "\n");
            }
        }
    }
}

/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s/%i:\n", node->name (),
           node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
               xstrdup (node->name ()), node->order,
               xstrdup (cs->callee->name ()),
               cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
        fprintf (f, "    indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
                 ii->member_ptr ? "member ptr" : "aggregate",
                 ii->param_index, ii->offset,
                 ii->by_ref ? "by reference" : "by value");
      else
        fprintf (f, "    indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC,
                 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
                 ii->offset);

      if (cs->call_stmt)
        {
          fprintf (f, ", for stmt ");
          print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
        }
      else
        fprintf (f, "\n");
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

/* Set JFUNC to be a known type jump function.  */

static void
ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                       tree base_type, tree component_type)
{
  gcc_assert (contains_polymorphic_type_p (base_type)
              && contains_polymorphic_type_p (component_type));
  if (!flag_devirtualize)
    return;
  jfunc->type = IPA_JF_KNOWN_TYPE;
  jfunc->value.known_type.offset = offset;
  jfunc->value.known_type.base_type = base_type;
  jfunc->value.known_type.component_type = component_type;
  gcc_assert (component_type);
}

/* Set DST to be a copy of another constant jump function SRC (to be used by
   the jump function combination code).  The two functions will share their
   rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
                     struct ipa_jump_func *src)

{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
                     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;
      if (!ipa_refdesc_pool)
        ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
                                        sizeof (struct ipa_cst_ref_desc), 32);

      rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}

/* Set JFUNC to be a simple pass-through jump function.  */
static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                                bool agg_preserved, bool type_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
  jfunc->value.pass_through.type_preserved = type_preserved;
}

/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                               tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
  jfunc->value.pass_through.type_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                     tree type, int formal_id, bool agg_preserved,
                     bool type_preserved)
{
  if (!flag_devirtualize)
    type_preserved = false;
  if (!type_preserved)
    type = NULL_TREE;
  gcc_assert (!type_preserved || contains_polymorphic_type_p (type));
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.type = type_preserved ? type : NULL;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
  jfunc->value.ancestor.type_preserved = type_preserved;
}
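
/* Illustrative example of the jump function kinds set above (example code,
   not from the original source): given

     extern void bar (int, void *);
     void foo (int a, void *p) { bar (a + 4, p); }

   the edge foo->bar gets, roughly, an arithmetic pass-through jump function
   for the first argument (formal 0, PLUS_EXPR, operand 4) built by
   ipa_set_jf_arith_pass_through, and a simple pass-through for the second
   argument built by ipa_set_jf_simple_pass_through.  */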

/* Extract the actual BINFO being described by JFUNC which must be a known type
   jump function.  */

tree
ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
{
  tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
  if (!base_binfo)
    return NULL_TREE;
  return get_binfo_at_offset (base_binfo,
                              jfunc->value.known_type.offset,
                              jfunc->value.known_type.component_type);
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type of the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */
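
/* Illustrative sketch of the constructor shape assumed above (example code,
   not from the original source):

     struct A { virtual void f (); };
     struct B : A { B (); virtual void f (); };

   The compiler-generated B::B () first calls A::A () on the base sub-object
   (section 1), then stores the address of B's vtable into this->_vptr
   (section 2), and only then executes user-written constructor code
   (section 3).  */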

static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  /* TODO: Skip clobbers, doing so triggers problem in PR60306.  */
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
        {
          if (flag_strict_aliasing
              && !POINTER_TYPE_P (TREE_TYPE (lhs)))
            return false;

          if (TREE_CODE (lhs) == COMPONENT_REF
              && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
            return false;
          /* In the future we might want to use get_base_ref_and_offset to find
             if there is a field corresponding to the offset and if so, proceed
             almost like if it was a component ref.  */
        }
    }
  return true;
}

/* If STMT can be proved to be an assignment to the virtual method table
   pointer of the object described by TCI and the type associated with the new
   table identified, return the type.  Otherwise return NULL_TREE.  */

static tree
extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
{
  HOST_WIDE_INT offset, size, max_size;
  tree lhs, rhs, base, binfo;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
    return NULL_TREE;

  base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
  if (offset != tci->offset
      || size != POINTER_SIZE
      || max_size != POINTER_SIZE)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (TREE_CODE (tci->object) != MEM_REF
          || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
          || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
                                  TREE_OPERAND (base, 1)))
        return NULL_TREE;
    }
  else if (tci->object != base)
    return NULL_TREE;

  binfo = vtable_pointer_value_to_binfo (rhs);

  /* FIXME: vtable_pointer_value_to_binfo may return BINFO of a
     base of outer type.  In this case we would need to either
     work on binfos or translate it back to outer type and offset.
     KNOWN_TYPE jump functions are not ready for that, yet.  */
  if (!binfo || TYPE_BINFO (BINFO_TYPE (binfo)) != binfo)
    return NULL;

  return BINFO_TYPE (binfo);
}

/* Callback of walk_aliased_vdefs and a helper function for
   detect_type_change to check whether a particular statement may modify
   the virtual table pointer, and if possible also determine the new type of
   the (sub-)object.  It stores its result into DATA, which points to a
   type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct type_change_info *tci = (struct type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tree type;
      type = extr_type_from_vtbl_ptr_store (stmt, tci);
      if (tci->type_maybe_changed
          && type != tci->known_current_type)
        tci->multiple_types_encountered = true;
      tci->known_current_type = type;
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it has, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gimple call,
                    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  struct type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
                       || TREE_CODE (arg) == MEM_REF
                       || handled_component_p (arg));
  if (!flag_devirtualize)
    return false;

  /* C++ methods are not allowed to change THIS pointer unless they
     are constructors or destructors.  */
  if (TREE_CODE (base) == MEM_REF
      && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
      && TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (base, 0))) == PARM_DECL
      && TREE_CODE (TREE_TYPE (current_function_decl)) == METHOD_TYPE
      && !DECL_CXX_CONSTRUCTOR_P (current_function_decl)
      && !DECL_CXX_DESTRUCTOR_P (current_function_decl)
      && (SSA_NAME_VAR (TREE_OPERAND (base, 0))
          == DECL_ARGUMENTS (current_function_decl)))
    {
      gcc_assert (comp_type);
      return false;
    }

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.known_current_type = NULL_TREE;
  tci.type_maybe_changed = false;
  tci.multiple_types_encountered = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
                      &tci, NULL);
  if (!tci.type_maybe_changed)
    return false;

  if (!tci.known_current_type
      || tci.multiple_types_encountered
      || offset != 0)
    jfunc->type = IPA_JF_UNKNOWN;
  else
    ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);

  return true;
}
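
/* Illustrative example (not from the original source, assumes placement
   operator new is available):

     struct A { virtual void f (); };
     struct B : A { virtual void f (); };

     void g (A *a)
     {
       a->A::~A ();
       new (a) B;
       a->f ();
     }

   Walking the virtual definitions backwards from the call a->f () reaches
   the VMT store performed by the placement-new construction of B, so
   detect_type_change reports that the dynamic type of *a is no longer known
   to be A.  */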

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
                        gimple call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
                build_int_cst (ptr_type_node, 0));

  return detect_type_change (arg, arg, comp_type, call, jfunc, 0);
}

/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
               void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  PARM_AINFO is a pointer to a structure containing temporary
   information about the parameter.  */

static bool
parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
                              gimple stmt, tree parm_load)
{
  bool modified = false;
  bitmap *visited_stmts;
  ao_ref refd;

  if (parm_ainfo && parm_ainfo->parm_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  /* We can cache visited statements only when parm_ainfo is available and when
     we are looking at a naked load of the whole parameter.  */
  if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
    visited_stmts = NULL;
  else
    visited_stmts = &parm_ainfo->parm_visited_statements;
  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
                      visited_stmts);
  if (parm_ainfo && modified)
    parm_ainfo->parm_modified = true;
  return !modified;
}
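
/* Illustrative example (not from the original source): in

     extern void use (int *);
     extern void g (int);

     void f (int a)
     {
       use (&a);
       g (a);
     }

   the parameter A is addressable, so the argument of g (a) is a load from
   memory.  parm_preserved_before_stmt_p walks the virtual definitions
   between the start of f and the call; because use (&a) may store to A,
   the load is not known to be preserved and no pass-through jump function
   is built for it.  */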

/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (vec<ipa_param_descriptor> descriptors,
                            struct param_analysis_info *parms_ainfo,
                            gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
                                        : NULL, stmt, op1))
    return -1;

  return index;
}

/* Return true if memory reference REF loads data that are known to be
   unmodified in this function before reaching statement STMT.  PARM_AINFO, if
   non-NULL, is a pointer to a structure containing temporary information about
   PARM.  */

static bool
parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
                           gimple stmt, tree ref)
{
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (gimple_vuse (stmt));
  if (parm_ainfo && parm_ainfo->ref_modified)
    return false;

  ao_ref_init (&refd, ref);
  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
                      NULL);
  if (parm_ainfo && modified)
    parm_ainfo->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM is known to be unmodified in this
   function before reaching call statement CALL into which it is passed.
   PARM_AINFO is a pointer to a structure containing temporary information
   about PARM.  */

static bool
parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
                              gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm)))
    return false;

  if (parm_ainfo->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
                      parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
  if (modified)
    parm_ainfo->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

static bool
ipa_load_from_parm_agg_1 (vec<ipa_param_descriptor> descriptors,
                          struct param_analysis_info *parms_ainfo, gimple stmt,
                          tree op, int *index_p, HOST_WIDE_INT *offset_p,
                          HOST_WIDE_INT *size_p, bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
          && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
                                           : NULL, stmt, op))
        {
          *index_p = index;
          *by_ref_p = false;
          if (size_p)
            *size_p = size;
          return true;
        }
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
         gimple register, for example:

         void hip7(S*) (struct S * p)
         {
           void (*<T2e4>) (struct S *) D.1867;
           struct S * p.1;

           <bb 2>:
           p.1_1 = p;
           D.1867_2 = p.1_1->f;
           D.1867_2 ();
           gdp = &p;
         */

      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (descriptors, parms_ainfo, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
                                    stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      if (size_p)
        *size_p = size;
      return true;
    }
  return false;
}

/* Just like the previous function, just without the param_analysis_info
   pointer, for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
                        tree op, int *index_p, HOST_WIDE_INT *offset_p,
                        bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (info->descriptors, NULL, stmt, op, index_p,
                                   offset_p, NULL, by_ref_p);
}
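
/* Illustrative example (not from the original source): given

     struct S { int x; };
     int f (struct S *p) { return p->x; }

   the load p->x satisfies ipa_load_from_parm_agg with *INDEX_P == 0,
   *OFFSET_P == 0 and *BY_REF_P == true, provided no statement that may
   modify *p precedes the load.  */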

/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
        int a.0;

        a.0_2 = a;
        bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
        int D.2064;

        D.2064_4 = a.1(D) + 4;
        bar (D.2064_4);

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
        int a.0;
        int D.2064;

        a.0_3 = a;
        D.2064_4 = a.0_3 + 4;
        foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters across different
   stages of IPA optimizations.  PARMS_AINFO contains the information that is
   only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_node_params *info,
                                  struct param_analysis_info *parms_ainfo,
                                  struct ipa_jump_func *jfunc,
                                  gimple call, gimple stmt, tree name,
                                  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
        index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
        index = load_from_unmodified_param (info->descriptors, parms_ainfo,
                                            SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (info->descriptors, parms_ainfo, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
        {
          if (!is_gimple_ip_invariant (op2)
              || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
                  && !useless_type_conversion_p (TREE_TYPE (name),
                                                 TREE_TYPE (op1))))
            return;

          ipa_set_jf_arith_pass_through (jfunc, index, op2,
                                         gimple_assign_rhs_code (stmt));
        }
      else if (gimple_assign_single_p (stmt))
        {
          bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                     call, tc_ssa);
          bool type_p = false;

          if (param_type && POINTER_TYPE_P (param_type))
            type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
                                              call, jfunc);
          if (type_p || jfunc->type == IPA_JF_UNKNOWN)
            ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
        }
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).low * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    {
      bool type_p = (contains_polymorphic_type_p (TREE_TYPE (param_type))
                     && !detect_type_change (op1, base, TREE_TYPE (param_type),
                                             call, jfunc, offset));
      if (type_p || jfunc->type == IPA_JF_UNKNOWN)
        ipa_set_ancestor_jf (jfunc, offset,
                             type_p ? TREE_TYPE (param_type) : NULL, index,
                             parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                           call, ssa), type_p);
    }
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}

/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_node_params *info,
                                    struct param_analysis_info *parms_ainfo,
                                    struct ipa_jump_func *jfunc,
                                    gimple call, gimple phi, tree param_type)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
        return;
    }

  bool type_p = false;
  if (param_type && POINTER_TYPE_P (param_type)
      && contains_polymorphic_type_p (TREE_TYPE (param_type)))
    type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
                                  call, jfunc, offset);
  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
    ipa_set_ancestor_jf (jfunc, offset,
                         type_p ? TREE_TYPE (param_type) : NULL, index,
                         parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                       call, parm), type_p);
}

/* Given OP which is passed as an actual argument to a called function,
   determine if it is possible to construct a KNOWN_TYPE jump function for it
   and if so, create one and store it to JFUNC.
   EXPECTED_TYPE represents a type the argument should be in.  */

static void
compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
                              gimple call, tree expected_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  if (!flag_devirtualize
      || TREE_CODE (op) != ADDR_EXPR
      || !contains_polymorphic_type_p (TREE_TYPE (TREE_TYPE (op)))
      /* Be sure expected_type is polymorphic.  */
      || !expected_type
      || !contains_polymorphic_type_p (expected_type))
    return;

  op = TREE_OPERAND (op, 0);
  base = get_ref_base_and_extent (op, &offset, &size, &max_size);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || !contains_polymorphic_type_p (TREE_TYPE (base))
      || is_global_var (base))
    return;

  if (detect_type_change (op, base, expected_type, call, jfunc, offset))
    return;

  ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
                         expected_type);
}

/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
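
/* Illustrative note (layout sketch under the Itanium C++ ABI, not from the
   original source): a pointer to member function such as

     int (MyClass::*pmf) (int);

   is lowered to a record roughly equivalent to

     struct { int (*__pfn) (); long __delta; };

   which is exactly the two-field shape type_like_member_ptr_p tests for.  */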

/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
        rhs = gimple_assign_rhs1 (def_stmt);
      else
        break;
    }
  return rhs;
}

/* Simple linked list, describing known contents of an aggregate before a
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents are known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};

/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */

static void
determine_known_aggregate_parts (gimple call, tree arg, tree arg_type,
                                 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
        {
          tree type_size;
          if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
            return;
          check_ref = true;
          arg_base = arg;
          arg_offset = 0;
          type_size = TYPE_SIZE (TREE_TYPE (arg_type));
          arg_size = tree_to_uhwi (type_size);
          ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
        }
      else if (TREE_CODE (arg) == ADDR_EXPR)
        {
          HOST_WIDE_INT arg_max_size;

          arg = TREE_OPERAND (arg, 0);
          arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
                                              &arg_max_size);
          if (arg_max_size == -1
              || arg_max_size != arg_size
              || arg_offset < 0)
            return;
          if (DECL_P (arg_base))
            {
              tree size;
              check_ref = false;
              size = build_int_cst (integer_type_node, arg_size);
              ao_ref_init_from_ptr_and_size (&r, arg_base, size);
            }
          else
            return;
        }
      else
        return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
                                          &arg_max_size);
      if (arg_max_size == -1
          || arg_max_size != arg_size
          || arg_offset < 0)
        return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
     structures describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;
      bool partial_overlap;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
        continue;
      if (!gimple_assign_single_p (stmt))
        break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
          || TREE_CODE (lhs) == BIT_FIELD_REF
          || contains_bitfld_component_ref_p (lhs))
        break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
                                          &lhs_max_size);
      if (lhs_max_size == -1
          || lhs_max_size != lhs_size
          || (lhs_offset < arg_offset
              && lhs_offset + lhs_size > arg_offset)
          || (lhs_offset < arg_offset + arg_size
              && lhs_offset + lhs_size > arg_offset + arg_size))
        break;

      if (check_ref)
        {
          if (TREE_CODE (lhs_base) != MEM_REF
              || TREE_OPERAND (lhs_base, 0) != arg_base
              || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
            break;
        }
      else if (lhs_base != arg_base)
        {
          if (DECL_P (lhs_base))
            continue;
          else
            break;
        }

      if (lhs_offset + lhs_size < arg_offset
          || lhs_offset >= (arg_offset + arg_size))
        continue;

      partial_overlap = false;
      p = &list;
      while (*p && (*p)->offset < lhs_offset)
        {
          if ((*p)->offset + (*p)->size > lhs_offset)
            {
              partial_overlap = true;
              break;
            }
          p = &(*p)->next;
        }
      if (partial_overlap)
        break;
      if (*p && (*p)->offset < lhs_offset + lhs_size)
        {
          if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
            /* We already know this value is subsequently overwritten with
               something else.  */
            continue;
          else
            /* Otherwise this is a partial overlap which we cannot
               represent.  */
            break;
        }

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
        {
          n->constant = rhs;
          const_count++;
        }
      else
        n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
          || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
        break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      vec_alloc (jfunc->agg.items, const_count);
      while (list)
        {
          if (list->constant)
            {
              struct ipa_agg_jf_item item;
              item.offset = list->offset - arg_offset;
              gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
              item.value = unshare_expr_without_location (list->constant);
              jfunc->agg.items->quick_push (item);
            }
          list = list->next;
        }
    }
}
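
/* Illustrative example (not from the original source): for

     struct S { int a, b; } s;
     s.a = 1;
     s.b = 2;
     consume (&s);

   the backward walk from the call records the constants 1 and 2 together
   with the bit offsets of fields a and b in the aggregate part of the jump
   function of the first argument.  */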

/* Return the type of the I-th formal parameter of the callee of call graph
   edge E, or NULL if it cannot be determined.  */

static tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
               ? TREE_TYPE (e->callee->decl)
               : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
        break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
        return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
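
/* Illustrative note (example only): for a callee declared with a prototype,
   TYPE_ARG_TYPES supplies the I-th type directly.  For a call such as

     void cb ();                    (unprototyped, K&R style)
     void caller (void) { cb (1); }

   TYPE_ARG_TYPES yields nothing, so the function above falls back to walking
   DECL_ARGUMENTS of the callee and returns NULL when neither source provides
   the type.  */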

/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
                                     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gimple call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);

      if (is_gimple_ip_invariant (arg))
        {
          if (L_IPO_COMP_MODE && TREE_CODE (arg) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (arg, 0)) == FUNCTION_DECL)
            {
              tree fdecl = TREE_OPERAND (arg, 0);
              tree real_fdecl = cgraph_lipo_get_resolved_node (fdecl)->decl;
              if (fdecl != real_fdecl)
                {
                  arg = unshare_expr (arg);
                  TREE_OPERAND (arg, 0) = real_fdecl;
                }
            }
          ipa_set_jf_constant (jfunc, arg, cs);
        }
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
               && TREE_CODE (arg) == PARM_DECL)
        {
          int index = ipa_get_param_decl_index (info, arg);

          gcc_assert (index >= 0);
          /* Aggregate passed by value, check for pass-through, otherwise we
             will attempt to fill in aggregate contents later in this
             for cycle.  */
          if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
            {
              ipa_set_jf_simple_pass_through (jfunc, index, false, false);
              continue;
            }
        }
      else if (TREE_CODE (arg) == SSA_NAME)
        {
          if (SSA_NAME_IS_DEFAULT_DEF (arg))
            {
              int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
              if (index >= 0)
                {
                  bool agg_p, type_p;
                  agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                        call, arg);
                  if (param_type && POINTER_TYPE_P (param_type))
                    type_p = !detect_type_change_ssa (arg,
                                                      TREE_TYPE (param_type),
                                                      call, jfunc);
                  else
                    type_p = false;
                  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
                    ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
                                                    type_p);
                }
            }
          else
            {
              gimple stmt = SSA_NAME_DEF_STMT (arg);
              if (is_gimple_assign (stmt))
                compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
                                                  call, stmt, arg, param_type);
              else if (gimple_code (stmt) == GIMPLE_PHI)
                compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
                                                    call, stmt, param_type);
            }
        }
      else
        compute_known_type_jump_func (arg, jfunc, call,
                                      param_type
                                      && POINTER_TYPE_P (param_type)
                                      ? TREE_TYPE (param_type)
                                      : NULL);

      /* If ARG is pointer, we can not use its type to determine the type of
         aggregate passed (because type conversions are ignored in gimple).
         Usually we can safely get type from function declaration, but in case
         of K&R prototypes or variadic functions we can try our luck with type
         of the pointer passed.
         TODO: Since we look for actual initialization of the memory object, we
         may better work out the type based on the memory stores we find.  */
      if (!param_type)
        param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
           || !ipa_get_jf_pass_through_agg_preserved (jfunc))
          && (jfunc->type != IPA_JF_ANCESTOR
              || !ipa_get_jf_ancestor_agg_preserved (jfunc))
          && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
              || POINTER_TYPE_P (param_type)))
        determine_known_aggregate_parts (call, arg, param_type, jfunc);
    }
}

/* Compute jump functions for all edges - both direct and indirect - outgoing
   from NODE.  Also count the actual arguments in the process.  */

static void
ipa_compute_jump_functions (struct cgraph_node *node,
                            struct param_analysis_info *parms_ainfo)
{
  struct cgraph_edge *cs;

  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
                                                                  NULL);
      /* We do not need to bother analyzing calls to unknown
         functions unless they may become known during lto/whopr.  */
      if (!callee->definition && !flag_lto)
        continue;
      ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
}

/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
                                    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
        return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}

/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}

/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
{
  struct cgraph_edge *cs;

  cs = cgraph_edge (node, stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  return cs;
}
1817 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1818 (described by INFO). PARMS_AINFO is a pointer to a vector containing
1819 intermediate information about each formal parameter. Currently it checks
1820 whether the call calls a pointer that is a formal parameter and if so, the
1821 parameter is marked with the called flag and an indirect call graph edge
1822 describing the call is created. This is very simple for ordinary pointers
1823 represented in SSA but not-so-nice when it comes to member pointers. The
1824 ugly part of this function does nothing more than trying to match the
1825 pattern of such a call. An example of such a pattern is the gimple dump
1826 below, the call is on the last line:
1828 <bb 2>:
1829 f$__delta_5 = f.__delta;
1830 f$__pfn_24 = f.__pfn;
1833 <bb 2>:
1834 f$__delta_5 = MEM[(struct *)&f];
1835 f$__pfn_24 = MEM[(struct *)&f + 4B];
1837 and a few lines below:
1839 <bb 5>
1840 D.2496_3 = (int) f$__pfn_24;
1841 D.2497_4 = D.2496_3 & 1;
1842 if (D.2497_4 != 0)
1843 goto <bb 3>;
1844 else
1845 goto <bb 4>;
1847 <bb 6>:
1848 D.2500_7 = (unsigned int) f$__delta_5;
1849 D.2501_8 = &S + D.2500_7;
1850 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1851 D.2503_10 = *D.2502_9;
1852 D.2504_12 = f$__pfn_24 + -1;
1853 D.2505_13 = (unsigned int) D.2504_12;
1854 D.2506_14 = D.2503_10 + D.2505_13;
1855 D.2507_15 = *D.2506_14;
1856 iftmp.11_16 = (String:: *) D.2507_15;
1858 <bb 7>:
1859 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1860 D.2500_19 = (unsigned int) f$__delta_5;
1861 D.2508_20 = &S + D.2500_19;
1862 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1864 Such patterns are results of simple calls to a member pointer:
1866 int doprinting (int (MyString::* f)(int) const)
1868 MyString S ("somestring");
1870 return (S.*f)(4);
1873 The function also looks for called pointers loaded from aggregates
1874 passed by value or reference. */
1876 static void
1877 ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1878 struct ipa_node_params *info,
1879 struct param_analysis_info *parms_ainfo,
1880 gimple call, tree target)
1882 gimple def;
1883 tree n1, n2;
1884 gimple d1, d2;
1885 tree rec, rec2, cond;
1886 gimple branch;
1887 int index;
1888 basic_block bb, virt_bb, join;
1889 HOST_WIDE_INT offset;
1890 bool by_ref;
1892 if (SSA_NAME_IS_DEFAULT_DEF (target))
1894 tree var = SSA_NAME_VAR (target);
1895 index = ipa_get_param_decl_index (info, var);
1896 if (index >= 0)
1897 ipa_note_param_call (node, index, call);
1898 return;
1901 def = SSA_NAME_DEF_STMT (target);
1902 if (gimple_assign_single_p (def)
1903 && ipa_load_from_parm_agg_1 (info->descriptors, parms_ainfo, def,
1904 gimple_assign_rhs1 (def), &index, &offset,
1905 NULL, &by_ref))
1907 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1908 if (cs->indirect_info->offset != offset)
1909 cs->indirect_info->outer_type = NULL;
1910 cs->indirect_info->offset = offset;
1911 cs->indirect_info->agg_contents = 1;
1912 cs->indirect_info->by_ref = by_ref;
1913 return;
1916 /* Now we need to try to match the complex pattern of calling a member
1917 pointer. */
1918 if (gimple_code (def) != GIMPLE_PHI
1919 || gimple_phi_num_args (def) != 2
1920 || !POINTER_TYPE_P (TREE_TYPE (target))
1921 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1922 return;
1924 /* First, we need to check whether one of these is a load from a member
1925 pointer that is a parameter to this function. */
1926 n1 = PHI_ARG_DEF (def, 0);
1927 n2 = PHI_ARG_DEF (def, 1);
1928 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1929 return;
1930 d1 = SSA_NAME_DEF_STMT (n1);
1931 d2 = SSA_NAME_DEF_STMT (n2);
1933 join = gimple_bb (def);
1934 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
1936 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
1937 return;
1939 bb = EDGE_PRED (join, 0)->src;
1940 virt_bb = gimple_bb (d2);
1942 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
1944 bb = EDGE_PRED (join, 1)->src;
1945 virt_bb = gimple_bb (d1);
1947 else
1948 return;
1950 /* Second, we need to check that the basic blocks are laid out in the way
1951 corresponding to the pattern. */
1953 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1954 || single_pred (virt_bb) != bb
1955 || single_succ (virt_bb) != join)
1956 return;
1958 /* Third, let's see that the branching is done depending on the least
1959 significant bit of the pfn. */
1961 branch = last_stmt (bb);
1962 if (!branch || gimple_code (branch) != GIMPLE_COND)
1963 return;
1965 if ((gimple_cond_code (branch) != NE_EXPR
1966 && gimple_cond_code (branch) != EQ_EXPR)
1967 || !integer_zerop (gimple_cond_rhs (branch)))
1968 return;
1970 cond = gimple_cond_lhs (branch);
1971 if (!ipa_is_ssa_with_stmt_def (cond))
1972 return;
1974 def = SSA_NAME_DEF_STMT (cond);
1975 if (!is_gimple_assign (def)
1976 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1977 || !integer_onep (gimple_assign_rhs2 (def)))
1978 return;
1980 cond = gimple_assign_rhs1 (def);
1981 if (!ipa_is_ssa_with_stmt_def (cond))
1982 return;
1984 def = SSA_NAME_DEF_STMT (cond);
1986 if (is_gimple_assign (def)
1987 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
1989 cond = gimple_assign_rhs1 (def);
1990 if (!ipa_is_ssa_with_stmt_def (cond))
1991 return;
1992 def = SSA_NAME_DEF_STMT (cond);
1995 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1996 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1997 == ptrmemfunc_vbit_in_delta),
1998 NULL);
1999 if (rec != rec2)
2000 return;
2002 index = ipa_get_param_decl_index (info, rec);
2003 if (index >= 0
2004 && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
2006 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
2007 if (cs->indirect_info->offset != offset)
2008 cs->indirect_info->outer_type = NULL;
2009 cs->indirect_info->offset = offset;
2010 cs->indirect_info->agg_contents = 1;
2011 cs->indirect_info->member_ptr = 1;
2014 return;
2017 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2018 object referenced in the expression is a formal parameter of the caller
2019 (described by INFO), create a call note for the statement. */
2021 static void
2022 ipa_analyze_virtual_call_uses (struct cgraph_node *node,
2023 struct ipa_node_params *info, gimple call,
2024 tree target)
2026 struct cgraph_edge *cs;
2027 struct cgraph_indirect_call_info *ii;
2028 struct ipa_jump_func jfunc;
2029 tree obj = OBJ_TYPE_REF_OBJECT (target);
2030 int index;
2031 HOST_WIDE_INT anc_offset;
2033 if (!flag_devirtualize)
2034 return;
2036 if (TREE_CODE (obj) != SSA_NAME)
2037 return;
2039 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2041 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2042 return;
2044 anc_offset = 0;
2045 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2046 gcc_assert (index >= 0);
2047 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2048 call, &jfunc))
2049 return;
2051 else
2053 gimple stmt = SSA_NAME_DEF_STMT (obj);
2054 tree expr;
2056 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2057 if (!expr)
2058 return;
2059 index = ipa_get_param_decl_index (info,
2060 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2061 gcc_assert (index >= 0);
2062 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2063 call, &jfunc, anc_offset))
2064 return;
2067 cs = ipa_note_param_call (node, index, call);
2068 ii = cs->indirect_info;
2069 ii->offset = anc_offset;
2070 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2071 ii->otr_type = obj_type_ref_class (target);
2072 ii->polymorphic = 1;
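/* Editorial sketch of the simple case handled above: in

     int call_foo (A *obj) { return obj->foo (); }

   the gimple call target is an OBJ_TYPE_REF whose object is the default
   definition SSA name of parameter OBJ, so an indirect edge with
   param_index 0, offset 0 and polymorphic set is recorded, provided no
   dynamic type change of *OBJ is detected before the call.  A and foo
   are hypothetical names.  */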
2075 /* Analyze a call statement CALL whether and how it utilizes formal parameters
2076 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
2077 containing intermediate information about each formal parameter. */
2079 static void
2080 ipa_analyze_call_uses (struct cgraph_node *node,
2081 struct ipa_node_params *info,
2082 struct param_analysis_info *parms_ainfo, gimple call)
2084 tree target = gimple_call_fn (call);
2085 struct cgraph_edge *cs;
2087 if (!target
2088 || (TREE_CODE (target) != SSA_NAME
2089 && !virtual_method_call_p (target)))
2090 return;
2092 /* If we previously turned the call into a direct call, there is
2093 no need to analyze. */
2094 cs = cgraph_edge (node, call);
2095 if (cs && !cs->indirect_unknown_callee)
2096 return;
2097 if (TREE_CODE (target) == SSA_NAME)
2098 ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
2099 else if (virtual_method_call_p (target))
2100 ipa_analyze_virtual_call_uses (node, info, call, target);
2104 /* Analyze the call statement STMT with respect to formal parameters (described
2105 in INFO) of caller given by NODE. Currently it only checks whether formal
2106 parameters are called. PARMS_AINFO is a pointer to a vector containing
2107 intermediate information about each formal parameter. */
2109 static void
2110 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
2111 struct param_analysis_info *parms_ainfo, gimple stmt)
2113 if (is_gimple_call (stmt))
2114 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
2117 /* Callback of walk_stmt_load_store_addr_ops for visit_load, visit_store and visit_addr.
2118 If OP is a parameter declaration, mark it as used in the info structure
2119 passed in DATA. */
2121 static bool
2122 visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
2124 struct ipa_node_params *info = (struct ipa_node_params *) data;
2126 op = get_base_address (op);
2127 if (op
2128 && TREE_CODE (op) == PARM_DECL)
2130 int index = ipa_get_param_decl_index (info, op);
2131 gcc_assert (index >= 0);
2132 ipa_set_param_used (info, index, true);
2135 return false;
2138 /* Scan the function body of NODE and inspect the uses of formal parameters.
2139 Store the findings in various structures of the associated ipa_node_params
2140 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
2141 vector containing intermediate information about each formal parameter. */
2143 static void
2144 ipa_analyze_params_uses (struct cgraph_node *node,
2145 struct param_analysis_info *parms_ainfo)
2147 tree decl = node->decl;
2148 basic_block bb;
2149 struct function *func;
2150 gimple_stmt_iterator gsi;
2151 struct ipa_node_params *info = IPA_NODE_REF (node);
2152 int i;
2154 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
2155 return;
2157 info->uses_analysis_done = 1;
2158 if (ipa_func_spec_opts_forbid_analysis_p (node))
2160 for (i = 0; i < ipa_get_param_count (info); i++)
2162 ipa_set_param_used (info, i, true);
2163 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2165 return;
2168 for (i = 0; i < ipa_get_param_count (info); i++)
2170 tree parm = ipa_get_param (info, i);
2171 int controlled_uses = 0;
2173 /* For SSA regs see if parameter is used. For non-SSA we compute
2174 the flag during modification analysis. */
2175 if (is_gimple_reg (parm))
2177 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2178 parm);
2179 if (ddef && !has_zero_uses (ddef))
2181 imm_use_iterator imm_iter;
2182 use_operand_p use_p;
2184 ipa_set_param_used (info, i, true);
2185 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2186 if (!is_gimple_call (USE_STMT (use_p)))
2188 if (!is_gimple_debug (USE_STMT (use_p)))
2190 controlled_uses = IPA_UNDESCRIBED_USE;
2191 break;
2194 else
2195 controlled_uses++;
2197 else
2198 controlled_uses = 0;
2200 else
2201 controlled_uses = IPA_UNDESCRIBED_USE;
2202 ipa_set_controlled_uses (info, i, controlled_uses);
2205 func = DECL_STRUCT_FUNCTION (decl);
2206 FOR_EACH_BB_FN (bb, func)
2208 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2210 gimple stmt = gsi_stmt (gsi);
2212 if (is_gimple_debug (stmt))
2213 continue;
2215 ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
2216 walk_stmt_load_store_addr_ops (stmt, info,
2217 visit_ref_for_mod_analysis,
2218 visit_ref_for_mod_analysis,
2219 visit_ref_for_mod_analysis);
2221 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2222 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
2223 visit_ref_for_mod_analysis,
2224 visit_ref_for_mod_analysis,
2225 visit_ref_for_mod_analysis);
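/* Editorial illustration of the controlled-uses computation above: in

     static void bar (void (*fn) (void)) { fn (); }

   every non-debug use of the default definition of FN occurs in a call
   statement, so the uses are counted as controlled; storing FN to memory
   or doing arithmetic on it would instead force IPA_UNDESCRIBED_USE.  */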
2229 /* Free stuff in PARMS_AINFO, assume there are PARAM_COUNT parameters. */
2231 static void
2232 free_parms_ainfo (struct param_analysis_info *parms_ainfo, int param_count)
2234 int i;
2236 for (i = 0; i < param_count; i++)
2238 if (parms_ainfo[i].parm_visited_statements)
2239 BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
2240 if (parms_ainfo[i].pt_visited_statements)
2241 BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
2245 /* Initialize the array describing properties of formal parameters
2246 of NODE, analyze their uses and compute jump functions associated
2247 with actual arguments of calls from within NODE. */
2249 void
2250 ipa_analyze_node (struct cgraph_node *node)
2252 struct ipa_node_params *info;
2253 struct param_analysis_info *parms_ainfo;
2254 int param_count;
2256 ipa_check_create_node_params ();
2257 ipa_check_create_edge_args ();
2258 info = IPA_NODE_REF (node);
2259 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
2260 ipa_initialize_node_params (node);
2262 param_count = ipa_get_param_count (info);
2263 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
2264 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
2266 ipa_analyze_params_uses (node, parms_ainfo);
2267 ipa_compute_jump_functions (node, parms_ainfo);
2269 free_parms_ainfo (parms_ainfo, param_count);
2270 pop_cfun ();
2273 /* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF
2274 attempt a type-based devirtualization. If successful, return the
2275 target function declaration, otherwise return NULL. */
2277 tree
2278 ipa_intraprocedural_devirtualization (gimple call)
2280 tree binfo, token, fndecl;
2281 struct ipa_jump_func jfunc;
2282 tree otr = gimple_call_fn (call);
2284 jfunc.type = IPA_JF_UNKNOWN;
2285 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
2286 call, obj_type_ref_class (otr));
2287 if (jfunc.type != IPA_JF_KNOWN_TYPE)
2288 return NULL_TREE;
2289 binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
2290 if (!binfo)
2291 return NULL_TREE;
2292 token = OBJ_TYPE_REF_TOKEN (otr);
2293 fndecl = gimple_get_virt_method_for_binfo (tree_to_uhwi (token),
2294 binfo);
2295 #ifdef ENABLE_CHECKING
2296 if (fndecl)
2297 gcc_assert (possible_polymorphic_call_target_p
2298 (otr, cgraph_get_node (fndecl)));
2299 #endif
2300 return fndecl;
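/* Editorial sketch, assuming a simple C++ source:

     struct A { virtual int f () { return 1; } };
     int g () { A a; return a.f (); }

   the dynamic type of the object A is locally known, so
   compute_known_type_jump_func yields a KNOWN_TYPE jump function, its
   BINFO is recovered, and the OBJ_TYPE_REF call resolves to A::f with
   no interprocedural propagation needed.  */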
2303 /* Update the jump function DST when the call graph edge corresponding to SRC
2304 is being inlined, knowing that DST is of type ancestor and SRC of known
2305 type. */
2307 static void
2308 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2309 struct ipa_jump_func *dst)
2311 HOST_WIDE_INT combined_offset;
2312 tree combined_type;
2314 if (!ipa_get_jf_ancestor_type_preserved (dst))
2316 dst->type = IPA_JF_UNKNOWN;
2317 return;
2320 combined_offset = ipa_get_jf_known_type_offset (src)
2321 + ipa_get_jf_ancestor_offset (dst);
2322 combined_type = ipa_get_jf_ancestor_type (dst);
2324 ipa_set_jf_known_type (dst, combined_offset,
2325 ipa_get_jf_known_type_base_type (src),
2326 combined_type);
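/* Editorial worked example of the offset arithmetic above: if SRC says
   "known type D at offset 0" and DST is an ancestor jump function that
   converts to a base at offset 64 bits, the result is a KNOWN_TYPE jump
   function at offset 0 + 64 = 64 whose component type is the ancestor's
   type, provided the ancestor preserves the type.  */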
2329 /* Update the jump functions associated with call graph edge E when the call
2330 graph edge CS is being inlined, assuming that E->caller is already (possibly
2331 indirectly) inlined into CS->callee and that E has not been inlined. */
2333 static void
2334 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2335 struct cgraph_edge *e)
2337 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2338 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2339 int count = ipa_get_cs_argument_count (args);
2340 int i;
2342 for (i = 0; i < count; i++)
2344 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2346 if (dst->type == IPA_JF_ANCESTOR)
2348 struct ipa_jump_func *src;
2349 int dst_fid = dst->value.ancestor.formal_id;
2351 /* Variable number of arguments can cause havoc if we try to access
2352 one that does not exist in the inlined edge. So make sure we
2353 don't. */
2354 if (dst_fid >= ipa_get_cs_argument_count (top))
2356 dst->type = IPA_JF_UNKNOWN;
2357 continue;
2360 src = ipa_get_ith_jump_func (top, dst_fid);
2362 if (src->agg.items
2363 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2365 struct ipa_agg_jf_item *item;
2366 int j;
2368 /* Currently we do not produce clobber aggregate jump functions,
2369 replace with merging when we do. */
2370 gcc_assert (!dst->agg.items);
2372 dst->agg.items = vec_safe_copy (src->agg.items);
2373 dst->agg.by_ref = src->agg.by_ref;
2374 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2375 item->offset -= dst->value.ancestor.offset;
2378 if (src->type == IPA_JF_KNOWN_TYPE)
2379 combine_known_type_and_ancestor_jfs (src, dst);
2380 else if (src->type == IPA_JF_PASS_THROUGH
2381 && src->value.pass_through.operation == NOP_EXPR)
2383 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2384 dst->value.ancestor.agg_preserved &=
2385 src->value.pass_through.agg_preserved;
2386 dst->value.ancestor.type_preserved &=
2387 src->value.pass_through.type_preserved;
2389 else if (src->type == IPA_JF_ANCESTOR)
2391 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2392 dst->value.ancestor.offset += src->value.ancestor.offset;
2393 dst->value.ancestor.agg_preserved &=
2394 src->value.ancestor.agg_preserved;
2395 dst->value.ancestor.type_preserved &=
2396 src->value.ancestor.type_preserved;
2398 else
2399 dst->type = IPA_JF_UNKNOWN;
2401 else if (dst->type == IPA_JF_PASS_THROUGH)
2403 struct ipa_jump_func *src;
2404 /* We must check range due to calls with variable number of arguments
2405 and we cannot combine jump functions with operations. */
2406 if (dst->value.pass_through.operation == NOP_EXPR
2407 && (dst->value.pass_through.formal_id
2408 < ipa_get_cs_argument_count (top)))
2410 int dst_fid = dst->value.pass_through.formal_id;
2411 src = ipa_get_ith_jump_func (top, dst_fid);
2412 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2414 switch (src->type)
2416 case IPA_JF_UNKNOWN:
2417 dst->type = IPA_JF_UNKNOWN;
2418 break;
2419 case IPA_JF_KNOWN_TYPE:
2420 if (ipa_get_jf_pass_through_type_preserved (dst))
2421 ipa_set_jf_known_type (dst,
2422 ipa_get_jf_known_type_offset (src),
2423 ipa_get_jf_known_type_base_type (src),
2424 ipa_get_jf_known_type_component_type (src));
2425 else
2426 dst->type = IPA_JF_UNKNOWN;
2427 break;
2428 case IPA_JF_CONST:
2429 ipa_set_jf_cst_copy (dst, src);
2430 break;
2432 case IPA_JF_PASS_THROUGH:
2434 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2435 enum tree_code operation;
2436 operation = ipa_get_jf_pass_through_operation (src);
2438 if (operation == NOP_EXPR)
2440 bool agg_p, type_p;
2441 agg_p = dst_agg_p
2442 && ipa_get_jf_pass_through_agg_preserved (src);
2443 type_p = ipa_get_jf_pass_through_type_preserved (src)
2444 && ipa_get_jf_pass_through_type_preserved (dst);
2445 ipa_set_jf_simple_pass_through (dst, formal_id,
2446 agg_p, type_p);
2448 else
2450 tree operand = ipa_get_jf_pass_through_operand (src);
2451 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2452 operation);
2454 break;
2456 case IPA_JF_ANCESTOR:
2458 bool agg_p, type_p;
2459 agg_p = dst_agg_p
2460 && ipa_get_jf_ancestor_agg_preserved (src);
2461 type_p = ipa_get_jf_ancestor_type_preserved (src)
2462 && ipa_get_jf_pass_through_type_preserved (dst);
2463 ipa_set_ancestor_jf (dst,
2464 ipa_get_jf_ancestor_offset (src),
2465 ipa_get_jf_ancestor_type (src),
2466 ipa_get_jf_ancestor_formal_id (src),
2467 agg_p, type_p);
2468 break;
2470 default:
2471 gcc_unreachable ();
2474 if (src->agg.items
2475 && (dst_agg_p || !src->agg.by_ref))
2477 /* Currently we do not produce clobber aggregate jump
2478 functions, replace with merging when we do. */
2479 gcc_assert (!dst->agg.items);
2481 dst->agg.by_ref = src->agg.by_ref;
2482 dst->agg.items = vec_safe_copy (src->agg.items);
2485 else
2486 dst->type = IPA_JF_UNKNOWN;
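/* Editorial sketch of the pass-through composition above: if the inlined
   edge CS passed the caller's argument 2 unchanged (a NOP_EXPR
   pass-through) and edge E inside the inlined body passes the matching
   parameter on again, E's jump function becomes a simple pass-through
   of the caller's argument 2, with the agg_preserved and type_preserved
   flags of the two functions ANDed together.  */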
2491 /* If TARGET is an addr_expr of a function declaration, make it the destination
2492 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2494 struct cgraph_edge *
2495 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
2497 struct cgraph_node *callee;
2498 struct inline_edge_summary *es = inline_edge_summary (ie);
2499 bool unreachable = false;
2501 if (TREE_CODE (target) == ADDR_EXPR)
2502 target = TREE_OPERAND (target, 0);
2503 if (TREE_CODE (target) != FUNCTION_DECL)
2505 target = canonicalize_constructor_val (target, NULL);
2506 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2508 if (ie->indirect_info->member_ptr)
2509 /* Member pointer call that goes through a VMT lookup. */
2510 return NULL;
2512 if (dump_file)
2513 fprintf (dump_file, "ipa-prop: Discovered direct call to non-function"
2514 " in %s/%i, making it unreachable.\n",
2515 ie->caller->name (), ie->caller->order);
2516 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2517 callee = cgraph_get_create_node (target);
2518 unreachable = true;
2520 else
2521 callee = cgraph_get_node (target);
2523 else
2524 callee = cgraph_get_node (target);
2526 /* Because may-edges are not explicitly represented and the vtable may be external,
2527 we may create the first reference to the object in the unit. */
2528 if (!callee || callee->global.inlined_to)
2531 /* We had better make sure we can refer to it.
2532 In the case of static functions we are out of luck, since we already
2533 removed its body. In the case of public functions we may or may
2534 not introduce the reference. */
2535 if (!canonicalize_constructor_val (target, NULL)
2536 || !TREE_PUBLIC (target))
2538 if (dump_file)
2539 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2540 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2541 xstrdup (ie->caller->name ()),
2542 ie->caller->order,
2543 xstrdup (ie->callee->name ()),
2544 ie->callee->order);
2545 return NULL;
2547 callee = cgraph_get_create_node (target);
2549 ipa_check_create_node_params ();
2551 /* We cannot make edges to inline clones. It is a bug that someone removed
2552 the cgraph node too early. */
2553 gcc_assert (!callee->global.inlined_to);
2555 if (dump_file && !unreachable)
2557 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
2558 "(%s/%i -> %s/%i), for stmt ",
2559 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2560 xstrdup (ie->caller->name ()),
2561 ie->caller->order,
2562 xstrdup (callee->name ()),
2563 callee->order);
2564 if (ie->call_stmt)
2565 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2566 else
2567 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2569 ie = cgraph_make_edge_direct (ie, callee);
2570 es = inline_edge_summary (ie);
2571 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2572 - eni_size_weights.call_cost);
2573 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2574 - eni_time_weights.call_cost);
2576 return ie;
2579 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2580 return NULL if there is none. BY_REF specifies whether the value has to
2581 be passed by reference or by value. */
2583 tree
2584 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2585 HOST_WIDE_INT offset, bool by_ref)
2587 struct ipa_agg_jf_item *item;
2588 int i;
2590 if (by_ref != agg->by_ref)
2591 return NULL;
2593 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2594 if (item->offset == offset)
2596 /* Currently we do not have clobber values, return NULL for them once
2597 we do. */
2598 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2599 return item->value;
2601 return NULL;
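/* Editorial illustration with hypothetical names: a caller doing

     struct S s;
     s.callback = do_work;
     process (&s);

   can produce an aggregate jump function for &s containing an item
   { offset = bit position of callback, value = &do_work }, which the
   lookup above returns when the callee reads a pointer from the same
   offset and the by_ref flags agree.  */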
2604 /* Remove a reference to SYMBOL from the list of references of a node given by
2605 reference description RDESC. Return true if the reference has been
2606 successfully found and removed. */
2608 static bool
2609 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2611 struct ipa_ref *to_del;
2612 struct cgraph_edge *origin;
2614 origin = rdesc->cs;
2615 if (!origin)
2616 return false;
2617 to_del = ipa_find_reference (origin->caller, symbol,
2618 origin->call_stmt, origin->lto_stmt_uid);
2619 if (!to_del)
2620 return false;
2622 ipa_remove_reference (to_del);
2623 if (dump_file)
2624 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2625 xstrdup (origin->caller->name ()),
2626 origin->caller->order, xstrdup (symbol->name ()));
2627 return true;
2630 /* If JFUNC has a reference description with refcount different from
2631 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2632 NULL. JFUNC must be a constant jump function. */
2634 static struct ipa_cst_ref_desc *
2635 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2637 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2638 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2639 return rdesc;
2640 else
2641 return NULL;
2644 /* If the value of constant jump function JFUNC is an address of a function
2645 declaration, return the associated call graph node. Otherwise return
2646 NULL. */
2648 static cgraph_node *
2649 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2651 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2652 tree cst = ipa_get_jf_constant (jfunc);
2653 if (TREE_CODE (cst) != ADDR_EXPR
2654 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2655 return NULL;
2657 return cgraph_get_node (TREE_OPERAND (cst, 0));
2661 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2662 refcount and if it hits zero, remove the reference to the described symbol
2663 from the caller of the edge specified in the rdesc. Return false if either the symbol or the
2664 reference could not be found, otherwise return true. */
2666 static bool
2667 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2669 struct ipa_cst_ref_desc *rdesc;
2670 if (jfunc->type == IPA_JF_CONST
2671 && (rdesc = jfunc_rdesc_usable (jfunc))
2672 && --rdesc->refcount == 0)
2674 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
2675 if (!symbol)
2676 return false;
2678 return remove_described_reference (symbol, rdesc);
2680 return true;
2683 /* Try to find a destination for indirect edge IE that corresponds to a simple
2684 call or a call of a member function pointer and where the destination is a
2685 pointer formal parameter described by jump function JFUNC. If it can be
2686 determined, return the newly direct edge, otherwise return NULL.
2687 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2689 static struct cgraph_edge *
2690 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2691 struct ipa_jump_func *jfunc,
2692 struct ipa_node_params *new_root_info)
2694 struct cgraph_edge *cs;
2695 tree target;
2696 bool agg_contents = ie->indirect_info->agg_contents;
2698 if (ie->indirect_info->agg_contents)
2699 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2700 ie->indirect_info->offset,
2701 ie->indirect_info->by_ref);
2702 else
2703 target = ipa_value_from_jfunc (new_root_info, jfunc);
2704 if (!target)
2705 return NULL;
2706 cs = ipa_make_edge_direct_to_target (ie, target);
2708 if (cs && !agg_contents)
2710 bool ok;
2711 gcc_checking_assert (cs->callee
2712 && (cs != ie
2713 || jfunc->type != IPA_JF_CONST
2714 || !cgraph_node_for_jfunc (jfunc)
2715 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2716 ok = try_decrement_rdesc_refcount (jfunc);
2717 gcc_checking_assert (ok);
2720 return cs;
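/* Editorial sketch: if the indirect edge IE calls through a pointer
   parameter and JFUNC says that parameter is the constant &bar, the
   edge is redirected to BAR above and one described use of the constant
   is consumed, hence the refcount decrement.  BAR is a hypothetical
   name.  */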
2723 /* Return the target to be used in cases of impossible devirtualization. IE
2724 and target (the latter can be NULL) are dumped when dumping is enabled. */
2726 tree
2727 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
2729 if (dump_file)
2731 if (target)
2732 fprintf (dump_file,
2733 "Type inconsistent devirtualization: %s/%i->%s\n",
2734 ie->caller->name (), ie->caller->order,
2735 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
2736 else
2737 fprintf (dump_file,
2738 "No devirtualization target in %s/%i\n",
2739 ie->caller->name (), ie->caller->order);
2741 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2742 cgraph_get_create_node (new_target);
2743 return new_target;
2746 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2747 call based on a formal parameter which is described by jump function JFUNC
2748 and if it can be determined, make it direct and return the direct edge.
2749 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
2750 are relative to. */
2752 static struct cgraph_edge *
2753 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2754 struct ipa_jump_func *jfunc,
2755 struct ipa_node_params *new_root_info)
2757 tree binfo, target;
2759 if (!flag_devirtualize
2760 /* FIXME_LIPO -- LIPO is not yet compatible
2761 with ipa devirt. */
2762 || flag_dyn_ipa)
2763 return NULL;
2765 /* First try to do lookup via known virtual table pointer value. */
2766 if (!ie->indirect_info->by_ref)
2768 tree vtable;
2769 unsigned HOST_WIDE_INT offset;
2770 tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
2771 ie->indirect_info->offset,
2772 true);
2773 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
2775 target = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
2776 vtable, offset);
2777 if (target)
2779 if ((TREE_CODE (TREE_TYPE (target)) == FUNCTION_TYPE
2780 && DECL_FUNCTION_CODE (target) == BUILT_IN_UNREACHABLE)
2781 || !possible_polymorphic_call_target_p
2782 (ie, cgraph_get_node (target)))
2783 target = ipa_impossible_devirt_target (ie, target);
2784 return ipa_make_edge_direct_to_target (ie, target);
2789 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
2791 if (!binfo)
2792 return NULL;
2794 if (TREE_CODE (binfo) != TREE_BINFO)
2796 ipa_polymorphic_call_context context;
2797 vec <cgraph_node *>targets;
2798 bool final;
2800 if (!get_polymorphic_call_info_from_invariant
2801 (&context, binfo, ie->indirect_info->otr_type,
2802 ie->indirect_info->offset))
2803 return NULL;
2804 targets = possible_polymorphic_call_targets
2805 (ie->indirect_info->otr_type,
2806 ie->indirect_info->otr_token,
2807 context, &final);
2808 if (!final || targets.length () > 1)
2809 return NULL;
2810 if (targets.length () == 1)
2811 target = targets[0]->decl;
2812 else
2813 target = ipa_impossible_devirt_target (ie, NULL_TREE);
2815 else
2817 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
2818 ie->indirect_info->otr_type);
2819 if (binfo)
2820 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2821 binfo);
2822 else
2823 return NULL;
2826 if (target)
2828 if (!possible_polymorphic_call_target_p (ie, cgraph_get_node (target)))
2829 target = ipa_impossible_devirt_target (ie, target);
2830 return ipa_make_edge_direct_to_target (ie, target);
2832 else
2833 return NULL;
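/* Editorial sketch: given

     struct A { virtual void f (); };
     void call_it (A *p) { p->f (); }

   when propagation determines the object P points to, the BINFO path
   above looks the method up by its OTR token; when instead the vtable
   pointer value itself is known from an aggregate jump function, the
   vtable-lookup branch at the top resolves the target directly.  A,
   call_it and p are hypothetical names.  */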
2836 /* Update the param called notes associated with NODE when CS is being inlined,
2837 assuming NODE is (potentially indirectly) inlined into CS->callee.
2838 Moreover, if the callee is discovered to be constant, create a new cgraph
2839 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2840 unless NEW_EDGES is NULL. Return true iff new edges were created. */
2842 static bool
2843 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2844 struct cgraph_node *node,
2845 vec<cgraph_edge_p> *new_edges)
2847 struct ipa_edge_args *top;
2848 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
2849 struct ipa_node_params *new_root_info;
2850 bool res = false;
2852 ipa_check_create_edge_args ();
2853 top = IPA_EDGE_REF (cs);
2854 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
2855 ? cs->caller->global.inlined_to
2856 : cs->caller);
2858 for (ie = node->indirect_calls; ie; ie = next_ie)
2860 struct cgraph_indirect_call_info *ici = ie->indirect_info;
2861 struct ipa_jump_func *jfunc;
2862 int param_index;
2864 next_ie = ie->next_callee;
2866 if (ici->param_index == -1)
2867 continue;
2869 /* We must check range due to calls with variable number of arguments: */
2870 if (ici->param_index >= ipa_get_cs_argument_count (top))
2872 ici->param_index = -1;
2873 continue;
2876 param_index = ici->param_index;
2877 jfunc = ipa_get_ith_jump_func (top, param_index);
2879 if (!flag_indirect_inlining)
2880 new_direct_edge = NULL;
2881 else if (ici->polymorphic)
2882 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
2883 new_root_info);
2884 else
2885 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
2886 new_root_info);
2887 /* If speculation was removed, then we need to do nothing. */
2888 if (new_direct_edge && new_direct_edge != ie)
2890 new_direct_edge->indirect_inlining_edge = 1;
2891 top = IPA_EDGE_REF (cs);
2892 res = true;
2894 else if (new_direct_edge)
2896 new_direct_edge->indirect_inlining_edge = 1;
2897 if (new_direct_edge->call_stmt)
2898 new_direct_edge->call_stmt_cannot_inline_p
2899 = !gimple_check_call_matching_types (
2900 new_direct_edge->call_stmt,
2901 new_direct_edge->callee->decl, false);
2902 if (new_edges)
2904 new_edges->safe_push (new_direct_edge);
2905 res = true;
2907 top = IPA_EDGE_REF (cs);
2909 else if (jfunc->type == IPA_JF_PASS_THROUGH
2910 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2912 if ((ici->agg_contents
2913 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
2914 || (ici->polymorphic
2915 && !ipa_get_jf_pass_through_type_preserved (jfunc)))
2916 ici->param_index = -1;
2917 else
2918 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
2920 else if (jfunc->type == IPA_JF_ANCESTOR)
2922 if ((ici->agg_contents
2923 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
2924 || (ici->polymorphic
2925 && !ipa_get_jf_ancestor_type_preserved (jfunc)))
2926 ici->param_index = -1;
2927 else
2929 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
2930 if (ipa_get_jf_ancestor_offset (jfunc))
2931 ici->outer_type = NULL;
2932 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
2935 else
2936 /* Either we can find a destination for this edge now or never. */
2937 ici->param_index = -1;
2940 return res;
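/* Editorial sketch: if the just-inlined body contained "(*cb) ()" with
   CB recorded as parameter 3, and the inlined edge's jump function for
   argument 3 is a NOP_EXPR pass-through of the caller's parameter 1,
   the surviving indirect edge is re-described above as a call through
   the caller's parameter 1, so a later pass may still make it
   direct.  */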
2943 /* Recursively traverse subtree of NODE (including node) made of inlined
2944 cgraph_edges when CS has been inlined and invoke
2945 update_indirect_edges_after_inlining on all nodes and
2946 update_jump_functions_after_inlining on all non-inlined edges that lead out
2947 of this subtree. Newly discovered indirect edges will be added to
2948 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
2949 created. */
2951 static bool
2952 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
2953 struct cgraph_node *node,
2954 vec<cgraph_edge_p> *new_edges)
2956 struct cgraph_edge *e;
2957 bool res;
2959 res = update_indirect_edges_after_inlining (cs, node, new_edges);
2961 for (e = node->callees; e; e = e->next_callee)
2962 if (!e->inline_failed)
2963 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
2964 else
2965 update_jump_functions_after_inlining (cs, e);
2966 for (e = node->indirect_calls; e; e = e->next_callee)
2967 update_jump_functions_after_inlining (cs, e);
2969 return res;
2972 /* Combine two controlled uses counts as done during inlining. */
2974 static int
2975 combine_controlled_uses_counters (int c, int d)
2977 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
2978 return IPA_UNDESCRIBED_USE;
2979 else
2980 return c + d - 1;
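/* Editorial worked example: the call statement being inlined is itself
   one of the counted uses, hence the -1 above; combining c = 2 uses in
   the new root with d = 1 described use in the inlined callee yields
   2 + 1 - 1 = 2, while IPA_UNDESCRIBED_USE on either side poisons the
   result.  */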
2983 /* Propagate number of controlled users from CS->callee to the new root of the
2984 tree of inlined nodes. */
2986 static void
2987 propagate_controlled_uses (struct cgraph_edge *cs)
2989 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
2990 struct cgraph_node *new_root = cs->caller->global.inlined_to
2991 ? cs->caller->global.inlined_to : cs->caller;
2992 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
2993 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
2994 int count, i;
2996 count = MIN (ipa_get_cs_argument_count (args),
2997 ipa_get_param_count (old_root_info));
2998 for (i = 0; i < count; i++)
3000 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3001 struct ipa_cst_ref_desc *rdesc;
3003 if (jf->type == IPA_JF_PASS_THROUGH)
3005 int src_idx, c, d;
3006 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3007 c = ipa_get_controlled_uses (new_root_info, src_idx);
3008 d = ipa_get_controlled_uses (old_root_info, i);
3010 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3011 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3012 c = combine_controlled_uses_counters (c, d);
3013 ipa_set_controlled_uses (new_root_info, src_idx, c);
3014 if (c == 0 && new_root_info->ipcp_orig_node)
3016 struct cgraph_node *n;
3017 struct ipa_ref *ref;
3018 tree t = new_root_info->known_vals[src_idx];
3020 if (t && TREE_CODE (t) == ADDR_EXPR
3021 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3022 && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
3023 && (ref = ipa_find_reference (new_root,
3024 n, NULL, 0)))
3026 if (dump_file)
3027 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3028 "reference from %s/%i to %s/%i.\n",
3029 xstrdup (new_root->name ()),
3030 new_root->order,
3031 xstrdup (n->name ()), n->order);
3032 ipa_remove_reference (ref);
3036 else if (jf->type == IPA_JF_CONST
3037 && (rdesc = jfunc_rdesc_usable (jf)))
3039 int d = ipa_get_controlled_uses (old_root_info, i);
3040 int c = rdesc->refcount;
3041 rdesc->refcount = combine_controlled_uses_counters (c, d);
3042 if (rdesc->refcount == 0)
3044 tree cst = ipa_get_jf_constant (jf);
3045 struct cgraph_node *n;
3046 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3047 && TREE_CODE (TREE_OPERAND (cst, 0))
3048 == FUNCTION_DECL);
3049 n = cgraph_get_node (TREE_OPERAND (cst, 0));
3050 if (n)
3052 struct cgraph_node *clone;
3053 bool ok;
3054 ok = remove_described_reference (n, rdesc);
3055 gcc_checking_assert (ok);
3057 clone = cs->caller;
3058 while (clone->global.inlined_to
3059 && clone != rdesc->cs->caller
3060 && IPA_NODE_REF (clone)->ipcp_orig_node)
3062 struct ipa_ref *ref;
3063 ref = ipa_find_reference (clone,
3064 n, NULL, 0);
3065 if (ref)
3067 if (dump_file)
3068 fprintf (dump_file, "ipa-prop: Removing "
3069 "cloning-created reference "
3070 "from %s/%i to %s/%i.\n",
3071 xstrdup (clone->name ()),
3072 clone->order,
3073 xstrdup (n->name ()),
3074 n->order);
3075 ipa_remove_reference (ref);
3077 clone = clone->callers->caller;
3084 for (i = ipa_get_param_count (old_root_info);
3085 i < ipa_get_cs_argument_count (args);
3086 i++)
3088 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3090 if (jf->type == IPA_JF_CONST)
3092 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3093 if (rdesc)
3094 rdesc->refcount = IPA_UNDESCRIBED_USE;
3096 else if (jf->type == IPA_JF_PASS_THROUGH)
3097 ipa_set_controlled_uses (new_root_info,
3098 jf->value.pass_through.formal_id,
3099 IPA_UNDESCRIBED_USE);
3103 /* Update jump functions and call note functions on inlining the call site CS.
3104 CS is expected to lead to a node already cloned by
3105 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3106 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3107 created. */
3109 bool
3110 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3111 vec<cgraph_edge_p> *new_edges)
3113 bool changed;
3114 /* Do nothing if the preparation phase has not been carried out yet
3115 (i.e. during early inlining). */
3116 if (!ipa_node_params_vector.exists ())
3117 return false;
3118 gcc_assert (ipa_edge_args_vector);
3120 propagate_controlled_uses (cs);
3121 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3123 return changed;
3126 /* Frees all dynamically allocated structures that the argument info points
3127 to. */
3129 void
3130 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3132 vec_free (args->jump_functions);
3133 memset (args, 0, sizeof (*args));
3136 /* Free all ipa_edge_args structures. */
3138 void
3139 ipa_free_all_edge_args (void)
3141 int i;
3142 struct ipa_edge_args *args;
3144 if (!ipa_edge_args_vector)
3145 return;
3147 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3148 ipa_free_edge_args_substructures (args);
3150 vec_free (ipa_edge_args_vector);
3153 /* Frees all dynamically allocated structures that the param info points
3154 to. */
3156 void
3157 ipa_free_node_params_substructures (struct ipa_node_params *info)
3159 info->descriptors.release ();
3160 free (info->lattices);
3161 /* Lattice values and their sources are deallocated with their allocation
3162 pool. */
3163 info->known_vals.release ();
3164 memset (info, 0, sizeof (*info));
3167 /* Free all ipa_node_params structures. */
3169 void
3170 ipa_free_all_node_params (void)
3172 int i;
3173 struct ipa_node_params *info;
3175 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
3176 ipa_free_node_params_substructures (info);
3178 ipa_node_params_vector.release ();
3181 /* Set the aggregate replacements of NODE to be AGGVALS. */
3183 void
3184 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3185 struct ipa_agg_replacement_value *aggvals)
3187 if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
3188 vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);
3190 (*ipa_node_agg_replacements)[node->uid] = aggvals;
3193 /* Hook that is called by cgraph.c when an edge is removed. */
3195 static void
3196 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3198 struct ipa_edge_args *args;
3200 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3201 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3202 return;
3204 args = IPA_EDGE_REF (cs);
3205 if (args->jump_functions)
3207 struct ipa_jump_func *jf;
3208 int i;
3209 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3211 struct ipa_cst_ref_desc *rdesc;
3212 try_decrement_rdesc_refcount (jf);
3213 if (jf->type == IPA_JF_CONST
3214 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3215 && rdesc->cs == cs)
3216 rdesc->cs = NULL;
3220 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3223 /* Hook that is called by cgraph.c when a node is removed. */
3225 static void
3226 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3228 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3229 if (ipa_node_params_vector.length () > (unsigned)node->uid)
3230 ipa_free_node_params_substructures (IPA_NODE_REF (node));
3231 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3232 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
3235 /* Hook that is called by cgraph.c when an edge is duplicated. */
3237 static void
3238 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3239 __attribute__((unused)) void *data)
3241 struct ipa_edge_args *old_args, *new_args;
3242 unsigned int i;
3244 ipa_check_create_edge_args ();
3246 old_args = IPA_EDGE_REF (src);
3247 new_args = IPA_EDGE_REF (dst);
3249 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3251 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3253 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3254 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3256 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3258 if (src_jf->type == IPA_JF_CONST)
3260 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3262 if (!src_rdesc)
3263 dst_jf->value.constant.rdesc = NULL;
3264 else if (src->caller == dst->caller)
3266 struct ipa_ref *ref;
3267 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3268 gcc_checking_assert (n);
3269 ref = ipa_find_reference (src->caller, n,
3270 src->call_stmt, src->lto_stmt_uid);
3271 gcc_checking_assert (ref);
3272 ipa_clone_ref (ref, dst->caller, ref->stmt);
3274 gcc_checking_assert (ipa_refdesc_pool);
3275 struct ipa_cst_ref_desc *dst_rdesc
3276 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3277 dst_rdesc->cs = dst;
3278 dst_rdesc->refcount = src_rdesc->refcount;
3279 dst_rdesc->next_duplicate = NULL;
3280 dst_jf->value.constant.rdesc = dst_rdesc;
3282 else if (src_rdesc->cs == src)
3284 struct ipa_cst_ref_desc *dst_rdesc;
3285 gcc_checking_assert (ipa_refdesc_pool);
3286 dst_rdesc
3287 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3288 dst_rdesc->cs = dst;
3289 dst_rdesc->refcount = src_rdesc->refcount;
3290 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3291 src_rdesc->next_duplicate = dst_rdesc;
3292 dst_jf->value.constant.rdesc = dst_rdesc;
3294 else
3296 struct ipa_cst_ref_desc *dst_rdesc;
3297 /* This can happen during inlining, when a JFUNC can refer to a
3298 reference taken in a function up in the tree of inline clones.
3299 We need to find the duplicate that refers to our tree of
3300 inline clones. */
3302 gcc_assert (dst->caller->global.inlined_to);
3303 for (dst_rdesc = src_rdesc->next_duplicate;
3304 dst_rdesc;
3305 dst_rdesc = dst_rdesc->next_duplicate)
3307 struct cgraph_node *top;
3308 top = dst_rdesc->cs->caller->global.inlined_to
3309 ? dst_rdesc->cs->caller->global.inlined_to
3310 : dst_rdesc->cs->caller;
3311 if (dst->caller->global.inlined_to == top)
3312 break;
3314 gcc_assert (dst_rdesc);
3315 dst_jf->value.constant.rdesc = dst_rdesc;
3321 /* Hook that is called by cgraph.c when a node is duplicated. */
3323 static void
3324 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
3325 ATTRIBUTE_UNUSED void *data)
3327 struct ipa_node_params *old_info, *new_info;
3328 struct ipa_agg_replacement_value *old_av, *new_av;
3330 ipa_check_create_node_params ();
3331 old_info = IPA_NODE_REF (src);
3332 new_info = IPA_NODE_REF (dst);
3334 new_info->descriptors = old_info->descriptors.copy ();
3335 new_info->lattices = NULL;
3336 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3338 new_info->uses_analysis_done = old_info->uses_analysis_done;
3339 new_info->node_enqueued = old_info->node_enqueued;
3341 old_av = ipa_get_agg_replacements_for_node (src);
3342 if (!old_av)
3343 return;
3345 new_av = NULL;
3346 while (old_av)
3348 struct ipa_agg_replacement_value *v;
3350 v = ggc_alloc_ipa_agg_replacement_value ();
3351 memcpy (v, old_av, sizeof (*v));
3352 v->next = new_av;
3353 new_av = v;
3354 old_av = old_av->next;
3356 ipa_set_node_agg_value_chain (dst, new_av);
3360 /* Analyze newly added function into callgraph. */
3362 static void
3363 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3365 if (cgraph_function_with_gimple_body_p (node))
3366 ipa_analyze_node (node);
3369 /* Register our cgraph hooks if they are not already there. */
3371 void
3372 ipa_register_cgraph_hooks (void)
3374 if (!edge_removal_hook_holder)
3375 edge_removal_hook_holder =
3376 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3377 if (!node_removal_hook_holder)
3378 node_removal_hook_holder =
3379 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
3380 if (!edge_duplication_hook_holder)
3381 edge_duplication_hook_holder =
3382 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3383 if (!node_duplication_hook_holder)
3384 node_duplication_hook_holder =
3385 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
3386 function_insertion_hook_holder =
3387 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
3392 /* Unregister our cgraph hooks. */
3392 static void
3393 ipa_unregister_cgraph_hooks (void)
3395 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
3396 edge_removal_hook_holder = NULL;
3397 cgraph_remove_node_removal_hook (node_removal_hook_holder);
3398 node_removal_hook_holder = NULL;
3399 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
3400 edge_duplication_hook_holder = NULL;
3401 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
3402 node_duplication_hook_holder = NULL;
3403 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
3404 function_insertion_hook_holder = NULL;
3407 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3408 longer needed after ipa-cp. */
3410 void
3411 ipa_free_all_structures_after_ipa_cp (void)
3413 if (!optimize)
3415 ipa_free_all_edge_args ();
3416 ipa_free_all_node_params ();
3417 free_alloc_pool (ipcp_sources_pool);
3418 free_alloc_pool (ipcp_values_pool);
3419 free_alloc_pool (ipcp_agg_lattice_pool);
3420 ipa_unregister_cgraph_hooks ();
3421 if (ipa_refdesc_pool)
3422 free_alloc_pool (ipa_refdesc_pool);
3426 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3427 longer needed after indirect inlining. */
3429 void
3430 ipa_free_all_structures_after_iinln (void)
3432 ipa_free_all_edge_args ();
3433 ipa_free_all_node_params ();
3434 ipa_unregister_cgraph_hooks ();
3435 if (ipcp_sources_pool)
3436 free_alloc_pool (ipcp_sources_pool);
3437 if (ipcp_values_pool)
3438 free_alloc_pool (ipcp_values_pool);
3439 if (ipcp_agg_lattice_pool)
3440 free_alloc_pool (ipcp_agg_lattice_pool);
3441 if (ipa_refdesc_pool)
3442 free_alloc_pool (ipa_refdesc_pool);
3445 /* Print the ipa_tree_map data structure (parameter descriptors) of
3446 function NODE to F. */
3448 void
3449 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3451 int i, count;
3452 struct ipa_node_params *info;
3454 if (!node->definition)
3455 return;
3456 info = IPA_NODE_REF (node);
3457 fprintf (f, " function %s/%i parameter descriptors:\n",
3458 node->name (), node->order);
3459 count = ipa_get_param_count (info);
3460 for (i = 0; i < count; i++)
3462 int c;
3464 fprintf (f, " ");
3465 ipa_dump_param (f, info, i);
3466 if (ipa_is_param_used (info, i))
3467 fprintf (f, " used");
3468 c = ipa_get_controlled_uses (info, i);
3469 if (c == IPA_UNDESCRIBED_USE)
3470 fprintf (f, " undescribed_use");
3471 else
3472 fprintf (f, " controlled_uses=%i", c);
3473 fprintf (f, "\n");
3477 /* Print ipa_tree_map data structures of all functions in the
3478 callgraph to F. */
3480 void
3481 ipa_print_all_params (FILE * f)
3483 struct cgraph_node *node;
3485 fprintf (f, "\nFunction parameters:\n");
3486 FOR_EACH_FUNCTION (node)
3487 ipa_print_node_params (f, node);
3490 /* Return a heap allocated vector containing formal parameters of FNDECL. */
3492 vec<tree>
3493 ipa_get_vector_of_formal_parms (tree fndecl)
3495 vec<tree> args;
3496 int count;
3497 tree parm;
3499 gcc_assert (!flag_wpa);
3500 count = count_formal_params (fndecl);
3501 args.create (count);
3502 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3503 args.quick_push (parm);
3505 return args;
3508 /* Return a heap allocated vector containing types of formal parameters of
3509 function type FNTYPE. */
3511 vec<tree>
3512 ipa_get_vector_of_formal_parm_types (tree fntype)
3514 vec<tree> types;
3515 int count = 0;
3516 tree t;
3518 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3519 count++;
3521 types.create (count);
3522 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3523 types.quick_push (TREE_VALUE (t));
3525 return types;
3528 /* Modify the function declaration FNDECL and its type according to the plan in
3529 ADJUSTMENTS. It also sets base fields of individual adjustments structures
3530 to reflect the actual parameters being modified which are determined by the
3531 base_index field. */
3533 void
3534 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3536 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3537 tree orig_type = TREE_TYPE (fndecl);
3538 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3540 /* The following test is an ugly hack; some functions simply don't have any
3541 arguments in their type. This is probably a bug but well... */
3542 bool care_for_types = (old_arg_types != NULL_TREE);
3543 bool last_parm_void;
3544 vec<tree> otypes;
3545 if (care_for_types)
3547 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3548 == void_type_node);
3549 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3550 if (last_parm_void)
3551 gcc_assert (oparms.length () + 1 == otypes.length ());
3552 else
3553 gcc_assert (oparms.length () == otypes.length ());
3555 else
3557 last_parm_void = false;
3558 otypes.create (0);
3561 int len = adjustments.length ();
3562 tree *link = &DECL_ARGUMENTS (fndecl);
3563 tree new_arg_types = NULL;
3564 for (int i = 0; i < len; i++)
3566 struct ipa_parm_adjustment *adj;
3567 gcc_assert (link);
3569 adj = &adjustments[i];
3570 tree parm;
3571 if (adj->op == IPA_PARM_OP_NEW)
3572 parm = NULL;
3573 else
3574 parm = oparms[adj->base_index];
3575 adj->base = parm;
3577 if (adj->op == IPA_PARM_OP_COPY)
3579 if (care_for_types)
3580 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3581 new_arg_types);
3582 *link = parm;
3583 link = &DECL_CHAIN (parm);
3585 else if (adj->op != IPA_PARM_OP_REMOVE)
3587 tree new_parm;
3588 tree ptype;
3590 if (adj->by_ref)
3591 ptype = build_pointer_type (adj->type);
3592 else
3594 ptype = adj->type;
3595 if (is_gimple_reg_type (ptype))
3597 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3598 if (TYPE_ALIGN (ptype) < malign)
3599 ptype = build_aligned_type (ptype, malign);
3603 if (care_for_types)
3604 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3606 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3607 ptype);
3608 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
3609 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3610 DECL_ARTIFICIAL (new_parm) = 1;
3611 DECL_ARG_TYPE (new_parm) = ptype;
3612 DECL_CONTEXT (new_parm) = fndecl;
3613 TREE_USED (new_parm) = 1;
3614 DECL_IGNORED_P (new_parm) = 1;
3615 layout_decl (new_parm, 0);
3617 if (adj->op == IPA_PARM_OP_NEW)
3618 adj->base = NULL;
3619 else
3620 adj->base = parm;
3621 adj->new_decl = new_parm;
3623 *link = new_parm;
3624 link = &DECL_CHAIN (new_parm);
3628 *link = NULL_TREE;
3630 tree new_reversed = NULL;
3631 if (care_for_types)
3633 new_reversed = nreverse (new_arg_types);
3634 if (last_parm_void)
3636 if (new_reversed)
3637 TREE_CHAIN (new_arg_types) = void_list_node;
3638 else
3639 new_reversed = void_list_node;
3643 /* Use copy_node to preserve as much as possible from original type
3644 (debug info, attribute lists etc.)
3645 The exception is that METHOD_TYPEs must have a THIS argument;
3646 when we are asked to remove it, we need to build a new FUNCTION_TYPE
3647 instead. */
3648 tree new_type = NULL;
3649 if (TREE_CODE (orig_type) != METHOD_TYPE
3650 || (adjustments[0].op == IPA_PARM_OP_COPY
3651 && adjustments[0].base_index == 0))
3653 new_type = build_distinct_type_copy (orig_type);
3654 TYPE_ARG_TYPES (new_type) = new_reversed;
3656 else
3658 new_type
3659 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3660 new_reversed));
3661 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3662 DECL_VINDEX (fndecl) = NULL_TREE;
3665 /* When signature changes, we need to clear builtin info. */
3666 if (DECL_BUILT_IN (fndecl))
3668 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3669 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3672 /* This is a new type, not a copy of an old type. Need to reassociate
3673 variants. We can handle everything except the main variant lazily. */
3674 tree t = TYPE_MAIN_VARIANT (orig_type);
3675 if (orig_type != t)
3677 TYPE_MAIN_VARIANT (new_type) = t;
3678 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
3679 TYPE_NEXT_VARIANT (t) = new_type;
3681 else
3683 TYPE_MAIN_VARIANT (new_type) = new_type;
3684 TYPE_NEXT_VARIANT (new_type) = NULL;
3687 TREE_TYPE (fndecl) = new_type;
3688 DECL_VIRTUAL_P (fndecl) = 0;
3689 DECL_LANG_SPECIFIC (fndecl) = NULL;
3690 otypes.release ();
3691 oparms.release ();
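/* Editorial sketch: an IPA-SRA style change of "void f (struct S *p)",
   where only the int field p->x is read, into "void f (int x)" would be
   described by a single adjustment whose base_index names the original
   pointer parameter and whose op is neither IPA_PARM_OP_COPY nor
   IPA_PARM_OP_REMOVE; the loop above rebuilds DECL_ARGUMENTS and the
   type's argument list from such an adjustment vector.  */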
3694 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
3695 If this is a directly recursive call, CS must be NULL. Otherwise it must
3696 contain the corresponding call graph edge. */
3698 void
3699 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
3700 ipa_parm_adjustment_vec adjustments)
3702 struct cgraph_node *current_node = cgraph_get_node (current_function_decl);
3703 vec<tree> vargs;
3704 vec<tree, va_gc> **debug_args = NULL;
3705 gimple new_stmt;
3706 gimple_stmt_iterator gsi, prev_gsi;
3707 tree callee_decl;
3708 int i, len;
3710 len = adjustments.length ();
3711 vargs.create (len);
3712 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
3713 ipa_remove_stmt_references (current_node, stmt);
3715 gsi = gsi_for_stmt (stmt);
3716 prev_gsi = gsi;
3717 gsi_prev (&prev_gsi);
3718 for (i = 0; i < len; i++)
3720 struct ipa_parm_adjustment *adj;
3722 adj = &adjustments[i];
3724 if (adj->op == IPA_PARM_OP_COPY)
3726 tree arg = gimple_call_arg (stmt, adj->base_index);
3728 vargs.quick_push (arg);
3730 else if (adj->op != IPA_PARM_OP_REMOVE)
3732 tree expr, base, off;
3733 location_t loc;
3734 unsigned int deref_align = 0;
3735 bool deref_base = false;
3737 /* We create a new parameter out of the value of the old one, we can
3738 do the following kind of transformations:
3740 - A scalar passed by reference is converted to a scalar passed by
3741 value. (adj->by_ref is false and the type of the original
3742 actual argument is a pointer to a scalar).
3744 - A part of an aggregate is passed instead of the whole aggregate.
3745 The part can be passed either by value or by reference, this is
3746 determined by value of adj->by_ref. Moreover, the code below
3747 handles both situations when the original aggregate is passed by
3748 value (its type is not a pointer) and when it is passed by
3749 reference (it is a pointer to an aggregate).
3751 When the new argument is passed by reference (adj->by_ref is true)
3752 it must be a part of an aggregate and therefore we form it by
3753 simply taking the address of a reference inside the original
3754 aggregate. */
3756 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3757 base = gimple_call_arg (stmt, adj->base_index);
3758 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3759 : EXPR_LOCATION (base);
3761 if (TREE_CODE (base) != ADDR_EXPR
3762 && POINTER_TYPE_P (TREE_TYPE (base)))
3763 off = build_int_cst (adj->alias_ptr_type,
3764 adj->offset / BITS_PER_UNIT);
3765 else
3767 HOST_WIDE_INT base_offset;
3768 tree prev_base;
3769 bool addrof;
3771 if (TREE_CODE (base) == ADDR_EXPR)
3773 base = TREE_OPERAND (base, 0);
3774 addrof = true;
3776 else
3777 addrof = false;
3778 prev_base = base;
3779 base = get_addr_base_and_unit_offset (base, &base_offset);
3780 /* Aggregate arguments can have non-invariant addresses. */
3781 if (!base)
3783 base = build_fold_addr_expr (prev_base);
3784 off = build_int_cst (adj->alias_ptr_type,
3785 adj->offset / BITS_PER_UNIT);
3787 else if (TREE_CODE (base) == MEM_REF)
3789 if (!addrof)
3791 deref_base = true;
3792 deref_align = TYPE_ALIGN (TREE_TYPE (base));
3794 off = build_int_cst (adj->alias_ptr_type,
3795 base_offset
3796 + adj->offset / BITS_PER_UNIT);
3797 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
3798 off);
3799 base = TREE_OPERAND (base, 0);
3801 else
3803 off = build_int_cst (adj->alias_ptr_type,
3804 base_offset
3805 + adj->offset / BITS_PER_UNIT);
3806 base = build_fold_addr_expr (base);
3810 if (!adj->by_ref)
3812 tree type = adj->type;
3813 unsigned int align;
3814 unsigned HOST_WIDE_INT misalign;
3816 if (deref_base)
3818 align = deref_align;
3819 misalign = 0;
3821 else
3823 get_pointer_alignment_1 (base, &align, &misalign);
3824 if (TYPE_ALIGN (type) > align)
3825 align = TYPE_ALIGN (type);
3827 misalign += (tree_to_double_int (off)
3828 .sext (TYPE_PRECISION (TREE_TYPE (off))).low
3829 * BITS_PER_UNIT);
3830 misalign = misalign & (align - 1);
3831 if (misalign != 0)
3832 align = (misalign & -misalign);
3833 if (align < TYPE_ALIGN (type))
3834 type = build_aligned_type (type, align);
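/* Worked example of the alignment computation above (numbers
   hypothetical): if BASE is 64-bit aligned with zero misalignment and
   OFF is 4 bytes, misalign becomes 32 bits, 32 & (64 - 1) is nonzero,
   and align is reduced to the lowest set bit, 32, so a 32-bit aligned
   variant of TYPE is built for the MEM_REF. */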
3835 base = force_gimple_operand_gsi (&gsi, base,
3836 true, NULL, true, GSI_SAME_STMT);
3837 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
3838 /* If expr is not a valid gimple call argument, emit
3839 a load into a temporary. */
3840 if (is_gimple_reg_type (TREE_TYPE (expr)))
3842 gimple tem = gimple_build_assign (NULL_TREE, expr);
3843 if (gimple_in_ssa_p (cfun))
3845 gimple_set_vuse (tem, gimple_vuse (stmt));
3846 expr = make_ssa_name (TREE_TYPE (expr), tem);
3848 else
3849 expr = create_tmp_reg (TREE_TYPE (expr), NULL);
3850 gimple_assign_set_lhs (tem, expr);
3851 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
3854 else
3856 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
3857 expr = build_fold_addr_expr (expr);
3858 expr = force_gimple_operand_gsi (&gsi, expr,
3859 true, NULL, true, GSI_SAME_STMT);
3861 vargs.quick_push (expr);
3863 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
3865 unsigned int ix;
3866 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
3867 gimple def_temp;
3869 arg = gimple_call_arg (stmt, adj->base_index);
3870 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
3872 if (!fold_convertible_p (TREE_TYPE (origin), arg))
3873 continue;
3874 arg = fold_convert_loc (gimple_location (stmt),
3875 TREE_TYPE (origin), arg);
3877 if (debug_args == NULL)
3878 debug_args = decl_debug_args_insert (callee_decl);
3879 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
3880 if (ddecl == origin)
3882 ddecl = (**debug_args)[ix + 1];
3883 break;
3885 if (ddecl == NULL)
3887 ddecl = make_node (DEBUG_EXPR_DECL);
3888 DECL_ARTIFICIAL (ddecl) = 1;
3889 TREE_TYPE (ddecl) = TREE_TYPE (origin);
3890 DECL_MODE (ddecl) = DECL_MODE (origin);
3892 vec_safe_push (*debug_args, origin);
3893 vec_safe_push (*debug_args, ddecl);
3895 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
3896 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
3900 if (dump_file && (dump_flags & TDF_DETAILS))
3902 fprintf (dump_file, "replacing stmt:");
3903 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
3906 new_stmt = gimple_build_call_vec (callee_decl, vargs);
3907 vargs.release ();
3908 if (gimple_call_lhs (stmt))
3909 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3911 gimple_set_block (new_stmt, gimple_block (stmt));
3912 if (gimple_has_location (stmt))
3913 gimple_set_location (new_stmt, gimple_location (stmt));
3914 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3915 gimple_call_copy_flags (new_stmt, stmt);
3916 if (gimple_in_ssa_p (cfun))
3918 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3919 if (gimple_vdef (stmt))
3921 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3922 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
3926 if (dump_file && (dump_flags & TDF_DETAILS))
3928 fprintf (dump_file, "with stmt:");
3929 print_gimple_stmt (dump_file, new_stmt, 0, 0);
3930 fprintf (dump_file, "\n");
3932 gsi_replace (&gsi, new_stmt, true);
3933 if (cs)
3934 cgraph_set_call_stmt (cs, new_stmt);
3937 ipa_record_stmt_references (current_node, gsi_stmt (gsi));
3938 gsi_prev (&gsi);
3940 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
3943 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
3944 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
3945 specifies whether the function should care about type incompatibility between
3946 the current and new expressions. If it is false, the function will leave
3947 incompatibility issues to the caller. Return true iff the expression
3948 was modified. */
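/* For instance (a sketch, not an exhaustive description): if *EXPR
   accesses a piece of a parameter that an adjustment replaced with a
   new scalar decl, *EXPR becomes that decl (or a MEM_REF of it for a
   by_ref replacement), wrapped in a VIEW_CONVERT_EXPR when CONVERT is
   set and the old and new types differ. */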
3950 bool
3951 ipa_modify_expr (tree *expr, bool convert,
3952 ipa_parm_adjustment_vec adjustments)
3954 struct ipa_parm_adjustment *cand
3955 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
3956 if (!cand)
3957 return false;
3959 tree src;
3960 if (cand->by_ref)
3961 src = build_simple_mem_ref (cand->new_decl);
3962 else
3963 src = cand->new_decl;
3965 if (dump_file && (dump_flags & TDF_DETAILS))
3967 fprintf (dump_file, "About to replace expr ");
3968 print_generic_expr (dump_file, *expr, 0);
3969 fprintf (dump_file, " with ");
3970 print_generic_expr (dump_file, src, 0);
3971 fprintf (dump_file, "\n");
3974 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
3976 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
3977 *expr = vce;
3979 else
3980 *expr = src;
3981 return true;
3984 /* If T is an SSA_NAME, return NULL if it is not a default def or
3985 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
3986 the base variable is always returned, regardless of whether it is a default
3987 def. Return T if it is not an SSA_NAME. */
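/* E.g. for p_1(D), the default definition of PARM_DECL p, this returns
   p; for a non-default definition such as p_2 it returns NULL_TREE
   unless IGNORE_DEFAULT_DEF is true. A plain decl is returned
   unchanged. (Names hypothetical.) */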
3989 static tree
3990 get_ssa_base_param (tree t, bool ignore_default_def)
3992 if (TREE_CODE (t) == SSA_NAME)
3994 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
3995 return SSA_NAME_VAR (t);
3996 else
3997 return NULL_TREE;
3999 return t;
4002 /* Given an expression, return an adjustment entry specifying the
4003 transformation to be done on EXPR. If no suitable adjustment entry
4004 was found, returns NULL.
4006 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4007 default def; otherwise bail on them.
4009 If CONVERT is non-NULL, this function will set *CONVERT if the
4010 expression provided is a component reference. ADJUSTMENTS is the
4011 adjustments vector. */
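/* For example (hypothetical parameter s): for the expression s.f with
   f at bit offset 32, the code below looks for an adjustment with base
   s and offset 32 (or a removal of s) and returns it, unless the match
   is a plain copy or removal, in which case NULL is returned. */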
4013 ipa_parm_adjustment *
4014 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4015 ipa_parm_adjustment_vec adjustments,
4016 bool ignore_default_def)
4018 if (TREE_CODE (**expr) == BIT_FIELD_REF
4019 || TREE_CODE (**expr) == IMAGPART_EXPR
4020 || TREE_CODE (**expr) == REALPART_EXPR)
4022 *expr = &TREE_OPERAND (**expr, 0);
4023 if (convert)
4024 *convert = true;
4027 HOST_WIDE_INT offset, size, max_size;
4028 tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
4029 if (!base || size == -1 || max_size == -1)
4030 return NULL;
4032 if (TREE_CODE (base) == MEM_REF)
4034 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
4035 base = TREE_OPERAND (base, 0);
4038 base = get_ssa_base_param (base, ignore_default_def);
4039 if (!base || TREE_CODE (base) != PARM_DECL)
4040 return NULL;
4042 struct ipa_parm_adjustment *cand = NULL;
4043 unsigned int len = adjustments.length ();
4044 for (unsigned i = 0; i < len; i++)
4046 struct ipa_parm_adjustment *adj = &adjustments[i];
4048 if (adj->base == base
4049 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4051 cand = adj;
4052 break;
4056 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4057 return NULL;
4058 return cand;
4061 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4063 static bool
4064 index_in_adjustments_multiple_times_p (int base_index,
4065 ipa_parm_adjustment_vec adjustments)
4067 int i, len = adjustments.length ();
4068 bool one = false;
4070 for (i = 0; i < len; i++)
4072 struct ipa_parm_adjustment *adj;
4073 adj = &adjustments[i];
4075 if (adj->base_index == base_index)
4077 if (one)
4078 return true;
4079 else
4080 one = true;
4083 return false;
4087 /* Return adjustments that should have the same effect on function parameters
4088 and call arguments as if they were first changed according to adjustments in
4089 INNER and then by adjustments in OUTER. */
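/* A small worked example (hypothetical vectors): if INNER copies
   original parameters 0 and 2 and removes parameter 1, and OUTER then
   removes its parameter 0, the combination removes original parameters
   0 and 1 and keeps only parameter 2. When neither adjustment is a
   copy, their offsets add up, since both select pieces of the same
   original argument. */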
4091 ipa_parm_adjustment_vec
4092 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4093 ipa_parm_adjustment_vec outer)
4095 int i, outlen = outer.length ();
4096 int inlen = inner.length ();
4097 int removals = 0;
4098 ipa_parm_adjustment_vec adjustments, tmp;
4100 tmp.create (inlen);
4101 for (i = 0; i < inlen; i++)
4103 struct ipa_parm_adjustment *n;
4104 n = &inner[i];
4106 if (n->op == IPA_PARM_OP_REMOVE)
4107 removals++;
4108 else
4110 /* FIXME: Handling of new arguments is not implemented yet. */
4111 gcc_assert (n->op != IPA_PARM_OP_NEW);
4112 tmp.quick_push (*n);
4116 adjustments.create (outlen + removals);
4117 for (i = 0; i < outlen; i++)
4119 struct ipa_parm_adjustment r;
4120 struct ipa_parm_adjustment *out = &outer[i];
4121 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4123 memset (&r, 0, sizeof (r));
4124 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4125 if (out->op == IPA_PARM_OP_REMOVE)
4127 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4129 r.op = IPA_PARM_OP_REMOVE;
4130 adjustments.quick_push (r);
4132 continue;
4134 else
4136 /* FIXME: Handling of new arguments is not implemented yet. */
4137 gcc_assert (out->op != IPA_PARM_OP_NEW);
4140 r.base_index = in->base_index;
4141 r.type = out->type;
4143 /* FIXME: Create nonlocal value too. */
4145 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4146 r.op = IPA_PARM_OP_COPY;
4147 else if (in->op == IPA_PARM_OP_COPY)
4148 r.offset = out->offset;
4149 else if (out->op == IPA_PARM_OP_COPY)
4150 r.offset = in->offset;
4151 else
4152 r.offset = in->offset + out->offset;
4153 adjustments.quick_push (r);
4156 for (i = 0; i < inlen; i++)
4158 struct ipa_parm_adjustment *n = &inner[i];
4160 if (n->op == IPA_PARM_OP_REMOVE)
4161 adjustments.quick_push (*n);
4164 tmp.release ();
4165 return adjustments;
4168 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a
4169 human-friendly way, assuming they are meant to be applied to FNDECL. */
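/* Sample of the output produced below (decls hypothetical):
   IPA param adjustments: 0. base_index: 0 - i, copy_param, type: ...
   1. base_index: 2 - s, offset 32, by_ref, type: ...
   Each entry names the original parameter and the operation applied
   to it. */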
4171 void
4172 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4173 tree fndecl)
4175 int i, len = adjustments.length ();
4176 bool first = true;
4177 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4179 fprintf (file, "IPA param adjustments: ");
4180 for (i = 0; i < len; i++)
4182 struct ipa_parm_adjustment *adj;
4183 adj = &adjustments[i];
4185 if (!first)
4186 fprintf (file, " ");
4187 else
4188 first = false;
4190 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4191 print_generic_expr (file, parms[adj->base_index], 0);
4192 if (adj->base)
4194 fprintf (file, ", base: ");
4195 print_generic_expr (file, adj->base, 0);
4197 if (adj->new_decl)
4199 fprintf (file, ", new_decl: ");
4200 print_generic_expr (file, adj->new_decl, 0);
4202 if (adj->new_ssa_base)
4204 fprintf (file, ", new_ssa_base: ");
4205 print_generic_expr (file, adj->new_ssa_base, 0);
4208 if (adj->op == IPA_PARM_OP_COPY)
4209 fprintf (file, ", copy_param");
4210 else if (adj->op == IPA_PARM_OP_REMOVE)
4211 fprintf (file, ", remove_param");
4212 else
4213 fprintf (file, ", offset %li", (long) adj->offset);
4214 if (adj->by_ref)
4215 fprintf (file, ", by_ref");
4216 print_node_brief (file, ", type: ", adj->type, 0);
4217 fprintf (file, "\n");
4219 parms.release ();
4222 /* Dump the linked list of aggregate replacement values AV to F. */
4224 void
4225 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4227 bool comma = false;
4228 fprintf (f, " Aggregate replacements:");
4229 for (; av; av = av->next)
4231 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4232 av->index, av->offset);
4233 print_generic_expr (f, av->value, 0);
4234 comma = true;
4236 fprintf (f, "\n");
4239 /* Stream out jump function JUMP_FUNC to OB. */
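/* A sketch of the layout written below, mirrored by
   ipa_read_jump_function: the jump function type as a uhwi, a
   type-specific payload of trees and bitpacked flags, the number of
   aggregate items as a uhwi, one bitpacked by_ref flag if that count
   is nonzero, and an offset/value pair per item. */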
4241 static void
4242 ipa_write_jump_function (struct output_block *ob,
4243 struct ipa_jump_func *jump_func)
4245 struct ipa_agg_jf_item *item;
4246 struct bitpack_d bp;
4247 int i, count;
4249 streamer_write_uhwi (ob, jump_func->type);
4250 switch (jump_func->type)
4252 case IPA_JF_UNKNOWN:
4253 break;
4254 case IPA_JF_KNOWN_TYPE:
4255 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
4256 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
4257 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
4258 break;
4259 case IPA_JF_CONST:
4260 gcc_assert (
4261 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4262 stream_write_tree (ob, jump_func->value.constant.value, true);
4263 break;
4264 case IPA_JF_PASS_THROUGH:
4265 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4266 if (jump_func->value.pass_through.operation == NOP_EXPR)
4268 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4269 bp = bitpack_create (ob->main_stream);
4270 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4271 bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
4272 streamer_write_bitpack (&bp);
4274 else
4276 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4277 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4279 break;
4280 case IPA_JF_ANCESTOR:
4281 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4282 stream_write_tree (ob, jump_func->value.ancestor.type, true);
4283 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4284 bp = bitpack_create (ob->main_stream);
4285 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4286 bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
4287 streamer_write_bitpack (&bp);
4288 break;
4291 count = vec_safe_length (jump_func->agg.items);
4292 streamer_write_uhwi (ob, count);
4293 if (count)
4295 bp = bitpack_create (ob->main_stream);
4296 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4297 streamer_write_bitpack (&bp);
4300 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4302 streamer_write_uhwi (ob, item->offset);
4303 stream_write_tree (ob, item->value, true);
4307 /* Read in jump function JUMP_FUNC from IB. */
4309 static void
4310 ipa_read_jump_function (struct lto_input_block *ib,
4311 struct ipa_jump_func *jump_func,
4312 struct cgraph_edge *cs,
4313 struct data_in *data_in)
4315 enum jump_func_type jftype;
4316 enum tree_code operation;
4317 int i, count;
4319 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4320 switch (jftype)
4322 case IPA_JF_UNKNOWN:
4323 jump_func->type = IPA_JF_UNKNOWN;
4324 break;
4325 case IPA_JF_KNOWN_TYPE:
4327 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4328 tree base_type = stream_read_tree (ib, data_in);
4329 tree component_type = stream_read_tree (ib, data_in);
4331 ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
4332 break;
4334 case IPA_JF_CONST:
4335 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4336 break;
4337 case IPA_JF_PASS_THROUGH:
4338 operation = (enum tree_code) streamer_read_uhwi (ib);
4339 if (operation == NOP_EXPR)
4341 int formal_id = streamer_read_uhwi (ib);
4342 struct bitpack_d bp = streamer_read_bitpack (ib);
4343 bool agg_preserved = bp_unpack_value (&bp, 1);
4344 bool type_preserved = bp_unpack_value (&bp, 1);
4345 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
4346 type_preserved);
4348 else
4350 tree operand = stream_read_tree (ib, data_in);
4351 int formal_id = streamer_read_uhwi (ib);
4352 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4353 operation);
4355 break;
4356 case IPA_JF_ANCESTOR:
4358 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4359 tree type = stream_read_tree (ib, data_in);
4360 int formal_id = streamer_read_uhwi (ib);
4361 struct bitpack_d bp = streamer_read_bitpack (ib);
4362 bool agg_preserved = bp_unpack_value (&bp, 1);
4363 bool type_preserved = bp_unpack_value (&bp, 1);
4365 ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
4366 type_preserved);
4367 break;
4371 count = streamer_read_uhwi (ib);
4372 vec_alloc (jump_func->agg.items, count);
4373 if (count)
4375 struct bitpack_d bp = streamer_read_bitpack (ib);
4376 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4378 for (i = 0; i < count; i++)
4380 struct ipa_agg_jf_item item;
4381 item.offset = streamer_read_uhwi (ib);
4382 item.value = stream_read_tree (ib, data_in);
4383 jump_func->agg.items->quick_push (item);
4387 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4388 relevant to indirect inlining to OB. */
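/* The layout, mirrored by ipa_read_indirect_edge_info, is: param_index
   and offset as hwis, a bitpack of the six flags below, and, for
   polymorphic calls only, the OTR token followed by the otr_type and
   outer_type trees. */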
4390 static void
4391 ipa_write_indirect_edge_info (struct output_block *ob,
4392 struct cgraph_edge *cs)
4394 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4395 struct bitpack_d bp;
4397 streamer_write_hwi (ob, ii->param_index);
4398 streamer_write_hwi (ob, ii->offset);
4399 bp = bitpack_create (ob->main_stream);
4400 bp_pack_value (&bp, ii->polymorphic, 1);
4401 bp_pack_value (&bp, ii->agg_contents, 1);
4402 bp_pack_value (&bp, ii->member_ptr, 1);
4403 bp_pack_value (&bp, ii->by_ref, 1);
4404 bp_pack_value (&bp, ii->maybe_in_construction, 1);
4405 bp_pack_value (&bp, ii->maybe_derived_type, 1);
4406 streamer_write_bitpack (&bp);
4408 if (ii->polymorphic)
4410 streamer_write_hwi (ob, ii->otr_token);
4411 stream_write_tree (ob, ii->otr_type, true);
4412 stream_write_tree (ob, ii->outer_type, true);
4416 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4417 relevant to indirect inlining from IB. */
4419 static void
4420 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4421 struct data_in *data_in ATTRIBUTE_UNUSED,
4422 struct cgraph_edge *cs)
4424 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4425 struct bitpack_d bp;
4427 ii->param_index = (int) streamer_read_hwi (ib);
4428 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4429 bp = streamer_read_bitpack (ib);
4430 ii->polymorphic = bp_unpack_value (&bp, 1);
4431 ii->agg_contents = bp_unpack_value (&bp, 1);
4432 ii->member_ptr = bp_unpack_value (&bp, 1);
4433 ii->by_ref = bp_unpack_value (&bp, 1);
4434 ii->maybe_in_construction = bp_unpack_value (&bp, 1);
4435 ii->maybe_derived_type = bp_unpack_value (&bp, 1);
4436 if (ii->polymorphic)
4438 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4439 ii->otr_type = stream_read_tree (ib, data_in);
4440 ii->outer_type = stream_read_tree (ib, data_in);
4444 /* Stream out NODE info to OB. */
4446 static void
4447 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4449 int node_ref;
4450 lto_symtab_encoder_t encoder;
4451 struct ipa_node_params *info = IPA_NODE_REF (node);
4452 int j;
4453 struct cgraph_edge *e;
4454 struct bitpack_d bp;
4456 encoder = ob->decl_state->symtab_node_encoder;
4457 node_ref = lto_symtab_encoder_encode (encoder, node);
4458 streamer_write_uhwi (ob, node_ref);
4460 streamer_write_uhwi (ob, ipa_get_param_count (info));
4461 for (j = 0; j < ipa_get_param_count (info); j++)
4462 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4463 bp = bitpack_create (ob->main_stream);
4464 gcc_assert (info->uses_analysis_done
4465 || ipa_get_param_count (info) == 0);
4466 gcc_assert (!info->node_enqueued);
4467 gcc_assert (!info->ipcp_orig_node);
4468 for (j = 0; j < ipa_get_param_count (info); j++)
4469 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4470 streamer_write_bitpack (&bp);
4471 for (j = 0; j < ipa_get_param_count (info); j++)
4472 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4473 for (e = node->callees; e; e = e->next_callee)
4475 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4477 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4478 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4479 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4481 for (e = node->indirect_calls; e; e = e->next_callee)
4483 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4485 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4486 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4487 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4488 ipa_write_indirect_edge_info (ob, e);
4492 /* Stream in NODE info from IB. */
4494 static void
4495 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4496 struct data_in *data_in)
4498 struct ipa_node_params *info = IPA_NODE_REF (node);
4499 int k;
4500 struct cgraph_edge *e;
4501 struct bitpack_d bp;
4503 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4505 for (k = 0; k < ipa_get_param_count (info); k++)
4506 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4508 bp = streamer_read_bitpack (ib);
4509 if (ipa_get_param_count (info) != 0)
4510 info->uses_analysis_done = true;
4511 info->node_enqueued = false;
4512 for (k = 0; k < ipa_get_param_count (info); k++)
4513 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4514 for (k = 0; k < ipa_get_param_count (info); k++)
4515 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4516 for (e = node->callees; e; e = e->next_callee)
4518 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4519 int count = streamer_read_uhwi (ib);
4521 if (!count)
4522 continue;
4523 vec_safe_grow_cleared (args->jump_functions, count);
4525 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4526 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4527 data_in);
4529 for (e = node->indirect_calls; e; e = e->next_callee)
4531 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4532 int count = streamer_read_uhwi (ib);
4534 if (count)
4536 vec_safe_grow_cleared (args->jump_functions, count);
4537 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4538 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4539 data_in);
4541 ipa_read_indirect_edge_info (ib, data_in, e);
4545 /* Write jump functions for nodes in the current partition. */
4547 void
4548 ipa_prop_write_jump_functions (void)
4550 struct cgraph_node *node;
4551 struct output_block *ob;
4552 unsigned int count = 0;
4553 lto_symtab_encoder_iterator lsei;
4554 lto_symtab_encoder_t encoder;
4557 if (!ipa_node_params_vector.exists ())
4558 return;
4560 ob = create_output_block (LTO_section_jump_functions);
4561 encoder = ob->decl_state->symtab_node_encoder;
4562 ob->cgraph_node = NULL;
4563 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4564 lsei_next_function_in_partition (&lsei))
4566 node = lsei_cgraph_node (lsei);
4567 if (cgraph_function_with_gimple_body_p (node)
4568 && IPA_NODE_REF (node) != NULL)
4569 count++;
4572 streamer_write_uhwi (ob, count);
4574 /* Process all of the functions. */
4575 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4576 lsei_next_function_in_partition (&lsei))
4578 node = lsei_cgraph_node (lsei);
4579 if (cgraph_function_with_gimple_body_p (node)
4580 && IPA_NODE_REF (node) != NULL)
4581 ipa_write_node_info (ob, node);
4583 streamer_write_char_stream (ob->main_stream, 0);
4584 produce_asm (ob, NULL);
4585 destroy_output_block (ob);
4588 /* Read the jump functions section in file FILE_DATA of length LEN with data DATA. */
4590 static void
4591 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4592 size_t len)
4594 const struct lto_function_header *header =
4595 (const struct lto_function_header *) data;
4596 const int cfg_offset = sizeof (struct lto_function_header);
4597 const int main_offset = cfg_offset + header->cfg_size;
4598 const int string_offset = main_offset + header->main_size;
4599 struct data_in *data_in;
4600 struct lto_input_block ib_main;
4601 unsigned int i;
4602 unsigned int count;
4604 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4605 header->main_size);
4607 data_in =
4608 lto_data_in_create (file_data, (const char *) data + string_offset,
4609 header->string_size, vNULL);
4610 count = streamer_read_uhwi (&ib_main);
4612 for (i = 0; i < count; i++)
4614 unsigned int index;
4615 struct cgraph_node *node;
4616 lto_symtab_encoder_t encoder;
4618 index = streamer_read_uhwi (&ib_main);
4619 encoder = file_data->symtab_node_encoder;
4620 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4621 gcc_assert (node->definition);
4622 ipa_read_node_info (&ib_main, node, data_in);
4624 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4625 len);
4626 lto_data_in_delete (data_in);
4629 /* Read IPCP jump functions. */
4631 void
4632 ipa_prop_read_jump_functions (void)
4634 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4635 struct lto_file_decl_data *file_data;
4636 unsigned int j = 0;
4638 ipa_check_create_node_params ();
4639 ipa_check_create_edge_args ();
4640 ipa_register_cgraph_hooks ();
4642 while ((file_data = file_data_vec[j++]))
4644 size_t len;
4645 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4647 if (data)
4648 ipa_prop_read_section (file_data, data, len);
4652 /* After merging units, we can get mismatches in argument counts.
4653 Decl merging might also have rendered parameter lists obsolete.
4654 Also compute called_with_variable_arg info. */
4656 void
4657 ipa_update_after_lto_read (void)
4659 ipa_check_create_node_params ();
4660 ipa_check_create_edge_args ();
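/* Stream out the aggregate value replacement chain for NODE to OB. */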
4663 void
4664 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
4666 int node_ref;
4667 unsigned int count = 0;
4668 lto_symtab_encoder_t encoder;
4669 struct ipa_agg_replacement_value *aggvals, *av;
4671 aggvals = ipa_get_agg_replacements_for_node (node);
4672 encoder = ob->decl_state->symtab_node_encoder;
4673 node_ref = lto_symtab_encoder_encode (encoder, node);
4674 streamer_write_uhwi (ob, node_ref);
4676 for (av = aggvals; av; av = av->next)
4677 count++;
4678 streamer_write_uhwi (ob, count);
4680 for (av = aggvals; av; av = av->next)
4682 struct bitpack_d bp;
4684 streamer_write_uhwi (ob, av->offset);
4685 streamer_write_uhwi (ob, av->index);
4686 stream_write_tree (ob, av->value, true);
4688 bp = bitpack_create (ob->main_stream);
4689 bp_pack_value (&bp, av->by_ref, 1);
4690 streamer_write_bitpack (&bp);
4694 /* Stream in the aggregate value replacement chain for NODE from IB. */
4696 static void
4697 read_agg_replacement_chain (struct lto_input_block *ib,
4698 struct cgraph_node *node,
4699 struct data_in *data_in)
4701 struct ipa_agg_replacement_value *aggvals = NULL;
4702 unsigned int count, i;
4704 count = streamer_read_uhwi (ib);
4705 for (i = 0; i < count; i++)
4707 struct ipa_agg_replacement_value *av;
4708 struct bitpack_d bp;
4710 av = ggc_alloc_ipa_agg_replacement_value ();
4711 av->offset = streamer_read_uhwi (ib);
4712 av->index = streamer_read_uhwi (ib);
4713 av->value = stream_read_tree (ib, data_in);
4714 bp = streamer_read_bitpack (ib);
4715 av->by_ref = bp_unpack_value (&bp, 1);
4716 av->next = aggvals;
4717 aggvals = av;
4719 ipa_set_node_agg_value_chain (node, aggvals);
4722 /* Write all aggregate replacements for nodes in the current partition. */
4724 void
4725 ipa_prop_write_all_agg_replacement (void)
4727 struct cgraph_node *node;
4728 struct output_block *ob;
4729 unsigned int count = 0;
4730 lto_symtab_encoder_iterator lsei;
4731 lto_symtab_encoder_t encoder;
4733 if (!ipa_node_agg_replacements)
4734 return;
4736 ob = create_output_block (LTO_section_ipcp_transform);
4737 encoder = ob->decl_state->symtab_node_encoder;
4738 ob->cgraph_node = NULL;
4739 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4740 lsei_next_function_in_partition (&lsei))
4742 node = lsei_cgraph_node (lsei);
4743 if (cgraph_function_with_gimple_body_p (node)
4744 && ipa_get_agg_replacements_for_node (node) != NULL)
4745 count++;
4748 streamer_write_uhwi (ob, count);
4750 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4751 lsei_next_function_in_partition (&lsei))
4753 node = lsei_cgraph_node (lsei);
4754 if (cgraph_function_with_gimple_body_p (node)
4755 && ipa_get_agg_replacements_for_node (node) != NULL)
4756 write_agg_replacement_chain (ob, node);
4758 streamer_write_char_stream (ob->main_stream, 0);
4759 produce_asm (ob, NULL);
4760 destroy_output_block (ob);
4763 /* Read replacements section in file FILE_DATA of length LEN with data
4764 DATA. */
4766 static void
4767 read_replacements_section (struct lto_file_decl_data *file_data,
4768 const char *data,
4769 size_t len)
4771 const struct lto_function_header *header =
4772 (const struct lto_function_header *) data;
4773 const int cfg_offset = sizeof (struct lto_function_header);
4774 const int main_offset = cfg_offset + header->cfg_size;
4775 const int string_offset = main_offset + header->main_size;
4776 struct data_in *data_in;
4777 struct lto_input_block ib_main;
4778 unsigned int i;
4779 unsigned int count;
4781 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4782 header->main_size);
4784 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
4785 header->string_size, vNULL);
4786 count = streamer_read_uhwi (&ib_main);
4788 for (i = 0; i < count; i++)
4790 unsigned int index;
4791 struct cgraph_node *node;
4792 lto_symtab_encoder_t encoder;
4794 index = streamer_read_uhwi (&ib_main);
4795 encoder = file_data->symtab_node_encoder;
4796 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4797 gcc_assert (node->definition);
4798 read_agg_replacement_chain (&ib_main, node, data_in);
4800 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
4801 len);
4802 lto_data_in_delete (data_in);
4805 /* Read IPA-CP aggregate replacements. */
4807 void
4808 ipa_prop_read_all_agg_replacement (void)
4810 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4811 struct lto_file_decl_data *file_data;
4812 unsigned int j = 0;
4814 while ((file_data = file_data_vec[j++]))
4816 size_t len;
4817 const char *data = lto_get_section_data (file_data,
4818 LTO_section_ipcp_transform,
4819 NULL, &len);
4820 if (data)
4821 read_replacements_section (file_data, data, len);
4825 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4826 NODE. */
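/* For instance, if NODE skipped argument 1 of the original function
   (combined_args_to_skip = {1}), the mapping built below is 0 -> 0,
   1 -> -1, 2 -> 1, 3 -> 2, so a value recorded for original parameter
   2 is retargeted at clone parameter 1. Skipped parameters are not
   expected to occur in AGGVAL. (Example hypothetical.) */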
4828 static void
4829 adjust_agg_replacement_values (struct cgraph_node *node,
4830 struct ipa_agg_replacement_value *aggval)
4832 struct ipa_agg_replacement_value *v;
4833 int i, c = 0, d = 0, *adj;
4835 if (!node->clone.combined_args_to_skip)
4836 return;
4838 for (v = aggval; v; v = v->next)
4840 gcc_assert (v->index >= 0);
4841 if (c < v->index)
4842 c = v->index;
4844 c++;
4846 adj = XALLOCAVEC (int, c);
4847 for (i = 0; i < c; i++)
4848 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
4850 adj[i] = -1;
4851 d++;
4853 else
4854 adj[i] = i - d;
4856 for (v = aggval; v; v = v->next)
4857 v->index = adj[v->index];
4861 /* Function body transformation phase: apply the aggregate replacement
4862 values recorded for NODE to loads in its body. */
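/* For example (values hypothetical): if IPA-CP recorded that the
   aggregate pointed to by a parameter always holds 4 at offset 0, a
   load of that location is rewritten below into an assignment of the
   constant 4, possibly through a NOP_EXPR or VIEW_CONVERT_EXPR when
   the types differ. */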
4863 unsigned int
4864 ipcp_transform_function (struct cgraph_node *node)
4866 vec<ipa_param_descriptor> descriptors = vNULL;
4867 struct param_analysis_info *parms_ainfo;
4868 struct ipa_agg_replacement_value *aggval;
4869 gimple_stmt_iterator gsi;
4870 basic_block bb;
4871 int param_count;
4872 bool cfg_changed = false, something_changed = false;
4874 gcc_checking_assert (cfun);
4875 gcc_checking_assert (current_function_decl);
4877 if (dump_file)
4878 fprintf (dump_file, "Modification phase of node %s/%i\n",
4879 node->name (), node->order);
4881 aggval = ipa_get_agg_replacements_for_node (node);
4882 if (!aggval)
4883 return 0;
4884 param_count = count_formal_params (node->decl);
4885 if (param_count == 0)
4886 return 0;
4887 adjust_agg_replacement_values (node, aggval);
4888 if (dump_file)
4889 ipa_dump_agg_replacement_values (dump_file, aggval);
4890 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
4891 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
4892 descriptors.safe_grow_cleared (param_count);
4893 ipa_populate_param_decls (node, descriptors);
4895 FOR_EACH_BB_FN (bb, cfun)
4896 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4898 struct ipa_agg_replacement_value *v;
4899 gimple stmt = gsi_stmt (gsi);
4900 tree rhs, val, t;
4901 HOST_WIDE_INT offset, size;
4902 int index;
4903 bool by_ref, vce;
4905 if (!gimple_assign_load_p (stmt))
4906 continue;
4907 rhs = gimple_assign_rhs1 (stmt);
4908 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
4909 continue;
4911 vce = false;
4912 t = rhs;
4913 while (handled_component_p (t))
4915 /* V_C_E can do things like convert an array of integers to one
4916 bigger integer and similar things that we do not handle below. */
4917 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
4919 vce = true;
4920 break;
4922 t = TREE_OPERAND (t, 0);
4924 if (vce)
4925 continue;
4927 if (!ipa_load_from_parm_agg_1 (descriptors, parms_ainfo, stmt,
4928 rhs, &index, &offset, &size, &by_ref))
4929 continue;
4930 for (v = aggval; v; v = v->next)
4931 if (v->index == index
4932 && v->offset == offset)
4933 break;
4934 if (!v
4935 || v->by_ref != by_ref
4936 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
4937 continue;
4939 gcc_checking_assert (is_gimple_ip_invariant (v->value));
4940 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
4942 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
4943 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
4944 else if (TYPE_SIZE (TREE_TYPE (rhs))
4945 == TYPE_SIZE (TREE_TYPE (v->value)))
4946 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
4947 else
4949 if (dump_file)
4951 fprintf (dump_file, " const ");
4952 print_generic_expr (dump_file, v->value, 0);
4953 fprintf (dump_file, " can't be converted to type of ");
4954 print_generic_expr (dump_file, rhs, 0);
4955 fprintf (dump_file, "\n");
4957 continue;
4960 else
4961 val = v->value;
4963 if (dump_file && (dump_flags & TDF_DETAILS))
4965 fprintf (dump_file, "Modifying stmt:\n ");
4966 print_gimple_stmt (dump_file, stmt, 0, 0);
4968 gimple_assign_set_rhs_from_tree (&gsi, val);
4969 update_stmt (stmt);
4971 if (dump_file && (dump_flags & TDF_DETAILS))
4973 fprintf (dump_file, "into:\n ");
4974 print_gimple_stmt (dump_file, stmt, 0, 0);
4975 fprintf (dump_file, "\n");
4978 something_changed = true;
4979 if (maybe_clean_eh_stmt (stmt)
4980 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4981 cfg_changed = true;
4984 (*ipa_node_agg_replacements)[node->uid] = NULL;
4985 free_parms_ainfo (parms_ainfo, param_count);
4986 descriptors.release ();
4988 if (!something_changed)
4989 return 0;
4990 else if (cfg_changed)
4991 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
4992 else
4993 return TODO_update_ssa_only_virtuals;