/* Interprocedural analyses.
   Copyright (C) 2005-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "expr.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "langhooks.h"
#include "target.h"
#include "ipa-prop.h"
#include "bitmap.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "params.h"
#include "l-ipo.h"
#include "ipa-utils.h"
#include "stringpool.h"
#include "tree-ssanames.h"
/* Intermediate information about a parameter that is only useful during the
   run of ipa_analyze_node and is not kept afterwards.  */

struct param_analysis_info
{
  bool parm_modified, ref_modified, pt_modified;
  bitmap parm_visited_statements, pt_visited_statements;
};
/* Vector where the parameter infos are actually stored.  */
vec<ipa_node_params> ipa_node_params_vector;
/* Vector of known aggregate values in cloned nodes.  */
vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
/* Vector where the edge argument infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static alloc_pool ipa_refdesc_pool;
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
  struct cl_optimization *os;

  if (!fs_opts)
    return false;
  os = TREE_OPTIMIZATION (fs_opts);
  return !os->x_optimize || !os->x_flag_ipa_cp;
}
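
/* For example (an illustration, not part of the original sources): a
   function declared as

     __attribute__ ((optimize ("O0"))) void f (void);

   carries a DECL_FUNCTION_SPECIFIC_OPTIMIZATION node whose x_optimize is
   zero, so the predicate above returns true and IPA-CP leaves such a
   function alone.  */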
/* Return index of the formal whose tree is PTREE among the parameter
   DESCRIPTORS of a function.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm));
      param_num++;
    }
}
/* Return how many formal parameters FNDECL has.  */

static inline int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}
/* Dump the declaration of the Ith formal parameter of the function
   corresponding to INFO to FILE.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}
/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_KNOWN_TYPE)
	{
	  fprintf (f, "KNOWN TYPE: base ");
	  print_generic_expr (f, jump_func->value.known_type.base_type, 0);
	  fprintf (f, ", offset " HOST_WIDE_INT_PRINT_DEC ", component ",
		   jump_func->value.known_type.offset);
	  print_generic_expr (f, jump_func->value.known_type.component_type, 0);
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val, 0);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
				  0);
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f,
				  jump_func->value.pass_through.operand, 0);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  if (jump_func->value.pass_through.type_preserved)
	    fprintf (f, ", type_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC ", ",
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  print_generic_expr (f, jump_func->value.ancestor.type, 0);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  if (jump_func->value.ancestor.type_preserved)
	    fprintf (f, ", type_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value, 0);
		}
	      fprintf (f, "\n");
	    }
	}
    }
}
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s/%i:\n", node->name (),
	   node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
	       xstrdup (node->name ()), node->order,
	       xstrdup (cs->callee->name ()),
	       cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}
/* Set JFUNC to be a known type jump function.  */

static void
ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		       tree base_type, tree component_type)
{
  gcc_assert (TREE_CODE (component_type) == RECORD_TYPE
	      && TYPE_BINFO (component_type));
  if (!flag_devirtualize)
    return;
  gcc_assert (BINFO_VTABLE (TYPE_BINFO (component_type)));
  jfunc->type = IPA_JF_KNOWN_TYPE;
  jfunc->value.known_type.offset = offset;
  jfunc->value.known_type.base_type = base_type;
  jfunc->value.known_type.component_type = component_type;
  gcc_assert (component_type);
}
/* Set JFUNC to be a copy of another jump function (to be used by jump function
   combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}
/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;
      if (!ipa_refdesc_pool)
	ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
					sizeof (struct ipa_cst_ref_desc), 32);

      rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
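
/* For illustration (an assumed note, not from the original sources): when
   the constant passed at a call site is the address of a function, e.g.

     bar (&foo);

   the reference description allocated above lets later IPA passes keep the
   count of references to foo consistent when call graph edges are cloned
   or removed, and IPA_UNDESCRIBED_USE marks the cases where such counting
   had to be abandoned.  */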
/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved, bool type_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
  jfunc->value.pass_through.type_preserved = type_preserved;
}

/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
  jfunc->value.pass_through.type_preserved = false;
}
/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     tree type, int formal_id, bool agg_preserved,
		     bool type_preserved)
{
  if (!flag_devirtualize)
    type_preserved = false;
  gcc_assert (!type_preserved
	      || (TREE_CODE (type) == RECORD_TYPE
		  && TYPE_BINFO (type)
		  && BINFO_VTABLE (TYPE_BINFO (type))));
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.type = type_preserved ? type : NULL;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
  jfunc->value.ancestor.type_preserved = type_preserved;
}
/* Extract the actual BINFO being described by JFUNC which must be a known type
   jump function.  */

tree
ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
{
  tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
  if (!base_binfo)
    return NULL_TREE;
  return get_binfo_at_offset (base_binfo,
			      jfunc->value.known_type.offset,
			      jfunc->value.known_type.component_type);
}
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type associated with the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop; we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  /* TODO: Skip clobbers, doing so triggers problem in PR60306.  */
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
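
/* To illustrate the three constructor sections described above (a
   hypothetical sketch, not from the original sources):

     struct B : A
     {
       C c;
       B () : A ()      // section 1: ancestor constructors run first
       {
	 // section 2 happens implicitly right after the ancestor
	 // constructors: the VMT pointers of this object and of all its
	 // ancestor sub-objects are stored
	 do_stuff ();   // section 3: member constructors and user code,
			// possibly making virtual calls
       }
     };

   Hence, when scanning backwards from a statement in section 3, every call
   encountered before the VMT stores of section 2 can be safely ignored.  */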
/* If STMT can be proved to be an assignment to the virtual method table
   pointer of the object described by TCI and the type associated with the new
   table identified, return the type.  Otherwise return NULL_TREE.  */

static tree
extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
{
  HOST_WIDE_INT offset, size, max_size;
  tree lhs, rhs, base, binfo;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
    return NULL_TREE;

  base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
  if (offset != tci->offset
      || size != POINTER_SIZE
      || max_size != POINTER_SIZE)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (TREE_CODE (tci->object) != MEM_REF
	  || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
	  || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
				  TREE_OPERAND (base, 1)))
	return NULL_TREE;
    }
  else if (tci->object != base)
    return NULL_TREE;

  binfo = vtable_pointer_value_to_binfo (rhs);

  /* FIXME: vtable_pointer_value_to_binfo may return BINFO of a
     base of outer type.  In this case we would need to either
     work on binfos or translate it back to outer type and offset.
     KNOWN_TYPE jump functions are not ready for that, yet.  */
  if (!binfo || TYPE_BINFO (BINFO_TYPE (binfo)) != binfo)
    return NULL_TREE;

  return BINFO_TYPE (binfo);
}
/* Callback of walk_aliased_vdefs and a helper function for
   detect_type_change to check whether a particular statement may modify
   the virtual table pointer, and if possible also determine the new type of
   the (sub-)object.  It stores its result into DATA, which points to a
   type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct type_change_info *tci = (struct type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tree type;
      type = extr_type_from_vtbl_ptr_store (stmt, tci);
      if (tci->type_maybe_changed
	  && type != tci->known_current_type)
	tci->multiple_types_encountered = true;
      tci->known_current_type = type;
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it has, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gimple call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  struct type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));
  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (comp_type)
      || !BINFO_VTABLE (TYPE_BINFO (comp_type)))
    return true;

  /* C++ methods are not allowed to change THIS pointer unless they
     are constructors or destructors.  */
  if (TREE_CODE (base) == MEM_REF
      && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
      && TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (base, 0))) == PARM_DECL
      && TREE_CODE (TREE_TYPE (current_function_decl)) == METHOD_TYPE
      && !DECL_CXX_CONSTRUCTOR_P (current_function_decl)
      && !DECL_CXX_DESTRUCTOR_P (current_function_decl)
      && (SSA_NAME_VAR (TREE_OPERAND (base, 0))
	  == DECL_ARGUMENTS (current_function_decl)))
    return false;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.known_current_type = NULL_TREE;
  tci.type_maybe_changed = false;
  tci.multiple_types_encountered = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL);
  if (!tci.type_maybe_changed)
    return false;

  if (!tci.known_current_type
      || tci.multiple_types_encountered
      || offset != 0)
    jfunc->type = IPA_JF_UNKNOWN;
  else
    ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);

  return true;
}
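
/* A sketch of what the walk above guards against (an assumed example, not
   from the original sources):

     struct A { virtual void f (); };
     void consume (A *a);

     A *a = new (buf) A;    // hypothetical placement new
     ...                    // if anything here may store a new VMT
     consume (a);           // pointer, the type is not "known" anymore

   walk_aliased_vdefs visits the potentially aliasing stores between the
   call and the relevant definitions, and check_stmt_for_type_change flags
   any that may change the dynamic type.  */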
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gimple call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change (arg, arg, comp_type, call, jfunc, 0);
}
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  PARM_AINFO is a pointer to a structure containing temporary
   information about the parameter.  */

static bool
parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
			      gimple stmt, tree parm_load)
{
  bool modified = false;
  bitmap *visited_stmts;
  ao_ref refd;

  if (parm_ainfo && parm_ainfo->parm_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  /* We can cache visited statements only when parm_ainfo is available and when
     we are looking at a naked load of the whole parameter.  */
  if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
    visited_stmts = NULL;
  else
    visited_stmts = &parm_ainfo->parm_visited_statements;
  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
		      visited_stmts);
  if (parm_ainfo && modified)
    parm_ainfo->parm_modified = true;
  return !modified;
}
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params, provided it has not
   been modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (vec<ipa_param_descriptor> descriptors,
			    struct param_analysis_info *parms_ainfo,
			    gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
					: NULL, stmt, op1))
    return -1;

  return index;
}
/* Return true if memory reference REF loads data that are known to be
   unmodified in this function before reaching statement STMT.  PARM_AINFO, if
   non-NULL, is a pointer to a structure containing temporary information about
   the parameter.  */

static bool
parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
			   gimple stmt, tree ref)
{
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (gimple_vuse (stmt));
  if (parm_ainfo && parm_ainfo->ref_modified)
    return false;

  ao_ref_init (&refd, ref);
  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
		      NULL);
  if (parm_ainfo && modified)
    parm_ainfo->ref_modified = true;
  return !modified;
}
/* Return true if the data pointed to by PARM is known to be unmodified in this
   function before reaching call statement CALL into which it is passed.
   PARM_AINFO is a pointer to a structure containing temporary information
   about PARM.  */

static bool
parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
			      gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm)))
    return false;

  if (parm_ainfo->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
		      parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
  if (modified)
    parm_ainfo->pt_modified = true;
  return !modified;
}
/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If the function returns
   true, *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index,
   offset within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

static bool
ipa_load_from_parm_agg_1 (vec<ipa_param_descriptor> descriptors,
			  struct param_analysis_info *parms_ainfo, gimple stmt,
			  tree op, int *index_p, HOST_WIDE_INT *offset_p,
			  HOST_WIDE_INT *size_p, bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
					   : NULL, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	 void (*<T2e4>) (struct S *) D.1867;
	 struct S * p.1;

	 <bb 2>:
	 p.1_1 = p;
	 D.1867_2 = p.1_1->f;
	 D.1867_2 ();
	 gdp = &p;
      */

      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (descriptors, parms_ainfo, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
				    stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      return true;
    }
  return false;
}
/* Just like the previous function, just without the param_analysis_info
   pointer, for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
			tree op, int *index_p, HOST_WIDE_INT *offset_p,
			bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (info->descriptors, NULL, stmt, op, index_p,
				   offset_p, NULL, by_ref_p);
}
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
	int a.0;

	a.0_2 = a;
	bar (a.0_2);
      }

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
	int D.2064;

	D.2064_4 = a.1(D) + 4;
	bar (D.2064_4);
      }

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
	int a.0;
	int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);
      }

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and look like this:

      B::foo() (struct B * const this)
      {
	struct A * D.1845;

	D.1845_2 = &this_1(D)->D.1748;
	A::bar (D.1845_2);
      }

   INFO is the structure describing individual parameters; it is used at
   different stages of IPA optimizations.  PARMS_AINFO contains the information
   that is only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_node_params *info,
				  struct param_analysis_info *parms_ainfo,
				  struct ipa_jump_func *jfunc,
				  gimple call, gimple stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (info->descriptors, parms_ainfo,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (info->descriptors, parms_ainfo, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
	{
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  ipa_set_jf_arith_pass_through (jfunc, index, op2,
					 gimple_assign_rhs_code (stmt));
	}
      else if (gimple_assign_single_p (stmt))
	{
	  bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
						     call, tc_ssa);
	  bool type_p = false;

	  if (param_type && POINTER_TYPE_P (param_type))
	    type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
					      call, jfunc);
	  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).low * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    {
      bool type_p = !detect_type_change (op1, base, TREE_TYPE (param_type),
					 call, jfunc, offset);
      if (type_p || jfunc->type == IPA_JF_UNKNOWN)
	ipa_set_ancestor_jf (jfunc, offset,
			     type_p ? TREE_TYPE (param_type) : NULL, index,
			     parm_ref_data_pass_through_p (&parms_ainfo[index],
							   call, ssa), type_p);
    }
}
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

     <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

     <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_node_params *info,
				    struct param_analysis_info *parms_ainfo,
				    struct ipa_jump_func *jfunc,
				    gimple call, gimple phi, tree param_type)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  bool type_p = false;
  if (param_type && POINTER_TYPE_P (param_type))
    type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
				  call, jfunc, offset);
  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
    ipa_set_ancestor_jf (jfunc, offset, type_p ? TREE_TYPE (param_type) : NULL,
			 index,
			 parm_ref_data_pass_through_p (&parms_ainfo[index],
						       call, parm), type_p);
}
/* Given OP which is passed as an actual argument to a called function,
   determine if it is possible to construct a KNOWN_TYPE jump function for it
   and if so, create one and store it to JFUNC.
   EXPECTED_TYPE represents a type the argument should be in.  */

static void
compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
			      gimple call, tree expected_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  if (!flag_devirtualize
      || TREE_CODE (op) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE
      /* Be sure expected_type is polymorphic.  */
      || !expected_type
      || TREE_CODE (expected_type) != RECORD_TYPE
      || !TYPE_BINFO (expected_type)
      || !BINFO_VTABLE (TYPE_BINFO (expected_type)))
    return;

  op = TREE_OPERAND (op, 0);
  base = get_ref_base_and_extent (op, &offset, &size, &max_size);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
      || is_global_var (base))
    return;

  if (detect_type_change (op, base, expected_type, call, jfunc, offset))
    return;

  ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
			 expected_type);
}
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
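
/* The layout being matched is the usual C++ member function pointer
   representation, sketched here for illustration (an assumed, Itanium-ABI
   style shape; not part of the original sources):

     struct
     {
       int (*__pfn) ();    - pointer to the method, or, when its least
			     significant bit is set, a vtable offset
       long __delta;       - value to be added to the this pointer
     };  */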
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}
/* Simple linked list, describing known contents of an aggregate before
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents are known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */

static void
determine_known_aggregate_parts (gimple call, tree arg, tree arg_type,
				 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  HOST_WIDE_INT arg_max_size;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					      &arg_max_size);
	  if (arg_max_size == -1
	      || arg_max_size != arg_size
	      || arg_offset < 0)
	    return;
	  if (DECL_P (arg_base))
	    {
	      tree size;
	      check_ref = false;
	      size = build_int_cst (integer_type_node, arg_size);
	      ao_ref_init_from_ptr_and_size (&r, arg_base, size);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					  &arg_max_size);
      if (arg_max_size == -1
	  || arg_max_size != arg_size
	  || arg_offset < 0)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
     structures describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;
      bool partial_overlap;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
	continue;
      if (!gimple_assign_single_p (stmt))
	break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
	  || TREE_CODE (lhs) == BIT_FIELD_REF
	  || contains_bitfld_component_ref_p (lhs))
	break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
					  &lhs_max_size);
      if (lhs_max_size == -1
	  || lhs_max_size != lhs_size
	  || (lhs_offset < arg_offset
	      && lhs_offset + lhs_size > arg_offset)
	  || (lhs_offset < arg_offset + arg_size
	      && lhs_offset + lhs_size > arg_offset + arg_size))
	break;

      if (check_ref)
	{
	  if (TREE_CODE (lhs_base) != MEM_REF
	      || TREE_OPERAND (lhs_base, 0) != arg_base
	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	    break;
	}
      else if (lhs_base != arg_base)
	{
	  if (DECL_P (lhs_base))
	    continue;
	  else
	    break;
	}

      if (lhs_offset + lhs_size < arg_offset
	  || lhs_offset >= (arg_offset + arg_size))
	continue;

      partial_overlap = false;
      p = &list;
      while (*p && (*p)->offset < lhs_offset)
	{
	  if ((*p)->offset + (*p)->size > lhs_offset)
	    {
	      partial_overlap = true;
	      break;
	    }
	  p = &(*p)->next;
	}
      if (partial_overlap)
	break;
      if (*p && (*p)->offset < lhs_offset + lhs_size)
	{
	  if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	    /* We already know this value is subsequently overwritten with
	       something else.  */
	    continue;
	  else
	    /* Otherwise this is a partial overlap which we cannot
	       represent.  */
	    break;
	}

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
	{
	  n->constant = rhs;
	  const_count++;
	}
      else
	n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      vec_alloc (jfunc->agg.items, const_count);
      while (list)
	{
	  if (list->constant)
	    {
	      struct ipa_agg_jf_item item;
	      item.offset = list->offset - arg_offset;
	      gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	      item.value = unshare_expr_without_location (list->constant);
	      jfunc->agg.items->quick_push (item);
	    }
	  list = list->next;
	}
    }
}
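
/* An assumed example of what the three stages recover (not from the
   original sources): for

     struct S { int a; float b; };

     struct S s;
     s.a = 1;
     s.b = 2.5f;
     process (&s);

   the backward walk from the call finds both stores, so the jump function
   of the first argument gets agg.by_ref set and two aggregate items,
   roughly { offset 0, value 1 } and { offset 32, value 2.5f }.  */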
/* Return the type of the Ith formal parameter of the callee of call graph
   edge E, or NULL if it cannot be determined.  */

static tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
/* Compute jump functions for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gimple call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);

      if (is_gimple_ip_invariant (arg))
	{
	  if (L_IPO_COMP_MODE && TREE_CODE (arg) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (arg, 0)) == FUNCTION_DECL)
	    {
	      arg = TREE_OPERAND (arg, 0);
	      arg = cgraph_lipo_get_resolved_node (arg)->decl;
	    }
	  ipa_set_jf_constant (jfunc, arg, cs);
	}
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >= 0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     loop.  */
	  if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p, type_p;
		  agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
							call, arg);
		  if (param_type && POINTER_TYPE_P (param_type))
		    type_p = !detect_type_change_ssa (arg,
						      TREE_TYPE (param_type),
						      call, jfunc);
		  else
		    type_p = false;
		  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
		    ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
						    type_p);
		}
	    }
	  else
	    {
	      gimple stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
						    call, stmt, param_type);
	    }
	}
      else
	compute_known_type_jump_func (arg, jfunc, call,
				      param_type
				      && POINTER_TYPE_P (param_type)
				      ? TREE_TYPE (param_type)
				      : NULL);

      /* If ARG is a pointer, we cannot use its type to determine the type of
	 the aggregate passed (because type conversions are ignored in gimple).
	 Usually we can safely get the type from the function declaration, but
	 in case of K&R prototypes or variadic functions we can try our luck
	 with the type of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object, we
	 might do better to work out the type based on the memory stores we
	 find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_known_aggregate_parts (call, arg, param_type, jfunc);
    }
}
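
/* Putting the cases above together (a hypothetical illustration, not from
   the original sources): in a caller such as

     void caller (int x, struct S *p)
     {
       callee (7, x, &p->inner);
     }

   argument 0 receives an IPA_JF_CONST jump function, argument 1 a simple
   pass-through, and argument 2 typically an ancestor jump function;
   aggregate contents are additionally filled in wherever the argument
   refers to a known aggregate.  */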
/* Compute jump functions for all edges - both direct and indirect - outgoing
   from NODE.  Also count the actual arguments in the process.  */

static void
ipa_compute_jump_functions (struct cgraph_node *node,
			    struct param_analysis_info *parms_ainfo)
{
  struct cgraph_edge *cs;

  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
								  NULL);
      /* We do not need to bother analyzing calls to unknown
	 functions unless they may become known during lto/whopr.  */
      if (!callee->definition && !flag_lto)
	continue;
      ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
}
/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}
/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}
/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
{
  struct cgraph_edge *cs;

  cs = cgraph_edge (node, stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  return cs;
}
/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

     <bb 2>:
     f$__delta_5 = f.__delta;
     f$__pfn_24 = f.__pfn;

   or
     <bb 2>:
     f$__delta_5 = MEM[(struct  *)&f];
     f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and a few lines below:

     <bb 5>
     D.2496_3 = (int) f$__pfn_24;
     D.2497_4 = D.2496_3 & 1;
     if (D.2497_4 != 0)
       goto <bb 3>;
     else
       goto <bb 4>;

     <bb 6>:
     D.2500_7 = (unsigned int) f$__delta_5;
     D.2501_8 = &S + D.2500_7;
     D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
     D.2503_10 = *D.2502_9;
     D.2504_12 = f$__pfn_24 + -1;
     D.2505_13 = (unsigned int) D.2504_12;
     D.2506_14 = D.2503_10 + D.2505_13;
     D.2507_15 = *D.2506_14;
     iftmp.11_16 = (String:: *) D.2507_15;

     <bb 7>:
     # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
     D.2500_19 = (unsigned int) f$__delta_5;
     D.2508_20 = &S + D.2500_19;
     D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */
1866 static void
1867 ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1868 struct ipa_node_params *info,
1869 struct param_analysis_info *parms_ainfo,
1870 gimple call, tree target)
1872 gimple def;
1873 tree n1, n2;
1874 gimple d1, d2;
1875 tree rec, rec2, cond;
1876 gimple branch;
1877 int index;
1878 basic_block bb, virt_bb, join;
1879 HOST_WIDE_INT offset;
1880 bool by_ref;
1882 if (SSA_NAME_IS_DEFAULT_DEF (target))
1884 tree var = SSA_NAME_VAR (target);
1885 index = ipa_get_param_decl_index (info, var);
1886 if (index >= 0)
1887 ipa_note_param_call (node, index, call);
1888 return;
1891 def = SSA_NAME_DEF_STMT (target);
1892 if (gimple_assign_single_p (def)
1893 && ipa_load_from_parm_agg_1 (info->descriptors, parms_ainfo, def,
1894 gimple_assign_rhs1 (def), &index, &offset,
1895 NULL, &by_ref))
1897 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1898 if (cs->indirect_info->offset != offset)
1899 cs->indirect_info->outer_type = NULL;
1900 cs->indirect_info->offset = offset;
1901 cs->indirect_info->agg_contents = 1;
1902 cs->indirect_info->by_ref = by_ref;
1903 return;
1906 /* Now we need to try to match the complex pattern of calling a member
1907 pointer. */
1908 if (gimple_code (def) != GIMPLE_PHI
1909 || gimple_phi_num_args (def) != 2
1910 || !POINTER_TYPE_P (TREE_TYPE (target))
1911 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1912 return;
1914 /* First, we need to check whether one of these is a load from a member
1915 pointer that is a parameter to this function. */
1916 n1 = PHI_ARG_DEF (def, 0);
1917 n2 = PHI_ARG_DEF (def, 1);
1918 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1919 return;
1920 d1 = SSA_NAME_DEF_STMT (n1);
1921 d2 = SSA_NAME_DEF_STMT (n2);
1923 join = gimple_bb (def);
1924 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
1926 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
1927 return;
1929 bb = EDGE_PRED (join, 0)->src;
1930 virt_bb = gimple_bb (d2);
1932 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
1934 bb = EDGE_PRED (join, 1)->src;
1935 virt_bb = gimple_bb (d1);
1937 else
1938 return;
1940 /* Second, we need to check that the basic blocks are laid out in the way
1941 corresponding to the pattern. */
1943 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1944 || single_pred (virt_bb) != bb
1945 || single_succ (virt_bb) != join)
1946 return;
1948 /* Third, let's see that the branching is done depending on the least
1949 significant bit of the pfn. */
1951 branch = last_stmt (bb);
1952 if (!branch || gimple_code (branch) != GIMPLE_COND)
1953 return;
1955 if ((gimple_cond_code (branch) != NE_EXPR
1956 && gimple_cond_code (branch) != EQ_EXPR)
1957 || !integer_zerop (gimple_cond_rhs (branch)))
1958 return;
1960 cond = gimple_cond_lhs (branch);
1961 if (!ipa_is_ssa_with_stmt_def (cond))
1962 return;
1964 def = SSA_NAME_DEF_STMT (cond);
1965 if (!is_gimple_assign (def)
1966 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1967 || !integer_onep (gimple_assign_rhs2 (def)))
1968 return;
1970 cond = gimple_assign_rhs1 (def);
1971 if (!ipa_is_ssa_with_stmt_def (cond))
1972 return;
1974 def = SSA_NAME_DEF_STMT (cond);
1976 if (is_gimple_assign (def)
1977 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
1979 cond = gimple_assign_rhs1 (def);
1980 if (!ipa_is_ssa_with_stmt_def (cond))
1981 return;
1982 def = SSA_NAME_DEF_STMT (cond);
1985 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1986 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1987 == ptrmemfunc_vbit_in_delta),
1988 NULL);
1989 if (rec != rec2)
1990 return;
1992 index = ipa_get_param_decl_index (info, rec);
1993 if (index >= 0
1994 && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
1996 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1997 if (cs->indirect_info->offset != offset)
1998 cs->indirect_info->outer_type = NULL;
1999 cs->indirect_info->offset = offset;
2000 cs->indirect_info->agg_contents = 1;
2001 cs->indirect_info->member_ptr = 1;
2004 return;
2007 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2008 object referenced in the expression is a formal parameter of the caller
2009 (described by INFO), create a call note for the statement. */
2011 static void
2012 ipa_analyze_virtual_call_uses (struct cgraph_node *node,
2013 struct ipa_node_params *info, gimple call,
2014 tree target)
2016 struct cgraph_edge *cs;
2017 struct cgraph_indirect_call_info *ii;
2018 struct ipa_jump_func jfunc;
2019 tree obj = OBJ_TYPE_REF_OBJECT (target);
2020 int index;
2021 HOST_WIDE_INT anc_offset;
2023 if (!flag_devirtualize)
2024 return;
2026 if (TREE_CODE (obj) != SSA_NAME)
2027 return;
2029 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2031 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2032 return;
2034 anc_offset = 0;
2035 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2036 gcc_assert (index >= 0);
2037 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2038 call, &jfunc))
2039 return;
2041 else
2043 gimple stmt = SSA_NAME_DEF_STMT (obj);
2044 tree expr;
2046 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2047 if (!expr)
2048 return;
2049 index = ipa_get_param_decl_index (info,
2050 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2051 gcc_assert (index >= 0);
2052 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2053 call, &jfunc, anc_offset))
2054 return;
2057 cs = ipa_note_param_call (node, index, call);
2058 ii = cs->indirect_info;
2059 ii->offset = anc_offset;
2060 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2061 ii->otr_type = obj_type_ref_class (target);
2062 ii->polymorphic = 1;
2065 /* Analyze the call statement CALL to see whether and how it utilizes formal
2066 parameters of the caller (described by INFO). PARMS_AINFO is a pointer to a
2067 vector containing intermediate information about each formal parameter. */
2069 static void
2070 ipa_analyze_call_uses (struct cgraph_node *node,
2071 struct ipa_node_params *info,
2072 struct param_analysis_info *parms_ainfo, gimple call)
2074 tree target = gimple_call_fn (call);
2075 struct cgraph_edge *cs;
2077 if (!target
2078 || (TREE_CODE (target) != SSA_NAME
2079 && !virtual_method_call_p (target)))
2080 return;
2082 /* If we previously turned the call into a direct call, there is
2083 no need to analyze. */
2084 cs = cgraph_edge (node, call);
2085 if (cs && !cs->indirect_unknown_callee)
2086 return;
2087 if (TREE_CODE (target) == SSA_NAME)
2088 ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
2089 else if (virtual_method_call_p (target))
2090 ipa_analyze_virtual_call_uses (node, info, call, target);
2094 /* Analyze the call statement STMT with respect to formal parameters (described
2095 in INFO) of caller given by NODE. Currently it only checks whether formal
2096 parameters are called. PARMS_AINFO is a pointer to a vector containing
2097 intermediate information about each formal parameter. */
2099 static void
2100 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
2101 struct param_analysis_info *parms_ainfo, gimple stmt)
2103 if (is_gimple_call (stmt))
2104 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
2107 /* Callback of walk_stmt_load_store_addr_ops, used for loads, stores and
2108 taken addresses alike. If OP is a parameter declaration, mark it as used
2109 in the info structure passed in DATA. */
2111 static bool
2112 visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
2114 struct ipa_node_params *info = (struct ipa_node_params *) data;
2116 op = get_base_address (op);
2117 if (op
2118 && TREE_CODE (op) == PARM_DECL)
2120 int index = ipa_get_param_decl_index (info, op);
2121 gcc_assert (index >= 0);
2122 ipa_set_param_used (info, index, true);
2125 return false;
2128 /* Scan the function body of NODE and inspect the uses of formal parameters.
2129 Store the findings in various structures of the associated ipa_node_params
2130 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
2131 vector containing intermediate information about each formal parameter. */
2133 static void
2134 ipa_analyze_params_uses (struct cgraph_node *node,
2135 struct param_analysis_info *parms_ainfo)
2137 tree decl = node->decl;
2138 basic_block bb;
2139 struct function *func;
2140 gimple_stmt_iterator gsi;
2141 struct ipa_node_params *info = IPA_NODE_REF (node);
2142 int i;
2144 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
2145 return;
2147 info->uses_analysis_done = 1;
2148 if (ipa_func_spec_opts_forbid_analysis_p (node))
2150 for (i = 0; i < ipa_get_param_count (info); i++)
2152 ipa_set_param_used (info, i, true);
2153 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2155 return;
2158 for (i = 0; i < ipa_get_param_count (info); i++)
2160 tree parm = ipa_get_param (info, i);
2161 int controlled_uses = 0;
2163 /* For SSA regs see if the parameter is used. For non-SSA we compute
2164 the flag during modification analysis. */
2165 if (is_gimple_reg (parm))
2167 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2168 parm);
2169 if (ddef && !has_zero_uses (ddef))
2171 imm_use_iterator imm_iter;
2172 use_operand_p use_p;
2174 ipa_set_param_used (info, i, true);
2175 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2176 if (!is_gimple_call (USE_STMT (use_p)))
2178 if (!is_gimple_debug (USE_STMT (use_p)))
2180 controlled_uses = IPA_UNDESCRIBED_USE;
2181 break;
2184 else
2185 controlled_uses++;
2187 else
2188 controlled_uses = 0;
2190 else
2191 controlled_uses = IPA_UNDESCRIBED_USE;
2192 ipa_set_controlled_uses (info, i, controlled_uses);
2195 func = DECL_STRUCT_FUNCTION (decl);
2196 FOR_EACH_BB_FN (bb, func)
2198 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2200 gimple stmt = gsi_stmt (gsi);
2202 if (is_gimple_debug (stmt))
2203 continue;
2205 ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
2206 walk_stmt_load_store_addr_ops (stmt, info,
2207 visit_ref_for_mod_analysis,
2208 visit_ref_for_mod_analysis,
2209 visit_ref_for_mod_analysis);
2211 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2212 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
2213 visit_ref_for_mod_analysis,
2214 visit_ref_for_mod_analysis,
2215 visit_ref_for_mod_analysis);
2219 /* Free stuff in PARMS_AINFO, assuming there are PARAM_COUNT parameters. */
2221 static void
2222 free_parms_ainfo (struct param_analysis_info *parms_ainfo, int param_count)
2224 int i;
2226 for (i = 0; i < param_count; i++)
2228 if (parms_ainfo[i].parm_visited_statements)
2229 BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
2230 if (parms_ainfo[i].pt_visited_statements)
2231 BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
2235 /* Initialize the array describing properties of formal parameters
2236 of NODE, analyze their uses and compute jump functions associated
2237 with actual arguments of calls from within NODE. */
2239 void
2240 ipa_analyze_node (struct cgraph_node *node)
2242 struct ipa_node_params *info;
2243 struct param_analysis_info *parms_ainfo;
2244 int param_count;
2246 ipa_check_create_node_params ();
2247 ipa_check_create_edge_args ();
2248 info = IPA_NODE_REF (node);
2249 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
2250 ipa_initialize_node_params (node);
2252 param_count = ipa_get_param_count (info);
2253 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
2254 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
2256 ipa_analyze_params_uses (node, parms_ainfo);
2257 ipa_compute_jump_functions (node, parms_ainfo);
2259 free_parms_ainfo (parms_ainfo, param_count);
2260 pop_cfun ();
2263 /* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF
2264 attempt a type-based devirtualization. If successful, return the
2265 target function declaration, otherwise return NULL. */
2267 tree
2268 ipa_intraprocedural_devirtualization (gimple call)
2270 tree binfo, token, fndecl;
2271 struct ipa_jump_func jfunc;
2272 tree otr = gimple_call_fn (call);
2274 jfunc.type = IPA_JF_UNKNOWN;
2275 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
2276 call, obj_type_ref_class (otr));
2277 if (jfunc.type != IPA_JF_KNOWN_TYPE)
2278 return NULL_TREE;
2279 binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
2280 if (!binfo)
2281 return NULL_TREE;
2282 token = OBJ_TYPE_REF_TOKEN (otr);
2283 fndecl = gimple_get_virt_method_for_binfo (tree_to_uhwi (token),
2284 binfo);
2285 #ifdef ENABLE_CHECKING
2286 if (fndecl)
2287 gcc_assert (possible_polymorphic_call_target_p
2288 (otr, cgraph_get_node (fndecl)));
2289 #endif
2290 return fndecl;
2293 /* Update the jump function DST when the call graph edge corresponding to SRC
2294 is being inlined, knowing that DST is of type ancestor and SRC of known
2295 type. */
2297 static void
2298 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2299 struct ipa_jump_func *dst)
2301 HOST_WIDE_INT combined_offset;
2302 tree combined_type;
2304 if (!ipa_get_jf_ancestor_type_preserved (dst))
2306 dst->type = IPA_JF_UNKNOWN;
2307 return;
2310 combined_offset = ipa_get_jf_known_type_offset (src)
2311 + ipa_get_jf_ancestor_offset (dst);
2312 combined_type = ipa_get_jf_ancestor_type (dst);
2314 ipa_set_jf_known_type (dst, combined_offset,
2315 ipa_get_jf_known_type_base_type (src),
2316 combined_type);
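/* A sketch of the combination above with made-up numbers: if SRC is
   known_type (offset 0, base type B) and DST is ancestor (offset 32,
   type D), then after inlining the combined knowledge is
   known_type (offset 0 + 32, base type B, component type D).  Offsets
   are in bits, as everywhere in jump functions.  */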
2319 /* Update the jump functions associated with call graph edge E when the call
2320 graph edge CS is being inlined, assuming that E->caller is already (possibly
2321 indirectly) inlined into CS->callee and that E has not been inlined. */
2323 static void
2324 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2325 struct cgraph_edge *e)
2327 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2328 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2329 int count = ipa_get_cs_argument_count (args);
2330 int i;
2332 for (i = 0; i < count; i++)
2334 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2336 if (dst->type == IPA_JF_ANCESTOR)
2338 struct ipa_jump_func *src;
2339 int dst_fid = dst->value.ancestor.formal_id;
2341 /* Variable number of arguments can cause havoc if we try to access
2342 one that does not exist in the inlined edge. So make sure we
2343 don't. */
2344 if (dst_fid >= ipa_get_cs_argument_count (top))
2346 dst->type = IPA_JF_UNKNOWN;
2347 continue;
2350 src = ipa_get_ith_jump_func (top, dst_fid);
2352 if (src->agg.items
2353 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2355 struct ipa_agg_jf_item *item;
2356 int j;
2358 /* Currently we do not produce clobber aggregate jump functions;
2359 replace this with merging when we do. */
2360 gcc_assert (!dst->agg.items);
2362 dst->agg.items = vec_safe_copy (src->agg.items);
2363 dst->agg.by_ref = src->agg.by_ref;
2364 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2365 item->offset -= dst->value.ancestor.offset;
2368 if (src->type == IPA_JF_KNOWN_TYPE)
2369 combine_known_type_and_ancestor_jfs (src, dst);
2370 else if (src->type == IPA_JF_PASS_THROUGH
2371 && src->value.pass_through.operation == NOP_EXPR)
2373 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2374 dst->value.ancestor.agg_preserved &=
2375 src->value.pass_through.agg_preserved;
2376 dst->value.ancestor.type_preserved &=
2377 src->value.pass_through.type_preserved;
2379 else if (src->type == IPA_JF_ANCESTOR)
2381 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2382 dst->value.ancestor.offset += src->value.ancestor.offset;
2383 dst->value.ancestor.agg_preserved &=
2384 src->value.ancestor.agg_preserved;
2385 dst->value.ancestor.type_preserved &=
2386 src->value.ancestor.type_preserved;
2388 else
2389 dst->type = IPA_JF_UNKNOWN;
2391 else if (dst->type == IPA_JF_PASS_THROUGH)
2393 struct ipa_jump_func *src;
2394 /* We must check the range because of calls with a variable number of
2395 arguments, and we cannot combine jump functions with operations. */
2396 if (dst->value.pass_through.operation == NOP_EXPR
2397 && (dst->value.pass_through.formal_id
2398 < ipa_get_cs_argument_count (top)))
2400 int dst_fid = dst->value.pass_through.formal_id;
2401 src = ipa_get_ith_jump_func (top, dst_fid);
2402 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2404 switch (src->type)
2406 case IPA_JF_UNKNOWN:
2407 dst->type = IPA_JF_UNKNOWN;
2408 break;
2409 case IPA_JF_KNOWN_TYPE:
2410 if (ipa_get_jf_pass_through_type_preserved (dst))
2411 ipa_set_jf_known_type (dst,
2412 ipa_get_jf_known_type_offset (src),
2413 ipa_get_jf_known_type_base_type (src),
2414 ipa_get_jf_known_type_component_type (src));
2415 else
2416 dst->type = IPA_JF_UNKNOWN;
2417 break;
2418 case IPA_JF_CONST:
2419 ipa_set_jf_cst_copy (dst, src);
2420 break;
2422 case IPA_JF_PASS_THROUGH:
2424 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2425 enum tree_code operation;
2426 operation = ipa_get_jf_pass_through_operation (src);
2428 if (operation == NOP_EXPR)
2430 bool agg_p, type_p;
2431 agg_p = dst_agg_p
2432 && ipa_get_jf_pass_through_agg_preserved (src);
2433 type_p = ipa_get_jf_pass_through_type_preserved (src)
2434 && ipa_get_jf_pass_through_type_preserved (dst);
2435 ipa_set_jf_simple_pass_through (dst, formal_id,
2436 agg_p, type_p);
2438 else
2440 tree operand = ipa_get_jf_pass_through_operand (src);
2441 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2442 operation);
2444 break;
2446 case IPA_JF_ANCESTOR:
2448 bool agg_p, type_p;
2449 agg_p = dst_agg_p
2450 && ipa_get_jf_ancestor_agg_preserved (src);
2451 type_p = ipa_get_jf_ancestor_type_preserved (src)
2452 && ipa_get_jf_pass_through_type_preserved (dst);
2453 ipa_set_ancestor_jf (dst,
2454 ipa_get_jf_ancestor_offset (src),
2455 ipa_get_jf_ancestor_type (src),
2456 ipa_get_jf_ancestor_formal_id (src),
2457 agg_p, type_p);
2458 break;
2460 default:
2461 gcc_unreachable ();
2464 if (src->agg.items
2465 && (dst_agg_p || !src->agg.by_ref))
2467 /* Currently we do not produce clobber aggregate jump
2468 functions; replace this with merging when we do. */
2469 gcc_assert (!dst->agg.items);
2471 dst->agg.by_ref = src->agg.by_ref;
2472 dst->agg.items = vec_safe_copy (src->agg.items);
2475 else
2476 dst->type = IPA_JF_UNKNOWN;
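/* An example of the propagation above, with made-up argument indices: if
   the non-inlined edge E had pass_through (formal_id 2) and the inlined
   edge CS passed constant 5 as argument 2, E's jump function becomes
   const (5).  If CS instead passed its own formal 0 through unmodified,
   E's function becomes pass_through (formal_id 0), now expressed in terms
   of the parameters of CS's caller.  */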
2481 /* If TARGET is an addr_expr of a function declaration, make it the destination
2482 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2484 struct cgraph_edge *
2485 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
2487 struct cgraph_node *callee;
2488 struct inline_edge_summary *es = inline_edge_summary (ie);
2489 bool unreachable = false;
2491 if (TREE_CODE (target) == ADDR_EXPR)
2492 target = TREE_OPERAND (target, 0);
2493 if (TREE_CODE (target) != FUNCTION_DECL)
2495 target = canonicalize_constructor_val (target, NULL);
2496 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2498 if (ie->indirect_info->member_ptr)
2499 /* Member pointer call that goes through a VMT lookup. */
2500 return NULL;
2502 if (dump_file)
2503 fprintf (dump_file, "ipa-prop: Discovered direct call to non-function"
2504 " in %s/%i, making it unreachable.\n",
2505 ie->caller->name (), ie->caller->order);
2506 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2507 callee = cgraph_get_create_node (target);
2508 unreachable = true;
2510 else
2511 callee = cgraph_get_node (target);
2513 else
2514 callee = cgraph_get_node (target);
2516 /* Because may-edges are not explicitly represented and the vtable may be
2517 external, we may create the first reference to the object in the unit. */
2518 if (!callee || callee->global.inlined_to)
2521 /* We had better make sure we can refer to it.
2522 In the case of static functions we are out of luck, since we have
2523 already removed their bodies. In the case of public functions we may
2524 or may not introduce the reference. */
2525 if (!canonicalize_constructor_val (target, NULL)
2526 || !TREE_PUBLIC (target))
2528 if (dump_file)
2529 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2530 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2531 xstrdup (ie->caller->name ()),
2532 ie->caller->order,
2533 xstrdup (ie->callee->name ()),
2534 ie->callee->order);
2535 return NULL;
2537 callee = cgraph_get_create_node (target);
2539 ipa_check_create_node_params ();
2541 /* We cannot make edges to inline clones. It is a bug that someone removed
2542 the cgraph node too early. */
2543 gcc_assert (!callee->global.inlined_to);
2545 if (dump_file && !unreachable)
2547 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
2548 "(%s/%i -> %s/%i), for stmt ",
2549 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2550 xstrdup (ie->caller->name ()),
2551 ie->caller->order,
2552 xstrdup (callee->name ()),
2553 callee->order);
2554 if (ie->call_stmt)
2555 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2556 else
2557 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2559 ie = cgraph_make_edge_direct (ie, callee);
2560 es = inline_edge_summary (ie);
2561 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2562 - eni_size_weights.call_cost);
2563 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2564 - eni_time_weights.call_cost);
2566 return ie;
2569 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2570 return NULL if there is none. BY_REF specifies whether the value has to
2571 be passed by reference or by value. */
2573 tree
2574 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2575 HOST_WIDE_INT offset, bool by_ref)
2577 struct ipa_agg_jf_item *item;
2578 int i;
2580 if (by_ref != agg->by_ref)
2581 return NULL;
2583 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2584 if (item->offset == offset)
2586 /* Currently we do not have clobber values; return NULL for them once
2587 we do. */
2588 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2589 return item->value;
2591 return NULL;
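/* A usage sketch with hypothetical values: if the caller stored &bar at
   bit offset 64 into an aggregate passed by reference, then a lookup with
   OFFSET == 64 and BY_REF == true returns the ADDR_EXPR of bar, whereas
   any other offset, or a by-value/by-reference mismatch, yields NULL.  */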
2594 /* Remove a reference to SYMBOL from the list of references of a node given by
2595 reference description RDESC. Return true if the reference has been
2596 successfully found and removed. */
2598 static bool
2599 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2601 struct ipa_ref *to_del;
2602 struct cgraph_edge *origin;
2604 origin = rdesc->cs;
2605 if (!origin)
2606 return false;
2607 to_del = ipa_find_reference (origin->caller, symbol,
2608 origin->call_stmt, origin->lto_stmt_uid);
2609 if (!to_del)
2610 return false;
2612 ipa_remove_reference (to_del);
2613 if (dump_file)
2614 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2615 xstrdup (origin->caller->name ()),
2616 origin->caller->order, xstrdup (symbol->name ()));
2617 return true;
2620 /* If JFUNC has a reference description with refcount different from
2621 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2622 NULL. JFUNC must be a constant jump function. */
2624 static struct ipa_cst_ref_desc *
2625 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2627 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2628 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2629 return rdesc;
2630 else
2631 return NULL;
2634 /* If the value of constant jump function JFUNC is an address of a function
2635 declaration, return the associated call graph node. Otherwise return
2636 NULL. */
2638 static cgraph_node *
2639 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2641 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2642 tree cst = ipa_get_jf_constant (jfunc);
2643 if (TREE_CODE (cst) != ADDR_EXPR
2644 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2645 return NULL;
2647 return cgraph_get_node (TREE_OPERAND (cst, 0));
2651 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2652 refcount and if it hits zero, remove reference to SYMBOL from the caller of
2653 the edge specified in the rdesc. Return false if either the symbol or the
2654 reference could not be found, otherwise return true. */
2656 static bool
2657 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2659 struct ipa_cst_ref_desc *rdesc;
2660 if (jfunc->type == IPA_JF_CONST
2661 && (rdesc = jfunc_rdesc_usable (jfunc))
2662 && --rdesc->refcount == 0)
2664 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
2665 if (!symbol)
2666 return false;
2668 return remove_described_reference (symbol, rdesc);
2670 return true;
2673 /* Try to find a destination for indirect edge IE that corresponds to a simple
2674 call or a call of a member function pointer and where the destination is a
2675 pointer formal parameter described by jump function JFUNC. If it can be
2676 determined, return the newly direct edge, otherwise return NULL.
2677 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2679 static struct cgraph_edge *
2680 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2681 struct ipa_jump_func *jfunc,
2682 struct ipa_node_params *new_root_info)
2684 struct cgraph_edge *cs;
2685 tree target;
2686 bool agg_contents = ie->indirect_info->agg_contents;
2688 if (ie->indirect_info->agg_contents)
2689 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2690 ie->indirect_info->offset,
2691 ie->indirect_info->by_ref);
2692 else
2693 target = ipa_value_from_jfunc (new_root_info, jfunc);
2694 if (!target)
2695 return NULL;
2696 cs = ipa_make_edge_direct_to_target (ie, target);
2698 if (cs && !agg_contents)
2700 bool ok;
2701 gcc_checking_assert (cs->callee
2702 && (cs != ie
2703 || jfunc->type != IPA_JF_CONST
2704 || !cgraph_node_for_jfunc (jfunc)
2705 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2706 ok = try_decrement_rdesc_refcount (jfunc);
2707 gcc_checking_assert (ok);
2710 return cs;
2713 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2714 call based on a formal parameter which is described by jump function JFUNC
2715 and if it can be determined, make it direct and return the direct edge.
2716 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
2717 are relative to. */
2719 static struct cgraph_edge *
2720 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2721 struct ipa_jump_func *jfunc,
2722 struct ipa_node_params *new_root_info)
2724 tree binfo, target;
2726 if (!flag_devirtualize)
2727 return NULL;
2729 /* First try to do lookup via known virtual table pointer value. */
2730 if (!ie->indirect_info->by_ref)
2732 tree vtable;
2733 unsigned HOST_WIDE_INT offset;
2734 tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
2735 ie->indirect_info->offset,
2736 true);
2737 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
2739 target = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
2740 vtable, offset);
2741 if (target)
2743 if ((TREE_CODE (TREE_TYPE (target)) == FUNCTION_TYPE
2744 && DECL_FUNCTION_CODE (target) == BUILT_IN_UNREACHABLE)
2745 || !possible_polymorphic_call_target_p
2746 (ie, cgraph_get_node (target)))
2748 if (dump_file)
2749 fprintf (dump_file,
2750 "Type inconsident devirtualization: %s/%i->%s\n",
2751 ie->caller->name (), ie->caller->order,
2752 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
2753 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2754 cgraph_get_create_node (target);
2756 return ipa_make_edge_direct_to_target (ie, target);
2761 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
2763 if (!binfo)
2764 return NULL;
2766 if (TREE_CODE (binfo) != TREE_BINFO)
2768 ipa_polymorphic_call_context context;
2769 vec <cgraph_node *>targets;
2770 bool final;
2772 if (!get_polymorphic_call_info_from_invariant
2773 (&context, binfo, ie->indirect_info->otr_type,
2774 ie->indirect_info->offset))
2775 return NULL;
2776 targets = possible_polymorphic_call_targets
2777 (ie->indirect_info->otr_type,
2778 ie->indirect_info->otr_token,
2779 context, &final);
2780 if (!final || targets.length () > 1)
2781 return NULL;
2782 if (targets.length () == 1)
2783 target = targets[0]->decl;
2784 else
2786 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2787 cgraph_get_create_node (target);
2790 else
2792 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
2793 ie->indirect_info->otr_type);
2794 if (binfo)
2795 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2796 binfo);
2797 else
2798 return NULL;
2801 if (target)
2803 #ifdef ENABLE_CHECKING
2804 gcc_assert (possible_polymorphic_call_target_p
2805 (ie, cgraph_get_node (target)));
2806 #endif
2807 return ipa_make_edge_direct_to_target (ie, target);
2809 else
2810 return NULL;
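/* A sketch of the vtable-based path above, under assumed data: if the
   caller is known to have stored &_ZTV1B + 16 at the relevant offset of
   the argument, vtable_pointer_value_to_vtable recovers the vtable and
   offset within it, gimple_get_virt_method_for_vtable indexes it with
   otr_token to obtain the method, and the result is verified against the
   set of possible polymorphic call targets before the edge is made
   direct.  */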
2813 /* Update the param called notes associated with NODE when CS is being inlined,
2814 assuming NODE is (potentially indirectly) inlined into CS->callee.
2815 Moreover, if the callee is discovered to be constant, create a new cgraph
2816 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2817 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
2819 static bool
2820 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2821 struct cgraph_node *node,
2822 vec<cgraph_edge_p> *new_edges)
2824 struct ipa_edge_args *top;
2825 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
2826 struct ipa_node_params *new_root_info;
2827 bool res = false;
2829 ipa_check_create_edge_args ();
2830 top = IPA_EDGE_REF (cs);
2831 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
2832 ? cs->caller->global.inlined_to
2833 : cs->caller);
2835 for (ie = node->indirect_calls; ie; ie = next_ie)
2837 struct cgraph_indirect_call_info *ici = ie->indirect_info;
2838 struct ipa_jump_func *jfunc;
2839 int param_index;
2841 next_ie = ie->next_callee;
2843 if (ici->param_index == -1)
2844 continue;
2846 /* We must check the range because of calls with a variable number of arguments. */
2847 if (ici->param_index >= ipa_get_cs_argument_count (top))
2849 ici->param_index = -1;
2850 continue;
2853 param_index = ici->param_index;
2854 jfunc = ipa_get_ith_jump_func (top, param_index);
2856 if (!flag_indirect_inlining)
2857 new_direct_edge = NULL;
2858 else if (ici->polymorphic)
2859 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
2860 new_root_info);
2861 else
2862 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
2863 new_root_info);
2864 /* If speculation was removed, then we need to do nothing. */
2865 if (new_direct_edge && new_direct_edge != ie)
2867 new_direct_edge->indirect_inlining_edge = 1;
2868 top = IPA_EDGE_REF (cs);
2869 res = true;
2871 else if (new_direct_edge)
2873 new_direct_edge->indirect_inlining_edge = 1;
2874 if (new_direct_edge->call_stmt)
2875 new_direct_edge->call_stmt_cannot_inline_p
2876 = !gimple_check_call_matching_types (
2877 new_direct_edge->call_stmt,
2878 new_direct_edge->callee->decl, false);
2879 if (new_edges)
2881 new_edges->safe_push (new_direct_edge);
2882 res = true;
2884 top = IPA_EDGE_REF (cs);
2886 else if (jfunc->type == IPA_JF_PASS_THROUGH
2887 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2889 if (ici->agg_contents
2890 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
2891 ici->param_index = -1;
2892 else
2893 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
2895 else if (jfunc->type == IPA_JF_ANCESTOR)
2897 if (ici->agg_contents
2898 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
2899 ici->param_index = -1;
2900 else
2902 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
2903 if (ipa_get_jf_ancestor_offset (jfunc))
2904 ici->outer_type = NULL;
2905 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
2908 else
2909 /* Either we can find a destination for this edge now or never. */
2910 ici->param_index = -1;
2913 return res;
2916 /* Recursively traverse subtree of NODE (including node) made of inlined
2917 cgraph_edges when CS has been inlined and invoke
2918 update_indirect_edges_after_inlining on all nodes and
2919 update_jump_functions_after_inlining on all non-inlined edges that lead out
2920 of this subtree. Newly discovered indirect edges will be added to
2921 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
2922 created. */
2924 static bool
2925 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
2926 struct cgraph_node *node,
2927 vec<cgraph_edge_p> *new_edges)
2929 struct cgraph_edge *e;
2930 bool res;
2932 res = update_indirect_edges_after_inlining (cs, node, new_edges);
2934 for (e = node->callees; e; e = e->next_callee)
2935 if (!e->inline_failed)
2936 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
2937 else
2938 update_jump_functions_after_inlining (cs, e);
2939 for (e = node->indirect_calls; e; e = e->next_callee)
2940 update_jump_functions_after_inlining (cs, e);
2942 return res;
2945 /* Combine two controlled uses counts as done during inlining. */
2947 static int
2948 combine_controlled_uses_counters (int c, int d)
2950 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
2951 return IPA_UNDESCRIBED_USE;
2952 else
2953 return c + d - 1;
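/* A worked example: if the new root had C == 2 described uses of a value
   and the inlined callee had D == 3 described uses of the corresponding
   parameter, the combined count is 2 + 3 - 1 == 4, the subtracted one
   being the inlined call site itself, which no longer exists.  An
   IPA_UNDESCRIBED_USE on either side poisons the result.  */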
2956 /* Propagate the number of controlled users from CS->callee to the new root of the
2957 tree of inlined nodes. */
2959 static void
2960 propagate_controlled_uses (struct cgraph_edge *cs)
2962 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
2963 struct cgraph_node *new_root = cs->caller->global.inlined_to
2964 ? cs->caller->global.inlined_to : cs->caller;
2965 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
2966 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
2967 int count, i;
2969 count = MIN (ipa_get_cs_argument_count (args),
2970 ipa_get_param_count (old_root_info));
2971 for (i = 0; i < count; i++)
2973 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2974 struct ipa_cst_ref_desc *rdesc;
2976 if (jf->type == IPA_JF_PASS_THROUGH)
2978 int src_idx, c, d;
2979 src_idx = ipa_get_jf_pass_through_formal_id (jf);
2980 c = ipa_get_controlled_uses (new_root_info, src_idx);
2981 d = ipa_get_controlled_uses (old_root_info, i);
2983 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
2984 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
2985 c = combine_controlled_uses_counters (c, d);
2986 ipa_set_controlled_uses (new_root_info, src_idx, c);
2987 if (c == 0 && new_root_info->ipcp_orig_node)
2989 struct cgraph_node *n;
2990 struct ipa_ref *ref;
2991 tree t = new_root_info->known_vals[src_idx];
2993 if (t && TREE_CODE (t) == ADDR_EXPR
2994 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
2995 && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
2996 && (ref = ipa_find_reference (new_root,
2997 n, NULL, 0)))
2999 if (dump_file)
3000 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3001 "reference from %s/%i to %s/%i.\n",
3002 xstrdup (new_root->name ()),
3003 new_root->order,
3004 xstrdup (n->name ()), n->order);
3005 ipa_remove_reference (ref);
3009 else if (jf->type == IPA_JF_CONST
3010 && (rdesc = jfunc_rdesc_usable (jf)))
3012 int d = ipa_get_controlled_uses (old_root_info, i);
3013 int c = rdesc->refcount;
3014 rdesc->refcount = combine_controlled_uses_counters (c, d);
3015 if (rdesc->refcount == 0)
3017 tree cst = ipa_get_jf_constant (jf);
3018 struct cgraph_node *n;
3019 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3020 && TREE_CODE (TREE_OPERAND (cst, 0))
3021 == FUNCTION_DECL);
3022 n = cgraph_get_node (TREE_OPERAND (cst, 0));
3023 if (n)
3025 struct cgraph_node *clone;
3026 bool ok;
3027 ok = remove_described_reference (n, rdesc);
3028 gcc_checking_assert (ok);
3030 clone = cs->caller;
3031 while (clone->global.inlined_to
3032 && clone != rdesc->cs->caller
3033 && IPA_NODE_REF (clone)->ipcp_orig_node)
3035 struct ipa_ref *ref;
3036 ref = ipa_find_reference (clone,
3037 n, NULL, 0);
3038 if (ref)
3040 if (dump_file)
3041 fprintf (dump_file, "ipa-prop: Removing "
3042 "cloning-created reference "
3043 "from %s/%i to %s/%i.\n",
3044 xstrdup (clone->name ()),
3045 clone->order,
3046 xstrdup (n->name ()),
3047 n->order);
3048 ipa_remove_reference (ref);
3050 clone = clone->callers->caller;
3057 for (i = ipa_get_param_count (old_root_info);
3058 i < ipa_get_cs_argument_count (args);
3059 i++)
3061 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3063 if (jf->type == IPA_JF_CONST)
3065 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3066 if (rdesc)
3067 rdesc->refcount = IPA_UNDESCRIBED_USE;
3069 else if (jf->type == IPA_JF_PASS_THROUGH)
3070 ipa_set_controlled_uses (new_root_info,
3071 jf->value.pass_through.formal_id,
3072 IPA_UNDESCRIBED_USE);
3076 /* Update jump functions and call note functions on inlining the call site CS.
3077 CS is expected to lead to a node already cloned by
3078 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3079 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3080 created. */
3082 bool
3083 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3084 vec<cgraph_edge_p> *new_edges)
3086 bool changed;
3087 /* Do nothing if the preparation phase has not been carried out yet
3088 (i.e. during early inlining). */
3089 if (!ipa_node_params_vector.exists ())
3090 return false;
3091 gcc_assert (ipa_edge_args_vector);
3093 propagate_controlled_uses (cs);
3094 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3096 return changed;
3099 /* Frees all dynamically allocated structures that the argument info points
3100 to. */
3102 void
3103 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3105 vec_free (args->jump_functions);
3106 memset (args, 0, sizeof (*args));
3109 /* Free all ipa_edge_args structures. */
3111 void
3112 ipa_free_all_edge_args (void)
3114 int i;
3115 struct ipa_edge_args *args;
3117 if (!ipa_edge_args_vector)
3118 return;
3120 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3121 ipa_free_edge_args_substructures (args);
3123 vec_free (ipa_edge_args_vector);
3126 /* Frees all dynamically allocated structures that the param info points
3127 to. */
3129 void
3130 ipa_free_node_params_substructures (struct ipa_node_params *info)
3132 info->descriptors.release ();
3133 free (info->lattices);
3134 /* Lattice values and their sources are deallocated with their allocation
3135 pool. */
3136 info->known_vals.release ();
3137 memset (info, 0, sizeof (*info));
3140 /* Free all ipa_node_params structures. */
3142 void
3143 ipa_free_all_node_params (void)
3145 int i;
3146 struct ipa_node_params *info;
3148 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
3149 ipa_free_node_params_substructures (info);
3151 ipa_node_params_vector.release ();
3154 /* Set the aggregate replacements of NODE to be AGGVALS. */
3156 void
3157 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3158 struct ipa_agg_replacement_value *aggvals)
3160 if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
3161 vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);
3163 (*ipa_node_agg_replacements)[node->uid] = aggvals;
3166 /* Hook that is called by cgraph.c when an edge is removed. */
3168 static void
3169 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3171 struct ipa_edge_args *args;
3173 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3174 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3175 return;
3177 args = IPA_EDGE_REF (cs);
3178 if (args->jump_functions)
3180 struct ipa_jump_func *jf;
3181 int i;
3182 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3184 struct ipa_cst_ref_desc *rdesc;
3185 try_decrement_rdesc_refcount (jf);
3186 if (jf->type == IPA_JF_CONST
3187 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3188 && rdesc->cs == cs)
3189 rdesc->cs = NULL;
3193 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3196 /* Hook that is called by cgraph.c when a node is removed. */
3198 static void
3199 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3201 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3202 if (ipa_node_params_vector.length () > (unsigned)node->uid)
3203 ipa_free_node_params_substructures (IPA_NODE_REF (node));
3204 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3205 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
3208 /* Hook that is called by cgraph.c when an edge is duplicated. */
3210 static void
3211 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3212 __attribute__((unused)) void *data)
3214 struct ipa_edge_args *old_args, *new_args;
3215 unsigned int i;
3217 ipa_check_create_edge_args ();
3219 old_args = IPA_EDGE_REF (src);
3220 new_args = IPA_EDGE_REF (dst);
3222 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3224 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3226 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3227 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3229 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3231 if (src_jf->type == IPA_JF_CONST)
3233 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3235 if (!src_rdesc)
3236 dst_jf->value.constant.rdesc = NULL;
3237 else if (src->caller == dst->caller)
3239 struct ipa_ref *ref;
3240 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3241 gcc_checking_assert (n);
3242 ref = ipa_find_reference (src->caller, n,
3243 src->call_stmt, src->lto_stmt_uid);
3244 gcc_checking_assert (ref);
3245 ipa_clone_ref (ref, dst->caller, ref->stmt);
3247 gcc_checking_assert (ipa_refdesc_pool);
3248 struct ipa_cst_ref_desc *dst_rdesc
3249 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3250 dst_rdesc->cs = dst;
3251 dst_rdesc->refcount = src_rdesc->refcount;
3252 dst_rdesc->next_duplicate = NULL;
3253 dst_jf->value.constant.rdesc = dst_rdesc;
3255 else if (src_rdesc->cs == src)
3257 struct ipa_cst_ref_desc *dst_rdesc;
3258 gcc_checking_assert (ipa_refdesc_pool);
3259 dst_rdesc
3260 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3261 dst_rdesc->cs = dst;
3262 dst_rdesc->refcount = src_rdesc->refcount;
3263 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3264 src_rdesc->next_duplicate = dst_rdesc;
3265 dst_jf->value.constant.rdesc = dst_rdesc;
3267 else
3269 struct ipa_cst_ref_desc *dst_rdesc;
3270 /* This can happen during inlining, when a JFUNC can refer to a
3271 reference taken in a function up in the tree of inline clones.
3272 We need to find the duplicate that refers to our tree of
3273 inline clones. */
3275 gcc_assert (dst->caller->global.inlined_to);
3276 for (dst_rdesc = src_rdesc->next_duplicate;
3277 dst_rdesc;
3278 dst_rdesc = dst_rdesc->next_duplicate)
3280 struct cgraph_node *top;
3281 top = dst_rdesc->cs->caller->global.inlined_to
3282 ? dst_rdesc->cs->caller->global.inlined_to
3283 : dst_rdesc->cs->caller;
3284 if (dst->caller->global.inlined_to == top)
3285 break;
3287 gcc_assert (dst_rdesc);
3288 dst_jf->value.constant.rdesc = dst_rdesc;
3294 /* Hook that is called by cgraph.c when a node is duplicated. */
3296 static void
3297 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
3298 ATTRIBUTE_UNUSED void *data)
3300 struct ipa_node_params *old_info, *new_info;
3301 struct ipa_agg_replacement_value *old_av, *new_av;
3303 ipa_check_create_node_params ();
3304 old_info = IPA_NODE_REF (src);
3305 new_info = IPA_NODE_REF (dst);
3307 new_info->descriptors = old_info->descriptors.copy ();
3308 new_info->lattices = NULL;
3309 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3311 new_info->uses_analysis_done = old_info->uses_analysis_done;
3312 new_info->node_enqueued = old_info->node_enqueued;
3314 old_av = ipa_get_agg_replacements_for_node (src);
3315 if (!old_av)
3316 return;
3318 new_av = NULL;
3319 while (old_av)
3321 struct ipa_agg_replacement_value *v;
3323 v = ggc_alloc_ipa_agg_replacement_value ();
3324 memcpy (v, old_av, sizeof (*v));
3325 v->next = new_av;
3326 new_av = v;
3327 old_av = old_av->next;
3329 ipa_set_node_agg_value_chain (dst, new_av);
3333 /* Analyze a newly added function in the callgraph. */
3335 static void
3336 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3338 if (cgraph_function_with_gimple_body_p (node))
3339 ipa_analyze_node (node);
3342 /* Register our cgraph hooks if they are not already there. */
3344 void
3345 ipa_register_cgraph_hooks (void)
3347 if (!edge_removal_hook_holder)
3348 edge_removal_hook_holder =
3349 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3350 if (!node_removal_hook_holder)
3351 node_removal_hook_holder =
3352 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
3353 if (!edge_duplication_hook_holder)
3354 edge_duplication_hook_holder =
3355 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3356 if (!node_duplication_hook_holder)
3357 node_duplication_hook_holder =
3358 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
3359 function_insertion_hook_holder =
3360 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
3363 /* Unregister our cgraph hooks. */
3365 static void
3366 ipa_unregister_cgraph_hooks (void)
3368 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
3369 edge_removal_hook_holder = NULL;
3370 cgraph_remove_node_removal_hook (node_removal_hook_holder);
3371 node_removal_hook_holder = NULL;
3372 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
3373 edge_duplication_hook_holder = NULL;
3374 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
3375 node_duplication_hook_holder = NULL;
3376 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
3377 function_insertion_hook_holder = NULL;
3380 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3381 longer needed after ipa-cp. */
3383 void
3384 ipa_free_all_structures_after_ipa_cp (void)
3386 if (!optimize)
3388 ipa_free_all_edge_args ();
3389 ipa_free_all_node_params ();
3390 free_alloc_pool (ipcp_sources_pool);
3391 free_alloc_pool (ipcp_values_pool);
3392 free_alloc_pool (ipcp_agg_lattice_pool);
3393 ipa_unregister_cgraph_hooks ();
3394 if (ipa_refdesc_pool)
3395 free_alloc_pool (ipa_refdesc_pool);
3399 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3400 longer needed after indirect inlining. */
3402 void
3403 ipa_free_all_structures_after_iinln (void)
3405 ipa_free_all_edge_args ();
3406 ipa_free_all_node_params ();
3407 ipa_unregister_cgraph_hooks ();
3408 if (ipcp_sources_pool)
3409 free_alloc_pool (ipcp_sources_pool);
3410 if (ipcp_values_pool)
3411 free_alloc_pool (ipcp_values_pool);
3412 if (ipcp_agg_lattice_pool)
3413 free_alloc_pool (ipcp_agg_lattice_pool);
3414 if (ipa_refdesc_pool)
3415 free_alloc_pool (ipa_refdesc_pool);
3418 /* Print the ipa_tree_map data structure of function NODE to F. */
3421 void
3422 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3424 int i, count;
3425 struct ipa_node_params *info;
3427 if (!node->definition)
3428 return;
3429 info = IPA_NODE_REF (node);
3430 fprintf (f, " function %s/%i parameter descriptors:\n",
3431 node->name (), node->order);
3432 count = ipa_get_param_count (info);
3433 for (i = 0; i < count; i++)
3435 int c;
3437 fprintf (f, " ");
3438 ipa_dump_param (f, info, i);
3439 if (ipa_is_param_used (info, i))
3440 fprintf (f, " used");
3441 c = ipa_get_controlled_uses (info, i);
3442 if (c == IPA_UNDESCRIBED_USE)
3443 fprintf (f, " undescribed_use");
3444 else
3445 fprintf (f, " controlled_uses=%i", c);
3446 fprintf (f, "\n");
3450 /* Print ipa_tree_map data structures of all functions in the
3451 callgraph to F. */
3453 void
3454 ipa_print_all_params (FILE * f)
3456 struct cgraph_node *node;
3458 fprintf (f, "\nFunction parameters:\n");
3459 FOR_EACH_FUNCTION (node)
3460 ipa_print_node_params (f, node);
3463 /* Return a heap allocated vector containing formal parameters of FNDECL. */
3465 vec<tree>
3466 ipa_get_vector_of_formal_parms (tree fndecl)
3468 vec<tree> args;
3469 int count;
3470 tree parm;
3472 gcc_assert (!flag_wpa);
3473 count = count_formal_params (fndecl);
3474 args.create (count);
3475 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3476 args.quick_push (parm);
3478 return args;
3481 /* Return a heap allocated vector containing types of formal parameters of
3482 function type FNTYPE. */
3484 vec<tree>
3485 ipa_get_vector_of_formal_parm_types (tree fntype)
3487 vec<tree> types;
3488 int count = 0;
3489 tree t;
3491 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3492 count++;
3494 types.create (count);
3495 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3496 types.quick_push (TREE_VALUE (t));
3498 return types;
3501 /* Modify the function declaration FNDECL and its type according to the plan in
3502 ADJUSTMENTS. It also sets the base fields of individual adjustment structures
3503 to reflect the actual parameters being modified, which are determined by the
3504 base_index field. */
3506 void
3507 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3509 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3510 tree orig_type = TREE_TYPE (fndecl);
3511 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3513 /* The following test is an ugly hack; some functions simply don't have any
3514 arguments in their type. This is probably a bug but well... */
3515 bool care_for_types = (old_arg_types != NULL_TREE);
3516 bool last_parm_void;
3517 vec<tree> otypes;
3518 if (care_for_types)
3520 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3521 == void_type_node);
3522 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3523 if (last_parm_void)
3524 gcc_assert (oparms.length () + 1 == otypes.length ());
3525 else
3526 gcc_assert (oparms.length () == otypes.length ());
3528 else
3530 last_parm_void = false;
3531 otypes.create (0);
3534 int len = adjustments.length ();
3535 tree *link = &DECL_ARGUMENTS (fndecl);
3536 tree new_arg_types = NULL;
3537 for (int i = 0; i < len; i++)
3539 struct ipa_parm_adjustment *adj;
3540 gcc_assert (link);
3542 adj = &adjustments[i];
3543 tree parm;
3544 if (adj->op == IPA_PARM_OP_NEW)
3545 parm = NULL;
3546 else
3547 parm = oparms[adj->base_index];
3548 adj->base = parm;
3550 if (adj->op == IPA_PARM_OP_COPY)
3552 if (care_for_types)
3553 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3554 new_arg_types);
3555 *link = parm;
3556 link = &DECL_CHAIN (parm);
3558 else if (adj->op != IPA_PARM_OP_REMOVE)
3560 tree new_parm;
3561 tree ptype;
3563 if (adj->by_ref)
3564 ptype = build_pointer_type (adj->type);
3565 else
3567 ptype = adj->type;
3568 if (is_gimple_reg_type (ptype))
3570 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3571 if (TYPE_ALIGN (ptype) < malign)
3572 ptype = build_aligned_type (ptype, malign);
3576 if (care_for_types)
3577 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3579 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3580 ptype);
3581 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
3582 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3583 DECL_ARTIFICIAL (new_parm) = 1;
3584 DECL_ARG_TYPE (new_parm) = ptype;
3585 DECL_CONTEXT (new_parm) = fndecl;
3586 TREE_USED (new_parm) = 1;
3587 DECL_IGNORED_P (new_parm) = 1;
3588 layout_decl (new_parm, 0);
3590 if (adj->op == IPA_PARM_OP_NEW)
3591 adj->base = NULL;
3592 else
3593 adj->base = parm;
3594 adj->new_decl = new_parm;
3596 *link = new_parm;
3597 link = &DECL_CHAIN (new_parm);
3601 *link = NULL_TREE;
3603 tree new_reversed = NULL;
3604 if (care_for_types)
3606 new_reversed = nreverse (new_arg_types);
3607 if (last_parm_void)
3609 if (new_reversed)
3610 TREE_CHAIN (new_arg_types) = void_list_node;
3611 else
3612 new_reversed = void_list_node;
3616 /* Use copy_node to preserve as much as possible from original type
3617 (debug info, attribute lists etc.)
3618 The exception is that METHOD_TYPEs must have a THIS argument.
3619 When we are asked to remove it, we need to build new FUNCTION_TYPE
3620 instead. */
3621 tree new_type = NULL;
3622 if (TREE_CODE (orig_type) != METHOD_TYPE
3623 || (adjustments[0].op == IPA_PARM_OP_COPY
3624 && adjustments[0].base_index == 0))
3626 new_type = build_distinct_type_copy (orig_type);
3627 TYPE_ARG_TYPES (new_type) = new_reversed;
3629 else
3631 new_type
3632 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3633 new_reversed));
3634 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3635 DECL_VINDEX (fndecl) = NULL_TREE;
3638 /* When signature changes, we need to clear builtin info. */
3639 if (DECL_BUILT_IN (fndecl))
3641 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3642 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3645 /* This is a new type, not a copy of an old type. Need to reassociate
3646 variants. We can handle everything except the main variant lazily. */
3647 tree t = TYPE_MAIN_VARIANT (orig_type);
3648 if (orig_type != t)
3650 TYPE_MAIN_VARIANT (new_type) = t;
3651 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
3652 TYPE_NEXT_VARIANT (t) = new_type;
3654 else
3656 TYPE_MAIN_VARIANT (new_type) = new_type;
3657 TYPE_NEXT_VARIANT (new_type) = NULL;
3660 TREE_TYPE (fndecl) = new_type;
3661 DECL_VIRTUAL_P (fndecl) = 0;
3662 otypes.release ();
3663 oparms.release ();
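/* A hypothetical adjustment plan for the function above: starting from
   void f (int a, struct big *b), ADJUSTMENTS consisting of an
   IPA_PARM_OP_COPY with base_index 0, an IPA_PARM_OP_REMOVE with
   base_index 1 and an IPA_PARM_OP_NEW of type int would rewrite the
   declaration to take (int, int): a's PARM_DECL is kept and re-chained,
   b is unlinked, and an artificial PARM_DECL is built for the new
   parameter with a name derived from arg_prefix ("SYNTH" by default).  */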
3666 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
3667 If this is a directly recursive call, CS must be NULL. Otherwise it must
3668 contain the corresponding call graph edge. */
3670 void
3671 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
3672 ipa_parm_adjustment_vec adjustments)
3674 struct cgraph_node *current_node = cgraph_get_node (current_function_decl);
3675 vec<tree> vargs;
3676 vec<tree, va_gc> **debug_args = NULL;
3677 gimple new_stmt;
3678 gimple_stmt_iterator gsi, prev_gsi;
3679 tree callee_decl;
3680 int i, len;
3682 len = adjustments.length ();
3683 vargs.create (len);
3684 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
3685 ipa_remove_stmt_references (current_node, stmt);
3687 gsi = gsi_for_stmt (stmt);
3688 prev_gsi = gsi;
3689 gsi_prev (&prev_gsi);
3690 for (i = 0; i < len; i++)
3692 struct ipa_parm_adjustment *adj;
3694 adj = &adjustments[i];
3696 if (adj->op == IPA_PARM_OP_COPY)
3698 tree arg = gimple_call_arg (stmt, adj->base_index);
3700 vargs.quick_push (arg);
3702 else if (adj->op != IPA_PARM_OP_REMOVE)
3704 tree expr, base, off;
3705 location_t loc;
3706 unsigned int deref_align = 0;
3707 bool deref_base = false;
3709 /* We create a new parameter out of the value of the old one, we can
3710 do the following kind of transformations:
3712 - A scalar passed by reference is converted to a scalar passed by
3713 value. (adj->by_ref is false and the type of the original
3714 actual argument is a pointer to a scalar).
3716 - A part of an aggregate is passed instead of the whole aggregate.
3717 The part can be passed either by value or by reference, this is
3718 determined by value of adj->by_ref. Moreover, the code below
3719 handles both situations when the original aggregate is passed by
3720 value (its type is not a pointer) and when it is passed by
3721 reference (it is a pointer to an aggregate).
3723 When the new argument is passed by reference (adj->by_ref is true)
3724 it must be a part of an aggregate and therefore we form it by
3725 simply taking the address of a reference inside the original
3726 aggregate. */
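/* For instance, a hypothetical case of the first transformation: if the
   original call was g (&x) for a scalar int x, and ADJ has by_ref false,
   offset 0 and type int, the code below builds MEM_REF <int> (&x, 0),
   i.e. it dereferences the pointer and passes x by value.  */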
3728 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3729 base = gimple_call_arg (stmt, adj->base_index);
3730 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3731 : EXPR_LOCATION (base);
3733 if (TREE_CODE (base) != ADDR_EXPR
3734 && POINTER_TYPE_P (TREE_TYPE (base)))
3735 off = build_int_cst (adj->alias_ptr_type,
3736 adj->offset / BITS_PER_UNIT);
3737 else
3739 HOST_WIDE_INT base_offset;
3740 tree prev_base;
3741 bool addrof;
3743 if (TREE_CODE (base) == ADDR_EXPR)
3745 base = TREE_OPERAND (base, 0);
3746 addrof = true;
3748 else
3749 addrof = false;
3750 prev_base = base;
3751 base = get_addr_base_and_unit_offset (base, &base_offset);
3752 /* Aggregate arguments can have non-invariant addresses. */
3753 if (!base)
3755 base = build_fold_addr_expr (prev_base);
3756 off = build_int_cst (adj->alias_ptr_type,
3757 adj->offset / BITS_PER_UNIT);
3759 else if (TREE_CODE (base) == MEM_REF)
3761 if (!addrof)
3763 deref_base = true;
3764 deref_align = TYPE_ALIGN (TREE_TYPE (base));
3766 off = build_int_cst (adj->alias_ptr_type,
3767 base_offset
3768 + adj->offset / BITS_PER_UNIT);
3769 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
3770 off);
3771 base = TREE_OPERAND (base, 0);
3773 else
3775 off = build_int_cst (adj->alias_ptr_type,
3776 base_offset
3777 + adj->offset / BITS_PER_UNIT);
3778 base = build_fold_addr_expr (base);
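/* At this point BASE and OFF describe the piece of the original argument
   that is to be passed. A new argument passed by value becomes a MEM_REF
   load; one passed by reference becomes the address of such a reference. */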
3782 if (!adj->by_ref)
3784 tree type = adj->type;
3785 unsigned int align;
3786 unsigned HOST_WIDE_INT misalign;
3788 if (deref_base)
3790 align = deref_align;
3791 misalign = 0;
3793 else
3795 get_pointer_alignment_1 (base, &align, &misalign);
3796 if (TYPE_ALIGN (type) > align)
3797 align = TYPE_ALIGN (type);
3799 misalign += (tree_to_double_int (off)
3800 .sext (TYPE_PRECISION (TREE_TYPE (off))).low
3801 * BITS_PER_UNIT);
3802 misalign = misalign & (align - 1);
3803 if (misalign != 0)
3804 align = (misalign & -misalign);
3805 if (align < TYPE_ALIGN (type))
3806 type = build_aligned_type (type, align);
3807 base = force_gimple_operand_gsi (&gsi, base,
3808 true, NULL, true, GSI_SAME_STMT);
3809 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
3810 /* If expr is not a valid gimple call argument, emit
3811 a load into a temporary. */
3812 if (is_gimple_reg_type (TREE_TYPE (expr)))
3814 gimple tem = gimple_build_assign (NULL_TREE, expr);
3815 if (gimple_in_ssa_p (cfun))
3817 gimple_set_vuse (tem, gimple_vuse (stmt));
3818 expr = make_ssa_name (TREE_TYPE (expr), tem);
3820 else
3821 expr = create_tmp_reg (TREE_TYPE (expr), NULL);
3822 gimple_assign_set_lhs (tem, expr);
3823 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
3826 else
3828 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
3829 expr = build_fold_addr_expr (expr);
3830 expr = force_gimple_operand_gsi (&gsi, expr,
3831 true, NULL, true, GSI_SAME_STMT);
3833 vargs.quick_push (expr);
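/* For arguments that were removed or transformed, bind the original value
   to a DEBUG_EXPR_DECL recorded among the callee's debug arguments so that
   the value of the parameter remains available in debug info. */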
3835 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
3837 unsigned int ix;
3838 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
3839 gimple def_temp;
3841 arg = gimple_call_arg (stmt, adj->base_index);
3842 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
3844 if (!fold_convertible_p (TREE_TYPE (origin), arg))
3845 continue;
3846 arg = fold_convert_loc (gimple_location (stmt),
3847 TREE_TYPE (origin), arg);
3849 if (debug_args == NULL)
3850 debug_args = decl_debug_args_insert (callee_decl);
3851 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
3852 if (ddecl == origin)
3854 ddecl = (**debug_args)[ix + 1];
3855 break;
3857 if (ddecl == NULL)
3859 ddecl = make_node (DEBUG_EXPR_DECL);
3860 DECL_ARTIFICIAL (ddecl) = 1;
3861 TREE_TYPE (ddecl) = TREE_TYPE (origin);
3862 DECL_MODE (ddecl) = DECL_MODE (origin);
3864 vec_safe_push (*debug_args, origin);
3865 vec_safe_push (*debug_args, ddecl);
3867 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
3868 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
3872 if (dump_file && (dump_flags & TDF_DETAILS))
3874 fprintf (dump_file, "replacing stmt:");
3875 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
3878 new_stmt = gimple_build_call_vec (callee_decl, vargs);
3879 vargs.release ();
3880 if (gimple_call_lhs (stmt))
3881 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3883 gimple_set_block (new_stmt, gimple_block (stmt));
3884 if (gimple_has_location (stmt))
3885 gimple_set_location (new_stmt, gimple_location (stmt));
3886 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3887 gimple_call_copy_flags (new_stmt, stmt);
3888 if (gimple_in_ssa_p (cfun))
3890 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3891 if (gimple_vdef (stmt))
3893 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3894 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
3898 if (dump_file && (dump_flags & TDF_DETAILS))
3900 fprintf (dump_file, "with stmt:");
3901 print_gimple_stmt (dump_file, new_stmt, 0, 0);
3902 fprintf (dump_file, "\n");
3904 gsi_replace (&gsi, new_stmt, true);
3905 if (cs)
3906 cgraph_set_call_stmt (cs, new_stmt);
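/* Walk back from the new call over all statements inserted by this
   function and record references in each of them. */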
3909 ipa_record_stmt_references (current_node, gsi_stmt (gsi));
3910 gsi_prev (&gsi);
3912 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
3915 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
3916 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
3917 specifies whether the function should care about type incompatibility between
3918 the current and new expressions. If it is false, the function will leave
3919 incompatibility issues to the caller. Return true iff the expression
3920 was modified. */
3922 bool
3923 ipa_modify_expr (tree *expr, bool convert,
3924 ipa_parm_adjustment_vec adjustments)
3926 struct ipa_parm_adjustment *cand
3927 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
3928 if (!cand)
3929 return false;
3931 tree src;
3932 if (cand->by_ref)
3933 src = build_simple_mem_ref (cand->new_decl);
3934 else
3935 src = cand->new_decl;
3937 if (dump_file && (dump_flags & TDF_DETAILS))
3939 fprintf (dump_file, "About to replace expr ");
3940 print_generic_expr (dump_file, *expr, 0);
3941 fprintf (dump_file, " with ");
3942 print_generic_expr (dump_file, src, 0);
3943 fprintf (dump_file, "\n");
3946 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
3948 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
3949 *expr = vce;
3951 else
3952 *expr = src;
3953 return true;
3956 /* If T is an SSA_NAME, return NULL if it is not a default def or
3957 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
3958 the base variable is always returned, regardless if it is a default
3959 def. Return T if it is not an SSA_NAME. */
3961 static tree
3962 get_ssa_base_param (tree t, bool ignore_default_def)
3964 if (TREE_CODE (t) == SSA_NAME)
3966 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
3967 return SSA_NAME_VAR (t);
3968 else
3969 return NULL_TREE;
3971 return t;
3974 /* Given an expression, return an adjustment entry specifying the
3975 transformation to be done on EXPR. If no suitable adjustment entry
3976 was found, returns NULL.
3978 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
3979 default def, otherwise bail on them.
3981 If CONVERT is non-NULL, this function will set *CONVERT if the
3982 expression provided is a component reference. ADJUSTMENTS is the
3983 adjustments vector. */
3985 ipa_parm_adjustment *
3986 ipa_get_adjustment_candidate (tree **expr, bool *convert,
3987 ipa_parm_adjustment_vec adjustments,
3988 bool ignore_default_def)
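/* Look through wrappers that merely select a part of the underlying value;
   replacing the operand then requires an explicit conversion, which is
   signalled through *CONVERT. */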
3990 if (TREE_CODE (**expr) == BIT_FIELD_REF
3991 || TREE_CODE (**expr) == IMAGPART_EXPR
3992 || TREE_CODE (**expr) == REALPART_EXPR)
3994 *expr = &TREE_OPERAND (**expr, 0);
3995 if (convert)
3996 *convert = true;
3999 HOST_WIDE_INT offset, size, max_size;
4000 tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
4001 if (!base || size == -1 || max_size == -1)
4002 return NULL;
4004 if (TREE_CODE (base) == MEM_REF)
4006 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
4007 base = TREE_OPERAND (base, 0);
4010 base = get_ssa_base_param (base, ignore_default_def);
4011 if (!base || TREE_CODE (base) != PARM_DECL)
4012 return NULL;
4014 struct ipa_parm_adjustment *cand = NULL;
4015 unsigned int len = adjustments.length ();
4016 for (unsigned i = 0; i < len; i++)
4018 struct ipa_parm_adjustment *adj = &adjustments[i];
4020 if (adj->base == base
4021 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4023 cand = adj;
4024 break;
4028 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4029 return NULL;
4030 return cand;
4033 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4035 static bool
4036 index_in_adjustments_multiple_times_p (int base_index,
4037 ipa_parm_adjustment_vec adjustments)
4039 int i, len = adjustments.length ();
4040 bool one = false;
4042 for (i = 0; i < len; i++)
4044 struct ipa_parm_adjustment *adj;
4045 adj = &adjustments[i];
4047 if (adj->base_index == base_index)
4049 if (one)
4050 return true;
4051 else
4052 one = true;
4055 return false;
4059 /* Return adjustments that should have the same effect on function parameters
4060 and call arguments as if they were first changed according to adjustments in
4061 INNER and then by adjustments in OUTER. */
4063 ipa_parm_adjustment_vec
4064 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4065 ipa_parm_adjustment_vec outer)
4067 int i, outlen = outer.length ();
4068 int inlen = inner.length ();
4069 int removals = 0;
4070 ipa_parm_adjustment_vec adjustments, tmp;
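/* First gather the non-removed inner adjustments into TMP; the base
   indices of the outer adjustments refer to positions in this reduced
   vector. */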
4072 tmp.create (inlen);
4073 for (i = 0; i < inlen; i++)
4075 struct ipa_parm_adjustment *n;
4076 n = &inner[i];
4078 if (n->op == IPA_PARM_OP_REMOVE)
4079 removals++;
4080 else
4082 /* FIXME: Handling of new arguments is not implemented yet. */
4083 gcc_assert (n->op != IPA_PARM_OP_NEW);
4084 tmp.quick_push (*n);
4088 adjustments.create (outlen + removals);
4089 for (i = 0; i < outlen; i++)
4091 struct ipa_parm_adjustment r;
4092 struct ipa_parm_adjustment *out = &outer[i];
4093 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4095 memset (&r, 0, sizeof (r));
4096 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4097 if (out->op == IPA_PARM_OP_REMOVE)
4099 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4101 r.op = IPA_PARM_OP_REMOVE;
4102 adjustments.quick_push (r);
4104 continue;
4106 else
4108 /* FIXME: Handling of new arguments is not implemented yet. */
4109 gcc_assert (out->op != IPA_PARM_OP_NEW);
4112 r.base_index = in->base_index;
4113 r.type = out->type;
4115 /* FIXME: Create nonlocal value too. */
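/* Combine the offsets. For example, if the inner adjustment passes the
   piece at offset 32 of a parameter and the outer adjustment passes the
   piece at offset 8 of that new parameter, the combined adjustment passes
   the piece at offset 40 of the original parameter. */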
4117 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4118 r.op = IPA_PARM_OP_COPY;
4119 else if (in->op == IPA_PARM_OP_COPY)
4120 r.offset = out->offset;
4121 else if (out->op == IPA_PARM_OP_COPY)
4122 r.offset = in->offset;
4123 else
4124 r.offset = in->offset + out->offset;
4125 adjustments.quick_push (r);
4128 for (i = 0; i < inlen; i++)
4130 struct ipa_parm_adjustment *n = &inner[i];
4132 if (n->op == IPA_PARM_OP_REMOVE)
4133 adjustments.quick_push (*n);
4136 tmp.release ();
4137 return adjustments;
4140 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a
4141 human-friendly way, assuming they are meant to be applied to FNDECL. */
4143 void
4144 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4145 tree fndecl)
4147 int i, len = adjustments.length ();
4148 bool first = true;
4149 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4151 fprintf (file, "IPA param adjustments: ");
4152 for (i = 0; i < len; i++)
4154 struct ipa_parm_adjustment *adj;
4155 adj = &adjustments[i];
4157 if (!first)
4158 fprintf (file, " ");
4159 else
4160 first = false;
4162 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4163 print_generic_expr (file, parms[adj->base_index], 0);
4164 if (adj->base)
4166 fprintf (file, ", base: ");
4167 print_generic_expr (file, adj->base, 0);
4169 if (adj->new_decl)
4171 fprintf (file, ", new_decl: ");
4172 print_generic_expr (file, adj->new_decl, 0);
4174 if (adj->new_ssa_base)
4176 fprintf (file, ", new_ssa_base: ");
4177 print_generic_expr (file, adj->new_ssa_base, 0);
4180 if (adj->op == IPA_PARM_OP_COPY)
4181 fprintf (file, ", copy_param");
4182 else if (adj->op == IPA_PARM_OP_REMOVE)
4183 fprintf (file, ", remove_param");
4184 else
4185 fprintf (file, ", offset %li", (long) adj->offset);
4186 if (adj->by_ref)
4187 fprintf (file, ", by_ref");
4188 print_node_brief (file, ", type: ", adj->type, 0);
4189 fprintf (file, "\n");
4191 parms.release ();
4194 /* Dump the linked list of aggregate replacement values AV to F. */
4196 void
4197 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4199 bool comma = false;
4200 fprintf (f, " Aggregate replacements:");
4201 for (; av; av = av->next)
4203 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4204 av->index, av->offset);
4205 print_generic_expr (f, av->value, 0);
4206 comma = true;
4208 fprintf (f, "\n");
4211 /* Stream out jump function JUMP_FUNC to OB. */
4213 static void
4214 ipa_write_jump_function (struct output_block *ob,
4215 struct ipa_jump_func *jump_func)
4217 struct ipa_agg_jf_item *item;
4218 struct bitpack_d bp;
4219 int i, count;
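/* Write the type tag first; ipa_read_jump_function dispatches on it and
   must consume exactly what each of the cases below produces. */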
4221 streamer_write_uhwi (ob, jump_func->type);
4222 switch (jump_func->type)
4224 case IPA_JF_UNKNOWN:
4225 break;
4226 case IPA_JF_KNOWN_TYPE:
4227 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
4228 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
4229 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
4230 break;
4231 case IPA_JF_CONST:
4232 gcc_assert (
4233 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4234 stream_write_tree (ob, jump_func->value.constant.value, true);
4235 break;
4236 case IPA_JF_PASS_THROUGH:
4237 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4238 if (jump_func->value.pass_through.operation == NOP_EXPR)
4240 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4241 bp = bitpack_create (ob->main_stream);
4242 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4243 bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
4244 streamer_write_bitpack (&bp);
4246 else
4248 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4249 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4251 break;
4252 case IPA_JF_ANCESTOR:
4253 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4254 stream_write_tree (ob, jump_func->value.ancestor.type, true);
4255 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4256 bp = bitpack_create (ob->main_stream);
4257 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4258 bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
4259 streamer_write_bitpack (&bp);
4260 break;
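/* Stream out the aggregate part of the jump function: the number of items,
   the by_ref flag (only when there are items), and then the offset and
   value of each item. */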
4263 count = vec_safe_length (jump_func->agg.items);
4264 streamer_write_uhwi (ob, count);
4265 if (count)
4267 bp = bitpack_create (ob->main_stream);
4268 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4269 streamer_write_bitpack (&bp);
4272 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4274 streamer_write_uhwi (ob, item->offset);
4275 stream_write_tree (ob, item->value, true);
4279 /* Read in jump function JUMP_FUNC from IB. */
4281 static void
4282 ipa_read_jump_function (struct lto_input_block *ib,
4283 struct ipa_jump_func *jump_func,
4284 struct cgraph_edge *cs,
4285 struct data_in *data_in)
4287 enum jump_func_type jftype;
4288 enum tree_code operation;
4289 int i, count;
4291 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4292 switch (jftype)
4294 case IPA_JF_UNKNOWN:
4295 jump_func->type = IPA_JF_UNKNOWN;
4296 break;
4297 case IPA_JF_KNOWN_TYPE:
4299 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4300 tree base_type = stream_read_tree (ib, data_in);
4301 tree component_type = stream_read_tree (ib, data_in);
4303 ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
4304 break;
4306 case IPA_JF_CONST:
4307 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4308 break;
4309 case IPA_JF_PASS_THROUGH:
4310 operation = (enum tree_code) streamer_read_uhwi (ib);
4311 if (operation == NOP_EXPR)
4313 int formal_id = streamer_read_uhwi (ib);
4314 struct bitpack_d bp = streamer_read_bitpack (ib);
4315 bool agg_preserved = bp_unpack_value (&bp, 1);
4316 bool type_preserved = bp_unpack_value (&bp, 1);
4317 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
4318 type_preserved);
4320 else
4322 tree operand = stream_read_tree (ib, data_in);
4323 int formal_id = streamer_read_uhwi (ib);
4324 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4325 operation);
4327 break;
4328 case IPA_JF_ANCESTOR:
4330 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4331 tree type = stream_read_tree (ib, data_in);
4332 int formal_id = streamer_read_uhwi (ib);
4333 struct bitpack_d bp = streamer_read_bitpack (ib);
4334 bool agg_preserved = bp_unpack_value (&bp, 1);
4335 bool type_preserved = bp_unpack_value (&bp, 1);
4337 ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
4338 type_preserved);
4339 break;
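/* Read back the aggregate part of the jump function in the same order in
   which ipa_write_jump_function wrote it. */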
4343 count = streamer_read_uhwi (ib);
4344 vec_alloc (jump_func->agg.items, count);
4345 if (count)
4347 struct bitpack_d bp = streamer_read_bitpack (ib);
4348 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4350 for (i = 0; i < count; i++)
4352 struct ipa_agg_jf_item item;
4353 item.offset = streamer_read_uhwi (ib);
4354 item.value = stream_read_tree (ib, data_in);
4355 jump_func->agg.items->quick_push (item);
4359 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4360 relevant to indirect inlining to OB. */
4362 static void
4363 ipa_write_indirect_edge_info (struct output_block *ob,
4364 struct cgraph_edge *cs)
4366 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4367 struct bitpack_d bp;
4369 streamer_write_hwi (ob, ii->param_index);
4370 streamer_write_hwi (ob, ii->offset);
4371 bp = bitpack_create (ob->main_stream);
4372 bp_pack_value (&bp, ii->polymorphic, 1);
4373 bp_pack_value (&bp, ii->agg_contents, 1);
4374 bp_pack_value (&bp, ii->member_ptr, 1);
4375 bp_pack_value (&bp, ii->by_ref, 1);
4376 bp_pack_value (&bp, ii->maybe_in_construction, 1);
4377 bp_pack_value (&bp, ii->maybe_derived_type, 1);
4378 streamer_write_bitpack (&bp);
4380 if (ii->polymorphic)
4382 streamer_write_hwi (ob, ii->otr_token);
4383 stream_write_tree (ob, ii->otr_type, true);
4384 stream_write_tree (ob, ii->outer_type, true);
4388 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4389 relevant to indirect inlining from IB. */
4391 static void
4392 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4393 struct data_in *data_in ATTRIBUTE_UNUSED,
4394 struct cgraph_edge *cs)
4396 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4397 struct bitpack_d bp;
4399 ii->param_index = (int) streamer_read_hwi (ib);
4400 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4401 bp = streamer_read_bitpack (ib);
4402 ii->polymorphic = bp_unpack_value (&bp, 1);
4403 ii->agg_contents = bp_unpack_value (&bp, 1);
4404 ii->member_ptr = bp_unpack_value (&bp, 1);
4405 ii->by_ref = bp_unpack_value (&bp, 1);
4406 ii->maybe_in_construction = bp_unpack_value (&bp, 1);
4407 ii->maybe_derived_type = bp_unpack_value (&bp, 1);
4408 if (ii->polymorphic)
4410 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4411 ii->otr_type = stream_read_tree (ib, data_in);
4412 ii->outer_type = stream_read_tree (ib, data_in);
4416 /* Stream out NODE info to OB. */
4418 static void
4419 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4421 int node_ref;
4422 lto_symtab_encoder_t encoder;
4423 struct ipa_node_params *info = IPA_NODE_REF (node);
4424 int j;
4425 struct cgraph_edge *e;
4426 struct bitpack_d bp;
4428 encoder = ob->decl_state->symtab_node_encoder;
4429 node_ref = lto_symtab_encoder_encode (encoder, node);
4430 streamer_write_uhwi (ob, node_ref);
4432 streamer_write_uhwi (ob, ipa_get_param_count (info));
4433 for (j = 0; j < ipa_get_param_count (info); j++)
4434 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4435 bp = bitpack_create (ob->main_stream);
4436 gcc_assert (info->uses_analysis_done
4437 || ipa_get_param_count (info) == 0);
4438 gcc_assert (!info->node_enqueued);
4439 gcc_assert (!info->ipcp_orig_node);
4440 for (j = 0; j < ipa_get_param_count (info); j++)
4441 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4442 streamer_write_bitpack (&bp);
4443 for (j = 0; j < ipa_get_param_count (info); j++)
4444 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4445 for (e = node->callees; e; e = e->next_callee)
4447 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4449 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4450 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4451 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4453 for (e = node->indirect_calls; e; e = e->next_callee)
4455 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4457 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4458 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4459 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4460 ipa_write_indirect_edge_info (ob, e);
4464 /* Stream in NODE info from IB. */
4466 static void
4467 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4468 struct data_in *data_in)
4470 struct ipa_node_params *info = IPA_NODE_REF (node);
4471 int k;
4472 struct cgraph_edge *e;
4473 struct bitpack_d bp;
4475 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4477 for (k = 0; k < ipa_get_param_count (info); k++)
4478 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4480 bp = streamer_read_bitpack (ib);
4481 if (ipa_get_param_count (info) != 0)
4482 info->uses_analysis_done = true;
4483 info->node_enqueued = false;
4484 for (k = 0; k < ipa_get_param_count (info); k++)
4485 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4486 for (k = 0; k < ipa_get_param_count (info); k++)
4487 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4488 for (e = node->callees; e; e = e->next_callee)
4490 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4491 int count = streamer_read_uhwi (ib);
4493 if (!count)
4494 continue;
4495 vec_safe_grow_cleared (args->jump_functions, count);
4497 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4498 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4499 data_in);
4501 for (e = node->indirect_calls; e; e = e->next_callee)
4503 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4504 int count = streamer_read_uhwi (ib);
4506 if (count)
4508 vec_safe_grow_cleared (args->jump_functions, count);
4509 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4510 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4511 data_in);
4513 ipa_read_indirect_edge_info (ib, data_in, e);
4517 /* Write jump functions for all functions in the current partition. */
4519 void
4520 ipa_prop_write_jump_functions (void)
4522 struct cgraph_node *node;
4523 struct output_block *ob;
4524 unsigned int count = 0;
4525 lto_symtab_encoder_iterator lsei;
4526 lto_symtab_encoder_t encoder;
4529 if (!ipa_node_params_vector.exists ())
4530 return;
4532 ob = create_output_block (LTO_section_jump_functions);
4533 encoder = ob->decl_state->symtab_node_encoder;
4534 ob->cgraph_node = NULL;
4535 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4536 lsei_next_function_in_partition (&lsei))
4538 node = lsei_cgraph_node (lsei);
4539 if (cgraph_function_with_gimple_body_p (node)
4540 && IPA_NODE_REF (node) != NULL)
4541 count++;
4544 streamer_write_uhwi (ob, count);
4546 /* Process all of the functions. */
4547 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4548 lsei_next_function_in_partition (&lsei))
4550 node = lsei_cgraph_node (lsei);
4551 if (cgraph_function_with_gimple_body_p (node)
4552 && IPA_NODE_REF (node) != NULL)
4553 ipa_write_node_info (ob, node);
4555 streamer_write_char_stream (ob->main_stream, 0);
4556 produce_asm (ob, NULL);
4557 destroy_output_block (ob);
4560 /* Read section in file FILE_DATA of length LEN with data DATA. */
4562 static void
4563 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4564 size_t len)
4566 const struct lto_function_header *header =
4567 (const struct lto_function_header *) data;
4568 const int cfg_offset = sizeof (struct lto_function_header);
4569 const int main_offset = cfg_offset + header->cfg_size;
4570 const int string_offset = main_offset + header->main_size;
4571 struct data_in *data_in;
4572 struct lto_input_block ib_main;
4573 unsigned int i;
4574 unsigned int count;
4576 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4577 header->main_size);
4579 data_in =
4580 lto_data_in_create (file_data, (const char *) data + string_offset,
4581 header->string_size, vNULL);
4582 count = streamer_read_uhwi (&ib_main);
4584 for (i = 0; i < count; i++)
4586 unsigned int index;
4587 struct cgraph_node *node;
4588 lto_symtab_encoder_t encoder;
4590 index = streamer_read_uhwi (&ib_main);
4591 encoder = file_data->symtab_node_encoder;
4592 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4593 gcc_assert (node->definition);
4594 ipa_read_node_info (&ib_main, node, data_in);
4596 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4597 len);
4598 lto_data_in_delete (data_in);
4601 /* Read IPA-CP jump functions. */
4603 void
4604 ipa_prop_read_jump_functions (void)
4606 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4607 struct lto_file_decl_data *file_data;
4608 unsigned int j = 0;
4610 ipa_check_create_node_params ();
4611 ipa_check_create_edge_args ();
4612 ipa_register_cgraph_hooks ();
4614 while ((file_data = file_data_vec[j++]))
4616 size_t len;
4617 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4619 if (data)
4620 ipa_prop_read_section (file_data, data, len);
4624 /* After merging units, we can get mismatches in argument counts, and decl
4625 merging might have rendered parameter lists obsolete. In addition,
4626 compute called_with_variable_arg info. */
4628 void
4629 ipa_update_after_lto_read (void)
4631 ipa_check_create_node_params ();
4632 ipa_check_create_edge_args ();
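/* Stream out the aggregate value replacement chain for NODE to OB. */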
4635 void
4636 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
4638 int node_ref;
4639 unsigned int count = 0;
4640 lto_symtab_encoder_t encoder;
4641 struct ipa_agg_replacement_value *aggvals, *av;
4643 aggvals = ipa_get_agg_replacements_for_node (node);
4644 encoder = ob->decl_state->symtab_node_encoder;
4645 node_ref = lto_symtab_encoder_encode (encoder, node);
4646 streamer_write_uhwi (ob, node_ref);
4648 for (av = aggvals; av; av = av->next)
4649 count++;
4650 streamer_write_uhwi (ob, count);
4652 for (av = aggvals; av; av = av->next)
4654 struct bitpack_d bp;
4656 streamer_write_uhwi (ob, av->offset);
4657 streamer_write_uhwi (ob, av->index);
4658 stream_write_tree (ob, av->value, true);
4660 bp = bitpack_create (ob->main_stream);
4661 bp_pack_value (&bp, av->by_ref, 1);
4662 streamer_write_bitpack (&bp);
4666 /* Stream in the aggregate value replacement chain for NODE from IB. */
4668 static void
4669 read_agg_replacement_chain (struct lto_input_block *ib,
4670 struct cgraph_node *node,
4671 struct data_in *data_in)
4673 struct ipa_agg_replacement_value *aggvals = NULL;
4674 unsigned int count, i;
4676 count = streamer_read_uhwi (ib);
4677 for (i = 0; i < count; i++)
4679 struct ipa_agg_replacement_value *av;
4680 struct bitpack_d bp;
4682 av = ggc_alloc_ipa_agg_replacement_value ();
4683 av->offset = streamer_read_uhwi (ib);
4684 av->index = streamer_read_uhwi (ib);
4685 av->value = stream_read_tree (ib, data_in);
4686 bp = streamer_read_bitpack (ib);
4687 av->by_ref = bp_unpack_value (&bp, 1);
4688 av->next = aggvals;
4689 aggvals = av;
4691 ipa_set_node_agg_value_chain (node, aggvals);
4694 /* Write all aggregate replacements for nodes in the current partition. */
4696 void
4697 ipa_prop_write_all_agg_replacement (void)
4699 struct cgraph_node *node;
4700 struct output_block *ob;
4701 unsigned int count = 0;
4702 lto_symtab_encoder_iterator lsei;
4703 lto_symtab_encoder_t encoder;
4705 if (!ipa_node_agg_replacements)
4706 return;
4708 ob = create_output_block (LTO_section_ipcp_transform);
4709 encoder = ob->decl_state->symtab_node_encoder;
4710 ob->cgraph_node = NULL;
4711 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4712 lsei_next_function_in_partition (&lsei))
4714 node = lsei_cgraph_node (lsei);
4715 if (cgraph_function_with_gimple_body_p (node)
4716 && ipa_get_agg_replacements_for_node (node) != NULL)
4717 count++;
4720 streamer_write_uhwi (ob, count);
4722 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4723 lsei_next_function_in_partition (&lsei))
4725 node = lsei_cgraph_node (lsei);
4726 if (cgraph_function_with_gimple_body_p (node)
4727 && ipa_get_agg_replacements_for_node (node) != NULL)
4728 write_agg_replacement_chain (ob, node);
4730 streamer_write_char_stream (ob->main_stream, 0);
4731 produce_asm (ob, NULL);
4732 destroy_output_block (ob);
4735 /* Read replacements section in file FILE_DATA of length LEN with data
4736 DATA. */
4738 static void
4739 read_replacements_section (struct lto_file_decl_data *file_data,
4740 const char *data,
4741 size_t len)
4743 const struct lto_function_header *header =
4744 (const struct lto_function_header *) data;
4745 const int cfg_offset = sizeof (struct lto_function_header);
4746 const int main_offset = cfg_offset + header->cfg_size;
4747 const int string_offset = main_offset + header->main_size;
4748 struct data_in *data_in;
4749 struct lto_input_block ib_main;
4750 unsigned int i;
4751 unsigned int count;
4753 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4754 header->main_size);
4756 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
4757 header->string_size, vNULL);
4758 count = streamer_read_uhwi (&ib_main);
4760 for (i = 0; i < count; i++)
4762 unsigned int index;
4763 struct cgraph_node *node;
4764 lto_symtab_encoder_t encoder;
4766 index = streamer_read_uhwi (&ib_main);
4767 encoder = file_data->symtab_node_encoder;
4768 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4769 gcc_assert (node->definition);
4770 read_agg_replacement_chain (&ib_main, node, data_in);
4772 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
4773 len);
4774 lto_data_in_delete (data_in);
4777 /* Read IPA-CP aggregate replacements. */
4779 void
4780 ipa_prop_read_all_agg_replacement (void)
4782 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4783 struct lto_file_decl_data *file_data;
4784 unsigned int j = 0;
4786 while ((file_data = file_data_vec[j++]))
4788 size_t len;
4789 const char *data = lto_get_section_data (file_data,
4790 LTO_section_ipcp_transform,
4791 NULL, &len);
4792 if (data)
4793 read_replacements_section (file_data, data, len);
4797 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4798 NODE. */
4800 static void
4801 adjust_agg_replacement_values (struct cgraph_node *node,
4802 struct ipa_agg_replacement_value *aggval)
4804 struct ipa_agg_replacement_value *v;
4805 int i, c = 0, d = 0, *adj;
4807 if (!node->clone.combined_args_to_skip)
4808 return;
4810 for (v = aggval; v; v = v->next)
4812 gcc_assert (v->index >= 0);
4813 if (c < v->index)
4814 c = v->index;
4816 c++;
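/* Build a map from original parameter indices to the indices remaining
   after the skipped parameters are removed; skipped parameters themselves
   map to -1. */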
4818 adj = XALLOCAVEC (int, c);
4819 for (i = 0; i < c; i++)
4820 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
4822 adj[i] = -1;
4823 d++;
4825 else
4826 adj[i] = i - d;
4828 for (v = aggval; v; v = v->next)
4829 v->index = adj[v->index];
4833 /* Function body transformation phase: apply the aggregate replacement values recorded for NODE to the loads in its body. */
4835 unsigned int
4836 ipcp_transform_function (struct cgraph_node *node)
4838 vec<ipa_param_descriptor> descriptors = vNULL;
4839 struct param_analysis_info *parms_ainfo;
4840 struct ipa_agg_replacement_value *aggval;
4841 gimple_stmt_iterator gsi;
4842 basic_block bb;
4843 int param_count;
4844 bool cfg_changed = false, something_changed = false;
4846 gcc_checking_assert (cfun);
4847 gcc_checking_assert (current_function_decl);
4849 if (dump_file)
4850 fprintf (dump_file, "Modification phase of node %s/%i\n",
4851 node->name (), node->order);
4853 aggval = ipa_get_agg_replacements_for_node (node);
4854 if (!aggval)
4855 return 0;
4856 param_count = count_formal_params (node->decl);
4857 if (param_count == 0)
4858 return 0;
4859 adjust_agg_replacement_values (node, aggval);
4860 if (dump_file)
4861 ipa_dump_agg_replacement_values (dump_file, aggval);
4862 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
4863 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
4864 descriptors.safe_grow_cleared (param_count);
4865 ipa_populate_param_decls (node, descriptors);
4867 FOR_EACH_BB_FN (bb, cfun)
4868 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4870 struct ipa_agg_replacement_value *v;
4871 gimple stmt = gsi_stmt (gsi);
4872 tree rhs, val, t;
4873 HOST_WIDE_INT offset, size;
4874 int index;
4875 bool by_ref, vce;
4877 if (!gimple_assign_load_p (stmt))
4878 continue;
4879 rhs = gimple_assign_rhs1 (stmt);
4880 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
4881 continue;
4883 vce = false;
4884 t = rhs;
4885 while (handled_component_p (t))
4887 /* V_C_E can do things like convert an array of integers to one
4888 bigger integer and similar things we do not handle below. */
4889 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
4891 vce = true;
4892 break;
4894 t = TREE_OPERAND (t, 0);
4896 if (vce)
4897 continue;
4899 if (!ipa_load_from_parm_agg_1 (descriptors, parms_ainfo, stmt,
4900 rhs, &index, &offset, &size, &by_ref))
4901 continue;
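/* Look for a replacement value recorded for this parameter index and
   offset; it is only usable if its by_ref flag and the size of the
   access match too. */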
4902 for (v = aggval; v; v = v->next)
4903 if (v->index == index
4904 && v->offset == offset)
4905 break;
4906 if (!v
4907 || v->by_ref != by_ref
4908 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
4909 continue;
4911 gcc_checking_assert (is_gimple_ip_invariant (v->value));
4912 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
4914 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
4915 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
4916 else if (TYPE_SIZE (TREE_TYPE (rhs))
4917 == TYPE_SIZE (TREE_TYPE (v->value)))
4918 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
4919 else
4921 if (dump_file)
4923 fprintf (dump_file, " const ");
4924 print_generic_expr (dump_file, v->value, 0);
4925 fprintf (dump_file, " can't be converted to type of ");
4926 print_generic_expr (dump_file, rhs, 0);
4927 fprintf (dump_file, "\n");
4929 continue;
4932 else
4933 val = v->value;
4935 if (dump_file && (dump_flags & TDF_DETAILS))
4937 fprintf (dump_file, "Modifying stmt:\n ");
4938 print_gimple_stmt (dump_file, stmt, 0, 0);
4940 gimple_assign_set_rhs_from_tree (&gsi, val);
4941 update_stmt (stmt);
4943 if (dump_file && (dump_flags & TDF_DETAILS))
4945 fprintf (dump_file, "into:\n ");
4946 print_gimple_stmt (dump_file, stmt, 0, 0);
4947 fprintf (dump_file, "\n");
4950 something_changed = true;
4951 if (maybe_clean_eh_stmt (stmt)
4952 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4953 cfg_changed = true;
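/* The replacements have now been applied; clear the node's entry so that
   the transformation is not performed again. */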
4956 (*ipa_node_agg_replacements)[node->uid] = NULL;
4957 free_parms_ainfo (parms_ainfo, param_count);
4958 descriptors.release ();
4960 if (!something_changed)
4961 return 0;
4962 else if (cfg_changed)
4963 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
4964 else
4965 return TODO_update_ssa_only_virtuals;