gcc/ipa-prop.c
/* Interprocedural analyses.
   Copyright (C) 2005-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "expr.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "langhooks.h"
#include "target.h"
#include "ipa-prop.h"
#include "bitmap.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "params.h"
#include "ipa-utils.h"
#include "stringpool.h"
#include "tree-ssanames.h"
/* Intermediate information about a parameter that is only useful during the
   run of ipa_analyze_node and is not kept afterwards.  */

struct param_analysis_info
{
  bool parm_modified, ref_modified, pt_modified;
  bitmap parm_visited_statements, pt_visited_statements;
};

/* Vector where the parameter infos are actually stored.  */
vec<ipa_node_params> ipa_node_params_vector;
/* Vector of known aggregate values in cloned nodes.  */
vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
/* Vector where the argument infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;
/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */
static alloc_pool ipa_refdesc_pool;
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
  struct cl_optimization *os;

  if (!fs_opts)
    return false;
  os = TREE_OPTIMIZATION (fs_opts);
  return !os->x_optimize || !os->x_flag_ipa_cp;
}
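
/* For example, a function carrying __attribute__((optimize (0))) gets a
   DECL_FUNCTION_SPECIFIC_OPTIMIZATION node with x_optimize == 0, so the
   predicate above forbids its analysis (the attribute usage is
   illustrative).  */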
/* Return index of the formal whose tree is PTREE in the function whose
   formal parameters are described by DESCRIPTORS.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
                          vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm));
      param_num++;
    }
}
/* Return how many formal parameters FNDECL has.  */

static inline int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}
/* Dump the Ith formal parameter of the function corresponding to INFO
   to FILE.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}
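
/* E.g. for the first parameter of a function  int foo (int a)  the function
   above prints "param #0 a" (the function and parameter names are
   illustrative).  */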
/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, " param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
        fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_KNOWN_TYPE)
        {
          fprintf (f, "KNOWN TYPE: base ");
          print_generic_expr (f, jump_func->value.known_type.base_type, 0);
          fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
                   jump_func->value.known_type.offset);
          print_generic_expr (f, jump_func->value.known_type.component_type, 0);
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_CONST)
        {
          tree val = jump_func->value.constant.value;
          fprintf (f, "CONST: ");
          print_generic_expr (f, val, 0);
          if (TREE_CODE (val) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
            {
              fprintf (f, " -> ");
              print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
                                  0);
            }
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_PASS_THROUGH)
        {
          fprintf (f, "PASS THROUGH: ");
          fprintf (f, "%d, op %s",
                   jump_func->value.pass_through.formal_id,
                   get_tree_code_name (jump_func->value.pass_through.operation));
          if (jump_func->value.pass_through.operation != NOP_EXPR)
            {
              fprintf (f, " ");
              print_generic_expr (f,
                                  jump_func->value.pass_through.operand, 0);
            }
          if (jump_func->value.pass_through.agg_preserved)
            fprintf (f, ", agg_preserved");
          if (jump_func->value.pass_through.type_preserved)
            fprintf (f, ", type_preserved");
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_ANCESTOR)
        {
          fprintf (f, "ANCESTOR: ");
          fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
                   jump_func->value.ancestor.formal_id,
                   jump_func->value.ancestor.offset);
          print_generic_expr (f, jump_func->value.ancestor.type, 0);
          if (jump_func->value.ancestor.agg_preserved)
            fprintf (f, ", agg_preserved");
          if (jump_func->value.ancestor.type_preserved)
            fprintf (f, ", type_preserved");
          fprintf (f, "\n");
        }

      if (jump_func->agg.items)
        {
          struct ipa_agg_jf_item *item;
          int j;

          fprintf (f, " Aggregate passed by %s:\n",
                   jump_func->agg.by_ref ? "reference" : "value");
          FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
            {
              fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
                       item->offset);
              if (TYPE_P (item->value))
                fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
                         tree_to_uhwi (TYPE_SIZE (item->value)));
              else
                {
                  fprintf (f, "cst: ");
                  print_generic_expr (f, item->value, 0);
                }
              fprintf (f, "\n");
            }
        }
    }
}
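
/* For illustration, for one edge the output of the function above looks
   roughly like this (the values are made up; the exact format follows the
   fprintf calls above):

     param 0: PASS THROUGH: 0, op nop_expr, agg_preserved
     param 1: CONST: 4
      Aggregate passed by value:
       offset: 0, cst: 1
       offset: 32, cst: 2
*/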
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
           node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      fprintf (f, " callsite %s/%i -> %s/%i : \n",
               xstrdup (node->name ()), node->order,
               xstrdup (cs->callee->name ()),
               cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
        fprintf (f, " indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
                 ii->member_ptr ? "member ptr" : "aggregate",
                 ii->param_index, ii->offset,
                 ii->by_ref ? "by reference" : "by_value");
      else
        fprintf (f, " indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC,
                 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
                 ii->offset);

      if (cs->call_stmt)
        {
          fprintf (f, ", for stmt ");
          print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
        }
      else
        fprintf (f, "\n");
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}
/* Set JFUNC to be a known type jump function.  */

static void
ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                       tree base_type, tree component_type)
{
  gcc_assert (TREE_CODE (component_type) == RECORD_TYPE
              && TYPE_BINFO (component_type));
  if (!flag_devirtualize)
    return;
  gcc_assert (BINFO_VTABLE (TYPE_BINFO (component_type)));
  jfunc->type = IPA_JF_KNOWN_TYPE;
  jfunc->value.known_type.offset = offset;
  jfunc->value.known_type.base_type = base_type;
  jfunc->value.known_type.component_type = component_type;
  gcc_assert (component_type);
}
/* Set DST to be a copy of another jump function SRC (to be used by jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
                     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}
/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
                     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;
      if (!ipa_refdesc_pool)
        ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
                                       sizeof (struct ipa_cst_ref_desc), 32);

      rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                                bool agg_preserved, bool type_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
  jfunc->value.pass_through.type_preserved = type_preserved;
}

/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                               tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
  jfunc->value.pass_through.type_preserved = false;
}
/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                     tree type, int formal_id, bool agg_preserved,
                     bool type_preserved)
{
  if (!flag_devirtualize)
    type_preserved = false;
  gcc_assert (!type_preserved
              || (TREE_CODE (type) == RECORD_TYPE
                  && TYPE_BINFO (type)
                  && BINFO_VTABLE (TYPE_BINFO (type))));
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.type = type_preserved ? type : NULL;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
  jfunc->value.ancestor.type_preserved = type_preserved;
}
/* Extract the actual BINFO being described by JFUNC which must be a known type
   jump function.  */

tree
ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
{
  tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
  if (!base_binfo)
    return NULL_TREE;
  return get_binfo_at_offset (base_binfo,
                              jfunc->value.known_type.offset,
                              jfunc->value.known_type.component_type);
}
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructor of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */
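
/* A minimal C++ sketch of the constructor layout assumed above (the types
   are hypothetical, for illustration only):

     struct A { virtual void f (); };
     struct B : A { B (); virtual void f (); };

     B::B ()   // conceptually expands to:
     {
       // 1) A::A (this);                  ancestor constructors
       // 2) this->vptr = &vtable-for-B;   VMT pointer stores
       // 3) ... user-written code ...     may call virtual functions
     }
*/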
static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  else if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
        {
          if (flag_strict_aliasing
              && !POINTER_TYPE_P (TREE_TYPE (lhs)))
            return false;

          if (TREE_CODE (lhs) == COMPONENT_REF
              && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
            return false;
          /* In the future we might want to use get_base_ref_and_offset to find
             if there is a field corresponding to the offset and if so, proceed
             almost like if it was a component ref.  */
        }
    }
  return true;
}
/* If STMT can be proved to be an assignment to the virtual method table
   pointer of the object described by TCI and the type associated with the new
   table identified, return the type.  Otherwise return NULL_TREE.  */

static tree
extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
{
  HOST_WIDE_INT offset, size, max_size;
  tree lhs, rhs, base, binfo;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
    return NULL_TREE;

  base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
  if (offset != tci->offset
      || size != POINTER_SIZE
      || max_size != POINTER_SIZE)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (TREE_CODE (tci->object) != MEM_REF
          || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
          || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
                                  TREE_OPERAND (base, 1)))
        return NULL_TREE;
    }
  else if (tci->object != base)
    return NULL_TREE;

  binfo = vtable_pointer_value_to_binfo (rhs);

  /* FIXME: vtable_pointer_value_to_binfo may return BINFO of a
     base of outer type.  In this case we would need to either
     work on binfos or translate it back to outer type and offset.
     KNOWN_TYPE jump functions are not ready for that, yet.  */
  if (!binfo || TYPE_BINFO (BINFO_TYPE (binfo)) != binfo)
    return NULL;

  return BINFO_TYPE (binfo);
}
/* Callback of walk_aliased_vdefs and a helper function for
   detect_type_change to check whether a particular statement may modify
   the virtual table pointer, and if possible also determine the new type of
   the (sub-)object.  It stores its result into DATA, which points to a
   type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct type_change_info *tci = (struct type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tree type;
      type = extr_type_from_vtbl_ptr_store (stmt, tci);
      if (tci->type_maybe_changed
          && type != tci->known_current_type)
        tci->multiple_types_encountered = true;
      tci->known_current_type = type;
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it has, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gimple call,
                    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  struct type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
                       || TREE_CODE (arg) == MEM_REF
                       || handled_component_p (arg));
  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (comp_type)
      || !BINFO_VTABLE (TYPE_BINFO (comp_type)))
    return true;

  /* C++ methods are not allowed to change THIS pointer unless they
     are constructors or destructors.  */
  if (TREE_CODE (base) == MEM_REF
      && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
      && TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (base, 0))) == PARM_DECL
      && TREE_CODE (TREE_TYPE (current_function_decl)) == METHOD_TYPE
      && !DECL_CXX_CONSTRUCTOR_P (current_function_decl)
      && !DECL_CXX_DESTRUCTOR_P (current_function_decl)
      && (SSA_NAME_VAR (TREE_OPERAND (base, 0))
          == DECL_ARGUMENTS (current_function_decl)))
    return false;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.known_current_type = NULL_TREE;
  tci.type_maybe_changed = false;
  tci.multiple_types_encountered = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
                      &tci, NULL);
  if (!tci.type_maybe_changed)
    return false;

  if (!tci.known_current_type
      || tci.multiple_types_encountered
      || offset != 0)
    jfunc->type = IPA_JF_UNKNOWN;
  else
    ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);

  return true;
}
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
                        gimple call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
                build_int_cst (ptr_type_node, 0));

  return detect_type_change (arg, arg, comp_type, call, jfunc, 0);
}
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
               void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  PARM_AINFO is a pointer to a structure containing temporary
   information about the parameter.  */

static bool
parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
                              gimple stmt, tree parm_load)
{
  bool modified = false;
  bitmap *visited_stmts;
  ao_ref refd;

  if (parm_ainfo && parm_ainfo->parm_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  /* We can cache visited statements only when parm_ainfo is available and when
     we are looking at a naked load of the whole parameter.  */
  if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
    visited_stmts = NULL;
  else
    visited_stmts = &parm_ainfo->parm_visited_statements;
  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
                      visited_stmts);
  if (parm_ainfo && modified)
    parm_ainfo->parm_modified = true;
  return !modified;
}
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params, provided the
   parameter has not been modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (vec<ipa_param_descriptor> descriptors,
                            struct param_analysis_info *parms_ainfo,
                            gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
                                        : NULL, stmt, op1))
    return -1;

  return index;
}
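
/* For illustration (hypothetical source and gimple), in

     int foo (int a)
     {
       int a.0;

       a.0_1 = a;     <-- a load matched by the function above
       ...

   load_from_unmodified_param returns the index of parameter A for the marked
   statement, provided nothing could have written to A before it.  */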
/* Return true if memory reference REF loads data that are known to be
   unmodified in this function before reaching statement STMT.  PARM_AINFO, if
   non-NULL, is a pointer to a structure containing temporary information about
   PARM.  */

static bool
parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
                           gimple stmt, tree ref)
{
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (gimple_vuse (stmt));
  if (parm_ainfo && parm_ainfo->ref_modified)
    return false;

  ao_ref_init (&refd, ref);
  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
                      NULL);
  if (parm_ainfo && modified)
    parm_ainfo->ref_modified = true;
  return !modified;
}
/* Return true if the data pointed to by PARM is known to be unmodified in this
   function before reaching call statement CALL into which it is passed.
   PARM_AINFO is a pointer to a structure containing temporary information
   about PARM.  */

static bool
parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
                              gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm)))
    return false;

  if (parm_ainfo->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
                      parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
  if (modified)
    parm_ainfo->pt_modified = true;
  return !modified;
}
/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

static bool
ipa_load_from_parm_agg_1 (vec<ipa_param_descriptor> descriptors,
                          struct param_analysis_info *parms_ainfo, gimple stmt,
                          tree op, int *index_p, HOST_WIDE_INT *offset_p,
                          HOST_WIDE_INT *size_p, bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
          && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
                                           : NULL, stmt, op))
        {
          *index_p = index;
          *by_ref_p = false;
          if (size_p)
            *size_p = size;
          return true;
        }
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
         gimple register, for example:

         void hip7(S*) (struct S * p)
         {
         void (*<T2e4>) (struct S *) D.1867;
         struct S * p.1;

         <bb 2>:
         p.1_1 = p;
         D.1867_2 = p.1_1->f;
         D.1867_2 ();
         gdp = &p;
      */

      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (descriptors, parms_ainfo, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
                                    stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      if (size_p)
        *size_p = size;
      return true;
    }
  return false;
}
/* Just like the previous function, just without the param_analysis_info
   pointer, for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
                        tree op, int *index_p, HOST_WIDE_INT *offset_p,
                        bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (info->descriptors, NULL, stmt, op, index_p,
                                   offset_p, NULL, by_ref_p);
}
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

     foo (int a)
     {
       int a.0;

       a.0_2 = a;
       bar (a.0_2);
     }

   2) The passed value can be described by a simple arithmetic pass-through
   jump function. E.g.

     foo (int a)
     {
       int D.2064;

       D.2064_4 = a.1(D) + 4;
       bar (D.2064_4);
     }

   This case can also occur in combination of the previous one, e.g.:

     foo (int a, int z)
     {
       int a.0;
       int D.2064;

       a.0_3 = a;
       D.2064_4 = a.0_3 + 4;
       foo (D.2064_4);
     }

   3) The passed value is an address of an object within another one (which is
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);
     }

   INFO is the structure describing the individual parameters across the
   different stages of IPA optimizations.  PARMS_AINFO contains the information
   that is only needed for intraprocedural analysis.  */
static void
compute_complex_assign_jump_func (struct ipa_node_params *info,
                                  struct param_analysis_info *parms_ainfo,
                                  struct ipa_jump_func *jfunc,
                                  gimple call, gimple stmt, tree name,
                                  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
        index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
        index = load_from_unmodified_param (info->descriptors, parms_ainfo,
                                            SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (info->descriptors, parms_ainfo, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
        {
          if (!is_gimple_ip_invariant (op2)
              || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
                  && !useless_type_conversion_p (TREE_TYPE (name),
                                                 TREE_TYPE (op1))))
            return;

          ipa_set_jf_arith_pass_through (jfunc, index, op2,
                                         gimple_assign_rhs_code (stmt));
        }
      else if (gimple_assign_single_p (stmt))
        {
          bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                     call, tc_ssa);
          bool type_p = false;

          if (param_type && POINTER_TYPE_P (param_type))
            type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
                                              call, jfunc);
          if (type_p || jfunc->type == IPA_JF_UNKNOWN)
            ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
        }
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).low * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    {
      bool type_p = !detect_type_change (op1, base, TREE_TYPE (param_type),
                                         call, jfunc, offset);
      if (type_p || jfunc->type == IPA_JF_UNKNOWN)
        ipa_set_ancestor_jf (jfunc, offset,
                             type_p ? TREE_TYPE (param_type) : NULL, index,
                             parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                           call, ssa), type_p);
    }
}
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether it is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

   if (obj_2(D) != 0B)
     goto <bb 3>;
   else
     goto <bb 4>;

   <bb 3>:
   iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
   # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
   D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
   return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_node_params *info,
                                    struct param_analysis_info *parms_ainfo,
                                    struct ipa_jump_func *jfunc,
                                    gimple call, gimple phi, tree param_type)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
        return;
    }

  bool type_p = false;
  if (param_type && POINTER_TYPE_P (param_type))
    type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
                                  call, jfunc, offset);
  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
    ipa_set_ancestor_jf (jfunc, offset,
                         type_p ? TREE_TYPE (param_type) : NULL, index,
                         parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                       call, parm), type_p);
}
/* Given OP which is passed as an actual argument to a called function,
   determine if it is possible to construct a KNOWN_TYPE jump function for it
   and if so, create one and store it to JFUNC.
   EXPECTED_TYPE represents a type the argument should be in.  */

static void
compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
                              gimple call, tree expected_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  if (!flag_devirtualize
      || TREE_CODE (op) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE
      /* Be sure expected_type is polymorphic.  */
      || !expected_type
      || TREE_CODE (expected_type) != RECORD_TYPE
      || !TYPE_BINFO (expected_type)
      || !BINFO_VTABLE (TYPE_BINFO (expected_type)))
    return;

  op = TREE_OPERAND (op, 0);
  base = get_ref_base_and_extent (op, &offset, &size, &max_size);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
      || is_global_var (base))
    return;

  if (detect_type_change (op, base, expected_type, call, jfunc, offset))
    return;

  ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
                         expected_type);
}
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
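
/* For reference, the record layout matched above is the one the C++ front
   end uses for pointers to member functions, roughly (the field names are
   illustrative, cf. the __pfn/__delta loads in the member-pointer call
   pattern documented further below):

     struct
     {
       void (*__pfn) ();    // method pointer (or encoded vtable index)
       ptrdiff_t __delta;   // adjustment of the this pointer
     };
*/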
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
        rhs = gimple_assign_rhs1 (def_stmt);
      else
        break;
    }
  return rhs;
}
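
/* E.g. given the (illustrative) copy chain

     b_2 = 5;
     a_1 = b_2;

   get_ssa_def_if_simple_copy (a_1) follows the single-rhs assignments
   backwards and returns the constant 5.  */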
/* Simple linked list, describing known contents of an aggregate before
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is not known to be a
     constant.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
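
/* For example (hypothetical caller), before a call such as

     s.f1 = 1;
     s.f2 = 2;
     bar (s);

   the function below builds a list with two entries, one per field, each
   holding the field's offset, size and the stored constant.  */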
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */

static void
determine_known_aggregate_parts (gimple call, tree arg, tree arg_type,
                                 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
        {
          tree type_size;
          if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
            return;
          check_ref = true;
          arg_base = arg;
          arg_offset = 0;
          type_size = TYPE_SIZE (TREE_TYPE (arg_type));
          arg_size = tree_to_uhwi (type_size);
          ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
        }
      else if (TREE_CODE (arg) == ADDR_EXPR)
        {
          HOST_WIDE_INT arg_max_size;

          arg = TREE_OPERAND (arg, 0);
          arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
                                              &arg_max_size);
          if (arg_max_size == -1
              || arg_max_size != arg_size
              || arg_offset < 0)
            return;
          if (DECL_P (arg_base))
            {
              tree size;
              check_ref = false;
              size = build_int_cst (integer_type_node, arg_size);
              ao_ref_init_from_ptr_and_size (&r, arg_base, size);
            }
          else
            return;
        }
      else
        return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
                                          &arg_max_size);
      if (arg_max_size == -1
          || arg_max_size != arg_size
          || arg_offset < 0)
        return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
     structures describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;
      bool partial_overlap;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
        continue;
      if (!gimple_assign_single_p (stmt))
        break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
          || TREE_CODE (lhs) == BIT_FIELD_REF
          || contains_bitfld_component_ref_p (lhs))
        break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
                                          &lhs_max_size);
      if (lhs_max_size == -1
          || lhs_max_size != lhs_size
          || (lhs_offset < arg_offset
              && lhs_offset + lhs_size > arg_offset)
          || (lhs_offset < arg_offset + arg_size
              && lhs_offset + lhs_size > arg_offset + arg_size))
        break;

      if (check_ref)
        {
          if (TREE_CODE (lhs_base) != MEM_REF
              || TREE_OPERAND (lhs_base, 0) != arg_base
              || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
            break;
        }
      else if (lhs_base != arg_base)
        {
          if (DECL_P (lhs_base))
            continue;
          else
            break;
        }

      if (lhs_offset + lhs_size < arg_offset
          || lhs_offset >= (arg_offset + arg_size))
        continue;

      partial_overlap = false;
      p = &list;
      while (*p && (*p)->offset < lhs_offset)
        {
          if ((*p)->offset + (*p)->size > lhs_offset)
            {
              partial_overlap = true;
              break;
            }
          p = &(*p)->next;
        }
      if (partial_overlap)
        break;
      if (*p && (*p)->offset < lhs_offset + lhs_size)
        {
          if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
            /* We already know this value is subsequently overwritten with
               something else.  */
            continue;
          else
            /* Otherwise this is a partial overlap which we cannot
               represent.  */
            break;
        }

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
        {
          n->constant = rhs;
          const_count++;
        }
      else
        n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
          || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
        break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      vec_alloc (jfunc->agg.items, const_count);
      while (list)
        {
          if (list->constant)
            {
              struct ipa_agg_jf_item item;
              item.offset = list->offset - arg_offset;
              gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
              item.value = unshare_expr_without_location (list->constant);
              jfunc->agg.items->quick_push (item);
            }
          list = list->next;
        }
    }
}
/* Return the declared type of the Ith formal parameter of the function
   associated with call graph edge E, or NULL if it cannot be determined.  */

static tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
               ? TREE_TYPE (e->callee->decl)
               : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
        break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
        return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
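
/* For instance, for a call  bar (x, y)  described by edge CS,
   ipa_get_callee_param_type (cs, 1) yields the declared type of bar's second
   formal (the call is illustrative); the DECL_ARGUMENTS fallback above
   matters for K&R-style or variadic callees whose prototype does not list
   the parameter.  */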
/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
                                     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gimple call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);

      if (is_gimple_ip_invariant (arg))
        ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
               && TREE_CODE (arg) == PARM_DECL)
        {
          int index = ipa_get_param_decl_index (info, arg);

          gcc_assert (index >= 0);
          /* Aggregate passed by value, check for pass-through, otherwise we
             will attempt to fill in aggregate contents later in this
             loop.  */
          if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
            {
              ipa_set_jf_simple_pass_through (jfunc, index, false, false);
              continue;
            }
        }
      else if (TREE_CODE (arg) == SSA_NAME)
        {
          if (SSA_NAME_IS_DEFAULT_DEF (arg))
            {
              int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
              if (index >= 0)
                {
                  bool agg_p, type_p;
                  agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                        call, arg);
                  if (param_type && POINTER_TYPE_P (param_type))
                    type_p = !detect_type_change_ssa (arg, TREE_TYPE (param_type),
                                                      call, jfunc);
                  else
                    type_p = false;
                  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
                    ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
                                                    type_p);
                }
            }
          else
            {
              gimple stmt = SSA_NAME_DEF_STMT (arg);
              if (is_gimple_assign (stmt))
                compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
                                                  call, stmt, arg, param_type);
              else if (gimple_code (stmt) == GIMPLE_PHI)
                compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
                                                    call, stmt, param_type);
            }
        }
      else
        compute_known_type_jump_func (arg, jfunc, call,
                                      param_type
                                      && POINTER_TYPE_P (param_type)
                                      ? TREE_TYPE (param_type)
                                      : NULL);

      /* If ARG is a pointer, we cannot use its type to determine the type of
         the aggregate passed (because type conversions are ignored in
         gimple).  Usually we can safely get the type from the function
         declaration, but in case of K&R prototypes or variadic functions we
         can try our luck with the type of the pointer passed.
         TODO: Since we look for actual initialization of the memory object, we
         may do better to work out the type based on the memory stores we
         find.  */
      if (!param_type)
        param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
           || !ipa_get_jf_pass_through_agg_preserved (jfunc))
          && (jfunc->type != IPA_JF_ANCESTOR
              || !ipa_get_jf_ancestor_agg_preserved (jfunc))
          && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
              || POINTER_TYPE_P (param_type)))
        determine_known_aggregate_parts (call, arg, param_type, jfunc);
    }
}
/* Compute jump functions for all edges - both direct and indirect - outgoing
   from NODE.  Also count the actual arguments in the process.  */

static void
ipa_compute_jump_functions (struct cgraph_node *node,
                            struct param_analysis_info *parms_ainfo)
{
  struct cgraph_edge *cs;

  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
                                                                  NULL);
      /* We do not need to bother analyzing calls to unknown
         functions unless they may become known during lto/whopr.  */
      if (!callee->definition && !flag_lto)
        continue;
      ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
}
/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
                                    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
        return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}
/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}
/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
{
  struct cgraph_edge *cs;

  cs = cgraph_edge (node, stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  return cs;
}
/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

   <bb 2>:
     f$__delta_5 = f.__delta;
     f$__pfn_24 = f.__pfn;

   or

   <bb 2>:
     f$__delta_5 = MEM[(struct  *)&f];
     f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and a few lines below:

   <bb 5>
     D.2496_3 = (int) f$__pfn_24;
     D.2497_4 = D.2496_3 & 1;
     if (D.2497_4 != 0)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 6>:
     D.2500_7 = (unsigned int) f$__delta_5;
     D.2501_8 = &S + D.2500_7;
     D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
     D.2503_10 = *D.2502_9;
     D.2504_12 = f$__pfn_24 + -1;
     D.2505_13 = (unsigned int) D.2504_12;
     D.2506_14 = D.2503_10 + D.2505_13;
     D.2507_15 = *D.2506_14;
     iftmp.11_16 = (String:: *) D.2507_15;

   <bb 7>:
     # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
     D.2500_19 = (unsigned int) f$__delta_5;
     D.2508_20 = &S + D.2500_19;
     D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */
static void
ipa_analyze_indirect_call_uses (struct cgraph_node *node,
                                struct ipa_node_params *info,
                                struct param_analysis_info *parms_ainfo,
                                gimple call, tree target)
{
  gimple def;
  tree n1, n2;
  gimple d1, d2;
  tree rec, rec2, cond;
  gimple branch;
  int index;
  basic_block bb, virt_bb, join;
  HOST_WIDE_INT offset;
  bool by_ref;

  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
        ipa_note_param_call (node, index, call);
      return;
    }

  def = SSA_NAME_DEF_STMT (target);
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg_1 (info->descriptors, parms_ainfo, def,
                                   gimple_assign_rhs1 (def), &index, &offset,
                                   NULL, &by_ref))
    {
      struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
      if (cs->indirect_info->offset != offset)
        cs->indirect_info->outer_type = NULL;
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  n1 = PHI_ARG_DEF (def, 0);
  n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  d1 = SSA_NAME_DEF_STMT (n1);
  d2 = SSA_NAME_DEF_STMT (n2);

  join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
        return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
        return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  rec2 = ipa_get_stmt_member_ptr_load_param (def,
                                             (TARGET_PTRMEMFUNC_VBIT_LOCATION
                                              == ptrmemfunc_vbit_in_delta),
                                             NULL);
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
1985 if (index >= 0
1986 && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
1988 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1989 if (cs->indirect_info->offset != offset)
1990 cs->indirect_info->outer_type = NULL;
1991 cs->indirect_info->offset = offset;
1992 cs->indirect_info->agg_contents = 1;
1993 cs->indirect_info->member_ptr = 1;
1996 return;
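/* Editorial sketch, not part of the original source: the diamond of basic
   blocks matched above stems from the Itanium C++ ABI representation of
   pointers to member functions, which is roughly the struct below (field
   names as the front end emits them; whether the virtual flag lives in
   __pfn or in __delta is what TARGET_PTRMEMFUNC_VBIT_LOCATION describes):

     struct ptrmemfunc
     {
       ptrdiff_t __pfn;    // function address, or vtable offset with the
                           // least significant bit set for virtual targets
       ptrdiff_t __delta;  // adjustment added to the this pointer
     };

   A call through such a value first tests the low bit of __pfn; if it is
   set, the real target is loaded from the vtable at __pfn - 1, otherwise
   __pfn is called directly, which is precisely the two-armed PHI and the
   branch on the least significant bit that the code above recognizes.  */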
1999 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2000 object referenced in the expression is a formal parameter of the caller
2001 (described by INFO), create a call note for the statement. */
2003 static void
2004 ipa_analyze_virtual_call_uses (struct cgraph_node *node,
2005 struct ipa_node_params *info, gimple call,
2006 tree target)
2008 struct cgraph_edge *cs;
2009 struct cgraph_indirect_call_info *ii;
2010 struct ipa_jump_func jfunc;
2011 tree obj = OBJ_TYPE_REF_OBJECT (target);
2012 int index;
2013 HOST_WIDE_INT anc_offset;
2015 if (!flag_devirtualize)
2016 return;
2018 if (TREE_CODE (obj) != SSA_NAME)
2019 return;
2021 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2023 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2024 return;
2026 anc_offset = 0;
2027 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2028 gcc_assert (index >= 0);
2029 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2030 call, &jfunc))
2031 return;
2033 else
2035 gimple stmt = SSA_NAME_DEF_STMT (obj);
2036 tree expr;
2038 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2039 if (!expr)
2040 return;
2041 index = ipa_get_param_decl_index (info,
2042 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2043 gcc_assert (index >= 0);
2044 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2045 call, &jfunc, anc_offset))
2046 return;
2049 cs = ipa_note_param_call (node, index, call);
2050 ii = cs->indirect_info;
2051 ii->offset = anc_offset;
2052 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2053 ii->otr_type = obj_type_ref_class (target);
2054 ii->polymorphic = 1;
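/* Editorial sketch, not part of the original source: a typical C++ origin
   of the OBJ_TYPE_REF analyzed above, assuming a hypothetical class A:

     struct A { virtual int f (int); };

     int g (A *a)
     {
       return a->f (4);   // becomes an OBJ_TYPE_REF with object a, token 0
     }

   Since a is a formal parameter (a default SSA definition), the indirect
   edge is annotated with its index and marked polymorphic, enabling later
   devirtualization if the dynamic type of the argument becomes known.  */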
2057 /* Analyze a call statement CALL whether and how it utilizes formal parameters
2058 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
2059 containing intermediate information about each formal parameter. */
2061 static void
2062 ipa_analyze_call_uses (struct cgraph_node *node,
2063 struct ipa_node_params *info,
2064 struct param_analysis_info *parms_ainfo, gimple call)
2066 tree target = gimple_call_fn (call);
2067 struct cgraph_edge *cs;
2069 if (!target
2070 || (TREE_CODE (target) != SSA_NAME
2071 && !virtual_method_call_p (target)))
2072 return;
2074 /* If we previously turned the call into a direct call, there is
2075 no need to analyze. */
2076 cs = cgraph_edge (node, call);
2077 if (cs && !cs->indirect_unknown_callee)
2078 return;
2079 if (TREE_CODE (target) == SSA_NAME)
2080 ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
2081 else if (virtual_method_call_p (target))
2082 ipa_analyze_virtual_call_uses (node, info, call, target);
2086 /* Analyze the call statement STMT with respect to formal parameters (described
2087 in INFO) of caller given by NODE. Currently it only checks whether formal
2088 parameters are called. PARMS_AINFO is a pointer to a vector containing
2089 intermediate information about each formal parameter. */
2091 static void
2092 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
2093 struct param_analysis_info *parms_ainfo, gimple stmt)
2095 if (is_gimple_call (stmt))
2096 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
2099 /* Callback of walk_stmt_load_store_addr_ops, used below for loads, stores
2100 and taken addresses alike. If OP is a parameter declaration, mark it as
2101 used in the info structure passed in DATA. */
2103 static bool
2104 visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
2106 struct ipa_node_params *info = (struct ipa_node_params *) data;
2108 op = get_base_address (op);
2109 if (op
2110 && TREE_CODE (op) == PARM_DECL)
2112 int index = ipa_get_param_decl_index (info, op);
2113 gcc_assert (index >= 0);
2114 ipa_set_param_used (info, index, true);
2117 return false;
2120 /* Scan the function body of NODE and inspect the uses of formal parameters.
2121 Store the findings in various structures of the associated ipa_node_params
2122 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
2123 vector containing intermediate information about each formal parameter. */
2125 static void
2126 ipa_analyze_params_uses (struct cgraph_node *node,
2127 struct param_analysis_info *parms_ainfo)
2129 tree decl = node->decl;
2130 basic_block bb;
2131 struct function *func;
2132 gimple_stmt_iterator gsi;
2133 struct ipa_node_params *info = IPA_NODE_REF (node);
2134 int i;
2136 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
2137 return;
2139 info->uses_analysis_done = 1;
2140 if (ipa_func_spec_opts_forbid_analysis_p (node))
2142 for (i = 0; i < ipa_get_param_count (info); i++)
2144 ipa_set_param_used (info, i, true);
2145 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2147 return;
2150 for (i = 0; i < ipa_get_param_count (info); i++)
2152 tree parm = ipa_get_param (info, i);
2153 int controlled_uses = 0;
2155 /* For SSA regs see if the parameter is used. For non-SSA we compute
2156 the flag during modification analysis. */
2157 if (is_gimple_reg (parm))
2159 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2160 parm);
2161 if (ddef && !has_zero_uses (ddef))
2163 imm_use_iterator imm_iter;
2164 use_operand_p use_p;
2166 ipa_set_param_used (info, i, true);
2167 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2168 if (!is_gimple_call (USE_STMT (use_p)))
2170 if (!is_gimple_debug (USE_STMT (use_p)))
2172 controlled_uses = IPA_UNDESCRIBED_USE;
2173 break;
2176 else
2177 controlled_uses++;
2179 else
2180 controlled_uses = 0;
2182 else
2183 controlled_uses = IPA_UNDESCRIBED_USE;
2184 ipa_set_controlled_uses (info, i, controlled_uses);
2187 func = DECL_STRUCT_FUNCTION (decl);
2188 FOR_EACH_BB_FN (bb, func)
2190 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2192 gimple stmt = gsi_stmt (gsi);
2194 if (is_gimple_debug (stmt))
2195 continue;
2197 ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
2198 walk_stmt_load_store_addr_ops (stmt, info,
2199 visit_ref_for_mod_analysis,
2200 visit_ref_for_mod_analysis,
2201 visit_ref_for_mod_analysis);
2203 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2204 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
2205 visit_ref_for_mod_analysis,
2206 visit_ref_for_mod_analysis,
2207 visit_ref_for_mod_analysis);
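/* Editorial sketch, not part of the original source: "controlled uses"
   counts SSA uses of a parameter that appear in call statements only.
   In a hypothetical function

     static int wrap (int (*cb) (int))
     {
       return cb (1) + cb (2);
     }

   cb has two uses, both in GIMPLE_CALLs, so controlled_uses == 2; storing
   cb to memory or comparing it would instead force IPA_UNDESCRIBED_USE,
   because such uses cannot be tracked when constants are propagated.  */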
2211 /* Free stuff in PARMS_AINFO, assuming there are PARAM_COUNT parameters. */
2213 static void
2214 free_parms_ainfo (struct param_analysis_info *parms_ainfo, int param_count)
2216 int i;
2218 for (i = 0; i < param_count; i++)
2220 if (parms_ainfo[i].parm_visited_statements)
2221 BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
2222 if (parms_ainfo[i].pt_visited_statements)
2223 BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
2227 /* Initialize the array describing properties of formal parameters
2228 of NODE, analyze their uses and compute jump functions associated
2229 with actual arguments of calls from within NODE. */
2231 void
2232 ipa_analyze_node (struct cgraph_node *node)
2234 struct ipa_node_params *info;
2235 struct param_analysis_info *parms_ainfo;
2236 int param_count;
2238 ipa_check_create_node_params ();
2239 ipa_check_create_edge_args ();
2240 info = IPA_NODE_REF (node);
2241 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
2242 ipa_initialize_node_params (node);
2244 param_count = ipa_get_param_count (info);
2245 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
2246 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
2248 ipa_analyze_params_uses (node, parms_ainfo);
2249 ipa_compute_jump_functions (node, parms_ainfo);
2251 free_parms_ainfo (parms_ainfo, param_count);
2252 pop_cfun ();
2255 /* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF
2256 attempt a type-based devirtualization. If successful, return the
2257 target function declaration, otherwise return NULL. */
2259 tree
2260 ipa_intraprocedural_devirtualization (gimple call)
2262 tree binfo, token, fndecl;
2263 struct ipa_jump_func jfunc;
2264 tree otr = gimple_call_fn (call);
2266 jfunc.type = IPA_JF_UNKNOWN;
2267 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
2268 call, obj_type_ref_class (otr));
2269 if (jfunc.type != IPA_JF_KNOWN_TYPE)
2270 return NULL_TREE;
2271 binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
2272 if (!binfo)
2273 return NULL_TREE;
2274 token = OBJ_TYPE_REF_TOKEN (otr);
2275 fndecl = gimple_get_virt_method_for_binfo (tree_to_uhwi (token),
2276 binfo);
2277 #ifdef ENABLE_CHECKING
2278 if (fndecl)
2279 gcc_assert (possible_polymorphic_call_target_p
2280 (otr, cgraph_get_node (fndecl)));
2281 #endif
2282 return fndecl;
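/* Editorial sketch, not part of the original source: a case of the sort
   the function above targets, assuming a hypothetical class A:

     struct A { virtual int f () { return 1; } };

     int g ()
     {
       A a;
       A *p = &a;
       return p->f ();    // dynamic type of *p is locally known to be A
     }

   compute_known_type_jump_func recognizes that the object is the local a
   of known type A, and the BINFO lookup then yields A::f directly.  */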
2285 /* Update the jump function DST when the call graph edge corresponding to SRC
2286 is being inlined, knowing that DST is of type ancestor and SRC of known
2287 type. */
2289 static void
2290 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2291 struct ipa_jump_func *dst)
2293 HOST_WIDE_INT combined_offset;
2294 tree combined_type;
2296 if (!ipa_get_jf_ancestor_type_preserved (dst))
2298 dst->type = IPA_JF_UNKNOWN;
2299 return;
2302 combined_offset = ipa_get_jf_known_type_offset (src)
2303 + ipa_get_jf_ancestor_offset (dst);
2304 combined_type = ipa_get_jf_ancestor_type (dst);
2306 ipa_set_jf_known_type (dst, combined_offset,
2307 ipa_get_jf_known_type_base_type (src),
2308 combined_type);
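/* Editorial example, not part of the original source: if SRC says "the
   argument is a known object of type D at offset 0" and DST is an ancestor
   jump function converting to a base located at offset 8 within it, the
   combination above describes a known type at offset 0 + 8 == 8 in the
   original object, with the ancestor's type as the component type.  */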
2311 /* Update the jump functions associated with call graph edge E when the call
2312 graph edge CS is being inlined, assuming that E->caller is already (possibly
2313 indirectly) inlined into CS->callee and that E has not been inlined. */
2315 static void
2316 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2317 struct cgraph_edge *e)
2319 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2320 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2321 int count = ipa_get_cs_argument_count (args);
2322 int i;
2324 for (i = 0; i < count; i++)
2326 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2328 if (dst->type == IPA_JF_ANCESTOR)
2330 struct ipa_jump_func *src;
2331 int dst_fid = dst->value.ancestor.formal_id;
2333 /* Variable number of arguments can cause havoc if we try to access
2334 one that does not exist in the inlined edge. So make sure we
2335 don't. */
2336 if (dst_fid >= ipa_get_cs_argument_count (top))
2338 dst->type = IPA_JF_UNKNOWN;
2339 continue;
2342 src = ipa_get_ith_jump_func (top, dst_fid);
2344 if (src->agg.items
2345 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2347 struct ipa_agg_jf_item *item;
2348 int j;
2350 /* Currently we do not produce clobber aggregate jump functions;
2351 replace with merging when we do. */
2352 gcc_assert (!dst->agg.items);
2354 dst->agg.items = vec_safe_copy (src->agg.items);
2355 dst->agg.by_ref = src->agg.by_ref;
2356 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2357 item->offset -= dst->value.ancestor.offset;
2360 if (src->type == IPA_JF_KNOWN_TYPE)
2361 combine_known_type_and_ancestor_jfs (src, dst);
2362 else if (src->type == IPA_JF_PASS_THROUGH
2363 && src->value.pass_through.operation == NOP_EXPR)
2365 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2366 dst->value.ancestor.agg_preserved &=
2367 src->value.pass_through.agg_preserved;
2368 dst->value.ancestor.type_preserved &=
2369 src->value.pass_through.type_preserved;
2371 else if (src->type == IPA_JF_ANCESTOR)
2373 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2374 dst->value.ancestor.offset += src->value.ancestor.offset;
2375 dst->value.ancestor.agg_preserved &=
2376 src->value.ancestor.agg_preserved;
2377 dst->value.ancestor.type_preserved &=
2378 src->value.ancestor.type_preserved;
2380 else
2381 dst->type = IPA_JF_UNKNOWN;
2383 else if (dst->type == IPA_JF_PASS_THROUGH)
2385 struct ipa_jump_func *src;
2386 /* We must check range due to calls with variable number of arguments
2387 and we cannot combine jump functions with operations. */
2388 if (dst->value.pass_through.operation == NOP_EXPR
2389 && (dst->value.pass_through.formal_id
2390 < ipa_get_cs_argument_count (top)))
2392 int dst_fid = dst->value.pass_through.formal_id;
2393 src = ipa_get_ith_jump_func (top, dst_fid);
2394 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2396 switch (src->type)
2398 case IPA_JF_UNKNOWN:
2399 dst->type = IPA_JF_UNKNOWN;
2400 break;
2401 case IPA_JF_KNOWN_TYPE:
2402 if (ipa_get_jf_pass_through_type_preserved (dst))
2403 ipa_set_jf_known_type (dst,
2404 ipa_get_jf_known_type_offset (src),
2405 ipa_get_jf_known_type_base_type (src),
2406 ipa_get_jf_known_type_component_type (src));
2407 else
2408 dst->type = IPA_JF_UNKNOWN;
2409 break;
2410 case IPA_JF_CONST:
2411 ipa_set_jf_cst_copy (dst, src);
2412 break;
2414 case IPA_JF_PASS_THROUGH:
2416 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2417 enum tree_code operation;
2418 operation = ipa_get_jf_pass_through_operation (src);
2420 if (operation == NOP_EXPR)
2422 bool agg_p, type_p;
2423 agg_p = dst_agg_p
2424 && ipa_get_jf_pass_through_agg_preserved (src);
2425 type_p = ipa_get_jf_pass_through_type_preserved (src)
2426 && ipa_get_jf_pass_through_type_preserved (dst);
2427 ipa_set_jf_simple_pass_through (dst, formal_id,
2428 agg_p, type_p);
2430 else
2432 tree operand = ipa_get_jf_pass_through_operand (src);
2433 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2434 operation);
2436 break;
2438 case IPA_JF_ANCESTOR:
2440 bool agg_p, type_p;
2441 agg_p = dst_agg_p
2442 && ipa_get_jf_ancestor_agg_preserved (src);
2443 type_p = ipa_get_jf_ancestor_type_preserved (src)
2444 && ipa_get_jf_pass_through_type_preserved (dst);
2445 ipa_set_ancestor_jf (dst,
2446 ipa_get_jf_ancestor_offset (src),
2447 ipa_get_jf_ancestor_type (src),
2448 ipa_get_jf_ancestor_formal_id (src),
2449 agg_p, type_p);
2450 break;
2452 default:
2453 gcc_unreachable ();
2456 if (src->agg.items
2457 && (dst_agg_p || !src->agg.by_ref))
2459 /* Currently we do not produce clobber aggregate jump
2460 functions; replace with merging when we do. */
2461 gcc_assert (!dst->agg.items);
2463 dst->agg.by_ref = src->agg.by_ref;
2464 dst->agg.items = vec_safe_copy (src->agg.items);
2467 else
2468 dst->type = IPA_JF_UNKNOWN;
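/* Editorial example, not part of the original source: the loop above
   composes jump functions much like function composition.  Suppose B has
   been inlined into A and an edge B->C remains whose argument 0 was a
   simple pass-through of B's parameter 3; if A's edge to B described
   argument 3 as an ancestor of A's parameter 1 at offset 16, the edge
   A->C now gets an ancestor jump function with formal_id 1 and offset 16.
   Two ancestor functions instead combine by adding their offsets.  */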
2473 /* If TARGET is an addr_expr of a function declaration, make it the destination
2474 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2476 struct cgraph_edge *
2477 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
2479 struct cgraph_node *callee;
2480 struct inline_edge_summary *es = inline_edge_summary (ie);
2481 bool unreachable = false;
2483 if (TREE_CODE (target) == ADDR_EXPR)
2484 target = TREE_OPERAND (target, 0);
2485 if (TREE_CODE (target) != FUNCTION_DECL)
2487 target = canonicalize_constructor_val (target, NULL);
2488 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2490 if (ie->indirect_info->member_ptr)
2491 /* Member pointer call that goes through a VMT lookup. */
2492 return NULL;
2494 if (dump_file)
2495 fprintf (dump_file, "ipa-prop: Discovered direct call to non-function"
2496 " in %s/%i, making it unreachable.\n",
2497 ie->caller->name (), ie->caller->order);
2498 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2499 callee = cgraph_get_create_node (target);
2500 unreachable = true;
2502 else
2503 callee = cgraph_get_node (target);
2505 else
2506 callee = cgraph_get_node (target);
2508 /* Because may-edges are not explicitly represented and the vtable may be external,
2509 we may create the first reference to the object in the unit. */
2510 if (!callee || callee->global.inlined_to)
2513 /* We had better make sure we can refer to it.
2514 In the case of static functions we are out of luck, since we already
2515 removed its body. In the case of public functions we may or may
2516 not introduce the reference. */
2517 if (!canonicalize_constructor_val (target, NULL)
2518 || !TREE_PUBLIC (target))
2520 if (dump_file)
2521 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2522 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2523 xstrdup (ie->caller->name ()),
2524 ie->caller->order,
2525 xstrdup (ie->callee->name ()),
2526 ie->callee->order);
2527 return NULL;
2529 callee = cgraph_get_create_node (target);
2531 ipa_check_create_node_params ();
2533 /* We cannot make edges to inline clones. It is a bug if someone removed
2534 the cgraph node too early. */
2535 gcc_assert (!callee->global.inlined_to);
2537 if (dump_file && !unreachable)
2539 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
2540 "(%s/%i -> %s/%i), for stmt ",
2541 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2542 xstrdup (ie->caller->name ()),
2543 ie->caller->order,
2544 xstrdup (callee->name ()),
2545 callee->order);
2546 if (ie->call_stmt)
2547 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2548 else
2549 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2551 ie = cgraph_make_edge_direct (ie, callee);
2552 es = inline_edge_summary (ie);
2553 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2554 - eni_size_weights.call_cost);
2555 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2556 - eni_time_weights.call_cost);
2558 return ie;
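/* Editorial note, not part of the original source: the final adjustment
   above reflects that a direct call is cheaper than an indirect one.
   With made-up weights indirect_call_cost == 3 and call_cost == 1, making
   the edge direct lowers the size and time estimates of the statement by
   3 - 1 == 2 units each.  */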
2561 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2562 return NULL if there is none. BY_REF specifies whether the value has to
2563 be passed by reference or by value. */
2565 tree
2566 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2567 HOST_WIDE_INT offset, bool by_ref)
2569 struct ipa_agg_jf_item *item;
2570 int i;
2572 if (by_ref != agg->by_ref)
2573 return NULL;
2575 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2576 if (item->offset == offset)
2578 /* Currently we do not have clobber values; return NULL for them once
2579 we do. */
2580 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2581 return item->value;
2583 return NULL;
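/* Editorial example, not part of the original source (offsets are in bits
   here, and foo is a hypothetical function): if the aggregate jump
   function records the by-value items { offset 0 -> 1, offset 64 -> &foo },
   then a by-value query for offset 64 returns &foo, a query for offset 32
   returns NULL, and any by-reference query returns NULL because the
   by_ref kinds disagree.  */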
2586 /* Remove a reference to SYMBOL from the list of references of a node given by
2587 reference description RDESC. Return true if the reference has been
2588 successfully found and removed. */
2590 static bool
2591 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2593 struct ipa_ref *to_del;
2594 struct cgraph_edge *origin;
2596 origin = rdesc->cs;
2597 if (!origin)
2598 return false;
2599 to_del = ipa_find_reference (origin->caller, symbol,
2600 origin->call_stmt, origin->lto_stmt_uid);
2601 if (!to_del)
2602 return false;
2604 ipa_remove_reference (to_del);
2605 if (dump_file)
2606 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2607 xstrdup (origin->caller->name ()),
2608 origin->caller->order, xstrdup (symbol->name ()));
2609 return true;
2612 /* If JFUNC has a reference description with refcount different from
2613 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2614 NULL. JFUNC must be a constant jump function. */
2616 static struct ipa_cst_ref_desc *
2617 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2619 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2620 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2621 return rdesc;
2622 else
2623 return NULL;
2626 /* If the value of constant jump function JFUNC is an address of a function
2627 declaration, return the associated call graph node. Otherwise return
2628 NULL. */
2630 static cgraph_node *
2631 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2633 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2634 tree cst = ipa_get_jf_constant (jfunc);
2635 if (TREE_CODE (cst) != ADDR_EXPR
2636 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2637 return NULL;
2639 return cgraph_get_node (TREE_OPERAND (cst, 0));
2643 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2644 refcount and if it hits zero, remove the reference to the described symbol
2645 from the caller of the edge specified in the rdesc. Return false if either
2646 the symbol or the reference could not be found, otherwise return true. */
2648 static bool
2649 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2651 struct ipa_cst_ref_desc *rdesc;
2652 if (jfunc->type == IPA_JF_CONST
2653 && (rdesc = jfunc_rdesc_usable (jfunc))
2654 && --rdesc->refcount == 0)
2656 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
2657 if (!symbol)
2658 return false;
2660 return remove_described_reference (symbol, rdesc);
2662 return true;
2665 /* Try to find a destination for indirect edge IE that corresponds to a simple
2666 call or a call of a member function pointer and where the destination is a
2667 pointer formal parameter described by jump function JFUNC. If it can be
2668 determined, return the newly direct edge, otherwise return NULL.
2669 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2671 static struct cgraph_edge *
2672 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2673 struct ipa_jump_func *jfunc,
2674 struct ipa_node_params *new_root_info)
2676 struct cgraph_edge *cs;
2677 tree target;
2678 bool agg_contents = ie->indirect_info->agg_contents;
2680 if (ie->indirect_info->agg_contents)
2681 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2682 ie->indirect_info->offset,
2683 ie->indirect_info->by_ref);
2684 else
2685 target = ipa_value_from_jfunc (new_root_info, jfunc);
2686 if (!target)
2687 return NULL;
2688 cs = ipa_make_edge_direct_to_target (ie, target);
2690 if (cs && !agg_contents)
2692 bool ok;
2693 gcc_checking_assert (cs->callee
2694 && (cs != ie
2695 || jfunc->type != IPA_JF_CONST
2696 || !cgraph_node_for_jfunc (jfunc)
2697 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2698 ok = try_decrement_rdesc_refcount (jfunc);
2699 gcc_checking_assert (ok);
2702 return cs;
2705 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2706 call based on a formal parameter which is described by jump function JFUNC
2707 and if it can be determined, make it direct and return the direct edge.
2708 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
2709 are relative to. */
2711 static struct cgraph_edge *
2712 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2713 struct ipa_jump_func *jfunc,
2714 struct ipa_node_params *new_root_info)
2716 tree binfo, target;
2718 if (!flag_devirtualize)
2719 return NULL;
2721 /* First try to do lookup via known virtual table pointer value. */
2722 if (!ie->indirect_info->by_ref)
2724 tree vtable;
2725 unsigned HOST_WIDE_INT offset;
2726 tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
2727 ie->indirect_info->offset,
2728 true);
2729 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
2731 target = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
2732 vtable, offset);
2733 if (target)
2735 if ((TREE_CODE (TREE_TYPE (target)) == FUNCTION_TYPE
2736 && DECL_FUNCTION_CODE (target) == BUILT_IN_UNREACHABLE)
2737 || !possible_polymorphic_call_target_p
2738 (ie, cgraph_get_node (target)))
2740 if (dump_file)
2741 fprintf (dump_file,
2742 "Type inconsident devirtualization: %s/%i->%s\n",
2743 ie->caller->name (), ie->caller->order,
2744 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
2745 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2746 cgraph_get_create_node (target);
2748 return ipa_make_edge_direct_to_target (ie, target);
2753 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
2755 if (!binfo)
2756 return NULL;
2758 if (TREE_CODE (binfo) != TREE_BINFO)
2760 ipa_polymorphic_call_context context;
2761 vec <cgraph_node *>targets;
2762 bool final;
2764 if (!get_polymorphic_call_info_from_invariant
2765 (&context, binfo, ie->indirect_info->otr_type,
2766 ie->indirect_info->offset))
2767 return NULL;
2768 targets = possible_polymorphic_call_targets
2769 (ie->indirect_info->otr_type,
2770 ie->indirect_info->otr_token,
2771 context, &final);
2772 if (!final || targets.length () > 1)
2773 return NULL;
2774 if (targets.length () == 1)
2775 target = targets[0]->decl;
2776 else
2778 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2779 cgraph_get_create_node (target);
2782 else
2784 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
2785 ie->indirect_info->otr_type);
2786 if (binfo)
2787 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2788 binfo);
2789 else
2790 return NULL;
2793 if (target)
2795 #ifdef ENABLE_CHECKING
2796 gcc_assert (possible_polymorphic_call_target_p
2797 (ie, cgraph_get_node (target)));
2798 #endif
2799 return ipa_make_edge_direct_to_target (ie, target);
2801 else
2802 return NULL;
2805 /* Update the param called notes associated with NODE when CS is being inlined,
2806 assuming NODE is (potentially indirectly) inlined into CS->callee.
2807 Moreover, if the callee is discovered to be constant, create a new cgraph
2808 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2809 unless NEW_EDGES is NULL. Return true iff any new edges were created. */
2811 static bool
2812 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2813 struct cgraph_node *node,
2814 vec<cgraph_edge_p> *new_edges)
2816 struct ipa_edge_args *top;
2817 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
2818 struct ipa_node_params *new_root_info;
2819 bool res = false;
2821 ipa_check_create_edge_args ();
2822 top = IPA_EDGE_REF (cs);
2823 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
2824 ? cs->caller->global.inlined_to
2825 : cs->caller);
2827 for (ie = node->indirect_calls; ie; ie = next_ie)
2829 struct cgraph_indirect_call_info *ici = ie->indirect_info;
2830 struct ipa_jump_func *jfunc;
2831 int param_index;
2833 next_ie = ie->next_callee;
2835 if (ici->param_index == -1)
2836 continue;
2838 /* We must check range due to calls with variable number of arguments: */
2839 if (ici->param_index >= ipa_get_cs_argument_count (top))
2841 ici->param_index = -1;
2842 continue;
2845 param_index = ici->param_index;
2846 jfunc = ipa_get_ith_jump_func (top, param_index);
2848 if (!flag_indirect_inlining)
2849 new_direct_edge = NULL;
2850 else if (ici->polymorphic)
2851 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
2852 new_root_info);
2853 else
2854 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
2855 new_root_info);
2856 /* If speculation was removed, then we need to do nothing. */
2857 if (new_direct_edge && new_direct_edge != ie)
2859 new_direct_edge->indirect_inlining_edge = 1;
2860 top = IPA_EDGE_REF (cs);
2861 res = true;
2863 else if (new_direct_edge)
2865 new_direct_edge->indirect_inlining_edge = 1;
2866 if (new_direct_edge->call_stmt)
2867 new_direct_edge->call_stmt_cannot_inline_p
2868 = !gimple_check_call_matching_types (
2869 new_direct_edge->call_stmt,
2870 new_direct_edge->callee->decl, false);
2871 if (new_edges)
2873 new_edges->safe_push (new_direct_edge);
2874 res = true;
2876 top = IPA_EDGE_REF (cs);
2878 else if (jfunc->type == IPA_JF_PASS_THROUGH
2879 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2881 if (ici->agg_contents
2882 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
2883 ici->param_index = -1;
2884 else
2885 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
2887 else if (jfunc->type == IPA_JF_ANCESTOR)
2889 if (ici->agg_contents
2890 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
2891 ici->param_index = -1;
2892 else
2894 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
2895 if (ipa_get_jf_ancestor_offset (jfunc))
2896 ici->outer_type = NULL;
2897 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
2900 else
2901 /* Either we can find a destination for this edge now or never. */
2902 ici->param_index = -1;
2905 return res;
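/* Editorial example, not part of the original source: suppose the inlined
   body contained an indirect call through its parameter 1, and the inlined
   call site passed the caller's parameter 4 for it as a simple
   pass-through.  The code above then rewrites ici->param_index from 1 to
   4, so the indirect call is now described in terms of the new root's
   parameters and may still become direct in a later round of propagation.  */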
2908 /* Recursively traverse the subtree of NODE (including NODE) made of inlined
2909 cgraph_edges when CS has been inlined and invoke
2910 update_indirect_edges_after_inlining on all nodes and
2911 update_jump_functions_after_inlining on all non-inlined edges that lead out
2912 of this subtree. Newly discovered indirect edges will be added to
2913 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
2914 created. */
2916 static bool
2917 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
2918 struct cgraph_node *node,
2919 vec<cgraph_edge_p> *new_edges)
2921 struct cgraph_edge *e;
2922 bool res;
2924 res = update_indirect_edges_after_inlining (cs, node, new_edges);
2926 for (e = node->callees; e; e = e->next_callee)
2927 if (!e->inline_failed)
2928 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
2929 else
2930 update_jump_functions_after_inlining (cs, e);
2931 for (e = node->indirect_calls; e; e = e->next_callee)
2932 update_jump_functions_after_inlining (cs, e);
2934 return res;
2937 /* Combine two controlled uses counts as done during inlining. */
2939 static int
2940 combine_controlled_uses_counters (int c, int d)
2942 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
2943 return IPA_UNDESCRIBED_USE;
2944 else
2945 return c + d - 1;
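/* Editorial example, not part of the original source: if the caller knew
   of c == 2 controlled uses of a value and the callee of d == 3 uses of
   the corresponding parameter, inlining dissolves the one use that was
   the call argument itself, leaving 2 + 3 - 1 == 4; an IPA_UNDESCRIBED_USE
   on either side is contagious, as the function above implements.  */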
2948 /* Propagate number of controlled users from CS->callee to the new root of the
2949 tree of inlined nodes. */
2951 static void
2952 propagate_controlled_uses (struct cgraph_edge *cs)
2954 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
2955 struct cgraph_node *new_root = cs->caller->global.inlined_to
2956 ? cs->caller->global.inlined_to : cs->caller;
2957 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
2958 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
2959 int count, i;
2961 count = MIN (ipa_get_cs_argument_count (args),
2962 ipa_get_param_count (old_root_info));
2963 for (i = 0; i < count; i++)
2965 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2966 struct ipa_cst_ref_desc *rdesc;
2968 if (jf->type == IPA_JF_PASS_THROUGH)
2970 int src_idx, c, d;
2971 src_idx = ipa_get_jf_pass_through_formal_id (jf);
2972 c = ipa_get_controlled_uses (new_root_info, src_idx);
2973 d = ipa_get_controlled_uses (old_root_info, i);
2975 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
2976 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
2977 c = combine_controlled_uses_counters (c, d);
2978 ipa_set_controlled_uses (new_root_info, src_idx, c);
2979 if (c == 0 && new_root_info->ipcp_orig_node)
2981 struct cgraph_node *n;
2982 struct ipa_ref *ref;
2983 tree t = new_root_info->known_vals[src_idx];
2985 if (t && TREE_CODE (t) == ADDR_EXPR
2986 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
2987 && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
2988 && (ref = ipa_find_reference (new_root,
2989 n, NULL, 0)))
2991 if (dump_file)
2992 fprintf (dump_file, "ipa-prop: Removing cloning-created "
2993 "reference from %s/%i to %s/%i.\n",
2994 xstrdup (new_root->name ()),
2995 new_root->order,
2996 xstrdup (n->name ()), n->order);
2997 ipa_remove_reference (ref);
3001 else if (jf->type == IPA_JF_CONST
3002 && (rdesc = jfunc_rdesc_usable (jf)))
3004 int d = ipa_get_controlled_uses (old_root_info, i);
3005 int c = rdesc->refcount;
3006 rdesc->refcount = combine_controlled_uses_counters (c, d);
3007 if (rdesc->refcount == 0)
3009 tree cst = ipa_get_jf_constant (jf);
3010 struct cgraph_node *n;
3011 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3012 && TREE_CODE (TREE_OPERAND (cst, 0))
3013 == FUNCTION_DECL);
3014 n = cgraph_get_node (TREE_OPERAND (cst, 0));
3015 if (n)
3017 struct cgraph_node *clone;
3018 bool ok;
3019 ok = remove_described_reference (n, rdesc);
3020 gcc_checking_assert (ok);
3022 clone = cs->caller;
3023 while (clone->global.inlined_to
3024 && clone != rdesc->cs->caller
3025 && IPA_NODE_REF (clone)->ipcp_orig_node)
3027 struct ipa_ref *ref;
3028 ref = ipa_find_reference (clone,
3029 n, NULL, 0);
3030 if (ref)
3032 if (dump_file)
3033 fprintf (dump_file, "ipa-prop: Removing "
3034 "cloning-created reference "
3035 "from %s/%i to %s/%i.\n",
3036 xstrdup (clone->name ()),
3037 clone->order,
3038 xstrdup (n->name ()),
3039 n->order);
3040 ipa_remove_reference (ref);
3042 clone = clone->callers->caller;
3049 for (i = ipa_get_param_count (old_root_info);
3050 i < ipa_get_cs_argument_count (args);
3051 i++)
3053 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3055 if (jf->type == IPA_JF_CONST)
3057 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3058 if (rdesc)
3059 rdesc->refcount = IPA_UNDESCRIBED_USE;
3061 else if (jf->type == IPA_JF_PASS_THROUGH)
3062 ipa_set_controlled_uses (new_root_info,
3063 jf->value.pass_through.formal_id,
3064 IPA_UNDESCRIBED_USE);
3068 /* Update jump functions and call note functions on inlining the call site CS.
3069 CS is expected to lead to a node already cloned by
3070 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3071 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
3072 created. */
3074 bool
3075 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3076 vec<cgraph_edge_p> *new_edges)
3078 bool changed;
3079 /* Do nothing if the preparation phase has not been carried out yet
3080 (i.e. during early inlining). */
3081 if (!ipa_node_params_vector.exists ())
3082 return false;
3083 gcc_assert (ipa_edge_args_vector);
3085 propagate_controlled_uses (cs);
3086 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3088 return changed;
3091 /* Frees all dynamically allocated structures that the argument info points
3092 to. */
3094 void
3095 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3097 vec_free (args->jump_functions);
3098 memset (args, 0, sizeof (*args));
3101 /* Free all ipa_edge structures. */
3103 void
3104 ipa_free_all_edge_args (void)
3106 int i;
3107 struct ipa_edge_args *args;
3109 if (!ipa_edge_args_vector)
3110 return;
3112 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3113 ipa_free_edge_args_substructures (args);
3115 vec_free (ipa_edge_args_vector);
3118 /* Frees all dynamically allocated structures that the param info points
3119 to. */
3121 void
3122 ipa_free_node_params_substructures (struct ipa_node_params *info)
3124 info->descriptors.release ();
3125 free (info->lattices);
3126 /* Lattice values and their sources are deallocated with their allocation
3127 pool. */
3128 info->known_vals.release ();
3129 memset (info, 0, sizeof (*info));
3132 /* Free all ipa_node_params structures. */
3134 void
3135 ipa_free_all_node_params (void)
3137 int i;
3138 struct ipa_node_params *info;
3140 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
3141 ipa_free_node_params_substructures (info);
3143 ipa_node_params_vector.release ();
3146 /* Set the aggregate replacements of NODE to be AGGVALS. */
3148 void
3149 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3150 struct ipa_agg_replacement_value *aggvals)
3152 if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
3153 vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);
3155 (*ipa_node_agg_replacements)[node->uid] = aggvals;
3158 /* Hook that is called by cgraph.c when an edge is removed. */
3160 static void
3161 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3163 struct ipa_edge_args *args;
3165 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3166 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3167 return;
3169 args = IPA_EDGE_REF (cs);
3170 if (args->jump_functions)
3172 struct ipa_jump_func *jf;
3173 int i;
3174 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3176 struct ipa_cst_ref_desc *rdesc;
3177 try_decrement_rdesc_refcount (jf);
3178 if (jf->type == IPA_JF_CONST
3179 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3180 && rdesc->cs == cs)
3181 rdesc->cs = NULL;
3185 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3188 /* Hook that is called by cgraph.c when a node is removed. */
3190 static void
3191 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3193 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3194 if (ipa_node_params_vector.length () > (unsigned)node->uid)
3195 ipa_free_node_params_substructures (IPA_NODE_REF (node));
3196 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3197 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
3200 /* Hook that is called by cgraph.c when an edge is duplicated. */
3202 static void
3203 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3204 __attribute__((unused)) void *data)
3206 struct ipa_edge_args *old_args, *new_args;
3207 unsigned int i;
3209 ipa_check_create_edge_args ();
3211 old_args = IPA_EDGE_REF (src);
3212 new_args = IPA_EDGE_REF (dst);
3214 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3216 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3218 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3219 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3221 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3223 if (src_jf->type == IPA_JF_CONST)
3225 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3227 if (!src_rdesc)
3228 dst_jf->value.constant.rdesc = NULL;
3229 else if (src->caller == dst->caller)
3231 struct ipa_ref *ref;
3232 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3233 gcc_checking_assert (n);
3234 ref = ipa_find_reference (src->caller, n,
3235 src->call_stmt, src->lto_stmt_uid);
3236 gcc_checking_assert (ref);
3237 ipa_clone_ref (ref, dst->caller, ref->stmt);
3239 gcc_checking_assert (ipa_refdesc_pool);
3240 struct ipa_cst_ref_desc *dst_rdesc
3241 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3242 dst_rdesc->cs = dst;
3243 dst_rdesc->refcount = src_rdesc->refcount;
3244 dst_rdesc->next_duplicate = NULL;
3245 dst_jf->value.constant.rdesc = dst_rdesc;
3247 else if (src_rdesc->cs == src)
3249 struct ipa_cst_ref_desc *dst_rdesc;
3250 gcc_checking_assert (ipa_refdesc_pool);
3251 dst_rdesc
3252 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3253 dst_rdesc->cs = dst;
3254 dst_rdesc->refcount = src_rdesc->refcount;
3255 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3256 src_rdesc->next_duplicate = dst_rdesc;
3257 dst_jf->value.constant.rdesc = dst_rdesc;
3259 else
3261 struct ipa_cst_ref_desc *dst_rdesc;
3262 /* This can happen during inlining, when a JFUNC can refer to a
3263 reference taken in a function up in the tree of inline clones.
3264 We need to find the duplicate that refers to our tree of
3265 inline clones. */
3267 gcc_assert (dst->caller->global.inlined_to);
3268 for (dst_rdesc = src_rdesc->next_duplicate;
3269 dst_rdesc;
3270 dst_rdesc = dst_rdesc->next_duplicate)
3272 struct cgraph_node *top;
3273 top = dst_rdesc->cs->caller->global.inlined_to
3274 ? dst_rdesc->cs->caller->global.inlined_to
3275 : dst_rdesc->cs->caller;
3276 if (dst->caller->global.inlined_to == top)
3277 break;
3279 gcc_assert (dst_rdesc);
3280 dst_jf->value.constant.rdesc = dst_rdesc;
3286 /* Hook that is called by cgraph.c when a node is duplicated. */
3288 static void
3289 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
3290 ATTRIBUTE_UNUSED void *data)
3292 struct ipa_node_params *old_info, *new_info;
3293 struct ipa_agg_replacement_value *old_av, *new_av;
3295 ipa_check_create_node_params ();
3296 old_info = IPA_NODE_REF (src);
3297 new_info = IPA_NODE_REF (dst);
3299 new_info->descriptors = old_info->descriptors.copy ();
3300 new_info->lattices = NULL;
3301 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3303 new_info->uses_analysis_done = old_info->uses_analysis_done;
3304 new_info->node_enqueued = old_info->node_enqueued;
3306 old_av = ipa_get_agg_replacements_for_node (src);
3307 if (!old_av)
3308 return;
3310 new_av = NULL;
3311 while (old_av)
3313 struct ipa_agg_replacement_value *v;
3315 v = ggc_alloc_ipa_agg_replacement_value ();
3316 memcpy (v, old_av, sizeof (*v));
3317 v->next = new_av;
3318 new_av = v;
3319 old_av = old_av->next;
3321 ipa_set_node_agg_value_chain (dst, new_av);
3325 /* Analyze a function newly added to the callgraph. */
3327 static void
3328 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3330 if (cgraph_function_with_gimple_body_p (node))
3331 ipa_analyze_node (node);
3334 /* Register our cgraph hooks if they are not already there. */
3336 void
3337 ipa_register_cgraph_hooks (void)
3339 if (!edge_removal_hook_holder)
3340 edge_removal_hook_holder =
3341 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3342 if (!node_removal_hook_holder)
3343 node_removal_hook_holder =
3344 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
3345 if (!edge_duplication_hook_holder)
3346 edge_duplication_hook_holder =
3347 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3348 if (!node_duplication_hook_holder)
3349 node_duplication_hook_holder =
3350 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
3351 function_insertion_hook_holder =
3352 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
3355 /* Unregister our cgraph hooks. */
3357 static void
3358 ipa_unregister_cgraph_hooks (void)
3360 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
3361 edge_removal_hook_holder = NULL;
3362 cgraph_remove_node_removal_hook (node_removal_hook_holder);
3363 node_removal_hook_holder = NULL;
3364 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
3365 edge_duplication_hook_holder = NULL;
3366 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
3367 node_duplication_hook_holder = NULL;
3368 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
3369 function_insertion_hook_holder = NULL;
3372 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3373 longer needed after ipa-cp. */
3375 void
3376 ipa_free_all_structures_after_ipa_cp (void)
3378 if (!optimize)
3380 ipa_free_all_edge_args ();
3381 ipa_free_all_node_params ();
3382 free_alloc_pool (ipcp_sources_pool);
3383 free_alloc_pool (ipcp_values_pool);
3384 free_alloc_pool (ipcp_agg_lattice_pool);
3385 ipa_unregister_cgraph_hooks ();
3386 if (ipa_refdesc_pool)
3387 free_alloc_pool (ipa_refdesc_pool);
3391 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3392 longer needed after indirect inlining. */
3394 void
3395 ipa_free_all_structures_after_iinln (void)
3397 ipa_free_all_edge_args ();
3398 ipa_free_all_node_params ();
3399 ipa_unregister_cgraph_hooks ();
3400 if (ipcp_sources_pool)
3401 free_alloc_pool (ipcp_sources_pool);
3402 if (ipcp_values_pool)
3403 free_alloc_pool (ipcp_values_pool);
3404 if (ipcp_agg_lattice_pool)
3405 free_alloc_pool (ipcp_agg_lattice_pool);
3406 if (ipa_refdesc_pool)
3407 free_alloc_pool (ipa_refdesc_pool);
3410 /* Print the parameter descriptors of function NODE to F. */
3413 void
3414 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3416 int i, count;
3417 struct ipa_node_params *info;
3419 if (!node->definition)
3420 return;
3421 info = IPA_NODE_REF (node);
3422 fprintf (f, " function %s/%i parameter descriptors:\n",
3423 node->name (), node->order);
3424 count = ipa_get_param_count (info);
3425 for (i = 0; i < count; i++)
3427 int c;
3429 fprintf (f, " ");
3430 ipa_dump_param (f, info, i);
3431 if (ipa_is_param_used (info, i))
3432 fprintf (f, " used");
3433 c = ipa_get_controlled_uses (info, i);
3434 if (c == IPA_UNDESCRIBED_USE)
3435 fprintf (f, " undescribed_use");
3436 else
3437 fprintf (f, " controlled_uses=%i", c);
3438 fprintf (f, "\n");
3442 /* Print the parameter descriptors of all functions in the
3443 callgraph to F. */
3445 void
3446 ipa_print_all_params (FILE * f)
3448 struct cgraph_node *node;
3450 fprintf (f, "\nFunction parameters:\n");
3451 FOR_EACH_FUNCTION (node)
3452 ipa_print_node_params (f, node);
3455 /* Return a heap-allocated vector containing the formal parameters of FNDECL. */
3457 vec<tree>
3458 ipa_get_vector_of_formal_parms (tree fndecl)
3460 vec<tree> args;
3461 int count;
3462 tree parm;
3464 gcc_assert (!flag_wpa);
3465 count = count_formal_params (fndecl);
3466 args.create (count);
3467 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3468 args.quick_push (parm);
3470 return args;
3473 /* Return a heap-allocated vector containing the types of formal parameters of
3474 function type FNTYPE. */
3476 vec<tree>
3477 ipa_get_vector_of_formal_parm_types (tree fntype)
3479 vec<tree> types;
3480 int count = 0;
3481 tree t;
3483 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3484 count++;
3486 types.create (count);
3487 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3488 types.quick_push (TREE_VALUE (t));
3490 return types;
3493 /* Modify the function declaration FNDECL and its type according to the plan in
3494 ADJUSTMENTS. It also sets base fields of individual adjustment structures
3495 to reflect the actual parameters being modified, which are determined by the
3496 base_index field. */
3498 void
3499 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3501 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3502 tree orig_type = TREE_TYPE (fndecl);
3503 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3505 /* The following test is an ugly hack: some functions simply don't have any
3506 arguments in their type. This is probably a bug but well... */
3507 bool care_for_types = (old_arg_types != NULL_TREE);
3508 bool last_parm_void;
3509 vec<tree> otypes;
3510 if (care_for_types)
3512 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3513 == void_type_node);
3514 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3515 if (last_parm_void)
3516 gcc_assert (oparms.length () + 1 == otypes.length ());
3517 else
3518 gcc_assert (oparms.length () == otypes.length ());
3520 else
3522 last_parm_void = false;
3523 otypes.create (0);
3526 int len = adjustments.length ();
3527 tree *link = &DECL_ARGUMENTS (fndecl);
3528 tree new_arg_types = NULL;
3529 for (int i = 0; i < len; i++)
3531 struct ipa_parm_adjustment *adj;
3532 gcc_assert (link);
3534 adj = &adjustments[i];
3535 tree parm;
3536 if (adj->op == IPA_PARM_OP_NEW)
3537 parm = NULL;
3538 else
3539 parm = oparms[adj->base_index];
3540 adj->base = parm;
3542 if (adj->op == IPA_PARM_OP_COPY)
3544 if (care_for_types)
3545 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3546 new_arg_types);
3547 *link = parm;
3548 link = &DECL_CHAIN (parm);
3550 else if (adj->op != IPA_PARM_OP_REMOVE)
3552 tree new_parm;
3553 tree ptype;
3555 if (adj->by_ref)
3556 ptype = build_pointer_type (adj->type);
3557 else
3559 ptype = adj->type;
3560 if (is_gimple_reg_type (ptype))
3562 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3563 if (TYPE_ALIGN (ptype) < malign)
3564 ptype = build_aligned_type (ptype, malign);
3568 if (care_for_types)
3569 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3571 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3572 ptype);
3573 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
3574 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3575 DECL_ARTIFICIAL (new_parm) = 1;
3576 DECL_ARG_TYPE (new_parm) = ptype;
3577 DECL_CONTEXT (new_parm) = fndecl;
3578 TREE_USED (new_parm) = 1;
3579 DECL_IGNORED_P (new_parm) = 1;
3580 layout_decl (new_parm, 0);
3582 if (adj->op == IPA_PARM_OP_NEW)
3583 adj->base = NULL;
3584 else
3585 adj->base = parm;
3586 adj->new_decl = new_parm;
3588 *link = new_parm;
3589 link = &DECL_CHAIN (new_parm);
3593 *link = NULL_TREE;
3595 tree new_reversed = NULL;
3596 if (care_for_types)
3598 new_reversed = nreverse (new_arg_types);
3599 if (last_parm_void)
3601 if (new_reversed)
3602 TREE_CHAIN (new_arg_types) = void_list_node;
3603 else
3604 new_reversed = void_list_node;
3608 /* Use copy_node to preserve as much as possible from the original type
3609 (debug info, attribute lists etc.).
3610 The exception is METHOD_TYPEs, which must have a THIS argument.
3611 When we are asked to remove it, we need to build a new FUNCTION_TYPE
3612 instead. */
3613 tree new_type = NULL;
3614 if (TREE_CODE (orig_type) != METHOD_TYPE
3615 || (adjustments[0].op == IPA_PARM_OP_COPY
3616 && adjustments[0].base_index == 0))
3618 new_type = build_distinct_type_copy (orig_type);
3619 TYPE_ARG_TYPES (new_type) = new_reversed;
3621 else
3623 new_type
3624 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3625 new_reversed));
3626 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3627 DECL_VINDEX (fndecl) = NULL_TREE;
3630 /* When signature changes, we need to clear builtin info. */
3631 if (DECL_BUILT_IN (fndecl))
3633 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3634 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3637 /* This is a new type, not a copy of an old type. Need to reassociate
3638 variants. We can handle everything except the main variant lazily. */
3639 tree t = TYPE_MAIN_VARIANT (orig_type);
3640 if (orig_type != t)
3642 TYPE_MAIN_VARIANT (new_type) = t;
3643 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
3644 TYPE_NEXT_VARIANT (t) = new_type;
3646 else
3648 TYPE_MAIN_VARIANT (new_type) = new_type;
3649 TYPE_NEXT_VARIANT (new_type) = NULL;
3652 TREE_TYPE (fndecl) = new_type;
3653 DECL_VIRTUAL_P (fndecl) = 0;
3654 otypes.release ();
3655 oparms.release ();
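/* Editorial sketch, not part of the original source: given a hypothetical
   original declaration

     int f (int a, struct S *p);    // the callee only reads p->x

   an adjustment vector of { op IPA_PARM_OP_COPY, base_index 0 } followed
   by a new component parameter { base_index 1, type int, by_ref false }
   makes the function above rewrite the declaration to

     int f (int a, int SYNTH.N);

   where SYNTH.N is the synthesized PARM_DECL built here ("SYNTH" being
   the default prefix used when adj->arg_prefix is not supplied).  */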
3658 /* Modify actual arguments of the call statement STMT as indicated in ADJUSTMENTS.
3659 If this is a directly recursive call, CS must be NULL. Otherwise it must
3660 contain the corresponding call graph edge. */
3662 void
3663 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
3664 ipa_parm_adjustment_vec adjustments)
3666 struct cgraph_node *current_node = cgraph_get_node (current_function_decl);
3667 vec<tree> vargs;
3668 vec<tree, va_gc> **debug_args = NULL;
3669 gimple new_stmt;
3670 gimple_stmt_iterator gsi, prev_gsi;
3671 tree callee_decl;
3672 int i, len;
3674 len = adjustments.length ();
3675 vargs.create (len);
3676 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
3677 ipa_remove_stmt_references (current_node, stmt);
3679 gsi = gsi_for_stmt (stmt);
3680 prev_gsi = gsi;
3681 gsi_prev (&prev_gsi);
3682 for (i = 0; i < len; i++)
3684 struct ipa_parm_adjustment *adj;
3686 adj = &adjustments[i];
3688 if (adj->op == IPA_PARM_OP_COPY)
3690 tree arg = gimple_call_arg (stmt, adj->base_index);
3692 vargs.quick_push (arg);
3694 else if (adj->op != IPA_PARM_OP_REMOVE)
3696 tree expr, base, off;
3697 location_t loc;
3698 unsigned int deref_align = 0;
3699 bool deref_base = false;
3701 /* We create a new parameter out of the value of the old one; we can
3702 do the following kinds of transformations:
3704 - A scalar passed by reference is converted to a scalar passed by
3705 value. (adj->by_ref is false and the type of the original
3706 actual argument is a pointer to a scalar).
3708 - A part of an aggregate is passed instead of the whole aggregate.
3709 The part can be passed either by value or by reference, this is
3710 determined by value of adj->by_ref. Moreover, the code below
3711 handles both situations when the original aggregate is passed by
3712 value (its type is not a pointer) and when it is passed by
3713 reference (it is a pointer to an aggregate).
3715 When the new argument is passed by reference (adj->by_ref is true)
3716 it must be a part of an aggregate and therefore we form it by
3717 simply taking the address of a reference inside the original
3718 aggregate. */
3720 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3721 base = gimple_call_arg (stmt, adj->base_index);
3722 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3723 : EXPR_LOCATION (base);
3725 if (TREE_CODE (base) != ADDR_EXPR
3726 && POINTER_TYPE_P (TREE_TYPE (base)))
3727 off = build_int_cst (adj->alias_ptr_type,
3728 adj->offset / BITS_PER_UNIT);
3729 else
3731 HOST_WIDE_INT base_offset;
3732 tree prev_base;
3733 bool addrof;
3735 if (TREE_CODE (base) == ADDR_EXPR)
3737 base = TREE_OPERAND (base, 0);
3738 addrof = true;
3740 else
3741 addrof = false;
3742 prev_base = base;
3743 base = get_addr_base_and_unit_offset (base, &base_offset);
3744 /* Aggregate arguments can have non-invariant addresses. */
3745 if (!base)
3747 base = build_fold_addr_expr (prev_base);
3748 off = build_int_cst (adj->alias_ptr_type,
3749 adj->offset / BITS_PER_UNIT);
3751 else if (TREE_CODE (base) == MEM_REF)
3753 if (!addrof)
3755 deref_base = true;
3756 deref_align = TYPE_ALIGN (TREE_TYPE (base));
3758 off = build_int_cst (adj->alias_ptr_type,
3759 base_offset
3760 + adj->offset / BITS_PER_UNIT);
3761 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
3762 off);
3763 base = TREE_OPERAND (base, 0);
3765 else
3767 off = build_int_cst (adj->alias_ptr_type,
3768 base_offset
3769 + adj->offset / BITS_PER_UNIT);
3770 base = build_fold_addr_expr (base);
3774 if (!adj->by_ref)
3776 tree type = adj->type;
3777 unsigned int align;
3778 unsigned HOST_WIDE_INT misalign;
3780 if (deref_base)
3782 align = deref_align;
3783 misalign = 0;
3785 else
3787 get_pointer_alignment_1 (base, &align, &misalign);
3788 if (TYPE_ALIGN (type) > align)
3789 align = TYPE_ALIGN (type);
3791 misalign += (tree_to_double_int (off)
3792 .sext (TYPE_PRECISION (TREE_TYPE (off))).low
3793 * BITS_PER_UNIT);
3794 misalign = misalign & (align - 1);
3795 if (misalign != 0)
3796 align = (misalign & -misalign);
3797 if (align < TYPE_ALIGN (type))
3798 type = build_aligned_type (type, align);
3799 base = force_gimple_operand_gsi (&gsi, base,
3800 true, NULL, true, GSI_SAME_STMT);
3801 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
3802 /* If expr is not a valid gimple call argument, emit
3803 a load into a temporary. */
3804 if (is_gimple_reg_type (TREE_TYPE (expr)))
3806 gimple tem = gimple_build_assign (NULL_TREE, expr);
3807 if (gimple_in_ssa_p (cfun))
3809 gimple_set_vuse (tem, gimple_vuse (stmt));
3810 expr = make_ssa_name (TREE_TYPE (expr), tem);
3812 else
3813 expr = create_tmp_reg (TREE_TYPE (expr), NULL);
3814 gimple_assign_set_lhs (tem, expr);
3815 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
3818 else
3820 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
3821 expr = build_fold_addr_expr (expr);
3822 expr = force_gimple_operand_gsi (&gsi, expr,
3823 true, NULL, true, GSI_SAME_STMT);
3825 vargs.quick_push (expr);
3827 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
3829 unsigned int ix;
3830 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
3831 gimple def_temp;
3833 arg = gimple_call_arg (stmt, adj->base_index);
3834 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
3836 if (!fold_convertible_p (TREE_TYPE (origin), arg))
3837 continue;
3838 arg = fold_convert_loc (gimple_location (stmt),
3839 TREE_TYPE (origin), arg);
3841 if (debug_args == NULL)
3842 debug_args = decl_debug_args_insert (callee_decl);
3843 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
3844 if (ddecl == origin)
3846 ddecl = (**debug_args)[ix + 1];
3847 break;
3849 if (ddecl == NULL)
3851 ddecl = make_node (DEBUG_EXPR_DECL);
3852 DECL_ARTIFICIAL (ddecl) = 1;
3853 TREE_TYPE (ddecl) = TREE_TYPE (origin);
3854 DECL_MODE (ddecl) = DECL_MODE (origin);
3856 vec_safe_push (*debug_args, origin);
3857 vec_safe_push (*debug_args, ddecl);
3859 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
3860 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
3864 if (dump_file && (dump_flags & TDF_DETAILS))
3866 fprintf (dump_file, "replacing stmt:");
3867 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
3870 new_stmt = gimple_build_call_vec (callee_decl, vargs);
3871 vargs.release ();
3872 if (gimple_call_lhs (stmt))
3873 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3875 gimple_set_block (new_stmt, gimple_block (stmt));
3876 if (gimple_has_location (stmt))
3877 gimple_set_location (new_stmt, gimple_location (stmt));
3878 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3879 gimple_call_copy_flags (new_stmt, stmt);
3880 if (gimple_in_ssa_p (cfun))
3882 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3883 if (gimple_vdef (stmt))
3885 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3886 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
3890 if (dump_file && (dump_flags & TDF_DETAILS))
3892 fprintf (dump_file, "with stmt:");
3893 print_gimple_stmt (dump_file, new_stmt, 0, 0);
3894 fprintf (dump_file, "\n");
3896 gsi_replace (&gsi, new_stmt, true);
3897 if (cs)
3898 cgraph_set_call_stmt (cs, new_stmt);
3901 ipa_record_stmt_references (current_node, gsi_stmt (gsi));
3902 gsi_prev (&gsi);
3904 while ((gsi_end_p (prev_gsi) && !gsi_end_p (gsi))
3905 || (!gsi_end_p (prev_gsi) && gsi_stmt (gsi) == gsi_stmt (prev_gsi)));
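/* Editorial sketch, not part of GCC: a minimal caller of
   ipa_modify_call_arguments.  Assuming STMT is a directly recursive call
   (hence the NULL CS) with at least two actual arguments, the vector
   below keeps the first argument and drops the second.  Kept under
   #if 0; the function name is hypothetical.  */
#if 0
static void
example_drop_second_argument (gimple stmt)
{
  ipa_parm_adjustment_vec adjustments;
  struct ipa_parm_adjustment keep, drop;

  memset (&keep, 0, sizeof (keep));
  keep.op = IPA_PARM_OP_COPY;	/* Forward argument 0 unchanged.  */
  keep.base_index = 0;

  memset (&drop, 0, sizeof (drop));
  drop.op = IPA_PARM_OP_REMOVE;	/* Argument 1 is not passed anymore.  */
  drop.base_index = 1;

  adjustments.create (2);
  adjustments.quick_push (keep);
  adjustments.quick_push (drop);
  ipa_modify_call_arguments (NULL, stmt, adjustments);
  adjustments.release ();
}
#endif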
3908 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
3909 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
3910 specifies whether the function should care about type incompatibility between the
3911 current and new expressions. If it is false, the function will leave
3912 incompatibility issues to the caller. Return true iff the expression
3913 was modified. */
3915 bool
3916 ipa_modify_expr (tree *expr, bool convert,
3917 ipa_parm_adjustment_vec adjustments)
3919 struct ipa_parm_adjustment *cand
3920 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
3921 if (!cand)
3922 return false;
3924 tree src;
3925 if (cand->by_ref)
3926 src = build_simple_mem_ref (cand->new_decl);
3927 else
3928 src = cand->new_decl;
3930 if (dump_file && (dump_flags & TDF_DETAILS))
3932 fprintf (dump_file, "About to replace expr ");
3933 print_generic_expr (dump_file, *expr, 0);
3934 fprintf (dump_file, " with ");
3935 print_generic_expr (dump_file, src, 0);
3936 fprintf (dump_file, "\n");
3939 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
3941 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
3942 *expr = vce;
3944 else
3945 *expr = src;
3946 return true;
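/* Editorial sketch, not part of GCC: using ipa_modify_expr on the RHS of
   an assignment.  The helper name is hypothetical; gimple_assign_rhs1_ptr
   and update_stmt are the standard GIMPLE accessors.  */
#if 0
static bool
example_rewrite_assign_rhs (gimple stmt, ipa_parm_adjustment_vec adjustments)
{
  /* Point at the RHS slot so ipa_modify_expr can overwrite it in place;
     CONVERT is true, so a VIEW_CONVERT_EXPR is added on type mismatch.  */
  bool changed = ipa_modify_expr (gimple_assign_rhs1_ptr (stmt), true,
				  adjustments);
  if (changed)
    update_stmt (stmt);
  return changed;
}
#endif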
3949 /* If T is an SSA_NAME, return NULL if it is not a default def or
3950 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
3951 the base variable is always returned, regardless of whether it is a
3952 default def. Return T if it is not an SSA_NAME. */
3954 static tree
3955 get_ssa_base_param (tree t, bool ignore_default_def)
3957 if (TREE_CODE (t) == SSA_NAME)
3959 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
3960 return SSA_NAME_VAR (t);
3961 else
3962 return NULL_TREE;
3964 return t;
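/* Editorial sketch, not part of GCC: get_ssa_base_param maps the default
   definition of a parameter back to its PARM_DECL, so a predicate like
   the hypothetical one below recognizes "the incoming value of some
   parameter".  */
#if 0
static bool
example_is_incoming_param_value (tree t)
{
  tree base = get_ssa_base_param (t, false);
  return base != NULL_TREE && TREE_CODE (base) == PARM_DECL;
}
#endif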
3967 /* Given an expression, return an adjustment entry specifying the
3968 transformation to be done on EXPR. If no suitable adjustment entry
3969 was found, returns NULL.
3971 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs that are not a
3972 default def; otherwise bail out on them.
3974 If CONVERT is non-NULL, this function will set *CONVERT if the
3975 expression provided is a component reference. ADJUSTMENTS is the
3976 adjustments vector. */
3978 ipa_parm_adjustment *
3979 ipa_get_adjustment_candidate (tree **expr, bool *convert,
3980 ipa_parm_adjustment_vec adjustments,
3981 bool ignore_default_def)
3983 if (TREE_CODE (**expr) == BIT_FIELD_REF
3984 || TREE_CODE (**expr) == IMAGPART_EXPR
3985 || TREE_CODE (**expr) == REALPART_EXPR)
3987 *expr = &TREE_OPERAND (**expr, 0);
3988 if (convert)
3989 *convert = true;
3992 HOST_WIDE_INT offset, size, max_size;
3993 tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
3994 if (!base || size == -1 || max_size == -1)
3995 return NULL;
3997 if (TREE_CODE (base) == MEM_REF)
3999 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
4000 base = TREE_OPERAND (base, 0);
4003 base = get_ssa_base_param (base, ignore_default_def);
4004 if (!base || TREE_CODE (base) != PARM_DECL)
4005 return NULL;
4007 struct ipa_parm_adjustment *cand = NULL;
4008 unsigned int len = adjustments.length ();
4009 for (unsigned i = 0; i < len; i++)
4011 struct ipa_parm_adjustment *adj = &adjustments[i];
4013 if (adj->base == base
4014 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4016 cand = adj;
4017 break;
4021 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4022 return NULL;
4023 return cand;
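/* Editorial sketch, not part of GCC: a hypothetical wrapper showing the
   calling convention of ipa_get_adjustment_candidate.  On success,
   *CONVERT tells the caller whether the replacement must be wrapped in a
   VIEW_CONVERT_EXPR because a BIT_FIELD_REF, IMAGPART_EXPR or
   REALPART_EXPR was stripped.  */
#if 0
static struct ipa_parm_adjustment *
example_find_adjustment (tree *expr_p, bool *convert,
			 ipa_parm_adjustment_vec adjustments)
{
  *convert = false;
  return ipa_get_adjustment_candidate (&expr_p, convert, adjustments, false);
}
#endif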
4026 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4028 static bool
4029 index_in_adjustments_multiple_times_p (int base_index,
4030 ipa_parm_adjustment_vec adjustments)
4032 int i, len = adjustments.length ();
4033 bool one = false;
4035 for (i = 0; i < len; i++)
4037 struct ipa_parm_adjustment *adj;
4038 adj = &adjustments[i];
4040 if (adj->base_index == base_index)
4042 if (one)
4043 return true;
4044 else
4045 one = true;
4048 return false;
4052 /* Return adjustments that should have the same effect on function parameters
4053 and call arguments as if they were first changed according to adjustments in
4054 INNER and then by adjustments in OUTER. */
4056 ipa_parm_adjustment_vec
4057 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4058 ipa_parm_adjustment_vec outer)
4060 int i, outlen = outer.length ();
4061 int inlen = inner.length ();
4062 int removals = 0;
4063 ipa_parm_adjustment_vec adjustments, tmp;
4065 tmp.create (inlen);
4066 for (i = 0; i < inlen; i++)
4068 struct ipa_parm_adjustment *n;
4069 n = &inner[i];
4071 if (n->op == IPA_PARM_OP_REMOVE)
4072 removals++;
4073 else
4075 /* FIXME: Handling of new arguments is not implemented yet. */
4076 gcc_assert (n->op != IPA_PARM_OP_NEW);
4077 tmp.quick_push (*n);
4081 adjustments.create (outlen + removals);
4082 for (i = 0; i < outlen; i++)
4084 struct ipa_parm_adjustment r;
4085 struct ipa_parm_adjustment *out = &outer[i];
4086 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4088 memset (&r, 0, sizeof (r));
4089 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4090 if (out->op == IPA_PARM_OP_REMOVE)
4092 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4094 r.op = IPA_PARM_OP_REMOVE;
4095 adjustments.quick_push (r);
4097 continue;
4099 else
4101 /* FIXME: Handling of new arguments is not implemented yet. */
4102 gcc_assert (out->op != IPA_PARM_OP_NEW);
4105 r.base_index = in->base_index;
4106 r.type = out->type;
4108 /* FIXME: Create nonlocal value too. */
4110 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4111 r.op = IPA_PARM_OP_COPY;
4112 else if (in->op == IPA_PARM_OP_COPY)
4113 r.offset = out->offset;
4114 else if (out->op == IPA_PARM_OP_COPY)
4115 r.offset = in->offset;
4116 else
4117 r.offset = in->offset + out->offset;
4118 adjustments.quick_push (r);
4121 for (i = 0; i < inlen; i++)
4123 struct ipa_parm_adjustment *n = &inner[i];
4125 if (n->op == IPA_PARM_OP_REMOVE)
4126 adjustments.quick_push (*n);
4129 tmp.release ();
4130 return adjustments;
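/* Editorial sketch, not part of GCC: composing two adjustment vectors.
   If INNER removes one parameter and OUTER removes another from the
   already-reduced list, the vector returned by ipa_combine_adjustments
   must describe both removals relative to the original parameter list.
   The helper name is hypothetical.  */
#if 0
static ipa_parm_adjustment_vec
example_compose_adjustments (ipa_parm_adjustment_vec inner,
			     ipa_parm_adjustment_vec outer)
{
  ipa_parm_adjustment_vec combined = ipa_combine_adjustments (inner, outer);
  /* The caller owns the result and must eventually release it.  */
  return combined;
}
#endif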
4133 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a human-
4134 friendly way, assuming they are meant to be applied to FNDECL. */
4136 void
4137 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4138 tree fndecl)
4140 int i, len = adjustments.length ();
4141 bool first = true;
4142 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4144 fprintf (file, "IPA param adjustments: ");
4145 for (i = 0; i < len; i++)
4147 struct ipa_parm_adjustment *adj;
4148 adj = &adjustments[i];
4150 if (!first)
4151 fprintf (file, " ");
4152 else
4153 first = false;
4155 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4156 print_generic_expr (file, parms[adj->base_index], 0);
4157 if (adj->base)
4159 fprintf (file, ", base: ");
4160 print_generic_expr (file, adj->base, 0);
4162 if (adj->new_decl)
4164 fprintf (file, ", new_decl: ");
4165 print_generic_expr (file, adj->new_decl, 0);
4167 if (adj->new_ssa_base)
4169 fprintf (file, ", new_ssa_base: ");
4170 print_generic_expr (file, adj->new_ssa_base, 0);
4173 if (adj->op == IPA_PARM_OP_COPY)
4174 fprintf (file, ", copy_param");
4175 else if (adj->op == IPA_PARM_OP_REMOVE)
4176 fprintf (file, ", remove_param");
4177 else
4178 fprintf (file, ", offset %li", (long) adj->offset);
4179 if (adj->by_ref)
4180 fprintf (file, ", by_ref");
4181 print_node_brief (file, ", type: ", adj->type, 0);
4182 fprintf (file, "\n");
4184 parms.release ();
4187 /* Dump the linked list of aggregate replacement values AV to F. */
4189 void
4190 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4192 bool comma = false;
4193 fprintf (f, " Aggregate replacements:");
4194 for (; av; av = av->next)
4196 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4197 av->index, av->offset);
4198 print_generic_expr (f, av->value, 0);
4199 comma = true;
4201 fprintf (f, "\n");
4204 /* Stream out jump function JUMP_FUNC to OB. */
4206 static void
4207 ipa_write_jump_function (struct output_block *ob,
4208 struct ipa_jump_func *jump_func)
4210 struct ipa_agg_jf_item *item;
4211 struct bitpack_d bp;
4212 int i, count;
4214 streamer_write_uhwi (ob, jump_func->type);
4215 switch (jump_func->type)
4217 case IPA_JF_UNKNOWN:
4218 break;
4219 case IPA_JF_KNOWN_TYPE:
4220 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
4221 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
4222 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
4223 break;
4224 case IPA_JF_CONST:
4225 gcc_assert (
4226 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4227 stream_write_tree (ob, jump_func->value.constant.value, true);
4228 break;
4229 case IPA_JF_PASS_THROUGH:
4230 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4231 if (jump_func->value.pass_through.operation == NOP_EXPR)
4233 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4234 bp = bitpack_create (ob->main_stream);
4235 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4236 bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
4237 streamer_write_bitpack (&bp);
4239 else
4241 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4242 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4244 break;
4245 case IPA_JF_ANCESTOR:
4246 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4247 stream_write_tree (ob, jump_func->value.ancestor.type, true);
4248 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4249 bp = bitpack_create (ob->main_stream);
4250 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4251 bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
4252 streamer_write_bitpack (&bp);
4253 break;
4256 count = vec_safe_length (jump_func->agg.items);
4257 streamer_write_uhwi (ob, count);
4258 if (count)
4260 bp = bitpack_create (ob->main_stream);
4261 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4262 streamer_write_bitpack (&bp);
4265 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4267 streamer_write_uhwi (ob, item->offset);
4268 stream_write_tree (ob, item->value, true);
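/* Editorial sketch, not part of GCC: streaming a freshly built jump
   function.  With no aggregate items, ipa_write_jump_function emits a
   zero item count and no by_ref bitpack, which is exactly what
   ipa_read_jump_function expects on the other side.  The function name
   is hypothetical.  */
#if 0
static void
example_write_pass_through (struct output_block *ob)
{
  struct ipa_jump_func jf;

  memset (&jf, 0, sizeof (jf));	/* Leaves jf.agg.items NULL.  */
  ipa_set_jf_simple_pass_through (&jf, /*formal_id=*/0,
				  /*agg_preserved=*/true,
				  /*type_preserved=*/true);
  ipa_write_jump_function (ob, &jf);
}
#endif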
4272 /* Read in jump function JUMP_FUNC from IB. */
4274 static void
4275 ipa_read_jump_function (struct lto_input_block *ib,
4276 struct ipa_jump_func *jump_func,
4277 struct cgraph_edge *cs,
4278 struct data_in *data_in)
4280 enum jump_func_type jftype;
4281 enum tree_code operation;
4282 int i, count;
4284 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4285 switch (jftype)
4287 case IPA_JF_UNKNOWN:
4288 jump_func->type = IPA_JF_UNKNOWN;
4289 break;
4290 case IPA_JF_KNOWN_TYPE:
4292 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4293 tree base_type = stream_read_tree (ib, data_in);
4294 tree component_type = stream_read_tree (ib, data_in);
4296 ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
4297 break;
4299 case IPA_JF_CONST:
4300 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4301 break;
4302 case IPA_JF_PASS_THROUGH:
4303 operation = (enum tree_code) streamer_read_uhwi (ib);
4304 if (operation == NOP_EXPR)
4306 int formal_id = streamer_read_uhwi (ib);
4307 struct bitpack_d bp = streamer_read_bitpack (ib);
4308 bool agg_preserved = bp_unpack_value (&bp, 1);
4309 bool type_preserved = bp_unpack_value (&bp, 1);
4310 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
4311 type_preserved);
4313 else
4315 tree operand = stream_read_tree (ib, data_in);
4316 int formal_id = streamer_read_uhwi (ib);
4317 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4318 operation);
4320 break;
4321 case IPA_JF_ANCESTOR:
4323 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4324 tree type = stream_read_tree (ib, data_in);
4325 int formal_id = streamer_read_uhwi (ib);
4326 struct bitpack_d bp = streamer_read_bitpack (ib);
4327 bool agg_preserved = bp_unpack_value (&bp, 1);
4328 bool type_preserved = bp_unpack_value (&bp, 1);
4330 ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
4331 type_preserved);
4332 break;
4336 count = streamer_read_uhwi (ib);
4337 vec_alloc (jump_func->agg.items, count);
4338 if (count)
4340 struct bitpack_d bp = streamer_read_bitpack (ib);
4341 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4343 for (i = 0; i < count; i++)
4345 struct ipa_agg_jf_item item;
4346 item.offset = streamer_read_uhwi (ib);
4347 item.value = stream_read_tree (ib, data_in);
4348 jump_func->agg.items->quick_push (item);
4352 /* Stream out to OB the parts of cgraph_indirect_call_info for CS that are
4353 relevant to indirect inlining. */
4355 static void
4356 ipa_write_indirect_edge_info (struct output_block *ob,
4357 struct cgraph_edge *cs)
4359 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4360 struct bitpack_d bp;
4362 streamer_write_hwi (ob, ii->param_index);
4363 streamer_write_hwi (ob, ii->offset);
4364 bp = bitpack_create (ob->main_stream);
4365 bp_pack_value (&bp, ii->polymorphic, 1);
4366 bp_pack_value (&bp, ii->agg_contents, 1);
4367 bp_pack_value (&bp, ii->member_ptr, 1);
4368 bp_pack_value (&bp, ii->by_ref, 1);
4369 bp_pack_value (&bp, ii->maybe_in_construction, 1);
4370 bp_pack_value (&bp, ii->maybe_derived_type, 1);
4371 streamer_write_bitpack (&bp);
4373 if (ii->polymorphic)
4375 streamer_write_hwi (ob, ii->otr_token);
4376 stream_write_tree (ob, ii->otr_type, true);
4377 stream_write_tree (ob, ii->outer_type, true);
4381 /* Read in from IB the parts of cgraph_indirect_call_info for CS that are
4382 relevant to indirect inlining. */
4384 static void
4385 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4386 struct data_in *data_in ATTRIBUTE_UNUSED,
4387 struct cgraph_edge *cs)
4389 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4390 struct bitpack_d bp;
4392 ii->param_index = (int) streamer_read_hwi (ib);
4393 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4394 bp = streamer_read_bitpack (ib);
4395 ii->polymorphic = bp_unpack_value (&bp, 1);
4396 ii->agg_contents = bp_unpack_value (&bp, 1);
4397 ii->member_ptr = bp_unpack_value (&bp, 1);
4398 ii->by_ref = bp_unpack_value (&bp, 1);
4399 ii->maybe_in_construction = bp_unpack_value (&bp, 1);
4400 ii->maybe_derived_type = bp_unpack_value (&bp, 1);
4401 if (ii->polymorphic)
4403 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4404 ii->otr_type = stream_read_tree (ib, data_in);
4405 ii->outer_type = stream_read_tree (ib, data_in);
4409 /* Stream out NODE info to OB. */
4411 static void
4412 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4414 int node_ref;
4415 lto_symtab_encoder_t encoder;
4416 struct ipa_node_params *info = IPA_NODE_REF (node);
4417 int j;
4418 struct cgraph_edge *e;
4419 struct bitpack_d bp;
4421 encoder = ob->decl_state->symtab_node_encoder;
4422 node_ref = lto_symtab_encoder_encode (encoder, node);
4423 streamer_write_uhwi (ob, node_ref);
4425 streamer_write_uhwi (ob, ipa_get_param_count (info));
4426 for (j = 0; j < ipa_get_param_count (info); j++)
4427 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4428 bp = bitpack_create (ob->main_stream);
4429 gcc_assert (info->uses_analysis_done
4430 || ipa_get_param_count (info) == 0);
4431 gcc_assert (!info->node_enqueued);
4432 gcc_assert (!info->ipcp_orig_node);
4433 for (j = 0; j < ipa_get_param_count (info); j++)
4434 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4435 streamer_write_bitpack (&bp);
4436 for (j = 0; j < ipa_get_param_count (info); j++)
4437 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4438 for (e = node->callees; e; e = e->next_callee)
4440 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4442 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4443 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4444 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4446 for (e = node->indirect_calls; e; e = e->next_callee)
4448 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4450 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4451 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4452 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4453 ipa_write_indirect_edge_info (ob, e);
4457 /* Stream in NODE info from IB. */
4459 static void
4460 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4461 struct data_in *data_in)
4463 struct ipa_node_params *info = IPA_NODE_REF (node);
4464 int k;
4465 struct cgraph_edge *e;
4466 struct bitpack_d bp;
4468 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4470 for (k = 0; k < ipa_get_param_count (info); k++)
4471 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4473 bp = streamer_read_bitpack (ib);
4474 if (ipa_get_param_count (info) != 0)
4475 info->uses_analysis_done = true;
4476 info->node_enqueued = false;
4477 for (k = 0; k < ipa_get_param_count (info); k++)
4478 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4479 for (k = 0; k < ipa_get_param_count (info); k++)
4480 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4481 for (e = node->callees; e; e = e->next_callee)
4483 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4484 int count = streamer_read_uhwi (ib);
4486 if (!count)
4487 continue;
4488 vec_safe_grow_cleared (args->jump_functions, count);
4490 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4491 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4492 data_in);
4494 for (e = node->indirect_calls; e; e = e->next_callee)
4496 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4497 int count = streamer_read_uhwi (ib);
4499 if (count)
4501 vec_safe_grow_cleared (args->jump_functions, count);
4502 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4503 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4504 data_in);
4506 ipa_read_indirect_edge_info (ib, data_in, e);
4510 /* Write jump functions for nodes in the current partition. */
4512 void
4513 ipa_prop_write_jump_functions (void)
4515 struct cgraph_node *node;
4516 struct output_block *ob;
4517 unsigned int count = 0;
4518 lto_symtab_encoder_iterator lsei;
4519 lto_symtab_encoder_t encoder;
4522 if (!ipa_node_params_vector.exists ())
4523 return;
4525 ob = create_output_block (LTO_section_jump_functions);
4526 encoder = ob->decl_state->symtab_node_encoder;
4527 ob->cgraph_node = NULL;
4528 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4529 lsei_next_function_in_partition (&lsei))
4531 node = lsei_cgraph_node (lsei);
4532 if (cgraph_function_with_gimple_body_p (node)
4533 && IPA_NODE_REF (node) != NULL)
4534 count++;
4537 streamer_write_uhwi (ob, count);
4539 /* Process all of the functions. */
4540 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4541 lsei_next_function_in_partition (&lsei))
4543 node = lsei_cgraph_node (lsei);
4544 if (cgraph_function_with_gimple_body_p (node)
4545 && IPA_NODE_REF (node) != NULL)
4546 ipa_write_node_info (ob, node);
4548 streamer_write_char_stream (ob->main_stream, 0);
4549 produce_asm (ob, NULL);
4550 destroy_output_block (ob);
4553 /* Read the jump functions section in file FILE_DATA of length LEN with data DATA. */
4555 static void
4556 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4557 size_t len)
4559 const struct lto_function_header *header =
4560 (const struct lto_function_header *) data;
4561 const int cfg_offset = sizeof (struct lto_function_header);
4562 const int main_offset = cfg_offset + header->cfg_size;
4563 const int string_offset = main_offset + header->main_size;
4564 struct data_in *data_in;
4565 struct lto_input_block ib_main;
4566 unsigned int i;
4567 unsigned int count;
4569 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4570 header->main_size);
4572 data_in =
4573 lto_data_in_create (file_data, (const char *) data + string_offset,
4574 header->string_size, vNULL);
4575 count = streamer_read_uhwi (&ib_main);
4577 for (i = 0; i < count; i++)
4579 unsigned int index;
4580 struct cgraph_node *node;
4581 lto_symtab_encoder_t encoder;
4583 index = streamer_read_uhwi (&ib_main);
4584 encoder = file_data->symtab_node_encoder;
4585 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4586 gcc_assert (node->definition);
4587 ipa_read_node_info (&ib_main, node, data_in);
4589 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4590 len);
4591 lto_data_in_delete (data_in);
4594 /* Read IPA-CP jump functions. */
4596 void
4597 ipa_prop_read_jump_functions (void)
4599 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4600 struct lto_file_decl_data *file_data;
4601 unsigned int j = 0;
4603 ipa_check_create_node_params ();
4604 ipa_check_create_edge_args ();
4605 ipa_register_cgraph_hooks ();
4607 while ((file_data = file_data_vec[j++]))
4609 size_t len;
4610 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4612 if (data)
4613 ipa_prop_read_section (file_data, data, len);
4617 /* After merging units, we can get a mismatch in argument counts.
4618 Decl merging might also have rendered parameter lists obsolete.
4619 Compute called_with_variable_arg info as well. */
4621 void
4622 ipa_update_after_lto_read (void)
4624 ipa_check_create_node_params ();
4625 ipa_check_create_edge_args ();
4627 /* Stream out the aggregate value replacement chain for NODE to OB. */
4628 void
4629 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
4631 int node_ref;
4632 unsigned int count = 0;
4633 lto_symtab_encoder_t encoder;
4634 struct ipa_agg_replacement_value *aggvals, *av;
4636 aggvals = ipa_get_agg_replacements_for_node (node);
4637 encoder = ob->decl_state->symtab_node_encoder;
4638 node_ref = lto_symtab_encoder_encode (encoder, node);
4639 streamer_write_uhwi (ob, node_ref);
4641 for (av = aggvals; av; av = av->next)
4642 count++;
4643 streamer_write_uhwi (ob, count);
4645 for (av = aggvals; av; av = av->next)
4647 struct bitpack_d bp;
4649 streamer_write_uhwi (ob, av->offset);
4650 streamer_write_uhwi (ob, av->index);
4651 stream_write_tree (ob, av->value, true);
4653 bp = bitpack_create (ob->main_stream);
4654 bp_pack_value (&bp, av->by_ref, 1);
4655 streamer_write_bitpack (&bp);
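/* Editorial sketch, not part of GCC: recording a known aggregate constant
   and streaming the chain.  It assumes bit offset 0 of the aggregate
   passed as parameter 2 of NODE is known to be 42; the helper name is
   hypothetical.  */
#if 0
static void
example_record_and_write (struct output_block *ob, struct cgraph_node *node)
{
  struct ipa_agg_replacement_value *av
    = ggc_alloc_ipa_agg_replacement_value ();

  av->offset = 0;
  av->index = 2;
  av->value = build_int_cst (integer_type_node, 42);
  av->by_ref = false;
  av->next = NULL;
  ipa_set_node_agg_value_chain (node, av);
  write_agg_replacement_chain (ob, node);
}
#endif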
4659 /* Stream in the aggregate value replacement chain for NODE from IB. */
4661 static void
4662 read_agg_replacement_chain (struct lto_input_block *ib,
4663 struct cgraph_node *node,
4664 struct data_in *data_in)
4666 struct ipa_agg_replacement_value *aggvals = NULL;
4667 unsigned int count, i;
4669 count = streamer_read_uhwi (ib);
4670 for (i = 0; i < count; i++)
4672 struct ipa_agg_replacement_value *av;
4673 struct bitpack_d bp;
4675 av = ggc_alloc_ipa_agg_replacement_value ();
4676 av->offset = streamer_read_uhwi (ib);
4677 av->index = streamer_read_uhwi (ib);
4678 av->value = stream_read_tree (ib, data_in);
4679 bp = streamer_read_bitpack (ib);
4680 av->by_ref = bp_unpack_value (&bp, 1);
4681 av->next = aggvals;
4682 aggvals = av;
4684 ipa_set_node_agg_value_chain (node, aggvals);
4687 /* Write all aggregate replacements for nodes in the set. */
4689 void
4690 ipa_prop_write_all_agg_replacement (void)
4692 struct cgraph_node *node;
4693 struct output_block *ob;
4694 unsigned int count = 0;
4695 lto_symtab_encoder_iterator lsei;
4696 lto_symtab_encoder_t encoder;
4698 if (!ipa_node_agg_replacements)
4699 return;
4701 ob = create_output_block (LTO_section_ipcp_transform);
4702 encoder = ob->decl_state->symtab_node_encoder;
4703 ob->cgraph_node = NULL;
4704 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4705 lsei_next_function_in_partition (&lsei))
4707 node = lsei_cgraph_node (lsei);
4708 if (cgraph_function_with_gimple_body_p (node)
4709 && ipa_get_agg_replacements_for_node (node) != NULL)
4710 count++;
4713 streamer_write_uhwi (ob, count);
4715 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4716 lsei_next_function_in_partition (&lsei))
4718 node = lsei_cgraph_node (lsei);
4719 if (cgraph_function_with_gimple_body_p (node)
4720 && ipa_get_agg_replacements_for_node (node) != NULL)
4721 write_agg_replacement_chain (ob, node);
4723 streamer_write_char_stream (ob->main_stream, 0);
4724 produce_asm (ob, NULL);
4725 destroy_output_block (ob);
4728 /* Read the aggregate replacements section in file FILE_DATA of length LEN
4729 with data DATA. */
4731 static void
4732 read_replacements_section (struct lto_file_decl_data *file_data,
4733 const char *data,
4734 size_t len)
4736 const struct lto_function_header *header =
4737 (const struct lto_function_header *) data;
4738 const int cfg_offset = sizeof (struct lto_function_header);
4739 const int main_offset = cfg_offset + header->cfg_size;
4740 const int string_offset = main_offset + header->main_size;
4741 struct data_in *data_in;
4742 struct lto_input_block ib_main;
4743 unsigned int i;
4744 unsigned int count;
4746 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4747 header->main_size);
4749 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
4750 header->string_size, vNULL);
4751 count = streamer_read_uhwi (&ib_main);
4753 for (i = 0; i < count; i++)
4755 unsigned int index;
4756 struct cgraph_node *node;
4757 lto_symtab_encoder_t encoder;
4759 index = streamer_read_uhwi (&ib_main);
4760 encoder = file_data->symtab_node_encoder;
4761 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4762 gcc_assert (node->definition);
4763 read_agg_replacement_chain (&ib_main, node, data_in);
4765 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
4766 len);
4767 lto_data_in_delete (data_in);
4770 /* Read IPA-CP aggregate replacements. */
4772 void
4773 ipa_prop_read_all_agg_replacement (void)
4775 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4776 struct lto_file_decl_data *file_data;
4777 unsigned int j = 0;
4779 while ((file_data = file_data_vec[j++]))
4781 size_t len;
4782 const char *data = lto_get_section_data (file_data,
4783 LTO_section_ipcp_transform,
4784 NULL, &len);
4785 if (data)
4786 read_replacements_section (file_data, data, len);
4790 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4791 NODE. */
4793 static void
4794 adjust_agg_replacement_values (struct cgraph_node *node,
4795 struct ipa_agg_replacement_value *aggval)
4797 struct ipa_agg_replacement_value *v;
4798 int i, c = 0, d = 0, *adj;
4800 if (!node->clone.combined_args_to_skip)
4801 return;
4803 for (v = aggval; v; v = v->next)
4805 gcc_assert (v->index >= 0);
4806 if (c < v->index)
4807 c = v->index;
4809 c++;
4811 adj = XALLOCAVEC (int, c);
4812 for (i = 0; i < c; i++)
4813 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
4815 adj[i] = -1;
4816 d++;
4818 else
4819 adj[i] = i - d;
4821 for (v = aggval; v; v = v->next)
4822 v->index = adj[v->index];
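/* Editorial sketch, not part of GCC: the remap table built above for four
   original parameters where parameter 1 is skipped in the clone is
   { 0, -1, 1, 2 }, so a value recorded for original parameter 2 is now
   looked up under index 1.  */
#if 0
static void
example_remap_table (void)
{
  int adj[4], d = 0;

  for (int i = 0; i < 4; i++)
    if (i == 1)		/* Stands in for bitmap_bit_p on index 1.  */
      {
	adj[i] = -1;
	d++;
      }
    else
      adj[i] = i - d;
  gcc_assert (adj[0] == 0 && adj[1] == -1 && adj[2] == 1 && adj[3] == 2);
}
#endif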
4826 /* Function body transformation phase: apply aggregate replacement values to the body of NODE. */
4828 unsigned int
4829 ipcp_transform_function (struct cgraph_node *node)
4831 vec<ipa_param_descriptor> descriptors = vNULL;
4832 struct param_analysis_info *parms_ainfo;
4833 struct ipa_agg_replacement_value *aggval;
4834 gimple_stmt_iterator gsi;
4835 basic_block bb;
4836 int param_count;
4837 bool cfg_changed = false, something_changed = false;
4839 gcc_checking_assert (cfun);
4840 gcc_checking_assert (current_function_decl);
4842 if (dump_file)
4843 fprintf (dump_file, "Modification phase of node %s/%i\n",
4844 node->name (), node->order);
4846 aggval = ipa_get_agg_replacements_for_node (node);
4847 if (!aggval)
4848 return 0;
4849 param_count = count_formal_params (node->decl);
4850 if (param_count == 0)
4851 return 0;
4852 adjust_agg_replacement_values (node, aggval);
4853 if (dump_file)
4854 ipa_dump_agg_replacement_values (dump_file, aggval);
4855 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
4856 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
4857 descriptors.safe_grow_cleared (param_count);
4858 ipa_populate_param_decls (node, descriptors);
4860 FOR_EACH_BB_FN (bb, cfun)
4861 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4863 struct ipa_agg_replacement_value *v;
4864 gimple stmt = gsi_stmt (gsi);
4865 tree rhs, val, t;
4866 HOST_WIDE_INT offset, size;
4867 int index;
4868 bool by_ref, vce;
4870 if (!gimple_assign_load_p (stmt))
4871 continue;
4872 rhs = gimple_assign_rhs1 (stmt);
4873 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
4874 continue;
4876 vce = false;
4877 t = rhs;
4878 while (handled_component_p (t))
4880 /* V_C_E can do things like convert an array of integers to one
4881 bigger integer and similar things we do not handle below. */
4882 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
4884 vce = true;
4885 break;
4887 t = TREE_OPERAND (t, 0);
4889 if (vce)
4890 continue;
4892 if (!ipa_load_from_parm_agg_1 (descriptors, parms_ainfo, stmt,
4893 rhs, &index, &offset, &size, &by_ref))
4894 continue;
4895 for (v = aggval; v; v = v->next)
4896 if (v->index == index
4897 && v->offset == offset)
4898 break;
4899 if (!v
4900 || v->by_ref != by_ref
4901 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
4902 continue;
4904 gcc_checking_assert (is_gimple_ip_invariant (v->value));
4905 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
4907 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
4908 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
4909 else if (TYPE_SIZE (TREE_TYPE (rhs))
4910 == TYPE_SIZE (TREE_TYPE (v->value)))
4911 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
4912 else
4914 if (dump_file)
4916 fprintf (dump_file, " const ");
4917 print_generic_expr (dump_file, v->value, 0);
4918 fprintf (dump_file, " can't be converted to type of ");
4919 print_generic_expr (dump_file, rhs, 0);
4920 fprintf (dump_file, "\n");
4922 continue;
4925 else
4926 val = v->value;
4928 if (dump_file && (dump_flags & TDF_DETAILS))
4930 fprintf (dump_file, "Modifying stmt:\n ");
4931 print_gimple_stmt (dump_file, stmt, 0, 0);
4933 gimple_assign_set_rhs_from_tree (&gsi, val);
4934 update_stmt (stmt);
4936 if (dump_file && (dump_flags & TDF_DETAILS))
4938 fprintf (dump_file, "into:\n ");
4939 print_gimple_stmt (dump_file, stmt, 0, 0);
4940 fprintf (dump_file, "\n");
4943 something_changed = true;
4944 if (maybe_clean_eh_stmt (stmt)
4945 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4946 cfg_changed = true;
4949 (*ipa_node_agg_replacements)[node->uid] = NULL;
4950 free_parms_ainfo (parms_ainfo, param_count);
4951 descriptors.release ();
4953 if (!something_changed)
4954 return 0;
4955 else if (cfg_changed)
4956 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
4957 else
4958 return TODO_update_ssa_only_virtuals;
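/* Editorial sketch, not part of GCC: how a hypothetical driver could run
   the transform over every function with a gimple body and accumulate
   the returned TODO flags.  In reality the pass manager does this.  */
#if 0
static unsigned int
example_run_ipcp_transform (void)
{
  struct cgraph_node *node;
  unsigned int todos = 0;

  FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
    {
      /* ipcp_transform_function expects cfun and current_function_decl
	 to be set up for NODE.  */
      push_cfun (DECL_STRUCT_FUNCTION (node->decl));
      todos |= ipcp_transform_function (node);
      pop_cfun ();
    }
  return todos;
}
#endif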