2013-06-18 Andreas Krebbel <Andreas.Krebbel@de.ibm.com>
[official-gcc.git] / gcc / ipa-prop.c
bloba65adbbe331ca6d0ad8585c4d26edc8bf821a341
/* Interprocedural analyses.
   Copyright (C) 2005-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tree.h"
24 #include "langhooks.h"
25 #include "ggc.h"
26 #include "target.h"
27 #include "cgraph.h"
28 #include "ipa-prop.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-inline.h"
32 #include "ipa-inline.h"
33 #include "gimple.h"
34 #include "flags.h"
35 #include "diagnostic.h"
36 #include "gimple-pretty-print.h"
37 #include "lto-streamer.h"
38 #include "data-streamer.h"
39 #include "tree-streamer.h"
40 #include "params.h"
42 /* Intermediate information about a parameter that is only useful during the
43 run of ipa_analyze_node and is not kept afterwards. */
45 struct param_analysis_info
47 bool parm_modified, ref_modified, pt_modified;
48 bitmap parm_visited_statements, pt_visited_statements;
51 /* Vector where the parameter infos are actually stored. */
52 vec<ipa_node_params_t> ipa_node_params_vector;
53 /* Vector of known aggregate values in cloned nodes. */
54 vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
55 /* Vector where the parameter infos are actually stored. */
56 vec<ipa_edge_args_t, va_gc> *ipa_edge_args_vector;
/* Holders of ipa cgraph hooks, kept so the hooks can be unregistered when
   the pass is torn down.  */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};
77 /* Allocation pool for reference descriptions. */
79 static alloc_pool ipa_refdesc_pool;
81 /* Return index of the formal whose tree is PTREE in function which corresponds
82 to INFO. */
84 static int
85 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor_t> descriptors, tree ptree)
87 int i, count;
89 count = descriptors.length ();
90 for (i = 0; i < count; i++)
91 if (descriptors[i].decl == ptree)
92 return i;
94 return -1;
97 /* Return index of the formal whose tree is PTREE in function which corresponds
98 to INFO. */
101 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
103 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
106 /* Populate the param_decl field in parameter DESCRIPTORS that correspond to
107 NODE. */
109 static void
110 ipa_populate_param_decls (struct cgraph_node *node,
111 vec<ipa_param_descriptor_t> &descriptors)
113 tree fndecl;
114 tree fnargs;
115 tree parm;
116 int param_num;
118 fndecl = node->symbol.decl;
119 fnargs = DECL_ARGUMENTS (fndecl);
120 param_num = 0;
121 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
123 descriptors[param_num].decl = parm;
124 param_num++;
128 /* Return how many formal parameters FNDECL has. */
130 static inline int
131 count_formal_params (tree fndecl)
133 tree parm;
134 int count = 0;
136 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
137 count++;
139 return count;
142 /* Initialize the ipa_node_params structure associated with NODE by counting
143 the function parameters, creating the descriptors and populating their
144 param_decls. */
146 void
147 ipa_initialize_node_params (struct cgraph_node *node)
149 struct ipa_node_params *info = IPA_NODE_REF (node);
151 if (!info->descriptors.exists ())
153 int param_count;
155 param_count = count_formal_params (node->symbol.decl);
156 if (param_count)
158 info->descriptors.safe_grow_cleared (param_count);
159 ipa_populate_param_decls (node, info->descriptors);
164 /* Print the jump functions associated with call graph edge CS to file F. */
166 static void
167 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
169 int i, count;
171 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
172 for (i = 0; i < count; i++)
174 struct ipa_jump_func *jump_func;
175 enum jump_func_type type;
177 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
178 type = jump_func->type;
180 fprintf (f, " param %d: ", i);
181 if (type == IPA_JF_UNKNOWN)
182 fprintf (f, "UNKNOWN\n");
183 else if (type == IPA_JF_KNOWN_TYPE)
185 fprintf (f, "KNOWN TYPE: base ");
186 print_generic_expr (f, jump_func->value.known_type.base_type, 0);
187 fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
188 jump_func->value.known_type.offset);
189 print_generic_expr (f, jump_func->value.known_type.component_type, 0);
190 fprintf (f, "\n");
192 else if (type == IPA_JF_CONST)
194 tree val = jump_func->value.constant.value;
195 fprintf (f, "CONST: ");
196 print_generic_expr (f, val, 0);
197 if (TREE_CODE (val) == ADDR_EXPR
198 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
200 fprintf (f, " -> ");
201 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
204 fprintf (f, "\n");
206 else if (type == IPA_JF_PASS_THROUGH)
208 fprintf (f, "PASS THROUGH: ");
209 fprintf (f, "%d, op %s",
210 jump_func->value.pass_through.formal_id,
211 tree_code_name[(int)
212 jump_func->value.pass_through.operation]);
213 if (jump_func->value.pass_through.operation != NOP_EXPR)
215 fprintf (f, " ");
216 print_generic_expr (f,
217 jump_func->value.pass_through.operand, 0);
219 if (jump_func->value.pass_through.agg_preserved)
220 fprintf (f, ", agg_preserved");
221 fprintf (f, "\n");
223 else if (type == IPA_JF_ANCESTOR)
225 fprintf (f, "ANCESTOR: ");
226 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
227 jump_func->value.ancestor.formal_id,
228 jump_func->value.ancestor.offset);
229 print_generic_expr (f, jump_func->value.ancestor.type, 0);
230 if (jump_func->value.ancestor.agg_preserved)
231 fprintf (f, ", agg_preserved");
232 fprintf (f, "\n");
235 if (jump_func->agg.items)
237 struct ipa_agg_jf_item *item;
238 int j;
240 fprintf (f, " Aggregate passed by %s:\n",
241 jump_func->agg.by_ref ? "reference" : "value");
242 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
244 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
245 item->offset);
246 if (TYPE_P (item->value))
247 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
248 tree_low_cst (TYPE_SIZE (item->value), 1));
249 else
251 fprintf (f, "cst: ");
252 print_generic_expr (f, item->value, 0);
254 fprintf (f, "\n");
261 /* Print the jump functions of all arguments on all call graph edges going from
262 NODE to file F. */
264 void
265 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
267 struct cgraph_edge *cs;
269 fprintf (f, " Jump functions of caller %s/%i:\n", cgraph_node_name (node),
270 node->symbol.order);
271 for (cs = node->callees; cs; cs = cs->next_callee)
273 if (!ipa_edge_args_info_available_for_edge_p (cs))
274 continue;
276 fprintf (f, " callsite %s/%i -> %s/%i : \n",
277 xstrdup (cgraph_node_name (node)), node->symbol.order,
278 xstrdup (cgraph_node_name (cs->callee)),
279 cs->callee->symbol.order);
280 ipa_print_node_jump_functions_for_edge (f, cs);
283 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
285 struct cgraph_indirect_call_info *ii;
286 if (!ipa_edge_args_info_available_for_edge_p (cs))
287 continue;
289 ii = cs->indirect_info;
290 if (ii->agg_contents)
291 fprintf (f, " indirect aggregate callsite, calling param %i, "
292 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
293 ii->param_index, ii->offset,
294 ii->by_ref ? "by reference" : "by_value");
295 else
296 fprintf (f, " indirect %s callsite, calling param %i",
297 ii->polymorphic ? "polymorphic" : "simple", ii->param_index);
299 if (cs->call_stmt)
301 fprintf (f, ", for stmt ");
302 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
304 else
305 fprintf (f, "\n");
306 ipa_print_node_jump_functions_for_edge (f, cs);
310 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
312 void
313 ipa_print_all_jump_functions (FILE *f)
315 struct cgraph_node *node;
317 fprintf (f, "\nJump functions:\n");
318 FOR_EACH_FUNCTION (node)
320 ipa_print_node_jump_functions (f, node);
324 /* Set JFUNC to be a known type jump function. */
326 static void
327 ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
328 tree base_type, tree component_type)
330 jfunc->type = IPA_JF_KNOWN_TYPE;
331 jfunc->value.known_type.offset = offset,
332 jfunc->value.known_type.base_type = base_type;
333 jfunc->value.known_type.component_type = component_type;
336 /* Set JFUNC to be a constant jmp function. */
338 static void
339 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
340 struct cgraph_edge *cs)
342 constant = unshare_expr (constant);
343 if (constant && EXPR_P (constant))
344 SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
345 jfunc->type = IPA_JF_CONST;
346 jfunc->value.constant.value = unshare_expr_without_location (constant);
348 if (TREE_CODE (constant) == ADDR_EXPR
349 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
351 struct ipa_cst_ref_desc *rdesc;
352 if (!ipa_refdesc_pool)
353 ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
354 sizeof (struct ipa_cst_ref_desc), 32);
356 rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
357 rdesc->cs = cs;
358 rdesc->next_duplicate = NULL;
359 rdesc->refcount = 1;
360 jfunc->value.constant.rdesc = rdesc;
362 else
363 jfunc->value.constant.rdesc = NULL;
366 /* Set JFUNC to be a simple pass-through jump function. */
367 static void
368 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
369 bool agg_preserved)
371 jfunc->type = IPA_JF_PASS_THROUGH;
372 jfunc->value.pass_through.operand = NULL_TREE;
373 jfunc->value.pass_through.formal_id = formal_id;
374 jfunc->value.pass_through.operation = NOP_EXPR;
375 jfunc->value.pass_through.agg_preserved = agg_preserved;
378 /* Set JFUNC to be an arithmetic pass through jump function. */
380 static void
381 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
382 tree operand, enum tree_code operation)
384 jfunc->type = IPA_JF_PASS_THROUGH;
385 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
386 jfunc->value.pass_through.formal_id = formal_id;
387 jfunc->value.pass_through.operation = operation;
388 jfunc->value.pass_through.agg_preserved = false;
391 /* Set JFUNC to be an ancestor jump function. */
393 static void
394 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
395 tree type, int formal_id, bool agg_preserved)
397 jfunc->type = IPA_JF_ANCESTOR;
398 jfunc->value.ancestor.formal_id = formal_id;
399 jfunc->value.ancestor.offset = offset;
400 jfunc->value.ancestor.type = type;
401 jfunc->value.ancestor.agg_preserved = agg_preserved;
404 /* Extract the acual BINFO being described by JFUNC which must be a known type
405 jump function. */
407 tree
408 ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
410 tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
411 if (!base_binfo)
412 return NULL_TREE;
413 return get_binfo_at_offset (base_binfo,
414 jfunc->value.known_type.offset,
415 jfunc->value.known_type.component_type);
418 /* Structure to be passed in between detect_type_change and
419 check_stmt_for_type_change. */
421 struct type_change_info
423 /* Offset into the object where there is the virtual method pointer we are
424 looking for. */
425 HOST_WIDE_INT offset;
426 /* The declaration or SSA_NAME pointer of the base that we are checking for
427 type change. */
428 tree object;
429 /* If we actually can tell the type that the object has changed to, it is
430 stored in this field. Otherwise it remains NULL_TREE. */
431 tree known_current_type;
432 /* Set to true if dynamic type change has been detected. */
433 bool type_maybe_changed;
434 /* Set to true if multiple types have been encountered. known_current_type
435 must be disregarded in that case. */
436 bool multiple_types_encountered;
439 /* Return true if STMT can modify a virtual method table pointer.
441 This function makes special assumptions about both constructors and
442 destructors which are all the functions that are allowed to alter the VMT
443 pointers. It assumes that destructors begin with assignment into all VMT
444 pointers and that constructors essentially look in the following way:
446 1) The very first thing they do is that they call constructors of ancestor
447 sub-objects that have them.
449 2) Then VMT pointers of this and all its ancestors is set to new values
450 corresponding to the type corresponding to the constructor.
452 3) Only afterwards, other stuff such as constructor of member sub-objects
453 and the code written by the user is run. Only this may include calling
454 virtual functions, directly or indirectly.
456 There is no way to call a constructor of an ancestor sub-object in any
457 other way.
459 This means that we do not have to care whether constructors get the correct
460 type information because they will always change it (in fact, if we define
461 the type to be given by the VMT pointer, it is undefined).
463 The most important fact to derive from the above is that if, for some
464 statement in the section 3, we try to detect whether the dynamic type has
465 changed, we can safely ignore all calls as we examine the function body
466 backwards until we reach statements in section 2 because these calls cannot
467 be ancestor constructors or destructors (if the input is not bogus) and so
468 do not change the dynamic type (this holds true only for automatically
469 allocated objects but at the moment we devirtualize only these). We then
470 must detect that statements in section 2 change the dynamic type and can try
471 to derive the new type. That is enough and we can stop, we will never see
472 the calls into constructors of sub-objects in this code. Therefore we can
473 safely ignore all call statements that we traverse.
476 static bool
477 stmt_may_be_vtbl_ptr_store (gimple stmt)
479 if (is_gimple_call (stmt))
480 return false;
481 else if (is_gimple_assign (stmt))
483 tree lhs = gimple_assign_lhs (stmt);
485 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
487 if (flag_strict_aliasing
488 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
489 return false;
491 if (TREE_CODE (lhs) == COMPONENT_REF
492 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
493 return false;
494 /* In the future we might want to use get_base_ref_and_offset to find
495 if there is a field corresponding to the offset and if so, proceed
496 almost like if it was a component ref. */
499 return true;
502 /* If STMT can be proved to be an assignment to the virtual method table
503 pointer of ANALYZED_OBJ and the type associated with the new table
504 identified, return the type. Otherwise return NULL_TREE. */
506 static tree
507 extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
509 HOST_WIDE_INT offset, size, max_size;
510 tree lhs, rhs, base;
512 if (!gimple_assign_single_p (stmt))
513 return NULL_TREE;
515 lhs = gimple_assign_lhs (stmt);
516 rhs = gimple_assign_rhs1 (stmt);
517 if (TREE_CODE (lhs) != COMPONENT_REF
518 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
519 || TREE_CODE (rhs) != ADDR_EXPR)
520 return NULL_TREE;
521 rhs = get_base_address (TREE_OPERAND (rhs, 0));
522 if (!rhs
523 || TREE_CODE (rhs) != VAR_DECL
524 || !DECL_VIRTUAL_P (rhs))
525 return NULL_TREE;
527 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
528 if (offset != tci->offset
529 || size != POINTER_SIZE
530 || max_size != POINTER_SIZE)
531 return NULL_TREE;
532 if (TREE_CODE (base) == MEM_REF)
534 if (TREE_CODE (tci->object) != MEM_REF
535 || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
536 || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
537 TREE_OPERAND (base, 1)))
538 return NULL_TREE;
540 else if (tci->object != base)
541 return NULL_TREE;
543 return DECL_CONTEXT (rhs);
546 /* Callback of walk_aliased_vdefs and a helper function for
547 detect_type_change to check whether a particular statement may modify
548 the virtual table pointer, and if possible also determine the new type of
549 the (sub-)object. It stores its result into DATA, which points to a
550 type_change_info structure. */
552 static bool
553 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
555 gimple stmt = SSA_NAME_DEF_STMT (vdef);
556 struct type_change_info *tci = (struct type_change_info *) data;
558 if (stmt_may_be_vtbl_ptr_store (stmt))
560 tree type;
561 type = extr_type_from_vtbl_ptr_store (stmt, tci);
562 if (tci->type_maybe_changed
563 && type != tci->known_current_type)
564 tci->multiple_types_encountered = true;
565 tci->known_current_type = type;
566 tci->type_maybe_changed = true;
567 return true;
569 else
570 return false;
575 /* Like detect_type_change but with extra argument COMP_TYPE which will become
576 the component type part of new JFUNC of dynamic type change is detected and
577 the new base type is identified. */
579 static bool
580 detect_type_change_1 (tree arg, tree base, tree comp_type, gimple call,
581 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
583 struct type_change_info tci;
584 ao_ref ao;
586 gcc_checking_assert (DECL_P (arg)
587 || TREE_CODE (arg) == MEM_REF
588 || handled_component_p (arg));
589 /* Const calls cannot call virtual methods through VMT and so type changes do
590 not matter. */
591 if (!flag_devirtualize || !gimple_vuse (call))
592 return false;
594 ao_ref_init (&ao, arg);
595 ao.base = base;
596 ao.offset = offset;
597 ao.size = POINTER_SIZE;
598 ao.max_size = ao.size;
600 tci.offset = offset;
601 tci.object = get_base_address (arg);
602 tci.known_current_type = NULL_TREE;
603 tci.type_maybe_changed = false;
604 tci.multiple_types_encountered = false;
606 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
607 &tci, NULL);
608 if (!tci.type_maybe_changed)
609 return false;
611 if (!tci.known_current_type
612 || tci.multiple_types_encountered
613 || offset != 0)
614 jfunc->type = IPA_JF_UNKNOWN;
615 else
616 ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);
618 return true;
621 /* Detect whether the dynamic type of ARG has changed (before callsite CALL) by
622 looking for assignments to its virtual table pointer. If it is, return true
623 and fill in the jump function JFUNC with relevant type information or set it
624 to unknown. ARG is the object itself (not a pointer to it, unless
625 dereferenced). BASE is the base of the memory access as returned by
626 get_ref_base_and_extent, as is the offset. */
628 static bool
629 detect_type_change (tree arg, tree base, gimple call,
630 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
632 return detect_type_change_1 (arg, base, TREE_TYPE (arg), call, jfunc, offset);
635 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
636 SSA name (its dereference will become the base and the offset is assumed to
637 be zero). */
639 static bool
640 detect_type_change_ssa (tree arg, gimple call, struct ipa_jump_func *jfunc)
642 tree comp_type;
644 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
645 if (!flag_devirtualize
646 || !POINTER_TYPE_P (TREE_TYPE (arg))
647 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != RECORD_TYPE)
648 return false;
650 comp_type = TREE_TYPE (TREE_TYPE (arg));
651 arg = build2 (MEM_REF, ptr_type_node, arg,
652 build_int_cst (ptr_type_node, 0));
654 return detect_type_change_1 (arg, arg, comp_type, call, jfunc, 0);
657 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
658 boolean variable pointed to by DATA. */
660 static bool
661 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
662 void *data)
664 bool *b = (bool *) data;
665 *b = true;
666 return true;
669 /* Return true if a load from a formal parameter PARM_LOAD is known to retreive
670 a value known not to be modified in this function before reaching the
671 statement STMT. PARM_AINFO is a pointer to a structure containing temporary
672 information about the parameter. */
674 static bool
675 parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
676 gimple stmt, tree parm_load)
678 bool modified = false;
679 bitmap *visited_stmts;
680 ao_ref refd;
682 if (parm_ainfo && parm_ainfo->parm_modified)
683 return false;
685 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
686 ao_ref_init (&refd, parm_load);
687 /* We can cache visited statements only when parm_ainfo is available and when
688 we are looking at a naked load of the whole parameter. */
689 if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
690 visited_stmts = NULL;
691 else
692 visited_stmts = &parm_ainfo->parm_visited_statements;
693 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
694 visited_stmts);
695 if (parm_ainfo && modified)
696 parm_ainfo->parm_modified = true;
697 return !modified;
700 /* If STMT is an assignment that loads a value from an parameter declaration,
701 return the index of the parameter in ipa_node_params which has not been
702 modified. Otherwise return -1. */
704 static int
705 load_from_unmodified_param (vec<ipa_param_descriptor_t> descriptors,
706 struct param_analysis_info *parms_ainfo,
707 gimple stmt)
709 int index;
710 tree op1;
712 if (!gimple_assign_single_p (stmt))
713 return -1;
715 op1 = gimple_assign_rhs1 (stmt);
716 if (TREE_CODE (op1) != PARM_DECL)
717 return -1;
719 index = ipa_get_param_decl_index_1 (descriptors, op1);
720 if (index < 0
721 || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
722 : NULL, stmt, op1))
723 return -1;
725 return index;
728 /* Return true if memory reference REF loads data that are known to be
729 unmodified in this function before reaching statement STMT. PARM_AINFO, if
730 non-NULL, is a pointer to a structure containing temporary information about
731 PARM. */
733 static bool
734 parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
735 gimple stmt, tree ref)
737 bool modified = false;
738 ao_ref refd;
740 gcc_checking_assert (gimple_vuse (stmt));
741 if (parm_ainfo && parm_ainfo->ref_modified)
742 return false;
744 ao_ref_init (&refd, ref);
745 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
746 NULL);
747 if (parm_ainfo && modified)
748 parm_ainfo->ref_modified = true;
749 return !modified;
752 /* Return true if the data pointed to by PARM is known to be unmodified in this
753 function before reaching call statement CALL into which it is passed.
754 PARM_AINFO is a pointer to a structure containing temporary information
755 about PARM. */
757 static bool
758 parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
759 gimple call, tree parm)
761 bool modified = false;
762 ao_ref refd;
764 /* It's unnecessary to calculate anything about memory contnets for a const
765 function because it is not goin to use it. But do not cache the result
766 either. Also, no such calculations for non-pointers. */
767 if (!gimple_vuse (call)
768 || !POINTER_TYPE_P (TREE_TYPE (parm)))
769 return false;
771 if (parm_ainfo->pt_modified)
772 return false;
774 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
775 walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
776 parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
777 if (modified)
778 parm_ainfo->pt_modified = true;
779 return !modified;
782 /* Return true if we can prove that OP is a memory reference loading unmodified
783 data from an aggregate passed as a parameter and if the aggregate is passed
784 by reference, that the alias type of the load corresponds to the type of the
785 formal parameter (so that we can rely on this type for TBAA in callers).
786 INFO and PARMS_AINFO describe parameters of the current function (but the
787 latter can be NULL), STMT is the load statement. If function returns true,
788 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
789 within the aggregate and whether it is a load from a value passed by
790 reference respectively. */
792 static bool
793 ipa_load_from_parm_agg_1 (vec<ipa_param_descriptor_t> descriptors,
794 struct param_analysis_info *parms_ainfo, gimple stmt,
795 tree op, int *index_p, HOST_WIDE_INT *offset_p,
796 bool *by_ref_p)
798 int index;
799 HOST_WIDE_INT size, max_size;
800 tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);
802 if (max_size == -1 || max_size != size || *offset_p < 0)
803 return false;
805 if (DECL_P (base))
807 int index = ipa_get_param_decl_index_1 (descriptors, base);
808 if (index >= 0
809 && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
810 : NULL, stmt, op))
812 *index_p = index;
813 *by_ref_p = false;
814 return true;
816 return false;
819 if (TREE_CODE (base) != MEM_REF
820 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
821 || !integer_zerop (TREE_OPERAND (base, 1)))
822 return false;
824 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
826 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
827 index = ipa_get_param_decl_index_1 (descriptors, parm);
829 else
831 /* This branch catches situations where a pointer parameter is not a
832 gimple register, for example:
834 void hip7(S*) (struct S * p)
836 void (*<T2e4>) (struct S *) D.1867;
837 struct S * p.1;
839 <bb 2>:
840 p.1_1 = p;
841 D.1867_2 = p.1_1->f;
842 D.1867_2 ();
843 gdp = &p;
846 gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
847 index = load_from_unmodified_param (descriptors, parms_ainfo, def);
850 if (index >= 0
851 && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
852 stmt, op))
854 *index_p = index;
855 *by_ref_p = true;
856 return true;
858 return false;
861 /* Just like the previous function, just without the param_analysis_info
862 pointer, for users outside of this file. */
864 bool
865 ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
866 tree op, int *index_p, HOST_WIDE_INT *offset_p,
867 bool *by_ref_p)
869 return ipa_load_from_parm_agg_1 (info->descriptors, NULL, stmt, op, index_p,
870 offset_p, by_ref_p);
873 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
874 of an assignment statement STMT, try to determine whether we are actually
875 handling any of the following cases and construct an appropriate jump
876 function into JFUNC if so:
878 1) The passed value is loaded from a formal parameter which is not a gimple
879 register (most probably because it is addressable, the value has to be
880 scalar) and we can guarantee the value has not changed. This case can
881 therefore be described by a simple pass-through jump function. For example:
883 foo (int a)
885 int a.0;
887 a.0_2 = a;
888 bar (a.0_2);
890 2) The passed value can be described by a simple arithmetic pass-through
891 jump function. E.g.
893 foo (int a)
895 int D.2064;
897 D.2064_4 = a.1(D) + 4;
898 bar (D.2064_4);
900 This case can also occur in combination of the previous one, e.g.:
902 foo (int a, int z)
904 int a.0;
905 int D.2064;
907 a.0_3 = a;
908 D.2064_4 = a.0_3 + 4;
909 foo (D.2064_4);
911 3) The passed value is an address of an object within another one (which
912 also passed by reference). Such situations are described by an ancestor
913 jump function and describe situations such as:
915 B::foo() (struct B * const this)
917 struct A * D.1845;
919 D.1845_2 = &this_1(D)->D.1748;
920 A::bar (D.1845_2);
922 INFO is the structure describing individual parameters access different
923 stages of IPA optimizations. PARMS_AINFO contains the information that is
924 only needed for intraprocedural analysis. */
926 static void
927 compute_complex_assign_jump_func (struct ipa_node_params *info,
928 struct param_analysis_info *parms_ainfo,
929 struct ipa_jump_func *jfunc,
930 gimple call, gimple stmt, tree name)
932 HOST_WIDE_INT offset, size, max_size;
933 tree op1, tc_ssa, base, ssa;
934 int index;
936 op1 = gimple_assign_rhs1 (stmt);
938 if (TREE_CODE (op1) == SSA_NAME)
940 if (SSA_NAME_IS_DEFAULT_DEF (op1))
941 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
942 else
943 index = load_from_unmodified_param (info->descriptors, parms_ainfo,
944 SSA_NAME_DEF_STMT (op1));
945 tc_ssa = op1;
947 else
949 index = load_from_unmodified_param (info->descriptors, parms_ainfo, stmt);
950 tc_ssa = gimple_assign_lhs (stmt);
953 if (index >= 0)
955 tree op2 = gimple_assign_rhs2 (stmt);
957 if (op2)
959 if (!is_gimple_ip_invariant (op2)
960 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
961 && !useless_type_conversion_p (TREE_TYPE (name),
962 TREE_TYPE (op1))))
963 return;
965 ipa_set_jf_arith_pass_through (jfunc, index, op2,
966 gimple_assign_rhs_code (stmt));
968 else if (gimple_assign_single_p (stmt)
969 && !detect_type_change_ssa (tc_ssa, call, jfunc))
971 bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
972 call, tc_ssa);
973 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
975 return;
978 if (TREE_CODE (op1) != ADDR_EXPR)
979 return;
980 op1 = TREE_OPERAND (op1, 0);
981 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
982 return;
983 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
984 if (TREE_CODE (base) != MEM_REF
985 /* If this is a varying address, punt. */
986 || max_size == -1
987 || max_size != size)
988 return;
989 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
990 ssa = TREE_OPERAND (base, 0);
991 if (TREE_CODE (ssa) != SSA_NAME
992 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
993 || offset < 0)
994 return;
996 /* Dynamic types are changed only in constructors and destructors and */
997 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
998 if (index >= 0
999 && !detect_type_change (op1, base, call, jfunc, offset))
1000 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index,
1001 parm_ref_data_pass_through_p (&parms_ainfo[index],
1002 call, ssa));
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

     iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  /* Strip the ADDR_EXPR; OBJ is the referenced object itself.  */
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  /* The base of the MEM_REF must be an (unmodified) parameter.  */
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  /* Add the constant offset encoded in the MEM_REF itself.  */
  *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_node_params *info,
                                    struct param_analysis_info *parms_ainfo,
                                    struct ipa_jump_func *jfunc,
                                    gimple call, gimple phi)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  /* The pattern has exactly two incoming values: the address of the
     ancestor sub-object and a null pointer.  */
  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  /* The non-null value must be a computed pointer to a record.  */
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  gcc_assert (index >= 0);

  /* The guarding condition must be a "parm != 0" test.  */
  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  /* Both predecessors of the PHI block must be the two blocks matched
     above, otherwise the CFG shape differs from the pattern.  */
  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
        return;
    }

  if (!detect_type_change (obj, expr, call, jfunc, offset))
    ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index,
                         parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                       call, parm));
}
/* Given OP which is passed as an actual argument to a called function,
   determine if it is possible to construct a KNOWN_TYPE jump function for it
   and if so, create one and store it to JFUNC.  */

static void
compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
                              gimple call)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  /* Only interesting when devirtualizing and only for addresses of
     record objects.  */
  if (!flag_devirtualize
      || TREE_CODE (op) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE)
    return;

  op = TREE_OPERAND (op, 0);
  base = get_ref_base_and_extent (op, &offset, &size, &max_size);
  /* Require a non-global declaration of record type with a known,
     constant extent.  */
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
      || is_global_var (base))
    return;

  /* Bail out if the type has no virtual table info or if its dynamic type
     may change before the call.  */
  if (!TYPE_BINFO (TREE_TYPE (base))
      || detect_type_change (op, base, call, jfunc, offset))
    return;

  ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base), TREE_TYPE (op));
}
1164 /* Inspect the given TYPE and return true iff it has the same structure (the
1165 same number of fields of the same types) as a C++ member pointer. If
1166 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1167 corresponding fields there. */
1169 static bool
1170 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1172 tree fld;
1174 if (TREE_CODE (type) != RECORD_TYPE)
1175 return false;
1177 fld = TYPE_FIELDS (type);
1178 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1179 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1180 || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
1181 return false;
1183 if (method_ptr)
1184 *method_ptr = fld;
1186 fld = DECL_CHAIN (fld);
1187 if (!fld || INTEGRAL_TYPE_P (fld)
1188 || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
1189 return false;
1190 if (delta)
1191 *delta = fld;
1193 if (DECL_CHAIN (fld))
1194 return false;
1196 return true;
1199 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1200 return the rhs of its defining statement. Otherwise return RHS as it
1201 is. */
1203 static inline tree
1204 get_ssa_def_if_simple_copy (tree rhs)
1206 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1208 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1210 if (gimple_assign_single_p (def_stmt))
1211 rhs = gimple_assign_rhs1 (def_stmt);
1212 else
1213 break;
1215 return rhs;
/* Simple linked list, describing known contents of an aggregate before
   a call.  The list is kept sorted by increasing offset.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size (in bits) of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
1231 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1232 in ARG is filled in with constant values. ARG can either be an aggregate
1233 expression or a pointer to an aggregate. JFUNC is the jump function into
1234 which the constants are subsequently stored. */
1236 static void
1237 determine_known_aggregate_parts (gimple call, tree arg,
1238 struct ipa_jump_func *jfunc)
1240 struct ipa_known_agg_contents_list *list = NULL;
1241 int item_count = 0, const_count = 0;
1242 HOST_WIDE_INT arg_offset, arg_size;
1243 gimple_stmt_iterator gsi;
1244 tree arg_base;
1245 bool check_ref, by_ref;
1246 ao_ref r;
1248 /* The function operates in three stages. First, we prepare check_ref, r,
1249 arg_base and arg_offset based on what is actually passed as an actual
1250 argument. */
1252 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1254 by_ref = true;
1255 if (TREE_CODE (arg) == SSA_NAME)
1257 tree type_size;
1258 if (!host_integerp (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg))), 1))
1259 return;
1260 check_ref = true;
1261 arg_base = arg;
1262 arg_offset = 0;
1263 type_size = TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)));
1264 arg_size = tree_low_cst (type_size, 1);
1265 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1267 else if (TREE_CODE (arg) == ADDR_EXPR)
1269 HOST_WIDE_INT arg_max_size;
1271 arg = TREE_OPERAND (arg, 0);
1272 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1273 &arg_max_size);
1274 if (arg_max_size == -1
1275 || arg_max_size != arg_size
1276 || arg_offset < 0)
1277 return;
1278 if (DECL_P (arg_base))
1280 tree size;
1281 check_ref = false;
1282 size = build_int_cst (integer_type_node, arg_size);
1283 ao_ref_init_from_ptr_and_size (&r, arg_base, size);
1285 else
1286 return;
1288 else
1289 return;
1291 else
1293 HOST_WIDE_INT arg_max_size;
1295 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1297 by_ref = false;
1298 check_ref = false;
1299 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1300 &arg_max_size);
1301 if (arg_max_size == -1
1302 || arg_max_size != arg_size
1303 || arg_offset < 0)
1304 return;
1306 ao_ref_init (&r, arg);
1309 /* Second stage walks back the BB, looks at individual statements and as long
1310 as it is confident of how the statements affect contents of the
1311 aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
1312 describing it. */
1313 gsi = gsi_for_stmt (call);
1314 gsi_prev (&gsi);
1315 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1317 struct ipa_known_agg_contents_list *n, **p;
1318 gimple stmt = gsi_stmt (gsi);
1319 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1320 tree lhs, rhs, lhs_base;
1321 bool partial_overlap;
1323 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1324 continue;
1325 if (!gimple_assign_single_p (stmt))
1326 break;
1328 lhs = gimple_assign_lhs (stmt);
1329 rhs = gimple_assign_rhs1 (stmt);
1330 if (!is_gimple_reg_type (rhs)
1331 || TREE_CODE (lhs) == BIT_FIELD_REF
1332 || contains_bitfld_component_ref_p (lhs))
1333 break;
1335 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1336 &lhs_max_size);
1337 if (lhs_max_size == -1
1338 || lhs_max_size != lhs_size
1339 || (lhs_offset < arg_offset
1340 && lhs_offset + lhs_size > arg_offset)
1341 || (lhs_offset < arg_offset + arg_size
1342 && lhs_offset + lhs_size > arg_offset + arg_size))
1343 break;
1345 if (check_ref)
1347 if (TREE_CODE (lhs_base) != MEM_REF
1348 || TREE_OPERAND (lhs_base, 0) != arg_base
1349 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1350 break;
1352 else if (lhs_base != arg_base)
1354 if (DECL_P (lhs_base))
1355 continue;
1356 else
1357 break;
1360 if (lhs_offset + lhs_size < arg_offset
1361 || lhs_offset >= (arg_offset + arg_size))
1362 continue;
1364 partial_overlap = false;
1365 p = &list;
1366 while (*p && (*p)->offset < lhs_offset)
1368 if ((*p)->offset + (*p)->size > lhs_offset)
1370 partial_overlap = true;
1371 break;
1373 p = &(*p)->next;
1375 if (partial_overlap)
1376 break;
1377 if (*p && (*p)->offset < lhs_offset + lhs_size)
1379 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1380 /* We already know this value is subsequently overwritten with
1381 something else. */
1382 continue;
1383 else
1384 /* Otherwise this is a partial overlap which we cannot
1385 represent. */
1386 break;
1389 rhs = get_ssa_def_if_simple_copy (rhs);
1390 n = XALLOCA (struct ipa_known_agg_contents_list);
1391 n->size = lhs_size;
1392 n->offset = lhs_offset;
1393 if (is_gimple_ip_invariant (rhs))
1395 n->constant = rhs;
1396 const_count++;
1398 else
1399 n->constant = NULL_TREE;
1400 n->next = *p;
1401 *p = n;
1403 item_count++;
1404 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1405 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1406 break;
1409 /* Third stage just goes over the list and creates an appropriate vector of
1410 ipa_agg_jf_item structures out of it, of sourse only if there are
1411 any known constants to begin with. */
1413 if (const_count)
1415 jfunc->agg.by_ref = by_ref;
1416 vec_alloc (jfunc->agg.items, const_count);
1417 while (list)
1419 if (list->constant)
1421 struct ipa_agg_jf_item item;
1422 item.offset = list->offset - arg_offset;
1423 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1424 item.value = unshare_expr_without_location (list->constant);
1425 jfunc->agg.items->quick_push (item);
1427 list = list->next;
/* Compute jump functions for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
                                     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gimple call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);

  /* Nothing to do for argument-less calls or if jump functions have
     already been computed for this edge.  */
  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);

      if (is_gimple_ip_invariant (arg))
        ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
               && TREE_CODE (arg) == PARM_DECL)
        {
          int index = ipa_get_param_decl_index (info, arg);

          gcc_assert (index >=0);
          /* Aggregate passed by value, check for pass-through, otherwise we
             will attempt to fill in aggregate contents later in this
             for cycle.  */
          if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
            {
              ipa_set_jf_simple_pass_through (jfunc, index, false);
              continue;
            }
        }
      else if (TREE_CODE (arg) == SSA_NAME)
        {
          if (SSA_NAME_IS_DEFAULT_DEF (arg))
            {
              /* The argument is an unmodified formal parameter.  */
              int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
              if (index >= 0
                  && !detect_type_change_ssa (arg, call, jfunc))
                {
                  bool agg_p;
                  agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                        call, arg);
                  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
                }
            }
          else
            {
              /* Try the more involved pattern matchers on the defining
                 statement.  */
              gimple stmt = SSA_NAME_DEF_STMT (arg);
              if (is_gimple_assign (stmt))
                compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
                                                  call, stmt, arg);
              else if (gimple_code (stmt) == GIMPLE_PHI)
                compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
                                                    call, stmt);
            }
        }
      else
        compute_known_type_jump_func (arg, jfunc, call);

      /* Unless the aggregate contents are provably preserved by a
         pass-through or ancestor jump function, try to record known
         constant parts of the aggregate.  */
      if ((jfunc->type != IPA_JF_PASS_THROUGH
              || !ipa_get_jf_pass_through_agg_preserved (jfunc))
          && (jfunc->type != IPA_JF_ANCESTOR
              || !ipa_get_jf_ancestor_agg_preserved (jfunc))
          && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
              || (POINTER_TYPE_P (TREE_TYPE (arg)))))
        determine_known_aggregate_parts (call, arg, jfunc);
    }
}
/* Compute jump functions for all edges - both direct and indirect - outgoing
   from NODE.  Also count the actual arguments in the process.  */

static void
ipa_compute_jump_functions (struct cgraph_node *node,
                            struct param_analysis_info *parms_ainfo)
{
  struct cgraph_edge *cs;

  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
                                                                  NULL);
      /* We do not need to bother analyzing calls to unknown
         functions unless they may become known during lto/whopr.  */
      if (!callee->symbol.definition && !flag_lto)
        continue;
      ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
}
/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
                                    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  /* The load is either a COMPONENT_REF wrapped around a MEM_REF or a bare
     MEM_REF with a constant offset.  */
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  /* The base must be the address of a PARM_DECL whose type has the shape
     of a C++ member pointer.  */
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  /* For a COMPONENT_REF, the field itself must match; for a bare MEM_REF,
     the constant offset must match the field's byte position.  */
  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
        return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}
1585 /* Returns true iff T is an SSA_NAME defined by a statement. */
1587 static bool
1588 ipa_is_ssa_with_stmt_def (tree t)
1590 if (TREE_CODE (t) == SSA_NAME
1591 && !SSA_NAME_IS_DEFAULT_DEF (t))
1592 return true;
1593 else
1594 return false;
/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
{
  struct cgraph_edge *cs;

  cs = cgraph_edge (node, stmt);
  cs->indirect_info->param_index = param_index;
  /* Reset the remaining fields; callers refine offset/agg_contents etc.
     afterwards where applicable.  */
  cs->indirect_info->offset = 0;
  cs->indirect_info->polymorphic = 0;
  cs->indirect_info->agg_contents = 0;
  return cs;
}
/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

     <bb 2>:
       f$__delta_5 = f.__delta;
       f$__pfn_24 = f.__pfn;

   or

     <bb 2>:
       f$__delta_5 = MEM[(struct  *)&f];
       f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and a few lines below:

     <bb 5>
       D.2496_3 = (int) f$__pfn_24;
       D.2497_4 = D.2496_3 & 1;
       if (D.2497_4 != 0)
         goto <bb 3>;
       else
         goto <bb 4>;

     <bb 6>:
       D.2500_7 = (unsigned int) f$__delta_5;
       D.2501_8 = &S + D.2500_7;
       D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
       D.2503_10 = *D.2502_9;
       D.2504_12 = f$__pfn_24 + -1;
       D.2505_13 = (unsigned int) D.2504_12;
       D.2506_14 = D.2503_10 + D.2505_13;
       D.2507_15 = *D.2506_14;
       iftmp.11_16 = (String:: *) D.2507_15;

     <bb 7>:
       # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
       D.2500_19 = (unsigned int) f$__delta_5;
       D.2508_20 = &S + D.2500_19;
       D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */

static void
ipa_analyze_indirect_call_uses (struct cgraph_node *node,
                                struct ipa_node_params *info,
                                struct param_analysis_info *parms_ainfo,
                                gimple call, tree target)
{
  gimple def;
  tree n1, n2;
  gimple d1, d2;
  tree rec, rec2, cond;
  gimple branch;
  int index;
  basic_block bb, virt_bb, join;
  HOST_WIDE_INT offset;
  bool by_ref;

  /* Simple case: the called pointer is an unmodified formal parameter.  */
  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
        ipa_note_param_call (node, index, call);
      return;
    }

  /* The called pointer may be loaded from an aggregate passed by value or
     reference.  */
  def = SSA_NAME_DEF_STMT (target);
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg_1 (info->descriptors, parms_ainfo, def,
                                   gimple_assign_rhs1 (def), &index, &offset,
                                   &by_ref))
    {
      struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  n1 = PHI_ARG_DEF (def, 0);
  n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  d1 = SSA_NAME_DEF_STMT (n1);
  d2 = SSA_NAME_DEF_STMT (n2);

  join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      /* Both PHI arguments must not be such loads, otherwise the pattern is
         ambiguous.  */
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
        return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  /* The condition must test "x & 1".  */
  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  /* Skip an optional conversion of the tested value.  */
  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
        return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  /* The tested value must be loaded from the same member pointer parameter;
     which field carries the vbit is a target property.  */
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
                                             (TARGET_PTRMEMFUNC_VBIT_LOCATION
                                              == ptrmemfunc_vbit_in_delta),
                                             NULL);
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
    }

  return;
}
/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
   object referenced in the expression is a formal parameter of the caller
   (described by INFO), create a call note for the statement.  */

static void
ipa_analyze_virtual_call_uses (struct cgraph_node *node,
                               struct ipa_node_params *info, gimple call,
                               tree target)
{
  struct cgraph_edge *cs;
  struct cgraph_indirect_call_info *ii;
  struct ipa_jump_func jfunc;
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      /* The object is an unmodified formal parameter itself.  */
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
        return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      if (detect_type_change_ssa (obj, call, &jfunc))
        return;
    }
  else
    {
      /* The object is the address of an ancestor sub-object of a formal
         parameter.  */
      gimple stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
        return;
      index = ipa_get_param_decl_index (info,
                                        SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (obj, expr, call, &jfunc, anc_offset))
        return;
    }

  cs = ipa_note_param_call (node, index, call);
  ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
  ii->otr_type = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (target)));
  ii->polymorphic = 1;
}
/* Analyze a call statement CALL whether and how it utilizes formal parameters
   of the caller (described by INFO).  PARMS_AINFO is a pointer to a vector
   containing intermediate information about each formal parameter.  */

static void
ipa_analyze_call_uses (struct cgraph_node *node,
                       struct ipa_node_params *info,
                       struct param_analysis_info *parms_ainfo, gimple call)
{
  tree target = gimple_call_fn (call);

  /* Nothing to do for calls without a target expression.  */
  if (!target)
    return;
  if (TREE_CODE (target) == SSA_NAME)
    ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
  else if (TREE_CODE (target) == OBJ_TYPE_REF)
    ipa_analyze_virtual_call_uses (node, info, call, target);
}
/* Analyze the call statement STMT with respect to formal parameters (described
   in INFO) of caller given by NODE.  Currently it only checks whether formal
   parameters are called.  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  */

static void
ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
                       struct param_analysis_info *parms_ainfo, gimple stmt)
{
  if (is_gimple_call (stmt))
    ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
}
1898 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
1899 If OP is a parameter declaration, mark it as used in the info structure
1900 passed in DATA. */
1902 static bool
1903 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
1904 tree op, void *data)
1906 struct ipa_node_params *info = (struct ipa_node_params *) data;
1908 op = get_base_address (op);
1909 if (op
1910 && TREE_CODE (op) == PARM_DECL)
1912 int index = ipa_get_param_decl_index (info, op);
1913 gcc_assert (index >= 0);
1914 ipa_set_param_used (info, index, true);
1917 return false;
/* Scan the function body of NODE and inspect the uses of formal parameters.
   Store the findings in various structures of the associated ipa_node_params
   structure, such as parameter flags, notes etc.  PARMS_AINFO is a pointer to a
   vector containing intermediate information about each formal parameter.  */

static void
ipa_analyze_params_uses (struct cgraph_node *node,
                         struct param_analysis_info *parms_ainfo)
{
  tree decl = node->symbol.decl;
  basic_block bb;
  struct function *func;
  gimple_stmt_iterator gsi;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int i;

  if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
    return;

  for (i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      int controlled_uses = 0;

      /* For SSA regs see if parameter is used.  For non-SSA we compute
         the flag during modification analysis.  */
      if (is_gimple_reg (parm))
        {
          tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->symbol.decl),
                                       parm);
          if (ddef && !has_zero_uses (ddef))
            {
              imm_use_iterator imm_iter;
              use_operand_p use_p;

              ipa_set_param_used (info, i, true);
              /* Count call uses; any other use makes the parameter's uses
                 undescribable.  */
              FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
                if (!is_gimple_call (USE_STMT (use_p)))
                  {
                    controlled_uses = IPA_UNDESCRIBED_USE;
                    break;
                  }
                else
                  controlled_uses++;
            }
          else
            controlled_uses = 0;
        }
      else
        controlled_uses = IPA_UNDESCRIBED_USE;
      ipa_set_controlled_uses (info, i, controlled_uses);
    }

  func = DECL_STRUCT_FUNCTION (decl);
  FOR_EACH_BB_FN (bb, func)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);

          if (is_gimple_debug (stmt))
            continue;

          ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
          walk_stmt_load_store_addr_ops (stmt, info,
                                         visit_ref_for_mod_analysis,
                                         visit_ref_for_mod_analysis,
                                         visit_ref_for_mod_analysis);
        }
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
                                       visit_ref_for_mod_analysis,
                                       visit_ref_for_mod_analysis,
                                       visit_ref_for_mod_analysis);
    }

  info->uses_analysis_done = 1;
}
1999 /* Free stuff in PARMS_AINFO, assume there are PARAM_COUNT parameters. */
2001 static void
2002 free_parms_ainfo (struct param_analysis_info *parms_ainfo, int param_count)
2004 int i;
2006 for (i = 0; i < param_count; i++)
2008 if (parms_ainfo[i].parm_visited_statements)
2009 BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
2010 if (parms_ainfo[i].pt_visited_statements)
2011 BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
/* Initialize the array describing properties of formal parameters
   of NODE, analyze their uses and compute jump functions associated
   with actual arguments of calls from within NODE.  */

void
ipa_analyze_node (struct cgraph_node *node)
{
  struct ipa_node_params *info;
  struct param_analysis_info *parms_ainfo;
  int param_count;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = IPA_NODE_REF (node);
  /* Make NODE's function the current one so that CFG/SSA accessors work on
     the right body.  */
  push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
  ipa_initialize_node_params (node);

  param_count = ipa_get_param_count (info);
  parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
  memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);

  ipa_analyze_params_uses (node, parms_ainfo);
  ipa_compute_jump_functions (node, parms_ainfo);

  free_parms_ainfo (parms_ainfo, param_count);
  pop_cfun ();
}
/* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF
   attempt a type-based devirtualization.  If successful, return the
   target function declaration, otherwise return NULL.  */

tree
ipa_intraprocedural_devirtualization (gimple call)
{
  tree binfo, token, fndecl;
  struct ipa_jump_func jfunc;
  tree otr = gimple_call_fn (call);

  /* Try to determine the known type of the object the virtual call is
     invoked on and look up the target in its binfo.  */
  jfunc.type = IPA_JF_UNKNOWN;
  compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
                                call);
  if (jfunc.type != IPA_JF_KNOWN_TYPE)
    return NULL_TREE;
  binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
  if (!binfo)
    return NULL_TREE;
  token = OBJ_TYPE_REF_TOKEN (otr);
  fndecl = gimple_get_virt_method_for_binfo (tree_low_cst (token, 1),
                                             binfo);
  return fndecl;
}
/* Update the jump function DST when the call graph edge corresponding to SRC
   is being inlined, knowing that DST is of type ancestor and src of known
   type.  */

static void
combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
                                     struct ipa_jump_func *dst)
{
  HOST_WIDE_INT combined_offset;
  tree combined_type;

  /* The resulting offset is the known-type offset plus the ancestor
     adjustment; the resulting component type is the ancestor's type.  */
  combined_offset = ipa_get_jf_known_type_offset (src)
    + ipa_get_jf_ancestor_offset (dst);
  combined_type = ipa_get_jf_ancestor_type (dst);

  ipa_set_jf_known_type (dst, combined_offset,
                         ipa_get_jf_known_type_base_type (src),
                         combined_type);
}
/* Update the jump functions associated with call graph edge E when the call
   graph edge CS is being inlined, assuming that E->caller is already (possibly
   indirectly) inlined into CS->callee and that E has not been inlined.

   For each argument of E, the jump function (which was computed relative to
   E->caller's parameters) is re-expressed in terms of the parameters of the
   new root of the inline tree by composing it with the corresponding jump
   function of CS (held in TOP).  Jump functions that cannot be composed are
   degraded to IPA_JF_UNKNOWN.  */

static void
update_jump_functions_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_edge *e)
{
  struct ipa_edge_args *top = IPA_EDGE_REF (cs);
  struct ipa_edge_args *args = IPA_EDGE_REF (e);
  int count = ipa_get_cs_argument_count (args);
  int i;

  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);

      if (dst->type == IPA_JF_ANCESTOR)
	{
	  struct ipa_jump_func *src;
	  int dst_fid = dst->value.ancestor.formal_id;

	  /* Variable number of arguments can cause havoc if we try to access
	     one that does not exist in the inlined edge.  So make sure we
	     don't.  */
	  if (dst_fid >= ipa_get_cs_argument_count (top))
	    {
	      dst->type = IPA_JF_UNKNOWN;
	      continue;
	    }

	  src = ipa_get_ith_jump_func (top, dst_fid);

	  /* Propagate known aggregate contents from the outer edge, but only
	     if the ancestor adjustment preserves them (or the aggregate is
	     passed by value).  Item offsets must be rebased by the ancestor
	     offset.  */
	  if (src->agg.items
	      && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
	    {
	      struct ipa_agg_jf_item *item;
	      int j;

	      /* Currently we do not produce clobber aggregate jump functions,
		 replace with merging when we do.  */
	      gcc_assert (!dst->agg.items);

	      dst->agg.items = vec_safe_copy (src->agg.items);
	      dst->agg.by_ref = src->agg.by_ref;
	      FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
		item->offset -= dst->value.ancestor.offset;
	    }

	  /* Compose the ancestor function with the source one.  */
	  if (src->type == IPA_JF_KNOWN_TYPE)
	    combine_known_type_and_ancestor_jfs (src, dst);
	  else if (src->type == IPA_JF_PASS_THROUGH
		   && src->value.pass_through.operation == NOP_EXPR)
	    {
	      /* Simple pass-through: only the formal id changes; aggregate
		 preservation is the conjunction of both functions'.  */
	      dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
	      dst->value.ancestor.agg_preserved &=
		src->value.pass_through.agg_preserved;
	    }
	  else if (src->type == IPA_JF_ANCESTOR)
	    {
	      /* Two ancestor adjustments compose by adding their offsets.  */
	      dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
	      dst->value.ancestor.offset += src->value.ancestor.offset;
	      dst->value.ancestor.agg_preserved &=
		src->value.ancestor.agg_preserved;
	    }
	  else
	    dst->type = IPA_JF_UNKNOWN;
	}
      else if (dst->type == IPA_JF_PASS_THROUGH)
	{
	  struct ipa_jump_func *src;
	  /* We must check range due to calls with variable number of arguments
	     and we cannot combine jump functions with operations.  */
	  if (dst->value.pass_through.operation == NOP_EXPR
	      && (dst->value.pass_through.formal_id
		  < ipa_get_cs_argument_count (top)))
	    {
	      bool agg_p;
	      int dst_fid = dst->value.pass_through.formal_id;
	      src = ipa_get_ith_jump_func (top, dst_fid);
	      /* Remember DST's flag before DST is overwritten below.  */
	      agg_p = dst->value.pass_through.agg_preserved;

	      /* A simple pass-through composed with anything is just a copy
		 of the source function.  */
	      dst->type = src->type;
	      dst->value = src->value;

	      if (src->agg.items
		  && (agg_p || !src->agg.by_ref))
		{
		  /* Currently we do not produce clobber aggregate jump
		     functions, replace with merging when we do.  */
		  gcc_assert (!dst->agg.items);

		  dst->agg.by_ref = src->agg.by_ref;
		  dst->agg.items = vec_safe_copy (src->agg.items);
		}

	      /* If DST did not preserve aggregate contents, neither can the
		 composition.  */
	      if (!agg_p)
		{
		  if (dst->type == IPA_JF_PASS_THROUGH)
		    dst->value.pass_through.agg_preserved = false;
		  else if (dst->type == IPA_JF_ANCESTOR)
		    dst->value.ancestor.agg_preserved = false;
		}
	    }
	  else
	    dst->type = IPA_JF_UNKNOWN;
	}
    }
}
/* If TARGET is an addr_expr of a function declaration, make it the destination
   of an indirect edge IE and return the edge.  Otherwise, return NULL.

   Side effects: adjusts IE's inline summary (a direct call is cheaper than an
   indirect one), may create a new cgraph node (for a newly discovered public
   target, or for __builtin_unreachable when TARGET turns out not to be a
   function at all), and dumps what happened if dumping is enabled.  */

struct cgraph_edge *
ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
{
  struct cgraph_node *callee;
  struct inline_edge_summary *es = inline_edge_summary (ie);
  bool unreachable = false;

  if (TREE_CODE (target) == ADDR_EXPR)
    target = TREE_OPERAND (target, 0);
  if (TREE_CODE (target) != FUNCTION_DECL)
    {
      target = canonicalize_constructor_val (target, NULL);
      if (!target || TREE_CODE (target) != FUNCTION_DECL)
	{
	  /* The propagated value is not a function: the call can never be
	     executed, so redirect it to __builtin_unreachable.  */
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered direct call to non-function"
		     " in %s/%i, making it unreachable.\n",
		     cgraph_node_name (ie->caller), ie->caller->symbol.order);
	  target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
	  callee = cgraph_get_create_node (target);
	  unreachable = true;
	}
      else
	callee = cgraph_get_node (target);
    }
  else
    callee = cgraph_get_node (target);

  /* Because may-edges are not explicitly represented and vtable may be external,
     we may create the first reference to the object in the unit.  */
  if (!callee || callee->global.inlined_to)
    {
      /* We are better to ensure we can refer to it.
	 In the case of static functions we are out of luck, since we already
	 removed its body.  In the case of public functions we may or may
	 not introduce the reference.  */
      if (!canonicalize_constructor_val (target, NULL)
	  || !TREE_PUBLIC (target))
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a known target "
		     "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
		     xstrdup (cgraph_node_name (ie->caller)),
		     ie->caller->symbol.order,
		     xstrdup (cgraph_node_name (ie->callee)),
		     ie->callee->symbol.order);
	  return NULL;
	}
      callee = cgraph_get_create_real_symbol_node (target);
    }
  ipa_check_create_node_params ();

  /* We can not make edges to inline clones.  It is bug that someone removed
     the cgraph node too early.  */
  gcc_assert (!callee->global.inlined_to);

  cgraph_make_edge_direct (ie, callee);
  /* Re-fetch the summary; cgraph_make_edge_direct may have changed it.  */
  es = inline_edge_summary (ie);
  es->call_stmt_size -= (eni_size_weights.indirect_call_cost
			 - eni_size_weights.call_cost);
  es->call_stmt_time -= (eni_time_weights.indirect_call_cost
			 - eni_time_weights.call_cost);
  if (dump_file && !unreachable)
    {
      fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
	       "(%s/%i -> %s/%i), for stmt ",
	       ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
	       xstrdup (cgraph_node_name (ie->caller)),
	       ie->caller->symbol.order,
	       xstrdup (cgraph_node_name (ie->callee)),
	       ie->callee->symbol.order);
      if (ie->call_stmt)
	print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
      else
	fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
    }
  callee = cgraph_function_or_thunk_node (callee, NULL);

  return ie;
}
/* Retrieve value from aggregate jump function AGG for the given OFFSET or
   return NULL if there is not any.  BY_REF specifies whether the value has to
   be passed by reference or by value; a mismatch with AGG's own by_ref flag
   means the lookup cannot be used and NULL is returned.  */

tree
ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
			    HOST_WIDE_INT offset, bool by_ref)
{
  struct ipa_agg_jf_item *item;
  int i;

  if (by_ref != agg->by_ref)
    return NULL;

  /* Linear scan; item offsets must match exactly.  */
  FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
    if (item->offset == offset)
      {
	/* Currently we do not have clobber values, return NULL for them once
	   we do.  */
	gcc_checking_assert (is_gimple_ip_invariant (item->value));
	return item->value;
      }
  return NULL;
}
/* Remove a reference to SYMBOL from the list of references of a node given by
   reference description RDESC.  The reference is looked up on the caller of
   the edge recorded in RDESC and must exist (asserted).  */

static void
remove_described_reference (symtab_node symbol, struct ipa_cst_ref_desc *rdesc)
{
  struct ipa_ref *to_del;
  struct cgraph_edge *origin;

  origin = rdesc->cs;
  to_del = ipa_find_reference ((symtab_node) origin->caller, symbol,
			       origin->call_stmt);
  gcc_assert (to_del);
  ipa_remove_reference (to_del);
  if (dump_file)
    fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
	     xstrdup (cgraph_node_name (origin->caller)),
	     origin->caller->symbol.order, xstrdup (symtab_node_name (symbol)));
}
2328 /* If JFUNC has a reference description with refcount different from
2329 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2330 NULL. JFUNC must be a constant jump function. */
2332 static struct ipa_cst_ref_desc *
2333 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2335 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2336 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2337 return rdesc;
2338 else
2339 return NULL;
/* Try to find a destination for indirect edge IE that corresponds to a simple
   call or a call of a member function pointer and where the destination is a
   pointer formal parameter described by jump function JFUNC.  If it can be
   determined, return the newly direct edge, otherwise return NULL.
   NEW_ROOT_INFO is the node info that JFUNC lattices are relative to.

   When the discovered target came from a tracked constant jump function, the
   corresponding reference count is decremented and the described reference
   removed once it drops to zero.  */

static struct cgraph_edge *
try_make_edge_direct_simple_call (struct cgraph_edge *ie,
				  struct ipa_jump_func *jfunc,
				  struct ipa_node_params *new_root_info)
{
  struct ipa_cst_ref_desc *rdesc;
  struct cgraph_edge *cs;
  tree target;

  /* Member-pointer/aggregate-passed targets are looked up in the aggregate
     part of the jump function, plain targets in its scalar value.  */
  if (ie->indirect_info->agg_contents)
    target = ipa_find_agg_cst_for_param (&jfunc->agg,
					 ie->indirect_info->offset,
					 ie->indirect_info->by_ref);
  else
    target = ipa_value_from_jfunc (new_root_info, jfunc);
  if (!target)
    return NULL;
  cs = ipa_make_edge_direct_to_target (ie, target);

  /* One tracked use of the constant has been turned into a direct call;
     drop the reference when no described uses remain.  */
  if (cs && !ie->indirect_info->agg_contents
      && jfunc->type == IPA_JF_CONST
      && (rdesc = jfunc_rdesc_usable (jfunc))
      && --rdesc->refcount == 0)
    remove_described_reference ((symtab_node) cs->callee, rdesc);

  return cs;
}
/* Try to find a destination for indirect edge IE that corresponds to a virtual
   call based on a formal parameter which is described by jump function JFUNC
   and if it can be determined, make it direct and return the direct edge.
   Otherwise, return NULL.  NEW_ROOT_INFO is the node info that JFUNC lattices
   are relative to.  */

static struct cgraph_edge *
try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
				   struct ipa_jump_func *jfunc,
				   struct ipa_node_params *new_root_info)
{
  tree binfo, target;

  binfo = ipa_value_from_jfunc (new_root_info, jfunc);

  if (!binfo)
    return NULL;

  /* The jump function may yield a constant (e.g. a vtable address) rather
     than a BINFO directly; try to recover the BINFO from it.  */
  if (TREE_CODE (binfo) != TREE_BINFO)
    {
      binfo = gimple_extract_devirt_binfo_from_cst (binfo);
      if (!binfo)
	return NULL;
    }

  /* Descend to the sub-object the indirect call is performed on and look up
     the method with the call's OBJ_TYPE_REF token there.  */
  binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
			       ie->indirect_info->otr_type);
  if (binfo)
    target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
					       binfo);
  else
    return NULL;

  if (target)
    return ipa_make_edge_direct_to_target (ie, target);
  else
    return NULL;
}
/* Update the param called notes associated with NODE when CS is being inlined,
   assuming NODE is (potentially indirectly) inlined into CS->callee.
   Moreover, if the callee is discovered to be constant, create a new cgraph
   edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
   unless NEW_EDGES is NULL.  Return true iff a new edge(s) were created.  */

static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_node *node,
				      vec<cgraph_edge_p> *new_edges)
{
  struct ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  struct ipa_node_params *new_root_info;
  bool res = false;

  ipa_check_create_edge_args ();
  top = IPA_EDGE_REF (cs);
  /* Lattices of the new root of the inline tree, which JFUNCs of CS refer
     to.  */
  new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
				? cs->caller->global.inlined_to
				: cs->caller);

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      struct cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;
      int param_index;

      /* IE may be removed/redirected below, so grab the successor first.  */
      next_ie = ie->next_callee;

      if (ici->param_index == -1)
	continue;

      /* We must check range due to calls with variable number of arguments:  */
      if (ici->param_index >= ipa_get_cs_argument_count (top))
	{
	  ici->param_index = -1;
	  continue;
	}

      param_index = ici->param_index;
      jfunc = ipa_get_ith_jump_func (top, param_index);

      if (!flag_indirect_inlining)
	new_direct_edge = NULL;
      else if (ici->polymorphic)
	new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
							     new_root_info);
      else
	new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
							    new_root_info);
      if (new_direct_edge)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  if (new_direct_edge->call_stmt)
	    new_direct_edge->call_stmt_cannot_inline_p
	      = !gimple_check_call_matching_types (
		  new_direct_edge->call_stmt,
		  new_direct_edge->callee->symbol.decl, false);
	  if (new_edges)
	    {
	      new_edges->safe_push (new_direct_edge);
	      /* Re-fetch TOP, pushing may have reallocated the vector it
		 lives in.  */
	      top = IPA_EDGE_REF (cs);
	      res = true;
	    }
	}
      else if (jfunc->type == IPA_JF_PASS_THROUGH
	       && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
	{
	  /* Re-point the note at the corresponding parameter of the new
	     root, unless aggregate contents the call depends on are not
	     preserved.  */
	  if (ici->agg_contents
	      && !ipa_get_jf_pass_through_agg_preserved (jfunc))
	    ici->param_index = -1;
	  else
	    ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
	}
      else if (jfunc->type == IPA_JF_ANCESTOR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_ancestor_agg_preserved (jfunc))
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
	      ici->offset += ipa_get_jf_ancestor_offset (jfunc);
	    }
	}
      else
	/* Either we can find a destination for this edge now or never.  */
	ici->param_index = -1;
    }

  return res;
}
/* Recursively traverse subtree of NODE (including node) made of inlined
   cgraph_edges when CS has been inlined and invoke
   update_indirect_edges_after_inlining on all nodes and
   update_jump_functions_after_inlining on all non-inlined edges that lead out
   of this subtree.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

static bool
propagate_info_to_inlined_callees (struct cgraph_edge *cs,
				   struct cgraph_node *node,
				   vec<cgraph_edge_p> *new_edges)
{
  struct cgraph_edge *e;
  bool res;

  res = update_indirect_edges_after_inlining (cs, node, new_edges);

  /* Recurse into edges inlined into NODE; update the jump functions of all
     other outgoing edges (including all indirect ones).  */
  for (e = node->callees; e; e = e->next_callee)
    if (!e->inline_failed)
      res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
    else
      update_jump_functions_after_inlining (cs, e);
  for (e = node->indirect_calls; e; e = e->next_callee)
    update_jump_functions_after_inlining (cs, e);

  return res;
}
2538 /* Combine two controlled uses counts as done during inlining. */
2540 static int
2541 combine_controlled_uses_counters (int c, int d)
2543 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
2544 return IPA_UNDESCRIBED_USE;
2545 else
2546 return c + d - 1;
/* Propagate number of controlled users from CS->callee to the new root of the
   tree of inlined nodes.  When a combined count drops to zero, the
   corresponding cloning-created references to the (now fully accounted-for)
   function are removed.  */

static void
propagate_controlled_uses (struct cgraph_edge *cs)
{
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  struct cgraph_node *new_root = cs->caller->global.inlined_to
    ? cs->caller->global.inlined_to : cs->caller;
  struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
  struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
  int count, i;

  /* Only parameters that have a matching argument can be combined; surplus
     arguments are handled in the loop after this one.  */
  count = MIN (ipa_get_cs_argument_count (args),
	       ipa_get_param_count (old_root_info));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
      struct ipa_cst_ref_desc *rdesc;

      if (jf->type == IPA_JF_PASS_THROUGH)
	{
	  int src_idx, c, d;
	  src_idx = ipa_get_jf_pass_through_formal_id (jf);
	  c = ipa_get_controlled_uses (new_root_info, src_idx);
	  d = ipa_get_controlled_uses (old_root_info, i);

	  gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
			       == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
	  c = combine_controlled_uses_counters (c, d);
	  ipa_set_controlled_uses (new_root_info, src_idx, c);
	  /* In an IPA-CP clone, a known constant function address with no
	     remaining controlled uses means the cloning-created reference
	     is now dead and can be removed.  */
	  if (c == 0 && new_root_info->ipcp_orig_node)
	    {
	      struct cgraph_node *n;
	      struct ipa_ref *ref;
	      tree t = new_root_info->known_vals[src_idx];

	      if (t && TREE_CODE (t) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
		  && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
		  && (ref = ipa_find_reference ((symtab_node) new_root,
						(symtab_node) n, NULL)))
		{
		  if (dump_file)
		    fprintf (dump_file, "ipa-prop: Removing cloning-created "
			     "reference from %s/%i to %s/%i.\n",
			     xstrdup (cgraph_node_name (new_root)),
			     new_root->symbol.order,
			     xstrdup (cgraph_node_name (n)), n->symbol.order);
		  ipa_remove_reference (ref);
		}
	    }
	}
      else if (jf->type == IPA_JF_CONST
	       && (rdesc = jfunc_rdesc_usable (jf)))
	{
	  int d = ipa_get_controlled_uses (old_root_info, i);
	  int c = rdesc->refcount;
	  rdesc->refcount = combine_controlled_uses_counters (c, d);
	  if (rdesc->refcount == 0)
	    {
	      tree cst = ipa_get_jf_constant (jf);
	      struct cgraph_node *n;
	      gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
				   && TREE_CODE (TREE_OPERAND (cst, 0))
				   == FUNCTION_DECL);
	      n = cgraph_get_node (TREE_OPERAND (cst, 0));
	      if (n)
		{
		  struct cgraph_node *clone;
		  remove_described_reference ((symtab_node) n, rdesc);

		  /* Walk up the chain of IPA-CP-created inline clones and
		     drop their cloning-created references to N as well,
		     until we reach the clone holding the described
		     reference.  */
		  clone = cs->caller;
		  while (clone->global.inlined_to
			 && clone != rdesc->cs->caller
			 && IPA_NODE_REF (clone)->ipcp_orig_node)
		    {
		      struct ipa_ref *ref;
		      ref = ipa_find_reference ((symtab_node) clone,
						(symtab_node) n, NULL);
		      if (ref)
			{
			  if (dump_file)
			    fprintf (dump_file, "ipa-prop: Removing "
				     "cloning-created reference "
				     "from %s/%i to %s/%i.\n",
				     xstrdup (cgraph_node_name (clone)),
				     clone->symbol.order,
				     xstrdup (cgraph_node_name (n)),
				     n->symbol.order);
			  ipa_remove_reference (ref);
			}
		      clone = clone->callers->caller;
		    }
		}
	    }
	}
    }

  /* Arguments beyond the callee's parameter count have no matching
     parameter; mark their uses as undescribed.  */
  for (i = ipa_get_param_count (old_root_info);
       i < ipa_get_cs_argument_count (args);
       i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);

      if (jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
	  if (rdesc)
	    rdesc->refcount = IPA_UNDESCRIBED_USE;
	}
      else if (jf->type == IPA_JF_PASS_THROUGH)
	ipa_set_controlled_uses (new_root_info,
				 jf->value.pass_through.formal_id,
				 IPA_UNDESCRIBED_USE);
    }
}
/* Update jump functions and call note functions on inlining the call site CS.
   CS is expected to lead to a node already cloned by
   cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

bool
ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
				   vec<cgraph_edge_p> *new_edges)
{
  bool changed;
  /* Do nothing if the preparation phase has not been carried out yet
     (i.e. during early inlining).  */
  if (!ipa_node_params_vector.exists ())
    return false;
  gcc_assert (ipa_edge_args_vector);

  propagate_controlled_uses (cs);
  changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);

  /* We do not keep jump functions of inlined edges up to date.  Better to free
     them so we do not access them accidentally.  */
  ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
  return changed;
}
/* Frees all dynamically allocated structures that the argument info points
   to and zeroes the structure itself, leaving it in a reusable state.  */

void
ipa_free_edge_args_substructures (struct ipa_edge_args *args)
{
  vec_free (args->jump_functions);
  memset (args, 0, sizeof (*args));
}
/* Free all ipa_edge structures, including the vector that holds them.
   Safe to call when the vector was never allocated.  */

void
ipa_free_all_edge_args (void)
{
  int i;
  struct ipa_edge_args *args;

  if (!ipa_edge_args_vector)
    return;

  FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
    ipa_free_edge_args_substructures (args);

  vec_free (ipa_edge_args_vector);
}
/* Frees all dynamically allocated structures that the param info points
   to and zeroes the structure itself.  */

void
ipa_free_node_params_substructures (struct ipa_node_params *info)
{
  info->descriptors.release ();
  free (info->lattices);
  /* Lattice values and their sources are deallocated with their allocation
     pool.  */
  info->known_vals.release ();
  memset (info, 0, sizeof (*info));
}
/* Free all ipa_node_params structures, including the vector holding them.  */

void
ipa_free_all_node_params (void)
{
  int i;
  struct ipa_node_params *info;

  FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
    ipa_free_node_params_substructures (info);

  ipa_node_params_vector.release ();
}
/* Set the aggregate replacements of NODE to be AGGVALS.  The replacement
   vector is indexed by cgraph node uid and grown lazily here.  */

void
ipa_set_node_agg_value_chain (struct cgraph_node *node,
			      struct ipa_agg_replacement_value *aggvals)
{
  /* Grow to cover all current uids; cgraph_max_uid bounds node->uid.  */
  if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
    vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);

  (*ipa_node_agg_replacements)[node->uid] = aggvals;
}
/* Hook that is called by cgraph.c when an edge is removed.  Frees the jump
   functions associated with the edge, if any were ever computed.  */

static void
ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
{
  /* During IPA-CP updating we can be called on not-yet analyzed clones.  */
  if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
    return;
  ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
}
/* Hook that is called by cgraph.c when a node is removed.  Frees its
   parameter info and drops its aggregate replacements, if present.  */

static void
ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  /* During IPA-CP updating we can be called on not-yet analyzed clones.  */
  if (ipa_node_params_vector.length () > (unsigned)node->uid)
    ipa_free_node_params_substructures (IPA_NODE_REF (node));
  if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
    (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
}
/* Hook that is called by cgraph.c when an edge is duplicated.  Copies the
   jump functions from SRC to DST, including the per-argument aggregate item
   vectors, and duplicates or re-links the reference descriptors of constant
   jump functions so refcounting remains correct per inline-clone tree.  */

static void
ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
			   __attribute__((unused)) void *data)
{
  struct ipa_edge_args *old_args, *new_args;
  unsigned int i;

  ipa_check_create_edge_args ();

  old_args = IPA_EDGE_REF (src);
  new_args = IPA_EDGE_REF (dst);

  new_args->jump_functions = vec_safe_copy (old_args->jump_functions);

  for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
    {
      struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
      struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);

      /* The shallow copy above shared the aggregate item vector; give DST
	 its own copy.  */
      dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);

      if (src_jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);

	  if (!src_rdesc)
	    dst_jf->value.constant.rdesc = NULL;
	  else if (src_rdesc->cs == src)
	    {
	      /* SRC owns the descriptor: allocate a fresh one for DST and
		 chain it into SRC's duplicate list.  */
	      struct ipa_cst_ref_desc *dst_rdesc;
	      gcc_checking_assert (ipa_refdesc_pool);
	      dst_rdesc
		= (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
	      dst_rdesc->cs = dst;
	      dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
	      src_rdesc->next_duplicate = dst_rdesc;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else
	    {
	      struct ipa_cst_ref_desc *dst_rdesc;
	      /* This can happen during inlining, when a JFUNC can refer to a
		 reference taken in a function up in the tree of inline clones.
		 We need to find the duplicate that refers to our tree of
		 inline clones.  */

	      gcc_assert (dst->caller->global.inlined_to);
	      for (dst_rdesc = src_rdesc->next_duplicate;
		   dst_rdesc;
		   dst_rdesc = dst_rdesc->next_duplicate)
		{
		  gcc_assert (dst_rdesc->cs->caller->global.inlined_to);
		  if (dst_rdesc->cs->caller->global.inlined_to
		      == dst->caller->global.inlined_to)
		    break;
		}

	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	}
    }
}
/* Hook that is called by cgraph.c when a node is duplicated.  Copies the
   parameter descriptors and analysis flags to the new node (lattices are not
   copied) and deep-copies the chain of aggregate replacement values.  */

static void
ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
			   ATTRIBUTE_UNUSED void *data)
{
  struct ipa_node_params *old_info, *new_info;
  struct ipa_agg_replacement_value *old_av, *new_av;

  ipa_check_create_node_params ();
  old_info = IPA_NODE_REF (src);
  new_info = IPA_NODE_REF (dst);

  new_info->descriptors = old_info->descriptors.copy ();
  new_info->lattices = NULL;
  new_info->ipcp_orig_node = old_info->ipcp_orig_node;

  new_info->uses_analysis_done = old_info->uses_analysis_done;
  new_info->node_enqueued = old_info->node_enqueued;

  old_av = ipa_get_agg_replacements_for_node (src);
  if (!old_av)
    return;

  /* Deep-copy the replacement chain; note the copy ends up in reverse
     order of the original list.  */
  new_av = NULL;
  while (old_av)
    {
      struct ipa_agg_replacement_value *v;

      v = ggc_alloc_ipa_agg_replacement_value ();
      memcpy (v, old_av, sizeof (*v));
      v->next = new_av;
      new_av = v;
      old_av = old_av->next;
    }
  ipa_set_node_agg_value_chain (dst, new_av);
}
/* Analyze newly added function into callgraph (cgraph function insertion
   hook).  */

static void
ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  ipa_analyze_node (node);
}
/* Register our cgraph hooks if they are not already there.  Note the
   function insertion hook is registered unconditionally, unlike the
   others.  */

void
ipa_register_cgraph_hooks (void)
{
  if (!edge_removal_hook_holder)
    edge_removal_hook_holder =
      cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
  if (!node_removal_hook_holder)
    node_removal_hook_holder =
      cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
  if (!edge_duplication_hook_holder)
    edge_duplication_hook_holder =
      cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
  if (!node_duplication_hook_holder)
    node_duplication_hook_holder =
      cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
  function_insertion_hook_holder =
    cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
}
/* Unregister our cgraph hooks and clear the holders so that
   ipa_register_cgraph_hooks can re-register them later.  */

static void
ipa_unregister_cgraph_hooks (void)
{
  cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
  edge_removal_hook_holder = NULL;
  cgraph_remove_node_removal_hook (node_removal_hook_holder);
  node_removal_hook_holder = NULL;
  cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
  edge_duplication_hook_holder = NULL;
  cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
  node_duplication_hook_holder = NULL;
  cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
  function_insertion_hook_holder = NULL;
}
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after ipa-cp.  When optimizing, the data is kept alive for
   indirect inlining and released later by
   ipa_free_all_structures_after_iinln.  */

void
ipa_free_all_structures_after_ipa_cp (void)
{
  if (!optimize)
    {
      ipa_free_all_edge_args ();
      ipa_free_all_node_params ();
      free_alloc_pool (ipcp_sources_pool);
      free_alloc_pool (ipcp_values_pool);
      free_alloc_pool (ipcp_agg_lattice_pool);
      ipa_unregister_cgraph_hooks ();
      if (ipa_refdesc_pool)
	free_alloc_pool (ipa_refdesc_pool);
    }
}
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after indirect inlining.  Unlike the post-ipa-cp variant,
   the ipcp pools may or may not exist here, hence the NULL checks.  */

void
ipa_free_all_structures_after_iinln (void)
{
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
  if (ipcp_sources_pool)
    free_alloc_pool (ipcp_sources_pool);
  if (ipcp_values_pool)
    free_alloc_pool (ipcp_values_pool);
  if (ipcp_agg_lattice_pool)
    free_alloc_pool (ipcp_agg_lattice_pool);
  if (ipa_refdesc_pool)
    free_alloc_pool (ipa_refdesc_pool);
}
/* Print parameter descriptors (name, usedness, controlled uses) of function
   NODE to file F.  Does nothing for nodes without a definition.  */

void
ipa_print_node_params (FILE *f, struct cgraph_node *node)
{
  int i, count;
  tree temp;
  struct ipa_node_params *info;

  if (!node->symbol.definition)
    return;
  info = IPA_NODE_REF (node);
  fprintf (f, " function %s/%i parameter descriptors:\n",
	   cgraph_node_name (node), node->symbol.order);
  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    {
      int c;

      temp = ipa_get_param (info, i);
      if (TREE_CODE (temp) == PARM_DECL)
	fprintf (f, " param %d : %s", i,
		 (DECL_NAME (temp)
		  ? (*lang_hooks.decl_printable_name) (temp, 2)
		  : "(unnamed)"));
      if (ipa_is_param_used (info, i))
	fprintf (f, " used");
      c = ipa_get_controlled_uses (info, i);
      if (c == IPA_UNDESCRIBED_USE)
	fprintf (f, " undescribed_use");
      else
	fprintf (f, " controlled_uses=%i", c);
      fprintf (f, "\n");
    }
}
/* Print ipa_tree_map data structures of all functions in the
   callgraph to F.  */

void
ipa_print_all_params (FILE * f)
{
  struct cgraph_node *node;

  fprintf (f, "\nFunction parameters:\n");
  FOR_EACH_FUNCTION (node)
    ipa_print_node_params (f, node);
}
/* Return a heap allocated vector containing formal parameters of FNDECL.
   The caller is responsible for releasing the vector.  */

vec<tree>
ipa_get_vector_of_formal_parms (tree fndecl)
{
  vec<tree> args;
  int count;
  tree parm;

  count = count_formal_params (fndecl);
  args.create (count);
  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    args.quick_push (parm);

  return args;
}
/* Return a heap allocated vector containing types of formal parameters of
   function type FNTYPE.  The caller is responsible for releasing the
   vector.  */

static inline vec<tree>
get_vector_of_formal_parm_types (tree fntype)
{
  vec<tree> types;
  int count = 0;
  tree t;

  /* First pass counts the parameters so the vector can be sized exactly.  */
  for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
    count++;

  types.create (count);
  for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
    types.quick_push (TREE_VALUE (t));

  return types;
}
/* Modify the function declaration FNDECL and its type according to the plan in
   ADJUSTMENTS.  It also sets base fields of individual adjustments structures
   to reflect the actual parameters being modified which are determined by the
   base_index field.  SYNTH_PARM_PREFIX (defaulting to "SYNTH") names newly
   synthesized parameters.  */

void
ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
			      const char *synth_parm_prefix)
{
  vec<tree> oparms, otypes;
  tree orig_type, new_type = NULL;
  tree old_arg_types, t, new_arg_types = NULL;
  tree parm, *link = &DECL_ARGUMENTS (fndecl);
  int i, len = adjustments.length ();
  tree new_reversed = NULL;
  bool care_for_types, last_parm_void;

  if (!synth_parm_prefix)
    synth_parm_prefix = "SYNTH";

  oparms = ipa_get_vector_of_formal_parms (fndecl);
  orig_type = TREE_TYPE (fndecl);
  old_arg_types = TYPE_ARG_TYPES (orig_type);

  /* The following test is an ugly hack, some functions simply don't have any
     arguments in their type.  This is probably a bug but well...  */
  care_for_types = (old_arg_types != NULL_TREE);
  if (care_for_types)
    {
      last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
			== void_type_node);
      otypes = get_vector_of_formal_parm_types (orig_type);
      if (last_parm_void)
	gcc_assert (oparms.length () + 1 == otypes.length ());
      else
	gcc_assert (oparms.length () == otypes.length ());
    }
  else
    {
      last_parm_void = false;
      otypes.create (0);
    }

  /* Rebuild the DECL_ARGUMENTS chain and (in parallel) the new argument
     type list, according to each adjustment.  */
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      gcc_assert (link);

      adj = &adjustments[i];
      parm = oparms[adj->base_index];
      adj->base = parm;

      if (adj->copy_param)
	{
	  /* Keep the original parameter as-is.  */
	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
				       new_arg_types);
	  *link = parm;
	  link = &DECL_CHAIN (parm);
	}
      else if (!adj->remove_param)
	{
	  /* Synthesize a brand new artificial parameter.  */
	  tree new_parm;
	  tree ptype;

	  if (adj->by_ref)
	    ptype = build_pointer_type (adj->type);
	  else
	    ptype = adj->type;

	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);

	  new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
				 ptype);
	  DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);

	  DECL_ARTIFICIAL (new_parm) = 1;
	  DECL_ARG_TYPE (new_parm) = ptype;
	  DECL_CONTEXT (new_parm) = fndecl;
	  TREE_USED (new_parm) = 1;
	  DECL_IGNORED_P (new_parm) = 1;
	  layout_decl (new_parm, 0);

	  adj->base = parm;
	  adj->reduction = new_parm;

	  *link = new_parm;

	  link = &DECL_CHAIN (new_parm);
	}
      /* adj->remove_param: drop the parameter entirely (nothing chained).  */
    }

  *link = NULL_TREE;

  if (care_for_types)
    {
      /* new_arg_types was built in reverse; restore order and re-terminate
	 with void if the original list was void-terminated.  */
      new_reversed = nreverse (new_arg_types);
      if (last_parm_void)
	{
	  if (new_reversed)
	    TREE_CHAIN (new_arg_types) = void_list_node;
	  else
	    new_reversed = void_list_node;
	}
    }

  /* Use copy_node to preserve as much as possible from original type
     (debug info, attribute lists etc.)
     Exception is METHOD_TYPEs must have THIS argument.
     When we are asked to remove it, we need to build new FUNCTION_TYPE
     instead.  */
  if (TREE_CODE (orig_type) != METHOD_TYPE
      || (adjustments[0].copy_param
	  && adjustments[0].base_index == 0))
    {
      new_type = build_distinct_type_copy (orig_type);
      TYPE_ARG_TYPES (new_type) = new_reversed;
    }
  else
    {
      new_type
	= build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
							 new_reversed));
      TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
      DECL_VINDEX (fndecl) = NULL_TREE;
    }

  /* When signature changes, we need to clear builtin info.  */
  if (DECL_BUILT_IN (fndecl))
    {
      DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
      DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
    }

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (orig_type);
  if (orig_type != t)
    {
      TYPE_MAIN_VARIANT (new_type) = t;
      TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_type;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_type) = new_type;
      TYPE_NEXT_VARIANT (new_type) = NULL;
    }

  TREE_TYPE (fndecl) = new_type;
  DECL_VIRTUAL_P (fndecl) = 0;
  otypes.release ();
  oparms.release ();
}
3215 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
3216 If this is a directly recursive call, CS must be NULL. Otherwise it must
3217 contain the corresponding call graph edge. */
/* The new argument list is built from ADJUSTMENTS: a copied parameter keeps
   its original actual argument, a removed parameter contributes nothing, and
   a split/converted parameter is rebuilt as a MEM_REF (or its address, when
   passed by reference) into the original actual argument.  The original call
   statement is then replaced wholesale by a new call.  */
3219 void
3220 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
3221 ipa_parm_adjustment_vec adjustments)
3223 struct cgraph_node *current_node = cgraph_get_node (current_function_decl);
3224 vec<tree> vargs;
3225 vec<tree, va_gc> **debug_args = NULL;
3226 gimple new_stmt;
3227 gimple_stmt_iterator gsi, prev_gsi;
3228 tree callee_decl;
3229 int i, len;
3231 len = adjustments.length ();
3232 vargs.create (len);
3233 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->symbol.decl;
/* Drop reference-list entries for the statement we are about to replace;
   references for the replacement are re-recorded at the end.  */
3234 ipa_remove_stmt_references ((symtab_node) current_node, stmt);
/* Remember the statement just before STMT so that, after replacement, we can
   walk every statement newly inserted in between and record its references.  */
3236 gsi = gsi_for_stmt (stmt);
3237 prev_gsi = gsi;
3238 gsi_prev (&prev_gsi);
/* Build the new argument vector.  */
3239 for (i = 0; i < len; i++)
3241 struct ipa_parm_adjustment *adj;
3243 adj = &adjustments[i];
3245 if (adj->copy_param)
3247 tree arg = gimple_call_arg (stmt, adj->base_index);
3249 vargs.quick_push (arg);
3251 else if (!adj->remove_param)
3253 tree expr, base, off;
3254 location_t loc;
3255 unsigned int deref_align;
3256 bool deref_base = false;
3258 /* We create a new parameter out of the value of the old one, we can
3259 do the following kind of transformations:
3261 - A scalar passed by reference is converted to a scalar passed by
3262 value. (adj->by_ref is false and the type of the original
3263 actual argument is a pointer to a scalar).
3265 - A part of an aggregate is passed instead of the whole aggregate.
3266 The part can be passed either by value or by reference, this is
3267 determined by value of adj->by_ref. Moreover, the code below
3268 handles both situations when the original aggregate is passed by
3269 value (its type is not a pointer) and when it is passed by
3270 reference (it is a pointer to an aggregate).
3272 When the new argument is passed by reference (adj->by_ref is true)
3273 it must be a part of an aggregate and therefore we form it by
3274 simply taking the address of a reference inside the original
3275 aggregate. */
3277 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3278 base = gimple_call_arg (stmt, adj->base_index);
3279 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3280 : EXPR_LOCATION (base);
/* For a plain pointer argument the MEM_REF base is the pointer itself and
   the offset is just the adjustment offset; otherwise split the reference
   into an invariant base address and a constant offset.  */
3282 if (TREE_CODE (base) != ADDR_EXPR
3283 && POINTER_TYPE_P (TREE_TYPE (base)))
3284 off = build_int_cst (adj->alias_ptr_type,
3285 adj->offset / BITS_PER_UNIT);
3286 else
3288 HOST_WIDE_INT base_offset;
3289 tree prev_base;
3290 bool addrof;
3292 if (TREE_CODE (base) == ADDR_EXPR)
3294 base = TREE_OPERAND (base, 0);
3295 addrof = true;
3297 else
3298 addrof = false;
3299 prev_base = base;
3300 base = get_addr_base_and_unit_offset (base, &base_offset);
3301 /* Aggregate arguments can have non-invariant addresses. */
3302 if (!base)
3304 base = build_fold_addr_expr (prev_base);
3305 off = build_int_cst (adj->alias_ptr_type,
3306 adj->offset / BITS_PER_UNIT);
3308 else if (TREE_CODE (base) == MEM_REF)
3310 if (!addrof)
3312 deref_base = true;
3313 deref_align = TYPE_ALIGN (TREE_TYPE (base));
3315 off = build_int_cst (adj->alias_ptr_type,
3316 base_offset
3317 + adj->offset / BITS_PER_UNIT);
/* Fold the MEM_REF's own offset into OFF and peel it off.  */
3318 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
3319 off);
3320 base = TREE_OPERAND (base, 0);
3322 else
3324 off = build_int_cst (adj->alias_ptr_type,
3325 base_offset
3326 + adj->offset / BITS_PER_UNIT);
3327 base = build_fold_addr_expr (base);
3331 if (!adj->by_ref)
3333 tree type = adj->type;
3334 unsigned int align;
3335 unsigned HOST_WIDE_INT misalign;
/* Compute the alignment actually guaranteed at BASE + OFF and, if it is
   lower than the natural alignment of TYPE, build an under-aligned
   variant of TYPE so the MEM_REF does not over-promise alignment.  */
3337 if (deref_base)
3339 align = deref_align;
3340 misalign = 0;
3342 else
3344 get_pointer_alignment_1 (base, &align, &misalign);
3345 if (TYPE_ALIGN (type) > align)
3346 align = TYPE_ALIGN (type);
3348 misalign += (tree_to_double_int (off)
3349 .sext (TYPE_PRECISION (TREE_TYPE (off))).low
3350 * BITS_PER_UNIT);
3351 misalign = misalign & (align - 1);
3352 if (misalign != 0)
3353 align = (misalign & -misalign);
3354 if (align < TYPE_ALIGN (type))
3355 type = build_aligned_type (type, align);
3356 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
3358 else
3360 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
3361 expr = build_fold_addr_expr (expr);
/* Gimplify the new argument; any helper statements are inserted right
   before the call (and picked up by the reference walk at the end).  */
3364 expr = force_gimple_operand_gsi (&gsi, expr,
3365 adj->by_ref
3366 || is_gimple_reg_type (adj->type),
3367 NULL, true, GSI_SAME_STMT);
3368 vargs.quick_push (expr);
/* For any parameter that is no longer a plain copy, emit a debug bind so
   its original value stays visible to the debugger.  */
3370 if (!adj->copy_param && MAY_HAVE_DEBUG_STMTS)
3372 unsigned int ix;
3373 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
3374 gimple def_temp;
3376 arg = gimple_call_arg (stmt, adj->base_index);
3377 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
3379 if (!fold_convertible_p (TREE_TYPE (origin), arg))
3380 continue;
3381 arg = fold_convert_loc (gimple_location (stmt),
3382 TREE_TYPE (origin), arg);
3384 if (debug_args == NULL)
3385 debug_args = decl_debug_args_insert (callee_decl);
/* Reuse an existing DEBUG_EXPR_DECL for this origin, if any; the debug
   args vector stores (origin, debug decl) pairs.  */
3386 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
3387 if (ddecl == origin)
3389 ddecl = (**debug_args)[ix + 1];
3390 break;
3392 if (ddecl == NULL)
3394 ddecl = make_node (DEBUG_EXPR_DECL);
3395 DECL_ARTIFICIAL (ddecl) = 1;
3396 TREE_TYPE (ddecl) = TREE_TYPE (origin);
3397 DECL_MODE (ddecl) = DECL_MODE (origin);
3399 vec_safe_push (*debug_args, origin);
3400 vec_safe_push (*debug_args, ddecl);
3402 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
3403 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
3407 if (dump_file && (dump_flags & TDF_DETAILS))
3409 fprintf (dump_file, "replacing stmt:");
3410 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
/* Create the replacement call and copy the lhs, block, location, static
   chain and call flags over from the original statement.  */
3413 new_stmt = gimple_build_call_vec (callee_decl, vargs);
3414 vargs.release ();
3415 if (gimple_call_lhs (stmt))
3416 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3418 gimple_set_block (new_stmt, gimple_block (stmt));
3419 if (gimple_has_location (stmt))
3420 gimple_set_location (new_stmt, gimple_location (stmt));
3421 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3422 gimple_call_copy_flags (new_stmt, stmt);
3424 if (dump_file && (dump_flags & TDF_DETAILS))
3426 fprintf (dump_file, "with stmt:");
3427 print_gimple_stmt (dump_file, new_stmt, 0, 0);
3428 fprintf (dump_file, "\n");
3430 gsi_replace (&gsi, new_stmt, true);
3431 if (cs)
3432 cgraph_set_call_stmt (cs, new_stmt);
/* Record references for the new call and for every statement inserted
   before it above; walk backwards until we are back at PREV_GSI.  */
3435 ipa_record_stmt_references (current_node, gsi_stmt (gsi));
3436 gsi_prev (&gsi);
3438 while ((gsi_end_p (prev_gsi) && !gsi_end_p (gsi))
3439 || (!gsi_end_p (prev_gsi) && gsi_stmt (gsi) == gsi_stmt (prev_gsi)));
3441 update_ssa (TODO_update_ssa);
3442 free_dominance_info (CDI_DOMINATORS);
3445 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
3447 static bool
3448 index_in_adjustments_multiple_times_p (int base_index,
3449 ipa_parm_adjustment_vec adjustments)
3451 int i, len = adjustments.length ();
3452 bool one = false;
3454 for (i = 0; i < len; i++)
3456 struct ipa_parm_adjustment *adj;
3457 adj = &adjustments[i];
3459 if (adj->base_index == base_index)
3461 if (one)
3462 return true;
3463 else
3464 one = true;
3467 return false;
3471 /* Return adjustments that should have the same effect on function parameters
3472 and call arguments as if they were first changed according to adjustments in
3473 INNER and then by adjustments in OUTER. */
3475 ipa_parm_adjustment_vec
3476 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
3477 ipa_parm_adjustment_vec outer)
3479 int i, outlen = outer.length ();
3480 int inlen = inner.length ();
3481 int removals = 0;
3482 ipa_parm_adjustment_vec adjustments, tmp;
/* TMP holds the surviving (non-removed) INNER adjustments; OUTER base
   indices refer into this filtered vector.  */
3484 tmp.create (inlen);
3485 for (i = 0; i < inlen; i++)
3487 struct ipa_parm_adjustment *n;
3488 n = &inner[i];
3490 if (n->remove_param)
3491 removals++;
3492 else
3493 tmp.quick_push (*n);
/* Combine each OUTER adjustment with the INNER adjustment it refers to;
   extra capacity is reserved for re-adding the INNER removals below.  */
3496 adjustments.create (outlen + removals);
3497 for (i = 0; i < outlen; i++)
3499 struct ipa_parm_adjustment r;
3500 struct ipa_parm_adjustment *out = &outer[i];
3501 struct ipa_parm_adjustment *in = &tmp[out->base_index];
3503 memset (&r, 0, sizeof (r));
3504 gcc_assert (!in->remove_param);
3505 if (out->remove_param)
/* Only emit an explicit removal if no other OUTER entry still uses this
   original parameter.  */
3507 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
3509 r.remove_param = true;
3510 adjustments.quick_push (r);
3512 continue;
3515 r.base_index = in->base_index;
3516 r.type = out->type;
3518 /* FIXME: Create nonlocal value too. */
/* A copy of a copy stays a copy; otherwise the offsets of the two
   adjustments compose additively.  */
3520 if (in->copy_param && out->copy_param)
3521 r.copy_param = true;
3522 else if (in->copy_param)
3523 r.offset = out->offset;
3524 else if (out->copy_param)
3525 r.offset = in->offset;
3526 else
3527 r.offset = in->offset + out->offset;
3528 adjustments.quick_push (r);
/* Finally re-add the INNER removals so the combined vector removes those
   parameters as well.  */
3531 for (i = 0; i < inlen; i++)
3533 struct ipa_parm_adjustment *n = &inner[i];
3535 if (n->remove_param)
3536 adjustments.quick_push (*n);
3539 tmp.release ();
3540 return adjustments;
3543 /* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
3544 friendly way, assuming they are meant to be applied to FNDECL. */
3546 void
3547 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
3548 tree fndecl)
3550 int i, len = adjustments.length ();
3551 bool first = true;
3552 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
3554 fprintf (file, "IPA param adjustments: ");
/* One entry per adjustment; optional fields are printed only when set.  */
3555 for (i = 0; i < len; i++)
3557 struct ipa_parm_adjustment *adj;
3558 adj = &adjustments[i];
3560 if (!first)
3561 fprintf (file, " ");
3562 else
3563 first = false;
3565 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
3566 print_generic_expr (file, parms[adj->base_index], 0);
3567 if (adj->base)
3569 fprintf (file, ", base: ");
3570 print_generic_expr (file, adj->base, 0);
3572 if (adj->reduction)
3574 fprintf (file, ", reduction: ");
3575 print_generic_expr (file, adj->reduction, 0);
3577 if (adj->new_ssa_base)
3579 fprintf (file, ", new_ssa_base: ");
3580 print_generic_expr (file, adj->new_ssa_base, 0);
/* The three adjustment kinds are mutually exclusive: copy, removal, or a
   split at OFFSET.  */
3583 if (adj->copy_param)
3584 fprintf (file, ", copy_param");
3585 else if (adj->remove_param)
3586 fprintf (file, ", remove_param");
3587 else
3588 fprintf (file, ", offset %li", (long) adj->offset);
3589 if (adj->by_ref)
3590 fprintf (file, ", by_ref");
3591 print_node_brief (file, ", type: ", adj->type, 0);
3592 fprintf (file, "\n");
3594 parms.release ();
3597 /* Dump the AV linked list. */
3599 void
3600 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
3602 bool comma = false;
3603 fprintf (f, " Aggregate replacements:");
3604 for (; av; av = av->next)
3606 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
3607 av->index, av->offset);
3608 print_generic_expr (f, av->value, 0);
3609 comma = true;
3611 fprintf (f, "\n");
3614 /* Stream out jump function JUMP_FUNC to OB. */
3616 static void
3617 ipa_write_jump_function (struct output_block *ob,
3618 struct ipa_jump_func *jump_func)
3620 struct ipa_agg_jf_item *item;
3621 struct bitpack_d bp;
3622 int i, count;
/* The jump function kind is written first, followed by kind-specific
   payload; ipa_read_jump_function must mirror this layout exactly.  */
3624 streamer_write_uhwi (ob, jump_func->type);
3625 switch (jump_func->type)
3627 case IPA_JF_UNKNOWN:
3628 break;
3629 case IPA_JF_KNOWN_TYPE:
3630 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
3631 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
3632 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
3633 break;
3634 case IPA_JF_CONST:
3635 gcc_assert (
3636 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
3637 stream_write_tree (ob, jump_func->value.constant.value, true);
3638 break;
3639 case IPA_JF_PASS_THROUGH:
3640 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
/* A simple (NOP_EXPR) pass-through has no operand but carries the
   agg_preserved flag; arithmetic pass-throughs stream their operand.  */
3641 if (jump_func->value.pass_through.operation == NOP_EXPR)
3643 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
3644 bp = bitpack_create (ob->main_stream);
3645 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
3646 streamer_write_bitpack (&bp);
3648 else
3650 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
3651 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
3653 break;
3654 case IPA_JF_ANCESTOR:
3655 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
3656 stream_write_tree (ob, jump_func->value.ancestor.type, true);
3657 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
3658 bp = bitpack_create (ob->main_stream);
3659 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
3660 streamer_write_bitpack (&bp);
3661 break;
/* Aggregate part: item count, by_ref flag (only when non-empty), then the
   offset/value pairs.  */
3664 count = vec_safe_length (jump_func->agg.items);
3665 streamer_write_uhwi (ob, count);
3666 if (count)
3668 bp = bitpack_create (ob->main_stream);
3669 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
3670 streamer_write_bitpack (&bp);
3673 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
3675 streamer_write_uhwi (ob, item->offset);
3676 stream_write_tree (ob, item->value, true);
3680 /* Read in jump function JUMP_FUNC from IB. */
3682 static void
3683 ipa_read_jump_function (struct lto_input_block *ib,
3684 struct ipa_jump_func *jump_func,
3685 struct cgraph_edge *cs,
3686 struct data_in *data_in)
3688 enum jump_func_type jftype;
3689 enum tree_code operation;
3690 int i, count;
/* Must mirror the layout produced by ipa_write_jump_function: kind first,
   then kind-specific payload, then the aggregate part.  */
3692 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
3693 switch (jftype)
3695 case IPA_JF_UNKNOWN:
3696 jump_func->type = IPA_JF_UNKNOWN;
3697 break;
3698 case IPA_JF_KNOWN_TYPE:
3700 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
3701 tree base_type = stream_read_tree (ib, data_in);
3702 tree component_type = stream_read_tree (ib, data_in);
3704 ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
3705 break;
3707 case IPA_JF_CONST:
3708 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
3709 break;
3710 case IPA_JF_PASS_THROUGH:
3711 operation = (enum tree_code) streamer_read_uhwi (ib);
/* NOP_EXPR marks a simple pass-through (formal id + agg_preserved bit);
   anything else is an arithmetic pass-through with a streamed operand.  */
3712 if (operation == NOP_EXPR)
3714 int formal_id = streamer_read_uhwi (ib);
3715 struct bitpack_d bp = streamer_read_bitpack (ib);
3716 bool agg_preserved = bp_unpack_value (&bp, 1);
3717 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
3719 else
3721 tree operand = stream_read_tree (ib, data_in);
3722 int formal_id = streamer_read_uhwi (ib);
3723 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
3724 operation);
3726 break;
3727 case IPA_JF_ANCESTOR:
3729 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
3730 tree type = stream_read_tree (ib, data_in);
3731 int formal_id = streamer_read_uhwi (ib);
3732 struct bitpack_d bp = streamer_read_bitpack (ib);
3733 bool agg_preserved = bp_unpack_value (&bp, 1);
3735 ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved);
3736 break;
/* Read the aggregate part: count, by_ref flag (only when non-empty), then
   the offset/value items.  */
3740 count = streamer_read_uhwi (ib);
3741 vec_alloc (jump_func->agg.items, count);
3742 if (count)
3744 struct bitpack_d bp = streamer_read_bitpack (ib);
3745 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
3747 for (i = 0; i < count; i++)
3749 struct ipa_agg_jf_item item;
3750 item.offset = streamer_read_uhwi (ib);
3751 item.value = stream_read_tree (ib, data_in);
3752 jump_func->agg.items->quick_push (item);
3756 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
3757 relevant to indirect inlining to OB. */
3759 static void
3760 ipa_write_indirect_edge_info (struct output_block *ob,
3761 struct cgraph_edge *cs)
3763 struct cgraph_indirect_call_info *ii = cs->indirect_info;
3764 struct bitpack_d bp;
/* Layout: param_index, offset, a bitpack of (polymorphic, agg_contents,
   by_ref), then OBJ_TYPE_REF token and type for polymorphic calls only.
   Mirrored by ipa_read_indirect_edge_info.  */
3766 streamer_write_hwi (ob, ii->param_index);
3767 streamer_write_hwi (ob, ii->offset);
3768 bp = bitpack_create (ob->main_stream);
3769 bp_pack_value (&bp, ii->polymorphic, 1);
3770 bp_pack_value (&bp, ii->agg_contents, 1);
3771 bp_pack_value (&bp, ii->by_ref, 1);
3772 streamer_write_bitpack (&bp);
3774 if (ii->polymorphic)
3776 streamer_write_hwi (ob, ii->otr_token);
3777 stream_write_tree (ob, ii->otr_type, true);
3781 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
3782 relevant to indirect inlining from IB. */
3784 static void
3785 ipa_read_indirect_edge_info (struct lto_input_block *ib,
3786 struct data_in *data_in ATTRIBUTE_UNUSED,
3787 struct cgraph_edge *cs)
3789 struct cgraph_indirect_call_info *ii = cs->indirect_info;
3790 struct bitpack_d bp;
3792 ii->param_index = (int) streamer_read_hwi (ib);
3793 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
3794 bp = streamer_read_bitpack (ib);
3795 ii->polymorphic = bp_unpack_value (&bp, 1);
3796 ii->agg_contents = bp_unpack_value (&bp, 1);
3797 ii->by_ref = bp_unpack_value (&bp, 1);
3798 if (ii->polymorphic)
3800 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
3801 ii->otr_type = stream_read_tree (ib, data_in);
3805 /* Stream out NODE info to OB. */
/* Layout: node reference, a bitpack of per-parameter "used" flags,
   per-parameter controlled-use counts, then for every direct call site its
   argument count and jump functions, and likewise for every indirect call
   site followed by its indirect-call info.  Mirrored by ipa_read_node_info.  */
3807 static void
3808 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
3810 int node_ref;
3811 lto_symtab_encoder_t encoder;
3812 struct ipa_node_params *info = IPA_NODE_REF (node);
3813 int j;
3814 struct cgraph_edge *e;
3815 struct bitpack_d bp;
3817 encoder = ob->decl_state->symtab_node_encoder;
3818 node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
3819 streamer_write_uhwi (ob, node_ref);
3821 bp = bitpack_create (ob->main_stream);
/* Only fully analyzed, non-clone, non-queued nodes may be streamed.  */
3822 gcc_assert (info->uses_analysis_done
3823 || ipa_get_param_count (info) == 0);
3824 gcc_assert (!info->node_enqueued);
3825 gcc_assert (!info->ipcp_orig_node);
3826 for (j = 0; j < ipa_get_param_count (info); j++)
3827 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
3828 streamer_write_bitpack (&bp);
3829 for (j = 0; j < ipa_get_param_count (info); j++)
3830 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
3831 for (e = node->callees; e; e = e->next_callee)
3833 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3835 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
3836 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
3837 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
3839 for (e = node->indirect_calls; e; e = e->next_callee)
3841 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3843 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
3844 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
3845 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
3846 ipa_write_indirect_edge_info (ob, e);
3850 /* Stream in NODE info from IB. */
/* Must mirror the layout produced by ipa_write_node_info.  */
3852 static void
3853 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
3854 struct data_in *data_in)
3856 struct ipa_node_params *info = IPA_NODE_REF (node);
3857 int k;
3858 struct cgraph_edge *e;
3859 struct bitpack_d bp;
3861 ipa_initialize_node_params (node);
3863 bp = streamer_read_bitpack (ib);
/* The writer only streams nodes whose analysis was done (or which have no
   parameters), so mark the info accordingly.  */
3864 if (ipa_get_param_count (info) != 0)
3865 info->uses_analysis_done = true;
3866 info->node_enqueued = false;
3867 for (k = 0; k < ipa_get_param_count (info); k++)
3868 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
3869 for (k = 0; k < ipa_get_param_count (info); k++)
3870 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
3871 for (e = node->callees; e; e = e->next_callee)
3873 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3874 int count = streamer_read_uhwi (ib);
3876 if (!count)
3877 continue;
3878 vec_safe_grow_cleared (args->jump_functions, count);
3880 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
3881 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
3882 data_in);
/* Indirect edges additionally carry indirect-call info, which must be read
   even when the argument count is zero.  */
3884 for (e = node->indirect_calls; e; e = e->next_callee)
3886 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3887 int count = streamer_read_uhwi (ib);
3889 if (count)
3891 vec_safe_grow_cleared (args->jump_functions, count);
3892 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
3893 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
3894 data_in);
3896 ipa_read_indirect_edge_info (ib, data_in, e);
3900 /* Write jump functions for nodes in SET. */
3902 void
3903 ipa_prop_write_jump_functions (void)
3905 struct cgraph_node *node;
3906 struct output_block *ob;
3907 unsigned int count = 0;
3908 lto_symtab_encoder_iterator lsei;
3909 lto_symtab_encoder_t encoder;
/* Nothing to stream if IPA analysis has not been performed.  */
3912 if (!ipa_node_params_vector.exists ())
3913 return;
3915 ob = create_output_block (LTO_section_jump_functions);
3916 encoder = ob->decl_state->symtab_node_encoder;
3917 ob->cgraph_node = NULL;
/* First count the eligible functions in this partition so the reader knows
   how many node records follow.  */
3918 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
3919 lsei_next_function_in_partition (&lsei))
3921 node = lsei_cgraph_node (lsei);
3922 if (cgraph_function_with_gimple_body_p (node)
3923 && IPA_NODE_REF (node) != NULL)
3924 count++;
3927 streamer_write_uhwi (ob, count);
3929 /* Process all of the functions. */
3930 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
3931 lsei_next_function_in_partition (&lsei))
3933 node = lsei_cgraph_node (lsei);
3934 if (cgraph_function_with_gimple_body_p (node)
3935 && IPA_NODE_REF (node) != NULL)
3936 ipa_write_node_info (ob, node);
3938 streamer_write_char_stream (ob->main_stream, 0);
3939 produce_asm (ob, NULL);
3940 destroy_output_block (ob);
3943 /* Read section in file FILE_DATA of length LEN with data DATA. */
3945 static void
3946 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
3947 size_t len)
/* The section uses the standard lto_function_header layout; the jump
   function records live in the main stream, trees in the string/tree
   streams accessed through DATA_IN.  */
3949 const struct lto_function_header *header =
3950 (const struct lto_function_header *) data;
3951 const int cfg_offset = sizeof (struct lto_function_header);
3952 const int main_offset = cfg_offset + header->cfg_size;
3953 const int string_offset = main_offset + header->main_size;
3954 struct data_in *data_in;
3955 struct lto_input_block ib_main;
3956 unsigned int i;
3957 unsigned int count;
3959 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
3960 header->main_size);
3962 data_in =
3963 lto_data_in_create (file_data, (const char *) data + string_offset,
3964 header->string_size, vNULL);
/* The writer emitted the number of node records up front.  */
3965 count = streamer_read_uhwi (&ib_main);
3967 for (i = 0; i < count; i++)
3969 unsigned int index;
3970 struct cgraph_node *node;
3971 lto_symtab_encoder_t encoder;
3973 index = streamer_read_uhwi (&ib_main);
3974 encoder = file_data->symtab_node_encoder;
3975 node = cgraph (lto_symtab_encoder_deref (encoder, index));
3976 gcc_assert (node->symbol.definition);
3977 ipa_read_node_info (&ib_main, node, data_in);
3979 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
3980 len);
3981 lto_data_in_delete (data_in);
3984 /* Read ipcp jump functions. */
3986 void
3987 ipa_prop_read_jump_functions (void)
3989 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
3990 struct lto_file_decl_data *file_data;
3991 unsigned int j = 0;
3993 ipa_check_create_node_params ();
3994 ipa_check_create_edge_args ();
3995 ipa_register_cgraph_hooks ();
3997 while ((file_data = file_data_vec[j++]))
3999 size_t len;
4000 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4002 if (data)
4003 ipa_prop_read_section (file_data, data, len);
4007 /* After merging units, we can get mismatch in argument counts.
4008 Also decl merging might've rendered parameter lists obsolete.
4009 Also compute called_with_variable_arg info. */
4011 void
4012 ipa_update_after_lto_read (void)
4014 struct cgraph_node *node;
4016 ipa_check_create_node_params ();
4017 ipa_check_create_edge_args ();
4019 FOR_EACH_DEFINED_FUNCTION (node)
4020 ipa_initialize_node_params (node);
/* Stream out the aggregate value replacement chain of NODE to OB: the node
   reference, the chain length, then one (offset, index, value, by_ref)
   record per replacement.  Mirrored by read_agg_replacement_chain.  */
4023 void
4024 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
4026 int node_ref;
4027 unsigned int count = 0;
4028 lto_symtab_encoder_t encoder;
4029 struct ipa_agg_replacement_value *aggvals, *av;
4031 aggvals = ipa_get_agg_replacements_for_node (node);
4032 encoder = ob->decl_state->symtab_node_encoder;
4033 node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
4034 streamer_write_uhwi (ob, node_ref);
/* Write the chain length first so the reader knows how many records
   follow.  */
4036 for (av = aggvals; av; av = av->next)
4037 count++;
4038 streamer_write_uhwi (ob, count);
4040 for (av = aggvals; av; av = av->next)
4042 struct bitpack_d bp;
4044 streamer_write_uhwi (ob, av->offset);
4045 streamer_write_uhwi (ob, av->index);
4046 stream_write_tree (ob, av->value, true);
4048 bp = bitpack_create (ob->main_stream);
4049 bp_pack_value (&bp, av->by_ref, 1);
4050 streamer_write_bitpack (&bp);
4054 /* Stream in the aggregate value replacement chain for NODE from IB. */
4056 static void
4057 read_agg_replacement_chain (struct lto_input_block *ib,
4058 struct cgraph_node *node,
4059 struct data_in *data_in)
4061 struct ipa_agg_replacement_value *aggvals = NULL;
4062 unsigned int count, i;
4064 count = streamer_read_uhwi (ib);
4065 for (i = 0; i <count; i++)
4067 struct ipa_agg_replacement_value *av;
4068 struct bitpack_d bp;
4070 av = ggc_alloc_ipa_agg_replacement_value ();
4071 av->offset = streamer_read_uhwi (ib);
4072 av->index = streamer_read_uhwi (ib);
4073 av->value = stream_read_tree (ib, data_in);
4074 bp = streamer_read_bitpack (ib);
4075 av->by_ref = bp_unpack_value (&bp, 1);
4076 av->next = aggvals;
4077 aggvals = av;
4079 ipa_set_node_agg_value_chain (node, aggvals);
4082 /* Write all aggregate replacement for nodes in set. */
4084 void
4085 ipa_prop_write_all_agg_replacement (void)
4087 struct cgraph_node *node;
4088 struct output_block *ob;
4089 unsigned int count = 0;
4090 lto_symtab_encoder_iterator lsei;
4091 lto_symtab_encoder_t encoder;
/* Nothing to write when no aggregate replacements were computed.  */
4093 if (!ipa_node_agg_replacements)
4094 return;
4096 ob = create_output_block (LTO_section_ipcp_transform);
4097 encoder = ob->decl_state->symtab_node_encoder;
4098 ob->cgraph_node = NULL;
/* First count the eligible functions in this partition so the reader knows
   how many chains follow.  */
4099 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4100 lsei_next_function_in_partition (&lsei))
4102 node = lsei_cgraph_node (lsei);
4103 if (cgraph_function_with_gimple_body_p (node)
4104 && ipa_get_agg_replacements_for_node (node) != NULL)
4105 count++;
4108 streamer_write_uhwi (ob, count);
4110 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4111 lsei_next_function_in_partition (&lsei))
4113 node = lsei_cgraph_node (lsei);
4114 if (cgraph_function_with_gimple_body_p (node)
4115 && ipa_get_agg_replacements_for_node (node) != NULL)
4116 write_agg_replacement_chain (ob, node);
4118 streamer_write_char_stream (ob->main_stream, 0);
4119 produce_asm (ob, NULL);
4120 destroy_output_block (ob);
4123 /* Read replacements section in file FILE_DATA of length LEN with data
4124 DATA. */
4126 static void
4127 read_replacements_section (struct lto_file_decl_data *file_data,
4128 const char *data,
4129 size_t len)
4131 const struct lto_function_header *header =
4132 (const struct lto_function_header *) data;
4133 const int cfg_offset = sizeof (struct lto_function_header);
4134 const int main_offset = cfg_offset + header->cfg_size;
4135 const int string_offset = main_offset + header->main_size;
4136 struct data_in *data_in;
4137 struct lto_input_block ib_main;
4138 unsigned int i;
4139 unsigned int count;
4141 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4142 header->main_size);
4144 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
4145 header->string_size, vNULL);
4146 count = streamer_read_uhwi (&ib_main);
4148 for (i = 0; i < count; i++)
4150 unsigned int index;
4151 struct cgraph_node *node;
4152 lto_symtab_encoder_t encoder;
4154 index = streamer_read_uhwi (&ib_main);
4155 encoder = file_data->symtab_node_encoder;
4156 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4157 gcc_assert (node->symbol.definition);
4158 read_agg_replacement_chain (&ib_main, node, data_in);
4160 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4161 len);
4162 lto_data_in_delete (data_in);
4165 /* Read IPA-CP aggregate replacements. */
4167 void
4168 ipa_prop_read_all_agg_replacement (void)
4170 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4171 struct lto_file_decl_data *file_data;
4172 unsigned int j = 0;
4174 while ((file_data = file_data_vec[j++]))
4176 size_t len;
4177 const char *data = lto_get_section_data (file_data,
4178 LTO_section_ipcp_transform,
4179 NULL, &len);
4180 if (data)
4181 read_replacements_section (file_data, data, len);
4185 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4186 NODE. */
4188 static void
4189 adjust_agg_replacement_values (struct cgraph_node *node,
4190 struct ipa_agg_replacement_value *aggval)
4192 struct ipa_agg_replacement_value *v;
4193 int i, c = 0, d = 0, *adj;
4195 if (!node->clone.combined_args_to_skip)
4196 return;
4198 for (v = aggval; v; v = v->next)
4200 gcc_assert (v->index >= 0);
4201 if (c < v->index)
4202 c = v->index;
4204 c++;
4206 adj = XALLOCAVEC (int, c);
4207 for (i = 0; i < c; i++)
4208 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
4210 adj[i] = -1;
4211 d++;
4213 else
4214 adj[i] = i - d;
4216 for (v = aggval; v; v = v->next)
4217 v->index = adj[v->index];
4221 /* Function body transformation phase. */
/* Replace loads from aggregate parameters with the constant values recorded
   in NODE's aggregate replacement chain (computed by IPA-CP).  Returns TODO
   flags for the pass manager.  */
4223 unsigned int
4224 ipcp_transform_function (struct cgraph_node *node)
4226 vec<ipa_param_descriptor_t> descriptors = vNULL;
4227 struct param_analysis_info *parms_ainfo;
4228 struct ipa_agg_replacement_value *aggval;
4229 gimple_stmt_iterator gsi;
4230 basic_block bb;
4231 int param_count;
4232 bool cfg_changed = false, something_changed = false;
4234 gcc_checking_assert (cfun);
4235 gcc_checking_assert (current_function_decl);
4237 if (dump_file)
4238 fprintf (dump_file, "Modification phase of node %s/%i\n",
4239 cgraph_node_name (node), node->symbol.order);
/* Nothing to do without recorded replacements or without parameters.  */
4241 aggval = ipa_get_agg_replacements_for_node (node);
4242 if (!aggval)
4243 return 0;
4244 param_count = count_formal_params (node->symbol.decl);
4245 if (param_count == 0)
4246 return 0;
4247 adjust_agg_replacement_values (node, aggval);
4248 if (dump_file)
4249 ipa_dump_agg_replacement_values (dump_file, aggval);
4250 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
4251 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
4252 descriptors.safe_grow_cleared (param_count);
4253 ipa_populate_param_decls (node, descriptors);
/* Scan every statement for loads from parameter aggregates that match a
   recorded replacement.  */
4255 FOR_EACH_BB (bb)
4256 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4258 struct ipa_agg_replacement_value *v;
4259 gimple stmt = gsi_stmt (gsi);
4260 tree rhs, val, t;
4261 HOST_WIDE_INT offset;
4262 int index;
4263 bool by_ref, vce;
4265 if (!gimple_assign_load_p (stmt))
4266 continue;
4267 rhs = gimple_assign_rhs1 (stmt);
4268 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
4269 continue;
/* Skip references containing a VIEW_CONVERT_EXPR.
   NOTE(review): the test below reads RHS, which never changes while the
   loop walks T through the component chain, so only an outermost V_C_E is
   ever detected; it looks like the intent was TREE_CODE (t) — confirm
   before changing.  */
4271 vce = false;
4272 t = rhs;
4273 while (handled_component_p (t))
4275 /* V_C_E can do things like convert an array of integers to one
4276 bigger integer and similar things we do not handle below. */
4277 if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
4279 vce = true;
4280 break;
4282 t = TREE_OPERAND (t, 0);
4284 if (vce)
4285 continue;
/* Identify which parameter (INDEX) and at what OFFSET this load reads,
   then look for a matching recorded replacement value.  */
4287 if (!ipa_load_from_parm_agg_1 (descriptors, parms_ainfo, stmt,
4288 rhs, &index, &offset, &by_ref))
4289 continue;
4290 for (v = aggval; v; v = v->next)
4291 if (v->index == index
4292 && v->offset == offset)
4293 break;
4294 if (!v || v->by_ref != by_ref)
4295 continue;
/* Convert the replacement constant to the loaded type, or bail out (with
   a dump note) when no valid conversion exists.  */
4297 gcc_checking_assert (is_gimple_ip_invariant (v->value));
4298 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
4300 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
4301 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
4302 else if (TYPE_SIZE (TREE_TYPE (rhs))
4303 == TYPE_SIZE (TREE_TYPE (v->value)))
4304 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
4305 else
4307 if (dump_file)
4309 fprintf (dump_file, " const ");
4310 print_generic_expr (dump_file, v->value, 0);
4311 fprintf (dump_file, " can't be converted to type of ");
4312 print_generic_expr (dump_file, rhs, 0);
4313 fprintf (dump_file, "\n");
4315 continue;
4318 else
4319 val = v->value;
4321 if (dump_file && (dump_flags & TDF_DETAILS))
4323 fprintf (dump_file, "Modifying stmt:\n ");
4324 print_gimple_stmt (dump_file, stmt, 0, 0);
4326 gimple_assign_set_rhs_from_tree (&gsi, val);
4327 update_stmt (stmt);
4329 if (dump_file && (dump_flags & TDF_DETAILS))
4331 fprintf (dump_file, "into:\n ");
4332 print_gimple_stmt (dump_file, stmt, 0, 0);
4333 fprintf (dump_file, "\n");
4336 something_changed = true;
/* Replacing a load with a constant may make an EH edge dead.  */
4337 if (maybe_clean_eh_stmt (stmt)
4338 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4339 cfg_changed = true;
/* The replacements have been consumed; drop the chain for this node.  */
4342 (*ipa_node_agg_replacements)[node->uid] = NULL;
4343 free_parms_ainfo (parms_ainfo, param_count);
4344 descriptors.release ();
4346 if (!something_changed)
4347 return 0;
4348 else if (cfg_changed)
4349 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
4350 else
4351 return TODO_update_ssa_only_virtuals;