/* Interprocedural analyses.
   Copyright (C) 2005, 2007, 2008, 2009, 2010, 2011, 2012
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree.h"
25 #include "langhooks.h"
26 #include "ggc.h"
27 #include "target.h"
28 #include "cgraph.h"
29 #include "ipa-prop.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-inline.h"
33 #include "gimple.h"
34 #include "flags.h"
35 #include "diagnostic.h"
36 #include "gimple-pretty-print.h"
37 #include "lto-streamer.h"
38 #include "data-streamer.h"
39 #include "tree-streamer.h"
40 #include "params.h"
/* Intermediate information about a parameter that is only useful during the
   run of ipa_analyze_node and is not kept afterwards.  */

struct param_analysis_info
{
  bool parm_modified, ref_modified, pt_modified;
  bitmap parm_visited_statements, pt_visited_statements;
};

/* Vector where the parameter infos are actually stored. */
VEC (ipa_node_params_t, heap) *ipa_node_params_vector;
/* Vector where the edge argument infos are actually stored. */
VEC (ipa_edge_args_t, gc) *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  int i, count;

  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    if (ipa_get_param (info, i) == ptree)
      return i;

  return -1;
}

/* Populate the param_decl field in parameter descriptors of INFO that
   corresponds to NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
                          struct ipa_node_params *info)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->symbol.decl;
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      VEC_index (ipa_param_descriptor_t,
                 info->descriptors, param_num).decl = parm;
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

static inline int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors)
    {
      int param_count;

      param_count = count_formal_params (node->symbol.decl);
      if (param_count)
        {
          VEC_safe_grow_cleared (ipa_param_descriptor_t, heap,
                                 info->descriptors, param_count);
          ipa_populate_param_decls (node, info);
        }
    }
}

/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
        fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_KNOWN_TYPE)
        {
          fprintf (f, "KNOWN TYPE: base ");
          print_generic_expr (f, jump_func->value.known_type.base_type, 0);
          fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
                   jump_func->value.known_type.offset);
          print_generic_expr (f, jump_func->value.known_type.component_type, 0);
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_CONST)
        {
          tree val = jump_func->value.constant;
          fprintf (f, "CONST: ");
          print_generic_expr (f, val, 0);
          if (TREE_CODE (val) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
            {
              fprintf (f, " -> ");
              print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
                                  0);
            }
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_PASS_THROUGH)
        {
          fprintf (f, "PASS THROUGH: ");
          fprintf (f, "%d, op %s",
                   jump_func->value.pass_through.formal_id,
                   tree_code_name[(int)
                                  jump_func->value.pass_through.operation]);
          if (jump_func->value.pass_through.operation != NOP_EXPR)
            {
              fprintf (f, " ");
              print_generic_expr (f,
                                  jump_func->value.pass_through.operand, 0);
            }
          if (jump_func->value.pass_through.agg_preserved)
            fprintf (f, ", agg_preserved");
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_ANCESTOR)
        {
          fprintf (f, "ANCESTOR: ");
          fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
                   jump_func->value.ancestor.formal_id,
                   jump_func->value.ancestor.offset);
          print_generic_expr (f, jump_func->value.ancestor.type, 0);
          if (jump_func->value.ancestor.agg_preserved)
            fprintf (f, ", agg_preserved");
          fprintf (f, "\n");
        }

      if (jump_func->agg.items)
        {
          struct ipa_agg_jf_item *item;
          int j;

          fprintf (f, "         Aggregate passed by %s:\n",
                   jump_func->agg.by_ref ? "reference" : "value");
          FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, jump_func->agg.items,
                            j, item)
            {
              fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
                       item->offset);
              if (TYPE_P (item->value))
                fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
                         tree_low_cst (TYPE_SIZE (item->value), 1));
              else
                {
                  fprintf (f, "cst: ");
                  print_generic_expr (f, item->value, 0);
                }
              fprintf (f, "\n");
            }
        }
    }
}

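/* For illustration only: for a call that forwards formal 0 unchanged and
   passes the constant 4, the loop above would print something roughly like

       param 0: PASS THROUGH: 0, op nop_expr, agg_preserved
       param 1: CONST: 4

   with tree_code_name providing the lower-case "nop_expr" spelling; exact
   spacing depends on the fprintf calls above.  */
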
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;
  int i;

  fprintf (f, "  Jump functions of caller  %s:\n", cgraph_node_name (node));
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
               xstrdup (cgraph_node_name (node)), node->uid,
               xstrdup (cgraph_node_name (cs->callee)), cs->callee->uid);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls, i = 0; cs; cs = cs->next_callee, i++)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      if (cs->call_stmt)
        {
          fprintf (f, "    indirect callsite %d for stmt ", i);
          print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
        }
      else
        fprintf (f, "    indirect callsite %d :\n", i);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

/* Worker for prune_expression_for_jf.  */

static tree
prune_expression_for_jf_1 (tree *tp, int *walk_subtrees, void *)
{
  if (EXPR_P (*tp))
    SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
  else
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Return the expression tree EXPR unshared and with location stripped off.  */

static tree
prune_expression_for_jf (tree exp)
{
  if (EXPR_P (exp))
    {
      exp = unshare_expr (exp);
      walk_tree (&exp, prune_expression_for_jf_1, NULL, NULL);
    }
  return exp;
}

/* Set JFUNC to be a known type jump function.  */

static void
ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                       tree base_type, tree component_type)
{
  jfunc->type = IPA_JF_KNOWN_TYPE;
  jfunc->value.known_type.offset = offset;
  jfunc->value.known_type.base_type = base_type;
  jfunc->value.known_type.component_type = component_type;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant = prune_expression_for_jf (constant);
}

/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                                bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}

/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                               tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = prune_expression_for_jf (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                     tree type, int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.type = type;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}

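/* For illustration, given a (hypothetical) caller fragment such as

     void foo (int a, struct B *b)
     {
       bar (a, a + 4, &b->ancestor_field);
     }

   the setters above would typically be used to give argument 0 a simple
   pass-through jump function (formal 0), argument 1 an arithmetic
   pass-through (formal 0, PLUS_EXPR, operand 4) and argument 2 an ancestor
   jump function carrying the offset of ancestor_field within *b.  */
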
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructor of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
        {
          if (flag_strict_aliasing
              && !POINTER_TYPE_P (TREE_TYPE (lhs)))
            return false;

          if (TREE_CODE (lhs) == COMPONENT_REF
              && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
            return false;
          /* In the future we might want to use get_base_ref_and_offset to find
             if there is a field corresponding to the offset and if so, proceed
             almost like if it was a component ref.  */
        }
    }
  return true;
}

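/* As an example of what the predicate above accepts, a constructor body
   typically contains a statement along the lines of (hypothetical dump)

     this_1(D)->_vptr.B = &_ZTV1B[2];

   i.e. an assignment to a COMPONENT_REF whose field has DECL_VIRTUAL_P
   set, which is precisely the kind of store that may change the dynamic
   type of an object.  */
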
/* If STMT can be proved to be an assignment to the virtual method table
   pointer of ANALYZED_OBJ and the type associated with the new table
   identified, return the type.  Otherwise return NULL_TREE.  */

static tree
extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
{
  HOST_WIDE_INT offset, size, max_size;
  tree lhs, rhs, base;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
      || TREE_CODE (rhs) != ADDR_EXPR)
    return NULL_TREE;
  rhs = get_base_address (TREE_OPERAND (rhs, 0));
  if (!rhs
      || TREE_CODE (rhs) != VAR_DECL
      || !DECL_VIRTUAL_P (rhs))
    return NULL_TREE;

  base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
  if (offset != tci->offset
      || size != POINTER_SIZE
      || max_size != POINTER_SIZE)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (TREE_CODE (tci->object) != MEM_REF
          || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
          || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
                                  TREE_OPERAND (base, 1)))
        return NULL_TREE;
    }
  else if (tci->object != base)
    return NULL_TREE;

  return DECL_CONTEXT (rhs);
}

/* Callback of walk_aliased_vdefs and a helper function for
   detect_type_change to check whether a particular statement may modify
   the virtual table pointer, and if possible also determine the new type of
   the (sub-)object.  It stores its result into DATA, which points to a
   type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct type_change_info *tci = (struct type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tree type;
      type = extr_type_from_vtbl_ptr_store (stmt, tci);
      if (tci->type_maybe_changed
          && type != tci->known_current_type)
        tci->multiple_types_encountered = true;
      tci->known_current_type = type;
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}

/* Like detect_type_change but with extra argument COMP_TYPE which will become
   the component type part of new JFUNC if dynamic type change is detected and
   the new base type is identified.  */

static bool
detect_type_change_1 (tree arg, tree base, tree comp_type, gimple call,
                      struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  struct type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
                       || TREE_CODE (arg) == MEM_REF
                       || handled_component_p (arg));
  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call))
    return false;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.known_current_type = NULL_TREE;
  tci.type_maybe_changed = false;
  tci.multiple_types_encountered = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
                      &tci, NULL);
  if (!tci.type_maybe_changed)
    return false;

  if (!tci.known_current_type
      || tci.multiple_types_encountered
      || offset != 0)
    jfunc->type = IPA_JF_UNKNOWN;
  else
    ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);

  return true;
}

/* Detect whether the dynamic type of ARG has changed (before callsite CALL) by
   looking for assignments to its virtual table pointer.  If it is, return true
   and fill in the jump function JFUNC with relevant type information or set it
   to unknown.  ARG is the object itself (not a pointer to it, unless
   dereferenced).  BASE is the base of the memory access as returned by
   get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, gimple call,
                    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  return detect_type_change_1 (arg, base, TREE_TYPE (arg), call, jfunc, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, gimple call, struct ipa_jump_func *jfunc)
{
  tree comp_type;

  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != RECORD_TYPE)
    return false;

  comp_type = TREE_TYPE (TREE_TYPE (arg));
  arg = build2 (MEM_REF, ptr_type_node, arg,
                build_int_cst (ptr_type_node, 0));

  return detect_type_change_1 (arg, arg, comp_type, call, jfunc, 0);
}

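/* A hypothetical situation the two functions above can detect: if an
   inlined constructor left a statement such as

     MEM[(struct B *)obj_1]._vptr = &_ZTV1B[2];

   between the definition of the argument and the call, the backward walk
   over virtual definitions meets this vtable pointer store, so the dynamic
   type at the call site is known to be B (or, if several candidate stores
   are found, the jump function is conservatively set to unknown).  */
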
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
               void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  PARM_AINFO is a pointer to a structure containing temporary
   information about the parameter.  */

static bool
parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
                              gimple stmt, tree parm_load)
{
  bool modified = false;
  bitmap *visited_stmts;
  ao_ref refd;

  if (parm_ainfo && parm_ainfo->parm_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  /* We can cache visited statements only when parm_ainfo is available and when
     we are looking at a naked load of the whole parameter.  */
  if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
    visited_stmts = NULL;
  else
    visited_stmts = &parm_ainfo->parm_visited_statements;
  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
                      visited_stmts);
  if (parm_ainfo && modified)
    parm_ainfo->parm_modified = true;
  return !modified;
}

/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_node_params *info,
                            struct param_analysis_info *parms_ainfo,
                            gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index (info, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
                                        : NULL, stmt, op1))
    return -1;

  return index;
}

/* Return true if memory reference REF loads data that are known to be
   unmodified in this function before reaching statement STMT.  PARM_AINFO, if
   non-NULL, is a pointer to a structure containing temporary information about
   PARM.  */

static bool
parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
                           gimple stmt, tree ref)
{
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (gimple_vuse (stmt));
  if (parm_ainfo && parm_ainfo->ref_modified)
    return false;

  ao_ref_init (&refd, ref);
  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
                      NULL);
  if (parm_ainfo && modified)
    parm_ainfo->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM is known to be unmodified in this
   function before reaching call statement CALL into which it is passed.
   PARM_AINFO is a pointer to a structure containing temporary information
   about PARM.  */

static bool
parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
                              gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm)))
    return false;

  if (parm_ainfo->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
                      parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
  if (modified)
    parm_ainfo->pt_modified = true;
  return !modified;
}

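/* For example (hypothetical), in

     void foo (struct S *p)
     {
       int x = p->a;
       bar (p);
     }

   nothing between the beginning of foo and the call writes through p, so
   the function above returns true and the pass-through jump function
   describing the first argument of bar can have agg_preserved set.  */
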
/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

static bool
ipa_load_from_parm_agg_1 (struct ipa_node_params *info,
                          struct param_analysis_info *parms_ainfo, gimple stmt,
                          tree op, int *index_p, HOST_WIDE_INT *offset_p,
                          bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index (info, base);
      if (index >= 0
          && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
                                           : NULL, stmt, op))
        {
          *index_p = index;
          *by_ref_p = false;
          return true;
        }
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index (info, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
         gimple register, for example:

         void hip7(S*) (struct S * p)
         {
           void (*<T2e4>) (struct S *) D.1867;
           struct S * p.1;

         <bb 2>:
           p.1_1 = p;
           D.1867_2 = p.1_1->f;
           D.1867_2 ();
           gdp = &p;
      */

      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (info, parms_ainfo, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
                                    stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      return true;
    }
  return false;
}

/* Just like the previous function, just without the param_analysis_info
   pointer, for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
                        tree op, int *index_p, HOST_WIDE_INT *offset_p,
                        bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (info, NULL, stmt, op, index_p, offset_p,
                                   by_ref_p);
}

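/* To illustrate (hypothetical dump), in

     int foo (struct S *p)
     {
       return p->f;
     }

   the load is represented roughly as MEM[(struct S *)p_1(D)].f and,
   provided nothing may clobber *p beforehand, ipa_load_from_parm_agg
   returns true with *INDEX_P set to 0, *OFFSET_P to the bit offset of
   field f within S and *BY_REF_P to true.  */
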
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
        int a.0;

        a.0_2 = a;
        bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function. E.g.

      foo (int a)
      {
        int D.2064;

        D.2064_4 = a.1(D) + 4;
        bar (D.2064_4);

   This case can also occur in combination of the previous one, e.g.:

      foo (int a, int z)
      {
        int a.0;
        int D.2064;

        a.0_3 = a;
        D.2064_4 = a.0_3 + 4;
        foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters access different
   stages of IPA optimizations.  PARMS_AINFO contains the information that is
   only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_node_params *info,
                                  struct param_analysis_info *parms_ainfo,
                                  struct ipa_jump_func *jfunc,
                                  gimple call, gimple stmt, tree name)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
        index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
        index = load_from_unmodified_param (info, parms_ainfo,
                                            SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (info, parms_ainfo, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
        {
          if (!is_gimple_ip_invariant (op2)
              || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
                  && !useless_type_conversion_p (TREE_TYPE (name),
                                                 TREE_TYPE (op1))))
            return;

          ipa_set_jf_arith_pass_through (jfunc, index, op2,
                                         gimple_assign_rhs_code (stmt));
        }
      else if (gimple_assign_single_p (stmt)
               && !detect_type_change_ssa (tc_ssa, call, jfunc))
        {
          bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                     call, tc_ssa);
          ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
        }
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).low * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed only in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0
      && !detect_type_change (op1, base, call, jfunc, offset))
    ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index,
                         parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                       call, ssa));
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}

/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_node_params *info,
                                    struct param_analysis_info *parms_ainfo,
                                    struct ipa_jump_func *jfunc,
                                    gimple call, gimple phi)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  gcc_assert (index >= 0);

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
        return;
    }

  if (!detect_type_change (obj, expr, call, jfunc, offset))
    ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index,
                         parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                       call, parm));
}

/* Given OP which is passed as an actual argument to a called function,
   determine if it is possible to construct a KNOWN_TYPE jump function for it
   and if so, create one and store it to JFUNC.  */

static void
compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
                              gimple call)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  if (!flag_devirtualize
      || TREE_CODE (op) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE)
    return;

  op = TREE_OPERAND (op, 0);
  base = get_ref_base_and_extent (op, &offset, &size, &max_size);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
      || is_global_var (base))
    return;

  if (!TYPE_BINFO (TREE_TYPE (base))
      || detect_type_change (op, base, call, jfunc, offset))
    return;

  ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base), TREE_TYPE (op));
}

/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}

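/* For illustration, the C++ front end typically represents a pointer to
   member function such as

     int (MyString::*f) (int);

   as a record roughly equivalent to

     struct
     {
       int (*__pfn) ();   // method pointer or vtable index
       long __delta;      // adjustment of the this pointer
     };

   which is the two-field shape recognized above; the exact encoding is
   target-dependent, cf. TARGET_PTRMEMFUNC_VBIT_LOCATION used below.  */
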
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
        rhs = gimple_assign_rhs1 (def_stmt);
      else
        break;
    }
  return rhs;
}

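/* For instance, in a (hypothetical) chain of copies

     tmp_2 = x_1;
     tmp_3 = tmp_2;

   get_ssa_def_if_simple_copy (tmp_3) walks back through both assignments
   and returns x_1 (a default definition), stopping at any statement that
   is not a simple copy.  */
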
/* Simple linked list, describing known contents of an aggregate before a
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};

/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  JFUNC is the jump function into
   which the constants are subsequently stored.  */

static void
determine_known_aggregate_parts (gimple call, tree arg,
                                 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (TREE_TYPE (arg)))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
        {
          tree type_size;
          if (!host_integerp (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg))), 1))
            return;
          check_ref = true;
          arg_base = arg;
          arg_offset = 0;
          type_size = TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)));
          arg_size = tree_low_cst (type_size, 1);
          ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
        }
      else if (TREE_CODE (arg) == ADDR_EXPR)
        {
          HOST_WIDE_INT arg_max_size;

          arg = TREE_OPERAND (arg, 0);
          arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
                                              &arg_max_size);
          if (arg_max_size == -1
              || arg_max_size != arg_size
              || arg_offset < 0)
            return;
          if (DECL_P (arg_base))
            {
              tree size;
              check_ref = false;
              size = build_int_cst (integer_type_node, arg_size);
              ao_ref_init_from_ptr_and_size (&r, arg_base, size);
            }
          else
            return;
        }
      else
        return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
                                          &arg_max_size);
      if (arg_max_size == -1
          || arg_max_size != arg_size
          || arg_offset < 0)
        return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
     describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;
      bool partial_overlap;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
        continue;
      if (!gimple_assign_single_p (stmt))
        break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (rhs))
        break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
                                          &lhs_max_size);
      if (lhs_max_size == -1
          || lhs_max_size != lhs_size
          || (lhs_offset < arg_offset
              && lhs_offset + lhs_size > arg_offset)
          || (lhs_offset < arg_offset + arg_size
              && lhs_offset + lhs_size > arg_offset + arg_size))
        break;

      if (check_ref)
        {
          if (TREE_CODE (lhs_base) != MEM_REF
              || TREE_OPERAND (lhs_base, 0) != arg_base
              || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
            break;
        }
      else if (lhs_base != arg_base)
        break;

      if (lhs_offset + lhs_size < arg_offset
          || lhs_offset >= (arg_offset + arg_size))
        continue;

      partial_overlap = false;
      p = &list;
      while (*p && (*p)->offset < lhs_offset)
        {
          if ((*p)->offset + (*p)->size > lhs_offset)
            {
              partial_overlap = true;
              break;
            }
          p = &(*p)->next;
        }
      if (partial_overlap)
        break;
      if (*p && (*p)->offset < lhs_offset + lhs_size)
        {
          if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
            /* We already know this value is subsequently overwritten with
               something else.  */
            continue;
          else
            /* Otherwise this is a partial overlap which we cannot
               represent.  */
            break;
        }

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
        {
          n->constant = rhs;
          const_count++;
        }
      else
        n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
          || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
        break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      jfunc->agg.items = VEC_alloc (ipa_agg_jf_item_t, gc, const_count);
      while (list)
        {
          if (list->constant)
            {
              struct ipa_agg_jf_item item;
              item.offset = list->offset - arg_offset;
              item.value = prune_expression_for_jf (list->constant);
              VEC_quick_push (ipa_agg_jf_item_t, jfunc->agg.items, item);
            }
          list = list->next;
        }
    }
}

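/* As a (hypothetical) example of what the previous function detects, for

     struct S s;
     s.a = 1;
     s.b = 64;
     foo (&s);

   with no intervening clobbers, the jump function of the argument receives
   two aggregate items, one for offset 0 with value 1 and one for the
   offset of field b with value 64, and agg.by_ref is set to true because
   foo takes the aggregate by reference.  */
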
/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
                                     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gimple call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);

  if (arg_num == 0 || args->jump_functions)
    return;
  VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions, arg_num);

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);

      if (is_gimple_ip_invariant (arg))
        ipa_set_jf_constant (jfunc, arg);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
               && TREE_CODE (arg) == PARM_DECL)
        {
          int index = ipa_get_param_decl_index (info, arg);

          gcc_assert (index >= 0);
          /* Aggregate passed by value, check for pass-through, otherwise we
             will attempt to fill in aggregate contents later in this
             for cycle.  */
          if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
            {
              ipa_set_jf_simple_pass_through (jfunc, index, false);
              continue;
            }
        }
      else if (TREE_CODE (arg) == SSA_NAME)
        {
          if (SSA_NAME_IS_DEFAULT_DEF (arg))
            {
              int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
              if (index >= 0
                  && !detect_type_change_ssa (arg, call, jfunc))
                {
                  bool agg_p;
                  agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                        call, arg);
                  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
                }
            }
          else
            {
              gimple stmt = SSA_NAME_DEF_STMT (arg);
              if (is_gimple_assign (stmt))
                compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
                                                  call, stmt, arg);
              else if (gimple_code (stmt) == GIMPLE_PHI)
                compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
                                                    call, stmt);
            }
        }
      else
        compute_known_type_jump_func (arg, jfunc, call);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
           || !ipa_get_jf_pass_through_agg_preserved (jfunc))
          && (jfunc->type != IPA_JF_ANCESTOR
              || !ipa_get_jf_ancestor_agg_preserved (jfunc))
          && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
              || (POINTER_TYPE_P (TREE_TYPE (arg)))))
        determine_known_aggregate_parts (call, arg, jfunc);
    }
}

/* Compute jump functions for all edges - both direct and indirect - outgoing
   from NODE.  Also count the actual arguments in the process.  */

static void
ipa_compute_jump_functions (struct cgraph_node *node,
                            struct param_analysis_info *parms_ainfo)
{
  struct cgraph_edge *cs;

  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
                                                                  NULL);
      /* We do not need to bother analyzing calls to unknown
         functions unless they may become known during lto/whopr.  */
      if (!callee->analyzed && !flag_lto)
        continue;
      ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
}

/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
                                    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
        return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}

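/* For instance, given a member pointer parameter F, loads such as the
   (hypothetical) dumps

     f$__pfn_24 = f.__pfn;

   or the equivalent lowered MEM[(struct *)&f ...] form are matched above:
   the PARM_DECL f is returned and *OFFSET_P receives the bit position of
   the requested field.  */
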
/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}

/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
{
  struct cgraph_edge *cs;

  cs = cgraph_edge (node, stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->offset = 0;
  cs->indirect_info->polymorphic = 0;
  cs->indirect_info->agg_contents = 0;
  return cs;
}

/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

   <bb 2>:
     f$__delta_5 = f.__delta;
     f$__pfn_24 = f.__pfn;

   or

   <bb 2>:
     f$__delta_5 = MEM[(struct *)&f];
     f$__pfn_24 = MEM[(struct *)&f + 4B];

   and a few lines below:

   <bb 5>
     D.2496_3 = (int) f$__pfn_24;
     D.2497_4 = D.2496_3 & 1;
     if (D.2497_4 != 0)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 6>:
     D.2500_7 = (unsigned int) f$__delta_5;
     D.2501_8 = &S + D.2500_7;
     D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
     D.2503_10 = *D.2502_9;
     D.2504_12 = f$__pfn_24 + -1;
     D.2505_13 = (unsigned int) D.2504_12;
     D.2506_14 = D.2503_10 + D.2505_13;
     D.2507_15 = *D.2506_14;
     iftmp.11_16 = (String:: *) D.2507_15;

   <bb 7>:
     # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
     D.2500_19 = (unsigned int) f$__delta_5;
     D.2508_20 = &S + D.2500_19;
     D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */

static void
ipa_analyze_indirect_call_uses (struct cgraph_node *node,
                                struct ipa_node_params *info,
                                struct param_analysis_info *parms_ainfo,
                                gimple call, tree target)
{
  gimple def;
  tree n1, n2;
  gimple d1, d2;
  tree rec, rec2, cond;
  gimple branch;
  int index;
  basic_block bb, virt_bb, join;
  HOST_WIDE_INT offset;
  bool by_ref;

  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
        ipa_note_param_call (node, index, call);
      return;
    }

  def = SSA_NAME_DEF_STMT (target);
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg_1 (info, parms_ainfo, def,
                                   gimple_assign_rhs1 (def), &index, &offset,
                                   &by_ref))
    {
      struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  n1 = PHI_ARG_DEF (def, 0);
  n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  d1 = SSA_NAME_DEF_STMT (n1);
  d2 = SSA_NAME_DEF_STMT (n2);

  join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
        return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
        return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  rec2 = ipa_get_stmt_member_ptr_load_param (def,
                                             (TARGET_PTRMEMFUNC_VBIT_LOCATION
                                              == ptrmemfunc_vbit_in_delta),
                                             NULL);
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
    }

  return;
}

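/* To illustrate the simplest case handled above: in

     int doprinting (int (*f) (int))
     {
       return f (4);
     }

   the call target is the default definition of parameter f, so an indirect
   call note with param_index 0 is attached to the edge, which later allows
   IPA-CP to turn the call into a direct one when all callers are known to
   pass the same function.  */
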
/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
   object referenced in the expression is a formal parameter of the caller
   (described by INFO), create a call note for the statement.  */

static void
ipa_analyze_virtual_call_uses (struct cgraph_node *node,
                               struct ipa_node_params *info, gimple call,
                               tree target)
{
  struct cgraph_edge *cs;
  struct cgraph_indirect_call_info *ii;
  struct ipa_jump_func jfunc;
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
        return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      if (detect_type_change_ssa (obj, call, &jfunc))
        return;
    }
  else
    {
      gimple stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
        return;
      index = ipa_get_param_decl_index (info,
                                        SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (obj, expr, call, &jfunc, anc_offset))
        return;
    }

  cs = ipa_note_param_call (node, index, call);
  ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
  ii->otr_type = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (target)));
  ii->polymorphic = 1;
}

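/* For example (hypothetical), for a virtual call such as

     int foo (struct A *a)
     {
       return a->method (5);
     }

   the target is an OBJ_TYPE_REF based on parameter a, so the indirect edge
   is marked polymorphic, with offset 0 and otr_token identifying the slot
   of method in the virtual table of A.  */
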
/* Analyze a call statement CALL whether and how it utilizes formal parameters
   of the caller (described by INFO).  PARMS_AINFO is a pointer to a vector
   containing intermediate information about each formal parameter.  */

static void
ipa_analyze_call_uses (struct cgraph_node *node,
                       struct ipa_node_params *info,
                       struct param_analysis_info *parms_ainfo, gimple call)
{
  tree target = gimple_call_fn (call);

  if (!target)
    return;
  if (TREE_CODE (target) == SSA_NAME)
    ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
  else if (TREE_CODE (target) == OBJ_TYPE_REF)
    ipa_analyze_virtual_call_uses (node, info, call, target);
}

/* Analyze the call statement STMT with respect to formal parameters (described
   in INFO) of caller given by NODE.  Currently it only checks whether formal
   parameters are called.  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  */

static void
ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
                       struct param_analysis_info *parms_ainfo, gimple stmt)
{
  if (is_gimple_call (stmt))
    ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
}

/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
   If OP is a parameter declaration, mark it as used in the info structure
   passed in DATA.  */

static bool
visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
                            tree op, void *data)
{
  struct ipa_node_params *info = (struct ipa_node_params *) data;

  op = get_base_address (op);
  if (op
      && TREE_CODE (op) == PARM_DECL)
    {
      int index = ipa_get_param_decl_index (info, op);
      gcc_assert (index >= 0);
      ipa_set_param_used (info, index, true);
    }

  return false;
}

1871 /* Scan the function body of NODE and inspect the uses of formal parameters.
1872 Store the findings in various structures of the associated ipa_node_params
1873 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
1874 vector containing intermediate information about each formal parameter. */
1876 static void
1877 ipa_analyze_params_uses (struct cgraph_node *node,
1878 struct param_analysis_info *parms_ainfo)
1880 tree decl = node->symbol.decl;
1881 basic_block bb;
1882 struct function *func;
1883 gimple_stmt_iterator gsi;
1884 struct ipa_node_params *info = IPA_NODE_REF (node);
1885 int i;
1887 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
1888 return;
1890 for (i = 0; i < ipa_get_param_count (info); i++)
1892 tree parm = ipa_get_param (info, i);
1893 tree ddef;
1894 /* For SSA regs see if parameter is used. For non-SSA we compute
1895 the flag during modification analysis. */
1896 if (is_gimple_reg (parm)
1897 && (ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->symbol.decl),
1898 parm)) != NULL_TREE
1899 && !has_zero_uses (ddef))
1900 ipa_set_param_used (info, i, true);
1903 func = DECL_STRUCT_FUNCTION (decl);
1904 FOR_EACH_BB_FN (bb, func)
1906 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1908 gimple stmt = gsi_stmt (gsi);
1910 if (is_gimple_debug (stmt))
1911 continue;
1913 ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
1914 walk_stmt_load_store_addr_ops (stmt, info,
1915 visit_ref_for_mod_analysis,
1916 visit_ref_for_mod_analysis,
1917 visit_ref_for_mod_analysis);
1919 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1920 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
1921 visit_ref_for_mod_analysis,
1922 visit_ref_for_mod_analysis,
1923 visit_ref_for_mod_analysis);
1926 info->uses_analysis_done = 1;
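/* Editor's sketch of consuming the analysis above: once uses_analysis_done
   is set, a pass can query the per-parameter flag, e.g. (illustrative only)

     struct ipa_node_params *info = IPA_NODE_REF (node);
     if (!ipa_is_param_used (info, i))
       ... formal I is never read, written or address-taken ...  */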
1929 /* Initialize the array describing properties of formal parameters
1930 of NODE, analyze their uses and compute jump functions associated
1931 with actual arguments of calls from within NODE. */
1933 void
1934 ipa_analyze_node (struct cgraph_node *node)
1936 struct ipa_node_params *info;
1937 struct param_analysis_info *parms_ainfo;
1938 int i, param_count;
1940 ipa_check_create_node_params ();
1941 ipa_check_create_edge_args ();
1942 info = IPA_NODE_REF (node);
1943 push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
1944 ipa_initialize_node_params (node);
1946 param_count = ipa_get_param_count (info);
1947 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
1948 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
1950 ipa_analyze_params_uses (node, parms_ainfo);
1951 ipa_compute_jump_functions (node, parms_ainfo);
1953 for (i = 0; i < param_count; i++)
1955 if (parms_ainfo[i].parm_visited_statements)
1956 BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
1957 if (parms_ainfo[i].pt_visited_statements)
1958 BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
1961 pop_cfun ();
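/* Editor's sketch of a typical driver (hypothetical, for illustration): an
   IPA pass registers the hooks once and analyzes every function that has a
   gimple body, e.g.

     ipa_register_cgraph_hooks ();
     FOR_EACH_DEFINED_FUNCTION (node)
       if (node->analyzed)
         ipa_analyze_node (node);  */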
1965 /* Update the jump function DST when the call graph edge corresponding to SRC
1966 is being inlined, knowing that DST is of type ancestor and SRC of known
1967 type. */
1969 static void
1970 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
1971 struct ipa_jump_func *dst)
1973 HOST_WIDE_INT combined_offset;
1974 tree combined_type;
1976 combined_offset = ipa_get_jf_known_type_offset (src)
1977 + ipa_get_jf_ancestor_offset (dst);
1978 combined_type = ipa_get_jf_ancestor_type (dst);
1980 ipa_set_jf_known_type (dst, combined_offset,
1981 ipa_get_jf_known_type_base_type (src),
1982 combined_type);
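/* Worked example (editor's note): if SRC says "known type B at offset 32
   within the object" and DST is an ancestor jump function with offset 64,
   the result is a known-type jump function at offset 32 + 64 = 96 that
   keeps SRC's base type and uses DST's ancestor type as the component
   type.  */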
1985 /* Update the jump functions associated with call graph edge E when the call
1986 graph edge CS is being inlined, assuming that E->caller is already (possibly
1987 indirectly) inlined into CS->callee and that E has not been inlined. */
1989 static void
1990 update_jump_functions_after_inlining (struct cgraph_edge *cs,
1991 struct cgraph_edge *e)
1993 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
1994 struct ipa_edge_args *args = IPA_EDGE_REF (e);
1995 int count = ipa_get_cs_argument_count (args);
1996 int i;
1998 for (i = 0; i < count; i++)
2000 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2002 if (dst->type == IPA_JF_ANCESTOR)
2004 struct ipa_jump_func *src;
2005 int dst_fid = dst->value.ancestor.formal_id;
2007 /* A variable number of arguments can cause havoc if we try to access
2008 an argument that does not exist in the inlined edge. So make sure we
2009 don't. */
2010 if (dst_fid >= ipa_get_cs_argument_count (top))
2012 dst->type = IPA_JF_UNKNOWN;
2013 continue;
2016 src = ipa_get_ith_jump_func (top, dst_fid);
2018 if (src->agg.items
2019 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2021 struct ipa_agg_jf_item *item;
2022 int j;
2024 /* Currently we do not produce clobber aggregate jump functions,
2025 replace with merging when we do. */
2026 gcc_assert (!dst->agg.items);
2028 dst->agg.items = VEC_copy (ipa_agg_jf_item_t, gc, src->agg.items);
2029 dst->agg.by_ref = src->agg.by_ref;
2030 FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, dst->agg.items, j, item)
2031 item->offset -= dst->value.ancestor.offset;
2034 if (src->type == IPA_JF_KNOWN_TYPE)
2035 combine_known_type_and_ancestor_jfs (src, dst);
2036 else if (src->type == IPA_JF_PASS_THROUGH
2037 && src->value.pass_through.operation == NOP_EXPR)
2039 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2040 dst->value.ancestor.agg_preserved &=
2041 src->value.pass_through.agg_preserved;
2043 else if (src->type == IPA_JF_ANCESTOR)
2045 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2046 dst->value.ancestor.offset += src->value.ancestor.offset;
2047 dst->value.ancestor.agg_preserved &=
2048 src->value.ancestor.agg_preserved;
2050 else
2051 dst->type = IPA_JF_UNKNOWN;
2053 else if (dst->type == IPA_JF_PASS_THROUGH)
2055 struct ipa_jump_func *src;
2056 /* We must check the range due to calls with a variable number of arguments,
2057 and we cannot combine jump functions with operations. */
2058 if (dst->value.pass_through.operation == NOP_EXPR
2059 && (dst->value.pass_through.formal_id
2060 < ipa_get_cs_argument_count (top)))
2062 bool agg_p;
2063 int dst_fid = dst->value.pass_through.formal_id;
2064 src = ipa_get_ith_jump_func (top, dst_fid);
2065 agg_p = dst->value.pass_through.agg_preserved;
2067 dst->type = src->type;
2068 dst->value = src->value;
2070 if (src->agg.items
2071 && (agg_p || !src->agg.by_ref))
2073 /* Currently we do not produce clobber aggregate jump
2074 functions, replace with merging when we do. */
2075 gcc_assert (!dst->agg.items);
2077 dst->agg.by_ref = src->agg.by_ref;
2078 dst->agg.items = VEC_copy (ipa_agg_jf_item_t, gc,
2079 src->agg.items);
2082 if (!agg_p)
2084 if (dst->type == IPA_JF_PASS_THROUGH)
2085 dst->value.pass_through.agg_preserved = false;
2086 else if (dst->type == IPA_JF_ANCESTOR)
2087 dst->value.ancestor.agg_preserved = false;
2090 else
2091 dst->type = IPA_JF_UNKNOWN;
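/* Editor's summary of the composition rules above, where SRC is the jump
   function on the inlined call site CS and DST the one on the surviving
   edge E:

     DST ancestor         o SRC known_type       -> known_type, offsets added
     DST ancestor         o SRC pass_through/nop -> ancestor, re-indexed to
                                                    SRC's formal
     DST ancestor         o SRC ancestor         -> ancestor, offsets added
     DST pass_through/nop o any SRC              -> copy of SRC
     anything else                               -> IPA_JF_UNKNOWN  */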
2096 /* If TARGET is an addr_expr of a function declaration, make it the destination
2097 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2099 struct cgraph_edge *
2100 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
2102 struct cgraph_node *callee;
2104 if (TREE_CODE (target) == ADDR_EXPR)
2105 target = TREE_OPERAND (target, 0);
2106 if (TREE_CODE (target) != FUNCTION_DECL)
2107 return NULL;
2108 callee = cgraph_get_node (target);
2109 if (!callee)
2110 return NULL;
2111 ipa_check_create_node_params ();
2113 /* We cannot make edges to inline clones. It is a bug that someone removed
2114 the cgraph node too early. */
2115 gcc_assert (!callee->global.inlined_to);
2117 cgraph_make_edge_direct (ie, callee);
2118 if (dump_file)
2120 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
2121 "(%s/%i -> %s/%i), for stmt ",
2122 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2123 xstrdup (cgraph_node_name (ie->caller)), ie->caller->uid,
2124 xstrdup (cgraph_node_name (ie->callee)), ie->callee->uid);
2125 if (ie->call_stmt)
2126 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2127 else
2128 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2130 callee = cgraph_function_or_thunk_node (callee, NULL);
2132 return ie;
2135 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2136 return NULL if there is none. BY_REF specifies whether the value has to
2137 be passed by reference or by value. */
2139 tree
2140 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2141 HOST_WIDE_INT offset, bool by_ref)
2143 struct ipa_agg_jf_item *item;
2144 int i;
2146 if (by_ref != agg->by_ref)
2147 return NULL;
2149 FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, agg->items, i, item)
2151 if (item->offset == offset)
2153 /* Currently we do not have clobber values; once we do, NULL should be
2154 returned for them. */
2155 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2156 return item->value;
2158 else if (item->offset > offset)
2159 return NULL;
2161 return NULL;
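/* Editor's note: the early return above relies on AGG->items being sorted
   by increasing offset.  A minimal usage sketch (offsets are in bits):

     tree cst = ipa_find_agg_cst_for_param (&jfunc->agg, 64, true);
     if (cst)
       ... cst is the IP-invariant value known to live at bit offset 64 of
           the aggregate passed by reference ...  */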
2164 /* Try to find a destination for indirect edge IE that corresponds to a simple
2165 call or a call of a member function pointer and where the destination is a
2166 pointer formal parameter described by jump function JFUNC. If it can be
2167 determined, return the newly direct edge, otherwise return NULL. */
2169 static struct cgraph_edge *
2170 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2171 struct ipa_jump_func *jfunc)
2173 tree target;
2175 if (ie->indirect_info->agg_contents)
2177 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2178 ie->indirect_info->offset,
2179 ie->indirect_info->by_ref);
2180 if (!target)
2181 return NULL;
2183 else
2185 if (jfunc->type != IPA_JF_CONST)
2186 return NULL;
2187 target = ipa_get_jf_constant (jfunc);
2189 return ipa_make_edge_direct_to_target (ie, target);
2192 /* Try to find a destination for indirect edge IE that corresponds to a
2193 virtual call based on a formal parameter which is described by jump
2194 function JFUNC and if it can be determined, make it direct and return the
2195 direct edge. Otherwise, return NULL. */
2197 static struct cgraph_edge *
2198 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2199 struct ipa_jump_func *jfunc)
2201 tree binfo, target;
2203 if (jfunc->type != IPA_JF_KNOWN_TYPE)
2204 return NULL;
2206 binfo = TYPE_BINFO (ipa_get_jf_known_type_base_type (jfunc));
2207 gcc_checking_assert (binfo);
2208 binfo = get_binfo_at_offset (binfo, ipa_get_jf_known_type_offset (jfunc)
2209 + ie->indirect_info->offset,
2210 ie->indirect_info->otr_type);
2211 if (binfo)
2212 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2213 binfo);
2214 else
2215 return NULL;
2217 if (target)
2218 return ipa_make_edge_direct_to_target (ie, target);
2219 else
2220 return NULL;
2223 /* Update the param called notes associated with NODE when CS is being inlined,
2224 assuming NODE is (potentially indirectly) inlined into CS->callee.
2225 Moreover, if the callee is discovered to be constant, create a new cgraph
2226 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2227 unless NEW_EDGES is NULL. Return true iff any new edges were created. */
2229 static bool
2230 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2231 struct cgraph_node *node,
2232 VEC (cgraph_edge_p, heap) **new_edges)
2234 struct ipa_edge_args *top;
2235 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
2236 bool res = false;
2238 ipa_check_create_edge_args ();
2239 top = IPA_EDGE_REF (cs);
2241 for (ie = node->indirect_calls; ie; ie = next_ie)
2243 struct cgraph_indirect_call_info *ici = ie->indirect_info;
2244 struct ipa_jump_func *jfunc;
2245 int param_index;
2247 next_ie = ie->next_callee;
2249 if (ici->param_index == -1)
2250 continue;
2252 /* We must check the range due to calls with a variable number of arguments. */
2253 if (ici->param_index >= ipa_get_cs_argument_count (top))
2255 ici->param_index = -1;
2256 continue;
2259 param_index = ici->param_index;
2260 jfunc = ipa_get_ith_jump_func (top, param_index);
2261 if (jfunc->type == IPA_JF_PASS_THROUGH
2262 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2264 if (ici->agg_contents
2265 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
2266 ici->param_index = -1;
2267 else
2268 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
2270 else if (jfunc->type == IPA_JF_ANCESTOR)
2272 if (ici->agg_contents
2273 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
2274 ici->param_index = -1;
2275 else
2277 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
2278 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
2281 else
2282 /* Either we can find a destination for this edge now or never. */
2283 ici->param_index = -1;
2285 if (!flag_indirect_inlining)
2286 continue;
2288 if (ici->polymorphic)
2289 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc);
2290 else
2291 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc);
2293 if (new_direct_edge)
2295 new_direct_edge->indirect_inlining_edge = 1;
2296 if (new_direct_edge->call_stmt)
2297 new_direct_edge->call_stmt_cannot_inline_p
2298 = !gimple_check_call_matching_types (new_direct_edge->call_stmt,
2299 new_direct_edge->callee->symbol.decl);
2300 if (new_edges)
2302 VEC_safe_push (cgraph_edge_p, heap, *new_edges,
2303 new_direct_edge);
2304 top = IPA_EDGE_REF (cs);
2305 res = true;
2310 return res;
2313 /* Recursively traverse subtree of NODE (including node) made of inlined
2314 cgraph_edges when CS has been inlined and invoke
2315 update_indirect_edges_after_inlining on all nodes and
2316 update_jump_functions_after_inlining on all non-inlined edges that lead out
2317 of this subtree. Newly discovered indirect edges will be added to
2318 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
2319 created. */
2321 static bool
2322 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
2323 struct cgraph_node *node,
2324 VEC (cgraph_edge_p, heap) **new_edges)
2326 struct cgraph_edge *e;
2327 bool res;
2329 res = update_indirect_edges_after_inlining (cs, node, new_edges);
2331 for (e = node->callees; e; e = e->next_callee)
2332 if (!e->inline_failed)
2333 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
2334 else
2335 update_jump_functions_after_inlining (cs, e);
2336 for (e = node->indirect_calls; e; e = e->next_callee)
2337 update_jump_functions_after_inlining (cs, e);
2339 return res;
2342 /* Update jump functions and call note functions on inlining the call site CS.
2343 CS is expected to lead to a node already cloned by
2344 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
2345 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
2346 created. */
2348 bool
2349 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
2350 VEC (cgraph_edge_p, heap) **new_edges)
2352 bool changed;
2353 /* Do nothing if the preparation phase has not been carried out yet
2354 (i.e. during early inlining). */
2355 if (!ipa_node_params_vector)
2356 return false;
2357 gcc_assert (ipa_edge_args_vector);
2359 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
2361 /* We do not keep jump functions of inlined edges up to date. Better to free
2362 them so we do not access them accidentally. */
2363 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
2364 return changed;
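/* Editor's sketch: the inliner is expected to call this right after an edge
   has been inlined, roughly

     VEC (cgraph_edge_p, heap) *new_edges = NULL;
     ... inline CS, cloning the callee ...
     if (ipa_propagate_indirect_call_infos (cs, &new_edges))
       ... queue the newly discovered direct edges for inlining ...  */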
2367 /* Frees all dynamically allocated structures that the argument info points
2368 to. */
2370 void
2371 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
2373 if (args->jump_functions)
2374 ggc_free (args->jump_functions);
2376 memset (args, 0, sizeof (*args));
2379 /* Free all ipa_edge_args structures. */
2381 void
2382 ipa_free_all_edge_args (void)
2384 int i;
2385 struct ipa_edge_args *args;
2387 FOR_EACH_VEC_ELT (ipa_edge_args_t, ipa_edge_args_vector, i, args)
2388 ipa_free_edge_args_substructures (args);
2390 VEC_free (ipa_edge_args_t, gc, ipa_edge_args_vector);
2391 ipa_edge_args_vector = NULL;
2394 /* Frees all dynamically allocated structures that the param info points
2395 to. */
2397 void
2398 ipa_free_node_params_substructures (struct ipa_node_params *info)
2400 VEC_free (ipa_param_descriptor_t, heap, info->descriptors);
2401 free (info->lattices);
2402 /* Lattice values and their sources are deallocated with their allocation
2403 pool. */
2404 VEC_free (tree, heap, info->known_vals);
2405 memset (info, 0, sizeof (*info));
2408 /* Free all ipa_node_params structures. */
2410 void
2411 ipa_free_all_node_params (void)
2413 int i;
2414 struct ipa_node_params *info;
2416 FOR_EACH_VEC_ELT (ipa_node_params_t, ipa_node_params_vector, i, info)
2417 ipa_free_node_params_substructures (info);
2419 VEC_free (ipa_node_params_t, heap, ipa_node_params_vector);
2420 ipa_node_params_vector = NULL;
2423 /* Hook that is called by cgraph.c when an edge is removed. */
2425 static void
2426 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
2428 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
2429 if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
2430 <= (unsigned)cs->uid)
2431 return;
2432 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
2435 /* Hook that is called by cgraph.c when a node is removed. */
2437 static void
2438 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2440 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
2441 if (VEC_length (ipa_node_params_t, ipa_node_params_vector)
2442 <= (unsigned)node->uid)
2443 return;
2444 ipa_free_node_params_substructures (IPA_NODE_REF (node));
2447 /* Hook that is called by cgraph.c when an edge is duplicated. */
2449 static void
2450 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
2451 __attribute__((unused)) void *data)
2453 struct ipa_edge_args *old_args, *new_args;
2454 unsigned int i;
2456 ipa_check_create_edge_args ();
2458 old_args = IPA_EDGE_REF (src);
2459 new_args = IPA_EDGE_REF (dst);
2461 new_args->jump_functions = VEC_copy (ipa_jump_func_t, gc,
2462 old_args->jump_functions);
2464 for (i = 0; i < VEC_length (ipa_jump_func_t, old_args->jump_functions); i++)
2465 VEC_index (ipa_jump_func_t, new_args->jump_functions, i).agg.items
2466 = VEC_copy (ipa_agg_jf_item_t, gc,
2467 VEC_index (ipa_jump_func_t,
2468 old_args->jump_functions, i).agg.items);
2471 /* Hook that is called by cgraph.c when a node is duplicated. */
2473 static void
2474 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
2475 ATTRIBUTE_UNUSED void *data)
2477 struct ipa_node_params *old_info, *new_info;
2479 ipa_check_create_node_params ();
2480 old_info = IPA_NODE_REF (src);
2481 new_info = IPA_NODE_REF (dst);
2483 new_info->descriptors = VEC_copy (ipa_param_descriptor_t, heap,
2484 old_info->descriptors);
2485 new_info->lattices = NULL;
2486 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
2488 new_info->uses_analysis_done = old_info->uses_analysis_done;
2489 new_info->node_enqueued = old_info->node_enqueued;
2493 /* Analyze a function newly added to the callgraph. */
2495 static void
2496 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2498 ipa_analyze_node (node);
2501 /* Register our cgraph hooks if they are not already there. */
2503 void
2504 ipa_register_cgraph_hooks (void)
2506 if (!edge_removal_hook_holder)
2507 edge_removal_hook_holder =
2508 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
2509 if (!node_removal_hook_holder)
2510 node_removal_hook_holder =
2511 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
2512 if (!edge_duplication_hook_holder)
2513 edge_duplication_hook_holder =
2514 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
2515 if (!node_duplication_hook_holder)
2516 node_duplication_hook_holder =
2517 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
2518 function_insertion_hook_holder =
2519 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
2522 /* Unregister our cgraph hooks. */
2524 static void
2525 ipa_unregister_cgraph_hooks (void)
2527 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
2528 edge_removal_hook_holder = NULL;
2529 cgraph_remove_node_removal_hook (node_removal_hook_holder);
2530 node_removal_hook_holder = NULL;
2531 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
2532 edge_duplication_hook_holder = NULL;
2533 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
2534 node_duplication_hook_holder = NULL;
2535 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
2536 function_insertion_hook_holder = NULL;
2539 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2540 longer needed after ipa-cp. */
2542 void
2543 ipa_free_all_structures_after_ipa_cp (void)
2545 if (!optimize)
2547 ipa_free_all_edge_args ();
2548 ipa_free_all_node_params ();
2549 free_alloc_pool (ipcp_sources_pool);
2550 free_alloc_pool (ipcp_values_pool);
2551 ipa_unregister_cgraph_hooks ();
2555 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2556 longer needed after indirect inlining. */
2558 void
2559 ipa_free_all_structures_after_iinln (void)
2561 ipa_free_all_edge_args ();
2562 ipa_free_all_node_params ();
2563 ipa_unregister_cgraph_hooks ();
2564 if (ipcp_sources_pool)
2565 free_alloc_pool (ipcp_sources_pool);
2566 if (ipcp_values_pool)
2567 free_alloc_pool (ipcp_values_pool);
2570 /* Print the ipa_tree_map data structure of function NODE to F. */
2573 void
2574 ipa_print_node_params (FILE * f, struct cgraph_node *node)
2576 int i, count;
2577 tree temp;
2578 struct ipa_node_params *info;
2580 if (!node->analyzed)
2581 return;
2582 info = IPA_NODE_REF (node);
2583 fprintf (f, " function %s parameter descriptors:\n",
2584 cgraph_node_name (node));
2585 count = ipa_get_param_count (info);
2586 for (i = 0; i < count; i++)
2588 temp = ipa_get_param (info, i);
2589 if (TREE_CODE (temp) == PARM_DECL)
2590 fprintf (f, " param %d : %s", i,
2591 (DECL_NAME (temp)
2592 ? (*lang_hooks.decl_printable_name) (temp, 2)
2593 : "(unnamed)"));
2594 if (ipa_is_param_used (info, i))
2595 fprintf (f, " used");
2596 fprintf (f, "\n");
2600 /* Print ipa_tree_map data structures of all functions in the
2601 callgraph to F. */
2603 void
2604 ipa_print_all_params (FILE * f)
2606 struct cgraph_node *node;
2608 fprintf (f, "\nFunction parameters:\n");
2609 FOR_EACH_FUNCTION (node)
2610 ipa_print_node_params (f, node);
2613 /* Return a heap allocated vector containing formal parameters of FNDECL. */
2615 VEC(tree, heap) *
2616 ipa_get_vector_of_formal_parms (tree fndecl)
2618 VEC(tree, heap) *args;
2619 int count;
2620 tree parm;
2622 count = count_formal_params (fndecl);
2623 args = VEC_alloc (tree, heap, count);
2624 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
2625 VEC_quick_push (tree, args, parm);
2627 return args;
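/* Editor's sketch of consuming the vector; the caller owns it and must free
   it (illustrative only):

     VEC (tree, heap) *parms = ipa_get_vector_of_formal_parms (fndecl);
     int i;
     tree p;
     FOR_EACH_VEC_ELT (tree, parms, i, p)
       ... inspect PARM_DECL p ...
     VEC_free (tree, heap, parms);  */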
2630 /* Return a heap allocated vector containing types of formal parameters of
2631 function type FNTYPE. */
2633 static inline VEC(tree, heap) *
2634 get_vector_of_formal_parm_types (tree fntype)
2636 VEC(tree, heap) *types;
2637 int count = 0;
2638 tree t;
2640 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2641 count++;
2643 types = VEC_alloc (tree, heap, count);
2644 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2645 VEC_quick_push (tree, types, TREE_VALUE (t));
2647 return types;
2650 /* Modify the function declaration FNDECL and its type according to the plan in
2651 ADJUSTMENTS. It also sets base fields of individual adjustments structures
2652 to reflect the actual parameters being modified which are determined by the
2653 base_index field. */
2655 void
2656 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
2657 const char *synth_parm_prefix)
2659 VEC(tree, heap) *oparms, *otypes;
2660 tree orig_type, new_type = NULL;
2661 tree old_arg_types, t, new_arg_types = NULL;
2662 tree parm, *link = &DECL_ARGUMENTS (fndecl);
2663 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2664 tree new_reversed = NULL;
2665 bool care_for_types, last_parm_void;
2667 if (!synth_parm_prefix)
2668 synth_parm_prefix = "SYNTH";
2670 oparms = ipa_get_vector_of_formal_parms (fndecl);
2671 orig_type = TREE_TYPE (fndecl);
2672 old_arg_types = TYPE_ARG_TYPES (orig_type);
2674 /* The following test is an ugly hack; some functions simply don't have any
2675 arguments in their type. This is probably a bug but well... */
2676 care_for_types = (old_arg_types != NULL_TREE);
2677 if (care_for_types)
2679 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
2680 == void_type_node);
2681 otypes = get_vector_of_formal_parm_types (orig_type);
2682 if (last_parm_void)
2683 gcc_assert (VEC_length (tree, oparms) + 1 == VEC_length (tree, otypes));
2684 else
2685 gcc_assert (VEC_length (tree, oparms) == VEC_length (tree, otypes));
2687 else
2689 last_parm_void = false;
2690 otypes = NULL;
2693 for (i = 0; i < len; i++)
2695 struct ipa_parm_adjustment *adj;
2696 gcc_assert (link);
2698 adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
2699 parm = VEC_index (tree, oparms, adj->base_index);
2700 adj->base = parm;
2702 if (adj->copy_param)
2704 if (care_for_types)
2705 new_arg_types = tree_cons (NULL_TREE, VEC_index (tree, otypes,
2706 adj->base_index),
2707 new_arg_types);
2708 *link = parm;
2709 link = &DECL_CHAIN (parm);
2711 else if (!adj->remove_param)
2713 tree new_parm;
2714 tree ptype;
2716 if (adj->by_ref)
2717 ptype = build_pointer_type (adj->type);
2718 else
2719 ptype = adj->type;
2721 if (care_for_types)
2722 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
2724 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
2725 ptype);
2726 DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);
2728 DECL_ARTIFICIAL (new_parm) = 1;
2729 DECL_ARG_TYPE (new_parm) = ptype;
2730 DECL_CONTEXT (new_parm) = fndecl;
2731 TREE_USED (new_parm) = 1;
2732 DECL_IGNORED_P (new_parm) = 1;
2733 layout_decl (new_parm, 0);
2735 adj->base = parm;
2736 adj->reduction = new_parm;
2738 *link = new_parm;
2740 link = &DECL_CHAIN (new_parm);
2744 *link = NULL_TREE;
2746 if (care_for_types)
2748 new_reversed = nreverse (new_arg_types);
2749 if (last_parm_void)
2751 if (new_reversed)
2752 TREE_CHAIN (new_arg_types) = void_list_node;
2753 else
2754 new_reversed = void_list_node;
2758 /* Use copy_node to preserve as much as possible from the original type
2759 (debug info, attribute lists etc.).
2760 The exception is METHOD_TYPEs, which must have a THIS argument.
2761 When we are asked to remove it, we need to build a new FUNCTION_TYPE
2762 instead. */
2763 if (TREE_CODE (orig_type) != METHOD_TYPE
2764 || (VEC_index (ipa_parm_adjustment_t, adjustments, 0).copy_param
2765 && VEC_index (ipa_parm_adjustment_t, adjustments, 0).base_index == 0))
2767 new_type = build_distinct_type_copy (orig_type);
2768 TYPE_ARG_TYPES (new_type) = new_reversed;
2770 else
2772 new_type
2773 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
2774 new_reversed));
2775 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
2776 DECL_VINDEX (fndecl) = NULL_TREE;
2779 /* When the signature changes, we need to clear the builtin info. */
2780 if (DECL_BUILT_IN (fndecl))
2782 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
2783 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
2786 /* This is a new type, not a copy of an old type. Need to reassociate
2787 variants. We can handle everything except the main variant lazily. */
2788 t = TYPE_MAIN_VARIANT (orig_type);
2789 if (orig_type != t)
2791 TYPE_MAIN_VARIANT (new_type) = t;
2792 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
2793 TYPE_NEXT_VARIANT (t) = new_type;
2795 else
2797 TYPE_MAIN_VARIANT (new_type) = new_type;
2798 TYPE_NEXT_VARIANT (new_type) = NULL;
2801 TREE_TYPE (fndecl) = new_type;
2802 DECL_VIRTUAL_P (fndecl) = 0;
2803 if (otypes)
2804 VEC_free (tree, heap, otypes);
2805 VEC_free (tree, heap, oparms);
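/* Editor's sketch, assuming only the adjustment fields used in this file:
   to keep the first parameter and replace the second by the scalar it
   points to, one could build (illustrative only)

     struct ipa_parm_adjustment adj0, adj1;
     memset (&adj0, 0, sizeof (adj0));
     adj0.base_index = 0;
     adj0.copy_param = true;
     memset (&adj1, 0, sizeof (adj1));
     adj1.base_index = 1;
     adj1.offset = 0;
     adj1.type = <the pointed-to scalar type>;

   push both into an ipa_parm_adjustment_vec and pass it to
   ipa_modify_formal_parameters.  */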
2808 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
2809 If this is a directly recursive call, CS must be NULL. Otherwise it must
2810 contain the corresponding call graph edge. */
2812 void
2813 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
2814 ipa_parm_adjustment_vec adjustments)
2816 VEC(tree, heap) *vargs;
2817 VEC(tree, gc) **debug_args = NULL;
2818 gimple new_stmt;
2819 gimple_stmt_iterator gsi;
2820 tree callee_decl;
2821 int i, len;
2823 len = VEC_length (ipa_parm_adjustment_t, adjustments);
2824 vargs = VEC_alloc (tree, heap, len);
2825 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->symbol.decl;
2827 gsi = gsi_for_stmt (stmt);
2828 for (i = 0; i < len; i++)
2830 struct ipa_parm_adjustment *adj;
2832 adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
2834 if (adj->copy_param)
2836 tree arg = gimple_call_arg (stmt, adj->base_index);
2838 VEC_quick_push (tree, vargs, arg);
2840 else if (!adj->remove_param)
2842 tree expr, base, off;
2843 location_t loc;
2845 /* Since we create a new parameter out of the value of the old one, we
2846 can do the following kinds of transformations:
2848 - A scalar passed by reference is converted to a scalar passed by
2849 value. (adj->by_ref is false and the type of the original
2850 actual argument is a pointer to a scalar).
2852 - A part of an aggregate is passed instead of the whole aggregate.
2853 The part can be passed either by value or by reference, this is
2854 determined by value of adj->by_ref. Moreover, the code below
2855 handles both situations when the original aggregate is passed by
2856 value (its type is not a pointer) and when it is passed by
2857 reference (it is a pointer to an aggregate).
2859 When the new argument is passed by reference (adj->by_ref is true)
2860 it must be a part of an aggregate and therefore we form it by
2861 simply taking the address of a reference inside the original
2862 aggregate. */
2864 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
2865 base = gimple_call_arg (stmt, adj->base_index);
2866 loc = EXPR_LOCATION (base);
2868 if (TREE_CODE (base) != ADDR_EXPR
2869 && POINTER_TYPE_P (TREE_TYPE (base)))
2870 off = build_int_cst (adj->alias_ptr_type,
2871 adj->offset / BITS_PER_UNIT);
2872 else
2874 HOST_WIDE_INT base_offset;
2875 tree prev_base;
2877 if (TREE_CODE (base) == ADDR_EXPR)
2878 base = TREE_OPERAND (base, 0);
2879 prev_base = base;
2880 base = get_addr_base_and_unit_offset (base, &base_offset);
2881 /* Aggregate arguments can have non-invariant addresses. */
2882 if (!base)
2884 base = build_fold_addr_expr (prev_base);
2885 off = build_int_cst (adj->alias_ptr_type,
2886 adj->offset / BITS_PER_UNIT);
2888 else if (TREE_CODE (base) == MEM_REF)
2890 off = build_int_cst (adj->alias_ptr_type,
2891 base_offset
2892 + adj->offset / BITS_PER_UNIT);
2893 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
2894 off);
2895 base = TREE_OPERAND (base, 0);
2897 else
2899 off = build_int_cst (adj->alias_ptr_type,
2900 base_offset
2901 + adj->offset / BITS_PER_UNIT);
2902 base = build_fold_addr_expr (base);
2906 if (!adj->by_ref)
2908 tree type = adj->type;
2909 unsigned int align;
2910 unsigned HOST_WIDE_INT misalign;
2912 get_pointer_alignment_1 (base, &align, &misalign);
2913 misalign += (tree_to_double_int (off)
2914 .sext (TYPE_PRECISION (TREE_TYPE (off))).low
2915 * BITS_PER_UNIT);
2916 misalign = misalign & (align - 1);
2917 if (misalign != 0)
2918 align = (misalign & -misalign);
2919 if (align < TYPE_ALIGN (type))
2920 type = build_aligned_type (type, align);
2921 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
2923 else
2925 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
2926 expr = build_fold_addr_expr (expr);
2929 expr = force_gimple_operand_gsi (&gsi, expr,
2930 adj->by_ref
2931 || is_gimple_reg_type (adj->type),
2932 NULL, true, GSI_SAME_STMT);
2933 VEC_quick_push (tree, vargs, expr);
2935 if (!adj->copy_param && MAY_HAVE_DEBUG_STMTS)
2937 unsigned int ix;
2938 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
2939 gimple def_temp;
2941 arg = gimple_call_arg (stmt, adj->base_index);
2942 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
2944 if (!fold_convertible_p (TREE_TYPE (origin), arg))
2945 continue;
2946 arg = fold_convert_loc (gimple_location (stmt),
2947 TREE_TYPE (origin), arg);
2949 if (debug_args == NULL)
2950 debug_args = decl_debug_args_insert (callee_decl);
2951 for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl); ix += 2)
2952 if (ddecl == origin)
2954 ddecl = VEC_index (tree, *debug_args, ix + 1);
2955 break;
2957 if (ddecl == NULL)
2959 ddecl = make_node (DEBUG_EXPR_DECL);
2960 DECL_ARTIFICIAL (ddecl) = 1;
2961 TREE_TYPE (ddecl) = TREE_TYPE (origin);
2962 DECL_MODE (ddecl) = DECL_MODE (origin);
2964 VEC_safe_push (tree, gc, *debug_args, origin);
2965 VEC_safe_push (tree, gc, *debug_args, ddecl);
2967 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg),
2968 stmt);
2969 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
2973 if (dump_file && (dump_flags & TDF_DETAILS))
2975 fprintf (dump_file, "replacing stmt:");
2976 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
2979 new_stmt = gimple_build_call_vec (callee_decl, vargs);
2980 VEC_free (tree, heap, vargs);
2981 if (gimple_call_lhs (stmt))
2982 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
2984 gimple_set_block (new_stmt, gimple_block (stmt));
2985 if (gimple_has_location (stmt))
2986 gimple_set_location (new_stmt, gimple_location (stmt));
2987 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
2988 gimple_call_copy_flags (new_stmt, stmt);
2990 if (dump_file && (dump_flags & TDF_DETAILS))
2992 fprintf (dump_file, "with stmt:");
2993 print_gimple_stmt (dump_file, new_stmt, 0, 0);
2994 fprintf (dump_file, "\n");
2996 gsi_replace (&gsi, new_stmt, true);
2997 if (cs)
2998 cgraph_set_call_stmt (cs, new_stmt);
2999 update_ssa (TODO_update_ssa);
3000 free_dominance_info (CDI_DOMINATORS);
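/* Editor's note: this function is the call-site counterpart of
   ipa_modify_formal_parameters above; a signature change is only consistent
   when the same ADJUSTMENTS vector is applied to the declaration and to
   every call statement, e.g. (the prefix is chosen for illustration)

     ipa_modify_formal_parameters (fndecl, adjustments, "ISRA");
     ... for every call statement STMT with call graph edge CS ...
       ipa_modify_call_arguments (cs, stmt, adjustments);  */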
3003 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
3005 static bool
3006 index_in_adjustments_multiple_times_p (int base_index,
3007 ipa_parm_adjustment_vec adjustments)
3009 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
3010 bool one = false;
3012 for (i = 0; i < len; i++)
3014 struct ipa_parm_adjustment *adj;
3015 adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
3017 if (adj->base_index == base_index)
3019 if (one)
3020 return true;
3021 else
3022 one = true;
3025 return false;
3029 /* Return adjustments that should have the same effect on function parameters
3030 and call arguments as if they were first changed according to adjustments in
3031 INNER and then by adjustments in OUTER. */
3033 ipa_parm_adjustment_vec
3034 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
3035 ipa_parm_adjustment_vec outer)
3037 int i, outlen = VEC_length (ipa_parm_adjustment_t, outer);
3038 int inlen = VEC_length (ipa_parm_adjustment_t, inner);
3039 int removals = 0;
3040 ipa_parm_adjustment_vec adjustments, tmp;
3042 tmp = VEC_alloc (ipa_parm_adjustment_t, heap, inlen);
3043 for (i = 0; i < inlen; i++)
3045 struct ipa_parm_adjustment *n;
3046 n = &VEC_index (ipa_parm_adjustment_t, inner, i);
3048 if (n->remove_param)
3049 removals++;
3050 else
3051 VEC_quick_push (ipa_parm_adjustment_t, tmp, *n);
3054 adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, outlen + removals);
3055 for (i = 0; i < outlen; i++)
3057 struct ipa_parm_adjustment r;
3058 struct ipa_parm_adjustment *out = &VEC_index (ipa_parm_adjustment_t,
3059 outer, i);
3060 struct ipa_parm_adjustment *in = &VEC_index (ipa_parm_adjustment_t, tmp,
3061 out->base_index);
3063 memset (&r, 0, sizeof (r));
3064 gcc_assert (!in->remove_param);
3065 if (out->remove_param)
3067 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
3069 r.remove_param = true;
3070 VEC_quick_push (ipa_parm_adjustment_t, adjustments, r);
3072 continue;
3075 r.base_index = in->base_index;
3076 r.type = out->type;
3078 /* FIXME: Create nonlocal value too. */
3080 if (in->copy_param && out->copy_param)
3081 r.copy_param = true;
3082 else if (in->copy_param)
3083 r.offset = out->offset;
3084 else if (out->copy_param)
3085 r.offset = in->offset;
3086 else
3087 r.offset = in->offset + out->offset;
3088 VEC_quick_push (ipa_parm_adjustment_t, adjustments, r);
3091 for (i = 0; i < inlen; i++)
3093 struct ipa_parm_adjustment *n = &VEC_index (ipa_parm_adjustment_t,
3094 inner, i);
3096 if (n->remove_param)
3097 VEC_quick_push (ipa_parm_adjustment_t, adjustments, *n);
3100 VEC_free (ipa_parm_adjustment_t, heap, tmp);
3101 return adjustments;
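/* Worked example (editor's note): let INNER copy param 0 and take offset 32
   from param 1, and let OUTER, written against that intermediate signature,
   copy its param 0 and take offset 64 from its param 1.  The combination
   then copies original param 0 and takes offset 32 + 64 = 96 from original
   param 1, matching the in->offset + out->offset case above.  */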
3104 /* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
3105 friendly way, assuming they are meant to be applied to FNDECL. */
3107 void
3108 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
3109 tree fndecl)
3111 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
3112 bool first = true;
3113 VEC(tree, heap) *parms = ipa_get_vector_of_formal_parms (fndecl);
3115 fprintf (file, "IPA param adjustments: ");
3116 for (i = 0; i < len; i++)
3118 struct ipa_parm_adjustment *adj;
3119 adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
3121 if (!first)
3122 fprintf (file, " ");
3123 else
3124 first = false;
3126 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
3127 print_generic_expr (file, VEC_index (tree, parms, adj->base_index), 0);
3128 if (adj->base)
3130 fprintf (file, ", base: ");
3131 print_generic_expr (file, adj->base, 0);
3133 if (adj->reduction)
3135 fprintf (file, ", reduction: ");
3136 print_generic_expr (file, adj->reduction, 0);
3138 if (adj->new_ssa_base)
3140 fprintf (file, ", new_ssa_base: ");
3141 print_generic_expr (file, adj->new_ssa_base, 0);
3144 if (adj->copy_param)
3145 fprintf (file, ", copy_param");
3146 else if (adj->remove_param)
3147 fprintf (file, ", remove_param");
3148 else
3149 fprintf (file, ", offset %li", (long) adj->offset);
3150 if (adj->by_ref)
3151 fprintf (file, ", by_ref");
3152 print_node_brief (file, ", type: ", adj->type, 0);
3153 fprintf (file, "\n");
3155 VEC_free (tree, heap, parms);
3158 /* Stream out jump function JUMP_FUNC to OB. */
3160 static void
3161 ipa_write_jump_function (struct output_block *ob,
3162 struct ipa_jump_func *jump_func)
3164 struct ipa_agg_jf_item *item;
3165 struct bitpack_d bp;
3166 int i, count;
3168 streamer_write_uhwi (ob, jump_func->type);
3169 switch (jump_func->type)
3171 case IPA_JF_UNKNOWN:
3172 break;
3173 case IPA_JF_KNOWN_TYPE:
3174 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
3175 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
3176 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
3177 break;
3178 case IPA_JF_CONST:
3179 gcc_assert (
3180 EXPR_LOCATION (jump_func->value.constant) == UNKNOWN_LOCATION);
3181 stream_write_tree (ob, jump_func->value.constant, true);
3182 break;
3183 case IPA_JF_PASS_THROUGH:
3184 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
3185 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
3186 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
3187 bp = bitpack_create (ob->main_stream);
3188 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
3189 streamer_write_bitpack (&bp);
3190 break;
3191 case IPA_JF_ANCESTOR:
3192 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
3193 stream_write_tree (ob, jump_func->value.ancestor.type, true);
3194 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
3195 bp = bitpack_create (ob->main_stream);
3196 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
3197 streamer_write_bitpack (&bp);
3198 break;
3201 count = VEC_length (ipa_agg_jf_item_t, jump_func->agg.items);
3202 streamer_write_uhwi (ob, count);
3203 if (count)
3205 bp = bitpack_create (ob->main_stream);
3206 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
3207 streamer_write_bitpack (&bp);
3210 FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, jump_func->agg.items, i, item)
3212 streamer_write_uhwi (ob, item->offset);
3213 stream_write_tree (ob, item->value, true);
3217 /* Read in jump function JUMP_FUNC from IB. */
3219 static void
3220 ipa_read_jump_function (struct lto_input_block *ib,
3221 struct ipa_jump_func *jump_func,
3222 struct data_in *data_in)
3224 struct bitpack_d bp;
3225 int i, count;
3227 jump_func->type = (enum jump_func_type) streamer_read_uhwi (ib);
3228 switch (jump_func->type)
3230 case IPA_JF_UNKNOWN:
3231 break;
3232 case IPA_JF_KNOWN_TYPE:
3233 jump_func->value.known_type.offset = streamer_read_uhwi (ib);
3234 jump_func->value.known_type.base_type = stream_read_tree (ib, data_in);
3235 jump_func->value.known_type.component_type = stream_read_tree (ib,
3236 data_in);
3237 break;
3238 case IPA_JF_CONST:
3239 jump_func->value.constant = stream_read_tree (ib, data_in);
3240 break;
3241 case IPA_JF_PASS_THROUGH:
3242 jump_func->value.pass_through.operand = stream_read_tree (ib, data_in);
3243 jump_func->value.pass_through.formal_id = streamer_read_uhwi (ib);
3244 jump_func->value.pass_through.operation
3245 = (enum tree_code) streamer_read_uhwi (ib);
3246 bp = streamer_read_bitpack (ib);
3247 jump_func->value.pass_through.agg_preserved = bp_unpack_value (&bp, 1);
3248 break;
3249 case IPA_JF_ANCESTOR:
3250 jump_func->value.ancestor.offset = streamer_read_uhwi (ib);
3251 jump_func->value.ancestor.type = stream_read_tree (ib, data_in);
3252 jump_func->value.ancestor.formal_id = streamer_read_uhwi (ib);
3253 bp = streamer_read_bitpack (ib);
3254 jump_func->value.ancestor.agg_preserved = bp_unpack_value (&bp, 1);
3255 break;
3258 count = streamer_read_uhwi (ib);
3259 jump_func->agg.items = VEC_alloc (ipa_agg_jf_item_t, gc, count);
3260 if (count)
3262 bp = streamer_read_bitpack (ib);
3263 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
3265 for (i = 0; i < count; i++)
3267 struct ipa_agg_jf_item item;
3268 item.offset = streamer_read_uhwi (ib);
3269 item.value = stream_read_tree (ib, data_in);
3270 VEC_quick_push (ipa_agg_jf_item_t, jump_func->agg.items, item);
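/* Editor's note summarizing the stream layout implied by the writer and
   reader above (the two must stay in sync):

     uhwi     jump function type
     ...      type-specific payload (trees, uhwis, one-bit bitpacks)
     uhwi     number of aggregate items
     bitpack  by_ref flag (present only when the count is nonzero)
     per item: uhwi offset, tree value  */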
3274 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
3275 relevant to indirect inlining to OB. */
3277 static void
3278 ipa_write_indirect_edge_info (struct output_block *ob,
3279 struct cgraph_edge *cs)
3281 struct cgraph_indirect_call_info *ii = cs->indirect_info;
3282 struct bitpack_d bp;
3284 streamer_write_hwi (ob, ii->param_index);
3285 streamer_write_hwi (ob, ii->offset);
3286 bp = bitpack_create (ob->main_stream);
3287 bp_pack_value (&bp, ii->polymorphic, 1);
3288 bp_pack_value (&bp, ii->agg_contents, 1);
3289 bp_pack_value (&bp, ii->by_ref, 1);
3290 streamer_write_bitpack (&bp);
3292 if (ii->polymorphic)
3294 streamer_write_hwi (ob, ii->otr_token);
3295 stream_write_tree (ob, ii->otr_type, true);
3299 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
3300 relevant to indirect inlining from IB. */
3302 static void
3303 ipa_read_indirect_edge_info (struct lto_input_block *ib,
3304 struct data_in *data_in,
3305 struct cgraph_edge *cs)
3307 struct cgraph_indirect_call_info *ii = cs->indirect_info;
3308 struct bitpack_d bp;
3310 ii->param_index = (int) streamer_read_hwi (ib);
3311 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
3312 bp = streamer_read_bitpack (ib);
3313 ii->polymorphic = bp_unpack_value (&bp, 1);
3314 ii->agg_contents = bp_unpack_value (&bp, 1);
3315 ii->by_ref = bp_unpack_value (&bp, 1);
3316 if (ii->polymorphic)
3318 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
3319 ii->otr_type = stream_read_tree (ib, data_in);
3323 /* Stream out NODE info to OB. */
3325 static void
3326 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
3328 int node_ref;
3329 lto_symtab_encoder_t encoder;
3330 struct ipa_node_params *info = IPA_NODE_REF (node);
3331 int j;
3332 struct cgraph_edge *e;
3333 struct bitpack_d bp;
3335 encoder = ob->decl_state->symtab_node_encoder;
3336 node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
3337 streamer_write_uhwi (ob, node_ref);
3339 bp = bitpack_create (ob->main_stream);
3340 gcc_assert (info->uses_analysis_done
3341 || ipa_get_param_count (info) == 0);
3342 gcc_assert (!info->node_enqueued);
3343 gcc_assert (!info->ipcp_orig_node);
3344 for (j = 0; j < ipa_get_param_count (info); j++)
3345 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
3346 streamer_write_bitpack (&bp);
3347 for (e = node->callees; e; e = e->next_callee)
3349 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3351 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
3352 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
3353 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
3355 for (e = node->indirect_calls; e; e = e->next_callee)
3357 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3359 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
3360 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
3361 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
3362 ipa_write_indirect_edge_info (ob, e);
3366 /* Stream in NODE info from IB. */
3368 static void
3369 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
3370 struct data_in *data_in)
3372 struct ipa_node_params *info = IPA_NODE_REF (node);
3373 int k;
3374 struct cgraph_edge *e;
3375 struct bitpack_d bp;
3377 ipa_initialize_node_params (node);
3379 bp = streamer_read_bitpack (ib);
3380 if (ipa_get_param_count (info) != 0)
3381 info->uses_analysis_done = true;
3382 info->node_enqueued = false;
3383 for (k = 0; k < ipa_get_param_count (info); k++)
3384 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
3385 for (e = node->callees; e; e = e->next_callee)
3387 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3388 int count = streamer_read_uhwi (ib);
3390 if (!count)
3391 continue;
3392 VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions, count);
3394 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
3395 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), data_in);
3397 for (e = node->indirect_calls; e; e = e->next_callee)
3399 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3400 int count = streamer_read_uhwi (ib);
3402 if (count)
3404 VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions,
3405 count);
3406 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
3407 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k),
3408 data_in);
3410 ipa_read_indirect_edge_info (ib, data_in, e);
3414 /* Write jump functions for the nodes in the current partition. */
3416 void
3417 ipa_prop_write_jump_functions (void)
3419 struct cgraph_node *node;
3420 struct output_block *ob;
3421 unsigned int count = 0;
3422 lto_symtab_encoder_iterator lsei;
3423 lto_symtab_encoder_t encoder;
3426 if (!ipa_node_params_vector)
3427 return;
3429 ob = create_output_block (LTO_section_jump_functions);
3430 encoder = ob->decl_state->symtab_node_encoder;
3431 ob->cgraph_node = NULL;
3432 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
3433 lsei_next_function_in_partition (&lsei))
3435 node = lsei_cgraph_node (lsei);
3436 if (cgraph_function_with_gimple_body_p (node)
3437 && IPA_NODE_REF (node) != NULL)
3438 count++;
3441 streamer_write_uhwi (ob, count);
3443 /* Process all of the functions. */
3444 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
3445 lsei_next_function_in_partition (&lsei))
3447 node = lsei_cgraph_node (lsei);
3448 if (cgraph_function_with_gimple_body_p (node)
3449 && IPA_NODE_REF (node) != NULL)
3450 ipa_write_node_info (ob, node);
3452 streamer_write_char_stream (ob->main_stream, 0);
3453 produce_asm (ob, NULL);
3454 destroy_output_block (ob);
3457 /* Read section in file FILE_DATA of length LEN with data DATA. */
3459 static void
3460 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
3461 size_t len)
3463 const struct lto_function_header *header =
3464 (const struct lto_function_header *) data;
3465 const int cfg_offset = sizeof (struct lto_function_header);
3466 const int main_offset = cfg_offset + header->cfg_size;
3467 const int string_offset = main_offset + header->main_size;
3468 struct data_in *data_in;
3469 struct lto_input_block ib_main;
3470 unsigned int i;
3471 unsigned int count;
3473 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
3474 header->main_size);
3476 data_in =
3477 lto_data_in_create (file_data, (const char *) data + string_offset,
3478 header->string_size, NULL);
3479 count = streamer_read_uhwi (&ib_main);
3481 for (i = 0; i < count; i++)
3483 unsigned int index;
3484 struct cgraph_node *node;
3485 lto_symtab_encoder_t encoder;
3487 index = streamer_read_uhwi (&ib_main);
3488 encoder = file_data->symtab_node_encoder;
3489 node = cgraph (lto_symtab_encoder_deref (encoder, index));
3490 gcc_assert (node->analyzed);
3491 ipa_read_node_info (&ib_main, node, data_in);
3493 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
3494 len);
3495 lto_data_in_delete (data_in);
3498 /* Read ipcp jump functions. */
3500 void
3501 ipa_prop_read_jump_functions (void)
3503 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
3504 struct lto_file_decl_data *file_data;
3505 unsigned int j = 0;
3507 ipa_check_create_node_params ();
3508 ipa_check_create_edge_args ();
3509 ipa_register_cgraph_hooks ();
3511 while ((file_data = file_data_vec[j++]))
3513 size_t len;
3514 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
3516 if (data)
3517 ipa_prop_read_section (file_data, data, len);
3521 /* After merging units, we can get mismatches in argument counts.
3522 Decl merging might also have rendered parameter lists obsolete.
3523 We also compute called_with_variable_arg info here. */
3525 void
3526 ipa_update_after_lto_read (void)
3528 struct cgraph_node *node;
3530 ipa_check_create_node_params ();
3531 ipa_check_create_edge_args ();
3533 FOR_EACH_DEFINED_FUNCTION (node)
3534 if (node->analyzed)
3535 ipa_initialize_node_params (node);