gcc/ipa-prop.c
1 /* Interprocedural analyses.
2 Copyright (C) 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree.h"
25 #include "langhooks.h"
26 #include "ggc.h"
27 #include "target.h"
28 #include "cgraph.h"
29 #include "ipa-prop.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-inline.h"
33 #include "gimple.h"
34 #include "flags.h"
35 #include "timevar.h"
36 #include "flags.h"
37 #include "diagnostic.h"
38 #include "tree-pretty-print.h"
39 #include "gimple-pretty-print.h"
40 #include "lto-streamer.h"
43 /* Intermediate information about a parameter that is only useful during the
44 run of ipa_analyze_node and is not kept afterwards. */
46 struct param_analysis_info
48 bool modified;
49 bitmap visited_statements;
52 /* Vector where the parameter infos are actually stored. */
53 VEC (ipa_node_params_t, heap) *ipa_node_params_vector;
54 /* Vector where the argument infos are actually stored. */
55 VEC (ipa_edge_args_t, gc) *ipa_edge_args_vector;
57 /* Bitmap with all UIDs of call graph edges that have already been processed
58 by indirect inlining. */
59 static bitmap iinlining_processed_edges;
61 /* Holders of ipa cgraph hooks: */
62 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
63 static struct cgraph_node_hook_list *node_removal_hook_holder;
64 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
65 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
67 /* Add cgraph NODE described by INFO to the worklist WL regardless of whether
68 it is in one or not. It should almost never be used directly, as opposed to
69 ipa_push_func_to_list. */
71 void
72 ipa_push_func_to_list_1 (struct ipa_func_list **wl,
73 struct cgraph_node *node,
74 struct ipa_node_params *info)
76 struct ipa_func_list *temp;
78 info->node_enqueued = 1;
79 temp = XCNEW (struct ipa_func_list);
80 temp->node = node;
81 temp->next = *wl;
82 *wl = temp;
85 /* Initialize worklist to contain all functions. */
87 struct ipa_func_list *
88 ipa_init_func_list (void)
90 struct cgraph_node *node;
91 struct ipa_func_list * wl;
93 wl = NULL;
94 for (node = cgraph_nodes; node; node = node->next)
95 if (node->analyzed)
97 struct ipa_node_params *info = IPA_NODE_REF (node);
98 /* Unreachable nodes should have been eliminated before ipcp and
99 inlining. */
100 gcc_assert (node->needed || node->reachable);
101 ipa_push_func_to_list_1 (&wl, node, info);
104 return wl;
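/* A minimal illustrative sketch (not part of this file) of how the worklist
   primitives above are typically driven by an IPA pass:

     struct ipa_func_list *wl = ipa_init_func_list ();
     while (wl)
       {
         struct cgraph_node *node = ipa_pop_func_from_list (&wl);
         ... process NODE, possibly re-enqueueing callers with
             ipa_push_func_to_list ...
       }
*/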
107 /* Remove a function from the worklist WL and return it. */
109 struct cgraph_node *
110 ipa_pop_func_from_list (struct ipa_func_list **wl)
112 struct ipa_node_params *info;
113 struct ipa_func_list *first;
114 struct cgraph_node *node;
116 first = *wl;
117 *wl = (*wl)->next;
118 node = first->node;
119 free (first);
121 info = IPA_NODE_REF (node);
122 info->node_enqueued = 0;
123 return node;
126 /* Return the index of the formal parameter whose tree is PTREE in the function
127 that corresponds to INFO. */
129 static int
130 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
132 int i, count;
134 count = ipa_get_param_count (info);
135 for (i = 0; i < count; i++)
136 if (ipa_get_param (info, i) == ptree)
137 return i;
139 return -1;
142 /* Populate the param_decl field in parameter descriptors of INFO that
143 corresponds to NODE. */
145 static void
146 ipa_populate_param_decls (struct cgraph_node *node,
147 struct ipa_node_params *info)
149 tree fndecl;
150 tree fnargs;
151 tree parm;
152 int param_num;
154 fndecl = node->decl;
155 fnargs = DECL_ARGUMENTS (fndecl);
156 param_num = 0;
157 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
159 info->params[param_num].decl = parm;
160 param_num++;
164 /* Return how many formal parameters FNDECL has. */
166 static inline int
167 count_formal_params_1 (tree fndecl)
169 tree parm;
170 int count = 0;
172 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
173 count++;
175 return count;
178 /* Count number of formal parameters in NODE. Store the result to the
179 appropriate field of INFO. */
181 static void
182 ipa_count_formal_params (struct cgraph_node *node,
183 struct ipa_node_params *info)
185 int param_num;
187 param_num = count_formal_params_1 (node->decl);
188 ipa_set_param_count (info, param_num);
191 /* Initialize the ipa_node_params structure associated with NODE by counting
192 the function parameters, creating the descriptors and populating their
193 param_decls. */
195 void
196 ipa_initialize_node_params (struct cgraph_node *node)
198 struct ipa_node_params *info = IPA_NODE_REF (node);
200 if (!info->params)
202 ipa_count_formal_params (node, info);
203 info->params = XCNEWVEC (struct ipa_param_descriptor,
204 ipa_get_param_count (info));
205 ipa_populate_param_decls (node, info);
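/* As a purely illustrative example: for a function declared as

     int foo (int a, char *b);

   the code above creates two parameter descriptors, with
   info->params[0].decl being the PARM_DECL of A and info->params[1].decl
   that of B, in the order given by DECL_ARGUMENTS.  */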
209 /* Count number of arguments callsite CS has and store it in
210 ipa_edge_args structure corresponding to this callsite. */
212 static void
213 ipa_count_arguments (struct cgraph_edge *cs)
215 gimple stmt;
216 int arg_num;
218 stmt = cs->call_stmt;
219 gcc_assert (is_gimple_call (stmt));
220 arg_num = gimple_call_num_args (stmt);
221 if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
222 <= (unsigned) cgraph_edge_max_uid)
223 VEC_safe_grow_cleared (ipa_edge_args_t, gc,
224 ipa_edge_args_vector, cgraph_edge_max_uid + 1);
225 ipa_set_cs_argument_count (IPA_EDGE_REF (cs), arg_num);
228 /* Print the jump functions associated with call graph edge CS to file F. */
230 static void
231 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
233 int i, count;
235 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
236 for (i = 0; i < count; i++)
238 struct ipa_jump_func *jump_func;
239 enum jump_func_type type;
241 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
242 type = jump_func->type;
244 fprintf (f, " param %d: ", i);
245 if (type == IPA_JF_UNKNOWN)
246 fprintf (f, "UNKNOWN\n");
247 else if (type == IPA_JF_KNOWN_TYPE)
249 tree binfo_type = TREE_TYPE (jump_func->value.base_binfo);
250 fprintf (f, "KNOWN TYPE, type in binfo is: ");
251 print_generic_expr (f, binfo_type, 0);
252 fprintf (f, " (%u)\n", TYPE_UID (binfo_type));
254 else if (type == IPA_JF_CONST)
256 tree val = jump_func->value.constant;
257 fprintf (f, "CONST: ");
258 print_generic_expr (f, val, 0);
259 if (TREE_CODE (val) == ADDR_EXPR
260 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
262 fprintf (f, " -> ");
263 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)), 0);
266 fprintf (f, "\n");
268 else if (type == IPA_JF_CONST_MEMBER_PTR)
270 fprintf (f, "CONST MEMBER PTR: ");
271 print_generic_expr (f, jump_func->value.member_cst.pfn, 0);
272 fprintf (f, ", ");
273 print_generic_expr (f, jump_func->value.member_cst.delta, 0);
274 fprintf (f, "\n");
276 else if (type == IPA_JF_PASS_THROUGH)
278 fprintf (f, "PASS THROUGH: ");
279 fprintf (f, "%d, op %s ",
280 jump_func->value.pass_through.formal_id,
281 tree_code_name[(int)
282 jump_func->value.pass_through.operation]);
283 if (jump_func->value.pass_through.operation != NOP_EXPR)
284 print_generic_expr (f,
285 jump_func->value.pass_through.operand, 0);
286 fprintf (f, "\n");
288 else if (type == IPA_JF_ANCESTOR)
290 fprintf (f, "ANCESTOR: ");
291 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
292 jump_func->value.ancestor.formal_id,
293 jump_func->value.ancestor.offset);
294 print_generic_expr (f, jump_func->value.ancestor.type, 0);
295 fprintf (f, "\n");
301 /* Print the jump functions of all arguments on all call graph edges going from
302 NODE to file F. */
304 void
305 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
307 struct cgraph_edge *cs;
308 int i;
310 fprintf (f, " Jump functions of caller %s:\n", cgraph_node_name (node));
311 for (cs = node->callees; cs; cs = cs->next_callee)
313 if (!ipa_edge_args_info_available_for_edge_p (cs))
314 continue;
316 fprintf (f, " callsite %s/%i -> %s/%i : \n",
317 cgraph_node_name (node), node->uid,
318 cgraph_node_name (cs->callee), cs->callee->uid);
319 ipa_print_node_jump_functions_for_edge (f, cs);
322 for (cs = node->indirect_calls, i = 0; cs; cs = cs->next_callee, i++)
324 if (!ipa_edge_args_info_available_for_edge_p (cs))
325 continue;
327 if (cs->call_stmt)
329 fprintf (f, " indirect callsite %d for stmt ", i);
330 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
332 else
333 fprintf (f, " indirect callsite %d :\n", i);
334 ipa_print_node_jump_functions_for_edge (f, cs);
339 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
341 void
342 ipa_print_all_jump_functions (FILE *f)
344 struct cgraph_node *node;
346 fprintf (f, "\nJump functions:\n");
347 for (node = cgraph_nodes; node; node = node->next)
349 ipa_print_node_jump_functions (f, node);
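/* Roughly, and only as an illustration of the format produced by the
   printing routines above, the dump looks like:

     Jump functions:
       Jump functions of caller  some_caller:
         callsite  some_caller/0 -> some_callee/1 :
            param 0: PASS THROUGH: 0, op nop_expr
            param 1: CONST: 4

   (function names and numbers here are made up).  */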
353 /* Structure to be passed in between detect_type_change and
354 check_stmt_for_type_change. */
356 struct type_change_info
358 /* Set to true if dynamic type change has been detected. */
359 bool type_maybe_changed;
362 /* Return true if STMT can modify a virtual method table pointer.
364 This function makes special assumptions about both constructors and
365 destructors which are all the functions that are allowed to alter the VMT
366 pointers. It assumes that destructors begin with assignment into all VMT
367 pointers and that constructors essentially proceed in the following way:
369 1) The very first thing they do is call constructors of ancestor
370 sub-objects that have them.
372 2) Then VMT pointers of this and all its ancestors are set to new values
373 corresponding to the type of the constructor.
375 3) Only afterwards, other things such as constructors of member sub-objects
376 and the code written by the user is run. Only this may include calling
377 virtual functions, directly or indirectly.
379 There is no other way to call a constructor of an ancestor sub-object.
382 This means that we do not have to care whether constructors get the correct
383 type information because they will always change it (in fact, if we define
384 the type to be given by the VMT pointer, it is undefined).
386 The most important fact to derive from the above is that if, for some
387 statement in section 3, we try to detect whether the dynamic type has
388 changed, we can safely ignore all calls as we examine the function body
389 backwards until we reach statements in section 2 because these calls cannot
390 be ancestor constructors or destructors (if the input is not bogus) and so
391 do not change the dynamic type (this holds true only for automatically
392 allocated objects but at the moment we devirtualize only these). We then
393 must detect that statements in section 2 change the dynamic type and can try
394 to derive the new type. That is enough and we can stop; we will never see
395 the calls into constructors of sub-objects in this code. Therefore we can
396 safely ignore all call statements that we traverse.
399 static bool
400 stmt_may_be_vtbl_ptr_store (gimple stmt)
402 if (is_gimple_call (stmt))
403 return false;
404 else if (is_gimple_assign (stmt))
406 tree lhs = gimple_assign_lhs (stmt);
408 if (TREE_CODE (lhs) == COMPONENT_REF
409 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
410 && !AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
411 return false;
412 /* In the future we might want to use get_base_ref_and_offset to find
413 if there is a field corresponding to the offset and if so, proceed
414 almost like if it was a component ref. */
416 return true;
419 /* Callback of walk_aliased_vdefs and a helper function for
420 detect_type_change to check whether a particular statement may modify
421 the virtual table pointer, and if possible also determine the new type of
422 the (sub-)object. It stores its result into DATA, which points to a
423 type_change_info structure. */
425 static bool
426 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
428 gimple stmt = SSA_NAME_DEF_STMT (vdef);
429 struct type_change_info *tci = (struct type_change_info *) data;
431 if (stmt_may_be_vtbl_ptr_store (stmt))
433 tci->type_maybe_changed = true;
434 return true;
436 else
437 return false;
440 /* Detect whether the dynamic type of ARG has changed (before callsite CALL) by
441 looking for assignments to its virtual table pointer. If it has, return true
442 and fill in the jump function JFUNC with relevant type information or set it
443 to unknown. ARG is the object itself (not a pointer to it, unless
444 dereferenced). BASE is the base of the memory access as returned by
445 get_ref_base_and_extent, as is the offset. */
447 static bool
448 detect_type_change (tree arg, tree base, gimple call,
449 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
451 struct type_change_info tci;
452 ao_ref ao;
454 gcc_checking_assert (DECL_P (arg)
455 || TREE_CODE (arg) == MEM_REF
456 || handled_component_p (arg));
457 /* Const calls cannot call virtual methods through VMT and so type changes do
458 not matter. */
459 if (!flag_devirtualize || !gimple_vuse (call))
460 return false;
462 tci.type_maybe_changed = false;
464 ao.ref = arg;
465 ao.base = base;
466 ao.offset = offset;
467 ao.size = POINTER_SIZE;
468 ao.max_size = ao.size;
469 ao.ref_alias_set = -1;
470 ao.base_alias_set = -1;
472 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
473 &tci, NULL);
474 if (!tci.type_maybe_changed)
475 return false;
477 jfunc->type = IPA_JF_UNKNOWN;
478 return true;
481 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
482 SSA name (its dereference will become the base and the offset is assumed to
483 be zero). */
485 static bool
486 detect_type_change_ssa (tree arg, gimple call, struct ipa_jump_func *jfunc)
488 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
489 if (!flag_devirtualize
490 || !POINTER_TYPE_P (TREE_TYPE (arg))
491 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != RECORD_TYPE)
492 return false;
494 arg = build2 (MEM_REF, ptr_type_node, arg,
495 build_int_cst (ptr_type_node, 0));
497 return detect_type_change (arg, arg, call, jfunc, 0);
501 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
502 of an assignment statement STMT, try to find out whether NAME can be
503 described by a (possibly polynomial) pass-through jump-function or an
504 ancestor jump function and if so, write the appropriate function into
505 JFUNC */
507 static void
508 compute_complex_assign_jump_func (struct ipa_node_params *info,
509 struct ipa_jump_func *jfunc,
510 gimple call, gimple stmt, tree name)
512 HOST_WIDE_INT offset, size, max_size;
513 tree op1, op2, base, ssa;
514 int index;
516 op1 = gimple_assign_rhs1 (stmt);
517 op2 = gimple_assign_rhs2 (stmt);
519 if (TREE_CODE (op1) == SSA_NAME
520 && SSA_NAME_IS_DEFAULT_DEF (op1))
522 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
523 if (index < 0)
524 return;
526 if (op2)
528 if (!is_gimple_ip_invariant (op2)
529 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
530 && !useless_type_conversion_p (TREE_TYPE (name),
531 TREE_TYPE (op1))))
532 return;
534 jfunc->type = IPA_JF_PASS_THROUGH;
535 jfunc->value.pass_through.formal_id = index;
536 jfunc->value.pass_through.operation = gimple_assign_rhs_code (stmt);
537 jfunc->value.pass_through.operand = op2;
539 else if (gimple_assign_unary_nop_p (stmt)
540 && !detect_type_change_ssa (op1, call, jfunc))
542 jfunc->type = IPA_JF_PASS_THROUGH;
543 jfunc->value.pass_through.formal_id = index;
544 jfunc->value.pass_through.operation = NOP_EXPR;
546 return;
549 if (TREE_CODE (op1) != ADDR_EXPR)
550 return;
551 op1 = TREE_OPERAND (op1, 0);
552 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
553 return;
554 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
555 if (TREE_CODE (base) != MEM_REF
556 /* If this is a varying address, punt. */
557 || max_size == -1
558 || max_size != size)
559 return;
560 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
561 ssa = TREE_OPERAND (base, 0);
562 if (TREE_CODE (ssa) != SSA_NAME
563 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
564 || offset < 0)
565 return;
567 /* Dynamic types are changed only in constructors and destructors. */
568 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
569 if (index >= 0
570 && !detect_type_change (op1, base, call, jfunc, offset))
572 jfunc->type = IPA_JF_ANCESTOR;
573 jfunc->value.ancestor.formal_id = index;
574 jfunc->value.ancestor.offset = offset;
575 jfunc->value.ancestor.type = TREE_TYPE (op1);
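/* To illustrate the two cases handled above (example statements made up):
   an argument computed as

     D.2064_4 = a_2(D) + 16;
     bar (D.2064_4);

   where a_2(D) is the default-definition SSA name of formal parameter A, is
   summarized as a pass-through jump function for A with operation PLUS_EXPR
   and operand 16, while an argument of the form &parm_3(D)->some_base_field
   is summarized as an ancestor jump function with the offset of that
   field.  */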
580 /* Given that an actual argument is an SSA_NAME that is a result of a phi
581 statement PHI, try to find out whether NAME is in fact a
582 multiple-inheritance typecast from a descendant into an ancestor of a formal
583 parameter and thus can be described by an ancestor jump function and if so,
584 write the appropriate function into JFUNC.
586 Essentially we want to match the following pattern:
588 if (obj_2(D) != 0B)
589 goto <bb 3>;
590 else
591 goto <bb 4>;
593 <bb 3>:
594 iftmp.1_3 = &obj_2(D)->D.1762;
596 <bb 4>:
597 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
598 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
599 return D.1879_6; */
601 static void
602 compute_complex_ancestor_jump_func (struct ipa_node_params *info,
603 struct ipa_jump_func *jfunc,
604 gimple call, gimple phi)
606 HOST_WIDE_INT offset, size, max_size;
607 gimple assign, cond;
608 basic_block phi_bb, assign_bb, cond_bb;
609 tree tmp, parm, expr, obj;
610 int index, i;
612 if (gimple_phi_num_args (phi) != 2)
613 return;
615 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
616 tmp = PHI_ARG_DEF (phi, 0);
617 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
618 tmp = PHI_ARG_DEF (phi, 1);
619 else
620 return;
621 if (TREE_CODE (tmp) != SSA_NAME
622 || SSA_NAME_IS_DEFAULT_DEF (tmp)
623 || !POINTER_TYPE_P (TREE_TYPE (tmp))
624 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
625 return;
627 assign = SSA_NAME_DEF_STMT (tmp);
628 assign_bb = gimple_bb (assign);
629 if (!single_pred_p (assign_bb)
630 || !gimple_assign_single_p (assign))
631 return;
632 expr = gimple_assign_rhs1 (assign);
634 if (TREE_CODE (expr) != ADDR_EXPR)
635 return;
636 expr = TREE_OPERAND (expr, 0);
637 obj = expr;
638 expr = get_ref_base_and_extent (expr, &offset, &size, &max_size);
640 if (TREE_CODE (expr) != MEM_REF
641 /* If this is a varying address, punt. */
642 || max_size == -1
643 || max_size != size)
644 return;
645 offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
646 parm = TREE_OPERAND (expr, 0);
647 if (TREE_CODE (parm) != SSA_NAME
648 || !SSA_NAME_IS_DEFAULT_DEF (parm)
649 || offset < 0)
650 return;
652 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
653 if (index < 0)
654 return;
656 cond_bb = single_pred (assign_bb);
657 cond = last_stmt (cond_bb);
658 if (!cond
659 || gimple_code (cond) != GIMPLE_COND
660 || gimple_cond_code (cond) != NE_EXPR
661 || gimple_cond_lhs (cond) != parm
662 || !integer_zerop (gimple_cond_rhs (cond)))
663 return;
665 phi_bb = gimple_bb (phi);
666 for (i = 0; i < 2; i++)
668 basic_block pred = EDGE_PRED (phi_bb, i)->src;
669 if (pred != assign_bb && pred != cond_bb)
670 return;
673 if (!detect_type_change (obj, expr, call, jfunc, offset))
675 jfunc->type = IPA_JF_ANCESTOR;
676 jfunc->value.ancestor.formal_id = index;
677 jfunc->value.ancestor.offset = offset;
678 jfunc->value.ancestor.type = TREE_TYPE (obj);
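/* The pattern matched above typically comes from C++ source along the lines
   of (an illustrative guess, not a particular testcase):

     return middleman ((Ancestor *) obj, i);

   where OBJ is a formal parameter pointing to a class that has Ancestor as
   a base at a non-zero offset, so the conversion must adjust the pointer,
   but only when it is not NULL.  */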
682 /* Given OP which is passed as an actual argument to a called function,
683 determine if it is possible to construct a KNOWN_TYPE jump function for it
684 and if so, create one and store it to JFUNC. */
686 static void
687 compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
688 gimple call)
690 HOST_WIDE_INT offset, size, max_size;
691 tree base, binfo;
693 if (!flag_devirtualize
694 || TREE_CODE (op) != ADDR_EXPR
695 || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE)
696 return;
698 op = TREE_OPERAND (op, 0);
699 base = get_ref_base_and_extent (op, &offset, &size, &max_size);
700 if (!DECL_P (base)
701 || max_size == -1
702 || max_size != size
703 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
704 || is_global_var (base))
705 return;
707 if (detect_type_change (op, base, call, jfunc, offset))
708 return;
710 binfo = TYPE_BINFO (TREE_TYPE (base));
711 if (!binfo)
712 return;
713 binfo = get_binfo_at_offset (binfo, offset, TREE_TYPE (op));
714 if (binfo)
716 jfunc->type = IPA_JF_KNOWN_TYPE;
717 jfunc->value.base_binfo = binfo;
722 /* Determine the jump functions of scalar arguments. Scalar means SSA names
723 and constants of a number of selected types. INFO is the ipa_node_params
724 structure associated with the caller, FUNCTIONS is a pointer to an array of
725 jump function structures associated with CALL which is the call statement
726 being examined. */
728 static void
729 compute_scalar_jump_functions (struct ipa_node_params *info,
730 struct ipa_jump_func *functions,
731 gimple call)
733 tree arg;
734 unsigned num = 0;
736 for (num = 0; num < gimple_call_num_args (call); num++)
738 arg = gimple_call_arg (call, num);
740 if (is_gimple_ip_invariant (arg))
742 functions[num].type = IPA_JF_CONST;
743 functions[num].value.constant = arg;
745 else if (TREE_CODE (arg) == SSA_NAME)
747 if (SSA_NAME_IS_DEFAULT_DEF (arg))
749 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
751 if (index >= 0
752 && !detect_type_change_ssa (arg, call, &functions[num]))
754 functions[num].type = IPA_JF_PASS_THROUGH;
755 functions[num].value.pass_through.formal_id = index;
756 functions[num].value.pass_through.operation = NOP_EXPR;
759 else
761 gimple stmt = SSA_NAME_DEF_STMT (arg);
762 if (is_gimple_assign (stmt))
763 compute_complex_assign_jump_func (info, &functions[num],
764 call, stmt, arg);
765 else if (gimple_code (stmt) == GIMPLE_PHI)
766 compute_complex_ancestor_jump_func (info, &functions[num],
767 call, stmt);
770 else
771 compute_known_type_jump_func (arg, &functions[num], call);
775 /* Inspect the given TYPE and return true iff it has the same structure (the
776 same number of fields of the same types) as a C++ member pointer. If
777 METHOD_PTR and DELTA are non-NULL, store the trees representing the
778 corresponding fields there. */
780 static bool
781 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
783 tree fld;
785 if (TREE_CODE (type) != RECORD_TYPE)
786 return false;
788 fld = TYPE_FIELDS (type);
789 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
790 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE)
791 return false;
793 if (method_ptr)
794 *method_ptr = fld;
796 fld = DECL_CHAIN (fld);
797 if (!fld || INTEGRAL_TYPE_P (fld))
798 return false;
799 if (delta)
800 *delta = fld;
802 if (DECL_CHAIN (fld))
803 return false;
805 return true;
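/* For reference, the layout checked above corresponds to the usual C++
   pointer-to-member-function representation, roughly

     struct ptrmemfunc
     {
       void (SomeClass::*__pfn) ();
       ptrdiff_t __delta;
     };

   i.e. a pointer to a method followed by an adjustment of the this pointer
   (the field and type names above are illustrative only).  */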
808 /* Callback of walk_aliased_vdefs. Sets the boolean variable pointed to by
809 DATA to true to flag that it has been invoked. */
811 static bool
812 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
813 void *data)
815 bool *b = (bool *) data;
816 *b = true;
817 return true;
820 /* Return true if the formal parameter PARM might have been modified in this
821 function before reaching the statement CALL. PARM_INFO is a pointer to a
822 structure containing intermediate information about PARM. */
824 static bool
825 is_parm_modified_before_call (struct param_analysis_info *parm_info,
826 gimple call, tree parm)
828 bool modified = false;
829 ao_ref refd;
831 if (parm_info->modified)
832 return true;
834 ao_ref_init (&refd, parm);
835 walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
836 &modified, &parm_info->visited_statements);
837 if (modified)
839 parm_info->modified = true;
840 return true;
842 return false;
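/* Note that the answer is memoized: once a clobbering definition is found,
   the MODIFIED flag in PARM_INFO makes all later queries for this parameter
   return true immediately, and VISITED_STATEMENTS keeps walk_aliased_vdefs
   from re-walking statements already examined for this parameter.  */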
845 /* Go through arguments of the CALL and for every one that looks like a member
846 pointer, check whether it can be safely declared pass-through and if so,
847 mark that to the corresponding item of jump FUNCTIONS. Return true iff
848 there are non-pass-through member pointers within the arguments. INFO
849 describes formal parameters of the caller. PARMS_INFO is a pointer to a
850 vector containing intermediate information about each formal parameter. */
852 static bool
853 compute_pass_through_member_ptrs (struct ipa_node_params *info,
854 struct param_analysis_info *parms_info,
855 struct ipa_jump_func *functions,
856 gimple call)
858 bool undecided_members = false;
859 unsigned num;
860 tree arg;
862 for (num = 0; num < gimple_call_num_args (call); num++)
864 arg = gimple_call_arg (call, num);
866 if (type_like_member_ptr_p (TREE_TYPE (arg), NULL, NULL))
868 if (TREE_CODE (arg) == PARM_DECL)
870 int index = ipa_get_param_decl_index (info, arg);
872 gcc_assert (index >= 0);
873 if (!is_parm_modified_before_call (&parms_info[index], call, arg))
875 functions[num].type = IPA_JF_PASS_THROUGH;
876 functions[num].value.pass_through.formal_id = index;
877 functions[num].value.pass_through.operation = NOP_EXPR;
879 else
880 undecided_members = true;
882 else
883 undecided_members = true;
887 return undecided_members;
890 /* Simple function filling in a member pointer constant jump function (with PFN
891 and DELTA as the constant value) into JFUNC. */
893 static void
894 fill_member_ptr_cst_jump_function (struct ipa_jump_func *jfunc,
895 tree pfn, tree delta)
897 jfunc->type = IPA_JF_CONST_MEMBER_PTR;
898 jfunc->value.member_cst.pfn = pfn;
899 jfunc->value.member_cst.delta = delta;
902 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
903 return the rhs of its defining statement. */
905 static inline tree
906 get_ssa_def_if_simple_copy (tree rhs)
908 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
910 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
912 if (gimple_assign_single_p (def_stmt))
913 rhs = gimple_assign_rhs1 (def_stmt);
914 else
915 break;
917 return rhs;
920 /* Traverse statements from CALL backwards, scanning whether the argument ARG
921 which is a member pointer is filled in with constant values. If it is, fill
922 the jump function JFUNC in appropriately. METHOD_FIELD and DELTA_FIELD are
923 fields of the record type of the member pointer. To give an example, we
924 look for a pattern looking like the following:
926 D.2515.__pfn ={v} printStuff;
927 D.2515.__delta ={v} 0;
928 i_1 = doprinting (D.2515); */
930 static void
931 determine_cst_member_ptr (gimple call, tree arg, tree method_field,
932 tree delta_field, struct ipa_jump_func *jfunc)
934 gimple_stmt_iterator gsi;
935 tree method = NULL_TREE;
936 tree delta = NULL_TREE;
938 gsi = gsi_for_stmt (call);
940 gsi_prev (&gsi);
941 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
943 gimple stmt = gsi_stmt (gsi);
944 tree lhs, rhs, fld;
946 if (!stmt_may_clobber_ref_p (stmt, arg))
947 continue;
948 if (!gimple_assign_single_p (stmt))
949 return;
951 lhs = gimple_assign_lhs (stmt);
952 rhs = gimple_assign_rhs1 (stmt);
954 if (TREE_CODE (lhs) != COMPONENT_REF
955 || TREE_OPERAND (lhs, 0) != arg)
956 return;
958 fld = TREE_OPERAND (lhs, 1);
959 if (!method && fld == method_field)
961 rhs = get_ssa_def_if_simple_copy (rhs);
962 if (TREE_CODE (rhs) == ADDR_EXPR
963 && TREE_CODE (TREE_OPERAND (rhs, 0)) == FUNCTION_DECL
964 && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs, 0))) == METHOD_TYPE)
966 method = TREE_OPERAND (rhs, 0);
967 if (delta)
969 fill_member_ptr_cst_jump_function (jfunc, rhs, delta);
970 return;
973 else
974 return;
977 if (!delta && fld == delta_field)
979 rhs = get_ssa_def_if_simple_copy (rhs);
980 if (TREE_CODE (rhs) == INTEGER_CST)
982 delta = rhs;
983 if (method)
985 fill_member_ptr_cst_jump_function (jfunc, method, delta);
986 return;
989 else
990 return;
994 return;
997 /* Go through the arguments of the CALL and for every member pointer within,
998 try to determine whether it is a constant. If it is, create a corresponding
999 constant jump function in FUNCTIONS which is an array of jump functions
1000 associated with the call. */
1002 static void
1003 compute_cst_member_ptr_arguments (struct ipa_jump_func *functions,
1004 gimple call)
1006 unsigned num;
1007 tree arg, method_field, delta_field;
1009 for (num = 0; num < gimple_call_num_args (call); num++)
1011 arg = gimple_call_arg (call, num);
1013 if (functions[num].type == IPA_JF_UNKNOWN
1014 && type_like_member_ptr_p (TREE_TYPE (arg), &method_field,
1015 &delta_field))
1016 determine_cst_member_ptr (call, arg, method_field, delta_field,
1017 &functions[num]);
1021 /* Compute jump functions for all arguments of callsite CS and insert the
1022 information in the jump_functions array in the ipa_edge_args corresponding
1023 to this callsite. */
1025 static void
1026 ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_info,
1027 struct cgraph_edge *cs)
1029 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1030 struct ipa_edge_args *arguments = IPA_EDGE_REF (cs);
1031 gimple call;
1033 if (ipa_get_cs_argument_count (arguments) == 0 || arguments->jump_functions)
1034 return;
1035 arguments->jump_functions = ggc_alloc_cleared_vec_ipa_jump_func
1036 (ipa_get_cs_argument_count (arguments));
1038 call = cs->call_stmt;
1039 gcc_assert (is_gimple_call (call));
1041 /* We will deal with constants and SSA scalars first: */
1042 compute_scalar_jump_functions (info, arguments->jump_functions, call);
1044 /* Let's check whether there are any potential member pointers and if so,
1045 whether we can determine their functions as pass_through. */
1046 if (!compute_pass_through_member_ptrs (info, parms_info,
1047 arguments->jump_functions, call))
1048 return;
1050 /* Finally, let's check whether we actually pass a new constant member
1051 pointer here... */
1052 compute_cst_member_ptr_arguments (arguments->jump_functions, call);
1055 /* Compute jump functions for all edges - both direct and indirect - outgoing
1056 from NODE. Also count the actual arguments in the process. */
1058 static void
1059 ipa_compute_jump_functions (struct cgraph_node *node,
1060 struct param_analysis_info *parms_info)
1062 struct cgraph_edge *cs;
1064 for (cs = node->callees; cs; cs = cs->next_callee)
1066 /* We do not need to bother analyzing calls to unknown
1067 functions unless they may become known during lto/whopr. */
1068 if (!cs->callee->analyzed && !flag_lto)
1069 continue;
1070 ipa_count_arguments (cs);
1071 /* If the descriptor of the callee is not initialized yet, we have to do
1072 it now. */
1073 if (cs->callee->analyzed)
1074 ipa_initialize_node_params (cs->callee);
1075 if (ipa_get_cs_argument_count (IPA_EDGE_REF (cs))
1076 != ipa_get_param_count (IPA_NODE_REF (cs->callee)))
1077 ipa_set_called_with_variable_arg (IPA_NODE_REF (cs->callee));
1078 ipa_compute_jump_functions_for_edge (parms_info, cs);
1081 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
1083 ipa_count_arguments (cs);
1084 ipa_compute_jump_functions_for_edge (parms_info, cs);
1088 /* If RHS looks like a rhs of a statement loading pfn from a member
1089 pointer formal parameter, return the parameter, otherwise return
1090 NULL. If USE_DELTA, then we look for a use of the delta field
1091 rather than the pfn. */
1093 static tree
1094 ipa_get_member_ptr_load_param (tree rhs, bool use_delta)
1096 tree rec, ref_field, ref_offset, fld, fld_offset, ptr_field, delta_field;
1098 if (TREE_CODE (rhs) == COMPONENT_REF)
1100 ref_field = TREE_OPERAND (rhs, 1);
1101 rhs = TREE_OPERAND (rhs, 0);
1103 else
1104 ref_field = NULL_TREE;
1105 if (TREE_CODE (rhs) != MEM_REF)
1106 return NULL_TREE;
1107 rec = TREE_OPERAND (rhs, 0);
1108 if (TREE_CODE (rec) != ADDR_EXPR)
1109 return NULL_TREE;
1110 rec = TREE_OPERAND (rec, 0);
1111 if (TREE_CODE (rec) != PARM_DECL
1112 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1113 return NULL_TREE;
1115 ref_offset = TREE_OPERAND (rhs, 1);
1117 if (ref_field)
1119 if (integer_nonzerop (ref_offset))
1120 return NULL_TREE;
1122 if (use_delta)
1123 fld = delta_field;
1124 else
1125 fld = ptr_field;
1127 return ref_field == fld ? rec : NULL_TREE;
1130 if (use_delta)
1131 fld_offset = byte_position (delta_field);
1132 else
1133 fld_offset = byte_position (ptr_field);
1135 return tree_int_cst_equal (ref_offset, fld_offset) ? rec : NULL_TREE;
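/* In other words, the load is accepted either as a COMPONENT_REF of the
   expected field on top of a MEM_REF, or as a bare MEM_REF whose constant
   offset equals the byte position of that field; in both cases the MEM_REF
   base must be the address of a PARM_DECL whose type looks like a member
   pointer.  */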
1138 /* If STMT looks like a statement loading a value from a member pointer formal
1139 parameter, this function returns that parameter. */
1141 static tree
1142 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta)
1144 tree rhs;
1146 if (!gimple_assign_single_p (stmt))
1147 return NULL_TREE;
1149 rhs = gimple_assign_rhs1 (stmt);
1150 return ipa_get_member_ptr_load_param (rhs, use_delta);
1153 /* Returns true iff T is an SSA_NAME defined by a statement. */
1155 static bool
1156 ipa_is_ssa_with_stmt_def (tree t)
1158 if (TREE_CODE (t) == SSA_NAME
1159 && !SSA_NAME_IS_DEFAULT_DEF (t))
1160 return true;
1161 else
1162 return false;
1165 /* Find the indirect call graph edge corresponding to STMT and add to it all
1166 information necessary to describe a call to a parameter number PARAM_INDEX.
1167 NODE is the caller. POLYMORPHIC should be set to true iff the call is a
1168 virtual one. */
1170 static void
1171 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt,
1172 bool polymorphic)
1174 struct cgraph_edge *cs;
1176 cs = cgraph_edge (node, stmt);
1177 cs->indirect_info->param_index = param_index;
1178 cs->indirect_info->anc_offset = 0;
1179 cs->indirect_info->polymorphic = polymorphic;
1180 if (polymorphic)
1182 tree otr = gimple_call_fn (stmt);
1183 tree type, token = OBJ_TYPE_REF_TOKEN (otr);
1184 cs->indirect_info->otr_token = tree_low_cst (token, 1);
1185 type = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (otr)));
1186 cs->indirect_info->otr_type = type;
1190 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1191 (described by INFO). PARMS_INFO is a pointer to a vector containing
1192 intermediate information about each formal parameter. Currently it checks
1193 whether the call calls a pointer that is a formal parameter and if so, the
1194 parameter is marked with the called flag and an indirect call graph edge
1195 describing the call is created. This is very simple for ordinary pointers
1196 represented in SSA but not-so-nice when it comes to member pointers. The
1197 ugly part of this function does nothing more than trying to match the
1198 pattern of such a call. An example of such a pattern is the gimple dump
1199 below, the call is on the last line:
1201 <bb 2>:
1202 f$__delta_5 = f.__delta;
1203 f$__pfn_24 = f.__pfn;
or, with the accesses represented as MEM_REFs:

1206 <bb 2>:
1207 f$__delta_5 = MEM[(struct *)&f];
1208 f$__pfn_24 = MEM[(struct *)&f + 4B];
1210 and a few lines below:
1212 <bb 5>
1213 D.2496_3 = (int) f$__pfn_24;
1214 D.2497_4 = D.2496_3 & 1;
1215 if (D.2497_4 != 0)
1216 goto <bb 3>;
1217 else
1218 goto <bb 4>;
1220 <bb 6>:
1221 D.2500_7 = (unsigned int) f$__delta_5;
1222 D.2501_8 = &S + D.2500_7;
1223 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1224 D.2503_10 = *D.2502_9;
1225 D.2504_12 = f$__pfn_24 + -1;
1226 D.2505_13 = (unsigned int) D.2504_12;
1227 D.2506_14 = D.2503_10 + D.2505_13;
1228 D.2507_15 = *D.2506_14;
1229 iftmp.11_16 = (String:: *) D.2507_15;
1231 <bb 7>:
1232 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1233 D.2500_19 = (unsigned int) f$__delta_5;
1234 D.2508_20 = &S + D.2500_19;
1235 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1237 Such patterns are results of simple calls to a member pointer:
1239 int doprinting (int (MyString::* f)(int) const)
1241 MyString S ("somestring");
1243 return (S.*f)(4);
1247 static void
1248 ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1249 struct ipa_node_params *info,
1250 struct param_analysis_info *parms_info,
1251 gimple call, tree target)
1253 gimple def;
1254 tree n1, n2;
1255 gimple d1, d2;
1256 tree rec, rec2, cond;
1257 gimple branch;
1258 int index;
1259 basic_block bb, virt_bb, join;
1261 if (SSA_NAME_IS_DEFAULT_DEF (target))
1263 tree var = SSA_NAME_VAR (target);
1264 index = ipa_get_param_decl_index (info, var);
1265 if (index >= 0)
1266 ipa_note_param_call (node, index, call, false);
1267 return;
1270 /* Now we need to try to match the complex pattern of calling a member
1271 pointer. */
1273 if (!POINTER_TYPE_P (TREE_TYPE (target))
1274 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1275 return;
1277 def = SSA_NAME_DEF_STMT (target);
1278 if (gimple_code (def) != GIMPLE_PHI)
1279 return;
1281 if (gimple_phi_num_args (def) != 2)
1282 return;
1284 /* First, we need to check whether one of these is a load from a member
1285 pointer that is a parameter to this function. */
1286 n1 = PHI_ARG_DEF (def, 0);
1287 n2 = PHI_ARG_DEF (def, 1);
1288 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1289 return;
1290 d1 = SSA_NAME_DEF_STMT (n1);
1291 d2 = SSA_NAME_DEF_STMT (n2);
1293 join = gimple_bb (def);
1294 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false)))
1296 if (ipa_get_stmt_member_ptr_load_param (d2, false))
1297 return;
1299 bb = EDGE_PRED (join, 0)->src;
1300 virt_bb = gimple_bb (d2);
1302 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false)))
1304 bb = EDGE_PRED (join, 1)->src;
1305 virt_bb = gimple_bb (d1);
1307 else
1308 return;
1310 /* Second, we need to check that the basic blocks are laid out in the way
1311 corresponding to the pattern. */
1313 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1314 || single_pred (virt_bb) != bb
1315 || single_succ (virt_bb) != join)
1316 return;
1318 /* Third, let's see that the branching is done depending on the least
1319 significant bit of the pfn. */
1321 branch = last_stmt (bb);
1322 if (!branch || gimple_code (branch) != GIMPLE_COND)
1323 return;
1325 if (gimple_cond_code (branch) != NE_EXPR
1326 || !integer_zerop (gimple_cond_rhs (branch)))
1327 return;
1329 cond = gimple_cond_lhs (branch);
1330 if (!ipa_is_ssa_with_stmt_def (cond))
1331 return;
1333 def = SSA_NAME_DEF_STMT (cond);
1334 if (!is_gimple_assign (def)
1335 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1336 || !integer_onep (gimple_assign_rhs2 (def)))
1337 return;
1339 cond = gimple_assign_rhs1 (def);
1340 if (!ipa_is_ssa_with_stmt_def (cond))
1341 return;
1343 def = SSA_NAME_DEF_STMT (cond);
1345 if (is_gimple_assign (def)
1346 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
1348 cond = gimple_assign_rhs1 (def);
1349 if (!ipa_is_ssa_with_stmt_def (cond))
1350 return;
1351 def = SSA_NAME_DEF_STMT (cond);
1354 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1355 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1356 == ptrmemfunc_vbit_in_delta));
1358 if (rec != rec2)
1359 return;
1361 index = ipa_get_param_decl_index (info, rec);
1362 if (index >= 0 && !is_parm_modified_before_call (&parms_info[index],
1363 call, rec))
1364 ipa_note_param_call (node, index, call, false);
1366 return;
1369 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1370 object referenced in the expression is a formal parameter of the caller
1371 (described by INFO), create a call note for the statement. */
1373 static void
1374 ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1375 struct ipa_node_params *info, gimple call,
1376 tree target)
1378 struct ipa_jump_func jfunc;
1379 tree obj = OBJ_TYPE_REF_OBJECT (target);
1380 tree var;
1381 int index;
1383 if (!flag_devirtualize)
1384 return;
1386 if (TREE_CODE (obj) == ADDR_EXPR)
1390 obj = TREE_OPERAND (obj, 0);
1392 while (TREE_CODE (obj) == COMPONENT_REF);
1393 if (TREE_CODE (obj) != MEM_REF)
1394 return;
1395 obj = TREE_OPERAND (obj, 0);
1398 if (TREE_CODE (obj) != SSA_NAME
1399 || !SSA_NAME_IS_DEFAULT_DEF (obj))
1400 return;
1402 var = SSA_NAME_VAR (obj);
1403 index = ipa_get_param_decl_index (info, var);
1405 if (index >= 0
1406 && !detect_type_change_ssa (obj, call, &jfunc))
1407 ipa_note_param_call (node, index, call, true);
1410 /* Analyze a call statement CALL whether and how it utilizes formal parameters
1411 of the caller (described by INFO). PARMS_INFO is a pointer to a vector
1412 containing intermediate information about each formal parameter. */
1414 static void
1415 ipa_analyze_call_uses (struct cgraph_node *node,
1416 struct ipa_node_params *info,
1417 struct param_analysis_info *parms_info, gimple call)
1419 tree target = gimple_call_fn (call);
1421 if (TREE_CODE (target) == SSA_NAME)
1422 ipa_analyze_indirect_call_uses (node, info, parms_info, call, target);
1423 else if (TREE_CODE (target) == OBJ_TYPE_REF)
1424 ipa_analyze_virtual_call_uses (node, info, call, target);
1428 /* Analyze the call statement STMT with respect to formal parameters (described
1429 in INFO) of caller given by NODE. Currently it only checks whether formal
1430 parameters are called. PARMS_INFO is a pointer to a vector containing
1431 intermediate information about each formal parameter. */
1433 static void
1434 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
1435 struct param_analysis_info *parms_info, gimple stmt)
1437 if (is_gimple_call (stmt))
1438 ipa_analyze_call_uses (node, info, parms_info, stmt);
1441 /* Callback of walk_stmt_load_store_addr_ops, used here for loads, stores
1442 and address operands alike. If OP is a parameter declaration, mark it as
1443 used in the info structure passed in DATA. */
1445 static bool
1446 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
1447 tree op, void *data)
1449 struct ipa_node_params *info = (struct ipa_node_params *) data;
1451 op = get_base_address (op);
1452 if (op
1453 && TREE_CODE (op) == PARM_DECL)
1455 int index = ipa_get_param_decl_index (info, op);
1456 gcc_assert (index >= 0);
1457 info->params[index].used = true;
1460 return false;
1463 /* Scan the function body of NODE and inspect the uses of formal parameters.
1464 Store the findings in various structures of the associated ipa_node_params
1465 structure, such as parameter flags, notes etc. PARMS_INFO is a pointer to a
1466 vector containing intermediate information about each formal parameter. */
1468 static void
1469 ipa_analyze_params_uses (struct cgraph_node *node,
1470 struct param_analysis_info *parms_info)
1472 tree decl = node->decl;
1473 basic_block bb;
1474 struct function *func;
1475 gimple_stmt_iterator gsi;
1476 struct ipa_node_params *info = IPA_NODE_REF (node);
1477 int i;
1479 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
1480 return;
1482 for (i = 0; i < ipa_get_param_count (info); i++)
1484 tree parm = ipa_get_param (info, i);
1485 /* For SSA regs see if parameter is used. For non-SSA we compute
1486 the flag during modification analysis. */
1487 if (is_gimple_reg (parm)
1488 && gimple_default_def (DECL_STRUCT_FUNCTION (node->decl), parm))
1489 info->params[i].used = true;
1492 func = DECL_STRUCT_FUNCTION (decl);
1493 FOR_EACH_BB_FN (bb, func)
1495 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1497 gimple stmt = gsi_stmt (gsi);
1499 if (is_gimple_debug (stmt))
1500 continue;
1502 ipa_analyze_stmt_uses (node, info, parms_info, stmt);
1503 walk_stmt_load_store_addr_ops (stmt, info,
1504 visit_ref_for_mod_analysis,
1505 visit_ref_for_mod_analysis,
1506 visit_ref_for_mod_analysis);
1508 for (gsi = gsi_start (phi_nodes (bb)); !gsi_end_p (gsi); gsi_next (&gsi))
1509 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
1510 visit_ref_for_mod_analysis,
1511 visit_ref_for_mod_analysis,
1512 visit_ref_for_mod_analysis);
1515 info->uses_analysis_done = 1;
1518 /* Initialize the array describing properties of formal parameters
1519 of NODE, analyze their uses and compute jump functions associated
1520 with actual arguments of calls from within NODE. */
1522 void
1523 ipa_analyze_node (struct cgraph_node *node)
1525 struct ipa_node_params *info;
1526 struct param_analysis_info *parms_info;
1527 int i, param_count;
1529 ipa_check_create_node_params ();
1530 ipa_check_create_edge_args ();
1531 info = IPA_NODE_REF (node);
1532 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
1533 current_function_decl = node->decl;
1534 ipa_initialize_node_params (node);
1536 param_count = ipa_get_param_count (info);
1537 parms_info = XALLOCAVEC (struct param_analysis_info, param_count);
1538 memset (parms_info, 0, sizeof (struct param_analysis_info) * param_count);
1540 ipa_analyze_params_uses (node, parms_info);
1541 ipa_compute_jump_functions (node, parms_info);
1543 for (i = 0; i < param_count; i++)
1544 if (parms_info[i].visited_statements)
1545 BITMAP_FREE (parms_info[i].visited_statements);
1547 current_function_decl = NULL;
1548 pop_cfun ();
1552 /* Update the jump function DST when the call graph edge corresponding to SRC is
1553 being inlined, knowing that DST is of type ancestor and SRC of known
1554 type. */
1556 static void
1557 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
1558 struct ipa_jump_func *dst)
1560 tree new_binfo;
1562 new_binfo = get_binfo_at_offset (src->value.base_binfo,
1563 dst->value.ancestor.offset,
1564 dst->value.ancestor.type);
1565 if (new_binfo)
1567 dst->type = IPA_JF_KNOWN_TYPE;
1568 dst->value.base_binfo = new_binfo;
1570 else
1571 dst->type = IPA_JF_UNKNOWN;
1574 /* Update the jump functions associated with call graph edge E when the call
1575 graph edge CS is being inlined, assuming that E->caller is already (possibly
1576 indirectly) inlined into CS->callee and that E has not been inlined. */
1578 static void
1579 update_jump_functions_after_inlining (struct cgraph_edge *cs,
1580 struct cgraph_edge *e)
1582 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
1583 struct ipa_edge_args *args = IPA_EDGE_REF (e);
1584 int count = ipa_get_cs_argument_count (args);
1585 int i;
1587 for (i = 0; i < count; i++)
1589 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
1591 if (dst->type == IPA_JF_ANCESTOR)
1593 struct ipa_jump_func *src;
1595 /* Variable number of arguments can cause havoc if we try to access
1596 one that does not exist in the inlined edge. So make sure we
1597 don't. */
1598 if (dst->value.ancestor.formal_id >= ipa_get_cs_argument_count (top))
1600 dst->type = IPA_JF_UNKNOWN;
1601 continue;
1604 src = ipa_get_ith_jump_func (top, dst->value.ancestor.formal_id);
1605 if (src->type == IPA_JF_KNOWN_TYPE)
1606 combine_known_type_and_ancestor_jfs (src, dst);
1607 else if (src->type == IPA_JF_PASS_THROUGH
1608 && src->value.pass_through.operation == NOP_EXPR)
1609 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
1610 else if (src->type == IPA_JF_ANCESTOR)
1612 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
1613 dst->value.ancestor.offset += src->value.ancestor.offset;
1615 else
1616 dst->type = IPA_JF_UNKNOWN;
1618 else if (dst->type == IPA_JF_PASS_THROUGH)
1620 struct ipa_jump_func *src;
1621 /* We must check range due to calls with variable number of arguments
1622 and we cannot combine jump functions with operations. */
1623 if (dst->value.pass_through.operation == NOP_EXPR
1624 && (dst->value.pass_through.formal_id
1625 < ipa_get_cs_argument_count (top)))
1627 src = ipa_get_ith_jump_func (top,
1628 dst->value.pass_through.formal_id);
1629 *dst = *src;
1631 else
1632 dst->type = IPA_JF_UNKNOWN;
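/* A concrete (made-up) example of the combination above: if the non-inlined
   edge E described its argument as "ancestor of formal 0 at offset 32" and
   the inlined edge CS passed, as its argument 0, formal 2 of its caller
   unchanged (a NOP_EXPR pass-through), the updated description becomes
   "ancestor of formal 2 at offset 32" in terms of the caller into which the
   body was inlined.  Two ancestor functions compose by adding their
   offsets.  */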
1637 /* If TARGET is an addr_expr of a function declaration, make it the destination
1638 of an indirect edge IE and return the edge. Otherwise, return NULL. Delta,
1639 if non-NULL, is an integer constant that must be added to this pointer
1640 (first parameter). */
1642 struct cgraph_edge *
1643 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target, tree delta)
1645 struct cgraph_node *callee;
1647 if (TREE_CODE (target) == ADDR_EXPR)
1648 target = TREE_OPERAND (target, 0);
1649 if (TREE_CODE (target) != FUNCTION_DECL)
1650 return NULL;
1651 callee = cgraph_get_node (target);
1652 if (!callee)
1653 return NULL;
1654 ipa_check_create_node_params ();
1656 /* We cannot make edges to inline clones. It is a bug that someone removed the cgraph
1657 node too early. */
1658 gcc_assert (!callee->global.inlined_to);
1660 cgraph_make_edge_direct (ie, callee, delta ? tree_low_cst (delta, 0) : 0);
1661 if (dump_file)
1663 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
1664 "(%s/%i -> %s/%i), for stmt ",
1665 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
1666 cgraph_node_name (ie->caller), ie->caller->uid,
1667 cgraph_node_name (ie->callee), ie->callee->uid);
1668 if (ie->call_stmt)
1669 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
1670 else
1671 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
1673 if (delta)
1675 fprintf (dump_file, " Thunk delta is ");
1676 print_generic_expr (dump_file, delta, 0);
1677 fprintf (dump_file, "\n");
1681 if (ipa_get_cs_argument_count (IPA_EDGE_REF (ie))
1682 != ipa_get_param_count (IPA_NODE_REF (callee)))
1683 ipa_set_called_with_variable_arg (IPA_NODE_REF (callee));
1685 return ie;
1688 /* Try to find a destination for indirect edge IE that corresponds to a simple
1689 call or a call of a member function pointer and where the destination is a
1690 pointer formal parameter described by jump function JFUNC. If it can be
1691 determined, return the newly direct edge, otherwise return NULL. */
1693 static struct cgraph_edge *
1694 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
1695 struct ipa_jump_func *jfunc)
1697 tree target;
1699 if (jfunc->type == IPA_JF_CONST)
1700 target = jfunc->value.constant;
1701 else if (jfunc->type == IPA_JF_CONST_MEMBER_PTR)
1702 target = jfunc->value.member_cst.pfn;
1703 else
1704 return NULL;
1706 return ipa_make_edge_direct_to_target (ie, target, NULL_TREE);
1709 /* Try to find a destination for indirect edge IE that corresponds to a
1710 virtual call based on a formal parameter which is described by jump
1711 function JFUNC and if it can be determined, make it direct and return the
1712 direct edge. Otherwise, return NULL. */
1714 static struct cgraph_edge *
1715 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
1716 struct ipa_jump_func *jfunc)
1718 tree binfo, type, target, delta;
1719 HOST_WIDE_INT token;
1721 if (jfunc->type == IPA_JF_KNOWN_TYPE)
1722 binfo = jfunc->value.base_binfo;
1723 else
1724 return NULL;
1726 if (!binfo)
1727 return NULL;
1729 token = ie->indirect_info->otr_token;
1730 type = ie->indirect_info->otr_type;
1731 binfo = get_binfo_at_offset (binfo, ie->indirect_info->anc_offset, type);
1732 if (binfo)
1733 target = gimple_get_virt_mehtod_for_binfo (token, binfo, &delta, true);
1734 else
1735 return NULL;
1737 if (target)
1738 return ipa_make_edge_direct_to_target (ie, target, delta);
1739 else
1740 return NULL;
1743 /* Update the param called notes associated with NODE when CS is being inlined,
1744 assuming NODE is (potentially indirectly) inlined into CS->callee.
1745 Moreover, if the callee is discovered to be constant, create a new cgraph
1746 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
1747 unless NEW_EDGES is NULL. Return true iff any new edges were created. */
1749 static bool
1750 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
1751 struct cgraph_node *node,
1752 VEC (cgraph_edge_p, heap) **new_edges)
1754 struct ipa_edge_args *top;
1755 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
1756 bool res = false;
1758 ipa_check_create_edge_args ();
1759 top = IPA_EDGE_REF (cs);
1761 for (ie = node->indirect_calls; ie; ie = next_ie)
1763 struct cgraph_indirect_call_info *ici = ie->indirect_info;
1764 struct ipa_jump_func *jfunc;
1766 next_ie = ie->next_callee;
1767 if (bitmap_bit_p (iinlining_processed_edges, ie->uid))
1768 continue;
1770 /* If we ever use indirect edges for anything other than indirect
1771 inlining, we will need to skip those with negative param_indices. */
1772 if (ici->param_index == -1)
1773 continue;
1775 /* We must check range due to calls with variable number of arguments: */
1776 if (ici->param_index >= ipa_get_cs_argument_count (top))
1778 bitmap_set_bit (iinlining_processed_edges, ie->uid);
1779 continue;
1782 jfunc = ipa_get_ith_jump_func (top, ici->param_index);
1783 if (jfunc->type == IPA_JF_PASS_THROUGH
1784 && jfunc->value.pass_through.operation == NOP_EXPR)
1785 ici->param_index = jfunc->value.pass_through.formal_id;
1786 else if (jfunc->type == IPA_JF_ANCESTOR)
1788 ici->param_index = jfunc->value.ancestor.formal_id;
1789 ici->anc_offset += jfunc->value.ancestor.offset;
1791 else
1792 /* Either we can find a destination for this edge now or never. */
1793 bitmap_set_bit (iinlining_processed_edges, ie->uid);
1795 if (ici->polymorphic)
1796 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc);
1797 else
1798 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc);
1800 if (new_direct_edge)
1802 new_direct_edge->indirect_inlining_edge = 1;
1803 if (new_edges)
1805 VEC_safe_push (cgraph_edge_p, heap, *new_edges,
1806 new_direct_edge);
1807 top = IPA_EDGE_REF (cs);
1808 res = true;
1813 return res;
1816 /* Recursively traverse subtree of NODE (including node) made of inlined
1817 cgraph_edges when CS has been inlined and invoke
1818 update_indirect_edges_after_inlining on all nodes and
1819 update_jump_functions_after_inlining on all non-inlined edges that lead out
1820 of this subtree. Newly discovered indirect edges will be added to
1821 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
1822 created. */
1824 static bool
1825 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
1826 struct cgraph_node *node,
1827 VEC (cgraph_edge_p, heap) **new_edges)
1829 struct cgraph_edge *e;
1830 bool res;
1832 res = update_indirect_edges_after_inlining (cs, node, new_edges);
1834 for (e = node->callees; e; e = e->next_callee)
1835 if (!e->inline_failed)
1836 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
1837 else
1838 update_jump_functions_after_inlining (cs, e);
1840 return res;
1843 /* Update jump functions and call note functions on inlining the call site CS.
1844 CS is expected to lead to a node already cloned by
1845 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
1846 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
1847 created. */
1849 bool
1850 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
1851 VEC (cgraph_edge_p, heap) **new_edges)
1853 /* FIXME lto: We do not stream out indirect call information. */
1854 if (flag_wpa)
1855 return false;
1857 /* Do nothing if the preparation phase has not been carried out yet
1858 (i.e. during early inlining). */
1859 if (!ipa_node_params_vector)
1860 return false;
1861 gcc_assert (ipa_edge_args_vector);
1863 return propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
1866 /* Frees all dynamically allocated structures that the argument info points
1867 to. */
1869 void
1870 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
1872 if (args->jump_functions)
1873 ggc_free (args->jump_functions);
1875 memset (args, 0, sizeof (*args));
1878 /* Free all ipa_edge_args structures. */
1880 void
1881 ipa_free_all_edge_args (void)
1883 int i;
1884 struct ipa_edge_args *args;
1886 FOR_EACH_VEC_ELT (ipa_edge_args_t, ipa_edge_args_vector, i, args)
1887 ipa_free_edge_args_substructures (args);
1889 VEC_free (ipa_edge_args_t, gc, ipa_edge_args_vector);
1890 ipa_edge_args_vector = NULL;
1893 /* Frees all dynamically allocated structures that the param info points
1894 to. */
1896 void
1897 ipa_free_node_params_substructures (struct ipa_node_params *info)
1899 if (info->params)
1900 free (info->params);
1902 memset (info, 0, sizeof (*info));
1905 /* Free all ipa_node_params structures. */
1907 void
1908 ipa_free_all_node_params (void)
1910 int i;
1911 struct ipa_node_params *info;
1913 FOR_EACH_VEC_ELT (ipa_node_params_t, ipa_node_params_vector, i, info)
1914 ipa_free_node_params_substructures (info);
1916 VEC_free (ipa_node_params_t, heap, ipa_node_params_vector);
1917 ipa_node_params_vector = NULL;
1920 /* Hook that is called by cgraph.c when an edge is removed. */
1922 static void
1923 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
1925 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
1926 if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
1927 <= (unsigned)cs->uid)
1928 return;
1929 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
1932 /* Hook that is called by cgraph.c when a node is removed. */
1934 static void
1935 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
1937 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
1938 if (VEC_length (ipa_node_params_t, ipa_node_params_vector)
1939 <= (unsigned)node->uid)
1940 return;
1941 ipa_free_node_params_substructures (IPA_NODE_REF (node));
1944 /* Helper function to duplicate an array of N bytes starting at SRC and return
1945 a pointer to the copy, or NULL if SRC is NULL. */
1947 static void *
1948 duplicate_array (void *src, size_t n)
1950 void *p;
1952 if (!src)
1953 return NULL;
1955 p = xmalloc (n);
1956 memcpy (p, src, n);
1957 return p;
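/* Like duplicate_array above, but the copy is allocated in GC memory and N is
   the number of ipa_jump_func elements rather than a size in bytes.  */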
1960 static struct ipa_jump_func *
1961 duplicate_ipa_jump_func_array (const struct ipa_jump_func * src, size_t n)
1963 struct ipa_jump_func *p;
1965 if (!src)
1966 return NULL;
1968 p = ggc_alloc_vec_ipa_jump_func (n);
1969 memcpy (p, src, n * sizeof (struct ipa_jump_func));
1970 return p;
1973 /* Hook that is called by cgraph.c when an edge is duplicated. */
1975 static void
1976 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
1977 __attribute__((unused)) void *data)
1979 struct ipa_edge_args *old_args, *new_args;
1980 int arg_count;
1982 ipa_check_create_edge_args ();
1984 old_args = IPA_EDGE_REF (src);
1985 new_args = IPA_EDGE_REF (dst);
1987 arg_count = ipa_get_cs_argument_count (old_args);
1988 ipa_set_cs_argument_count (new_args, arg_count);
1989 new_args->jump_functions =
1990 duplicate_ipa_jump_func_array (old_args->jump_functions, arg_count);
1992 if (iinlining_processed_edges
1993 && bitmap_bit_p (iinlining_processed_edges, src->uid))
1994 bitmap_set_bit (iinlining_processed_edges, dst->uid);
1997 /* Hook that is called by cgraph.c when a node is duplicated. */
1999 static void
2000 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
2001 __attribute__((unused)) void *data)
2003 struct ipa_node_params *old_info, *new_info;
2004 int param_count, i;
2006 ipa_check_create_node_params ();
2007 old_info = IPA_NODE_REF (src);
2008 new_info = IPA_NODE_REF (dst);
2009 param_count = ipa_get_param_count (old_info);
2011 ipa_set_param_count (new_info, param_count);
2012 new_info->params = (struct ipa_param_descriptor *)
2013 duplicate_array (old_info->params,
2014 sizeof (struct ipa_param_descriptor) * param_count);
2015 for (i = 0; i < param_count; i++)
2016 new_info->params[i].types = VEC_copy (tree, heap,
2017 old_info->params[i].types);
2018 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
2019 new_info->count_scale = old_info->count_scale;
2021 new_info->called_with_var_arguments = old_info->called_with_var_arguments;
2022 new_info->uses_analysis_done = old_info->uses_analysis_done;
2023 new_info->node_enqueued = old_info->node_enqueued;
2026 /* Register our cgraph hooks if they are not already there. */
2028 void
2029 ipa_register_cgraph_hooks (void)
2031 if (!edge_removal_hook_holder)
2032 edge_removal_hook_holder =
2033 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
2034 if (!node_removal_hook_holder)
2035 node_removal_hook_holder =
2036 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
2037 if (!edge_duplication_hook_holder)
2038 edge_duplication_hook_holder =
2039 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
2040 if (!node_duplication_hook_holder)
2041 node_duplication_hook_holder =
2042 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
2045 /* Unregister our cgraph hooks. */
2047 static void
2048 ipa_unregister_cgraph_hooks (void)
2050 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
2051 edge_removal_hook_holder = NULL;
2052 cgraph_remove_node_removal_hook (node_removal_hook_holder);
2053 node_removal_hook_holder = NULL;
2054 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
2055 edge_duplication_hook_holder = NULL;
2056 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
2057 node_duplication_hook_holder = NULL;
2060 /* Allocate all data structures necessary for indirect inlining. */
2062 void
2063 ipa_create_all_structures_for_iinln (void)
2065 iinlining_processed_edges = BITMAP_ALLOC (NULL);
2068 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2069 longer needed after ipa-cp. */
2071 void
2072 ipa_free_all_structures_after_ipa_cp (void)
2074 if (!flag_indirect_inlining)
2076 ipa_free_all_edge_args ();
2077 ipa_free_all_node_params ();
2078 ipa_unregister_cgraph_hooks ();
2082 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2083 longer needed after indirect inlining. */
2085 void
2086 ipa_free_all_structures_after_iinln (void)
2088 BITMAP_FREE (iinlining_processed_edges);
2090 ipa_free_all_edge_args ();
2091 ipa_free_all_node_params ();
2092 ipa_unregister_cgraph_hooks ();
2095 /* Print the parameter descriptors (ipa_node_params data) of function
2096 NODE to F. */
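/* Editor's note: for a function "foo (int a, int b)" in which only A is used,
   the dump produced below looks roughly like

       function foo parameter descriptors:
         param 0 : a used
         param 1 : b

   with the exact indentation given by the fprintf calls in the body.  */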
2098 void
2099 ipa_print_node_params (FILE * f, struct cgraph_node *node)
2101 int i, count;
2102 tree temp;
2103 struct ipa_node_params *info;
2105 if (!node->analyzed)
2106 return;
2107 info = IPA_NODE_REF (node);
2108 fprintf (f, " function %s parameter descriptors:\n",
2109 cgraph_node_name (node));
2110 count = ipa_get_param_count (info);
2111 for (i = 0; i < count; i++)
2113 temp = ipa_get_param (info, i);
2114 if (TREE_CODE (temp) == PARM_DECL)
2115 fprintf (f, " param %d : %s", i,
2116 (DECL_NAME (temp)
2117 ? (*lang_hooks.decl_printable_name) (temp, 2)
2118 : "(unnamed)"));
2119 if (ipa_is_param_used (info, i))
2120 fprintf (f, " used");
2121 fprintf (f, "\n");
2125 /* Print the parameter descriptors of all functions in the
2126 call graph to F. */
2128 void
2129 ipa_print_all_params (FILE * f)
2131 struct cgraph_node *node;
2133 fprintf (f, "\nFunction parameters:\n");
2134 for (node = cgraph_nodes; node; node = node->next)
2135 ipa_print_node_params (f, node);
2138 /* Return a heap allocated vector containing formal parameters of FNDECL. */
2140 VEC(tree, heap) *
2141 ipa_get_vector_of_formal_parms (tree fndecl)
2143 VEC(tree, heap) *args;
2144 int count;
2145 tree parm;
2147 count = count_formal_params_1 (fndecl);
2148 args = VEC_alloc (tree, heap, count);
2149 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
2150 VEC_quick_push (tree, args, parm);
2152 return args;
2155 /* Return a heap allocated vector containing types of formal parameters of
2156 function type FNTYPE. */
2158 static inline VEC(tree, heap) *
2159 get_vector_of_formal_parm_types (tree fntype)
2161 VEC(tree, heap) *types;
2162 int count = 0;
2163 tree t;
2165 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2166 count++;
2168 types = VEC_alloc (tree, heap, count);
2169 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2170 VEC_quick_push (tree, types, TREE_VALUE (t));
2172 return types;
2175 /* Modify the function declaration FNDECL and its type according to the plan in
2176 ADJUSTMENTS. Also set the base field of each individual adjustment structure
2177 to the actual parameter it refers to, which is determined by the
2178 base_index field. */
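/* Editor's illustration (assumed caller, not from the original sources): to
   keep the first parameter of "void foo (int a, struct S *s)" and replace the
   second one by the member s->x passed by value, a pass such as IPA-SRA would
   build two adjustments along the lines of

       adj[0].base_index = 0;  adj[0].copy_param = 1;
       adj[1].base_index = 1;  adj[1].offset = <bit offset of x>;
       adj[1].type = <type of x>;

   and then call ipa_modify_formal_parameters (fndecl, adjustments, prefix).  */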
2180 void
2181 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
2182 const char *synth_parm_prefix)
2184 VEC(tree, heap) *oparms, *otypes;
2185 tree orig_type, new_type = NULL;
2186 tree old_arg_types, t, new_arg_types = NULL;
2187 tree parm, *link = &DECL_ARGUMENTS (fndecl);
2188 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2189 tree new_reversed = NULL;
2190 bool care_for_types, last_parm_void;
2192 if (!synth_parm_prefix)
2193 synth_parm_prefix = "SYNTH";
2195 oparms = ipa_get_vector_of_formal_parms (fndecl);
2196 orig_type = TREE_TYPE (fndecl);
2197 old_arg_types = TYPE_ARG_TYPES (orig_type);
2199 /* The following test is an ugly hack: some functions simply don't have any
2200 arguments in their type. This is probably a bug but well... */
2201 care_for_types = (old_arg_types != NULL_TREE);
2202 if (care_for_types)
2204 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
2205 == void_type_node);
2206 otypes = get_vector_of_formal_parm_types (orig_type);
2207 if (last_parm_void)
2208 gcc_assert (VEC_length (tree, oparms) + 1 == VEC_length (tree, otypes));
2209 else
2210 gcc_assert (VEC_length (tree, oparms) == VEC_length (tree, otypes));
2212 else
2214 last_parm_void = false;
2215 otypes = NULL;
2218 for (i = 0; i < len; i++)
2220 struct ipa_parm_adjustment *adj;
2221 gcc_assert (link);
2223 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2224 parm = VEC_index (tree, oparms, adj->base_index);
2225 adj->base = parm;
2227 if (adj->copy_param)
2229 if (care_for_types)
2230 new_arg_types = tree_cons (NULL_TREE, VEC_index (tree, otypes,
2231 adj->base_index),
2232 new_arg_types);
2233 *link = parm;
2234 link = &DECL_CHAIN (parm);
2236 else if (!adj->remove_param)
2238 tree new_parm;
2239 tree ptype;
2241 if (adj->by_ref)
2242 ptype = build_pointer_type (adj->type);
2243 else
2244 ptype = adj->type;
2246 if (care_for_types)
2247 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
2249 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
2250 ptype);
2251 DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);
2253 DECL_ARTIFICIAL (new_parm) = 1;
2254 DECL_ARG_TYPE (new_parm) = ptype;
2255 DECL_CONTEXT (new_parm) = fndecl;
2256 TREE_USED (new_parm) = 1;
2257 DECL_IGNORED_P (new_parm) = 1;
2258 layout_decl (new_parm, 0);
2260 add_referenced_var (new_parm);
2261 mark_sym_for_renaming (new_parm);
2262 adj->base = parm;
2263 adj->reduction = new_parm;
2265 *link = new_parm;
2267 link = &DECL_CHAIN (new_parm);
2271 *link = NULL_TREE;
2273 if (care_for_types)
2275 new_reversed = nreverse (new_arg_types);
2276 if (last_parm_void)
2278 if (new_reversed)
2279 TREE_CHAIN (new_arg_types) = void_list_node;
2280 else
2281 new_reversed = void_list_node;
2285 /* Use build_distinct_type_copy to preserve as much as possible from the
2286 original type (debug info, attribute lists etc.).
2287 The exception is METHOD_TYPEs, which must have a THIS argument;
2288 when we are asked to remove it, we need to build a new FUNCTION_TYPE
2289 instead. */
2290 if (TREE_CODE (orig_type) != METHOD_TYPE
2291 || (VEC_index (ipa_parm_adjustment_t, adjustments, 0)->copy_param
2292 && VEC_index (ipa_parm_adjustment_t, adjustments, 0)->base_index == 0))
2294 new_type = build_distinct_type_copy (orig_type);
2295 TYPE_ARG_TYPES (new_type) = new_reversed;
2297 else
2299 new_type
2300 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
2301 new_reversed));
2302 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
2303 DECL_VINDEX (fndecl) = NULL_TREE;
2306 /* When the signature changes, we need to clear the builtin info. */
2307 if (DECL_BUILT_IN (fndecl))
2309 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
2310 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
2313 /* This is a new type, not a copy of an old type. Need to reassociate
2314 variants. We can handle everything except the main variant lazily. */
2315 t = TYPE_MAIN_VARIANT (orig_type);
2316 if (orig_type != t)
2318 TYPE_MAIN_VARIANT (new_type) = t;
2319 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
2320 TYPE_NEXT_VARIANT (t) = new_type;
2322 else
2324 TYPE_MAIN_VARIANT (new_type) = new_type;
2325 TYPE_NEXT_VARIANT (new_type) = NULL;
2328 TREE_TYPE (fndecl) = new_type;
2329 DECL_VIRTUAL_P (fndecl) = 0;
2330 if (otypes)
2331 VEC_free (tree, heap, otypes);
2332 VEC_free (tree, heap, oparms);
2335 /* Modify the actual arguments of the function call STMT as indicated in
2336 ADJUSTMENTS. If this is a directly recursive call, CS must be NULL;
2337 otherwise it must be the corresponding call graph edge. */
2339 void
2340 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
2341 ipa_parm_adjustment_vec adjustments)
2343 VEC(tree, heap) *vargs;
2344 gimple new_stmt;
2345 gimple_stmt_iterator gsi;
2346 tree callee_decl;
2347 int i, len;
2349 len = VEC_length (ipa_parm_adjustment_t, adjustments);
2350 vargs = VEC_alloc (tree, heap, len);
2352 gsi = gsi_for_stmt (stmt);
2353 for (i = 0; i < len; i++)
2355 struct ipa_parm_adjustment *adj;
2357 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2359 if (adj->copy_param)
2361 tree arg = gimple_call_arg (stmt, adj->base_index);
2363 VEC_quick_push (tree, vargs, arg);
2365 else if (!adj->remove_param)
2367 tree expr, base, off;
2368 location_t loc;
2370 /* When we create a new parameter out of the value of an old one, we can
2371 perform the following kinds of transformations:
2373 - A scalar passed by reference is converted to a scalar passed by
2374 value. (adj->by_ref is false and the type of the original
2375 actual argument is a pointer to a scalar).
2377 - A part of an aggregate is passed instead of the whole aggregate.
2378 The part can be passed either by value or by reference, this is
2379 determined by value of adj->by_ref. Moreover, the code below
2380 handles both situations when the original aggregate is passed by
2381 value (its type is not a pointer) and when it is passed by
2382 reference (it is a pointer to an aggregate).
2384 When the new argument is passed by reference (adj->by_ref is true)
2385 it must be a part of an aggregate and therefore we form it by
2386 simply taking the address of a reference inside the original
2387 aggregate. */
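/* Editor's illustration with hypothetical temporaries: a scalar formerly
   passed by reference turns a call "foo (&i)" into roughly
   "tmp = *(&i); foo (tmp);", while passing a piece of an aggregate by
   reference turns "bar (s)" into "bar (&<s plus offset>)"; the MEM_REF
   built below expresses the dereference or the piece selection in
   both cases.  */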
2389 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
2390 base = gimple_call_arg (stmt, adj->base_index);
2391 loc = EXPR_LOCATION (base);
2393 if (TREE_CODE (base) != ADDR_EXPR
2394 && POINTER_TYPE_P (TREE_TYPE (base)))
2395 off = build_int_cst (adj->alias_ptr_type,
2396 adj->offset / BITS_PER_UNIT);
2397 else
2399 HOST_WIDE_INT base_offset;
2400 tree prev_base;
2402 if (TREE_CODE (base) == ADDR_EXPR)
2403 base = TREE_OPERAND (base, 0);
2404 prev_base = base;
2405 base = get_addr_base_and_unit_offset (base, &base_offset);
2406 /* Aggregate arguments can have non-invariant addresses. */
2407 if (!base)
2409 base = build_fold_addr_expr (prev_base);
2410 off = build_int_cst (adj->alias_ptr_type,
2411 adj->offset / BITS_PER_UNIT);
2413 else if (TREE_CODE (base) == MEM_REF)
2415 off = build_int_cst (adj->alias_ptr_type,
2416 base_offset
2417 + adj->offset / BITS_PER_UNIT);
2418 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
2419 off, 0);
2420 base = TREE_OPERAND (base, 0);
2422 else
2424 off = build_int_cst (adj->alias_ptr_type,
2425 base_offset
2426 + adj->offset / BITS_PER_UNIT);
2427 base = build_fold_addr_expr (base);
2431 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
2432 if (adj->by_ref)
2433 expr = build_fold_addr_expr (expr);
2435 expr = force_gimple_operand_gsi (&gsi, expr,
2436 adj->by_ref
2437 || is_gimple_reg_type (adj->type),
2438 NULL, true, GSI_SAME_STMT);
2439 VEC_quick_push (tree, vargs, expr);
2443 if (dump_file && (dump_flags & TDF_DETAILS))
2445 fprintf (dump_file, "replacing stmt:");
2446 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
2449 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
2450 new_stmt = gimple_build_call_vec (callee_decl, vargs);
2451 VEC_free (tree, heap, vargs);
2452 if (gimple_call_lhs (stmt))
2453 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
2455 gimple_set_block (new_stmt, gimple_block (stmt));
2456 if (gimple_has_location (stmt))
2457 gimple_set_location (new_stmt, gimple_location (stmt));
2458 gimple_call_copy_flags (new_stmt, stmt);
2459 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
2461 if (dump_file && (dump_flags & TDF_DETAILS))
2463 fprintf (dump_file, "with stmt:");
2464 print_gimple_stmt (dump_file, new_stmt, 0, 0);
2465 fprintf (dump_file, "\n");
2467 gsi_replace (&gsi, new_stmt, true);
2468 if (cs)
2469 cgraph_set_call_stmt (cs, new_stmt);
2470 update_ssa (TODO_update_ssa);
2471 free_dominance_info (CDI_DOMINATORS);
2474 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
2476 static bool
2477 index_in_adjustments_multiple_times_p (int base_index,
2478 ipa_parm_adjustment_vec adjustments)
2480 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2481 bool one = false;
2483 for (i = 0; i < len; i++)
2485 struct ipa_parm_adjustment *adj;
2486 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2488 if (adj->base_index == base_index)
2490 if (one)
2491 return true;
2492 else
2493 one = true;
2496 return false;
2500 /* Return adjustments that should have the same effect on function parameters
2501 and call arguments as if they were first changed according to adjustments in
2502 INNER and then by adjustments in OUTER. */
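/* Editor's worked example (not from the original sources): suppose INNER
   keeps parameter 0 and replaces parameter 1 by the piece at offset 32, and
   OUTER then removes what has become parameter 0 and keeps parameter 1.  The
   combined vector removes original parameter 0 and still extracts offset 32
   from original parameter 1; for two non-copy adjustments the offsets are
   simply added.  */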
2504 ipa_parm_adjustment_vec
2505 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
2506 ipa_parm_adjustment_vec outer)
2508 int i, outlen = VEC_length (ipa_parm_adjustment_t, outer);
2509 int inlen = VEC_length (ipa_parm_adjustment_t, inner);
2510 int removals = 0;
2511 ipa_parm_adjustment_vec adjustments, tmp;
2513 tmp = VEC_alloc (ipa_parm_adjustment_t, heap, inlen);
2514 for (i = 0; i < inlen; i++)
2516 struct ipa_parm_adjustment *n;
2517 n = VEC_index (ipa_parm_adjustment_t, inner, i);
2519 if (n->remove_param)
2520 removals++;
2521 else
2522 VEC_quick_push (ipa_parm_adjustment_t, tmp, n);
2525 adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, outlen + removals);
2526 for (i = 0; i < outlen; i++)
2528 struct ipa_parm_adjustment *r;
2529 struct ipa_parm_adjustment *out = VEC_index (ipa_parm_adjustment_t,
2530 outer, i);
2531 struct ipa_parm_adjustment *in = VEC_index (ipa_parm_adjustment_t, tmp,
2532 out->base_index);
2534 gcc_assert (!in->remove_param);
2535 if (out->remove_param)
2537 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
2539 r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
2540 memset (r, 0, sizeof (*r));
2541 r->remove_param = true;
2543 continue;
2546 r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
2547 memset (r, 0, sizeof (*r));
2548 r->base_index = in->base_index;
2549 r->type = out->type;
2551 /* FIXME: Create nonlocal value too. */
2553 if (in->copy_param && out->copy_param)
2554 r->copy_param = true;
2555 else if (in->copy_param)
2556 r->offset = out->offset;
2557 else if (out->copy_param)
2558 r->offset = in->offset;
2559 else
2560 r->offset = in->offset + out->offset;
2563 for (i = 0; i < inlen; i++)
2565 struct ipa_parm_adjustment *n = VEC_index (ipa_parm_adjustment_t,
2566 inner, i);
2568 if (n->remove_param)
2569 VEC_quick_push (ipa_parm_adjustment_t, adjustments, n);
2572 VEC_free (ipa_parm_adjustment_t, heap, tmp);
2573 return adjustments;
2576 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a human
2577 friendly way, assuming they are meant to be applied to FNDECL. */
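/* Editor's note: the output produced below looks roughly like

       IPA param adjustments: 0. base_index: 0 - a, copy_param, type: <... int>
                              1. base_index: 1 - s, offset 32, type: <... int>

   with one line per adjustment; the tree dumps come from print_generic_expr
   and print_node_brief, so their exact form varies.  */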
2579 void
2580 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
2581 tree fndecl)
2583 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2584 bool first = true;
2585 VEC(tree, heap) *parms = ipa_get_vector_of_formal_parms (fndecl);
2587 fprintf (file, "IPA param adjustments: ");
2588 for (i = 0; i < len; i++)
2590 struct ipa_parm_adjustment *adj;
2591 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2593 if (!first)
2594 fprintf (file, " ");
2595 else
2596 first = false;
2598 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
2599 print_generic_expr (file, VEC_index (tree, parms, adj->base_index), 0);
2600 if (adj->base)
2602 fprintf (file, ", base: ");
2603 print_generic_expr (file, adj->base, 0);
2605 if (adj->reduction)
2607 fprintf (file, ", reduction: ");
2608 print_generic_expr (file, adj->reduction, 0);
2610 if (adj->new_ssa_base)
2612 fprintf (file, ", new_ssa_base: ");
2613 print_generic_expr (file, adj->new_ssa_base, 0);
2616 if (adj->copy_param)
2617 fprintf (file, ", copy_param");
2618 else if (adj->remove_param)
2619 fprintf (file, ", remove_param");
2620 else
2621 fprintf (file, ", offset %li", (long) adj->offset);
2622 if (adj->by_ref)
2623 fprintf (file, ", by_ref");
2624 print_node_brief (file, ", type: ", adj->type, 0);
2625 fprintf (file, "\n");
2627 VEC_free (tree, heap, parms);
2630 /* Stream out jump function JUMP_FUNC to OB. */
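/* Editor's note: the streamed record is the jump function type as a ULEB128
   followed by a type-specific payload (a tree reference and/or further
   LEB128s); ipa_read_jump_function below must be kept in exact sync with
   this layout.  */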
2632 static void
2633 ipa_write_jump_function (struct output_block *ob,
2634 struct ipa_jump_func *jump_func)
2636 lto_output_uleb128_stream (ob->main_stream,
2637 jump_func->type);
2639 switch (jump_func->type)
2641 case IPA_JF_UNKNOWN:
2642 break;
2643 case IPA_JF_KNOWN_TYPE:
2644 lto_output_tree (ob, jump_func->value.base_binfo, true);
2645 break;
2646 case IPA_JF_CONST:
2647 lto_output_tree (ob, jump_func->value.constant, true);
2648 break;
2649 case IPA_JF_PASS_THROUGH:
2650 lto_output_tree (ob, jump_func->value.pass_through.operand, true);
2651 lto_output_uleb128_stream (ob->main_stream,
2652 jump_func->value.pass_through.formal_id);
2653 lto_output_uleb128_stream (ob->main_stream,
2654 jump_func->value.pass_through.operation);
2655 break;
2656 case IPA_JF_ANCESTOR:
2657 lto_output_uleb128_stream (ob->main_stream,
2658 jump_func->value.ancestor.offset);
2659 lto_output_tree (ob, jump_func->value.ancestor.type, true);
2660 lto_output_uleb128_stream (ob->main_stream,
2661 jump_func->value.ancestor.formal_id);
2662 break;
2663 case IPA_JF_CONST_MEMBER_PTR:
2664 lto_output_tree (ob, jump_func->value.member_cst.pfn, true);
2665 lto_output_tree (ob, jump_func->value.member_cst.delta, false);
2666 break;
2670 /* Read in jump function JUMP_FUNC from IB. */
2672 static void
2673 ipa_read_jump_function (struct lto_input_block *ib,
2674 struct ipa_jump_func *jump_func,
2675 struct data_in *data_in)
2677 jump_func->type = (enum jump_func_type) lto_input_uleb128 (ib);
2679 switch (jump_func->type)
2681 case IPA_JF_UNKNOWN:
2682 break;
2683 case IPA_JF_KNOWN_TYPE:
2684 jump_func->value.base_binfo = lto_input_tree (ib, data_in);
2685 break;
2686 case IPA_JF_CONST:
2687 jump_func->value.constant = lto_input_tree (ib, data_in);
2688 break;
2689 case IPA_JF_PASS_THROUGH:
2690 jump_func->value.pass_through.operand = lto_input_tree (ib, data_in);
2691 jump_func->value.pass_through.formal_id = lto_input_uleb128 (ib);
2692 jump_func->value.pass_through.operation = (enum tree_code) lto_input_uleb128 (ib);
2693 break;
2694 case IPA_JF_ANCESTOR:
2695 jump_func->value.ancestor.offset = lto_input_uleb128 (ib);
2696 jump_func->value.ancestor.type = lto_input_tree (ib, data_in);
2697 jump_func->value.ancestor.formal_id = lto_input_uleb128 (ib);
2698 break;
2699 case IPA_JF_CONST_MEMBER_PTR:
2700 jump_func->value.member_cst.pfn = lto_input_tree (ib, data_in);
2701 jump_func->value.member_cst.delta = lto_input_tree (ib, data_in);
2702 break;
2706 /* Stream out to OB the parts of cgraph_indirect_call_info corresponding to CS
2707 that are relevant to indirect inlining. */
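/* Editor's note: the record written here is param_index and anc_offset as
   signed LEB128s, a bitpack carrying the single polymorphic bit, and, for
   polymorphic calls only, otr_token followed by the otr_type tree; the
   reader below mirrors this layout.  */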
2709 static void
2710 ipa_write_indirect_edge_info (struct output_block *ob,
2711 struct cgraph_edge *cs)
2713 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2714 struct bitpack_d bp;
2716 lto_output_sleb128_stream (ob->main_stream, ii->param_index);
2717 lto_output_sleb128_stream (ob->main_stream, ii->anc_offset);
2718 bp = bitpack_create (ob->main_stream);
2719 bp_pack_value (&bp, ii->polymorphic, 1);
2720 lto_output_bitpack (&bp);
2722 if (ii->polymorphic)
2724 lto_output_sleb128_stream (ob->main_stream, ii->otr_token);
2725 lto_output_tree (ob, ii->otr_type, true);
2729 /* Read in from IB the parts of cgraph_indirect_call_info corresponding to CS
2730 that are relevant to indirect inlining. */
2732 static void
2733 ipa_read_indirect_edge_info (struct lto_input_block *ib,
2734 struct data_in *data_in ATTRIBUTE_UNUSED,
2735 struct cgraph_edge *cs)
2737 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2738 struct bitpack_d bp;
2740 ii->param_index = (int) lto_input_sleb128 (ib);
2741 ii->anc_offset = (HOST_WIDE_INT) lto_input_sleb128 (ib);
2742 bp = lto_input_bitpack (ib);
2743 ii->polymorphic = bp_unpack_value (&bp, 1);
2744 if (ii->polymorphic)
2746 ii->otr_token = (HOST_WIDE_INT) lto_input_sleb128 (ib);
2747 ii->otr_type = lto_input_tree (ib, data_in);
2751 /* Stream out NODE info to OB. */
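/* Editor's note: the per-node record consists of the node reference, a
   bitpack with called_with_var_arguments and one "used" bit per parameter,
   then for every callee edge its argument count followed by that many jump
   functions, and finally the indirect call info of each indirect edge.  */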
2753 static void
2754 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
2756 int node_ref;
2757 lto_cgraph_encoder_t encoder;
2758 struct ipa_node_params *info = IPA_NODE_REF (node);
2759 int j;
2760 struct cgraph_edge *e;
2761 struct bitpack_d bp;
2763 encoder = ob->decl_state->cgraph_node_encoder;
2764 node_ref = lto_cgraph_encoder_encode (encoder, node);
2765 lto_output_uleb128_stream (ob->main_stream, node_ref);
2767 bp = bitpack_create (ob->main_stream);
2768 bp_pack_value (&bp, info->called_with_var_arguments, 1);
2769 gcc_assert (info->uses_analysis_done
2770 || ipa_get_param_count (info) == 0);
2771 gcc_assert (!info->node_enqueued);
2772 gcc_assert (!info->ipcp_orig_node);
2773 for (j = 0; j < ipa_get_param_count (info); j++)
2774 bp_pack_value (&bp, info->params[j].used, 1);
2775 lto_output_bitpack (&bp);
2776 for (e = node->callees; e; e = e->next_callee)
2778 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2780 lto_output_uleb128_stream (ob->main_stream,
2781 ipa_get_cs_argument_count (args));
2782 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
2783 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
2785 for (e = node->indirect_calls; e; e = e->next_callee)
2786 ipa_write_indirect_edge_info (ob, e);
2789 /* Stream in NODE info from IB. */
2791 static void
2792 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
2793 struct data_in *data_in)
2795 struct ipa_node_params *info = IPA_NODE_REF (node);
2796 int k;
2797 struct cgraph_edge *e;
2798 struct bitpack_d bp;
2800 ipa_initialize_node_params (node);
2802 bp = lto_input_bitpack (ib);
2803 info->called_with_var_arguments = bp_unpack_value (&bp, 1);
2804 if (ipa_get_param_count (info) != 0)
2805 info->uses_analysis_done = true;
2806 info->node_enqueued = false;
2807 for (k = 0; k < ipa_get_param_count (info); k++)
2808 info->params[k].used = bp_unpack_value (&bp, 1);
2809 for (e = node->callees; e; e = e->next_callee)
2811 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2812 int count = lto_input_uleb128 (ib);
2814 ipa_set_cs_argument_count (args, count);
2815 if (!count)
2816 continue;
2818 args->jump_functions = ggc_alloc_cleared_vec_ipa_jump_func
2819 (ipa_get_cs_argument_count (args));
2820 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
2821 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), data_in);
2823 for (e = node->indirect_calls; e; e = e->next_callee)
2824 ipa_read_indirect_edge_info (ib, data_in, e);
2827 /* Write jump functions for nodes in SET. */
2829 void
2830 ipa_prop_write_jump_functions (cgraph_node_set set)
2832 struct cgraph_node *node;
2833 struct output_block *ob = create_output_block (LTO_section_jump_functions);
2834 unsigned int count = 0;
2835 cgraph_node_set_iterator csi;
2837 ob->cgraph_node = NULL;
2839 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
2841 node = csi_node (csi);
2842 if (node->analyzed && IPA_NODE_REF (node) != NULL)
2843 count++;
2846 lto_output_uleb128_stream (ob->main_stream, count);
2848 /* Process all of the functions. */
2849 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
2851 node = csi_node (csi);
2852 if (node->analyzed && IPA_NODE_REF (node) != NULL)
2853 ipa_write_node_info (ob, node);
2855 lto_output_1_stream (ob->main_stream, 0);
2856 produce_asm (ob, NULL);
2857 destroy_output_block (ob);
2860 /* Read the jump function section, given as DATA of length LEN, belonging to file FILE_DATA. */
2862 static void
2863 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
2864 size_t len)
2866 const struct lto_function_header *header =
2867 (const struct lto_function_header *) data;
2868 const int32_t cfg_offset = sizeof (struct lto_function_header);
2869 const int32_t main_offset = cfg_offset + header->cfg_size;
2870 const int32_t string_offset = main_offset + header->main_size;
2871 struct data_in *data_in;
2872 struct lto_input_block ib_main;
2873 unsigned int i;
2874 unsigned int count;
2876 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
2877 header->main_size);
2879 data_in =
2880 lto_data_in_create (file_data, (const char *) data + string_offset,
2881 header->string_size, NULL);
2882 count = lto_input_uleb128 (&ib_main);
2884 for (i = 0; i < count; i++)
2886 unsigned int index;
2887 struct cgraph_node *node;
2888 lto_cgraph_encoder_t encoder;
2890 index = lto_input_uleb128 (&ib_main);
2891 encoder = file_data->cgraph_node_encoder;
2892 node = lto_cgraph_encoder_deref (encoder, index);
2893 gcc_assert (node->analyzed);
2894 ipa_read_node_info (&ib_main, node, data_in);
2896 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
2897 len);
2898 lto_data_in_delete (data_in);
2901 /* Read ipcp jump functions. */
2903 void
2904 ipa_prop_read_jump_functions (void)
2906 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
2907 struct lto_file_decl_data *file_data;
2908 unsigned int j = 0;
2910 ipa_check_create_node_params ();
2911 ipa_check_create_edge_args ();
2912 ipa_register_cgraph_hooks ();
2914 while ((file_data = file_data_vec[j++]))
2916 size_t len;
2917 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
2919 if (data)
2920 ipa_prop_read_section (file_data, data, len);
2924 /* After merging units, we can get mismatches in argument counts.
2925 Decl merging may also have rendered parameter lists obsolete.
2926 Recompute the called_with_variable_arg info as well. */
2928 void
2929 ipa_update_after_lto_read (void)
2931 struct cgraph_node *node;
2932 struct cgraph_edge *cs;
2934 ipa_check_create_node_params ();
2935 ipa_check_create_edge_args ();
2937 for (node = cgraph_nodes; node; node = node->next)
2938 if (node->analyzed)
2939 ipa_initialize_node_params (node);
2941 for (node = cgraph_nodes; node; node = node->next)
2942 if (node->analyzed)
2943 for (cs = node->callees; cs; cs = cs->next_callee)
2945 if (ipa_get_cs_argument_count (IPA_EDGE_REF (cs))
2946 != ipa_get_param_count (IPA_NODE_REF (cs->callee)))
2947 ipa_set_called_with_variable_arg (IPA_NODE_REF (cs->callee));