gcc/ipa-prop.c
1 /* Interprocedural analyses.
2 Copyright (C) 2005, 2007, 2008, 2009, 2010, 2011
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree.h"
25 #include "langhooks.h"
26 #include "ggc.h"
27 #include "target.h"
28 #include "cgraph.h"
29 #include "ipa-prop.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-inline.h"
33 #include "gimple.h"
34 #include "flags.h"
35 #include "timevar.h"
37 #include "diagnostic.h"
38 #include "tree-pretty-print.h"
39 #include "gimple-pretty-print.h"
40 #include "lto-streamer.h"
41 #include "data-streamer.h"
42 #include "tree-streamer.h"
45 /* Intermediate information about a parameter that is only useful during the
46 run of ipa_analyze_node and is not kept afterwards. */
48 struct param_analysis_info
50 bool modified;              /* Set if the parameter has been found to be possibly modified.  */
51 bitmap visited_statements;  /* Statements already visited by walk_aliased_vdefs.  */
54 /* Vector where the parameter infos are actually stored. */
55 VEC (ipa_node_params_t, heap) *ipa_node_params_vector;
56 /* Vector where the argument infos are actually stored. */
57 VEC (ipa_edge_args_t, gc) *ipa_edge_args_vector;
59 /* Bitmap with all UIDs of call graph edges that have been already processed
60 by indirect inlining. */
61 static bitmap iinlining_processed_edges;
63 /* Holders of ipa cgraph hooks: */
64 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
65 static struct cgraph_node_hook_list *node_removal_hook_holder;
66 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
67 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
68 static struct cgraph_node_hook_list *function_insertion_hook_holder;
70 /* Return the index of the formal parameter whose tree is PTREE in the function
71 corresponding to INFO, or -1 if it is not found. */
73 int
74 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
76 int i, count;
78 count = ipa_get_param_count (info);
79 for (i = 0; i < count; i++)
80 if (ipa_get_param (info, i) == ptree)
81 return i;
83 return -1;
86 /* Populate the param_decl field in parameter descriptors of INFO that
87 corresponds to NODE. */
89 static void
90 ipa_populate_param_decls (struct cgraph_node *node,
91 struct ipa_node_params *info)
93 tree fndecl;
94 tree fnargs;
95 tree parm;
96 int param_num;
98 fndecl = node->decl;
99 fnargs = DECL_ARGUMENTS (fndecl);
100 param_num = 0;
101 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
103 VEC_index (ipa_param_descriptor_t,
104 info->descriptors, param_num)->decl = parm;
105 param_num++;
109 /* Return how many formal parameters FNDECL has. */
111 static inline int
112 count_formal_params (tree fndecl)
114 tree parm;
115 int count = 0;
117 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
118 count++;
120 return count;
123 /* Initialize the ipa_node_params structure associated with NODE by counting
124 the function parameters, creating the descriptors and populating their
125 param_decls. */
127 void
128 ipa_initialize_node_params (struct cgraph_node *node)
130 struct ipa_node_params *info = IPA_NODE_REF (node);
132 if (!info->descriptors)
134 int param_count;
136 param_count = count_formal_params (node->decl);
137 if (param_count)
139 VEC_safe_grow_cleared (ipa_param_descriptor_t, heap,
140 info->descriptors, param_count);
141 ipa_populate_param_decls (node, info);
146 /* Count the number of arguments of callsite CS and store it in the
147 ipa_edge_args structure corresponding to this callsite. */
149 static void
150 ipa_count_arguments (struct cgraph_edge *cs)
152 gimple stmt;
153 int arg_num;
155 stmt = cs->call_stmt;
156 gcc_assert (is_gimple_call (stmt));
157 arg_num = gimple_call_num_args (stmt);
158 if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
159 <= (unsigned) cgraph_edge_max_uid)
160 VEC_safe_grow_cleared (ipa_edge_args_t, gc,
161 ipa_edge_args_vector, cgraph_edge_max_uid + 1);
162 ipa_set_cs_argument_count (IPA_EDGE_REF (cs), arg_num);
165 /* Print the jump functions associated with call graph edge CS to file F. */
167 static void
168 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
170 int i, count;
172 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
173 for (i = 0; i < count; i++)
175 struct ipa_jump_func *jump_func;
176 enum jump_func_type type;
178 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
179 type = jump_func->type;
181 fprintf (f, " param %d: ", i);
182 if (type == IPA_JF_UNKNOWN)
183 fprintf (f, "UNKNOWN\n");
184 else if (type == IPA_JF_KNOWN_TYPE)
186 tree binfo_type = TREE_TYPE (jump_func->value.base_binfo);
187 fprintf (f, "KNOWN TYPE, type in binfo is: ");
188 print_generic_expr (f, binfo_type, 0);
189 fprintf (f, " (%u)\n", TYPE_UID (binfo_type));
191 else if (type == IPA_JF_CONST)
193 tree val = jump_func->value.constant;
194 fprintf (f, "CONST: ");
195 print_generic_expr (f, val, 0);
196 if (TREE_CODE (val) == ADDR_EXPR
197 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
199 fprintf (f, " -> ");
200 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)), 0);
203 fprintf (f, "\n");
205 else if (type == IPA_JF_CONST_MEMBER_PTR)
207 fprintf (f, "CONST MEMBER PTR: ");
208 print_generic_expr (f, jump_func->value.member_cst.pfn, 0);
209 fprintf (f, ", ");
210 print_generic_expr (f, jump_func->value.member_cst.delta, 0);
211 fprintf (f, "\n");
213 else if (type == IPA_JF_PASS_THROUGH)
215 fprintf (f, "PASS THROUGH: ");
216 fprintf (f, "%d, op %s ",
217 jump_func->value.pass_through.formal_id,
218 tree_code_name[(int)
219 jump_func->value.pass_through.operation]);
220 if (jump_func->value.pass_through.operation != NOP_EXPR)
221 print_generic_expr (f,
222 jump_func->value.pass_through.operand, 0);
223 fprintf (f, "\n");
225 else if (type == IPA_JF_ANCESTOR)
227 fprintf (f, "ANCESTOR: ");
228 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
229 jump_func->value.ancestor.formal_id,
230 jump_func->value.ancestor.offset);
231 print_generic_expr (f, jump_func->value.ancestor.type, 0);
232 fprintf (f, "\n");
238 /* Print the jump functions of all arguments on all call graph edges going from
239 NODE to file F. */
241 void
242 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
244 struct cgraph_edge *cs;
245 int i;
247 fprintf (f, " Jump functions of caller %s:\n", cgraph_node_name (node));
248 for (cs = node->callees; cs; cs = cs->next_callee)
250 if (!ipa_edge_args_info_available_for_edge_p (cs))
251 continue;
253 fprintf (f, " callsite %s/%i -> %s/%i : \n",
254 cgraph_node_name (node), node->uid,
255 cgraph_node_name (cs->callee), cs->callee->uid);
256 ipa_print_node_jump_functions_for_edge (f, cs);
259 for (cs = node->indirect_calls, i = 0; cs; cs = cs->next_callee, i++)
261 if (!ipa_edge_args_info_available_for_edge_p (cs))
262 continue;
264 if (cs->call_stmt)
266 fprintf (f, " indirect callsite %d for stmt ", i);
267 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
269 else
270 fprintf (f, " indirect callsite %d :\n", i);
271 ipa_print_node_jump_functions_for_edge (f, cs);
276 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
278 void
279 ipa_print_all_jump_functions (FILE *f)
281 struct cgraph_node *node;
283 fprintf (f, "\nJump functions:\n");
284 for (node = cgraph_nodes; node; node = node->next)
286 ipa_print_node_jump_functions (f, node);
290 /* Structure to be passed in between detect_type_change and
291 check_stmt_for_type_change. */
293 struct type_change_info
295 /* Set to true if dynamic type change has been detected. */
296 bool type_maybe_changed;
299 /* Return true if STMT can modify a virtual method table pointer.
301 This function makes special assumptions about both constructors and
302 destructors which are all the functions that are allowed to alter the VMT
303 pointers. It assumes that destructors begin with assignment into all VMT
304 pointers and that constructors essentially look as follows:
306 1) The very first thing they do is that they call constructors of ancestor
307 sub-objects that have them.
309 2) Then the VMT pointers of this object and all of its ancestors are set to
310 new values corresponding to the type to which the constructor belongs.
312 3) Only afterwards, other stuff such as constructors of member sub-objects
313 and the code written by the user is run. Only this code may include calling
314 virtual functions, directly or indirectly.
316 There is no way to call a constructor of an ancestor sub-object in any
317 other way.
319 This means that we do not have to care whether constructors get the correct
320 type information because they will always change it (in fact, if we define
321 the type to be given by the VMT pointer, it is undefined).
323 The most important fact to derive from the above is that if, for some
324 statement in section 3, we try to detect whether the dynamic type has
325 changed, we can safely ignore all calls as we examine the function body
326 backwards until we reach statements in section 2 because these calls cannot
327 be ancestor constructors or destructors (if the input is not bogus) and so
328 do not change the dynamic type (this holds true only for automatically
329 allocated objects but at the moment we devirtualize only these). We then
330 must detect that statements in section 2 change the dynamic type and can try
331 to derive the new type. That is enough and we can stop, we will never see
332 the calls into constructors of sub-objects in this code. Therefore we can
333 safely ignore all call statements that we traverse.
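   As an illustration of the above (class and field names here are purely
   hypothetical, not taken from any particular test case), a constructor of a
   class B derived from A conceptually expands to:

     B::B (struct B *this)
     {
       A::A (&this->D.base);        <- 1) construct ancestor sub-objects first
       this->_vptr.B = &vtable_B;   <- 2) only then store the new VMT pointer(s)
       ... user-written code ...    <- 3) and only afterwards run user code
     }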
336 static bool
337 stmt_may_be_vtbl_ptr_store (gimple stmt)
339 if (is_gimple_call (stmt))
340 return false;
341 else if (is_gimple_assign (stmt))
343 tree lhs = gimple_assign_lhs (stmt);
345 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
347 if (flag_strict_aliasing
348 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
349 return false;
351 if (TREE_CODE (lhs) == COMPONENT_REF
352 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
353 return false;
354 /* In the future we might want to use get_base_ref_and_offset to find
355 if there is a field corresponding to the offset and if so, proceed
356 almost as if it were a component ref. */
359 return true;
362 /* Callback of walk_aliased_vdefs and a helper function for
363 detect_type_change to check whether a particular statement may modify
364 the virtual table pointer, and if possible also determine the new type of
365 the (sub-)object. It stores its result into DATA, which points to a
366 type_change_info structure. */
368 static bool
369 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
371 gimple stmt = SSA_NAME_DEF_STMT (vdef);
372 struct type_change_info *tci = (struct type_change_info *) data;
374 if (stmt_may_be_vtbl_ptr_store (stmt))
376 tci->type_maybe_changed = true;
377 return true;
379 else
380 return false;
383 /* Detect whether the dynamic type of ARG has changed (before callsite CALL) by
384 looking for assignments to its virtual table pointer. If it has, return true
385 and fill in the jump function JFUNC with relevant type information or set it
386 to unknown. ARG is the object itself (not a pointer to it, unless
387 dereferenced). BASE is the base of the memory access as returned by
388 get_ref_base_and_extent, as is the offset. */
390 static bool
391 detect_type_change (tree arg, tree base, gimple call,
392 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
394 struct type_change_info tci;
395 ao_ref ao;
397 gcc_checking_assert (DECL_P (arg)
398 || TREE_CODE (arg) == MEM_REF
399 || handled_component_p (arg));
400 /* Const calls cannot call virtual methods through VMT and so type changes do
401 not matter. */
402 if (!flag_devirtualize || !gimple_vuse (call))
403 return false;
405 tci.type_maybe_changed = false;
407 ao.ref = arg;
408 ao.base = base;
409 ao.offset = offset;
410 ao.size = POINTER_SIZE;
411 ao.max_size = ao.size;
412 ao.ref_alias_set = -1;
413 ao.base_alias_set = -1;
415 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
416 &tci, NULL);
417 if (!tci.type_maybe_changed)
418 return false;
420 jfunc->type = IPA_JF_UNKNOWN;
421 return true;
424 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
425 SSA name (its dereference will become the base and the offset is assumed to
426 be zero). */
428 static bool
429 detect_type_change_ssa (tree arg, gimple call, struct ipa_jump_func *jfunc)
431 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
432 if (!flag_devirtualize
433 || !POINTER_TYPE_P (TREE_TYPE (arg))
434 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != RECORD_TYPE)
435 return false;
437 arg = build2 (MEM_REF, ptr_type_node, arg,
438 build_int_cst (ptr_type_node, 0));
440 return detect_type_change (arg, arg, call, jfunc, 0);
444 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
445 of an assignment statement STMT, try to find out whether NAME can be
446 described by a (possibly polynomial) pass-through jump-function or an
447 ancestor jump function and if so, write the appropriate function into
448 JFUNC. */
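/* As an illustration of the cases handled below (hypothetical GIMPLE; a_1(D)
   and obj_2(D) stand for default definitions of formal parameters):

     b_3 = a_1(D) + 4;               arithmetic pass-through, operation
                                     PLUS_EXPR with operand 4
     c_4 = (unsigned int) a_1(D);    simple pass-through, operation NOP_EXPR
     iftmp.1_5 = &obj_2(D)->D.1762;  candidate for an ancestor jump function
                                     with the offset of the D.1762 sub-object  */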
450 static void
451 compute_complex_assign_jump_func (struct ipa_node_params *info,
452 struct ipa_jump_func *jfunc,
453 gimple call, gimple stmt, tree name)
455 HOST_WIDE_INT offset, size, max_size;
456 tree op1, op2, base, ssa;
457 int index;
459 op1 = gimple_assign_rhs1 (stmt);
460 op2 = gimple_assign_rhs2 (stmt);
462 if (TREE_CODE (op1) == SSA_NAME
463 && SSA_NAME_IS_DEFAULT_DEF (op1))
465 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
466 if (index < 0)
467 return;
469 if (op2)
471 if (!is_gimple_ip_invariant (op2)
472 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
473 && !useless_type_conversion_p (TREE_TYPE (name),
474 TREE_TYPE (op1))))
475 return;
477 jfunc->type = IPA_JF_PASS_THROUGH;
478 jfunc->value.pass_through.formal_id = index;
479 jfunc->value.pass_through.operation = gimple_assign_rhs_code (stmt);
480 jfunc->value.pass_through.operand = op2;
482 else if (gimple_assign_unary_nop_p (stmt)
483 && !detect_type_change_ssa (op1, call, jfunc))
485 jfunc->type = IPA_JF_PASS_THROUGH;
486 jfunc->value.pass_through.formal_id = index;
487 jfunc->value.pass_through.operation = NOP_EXPR;
489 return;
492 if (TREE_CODE (op1) != ADDR_EXPR)
493 return;
494 op1 = TREE_OPERAND (op1, 0);
495 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
496 return;
497 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
498 if (TREE_CODE (base) != MEM_REF
499 /* If this is a varying address, punt. */
500 || max_size == -1
501 || max_size != size)
502 return;
503 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
504 ssa = TREE_OPERAND (base, 0);
505 if (TREE_CODE (ssa) != SSA_NAME
506 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
507 || offset < 0)
508 return;
510 /* Dynamic types are changed only in constructors and destructors. */
511 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
512 if (index >= 0
513 && !detect_type_change (op1, base, call, jfunc, offset))
515 jfunc->type = IPA_JF_ANCESTOR;
516 jfunc->value.ancestor.formal_id = index;
517 jfunc->value.ancestor.offset = offset;
518 jfunc->value.ancestor.type = TREE_TYPE (op1);
522 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
523 it looks like:
525 iftmp.1_3 = &obj_2(D)->D.1762;
527 The base of the MEM_REF must be a default definition SSA NAME of a
528 parameter. Return NULL_TREE otherwise. In case of success, the
529 whole MEM_REF expression is returned and the offset calculated from any
530 handled components and the MEM_REF itself is stored into *OFFSET. The whole
531 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
533 static tree
534 get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
536 HOST_WIDE_INT size, max_size;
537 tree expr, parm, obj;
539 if (!gimple_assign_single_p (assign))
540 return NULL_TREE;
541 expr = gimple_assign_rhs1 (assign);
543 if (TREE_CODE (expr) != ADDR_EXPR)
544 return NULL_TREE;
545 expr = TREE_OPERAND (expr, 0);
546 obj = expr;
547 expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
549 if (TREE_CODE (expr) != MEM_REF
550 /* If this is a varying address, punt. */
551 || max_size == -1
552 || max_size != size
553 || *offset < 0)
554 return NULL_TREE;
555 parm = TREE_OPERAND (expr, 0);
556 if (TREE_CODE (parm) != SSA_NAME
557 || !SSA_NAME_IS_DEFAULT_DEF (parm)
558 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
559 return NULL_TREE;
561 *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
562 *obj_p = obj;
563 return expr;
567 /* Given that an actual argument is an SSA_NAME that is a result of a phi
568 statement PHI, try to find out whether NAME is in fact a
569 multiple-inheritance typecast from a descendant into an ancestor of a formal
570 parameter and thus can be described by an ancestor jump function and if so,
571 write the appropriate function into JFUNC.
573 Essentially we want to match the following pattern:
575 if (obj_2(D) != 0B)
576 goto <bb 3>;
577 else
578 goto <bb 4>;
580 <bb 3>:
581 iftmp.1_3 = &obj_2(D)->D.1762;
583 <bb 4>:
584 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
585 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
586 return D.1879_6; */
588 static void
589 compute_complex_ancestor_jump_func (struct ipa_node_params *info,
590 struct ipa_jump_func *jfunc,
591 gimple call, gimple phi)
593 HOST_WIDE_INT offset;
594 gimple assign, cond;
595 basic_block phi_bb, assign_bb, cond_bb;
596 tree tmp, parm, expr, obj;
597 int index, i;
599 if (gimple_phi_num_args (phi) != 2)
600 return;
602 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
603 tmp = PHI_ARG_DEF (phi, 0);
604 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
605 tmp = PHI_ARG_DEF (phi, 1);
606 else
607 return;
608 if (TREE_CODE (tmp) != SSA_NAME
609 || SSA_NAME_IS_DEFAULT_DEF (tmp)
610 || !POINTER_TYPE_P (TREE_TYPE (tmp))
611 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
612 return;
614 assign = SSA_NAME_DEF_STMT (tmp);
615 assign_bb = gimple_bb (assign);
616 if (!single_pred_p (assign_bb))
617 return;
618 expr = get_ancestor_addr_info (assign, &obj, &offset);
619 if (!expr)
620 return;
621 parm = TREE_OPERAND (expr, 0);
622 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
623 gcc_assert (index >= 0);
625 cond_bb = single_pred (assign_bb);
626 cond = last_stmt (cond_bb);
627 if (!cond
628 || gimple_code (cond) != GIMPLE_COND
629 || gimple_cond_code (cond) != NE_EXPR
630 || gimple_cond_lhs (cond) != parm
631 || !integer_zerop (gimple_cond_rhs (cond)))
632 return;
634 phi_bb = gimple_bb (phi);
635 for (i = 0; i < 2; i++)
637 basic_block pred = EDGE_PRED (phi_bb, i)->src;
638 if (pred != assign_bb && pred != cond_bb)
639 return;
642 if (!detect_type_change (obj, expr, call, jfunc, offset))
644 jfunc->type = IPA_JF_ANCESTOR;
645 jfunc->value.ancestor.formal_id = index;
646 jfunc->value.ancestor.offset = offset;
647 jfunc->value.ancestor.type = TREE_TYPE (obj);
651 /* Given OP which is passed as an actual argument to a called function,
652 determine if it is possible to construct a KNOWN_TYPE jump function for it
653 and if so, create one and store it to JFUNC. */
655 static void
656 compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
657 gimple call)
659 HOST_WIDE_INT offset, size, max_size;
660 tree base, binfo;
662 if (!flag_devirtualize
663 || TREE_CODE (op) != ADDR_EXPR
664 || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE)
665 return;
667 op = TREE_OPERAND (op, 0);
668 base = get_ref_base_and_extent (op, &offset, &size, &max_size);
669 if (!DECL_P (base)
670 || max_size == -1
671 || max_size != size
672 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
673 || is_global_var (base))
674 return;
676 if (detect_type_change (op, base, call, jfunc, offset))
677 return;
679 binfo = TYPE_BINFO (TREE_TYPE (base));
680 if (!binfo)
681 return;
682 binfo = get_binfo_at_offset (binfo, offset, TREE_TYPE (op));
683 if (binfo)
685 jfunc->type = IPA_JF_KNOWN_TYPE;
686 jfunc->value.base_binfo = binfo;
691 /* Determine the jump functions of scalar arguments. Scalar means SSA names
692 and constants of a number of selected types. INFO is the ipa_node_params
693 structure associated with the caller, FUNCTIONS is a pointer to an array of
694 jump function structures associated with CALL which is the call statement
695 being examined. */
697 static void
698 compute_scalar_jump_functions (struct ipa_node_params *info,
699 struct ipa_jump_func *functions,
700 gimple call)
702 tree arg;
703 unsigned num = 0;
705 for (num = 0; num < gimple_call_num_args (call); num++)
707 arg = gimple_call_arg (call, num);
709 if (is_gimple_ip_invariant (arg))
711 functions[num].type = IPA_JF_CONST;
712 functions[num].value.constant = arg;
714 else if (TREE_CODE (arg) == SSA_NAME)
716 if (SSA_NAME_IS_DEFAULT_DEF (arg))
718 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
720 if (index >= 0
721 && !detect_type_change_ssa (arg, call, &functions[num]))
723 functions[num].type = IPA_JF_PASS_THROUGH;
724 functions[num].value.pass_through.formal_id = index;
725 functions[num].value.pass_through.operation = NOP_EXPR;
728 else
730 gimple stmt = SSA_NAME_DEF_STMT (arg);
731 if (is_gimple_assign (stmt))
732 compute_complex_assign_jump_func (info, &functions[num],
733 call, stmt, arg);
734 else if (gimple_code (stmt) == GIMPLE_PHI)
735 compute_complex_ancestor_jump_func (info, &functions[num],
736 call, stmt);
739 else
740 compute_known_type_jump_func (arg, &functions[num], call);
744 /* Inspect the given TYPE and return true iff it has the same structure (the
745 same number of fields of the same types) as a C++ member pointer. If
746 METHOD_PTR and DELTA are non-NULL, store the trees representing the
747 corresponding fields there. */
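/* For reference: in the common (Itanium C++ ABI) representation, which is what
   the GIMPLE dumps quoted further below use, a pointer to member function is a
   record of exactly two fields, a pointer __pfn whose pointed-to type is a
   METHOD_TYPE and an integral __delta used to adjust the this pointer.  */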
749 static bool
750 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
752 tree fld;
754 if (TREE_CODE (type) != RECORD_TYPE)
755 return false;
757 fld = TYPE_FIELDS (type);
758 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
759 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE)
760 return false;
762 if (method_ptr)
763 *method_ptr = fld;
765 fld = DECL_CHAIN (fld);
766 if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld)))
767 return false;
768 if (delta)
769 *delta = fld;
771 if (DECL_CHAIN (fld))
772 return false;
774 return true;
777 /* Callback of walk_aliased_vdefs. Flags that it has been invoked by setting
778 the boolean variable pointed to by DATA to true. */
780 static bool
781 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
782 void *data)
784 bool *b = (bool *) data;
785 *b = true;
786 return true;
789 /* Return true if the formal parameter PARM might have been modified in this
790 function before reaching the statement CALL. PARM_INFO is a pointer to a
791 structure containing intermediate information about PARM. */
793 static bool
794 is_parm_modified_before_call (struct param_analysis_info *parm_info,
795 gimple call, tree parm)
797 bool modified = false;
798 ao_ref refd;
800 if (parm_info->modified)
801 return true;
803 ao_ref_init (&refd, parm);
804 walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
805 &modified, &parm_info->visited_statements);
806 if (modified)
808 parm_info->modified = true;
809 return true;
811 return false;
814 /* Go through arguments of the CALL and for every one that looks like a member
815 pointer, check whether it can be safely declared pass-through and if so,
816 mark that to the corresponding item of jump FUNCTIONS. Return true iff
817 there are non-pass-through member pointers within the arguments. INFO
818 describes formal parameters of the caller. PARMS_INFO is a pointer to a
819 vector containing intermediate information about each formal parameter. */
821 static bool
822 compute_pass_through_member_ptrs (struct ipa_node_params *info,
823 struct param_analysis_info *parms_info,
824 struct ipa_jump_func *functions,
825 gimple call)
827 bool undecided_members = false;
828 unsigned num;
829 tree arg;
831 for (num = 0; num < gimple_call_num_args (call); num++)
833 arg = gimple_call_arg (call, num);
835 if (type_like_member_ptr_p (TREE_TYPE (arg), NULL, NULL))
837 if (TREE_CODE (arg) == PARM_DECL)
839 int index = ipa_get_param_decl_index (info, arg);
841 gcc_assert (index >= 0);
842 if (!is_parm_modified_before_call (&parms_info[index], call, arg))
844 functions[num].type = IPA_JF_PASS_THROUGH;
845 functions[num].value.pass_through.formal_id = index;
846 functions[num].value.pass_through.operation = NOP_EXPR;
848 else
849 undecided_members = true;
851 else
852 undecided_members = true;
856 return undecided_members;
859 /* Simple function filling in a member pointer constant jump function (with PFN
860 and DELTA as the constant value) into JFUNC. */
862 static void
863 fill_member_ptr_cst_jump_function (struct ipa_jump_func *jfunc,
864 tree pfn, tree delta)
866 jfunc->type = IPA_JF_CONST_MEMBER_PTR;
867 jfunc->value.member_cst.pfn = pfn;
868 jfunc->value.member_cst.delta = delta;
871 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
872 return the rhs of its defining statement. */
874 static inline tree
875 get_ssa_def_if_simple_copy (tree rhs)
877 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
879 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
881 if (gimple_assign_single_p (def_stmt))
882 rhs = gimple_assign_rhs1 (def_stmt);
883 else
884 break;
886 return rhs;
889 /* Traverse statements from CALL backwards, scanning whether the argument ARG
890 which is a member pointer is filled in with constant values. If it is, fill
891 the jump function JFUNC in appropriately. METHOD_FIELD and DELTA_FIELD are
892 fields of the record type of the member pointer. To give an example, we
893 look for a pattern looking like the following:
895 D.2515.__pfn ={v} printStuff;
896 D.2515.__delta ={v} 0;
897 i_1 = doprinting (D.2515); */
899 static void
900 determine_cst_member_ptr (gimple call, tree arg, tree method_field,
901 tree delta_field, struct ipa_jump_func *jfunc)
903 gimple_stmt_iterator gsi;
904 tree method = NULL_TREE;
905 tree delta = NULL_TREE;
907 gsi = gsi_for_stmt (call);
909 gsi_prev (&gsi);
910 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
912 gimple stmt = gsi_stmt (gsi);
913 tree lhs, rhs, fld;
915 if (!stmt_may_clobber_ref_p (stmt, arg))
916 continue;
917 if (!gimple_assign_single_p (stmt))
918 return;
920 lhs = gimple_assign_lhs (stmt);
921 rhs = gimple_assign_rhs1 (stmt);
923 if (TREE_CODE (lhs) != COMPONENT_REF
924 || TREE_OPERAND (lhs, 0) != arg)
925 return;
927 fld = TREE_OPERAND (lhs, 1);
928 if (!method && fld == method_field)
930 rhs = get_ssa_def_if_simple_copy (rhs);
931 if (TREE_CODE (rhs) == ADDR_EXPR
932 && TREE_CODE (TREE_OPERAND (rhs, 0)) == FUNCTION_DECL
933 && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs, 0))) == METHOD_TYPE)
935 method = TREE_OPERAND (rhs, 0);
936 if (delta)
938 fill_member_ptr_cst_jump_function (jfunc, rhs, delta);
939 return;
942 else
943 return;
946 if (!delta && fld == delta_field)
948 rhs = get_ssa_def_if_simple_copy (rhs);
949 if (TREE_CODE (rhs) == INTEGER_CST)
951 delta = rhs;
952 if (method)
954 fill_member_ptr_cst_jump_function (jfunc, rhs, delta);
955 return;
958 else
959 return;
963 return;
966 /* Go through the arguments of the CALL and for every member pointer among them
967 try to determine whether it is a constant. If it is, create a corresponding
968 constant jump function in FUNCTIONS which is an array of jump functions
969 associated with the call. */
971 static void
972 compute_cst_member_ptr_arguments (struct ipa_jump_func *functions,
973 gimple call)
975 unsigned num;
976 tree arg, method_field, delta_field;
978 for (num = 0; num < gimple_call_num_args (call); num++)
980 arg = gimple_call_arg (call, num);
982 if (functions[num].type == IPA_JF_UNKNOWN
983 && type_like_member_ptr_p (TREE_TYPE (arg), &method_field,
984 &delta_field))
985 determine_cst_member_ptr (call, arg, method_field, delta_field,
986 &functions[num]);
990 /* Compute jump functions for all arguments of callsite CS and insert the
991 information in the jump_functions array in the ipa_edge_args corresponding
992 to this callsite. */
994 static void
995 ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_info,
996 struct cgraph_edge *cs)
998 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
999 struct ipa_edge_args *arguments = IPA_EDGE_REF (cs);
1000 gimple call;
1002 if (ipa_get_cs_argument_count (arguments) == 0 || arguments->jump_functions)
1003 return;
1004 arguments->jump_functions = ggc_alloc_cleared_vec_ipa_jump_func
1005 (ipa_get_cs_argument_count (arguments));
1007 call = cs->call_stmt;
1008 gcc_assert (is_gimple_call (call));
1010 /* We will deal with constants and SSA scalars first: */
1011 compute_scalar_jump_functions (info, arguments->jump_functions, call);
1013 /* Let's check whether there are any potential member pointers and if so,
1014 whether we can determine their functions as pass_through. */
1015 if (!compute_pass_through_member_ptrs (info, parms_info,
1016 arguments->jump_functions, call))
1017 return;
1019 /* Finally, let's check whether we actually pass a new constant member
1020 pointer here... */
1021 compute_cst_member_ptr_arguments (arguments->jump_functions, call);
1024 /* Compute jump functions for all edges - both direct and indirect - outgoing
1025 from NODE. Also count the actual arguments in the process. */
1027 static void
1028 ipa_compute_jump_functions (struct cgraph_node *node,
1029 struct param_analysis_info *parms_info)
1031 struct cgraph_edge *cs;
1033 for (cs = node->callees; cs; cs = cs->next_callee)
1035 struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee, NULL);
1036 /* We do not need to bother analyzing calls to unknown
1037 functions unless they may become known during lto/whopr. */
1038 if (!cs->callee->analyzed && !flag_lto)
1039 continue;
1040 ipa_count_arguments (cs);
1041 /* If the descriptor of the callee is not initialized yet, we have to do
1042 it now. */
1043 if (callee->analyzed)
1044 ipa_initialize_node_params (callee);
1045 if (ipa_get_cs_argument_count (IPA_EDGE_REF (cs))
1046 != ipa_get_param_count (IPA_NODE_REF (callee)))
1047 ipa_set_called_with_variable_arg (IPA_NODE_REF (callee));
1048 ipa_compute_jump_functions_for_edge (parms_info, cs);
1051 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
1053 ipa_count_arguments (cs);
1054 ipa_compute_jump_functions_for_edge (parms_info, cs);
1058 /* If RHS looks like a rhs of a statement loading pfn from a member
1059 pointer formal parameter, return the parameter, otherwise return
1060 NULL. If USE_DELTA, then we look for a use of the delta field
1061 rather than the pfn. */
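/* The two RHS shapes recognized here correspond to the two forms of the loads
   shown in the comment before ipa_analyze_indirect_call_uses below: either a
   COMPONENT_REF such as f.__pfn, or a MEM_REF with a constant offset such as
   MEM[(struct *)&f + 4B], where f is a PARM_DECL of a member-pointer-like
   type.  */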
1063 static tree
1064 ipa_get_member_ptr_load_param (tree rhs, bool use_delta)
1066 tree rec, ref_field, ref_offset, fld, fld_offset, ptr_field, delta_field;
1068 if (TREE_CODE (rhs) == COMPONENT_REF)
1070 ref_field = TREE_OPERAND (rhs, 1);
1071 rhs = TREE_OPERAND (rhs, 0);
1073 else
1074 ref_field = NULL_TREE;
1075 if (TREE_CODE (rhs) != MEM_REF)
1076 return NULL_TREE;
1077 rec = TREE_OPERAND (rhs, 0);
1078 if (TREE_CODE (rec) != ADDR_EXPR)
1079 return NULL_TREE;
1080 rec = TREE_OPERAND (rec, 0);
1081 if (TREE_CODE (rec) != PARM_DECL
1082 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1083 return NULL_TREE;
1085 ref_offset = TREE_OPERAND (rhs, 1);
1087 if (ref_field)
1089 if (integer_nonzerop (ref_offset))
1090 return NULL_TREE;
1092 if (use_delta)
1093 fld = delta_field;
1094 else
1095 fld = ptr_field;
1097 return ref_field == fld ? rec : NULL_TREE;
1100 if (use_delta)
1101 fld_offset = byte_position (delta_field);
1102 else
1103 fld_offset = byte_position (ptr_field);
1105 return tree_int_cst_equal (ref_offset, fld_offset) ? rec : NULL_TREE;
1108 /* If STMT looks like a statement loading a value from a member pointer formal
1109 parameter, this function returns that parameter. */
1111 static tree
1112 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta)
1114 tree rhs;
1116 if (!gimple_assign_single_p (stmt))
1117 return NULL_TREE;
1119 rhs = gimple_assign_rhs1 (stmt);
1120 return ipa_get_member_ptr_load_param (rhs, use_delta);
1123 /* Returns true iff T is an SSA_NAME defined by a statement. */
1125 static bool
1126 ipa_is_ssa_with_stmt_def (tree t)
1128 if (TREE_CODE (t) == SSA_NAME
1129 && !SSA_NAME_IS_DEFAULT_DEF (t))
1130 return true;
1131 else
1132 return false;
1135 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1136 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1137 indirect call graph edge. */
1139 static struct cgraph_edge *
1140 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
1142 struct cgraph_edge *cs;
1144 cs = cgraph_edge (node, stmt);
1145 cs->indirect_info->param_index = param_index;
1146 cs->indirect_info->anc_offset = 0;
1147 cs->indirect_info->polymorphic = 0;
1148 return cs;
1151 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1152 (described by INFO). PARMS_INFO is a pointer to a vector containing
1153 intermediate information about each formal parameter. Currently it checks
1154 whether the call calls a pointer that is a formal parameter and if so, the
1155 parameter is marked with the called flag and an indirect call graph edge
1156 describing the call is created. This is very simple for ordinary pointers
1157 represented in SSA but not-so-nice when it comes to member pointers. The
1158 ugly part of this function does nothing more than trying to match the
1159 pattern of such a call. An example of such a pattern is the gimple dump
1160 below, the call is on the last line:
1162 <bb 2>:
1163 f$__delta_5 = f.__delta;
1164 f$__pfn_24 = f.__pfn;
1166 or
1167 <bb 2>:
1168 f$__delta_5 = MEM[(struct *)&f];
1169 f$__pfn_24 = MEM[(struct *)&f + 4B];
1171 and a few lines below:
1173 <bb 5>
1174 D.2496_3 = (int) f$__pfn_24;
1175 D.2497_4 = D.2496_3 & 1;
1176 if (D.2497_4 != 0)
1177 goto <bb 3>;
1178 else
1179 goto <bb 4>;
1181 <bb 6>:
1182 D.2500_7 = (unsigned int) f$__delta_5;
1183 D.2501_8 = &S + D.2500_7;
1184 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1185 D.2503_10 = *D.2502_9;
1186 D.2504_12 = f$__pfn_24 + -1;
1187 D.2505_13 = (unsigned int) D.2504_12;
1188 D.2506_14 = D.2503_10 + D.2505_13;
1189 D.2507_15 = *D.2506_14;
1190 iftmp.11_16 = (String:: *) D.2507_15;
1192 <bb 7>:
1193 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1194 D.2500_19 = (unsigned int) f$__delta_5;
1195 D.2508_20 = &S + D.2500_19;
1196 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1198 Such patterns are results of simple calls to a member pointer:
1200 int doprinting (int (MyString::* f)(int) const)
1202 MyString S ("somestring");
1204 return (S.*f)(4);
1208 static void
1209 ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1210 struct ipa_node_params *info,
1211 struct param_analysis_info *parms_info,
1212 gimple call, tree target)
1214 gimple def;
1215 tree n1, n2;
1216 gimple d1, d2;
1217 tree rec, rec2, cond;
1218 gimple branch;
1219 int index;
1220 basic_block bb, virt_bb, join;
1222 if (SSA_NAME_IS_DEFAULT_DEF (target))
1224 tree var = SSA_NAME_VAR (target);
1225 index = ipa_get_param_decl_index (info, var);
1226 if (index >= 0)
1227 ipa_note_param_call (node, index, call);
1228 return;
1231 /* Now we need to try to match the complex pattern of calling a member
1232 pointer. */
1234 if (!POINTER_TYPE_P (TREE_TYPE (target))
1235 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1236 return;
1238 def = SSA_NAME_DEF_STMT (target);
1239 if (gimple_code (def) != GIMPLE_PHI)
1240 return;
1242 if (gimple_phi_num_args (def) != 2)
1243 return;
1245 /* First, we need to check whether one of these is a load from a member
1246 pointer that is a parameter to this function. */
1247 n1 = PHI_ARG_DEF (def, 0);
1248 n2 = PHI_ARG_DEF (def, 1);
1249 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1250 return;
1251 d1 = SSA_NAME_DEF_STMT (n1);
1252 d2 = SSA_NAME_DEF_STMT (n2);
1254 join = gimple_bb (def);
1255 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false)))
1257 if (ipa_get_stmt_member_ptr_load_param (d2, false))
1258 return;
1260 bb = EDGE_PRED (join, 0)->src;
1261 virt_bb = gimple_bb (d2);
1263 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false)))
1265 bb = EDGE_PRED (join, 1)->src;
1266 virt_bb = gimple_bb (d1);
1268 else
1269 return;
1271 /* Second, we need to check that the basic blocks are laid out in the way
1272 corresponding to the pattern. */
1274 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1275 || single_pred (virt_bb) != bb
1276 || single_succ (virt_bb) != join)
1277 return;
1279 /* Third, let's see that the branching is done depending on the least
1280 significant bit of the pfn. */
1282 branch = last_stmt (bb);
1283 if (!branch || gimple_code (branch) != GIMPLE_COND)
1284 return;
1286 if ((gimple_cond_code (branch) != NE_EXPR
1287 && gimple_cond_code (branch) != EQ_EXPR)
1288 || !integer_zerop (gimple_cond_rhs (branch)))
1289 return;
1291 cond = gimple_cond_lhs (branch);
1292 if (!ipa_is_ssa_with_stmt_def (cond))
1293 return;
1295 def = SSA_NAME_DEF_STMT (cond);
1296 if (!is_gimple_assign (def)
1297 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1298 || !integer_onep (gimple_assign_rhs2 (def)))
1299 return;
1301 cond = gimple_assign_rhs1 (def);
1302 if (!ipa_is_ssa_with_stmt_def (cond))
1303 return;
1305 def = SSA_NAME_DEF_STMT (cond);
1307 if (is_gimple_assign (def)
1308 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
1310 cond = gimple_assign_rhs1 (def);
1311 if (!ipa_is_ssa_with_stmt_def (cond))
1312 return;
1313 def = SSA_NAME_DEF_STMT (cond);
1316 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1317 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1318 == ptrmemfunc_vbit_in_delta));
1320 if (rec != rec2)
1321 return;
1323 index = ipa_get_param_decl_index (info, rec);
1324 if (index >= 0 && !is_parm_modified_before_call (&parms_info[index],
1325 call, rec))
1326 ipa_note_param_call (node, index, call);
1328 return;
1331 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1332 object referenced in the expression is a formal parameter of the caller
1333 (described by INFO), create a call note for the statement. */
1335 static void
1336 ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1337 struct ipa_node_params *info, gimple call,
1338 tree target)
1340 struct cgraph_edge *cs;
1341 struct cgraph_indirect_call_info *ii;
1342 struct ipa_jump_func jfunc;
1343 tree obj = OBJ_TYPE_REF_OBJECT (target);
1344 int index;
1345 HOST_WIDE_INT anc_offset;
1347 if (!flag_devirtualize)
1348 return;
1350 if (TREE_CODE (obj) != SSA_NAME)
1351 return;
1353 if (SSA_NAME_IS_DEFAULT_DEF (obj))
1355 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
1356 return;
1358 anc_offset = 0;
1359 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
1360 gcc_assert (index >= 0);
1361 if (detect_type_change_ssa (obj, call, &jfunc))
1362 return;
1364 else
1366 gimple stmt = SSA_NAME_DEF_STMT (obj);
1367 tree expr;
1369 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
1370 if (!expr)
1371 return;
1372 index = ipa_get_param_decl_index (info,
1373 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
1374 gcc_assert (index >= 0);
1375 if (detect_type_change (obj, expr, call, &jfunc, anc_offset))
1376 return;
1379 cs = ipa_note_param_call (node, index, call);
1380 ii = cs->indirect_info;
1381 ii->anc_offset = anc_offset;
1382 ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
1383 ii->otr_type = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (target)));
1384 ii->polymorphic = 1;
1387 /* Analyze a call statement CALL whether and how it utilizes formal parameters
1388 of the caller (described by INFO). PARMS_INFO is a pointer to a vector
1389 containing intermediate information about each formal parameter. */
1391 static void
1392 ipa_analyze_call_uses (struct cgraph_node *node,
1393 struct ipa_node_params *info,
1394 struct param_analysis_info *parms_info, gimple call)
1396 tree target = gimple_call_fn (call);
1398 if (!target)
1399 return;
1400 if (TREE_CODE (target) == SSA_NAME)
1401 ipa_analyze_indirect_call_uses (node, info, parms_info, call, target);
1402 else if (TREE_CODE (target) == OBJ_TYPE_REF)
1403 ipa_analyze_virtual_call_uses (node, info, call, target);
1407 /* Analyze the call statement STMT with respect to formal parameters (described
1408 in INFO) of caller given by NODE. Currently it only checks whether formal
1409 parameters are called. PARMS_INFO is a pointer to a vector containing
1410 intermediate information about each formal parameter. */
1412 static void
1413 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
1414 struct param_analysis_info *parms_info, gimple stmt)
1416 if (is_gimple_call (stmt))
1417 ipa_analyze_call_uses (node, info, parms_info, stmt);
1420 /* Callback of walk_stmt_load_store_addr_ops, used for loads, stores and
1421 address-taking operands alike. If OP is a parameter declaration, mark it as
1422 used in the info structure passed in DATA. */
1424 static bool
1425 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
1426 tree op, void *data)
1428 struct ipa_node_params *info = (struct ipa_node_params *) data;
1430 op = get_base_address (op);
1431 if (op
1432 && TREE_CODE (op) == PARM_DECL)
1434 int index = ipa_get_param_decl_index (info, op);
1435 gcc_assert (index >= 0);
1436 ipa_set_param_used (info, index, true);
1439 return false;
1442 /* Scan the function body of NODE and inspect the uses of formal parameters.
1443 Store the findings in various structures of the associated ipa_node_params
1444 structure, such as parameter flags, notes etc. PARMS_INFO is a pointer to a
1445 vector containing intermediate information about each formal parameter. */
1447 static void
1448 ipa_analyze_params_uses (struct cgraph_node *node,
1449 struct param_analysis_info *parms_info)
1451 tree decl = node->decl;
1452 basic_block bb;
1453 struct function *func;
1454 gimple_stmt_iterator gsi;
1455 struct ipa_node_params *info = IPA_NODE_REF (node);
1456 int i;
1458 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
1459 return;
1461 for (i = 0; i < ipa_get_param_count (info); i++)
1463 tree parm = ipa_get_param (info, i);
1464 /* For SSA regs see if parameter is used. For non-SSA we compute
1465 the flag during modification analysis. */
1466 if (is_gimple_reg (parm)
1467 && gimple_default_def (DECL_STRUCT_FUNCTION (node->decl), parm))
1468 ipa_set_param_used (info, i, true);
1471 func = DECL_STRUCT_FUNCTION (decl);
1472 FOR_EACH_BB_FN (bb, func)
1474 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1476 gimple stmt = gsi_stmt (gsi);
1478 if (is_gimple_debug (stmt))
1479 continue;
1481 ipa_analyze_stmt_uses (node, info, parms_info, stmt);
1482 walk_stmt_load_store_addr_ops (stmt, info,
1483 visit_ref_for_mod_analysis,
1484 visit_ref_for_mod_analysis,
1485 visit_ref_for_mod_analysis);
1487 for (gsi = gsi_start (phi_nodes (bb)); !gsi_end_p (gsi); gsi_next (&gsi))
1488 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
1489 visit_ref_for_mod_analysis,
1490 visit_ref_for_mod_analysis,
1491 visit_ref_for_mod_analysis);
1494 info->uses_analysis_done = 1;
1497 /* Initialize the array describing properties of formal parameters
1498 of NODE, analyze their uses and compute jump functions associated
1499 with actual arguments of calls from within NODE. */
1501 void
1502 ipa_analyze_node (struct cgraph_node *node)
1504 struct ipa_node_params *info;
1505 struct param_analysis_info *parms_info;
1506 int i, param_count;
1508 ipa_check_create_node_params ();
1509 ipa_check_create_edge_args ();
1510 info = IPA_NODE_REF (node);
1511 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
1512 current_function_decl = node->decl;
1513 ipa_initialize_node_params (node);
1515 param_count = ipa_get_param_count (info);
1516 parms_info = XALLOCAVEC (struct param_analysis_info, param_count);
1517 memset (parms_info, 0, sizeof (struct param_analysis_info) * param_count);
1519 ipa_analyze_params_uses (node, parms_info);
1520 ipa_compute_jump_functions (node, parms_info);
1522 for (i = 0; i < param_count; i++)
1523 if (parms_info[i].visited_statements)
1524 BITMAP_FREE (parms_info[i].visited_statements);
1526 current_function_decl = NULL;
1527 pop_cfun ();
1531 /* Update the jump function DST when the call graph edge corresponding to SRC
1532 is being inlined, knowing that DST is of type ancestor and SRC of known
1533 type. */
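/* For example: if SRC says that the argument is known to be of type B and DST
   is an ancestor function with the offset of an A sub-object within B, the
   combination is a KNOWN_TYPE function for that sub-object (the binfo found at
   the given offset), or UNKNOWN if no binfo exists there.  */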
1535 static void
1536 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
1537 struct ipa_jump_func *dst)
1539 tree new_binfo;
1541 new_binfo = get_binfo_at_offset (src->value.base_binfo,
1542 dst->value.ancestor.offset,
1543 dst->value.ancestor.type);
1544 if (new_binfo)
1546 dst->type = IPA_JF_KNOWN_TYPE;
1547 dst->value.base_binfo = new_binfo;
1549 else
1550 dst->type = IPA_JF_UNKNOWN;
1553 /* Update the jump functions associated with call graph edge E when the call
1554 graph edge CS is being inlined, assuming that E->caller is already (possibly
1555 indirectly) inlined into CS->callee and that E has not been inlined. */
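/* As a worked example (indices and offsets are hypothetical): if E's i-th jump
   function is an ancestor function of formal 5 with offset 8 and the inlined
   edge CS passed its own formal 2 unchanged as argument 5 (a NOP_EXPR
   pass-through), the i-th function becomes an ancestor function of formal 2
   with offset 8.  If CS's argument 5 was itself an ancestor function of formal
   2 with offset 16, the offsets add up and the result is an ancestor function
   of formal 2 with offset 24.  */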
1557 static void
1558 update_jump_functions_after_inlining (struct cgraph_edge *cs,
1559 struct cgraph_edge *e)
1561 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
1562 struct ipa_edge_args *args = IPA_EDGE_REF (e);
1563 int count = ipa_get_cs_argument_count (args);
1564 int i;
1566 for (i = 0; i < count; i++)
1568 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
1570 if (dst->type == IPA_JF_ANCESTOR)
1572 struct ipa_jump_func *src;
1574 /* Variable number of arguments can cause havoc if we try to access
1575 one that does not exist in the inlined edge. So make sure we
1576 don't. */
1577 if (dst->value.ancestor.formal_id >= ipa_get_cs_argument_count (top))
1579 dst->type = IPA_JF_UNKNOWN;
1580 continue;
1583 src = ipa_get_ith_jump_func (top, dst->value.ancestor.formal_id);
1584 if (src->type == IPA_JF_KNOWN_TYPE)
1585 combine_known_type_and_ancestor_jfs (src, dst);
1586 else if (src->type == IPA_JF_PASS_THROUGH
1587 && src->value.pass_through.operation == NOP_EXPR)
1588 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
1589 else if (src->type == IPA_JF_ANCESTOR)
1591 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
1592 dst->value.ancestor.offset += src->value.ancestor.offset;
1594 else
1595 dst->type = IPA_JF_UNKNOWN;
1597 else if (dst->type == IPA_JF_PASS_THROUGH)
1599 struct ipa_jump_func *src;
1600 /* We must check range due to calls with variable number of arguments
1601 and we cannot combine jump functions with operations. */
1602 if (dst->value.pass_through.operation == NOP_EXPR
1603 && (dst->value.pass_through.formal_id
1604 < ipa_get_cs_argument_count (top)))
1606 src = ipa_get_ith_jump_func (top,
1607 dst->value.pass_through.formal_id);
1608 *dst = *src;
1610 else
1611 dst->type = IPA_JF_UNKNOWN;
1616 /* If TARGET is an addr_expr of a function declaration, make it the destination
1617 of an indirect edge IE and return the edge. Otherwise, return NULL. Delta,
1618 if non-NULL, is an integer constant that must be added to this pointer
1619 (first parameter). */
1621 struct cgraph_edge *
1622 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target, tree delta)
1624 struct cgraph_node *callee;
1626 if (TREE_CODE (target) == ADDR_EXPR)
1627 target = TREE_OPERAND (target, 0);
1628 if (TREE_CODE (target) != FUNCTION_DECL)
1629 return NULL;
1630 callee = cgraph_get_node (target);
1631 if (!callee)
1632 return NULL;
1633 ipa_check_create_node_params ();
1635 /* We cannot make edges to inline clones. It is a bug if someone removed the
1636 cgraph node too early. */
1637 gcc_assert (!callee->global.inlined_to);
1639 cgraph_make_edge_direct (ie, callee, delta ? tree_low_cst (delta, 0) : 0);
1640 if (dump_file)
1642 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
1643 "(%s/%i -> %s/%i), for stmt ",
1644 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
1645 cgraph_node_name (ie->caller), ie->caller->uid,
1646 cgraph_node_name (ie->callee), ie->callee->uid);
1647 if (ie->call_stmt)
1648 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
1649 else
1650 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
1652 if (delta)
1654 fprintf (dump_file, " Thunk delta is ");
1655 print_generic_expr (dump_file, delta, 0);
1656 fprintf (dump_file, "\n");
1659 callee = cgraph_function_or_thunk_node (callee, NULL);
1661 if (ipa_get_cs_argument_count (IPA_EDGE_REF (ie))
1662 != ipa_get_param_count (IPA_NODE_REF (callee)))
1663 ipa_set_called_with_variable_arg (IPA_NODE_REF (callee));
1665 return ie;
1668 /* Try to find a destination for indirect edge IE that corresponds to a simple
1669 call or a call of a member function pointer and where the destination is a
1670 pointer formal parameter described by jump function JFUNC. If it can be
1671 determined, return the newly direct edge, otherwise return NULL. */
1673 static struct cgraph_edge *
1674 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
1675 struct ipa_jump_func *jfunc)
1677 tree target;
1679 if (jfunc->type == IPA_JF_CONST)
1680 target = jfunc->value.constant;
1681 else if (jfunc->type == IPA_JF_CONST_MEMBER_PTR)
1682 target = jfunc->value.member_cst.pfn;
1683 else
1684 return NULL;
1686 return ipa_make_edge_direct_to_target (ie, target, NULL_TREE);
1689 /* Try to find a destination for indirect edge IE that corresponds to a
1690 virtual call based on a formal parameter which is described by jump
1691 function JFUNC and if it can be determined, make it direct and return the
1692 direct edge. Otherwise, return NULL. */
1694 static struct cgraph_edge *
1695 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
1696 struct ipa_jump_func *jfunc)
1698 tree binfo, type, target, delta;
1699 HOST_WIDE_INT token;
1701 if (jfunc->type == IPA_JF_KNOWN_TYPE)
1702 binfo = jfunc->value.base_binfo;
1703 else
1704 return NULL;
1706 if (!binfo)
1707 return NULL;
1709 token = ie->indirect_info->otr_token;
1710 type = ie->indirect_info->otr_type;
1711 binfo = get_binfo_at_offset (binfo, ie->indirect_info->anc_offset, type);
1712 if (binfo)
1713 target = gimple_get_virt_method_for_binfo (token, binfo, &delta);
1714 else
1715 return NULL;
1717 if (target)
1718 return ipa_make_edge_direct_to_target (ie, target, delta);
1719 else
1720 return NULL;
1723 /* Update the param called notes associated with NODE when CS is being inlined,
1724 assuming NODE is (potentially indirectly) inlined into CS->callee.
1725 Moreover, if the callee is discovered to be constant, create a new cgraph
1726 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
1727 unless NEW_EDGES is NULL. Return true iff new edges were created. */
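/* For instance (hypothetical indices): if NODE contains an indirect call
   through its formal parameter 3 and the inlined call site CS passes the
   address of a known function as argument 3 (an IPA_JF_CONST jump function),
   try_make_edge_direct_simple_call below can turn the indirect edge into a
   direct one.  */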
1729 static bool
1730 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
1731 struct cgraph_node *node,
1732 VEC (cgraph_edge_p, heap) **new_edges)
1734 struct ipa_edge_args *top;
1735 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
1736 bool res = false;
1738 ipa_check_create_edge_args ();
1739 top = IPA_EDGE_REF (cs);
1741 for (ie = node->indirect_calls; ie; ie = next_ie)
1743 struct cgraph_indirect_call_info *ici = ie->indirect_info;
1744 struct ipa_jump_func *jfunc;
1746 next_ie = ie->next_callee;
1747 if (bitmap_bit_p (iinlining_processed_edges, ie->uid))
1748 continue;
1750 /* If we ever use indirect edges for anything other than indirect
1751 inlining, we will need to skip those with negative param_indices. */
1752 if (ici->param_index == -1)
1753 continue;
1755 /* We must check range due to calls with variable number of arguments: */
1756 if (ici->param_index >= ipa_get_cs_argument_count (top))
1758 bitmap_set_bit (iinlining_processed_edges, ie->uid);
1759 continue;
1762 jfunc = ipa_get_ith_jump_func (top, ici->param_index);
1763 if (jfunc->type == IPA_JF_PASS_THROUGH
1764 && jfunc->value.pass_through.operation == NOP_EXPR)
1765 ici->param_index = jfunc->value.pass_through.formal_id;
1766 else if (jfunc->type == IPA_JF_ANCESTOR)
1768 ici->param_index = jfunc->value.ancestor.formal_id;
1769 ici->anc_offset += jfunc->value.ancestor.offset;
1771 else
1772 /* Either we can find a destination for this edge now or never. */
1773 bitmap_set_bit (iinlining_processed_edges, ie->uid);
1775 if (ici->polymorphic)
1776 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc);
1777 else
1778 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc);
1780 if (new_direct_edge)
1782 new_direct_edge->indirect_inlining_edge = 1;
1783 if (new_edges)
1785 VEC_safe_push (cgraph_edge_p, heap, *new_edges,
1786 new_direct_edge);
1787 top = IPA_EDGE_REF (cs);
1788 res = true;
1793 return res;
1796 /* Recursively traverse subtree of NODE (including node) made of inlined
1797 cgraph_edges when CS has been inlined and invoke
1798 update_indirect_edges_after_inlining on all nodes and
1799 update_jump_functions_after_inlining on all non-inlined edges that lead out
1800 of this subtree. Newly discovered indirect edges will be added to
1801 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
1802 created. */
1804 static bool
1805 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
1806 struct cgraph_node *node,
1807 VEC (cgraph_edge_p, heap) **new_edges)
1809 struct cgraph_edge *e;
1810 bool res;
1812 res = update_indirect_edges_after_inlining (cs, node, new_edges);
1814 for (e = node->callees; e; e = e->next_callee)
1815 if (!e->inline_failed)
1816 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
1817 else
1818 update_jump_functions_after_inlining (cs, e);
1820 return res;
1823 /* Update jump functions and call note functions on inlining the call site CS.
1824 CS is expected to lead to a node already cloned by
1825 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
1826 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
1827 created. */
1829 bool
1830 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
1831 VEC (cgraph_edge_p, heap) **new_edges)
1833 /* Do nothing if the preparation phase has not been carried out yet
1834 (i.e. during early inlining). */
1835 if (!ipa_node_params_vector)
1836 return false;
1837 gcc_assert (ipa_edge_args_vector);
1839 return propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
1842 /* Frees all dynamically allocated structures that the argument info points
1843 to. */
1845 void
1846 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
1848 if (args->jump_functions)
1849 ggc_free (args->jump_functions);
1851 memset (args, 0, sizeof (*args));
1854 /* Free all ipa_edge_args structures. */
1856 void
1857 ipa_free_all_edge_args (void)
1859 int i;
1860 struct ipa_edge_args *args;
1862 FOR_EACH_VEC_ELT (ipa_edge_args_t, ipa_edge_args_vector, i, args)
1863 ipa_free_edge_args_substructures (args);
1865 VEC_free (ipa_edge_args_t, gc, ipa_edge_args_vector);
1866 ipa_edge_args_vector = NULL;
1869 /* Frees all dynamically allocated structures that the param info points
1870 to. */
1872 void
1873 ipa_free_node_params_substructures (struct ipa_node_params *info)
1875 VEC_free (ipa_param_descriptor_t, heap, info->descriptors);
1876 free (info->lattices);
1877 /* Lattice values and their sources are deallocated with their allocation
1878 pool. */
1879 VEC_free (tree, heap, info->known_vals);
1880 memset (info, 0, sizeof (*info));
1883 /* Free all ipa_node_params structures. */
1885 void
1886 ipa_free_all_node_params (void)
1888 int i;
1889 struct ipa_node_params *info;
1891 FOR_EACH_VEC_ELT (ipa_node_params_t, ipa_node_params_vector, i, info)
1892 ipa_free_node_params_substructures (info);
1894 VEC_free (ipa_node_params_t, heap, ipa_node_params_vector);
1895 ipa_node_params_vector = NULL;
1898 /* Hook that is called by cgraph.c when an edge is removed. */
1900 static void
1901 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
1903 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
1904 if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
1905 <= (unsigned)cs->uid)
1906 return;
1907 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
1910 /* Hook that is called by cgraph.c when a node is removed. */
1912 static void
1913 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
1915 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
1916 if (VEC_length (ipa_node_params_t, ipa_node_params_vector)
1917 <= (unsigned)node->uid)
1918 return;
1919 ipa_free_node_params_substructures (IPA_NODE_REF (node));
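/* Return a copy, allocated in GC memory, of the array of N jump functions
   pointed to by SRC, or NULL if SRC is NULL.  */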
1922 static struct ipa_jump_func *
1923 duplicate_ipa_jump_func_array (const struct ipa_jump_func * src, size_t n)
1925 struct ipa_jump_func *p;
1927 if (!src)
1928 return NULL;
1930 p = ggc_alloc_vec_ipa_jump_func (n);
1931 memcpy (p, src, n * sizeof (struct ipa_jump_func));
1932 return p;
1935 /* Hook that is called by cgraph.c when an edge is duplicated. */
1937 static void
1938 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
1939 ATTRIBUTE_UNUSED void *data)
1941 struct ipa_edge_args *old_args, *new_args;
1942 int arg_count;
1944 ipa_check_create_edge_args ();
1946 old_args = IPA_EDGE_REF (src);
1947 new_args = IPA_EDGE_REF (dst);
1949 arg_count = ipa_get_cs_argument_count (old_args);
1950 ipa_set_cs_argument_count (new_args, arg_count);
1951 new_args->jump_functions =
1952 duplicate_ipa_jump_func_array (old_args->jump_functions, arg_count);
1954 if (iinlining_processed_edges
1955 && bitmap_bit_p (iinlining_processed_edges, src->uid))
1956 bitmap_set_bit (iinlining_processed_edges, dst->uid);
1959 /* Hook that is called by cgraph.c when a node is duplicated. */
1961 static void
1962 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
1963 ATTRIBUTE_UNUSED void *data)
1965 struct ipa_node_params *old_info, *new_info;
1967 ipa_check_create_node_params ();
1968 old_info = IPA_NODE_REF (src);
1969 new_info = IPA_NODE_REF (dst);
1971 new_info->descriptors = VEC_copy (ipa_param_descriptor_t, heap,
1972 old_info->descriptors);
1973 new_info->lattices = NULL;
1974 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
1976 new_info->called_with_var_arguments = old_info->called_with_var_arguments;
1977 new_info->uses_analysis_done = old_info->uses_analysis_done;
1978 new_info->node_enqueued = old_info->node_enqueued;
1982 /* Analyze a function newly added to the callgraph. */
1984 static void
1985 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
1987 ipa_analyze_node (node);
1990 /* Register our cgraph hooks if they are not already there. */
1992 void
1993 ipa_register_cgraph_hooks (void)
1995 if (!edge_removal_hook_holder)
1996 edge_removal_hook_holder =
1997 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
1998 if (!node_removal_hook_holder)
1999 node_removal_hook_holder =
2000 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
2001 if (!edge_duplication_hook_holder)
2002 edge_duplication_hook_holder =
2003 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
2004 if (!node_duplication_hook_holder)
2005 node_duplication_hook_holder =
2006 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
2007 function_insertion_hook_holder =
2008 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
2011 /* Unregister our cgraph hooks. */
2013 static void
2014 ipa_unregister_cgraph_hooks (void)
2016 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
2017 edge_removal_hook_holder = NULL;
2018 cgraph_remove_node_removal_hook (node_removal_hook_holder);
2019 node_removal_hook_holder = NULL;
2020 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
2021 edge_duplication_hook_holder = NULL;
2022 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
2023 node_duplication_hook_holder = NULL;
2024 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
2025 function_insertion_hook_holder = NULL;
2028 /* Allocate all data structures necessary for indirect inlining. */
2030 void
2031 ipa_create_all_structures_for_iinln (void)
2033 iinlining_processed_edges = BITMAP_ALLOC (NULL);
2036 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2037 longer needed after ipa-cp. */
2039 void
2040 ipa_free_all_structures_after_ipa_cp (void)
2042 if (!flag_indirect_inlining)
2044 ipa_free_all_edge_args ();
2045 ipa_free_all_node_params ();
2046 free_alloc_pool (ipcp_sources_pool);
2047 free_alloc_pool (ipcp_values_pool);
2048 ipa_unregister_cgraph_hooks ();
2052 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2053 longer needed after indirect inlining. */
2055 void
2056 ipa_free_all_structures_after_iinln (void)
2058 BITMAP_FREE (iinlining_processed_edges);
2060 ipa_free_all_edge_args ();
2061 ipa_free_all_node_params ();
2062 ipa_unregister_cgraph_hooks ();
2063 if (ipcp_sources_pool)
2064 free_alloc_pool (ipcp_sources_pool);
2065 if (ipcp_values_pool)
2066 free_alloc_pool (ipcp_values_pool);
2069 /* Print the parameter descriptors of function NODE to file F. */
2072 void
2073 ipa_print_node_params (FILE * f, struct cgraph_node *node)
2075 int i, count;
2076 tree temp;
2077 struct ipa_node_params *info;
2079 if (!node->analyzed)
2080 return;
2081 info = IPA_NODE_REF (node);
2082 fprintf (f, " function %s parameter descriptors:\n",
2083 cgraph_node_name (node));
2084 count = ipa_get_param_count (info);
2085 for (i = 0; i < count; i++)
2087 temp = ipa_get_param (info, i);
2088 if (TREE_CODE (temp) == PARM_DECL)
2089 fprintf (f, " param %d : %s", i,
2090 (DECL_NAME (temp)
2091 ? (*lang_hooks.decl_printable_name) (temp, 2)
2092 : "(unnamed)"));
2093 if (ipa_is_param_used (info, i))
2094 fprintf (f, " used");
2095 fprintf (f, "\n");
2099 /* Print the parameter descriptors of all functions in the
2100 callgraph to file F. */
2102 void
2103 ipa_print_all_params (FILE * f)
2105 struct cgraph_node *node;
2107 fprintf (f, "\nFunction parameters:\n");
2108 for (node = cgraph_nodes; node; node = node->next)
2109 ipa_print_node_params (f, node);
2112 /* Return a heap allocated vector containing formal parameters of FNDECL. */
2114 VEC(tree, heap) *
2115 ipa_get_vector_of_formal_parms (tree fndecl)
2117 VEC(tree, heap) *args;
2118 int count;
2119 tree parm;
2121 count = count_formal_params (fndecl);
2122 args = VEC_alloc (tree, heap, count);
2123 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
2124 VEC_quick_push (tree, args, parm);
2126 return args;
2129 /* Return a heap allocated vector containing types of formal parameters of
2130 function type FNTYPE. */
2132 static inline VEC(tree, heap) *
2133 get_vector_of_formal_parm_types (tree fntype)
2135 VEC(tree, heap) *types;
2136 int count = 0;
2137 tree t;
2139 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2140 count++;
2142 types = VEC_alloc (tree, heap, count);
2143 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2144 VEC_quick_push (tree, types, TREE_VALUE (t));
2146 return types;
2149 /* Modify the function declaration FNDECL and its type according to the plan in
2150 ADJUSTMENTS. Also set the base field of the individual adjustment structures
2151 to point to the actual parameters being modified, which are determined by
2152 their base_index fields. */
2154 void
2155 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
2156 const char *synth_parm_prefix)
2158 VEC(tree, heap) *oparms, *otypes;
2159 tree orig_type, new_type = NULL;
2160 tree old_arg_types, t, new_arg_types = NULL;
2161 tree parm, *link = &DECL_ARGUMENTS (fndecl);
2162 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2163 tree new_reversed = NULL;
2164 bool care_for_types, last_parm_void;
2166 if (!synth_parm_prefix)
2167 synth_parm_prefix = "SYNTH";
2169 oparms = ipa_get_vector_of_formal_parms (fndecl);
2170 orig_type = TREE_TYPE (fndecl);
2171 old_arg_types = TYPE_ARG_TYPES (orig_type);
2173 /* The following test is an ugly hack; some functions simply don't have any
2174 arguments in their type. This is probably a bug, but well... */
2175 care_for_types = (old_arg_types != NULL_TREE);
2176 if (care_for_types)
2178 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
2179 == void_type_node);
2180 otypes = get_vector_of_formal_parm_types (orig_type);
2181 if (last_parm_void)
2182 gcc_assert (VEC_length (tree, oparms) + 1 == VEC_length (tree, otypes));
2183 else
2184 gcc_assert (VEC_length (tree, oparms) == VEC_length (tree, otypes));
2186 else
2188 last_parm_void = false;
2189 otypes = NULL;
2192 for (i = 0; i < len; i++)
2194 struct ipa_parm_adjustment *adj;
2195 gcc_assert (link);
2197 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2198 parm = VEC_index (tree, oparms, adj->base_index);
2199 adj->base = parm;
2201 if (adj->copy_param)
2203 if (care_for_types)
2204 new_arg_types = tree_cons (NULL_TREE, VEC_index (tree, otypes,
2205 adj->base_index),
2206 new_arg_types);
2207 *link = parm;
2208 link = &DECL_CHAIN (parm);
2210 else if (!adj->remove_param)
2212 tree new_parm;
2213 tree ptype;
2215 if (adj->by_ref)
2216 ptype = build_pointer_type (adj->type);
2217 else
2218 ptype = adj->type;
2220 if (care_for_types)
2221 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
2223 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
2224 ptype);
2225 DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);
2227 DECL_ARTIFICIAL (new_parm) = 1;
2228 DECL_ARG_TYPE (new_parm) = ptype;
2229 DECL_CONTEXT (new_parm) = fndecl;
2230 TREE_USED (new_parm) = 1;
2231 DECL_IGNORED_P (new_parm) = 1;
2232 layout_decl (new_parm, 0);
2234 add_referenced_var (new_parm);
2235 mark_sym_for_renaming (new_parm);
2236 adj->base = parm;
2237 adj->reduction = new_parm;
2239 *link = new_parm;
2241 link = &DECL_CHAIN (new_parm);
2245 *link = NULL_TREE;
2247 if (care_for_types)
2249 new_reversed = nreverse (new_arg_types);
2250 if (last_parm_void)
2252 if (new_reversed)
2253 TREE_CHAIN (new_arg_types) = void_list_node;
2254 else
2255 new_reversed = void_list_node;
2259 /* Use copy_node to preserve as much as possible from the original type
2260 (debug info, attribute lists etc.).
2261 The exception is METHOD_TYPEs, which must have a THIS argument; when we
2262 are asked to remove it, we need to build a new FUNCTION_TYPE
2263 instead. */
2264 if (TREE_CODE (orig_type) != METHOD_TYPE
2265 || (VEC_index (ipa_parm_adjustment_t, adjustments, 0)->copy_param
2266 && VEC_index (ipa_parm_adjustment_t, adjustments, 0)->base_index == 0))
2268 new_type = build_distinct_type_copy (orig_type);
2269 TYPE_ARG_TYPES (new_type) = new_reversed;
2271 else
2273 new_type
2274 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
2275 new_reversed));
2276 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
2277 DECL_VINDEX (fndecl) = NULL_TREE;
2280 /* When the signature changes, we need to clear the builtin info. */
2281 if (DECL_BUILT_IN (fndecl))
2283 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
2284 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
2287 /* This is a new type, not a copy of an old type. Need to reassociate
2288 variants. We can handle everything except the main variant lazily. */
2289 t = TYPE_MAIN_VARIANT (orig_type);
2290 if (orig_type != t)
2292 TYPE_MAIN_VARIANT (new_type) = t;
2293 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
2294 TYPE_NEXT_VARIANT (t) = new_type;
2296 else
2298 TYPE_MAIN_VARIANT (new_type) = new_type;
2299 TYPE_NEXT_VARIANT (new_type) = NULL;
2302 TREE_TYPE (fndecl) = new_type;
2303 DECL_VIRTUAL_P (fndecl) = 0;
2304 if (otypes)
2305 VEC_free (tree, heap, otypes);
2306 VEC_free (tree, heap, oparms);
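/* Editorial illustration (not part of the original source): assuming a
   hypothetical original declaration

     void foo (int a, struct big *p);

   and an adjustment vector whose first entry copies parameter 0 and whose
   second entry (copy_param and remove_param both false, by_ref false,
   type "int") replaces parameter 1 with a scalar taken from inside *P,
   the function above rewrites the declaration to roughly

     void foo (int a, int <synthetic>);

   where the second parameter is a new artificial PARM_DECL whose name is
   generated from SYNTH_PARM_PREFIX by create_tmp_var_name, and
   TYPE_ARG_TYPES of the function type is rebuilt accordingly.  */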
2309 /* Modify the actual arguments of the call statement STMT as indicated in
2310 ADJUSTMENTS. If this is a directly recursive call, CS must be NULL.
2311 Otherwise it must contain the corresponding call graph edge. */
2313 void
2314 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
2315 ipa_parm_adjustment_vec adjustments)
2317 VEC(tree, heap) *vargs;
2318 VEC(tree, gc) **debug_args = NULL;
2319 gimple new_stmt;
2320 gimple_stmt_iterator gsi;
2321 tree callee_decl;
2322 int i, len;
2324 len = VEC_length (ipa_parm_adjustment_t, adjustments);
2325 vargs = VEC_alloc (tree, heap, len);
2326 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
2328 gsi = gsi_for_stmt (stmt);
2329 for (i = 0; i < len; i++)
2331 struct ipa_parm_adjustment *adj;
2333 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2335 if (adj->copy_param)
2337 tree arg = gimple_call_arg (stmt, adj->base_index);
2339 VEC_quick_push (tree, vargs, arg);
2341 else if (!adj->remove_param)
2343 tree expr, base, off;
2344 location_t loc;
2346 /* When we create a new parameter out of the value of the old one, we can
2347 do the following kinds of transformations:
2349 - A scalar passed by reference is converted to a scalar passed by
2350 value. (adj->by_ref is false and the type of the original
2351 actual argument is a pointer to a scalar).
2353 - A part of an aggregate is passed instead of the whole aggregate.
2354 The part can be passed either by value or by reference, this is
2355 determined by value of adj->by_ref. Moreover, the code below
2356 handles both situations when the original aggregate is passed by
2357 value (its type is not a pointer) and when it is passed by
2358 reference (it is a pointer to an aggregate).
2360 When the new argument is passed by reference (adj->by_ref is true)
2361 it must be a part of an aggregate and therefore we form it by
2362 simply taking the address of a reference inside the original
2363 aggregate. */
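/* Editorial illustration (not part of the original source): for a
   hypothetical adjustment that replaces an aggregate parameter with an
   "int" field located ADJ->offset bits into the aggregate, a call

     foo (i, &s);

   is rewritten below by building a MEM_REF from the address of S plus the
   corresponding byte offset and gimplifying it, i.e. roughly

     tmp = MEM[(int *) &s + <adj->offset / BITS_PER_UNIT>];
     foo (i, tmp);

   where "foo", "i", "s" and "tmp" are made-up names and TMP is the
   temporary created by force_gimple_operand_gsi.  */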
2365 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
2366 base = gimple_call_arg (stmt, adj->base_index);
2367 loc = EXPR_LOCATION (base);
2369 if (TREE_CODE (base) != ADDR_EXPR
2370 && POINTER_TYPE_P (TREE_TYPE (base)))
2371 off = build_int_cst (adj->alias_ptr_type,
2372 adj->offset / BITS_PER_UNIT);
2373 else
2375 HOST_WIDE_INT base_offset;
2376 tree prev_base;
2378 if (TREE_CODE (base) == ADDR_EXPR)
2379 base = TREE_OPERAND (base, 0);
2380 prev_base = base;
2381 base = get_addr_base_and_unit_offset (base, &base_offset);
2382 /* Aggregate arguments can have non-invariant addresses. */
2383 if (!base)
2385 base = build_fold_addr_expr (prev_base);
2386 off = build_int_cst (adj->alias_ptr_type,
2387 adj->offset / BITS_PER_UNIT);
2389 else if (TREE_CODE (base) == MEM_REF)
2391 off = build_int_cst (adj->alias_ptr_type,
2392 base_offset
2393 + adj->offset / BITS_PER_UNIT);
2394 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
2395 off);
2396 base = TREE_OPERAND (base, 0);
2398 else
2400 off = build_int_cst (adj->alias_ptr_type,
2401 base_offset
2402 + adj->offset / BITS_PER_UNIT);
2403 base = build_fold_addr_expr (base);
2407 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
2408 if (adj->by_ref)
2409 expr = build_fold_addr_expr (expr);
2411 expr = force_gimple_operand_gsi (&gsi, expr,
2412 adj->by_ref
2413 || is_gimple_reg_type (adj->type),
2414 NULL, true, GSI_SAME_STMT);
2415 VEC_quick_push (tree, vargs, expr);
2417 if (!adj->copy_param && MAY_HAVE_DEBUG_STMTS)
2419 unsigned int ix;
2420 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
2421 gimple def_temp;
2423 arg = gimple_call_arg (stmt, adj->base_index);
2424 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
2426 if (!fold_convertible_p (TREE_TYPE (origin), arg))
2427 continue;
2428 arg = fold_convert_loc (gimple_location (stmt),
2429 TREE_TYPE (origin), arg);
2431 if (debug_args == NULL)
2432 debug_args = decl_debug_args_insert (callee_decl);
2433 for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl); ix += 2)
2434 if (ddecl == origin)
2436 ddecl = VEC_index (tree, *debug_args, ix + 1);
2437 break;
2439 if (ddecl == NULL)
2441 ddecl = make_node (DEBUG_EXPR_DECL);
2442 DECL_ARTIFICIAL (ddecl) = 1;
2443 TREE_TYPE (ddecl) = TREE_TYPE (origin);
2444 DECL_MODE (ddecl) = DECL_MODE (origin);
2446 VEC_safe_push (tree, gc, *debug_args, origin);
2447 VEC_safe_push (tree, gc, *debug_args, ddecl);
2449 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg),
2450 stmt);
2451 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
2455 if (dump_file && (dump_flags & TDF_DETAILS))
2457 fprintf (dump_file, "replacing stmt:");
2458 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
2461 new_stmt = gimple_build_call_vec (callee_decl, vargs);
2462 VEC_free (tree, heap, vargs);
2463 if (gimple_call_lhs (stmt))
2464 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
2466 gimple_set_block (new_stmt, gimple_block (stmt));
2467 if (gimple_has_location (stmt))
2468 gimple_set_location (new_stmt, gimple_location (stmt));
2469 gimple_call_copy_flags (new_stmt, stmt);
2470 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
2472 if (dump_file && (dump_flags & TDF_DETAILS))
2474 fprintf (dump_file, "with stmt:");
2475 print_gimple_stmt (dump_file, new_stmt, 0, 0);
2476 fprintf (dump_file, "\n");
2478 gsi_replace (&gsi, new_stmt, true);
2479 if (cs)
2480 cgraph_set_call_stmt (cs, new_stmt);
2481 update_ssa (TODO_update_ssa);
2482 free_dominance_info (CDI_DOMINATORS);
2485 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
2487 static bool
2488 index_in_adjustments_multiple_times_p (int base_index,
2489 ipa_parm_adjustment_vec adjustments)
2491 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2492 bool one = false;
2494 for (i = 0; i < len; i++)
2496 struct ipa_parm_adjustment *adj;
2497 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2499 if (adj->base_index == base_index)
2501 if (one)
2502 return true;
2503 else
2504 one = true;
2507 return false;
2511 /* Return adjustments that should have the same effect on function parameters
2512 and call arguments as if they were first changed according to adjustments in
2513 INNER and then by adjustments in OUTER. */
2515 ipa_parm_adjustment_vec
2516 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
2517 ipa_parm_adjustment_vec outer)
2519 int i, outlen = VEC_length (ipa_parm_adjustment_t, outer);
2520 int inlen = VEC_length (ipa_parm_adjustment_t, inner);
2521 int removals = 0;
2522 ipa_parm_adjustment_vec adjustments, tmp;
2524 tmp = VEC_alloc (ipa_parm_adjustment_t, heap, inlen);
2525 for (i = 0; i < inlen; i++)
2527 struct ipa_parm_adjustment *n;
2528 n = VEC_index (ipa_parm_adjustment_t, inner, i);
2530 if (n->remove_param)
2531 removals++;
2532 else
2533 VEC_quick_push (ipa_parm_adjustment_t, tmp, n);
2536 adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, outlen + removals);
2537 for (i = 0; i < outlen; i++)
2539 struct ipa_parm_adjustment *r;
2540 struct ipa_parm_adjustment *out = VEC_index (ipa_parm_adjustment_t,
2541 outer, i);
2542 struct ipa_parm_adjustment *in = VEC_index (ipa_parm_adjustment_t, tmp,
2543 out->base_index);
2545 gcc_assert (!in->remove_param);
2546 if (out->remove_param)
2548 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
2550 r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
2551 memset (r, 0, sizeof (*r));
2552 r->remove_param = true;
2554 continue;
2557 r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
2558 memset (r, 0, sizeof (*r));
2559 r->base_index = in->base_index;
2560 r->type = out->type;
2562 /* FIXME: Create nonlocal value too. */
2564 if (in->copy_param && out->copy_param)
2565 r->copy_param = true;
2566 else if (in->copy_param)
2567 r->offset = out->offset;
2568 else if (out->copy_param)
2569 r->offset = in->offset;
2570 else
2571 r->offset = in->offset + out->offset;
2574 for (i = 0; i < inlen; i++)
2576 struct ipa_parm_adjustment *n = VEC_index (ipa_parm_adjustment_t,
2577 inner, i);
2579 if (n->remove_param)
2580 VEC_quick_push (ipa_parm_adjustment_t, adjustments, n);
2583 VEC_free (ipa_parm_adjustment_t, heap, tmp);
2584 return adjustments;
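/* Editorial illustration (not part of the original source): assume a
   hypothetical INNER that copies parameter 0 and removes parameter 1, and
   an OUTER with a single entry whose base_index is 0 (indices in OUTER
   refer to the already-adjusted parameter list held in TMP above) and
   which replaces that parameter with a component at offset 32.  The
   combined vector then contains one adjustment with base_index 0, offset
   32 and the type taken from OUTER, followed by the removal of the
   original parameter 1.  */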
2587 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a human-friendly
2588 way, assuming they are meant to be applied to FNDECL. */
2590 void
2591 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
2592 tree fndecl)
2594 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2595 bool first = true;
2596 VEC(tree, heap) *parms = ipa_get_vector_of_formal_parms (fndecl);
2598 fprintf (file, "IPA param adjustments: ");
2599 for (i = 0; i < len; i++)
2601 struct ipa_parm_adjustment *adj;
2602 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2604 if (!first)
2605 fprintf (file, " ");
2606 else
2607 first = false;
2609 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
2610 print_generic_expr (file, VEC_index (tree, parms, adj->base_index), 0);
2611 if (adj->base)
2613 fprintf (file, ", base: ");
2614 print_generic_expr (file, adj->base, 0);
2616 if (adj->reduction)
2618 fprintf (file, ", reduction: ");
2619 print_generic_expr (file, adj->reduction, 0);
2621 if (adj->new_ssa_base)
2623 fprintf (file, ", new_ssa_base: ");
2624 print_generic_expr (file, adj->new_ssa_base, 0);
2627 if (adj->copy_param)
2628 fprintf (file, ", copy_param");
2629 else if (adj->remove_param)
2630 fprintf (file, ", remove_param");
2631 else
2632 fprintf (file, ", offset %li", (long) adj->offset);
2633 if (adj->by_ref)
2634 fprintf (file, ", by_ref");
2635 print_node_brief (file, ", type: ", adj->type, 0);
2636 fprintf (file, "\n");
2638 VEC_free (tree, heap, parms);
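/* Editorial note (not part of the original source): with the format strings
   above, the dump line for a copied first parameter whose PARM_DECL prints
   as "a" starts roughly with "IPA param adjustments: 0. base_index: 0 - a"
   and ends with ", copy_param" followed by print_node_brief's rendering of
   the adjustment type.  */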
2641 /* Stream out jump function JUMP_FUNC to OB. */
2643 static void
2644 ipa_write_jump_function (struct output_block *ob,
2645 struct ipa_jump_func *jump_func)
2647 streamer_write_uhwi (ob, jump_func->type);
2649 switch (jump_func->type)
2651 case IPA_JF_UNKNOWN:
2652 break;
2653 case IPA_JF_KNOWN_TYPE:
2654 stream_write_tree (ob, jump_func->value.base_binfo, true);
2655 break;
2656 case IPA_JF_CONST:
2657 stream_write_tree (ob, jump_func->value.constant, true);
2658 break;
2659 case IPA_JF_PASS_THROUGH:
2660 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
2661 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
2662 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
2663 break;
2664 case IPA_JF_ANCESTOR:
2665 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
2666 stream_write_tree (ob, jump_func->value.ancestor.type, true);
2667 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
2668 break;
2669 case IPA_JF_CONST_MEMBER_PTR:
2670 stream_write_tree (ob, jump_func->value.member_cst.pfn, true);
2671 stream_write_tree (ob, jump_func->value.member_cst.delta, false);
2672 break;
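/* Editorial note (not part of the original source): the record produced by
   the function above starts with a uhwi holding the jump function type;
   e.g. for IPA_JF_PASS_THROUGH it is followed by the operand tree, the
   formal_id as a uhwi and the operation code as a uhwi.  The reader below
   consumes the fields in exactly the same order.  */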
2676 /* Read in jump function JUMP_FUNC from IB. */
2678 static void
2679 ipa_read_jump_function (struct lto_input_block *ib,
2680 struct ipa_jump_func *jump_func,
2681 struct data_in *data_in)
2683 jump_func->type = (enum jump_func_type) streamer_read_uhwi (ib);
2685 switch (jump_func->type)
2687 case IPA_JF_UNKNOWN:
2688 break;
2689 case IPA_JF_KNOWN_TYPE:
2690 jump_func->value.base_binfo = stream_read_tree (ib, data_in);
2691 break;
2692 case IPA_JF_CONST:
2693 jump_func->value.constant = stream_read_tree (ib, data_in);
2694 break;
2695 case IPA_JF_PASS_THROUGH:
2696 jump_func->value.pass_through.operand = stream_read_tree (ib, data_in);
2697 jump_func->value.pass_through.formal_id = streamer_read_uhwi (ib);
2698 jump_func->value.pass_through.operation
2699 = (enum tree_code) streamer_read_uhwi (ib);
2700 break;
2701 case IPA_JF_ANCESTOR:
2702 jump_func->value.ancestor.offset = streamer_read_uhwi (ib);
2703 jump_func->value.ancestor.type = stream_read_tree (ib, data_in);
2704 jump_func->value.ancestor.formal_id = streamer_read_uhwi (ib);
2705 break;
2706 case IPA_JF_CONST_MEMBER_PTR:
2707 jump_func->value.member_cst.pfn = stream_read_tree (ib, data_in);
2708 jump_func->value.member_cst.delta = stream_read_tree (ib, data_in);
2709 break;
2713 /* Stream out to OB the parts of cgraph_indirect_call_info corresponding to CS
2714 that are relevant to indirect inlining. */
2716 static void
2717 ipa_write_indirect_edge_info (struct output_block *ob,
2718 struct cgraph_edge *cs)
2720 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2721 struct bitpack_d bp;
2723 streamer_write_hwi (ob, ii->param_index);
2724 streamer_write_hwi (ob, ii->anc_offset);
2725 bp = bitpack_create (ob->main_stream);
2726 bp_pack_value (&bp, ii->polymorphic, 1);
2727 streamer_write_bitpack (&bp);
2729 if (ii->polymorphic)
2731 streamer_write_hwi (ob, ii->otr_token);
2732 stream_write_tree (ob, ii->otr_type, true);
2736 /* Read in from IB the parts of cgraph_indirect_call_info corresponding to CS
2737 that are relevant to indirect inlining. */
2739 static void
2740 ipa_read_indirect_edge_info (struct lto_input_block *ib,
2741 struct data_in *data_in ATTRIBUTE_UNUSED,
2742 struct cgraph_edge *cs)
2744 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2745 struct bitpack_d bp;
2747 ii->param_index = (int) streamer_read_hwi (ib);
2748 ii->anc_offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
2749 bp = streamer_read_bitpack (ib);
2750 ii->polymorphic = bp_unpack_value (&bp, 1);
2751 if (ii->polymorphic)
2753 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
2754 ii->otr_type = stream_read_tree (ib, data_in);
2758 /* Stream out NODE info to OB. */
2760 static void
2761 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
2763 int node_ref;
2764 lto_cgraph_encoder_t encoder;
2765 struct ipa_node_params *info = IPA_NODE_REF (node);
2766 int j;
2767 struct cgraph_edge *e;
2768 struct bitpack_d bp;
2770 encoder = ob->decl_state->cgraph_node_encoder;
2771 node_ref = lto_cgraph_encoder_encode (encoder, node);
2772 streamer_write_uhwi (ob, node_ref);
2774 bp = bitpack_create (ob->main_stream);
2775 gcc_assert (info->uses_analysis_done
2776 || ipa_get_param_count (info) == 0);
2777 gcc_assert (!info->node_enqueued);
2778 gcc_assert (!info->ipcp_orig_node);
2779 for (j = 0; j < ipa_get_param_count (info); j++)
2780 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
2781 streamer_write_bitpack (&bp);
2782 for (e = node->callees; e; e = e->next_callee)
2784 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2786 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
2787 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
2788 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
2790 for (e = node->indirect_calls; e; e = e->next_callee)
2792 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2794 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
2795 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
2796 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
2797 ipa_write_indirect_edge_info (ob, e);
2801 /* Stream in NODE info from IB. */
2803 static void
2804 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
2805 struct data_in *data_in)
2807 struct ipa_node_params *info = IPA_NODE_REF (node);
2808 int k;
2809 struct cgraph_edge *e;
2810 struct bitpack_d bp;
2812 ipa_initialize_node_params (node);
2814 bp = streamer_read_bitpack (ib);
2815 if (ipa_get_param_count (info) != 0)
2816 info->uses_analysis_done = true;
2817 info->node_enqueued = false;
2818 for (k = 0; k < ipa_get_param_count (info); k++)
2819 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
2820 for (e = node->callees; e; e = e->next_callee)
2822 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2823 int count = streamer_read_uhwi (ib);
2825 ipa_set_cs_argument_count (args, count);
2826 if (!count)
2827 continue;
2829 args->jump_functions = ggc_alloc_cleared_vec_ipa_jump_func
2830 (ipa_get_cs_argument_count (args));
2831 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
2832 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), data_in);
2834 for (e = node->indirect_calls; e; e = e->next_callee)
2836 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2837 int count = streamer_read_uhwi (ib);
2839 ipa_set_cs_argument_count (args, count);
2840 if (count)
2842 args->jump_functions = ggc_alloc_cleared_vec_ipa_jump_func
2843 (ipa_get_cs_argument_count (args));
2844 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
2845 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), data_in);
2847 ipa_read_indirect_edge_info (ib, data_in, e);
2851 /* Write jump functions for nodes in SET. */
2853 void
2854 ipa_prop_write_jump_functions (cgraph_node_set set)
2856 struct cgraph_node *node;
2857 struct output_block *ob;
2858 unsigned int count = 0;
2859 cgraph_node_set_iterator csi;
2861 if (!ipa_node_params_vector)
2862 return;
2864 ob = create_output_block (LTO_section_jump_functions);
2865 ob->cgraph_node = NULL;
2866 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
2868 node = csi_node (csi);
2869 if (cgraph_function_with_gimple_body_p (node)
2870 && IPA_NODE_REF (node) != NULL)
2871 count++;
2874 streamer_write_uhwi (ob, count);
2876 /* Process all of the functions. */
2877 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
2879 node = csi_node (csi);
2880 if (cgraph_function_with_gimple_body_p (node)
2881 && IPA_NODE_REF (node) != NULL)
2882 ipa_write_node_info (ob, node);
2884 streamer_write_char_stream (ob->main_stream, 0);
2885 produce_asm (ob, NULL);
2886 destroy_output_block (ob);
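/* Editorial note (not part of the original source): the section written
   above thus consists of a uhwi count of the nodes that have gimple bodies
   and IPA information, one ipa_write_node_info record per such node, and a
   terminating zero byte before produce_asm finalizes the section.  */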
2889 /* Read section in file FILE_DATA of length LEN with data DATA. */
2891 static void
2892 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
2893 size_t len)
2895 const struct lto_function_header *header =
2896 (const struct lto_function_header *) data;
2897 const int32_t cfg_offset = sizeof (struct lto_function_header);
2898 const int32_t main_offset = cfg_offset + header->cfg_size;
2899 const int32_t string_offset = main_offset + header->main_size;
2900 struct data_in *data_in;
2901 struct lto_input_block ib_main;
2902 unsigned int i;
2903 unsigned int count;
2905 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
2906 header->main_size);
2908 data_in =
2909 lto_data_in_create (file_data, (const char *) data + string_offset,
2910 header->string_size, NULL);
2911 count = streamer_read_uhwi (&ib_main);
2913 for (i = 0; i < count; i++)
2915 unsigned int index;
2916 struct cgraph_node *node;
2917 lto_cgraph_encoder_t encoder;
2919 index = streamer_read_uhwi (&ib_main);
2920 encoder = file_data->cgraph_node_encoder;
2921 node = lto_cgraph_encoder_deref (encoder, index);
2922 gcc_assert (node->analyzed);
2923 ipa_read_node_info (&ib_main, node, data_in);
2925 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
2926 len);
2927 lto_data_in_delete (data_in);
2930 /* Read ipcp jump functions. */
2932 void
2933 ipa_prop_read_jump_functions (void)
2935 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
2936 struct lto_file_decl_data *file_data;
2937 unsigned int j = 0;
2939 ipa_check_create_node_params ();
2940 ipa_check_create_edge_args ();
2941 ipa_register_cgraph_hooks ();
2943 while ((file_data = file_data_vec[j++]))
2945 size_t len;
2946 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
2948 if (data)
2949 ipa_prop_read_section (file_data, data, len);
2953 /* After merging units, we can get mismatches in argument counts.
2954 Decl merging might also have rendered parameter lists obsolete.
2955 Finally, compute the called_with_variable_arg info. */
2957 void
2958 ipa_update_after_lto_read (void)
2960 struct cgraph_node *node;
2961 struct cgraph_edge *cs;
2963 ipa_check_create_node_params ();
2964 ipa_check_create_edge_args ();
2966 for (node = cgraph_nodes; node; node = node->next)
2967 if (node->analyzed)
2968 ipa_initialize_node_params (node);
2970 for (node = cgraph_nodes; node; node = node->next)
2971 if (node->analyzed)
2972 for (cs = node->callees; cs; cs = cs->next_callee)
2974 struct cgraph_node *callee;
2976 callee = cgraph_function_or_thunk_node (cs->callee, NULL);
2977 if (ipa_get_cs_argument_count (IPA_EDGE_REF (cs))
2978 != ipa_get_param_count (IPA_NODE_REF (callee)))
2979 ipa_set_called_with_variable_arg (IPA_NODE_REF (callee));