1 /* Interprocedural analyses.
2 Copyright (C) 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree.h"
25 #include "langhooks.h"
26 #include "ggc.h"
27 #include "target.h"
28 #include "cgraph.h"
29 #include "ipa-prop.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-inline.h"
33 #include "gimple.h"
34 #include "flags.h"
35 #include "timevar.h"
36 #include "flags.h"
37 #include "diagnostic.h"
38 #include "tree-pretty-print.h"
39 #include "gimple-pretty-print.h"
40 #include "lto-streamer.h"
43 /* Intermediate information about a parameter that is only useful during the
44 run of ipa_analyze_node and is not kept afterwards. */
46 struct param_analysis_info
48 bool modified;
49 bitmap visited_statements;
52 /* Vector where the parameter infos are actually stored. */
53 VEC (ipa_node_params_t, heap) *ipa_node_params_vector;
54 /* Vector where the edge argument infos are actually stored. */
55 VEC (ipa_edge_args_t, gc) *ipa_edge_args_vector;
57 /* Bitmap with all UIDs of call graph edges that have been already processed
58 by indirect inlining. */
59 static bitmap iinlining_processed_edges;
61 /* Holders of ipa cgraph hooks: */
62 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
63 static struct cgraph_node_hook_list *node_removal_hook_holder;
64 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
65 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
66 static struct cgraph_node_hook_list *function_insertion_hook_holder;
68 /* Add cgraph NODE described by INFO to the worklist WL regardless of whether
69 it is in one or not. It should almost never be used directly, as opposed to
70 ipa_push_func_to_list. */
72 void
73 ipa_push_func_to_list_1 (struct ipa_func_list **wl,
74 struct cgraph_node *node,
75 struct ipa_node_params *info)
77 struct ipa_func_list *temp;
79 info->node_enqueued = 1;
80 temp = XCNEW (struct ipa_func_list);
81 temp->node = node;
82 temp->next = *wl;
83 *wl = temp;
86 /* Initialize worklist to contain all functions. */
88 struct ipa_func_list *
89 ipa_init_func_list (void)
91 struct cgraph_node *node;
92 struct ipa_func_list * wl;
94 wl = NULL;
95 for (node = cgraph_nodes; node; node = node->next)
96 if (node->analyzed)
98 struct ipa_node_params *info = IPA_NODE_REF (node);
99 /* Unreachable nodes should have been eliminated before ipcp and
100 inlining. */
101 gcc_assert (node->needed || node->reachable);
102 ipa_push_func_to_list_1 (&wl, node, info);
105 return wl;
108 /* Remove a function from the worklist WL and return it. */
110 struct cgraph_node *
111 ipa_pop_func_from_list (struct ipa_func_list **wl)
113 struct ipa_node_params *info;
114 struct ipa_func_list *first;
115 struct cgraph_node *node;
117 first = *wl;
118 *wl = (*wl)->next;
119 node = first->node;
120 free (first);
122 info = IPA_NODE_REF (node);
123 info->node_enqueued = 0;
124 return node;
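/* A typical worklist-driven pass is expected to use the functions above
   roughly as follows (an illustrative sketch only, not taken from any
   particular caller):

     struct ipa_func_list *wl = ipa_init_func_list ();
     while (wl)
       {
         struct cgraph_node *node = ipa_pop_func_from_list (&wl);
         ... process NODE, re-enqueueing nodes with ipa_push_func_to_list ...
       }
*/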
127 /* Return index of the formal whose tree is PTREE in function which corresponds
128 to INFO. */
130 int
131 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
133 int i, count;
135 count = ipa_get_param_count (info);
136 for (i = 0; i < count; i++)
137 if (ipa_get_param(info, i) == ptree)
138 return i;
140 return -1;
143 /* Populate the param_decl field in parameter descriptors of INFO that
144 corresponds to NODE. */
146 static void
147 ipa_populate_param_decls (struct cgraph_node *node,
148 struct ipa_node_params *info)
150 tree fndecl;
151 tree fnargs;
152 tree parm;
153 int param_num;
155 fndecl = node->decl;
156 fnargs = DECL_ARGUMENTS (fndecl);
157 param_num = 0;
158 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
160 info->params[param_num].decl = parm;
161 param_num++;
165 /* Return how many formal parameters FNDECL has. */
167 static inline int
168 count_formal_params_1 (tree fndecl)
170 tree parm;
171 int count = 0;
173 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
174 count++;
176 return count;
179 /* Count number of formal parameters in NODE. Store the result in the
180 appropriate field of INFO. */
182 static void
183 ipa_count_formal_params (struct cgraph_node *node,
184 struct ipa_node_params *info)
186 int param_num;
188 param_num = count_formal_params_1 (node->decl);
189 ipa_set_param_count (info, param_num);
192 /* Initialize the ipa_node_params structure associated with NODE by counting
193 the function parameters, creating the descriptors and populating their
194 param_decls. */
196 void
197 ipa_initialize_node_params (struct cgraph_node *node)
199 struct ipa_node_params *info = IPA_NODE_REF (node);
201 if (!info->params)
203 ipa_count_formal_params (node, info);
204 info->params = XCNEWVEC (struct ipa_param_descriptor,
205 ipa_get_param_count (info));
206 ipa_populate_param_decls (node, info);
210 /* Count number of arguments callsite CS has and store it in
211 ipa_edge_args structure corresponding to this callsite. */
213 static void
214 ipa_count_arguments (struct cgraph_edge *cs)
216 gimple stmt;
217 int arg_num;
219 stmt = cs->call_stmt;
220 gcc_assert (is_gimple_call (stmt));
221 arg_num = gimple_call_num_args (stmt);
222 if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
223 <= (unsigned) cgraph_edge_max_uid)
224 VEC_safe_grow_cleared (ipa_edge_args_t, gc,
225 ipa_edge_args_vector, cgraph_edge_max_uid + 1);
226 ipa_set_cs_argument_count (IPA_EDGE_REF (cs), arg_num);
229 /* Print the jump functions associated with call graph edge CS to file F. */
231 static void
232 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
234 int i, count;
236 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
237 for (i = 0; i < count; i++)
239 struct ipa_jump_func *jump_func;
240 enum jump_func_type type;
242 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
243 type = jump_func->type;
245 fprintf (f, " param %d: ", i);
246 if (type == IPA_JF_UNKNOWN)
247 fprintf (f, "UNKNOWN\n");
248 else if (type == IPA_JF_KNOWN_TYPE)
250 tree binfo_type = TREE_TYPE (jump_func->value.base_binfo);
251 fprintf (f, "KNOWN TYPE, type in binfo is: ");
252 print_generic_expr (f, binfo_type, 0);
253 fprintf (f, " (%u)\n", TYPE_UID (binfo_type));
255 else if (type == IPA_JF_CONST)
257 tree val = jump_func->value.constant;
258 fprintf (f, "CONST: ");
259 print_generic_expr (f, val, 0);
260 if (TREE_CODE (val) == ADDR_EXPR
261 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
263 fprintf (f, " -> ");
264 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
265 0);
267 fprintf (f, "\n");
269 else if (type == IPA_JF_CONST_MEMBER_PTR)
271 fprintf (f, "CONST MEMBER PTR: ");
272 print_generic_expr (f, jump_func->value.member_cst.pfn, 0);
273 fprintf (f, ", ");
274 print_generic_expr (f, jump_func->value.member_cst.delta, 0);
275 fprintf (f, "\n");
277 else if (type == IPA_JF_PASS_THROUGH)
279 fprintf (f, "PASS THROUGH: ");
280 fprintf (f, "%d, op %s ",
281 jump_func->value.pass_through.formal_id,
282 tree_code_name[(int)
283 jump_func->value.pass_through.operation]);
284 if (jump_func->value.pass_through.operation != NOP_EXPR)
285 print_generic_expr (f,
286 jump_func->value.pass_through.operand, 0);
287 fprintf (f, "\n");
289 else if (type == IPA_JF_ANCESTOR)
291 fprintf (f, "ANCESTOR: ");
292 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
293 jump_func->value.ancestor.formal_id,
294 jump_func->value.ancestor.offset);
295 print_generic_expr (f, jump_func->value.ancestor.type, 0);
296 fprintf (f, "\n");
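/* For illustration, the output of the function above for one call site might
   look like the following (the indices and values are made up):

     param 0: PASS THROUGH: 1, op nop_expr
     param 1: CONST: 4
     param 2: UNKNOWN
*/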
302 /* Print the jump functions of all arguments on all call graph edges going from
303 NODE to file F. */
305 void
306 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
308 struct cgraph_edge *cs;
309 int i;
311 fprintf (f, " Jump functions of caller %s:\n", cgraph_node_name (node));
312 for (cs = node->callees; cs; cs = cs->next_callee)
314 if (!ipa_edge_args_info_available_for_edge_p (cs))
315 continue;
317 fprintf (f, " callsite %s/%i -> %s/%i : \n",
318 cgraph_node_name (node), node->uid,
319 cgraph_node_name (cs->callee), cs->callee->uid);
320 ipa_print_node_jump_functions_for_edge (f, cs);
323 for (cs = node->indirect_calls, i = 0; cs; cs = cs->next_callee, i++)
325 if (!ipa_edge_args_info_available_for_edge_p (cs))
326 continue;
328 if (cs->call_stmt)
330 fprintf (f, " indirect callsite %d for stmt ", i);
331 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
333 else
334 fprintf (f, " indirect callsite %d :\n", i);
335 ipa_print_node_jump_functions_for_edge (f, cs);
340 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
342 void
343 ipa_print_all_jump_functions (FILE *f)
345 struct cgraph_node *node;
347 fprintf (f, "\nJump functions:\n");
348 for (node = cgraph_nodes; node; node = node->next)
350 ipa_print_node_jump_functions (f, node);
354 /* Structure to be passed in between detect_type_change and
355 check_stmt_for_type_change. */
357 struct type_change_info
359 /* Set to true if dynamic type change has been detected. */
360 bool type_maybe_changed;
363 /* Return true if STMT can modify a virtual method table pointer.
365 This function makes special assumptions about both constructors and
366 destructors, which are the only functions allowed to alter the VMT
367 pointers. It assumes that destructors begin with assignments into all VMT
368 pointers and that constructors essentially proceed in the following way:
370 1) The very first thing they do is call the constructors of those ancestor
371 sub-objects that have them.
373 2) Then the VMT pointers of this object and all of its ancestors are set to
374 new values corresponding to the type associated with the constructor.
376 3) Only afterwards is other work done, such as running constructors of
377 member sub-objects and the code written by the user. Only this part may
378 include calling virtual functions, directly or indirectly.
380 There is no way to call a constructor of an ancestor sub-object in any
381 other way.
383 This means that we do not have to care whether constructors get the correct
384 type information because they will always change it (in fact, if we define
385 the type to be given by the VMT pointer, it is undefined).
387 The most important fact to derive from the above is that if, for some
388 statement in section 3, we try to detect whether the dynamic type has
389 changed, we can safely ignore all calls as we examine the function body
390 backwards until we reach statements in section 2 because these calls cannot
391 be ancestor constructors or destructors (if the input is not bogus) and so
392 do not change the dynamic type (this holds true only for automatically
393 allocated objects but at the moment we devirtualize only these). We then
394 must detect that statements in section 2 change the dynamic type and can try
395 to derive the new type. That is enough and we can stop; we will never see
396 the calls into constructors of sub-objects in this code. Therefore we can
397 safely ignore all call statements that we traverse. */
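/* To illustrate the three sections described above (a hand-written sketch,
   not actual GCC output), a constructor of a class D derived from B
   conceptually expands to:

     D::D (struct D *this)
     {
       B::B (this);                      <- section 1
       this->_vptr.D = &vtable_for_D;    <- section 2
       this->some_member = 42;           <- section 3, may call virtuals
     }
*/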
400 static bool
401 stmt_may_be_vtbl_ptr_store (gimple stmt)
403 if (is_gimple_call (stmt))
404 return false;
405 else if (is_gimple_assign (stmt))
407 tree lhs = gimple_assign_lhs (stmt);
409 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
411 if (flag_strict_aliasing
412 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
413 return false;
415 if (TREE_CODE (lhs) == COMPONENT_REF
416 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
417 return false;
418 /* In the future we might want to use get_base_ref_and_offset to find
419 if there is a field corresponding to the offset and if so, proceed
420 almost as if it were a component ref. */
423 return true;
426 /* Callback of walk_aliased_vdefs and a helper function for
427 detect_type_change to check whether a particular statement may modify
428 the virtual table pointer, and if possible also determine the new type of
429 the (sub-)object. It stores its result into DATA, which points to a
430 type_change_info structure. */
432 static bool
433 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
435 gimple stmt = SSA_NAME_DEF_STMT (vdef);
436 struct type_change_info *tci = (struct type_change_info *) data;
438 if (stmt_may_be_vtbl_ptr_store (stmt))
440 tci->type_maybe_changed = true;
441 return true;
443 else
444 return false;
447 /* Detect whether the dynamic type of ARG has changed (before callsite CALL) by
448 looking for assignments to its virtual table pointer. If it has, return true
449 and fill in the jump function JFUNC with relevant type information or set it
450 to unknown. ARG is the object itself (not a pointer to it, unless
451 dereferenced). BASE is the base of the memory access as returned by
452 get_ref_base_and_extent, as is the offset. */
454 static bool
455 detect_type_change (tree arg, tree base, gimple call,
456 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
458 struct type_change_info tci;
459 ao_ref ao;
461 gcc_checking_assert (DECL_P (arg)
462 || TREE_CODE (arg) == MEM_REF
463 || handled_component_p (arg));
464 /* Const calls cannot call virtual methods through VMT and so type changes do
465 not matter. */
466 if (!flag_devirtualize || !gimple_vuse (call))
467 return false;
469 tci.type_maybe_changed = false;
471 ao.ref = arg;
472 ao.base = base;
473 ao.offset = offset;
474 ao.size = POINTER_SIZE;
475 ao.max_size = ao.size;
476 ao.ref_alias_set = -1;
477 ao.base_alias_set = -1;
479 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
480 &tci, NULL);
481 if (!tci.type_maybe_changed)
482 return false;
484 jfunc->type = IPA_JF_UNKNOWN;
485 return true;
488 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
489 SSA name (its dereference will become the base and the offset is assumed to
490 be zero). */
492 static bool
493 detect_type_change_ssa (tree arg, gimple call, struct ipa_jump_func *jfunc)
495 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
496 if (!flag_devirtualize
497 || !POINTER_TYPE_P (TREE_TYPE (arg))
498 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != RECORD_TYPE)
499 return false;
501 arg = build2 (MEM_REF, ptr_type_node, arg,
502 build_int_cst (ptr_type_node, 0));
504 return detect_type_change (arg, arg, call, jfunc, 0);
508 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
509 of an assignment statement STMT, try to find out whether NAME can be
510 described by a (possibly polynomial) pass-through jump-function or an
511 ancestor jump function and if so, write the appropriate function into
512 JFUNC */
514 static void
515 compute_complex_assign_jump_func (struct ipa_node_params *info,
516 struct ipa_jump_func *jfunc,
517 gimple call, gimple stmt, tree name)
519 HOST_WIDE_INT offset, size, max_size;
520 tree op1, op2, base, ssa;
521 int index;
523 op1 = gimple_assign_rhs1 (stmt);
524 op2 = gimple_assign_rhs2 (stmt);
526 if (TREE_CODE (op1) == SSA_NAME
527 && SSA_NAME_IS_DEFAULT_DEF (op1))
529 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
530 if (index < 0)
531 return;
533 if (op2)
535 if (!is_gimple_ip_invariant (op2)
536 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
537 && !useless_type_conversion_p (TREE_TYPE (name),
538 TREE_TYPE (op1))))
539 return;
541 jfunc->type = IPA_JF_PASS_THROUGH;
542 jfunc->value.pass_through.formal_id = index;
543 jfunc->value.pass_through.operation = gimple_assign_rhs_code (stmt);
544 jfunc->value.pass_through.operand = op2;
546 else if (gimple_assign_unary_nop_p (stmt)
547 && !detect_type_change_ssa (op1, call, jfunc))
549 jfunc->type = IPA_JF_PASS_THROUGH;
550 jfunc->value.pass_through.formal_id = index;
551 jfunc->value.pass_through.operation = NOP_EXPR;
553 return;
556 if (TREE_CODE (op1) != ADDR_EXPR)
557 return;
558 op1 = TREE_OPERAND (op1, 0);
559 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
560 return;
561 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
562 if (TREE_CODE (base) != MEM_REF
563 /* If this is a varying address, punt. */
564 || max_size == -1
565 || max_size != size)
566 return;
567 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
568 ssa = TREE_OPERAND (base, 0);
569 if (TREE_CODE (ssa) != SSA_NAME
570 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
571 || offset < 0)
572 return;
574 /* Dynamic types are changed only in constructors and destructors. */
575 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
576 if (index >= 0
577 && !detect_type_change (op1, base, call, jfunc, offset))
579 jfunc->type = IPA_JF_ANCESTOR;
580 jfunc->value.ancestor.formal_id = index;
581 jfunc->value.ancestor.offset = offset;
582 jfunc->value.ancestor.type = TREE_TYPE (op1);
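/* For illustration (made-up SSA names and field), given a formal parameter
   param_1, the function above records for

     tmp_2 = param_1(D) + 7;

   a pass-through jump function with operation PLUS_EXPR and operand 7, and
   for

     tmp_3 = &param_1(D)->D.1800;

   an ancestor jump function carrying the offset of field D.1800. */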
586 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
587 it looks like:
589 iftmp.1_3 = &obj_2(D)->D.1762;
591 The base of the MEM_REF must be a default definition SSA NAME of a
592 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
593 whole MEM_REF expression is returned and the offset calculated from any
594 handled components and the MEM_REF itself is stored into *OFFSET. The whole
595 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
597 static tree
598 get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
600 HOST_WIDE_INT size, max_size;
601 tree expr, parm, obj;
603 if (!gimple_assign_single_p (assign))
604 return NULL_TREE;
605 expr = gimple_assign_rhs1 (assign);
607 if (TREE_CODE (expr) != ADDR_EXPR)
608 return NULL_TREE;
609 expr = TREE_OPERAND (expr, 0);
610 obj = expr;
611 expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
613 if (TREE_CODE (expr) != MEM_REF
614 /* If this is a varying address, punt. */
615 || max_size == -1
616 || max_size != size
617 || *offset < 0)
618 return NULL_TREE;
619 parm = TREE_OPERAND (expr, 0);
620 if (TREE_CODE (parm) != SSA_NAME
621 || !SSA_NAME_IS_DEFAULT_DEF (parm)
622 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
623 return NULL_TREE;
625 *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
626 *obj_p = obj;
627 return expr;
631 /* Given that an actual argument is an SSA_NAME that is a result of a phi
632 statement PHI, try to find out whether NAME is in fact a
633 multiple-inheritance typecast from a descendant into an ancestor of a formal
634 parameter and thus can be described by an ancestor jump function and if so,
635 write the appropriate function into JFUNC.
637 Essentially we want to match the following pattern:
639 if (obj_2(D) != 0B)
640 goto <bb 3>;
641 else
642 goto <bb 4>;
644 <bb 3>:
645 iftmp.1_3 = &obj_2(D)->D.1762;
647 <bb 4>:
648 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
649 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
650 return D.1879_6; */
652 static void
653 compute_complex_ancestor_jump_func (struct ipa_node_params *info,
654 struct ipa_jump_func *jfunc,
655 gimple call, gimple phi)
657 HOST_WIDE_INT offset;
658 gimple assign, cond;
659 basic_block phi_bb, assign_bb, cond_bb;
660 tree tmp, parm, expr, obj;
661 int index, i;
663 if (gimple_phi_num_args (phi) != 2)
664 return;
666 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
667 tmp = PHI_ARG_DEF (phi, 0);
668 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
669 tmp = PHI_ARG_DEF (phi, 1);
670 else
671 return;
672 if (TREE_CODE (tmp) != SSA_NAME
673 || SSA_NAME_IS_DEFAULT_DEF (tmp)
674 || !POINTER_TYPE_P (TREE_TYPE (tmp))
675 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
676 return;
678 assign = SSA_NAME_DEF_STMT (tmp);
679 assign_bb = gimple_bb (assign);
680 if (!single_pred_p (assign_bb))
681 return;
682 expr = get_ancestor_addr_info (assign, &obj, &offset);
683 if (!expr)
684 return;
685 parm = TREE_OPERAND (expr, 0);
686 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
687 gcc_assert (index >= 0);
689 cond_bb = single_pred (assign_bb);
690 cond = last_stmt (cond_bb);
691 if (!cond
692 || gimple_code (cond) != GIMPLE_COND
693 || gimple_cond_code (cond) != NE_EXPR
694 || gimple_cond_lhs (cond) != parm
695 || !integer_zerop (gimple_cond_rhs (cond)))
696 return;
698 phi_bb = gimple_bb (phi);
699 for (i = 0; i < 2; i++)
701 basic_block pred = EDGE_PRED (phi_bb, i)->src;
702 if (pred != assign_bb && pred != cond_bb)
703 return;
706 if (!detect_type_change (obj, expr, call, jfunc, offset))
708 jfunc->type = IPA_JF_ANCESTOR;
709 jfunc->value.ancestor.formal_id = index;
710 jfunc->value.ancestor.offset = offset;
711 jfunc->value.ancestor.type = TREE_TYPE (obj);
715 /* Given OP which is passed as an actual argument to a called function,
716 determine if it is possible to construct a KNOWN_TYPE jump function for it
717 and if so, create one and store it to JFUNC. */
719 static void
720 compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
721 gimple call)
723 HOST_WIDE_INT offset, size, max_size;
724 tree base, binfo;
726 if (!flag_devirtualize
727 || TREE_CODE (op) != ADDR_EXPR
728 || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE)
729 return;
731 op = TREE_OPERAND (op, 0);
732 base = get_ref_base_and_extent (op, &offset, &size, &max_size);
733 if (!DECL_P (base)
734 || max_size == -1
735 || max_size != size
736 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
737 || is_global_var (base))
738 return;
740 if (detect_type_change (op, base, call, jfunc, offset))
741 return;
743 binfo = TYPE_BINFO (TREE_TYPE (base));
744 if (!binfo)
745 return;
746 binfo = get_binfo_at_offset (binfo, offset, TREE_TYPE (op));
747 if (binfo)
749 jfunc->type = IPA_JF_KNOWN_TYPE;
750 jfunc->value.base_binfo = binfo;
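/* As an illustration (hypothetical code), with devirtualization enabled, for

     some_class s;
     do_something (&s);

   the function above produces a KNOWN_TYPE jump function for the first
   argument whose binfo is TYPE_BINFO of the C++ class some_class, provided no
   statement before the call may have changed the dynamic type of s. */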
755 /* Determine the jump functions of scalar arguments. Scalar means SSA names
756 and constants of a number of selected types. INFO is the ipa_node_params
757 structure associated with the caller; FUNCTIONS is a pointer to an array of
758 jump function structures associated with CALL, which is the call statement
759 being examined. */
761 static void
762 compute_scalar_jump_functions (struct ipa_node_params *info,
763 struct ipa_jump_func *functions,
764 gimple call)
766 tree arg;
767 unsigned num = 0;
769 for (num = 0; num < gimple_call_num_args (call); num++)
771 arg = gimple_call_arg (call, num);
773 if (is_gimple_ip_invariant (arg))
775 functions[num].type = IPA_JF_CONST;
776 functions[num].value.constant = arg;
778 else if (TREE_CODE (arg) == SSA_NAME)
780 if (SSA_NAME_IS_DEFAULT_DEF (arg))
782 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
784 if (index >= 0
785 && !detect_type_change_ssa (arg, call, &functions[num]))
787 functions[num].type = IPA_JF_PASS_THROUGH;
788 functions[num].value.pass_through.formal_id = index;
789 functions[num].value.pass_through.operation = NOP_EXPR;
792 else
794 gimple stmt = SSA_NAME_DEF_STMT (arg);
795 if (is_gimple_assign (stmt))
796 compute_complex_assign_jump_func (info, &functions[num],
797 call, stmt, arg);
798 else if (gimple_code (stmt) == GIMPLE_PHI)
799 compute_complex_ancestor_jump_func (info, &functions[num],
800 call, stmt);
803 else
804 compute_known_type_jump_func (arg, &functions[num], call);
808 /* Inspect the given TYPE and return true iff it has the same structure (the
809 same number of fields of the same types) as a C++ member pointer. If
810 METHOD_PTR and DELTA are non-NULL, store the trees representing the
811 corresponding fields there. */
813 static bool
814 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
816 tree fld;
818 if (TREE_CODE (type) != RECORD_TYPE)
819 return false;
821 fld = TYPE_FIELDS (type);
822 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
823 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE)
824 return false;
826 if (method_ptr)
827 *method_ptr = fld;
829 fld = DECL_CHAIN (fld);
830 if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld)))
831 return false;
832 if (delta)
833 *delta = fld;
835 if (DECL_CHAIN (fld))
836 return false;
838 return true;
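/* For reference, the layout matched above is the one used for C++ pointers to
   member functions: a first field whose type is a pointer to a method (the
   __pfn field) followed by an integral __delta field holding the adjustment
   to apply to the this pointer. The __pfn and __delta names also appear in
   the gimple dumps quoted later in this file. */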
841 /* Callback of walk_aliased_vdefs. Records in the boolean variable pointed
842 to by DATA that it has been invoked. */
844 static bool
845 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
846 void *data)
848 bool *b = (bool *) data;
849 *b = true;
850 return true;
853 /* Return true if the formal parameter PARM might have been modified in this
854 function before reaching the statement CALL. PARM_INFO is a pointer to a
855 structure containing intermediate information about PARM. */
857 static bool
858 is_parm_modified_before_call (struct param_analysis_info *parm_info,
859 gimple call, tree parm)
861 bool modified = false;
862 ao_ref refd;
864 if (parm_info->modified)
865 return true;
867 ao_ref_init (&refd, parm);
868 walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
869 &modified, &parm_info->visited_statements);
870 if (modified)
872 parm_info->modified = true;
873 return true;
875 return false;
878 /* Go through arguments of the CALL and for every one that looks like a member
879 pointer, check whether it can be safely declared pass-through and if so,
880 record that in the corresponding item of jump FUNCTIONS. Return true iff
881 there are non-pass-through member pointers within the arguments. INFO
882 describes formal parameters of the caller. PARMS_INFO is a pointer to a
883 vector containing intermediate information about each formal parameter. */
885 static bool
886 compute_pass_through_member_ptrs (struct ipa_node_params *info,
887 struct param_analysis_info *parms_info,
888 struct ipa_jump_func *functions,
889 gimple call)
891 bool undecided_members = false;
892 unsigned num;
893 tree arg;
895 for (num = 0; num < gimple_call_num_args (call); num++)
897 arg = gimple_call_arg (call, num);
899 if (type_like_member_ptr_p (TREE_TYPE (arg), NULL, NULL))
901 if (TREE_CODE (arg) == PARM_DECL)
903 int index = ipa_get_param_decl_index (info, arg);
905 gcc_assert (index >= 0);
906 if (!is_parm_modified_before_call (&parms_info[index], call, arg))
908 functions[num].type = IPA_JF_PASS_THROUGH;
909 functions[num].value.pass_through.formal_id = index;
910 functions[num].value.pass_through.operation = NOP_EXPR;
912 else
913 undecided_members = true;
915 else
916 undecided_members = true;
920 return undecided_members;
923 /* Simple function filling in a member pointer constant jump function (with PFN
924 and DELTA as the constant value) into JFUNC. */
926 static void
927 fill_member_ptr_cst_jump_function (struct ipa_jump_func *jfunc,
928 tree pfn, tree delta)
930 jfunc->type = IPA_JF_CONST_MEMBER_PTR;
931 jfunc->value.member_cst.pfn = pfn;
932 jfunc->value.member_cst.delta = delta;
935 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
936 return the rhs of its defining statement. */
938 static inline tree
939 get_ssa_def_if_simple_copy (tree rhs)
941 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
943 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
945 if (gimple_assign_single_p (def_stmt))
946 rhs = gimple_assign_rhs1 (def_stmt);
947 else
948 break;
950 return rhs;
953 /* Traverse statements from CALL backwards, checking whether the argument ARG,
954 which is a member pointer, is filled in with constant values. If it is, fill
955 in the jump function JFUNC appropriately. METHOD_FIELD and DELTA_FIELD are
956 fields of the record type of the member pointer. To give an example, we
957 look for a pattern looking like the following:
959 D.2515.__pfn ={v} printStuff;
960 D.2515.__delta ={v} 0;
961 i_1 = doprinting (D.2515); */
963 static void
964 determine_cst_member_ptr (gimple call, tree arg, tree method_field,
965 tree delta_field, struct ipa_jump_func *jfunc)
967 gimple_stmt_iterator gsi;
968 tree method = NULL_TREE;
969 tree delta = NULL_TREE;
971 gsi = gsi_for_stmt (call);
973 gsi_prev (&gsi);
974 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
976 gimple stmt = gsi_stmt (gsi);
977 tree lhs, rhs, fld;
979 if (!stmt_may_clobber_ref_p (stmt, arg))
980 continue;
981 if (!gimple_assign_single_p (stmt))
982 return;
984 lhs = gimple_assign_lhs (stmt);
985 rhs = gimple_assign_rhs1 (stmt);
987 if (TREE_CODE (lhs) != COMPONENT_REF
988 || TREE_OPERAND (lhs, 0) != arg)
989 return;
991 fld = TREE_OPERAND (lhs, 1);
992 if (!method && fld == method_field)
994 rhs = get_ssa_def_if_simple_copy (rhs);
995 if (TREE_CODE (rhs) == ADDR_EXPR
996 && TREE_CODE (TREE_OPERAND (rhs, 0)) == FUNCTION_DECL
997 && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs, 0))) == METHOD_TYPE)
999 method = TREE_OPERAND (rhs, 0);
1000 if (delta)
1002 fill_member_ptr_cst_jump_function (jfunc, rhs, delta);
1003 return;
1006 else
1007 return;
1010 if (!delta && fld == delta_field)
1012 rhs = get_ssa_def_if_simple_copy (rhs);
1013 if (TREE_CODE (rhs) == INTEGER_CST)
1015 delta = rhs;
1016 if (method)
1018 fill_member_ptr_cst_jump_function (jfunc, method, delta);
1019 return;
1022 else
1023 return;
1027 return;
1030 /* Go through the arguments of the CALL and for every member pointer within
1031 try to determine whether it is a constant. If it is, create a corresponding
1032 constant jump function in FUNCTIONS which is an array of jump functions
1033 associated with the call. */
1035 static void
1036 compute_cst_member_ptr_arguments (struct ipa_jump_func *functions,
1037 gimple call)
1039 unsigned num;
1040 tree arg, method_field, delta_field;
1042 for (num = 0; num < gimple_call_num_args (call); num++)
1044 arg = gimple_call_arg (call, num);
1046 if (functions[num].type == IPA_JF_UNKNOWN
1047 && type_like_member_ptr_p (TREE_TYPE (arg), &method_field,
1048 &delta_field))
1049 determine_cst_member_ptr (call, arg, method_field, delta_field,
1050 &functions[num]);
1054 /* Compute jump functions for all arguments of callsite CS and insert the
1055 information in the jump_functions array in the ipa_edge_args corresponding
1056 to this callsite. */
1058 static void
1059 ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_info,
1060 struct cgraph_edge *cs)
1062 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1063 struct ipa_edge_args *arguments = IPA_EDGE_REF (cs);
1064 gimple call;
1066 if (ipa_get_cs_argument_count (arguments) == 0 || arguments->jump_functions)
1067 return;
1068 arguments->jump_functions = ggc_alloc_cleared_vec_ipa_jump_func
1069 (ipa_get_cs_argument_count (arguments));
1071 call = cs->call_stmt;
1072 gcc_assert (is_gimple_call (call));
1074 /* We will deal with constants and SSA scalars first: */
1075 compute_scalar_jump_functions (info, arguments->jump_functions, call);
1077 /* Let's check whether there are any potential member pointers and if so,
1078 whether we can determine their functions as pass_through. */
1079 if (!compute_pass_through_member_ptrs (info, parms_info,
1080 arguments->jump_functions, call))
1081 return;
1083 /* Finally, let's check whether we actually pass a new constant member
1084 pointer here... */
1085 compute_cst_member_ptr_arguments (arguments->jump_functions, call);
1088 /* Compute jump functions for all edges - both direct and indirect - outgoing
1089 from NODE. Also count the actual arguments in the process. */
1091 static void
1092 ipa_compute_jump_functions (struct cgraph_node *node,
1093 struct param_analysis_info *parms_info)
1095 struct cgraph_edge *cs;
1097 for (cs = node->callees; cs; cs = cs->next_callee)
1099 /* We do not need to bother analyzing calls to unknown
1100 functions unless they may become known during lto/whopr. */
1101 if (!cs->callee->analyzed && !flag_lto)
1102 continue;
1103 ipa_count_arguments (cs);
1104 /* If the descriptor of the callee is not initialized yet, we have to do
1105 it now. */
1106 if (cs->callee->analyzed)
1107 ipa_initialize_node_params (cs->callee);
1108 if (ipa_get_cs_argument_count (IPA_EDGE_REF (cs))
1109 != ipa_get_param_count (IPA_NODE_REF (cs->callee)))
1110 ipa_set_called_with_variable_arg (IPA_NODE_REF (cs->callee));
1111 ipa_compute_jump_functions_for_edge (parms_info, cs);
1114 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
1116 ipa_count_arguments (cs);
1117 ipa_compute_jump_functions_for_edge (parms_info, cs);
1121 /* If RHS looks like a rhs of a statement loading pfn from a member
1122 pointer formal parameter, return the parameter, otherwise return
1123 NULL. If USE_DELTA, then we look for a use of the delta field
1124 rather than the pfn. */
1126 static tree
1127 ipa_get_member_ptr_load_param (tree rhs, bool use_delta)
1129 tree rec, ref_field, ref_offset, fld, fld_offset, ptr_field, delta_field;
1131 if (TREE_CODE (rhs) == COMPONENT_REF)
1133 ref_field = TREE_OPERAND (rhs, 1);
1134 rhs = TREE_OPERAND (rhs, 0);
1136 else
1137 ref_field = NULL_TREE;
1138 if (TREE_CODE (rhs) != MEM_REF)
1139 return NULL_TREE;
1140 rec = TREE_OPERAND (rhs, 0);
1141 if (TREE_CODE (rec) != ADDR_EXPR)
1142 return NULL_TREE;
1143 rec = TREE_OPERAND (rec, 0);
1144 if (TREE_CODE (rec) != PARM_DECL
1145 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1146 return NULL_TREE;
1148 ref_offset = TREE_OPERAND (rhs, 1);
1150 if (ref_field)
1152 if (integer_nonzerop (ref_offset))
1153 return NULL_TREE;
1155 if (use_delta)
1156 fld = delta_field;
1157 else
1158 fld = ptr_field;
1160 return ref_field == fld ? rec : NULL_TREE;
1163 if (use_delta)
1164 fld_offset = byte_position (delta_field);
1165 else
1166 fld_offset = byte_position (ptr_field);
1168 return tree_int_cst_equal (ref_offset, fld_offset) ? rec : NULL_TREE;
1171 /* If STMT looks like a statement loading a value from a member pointer formal
1172 parameter, this function returns that parameter. */
1174 static tree
1175 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta)
1177 tree rhs;
1179 if (!gimple_assign_single_p (stmt))
1180 return NULL_TREE;
1182 rhs = gimple_assign_rhs1 (stmt);
1183 return ipa_get_member_ptr_load_param (rhs, use_delta);
1186 /* Returns true iff T is an SSA_NAME defined by a statement. */
1188 static bool
1189 ipa_is_ssa_with_stmt_def (tree t)
1191 if (TREE_CODE (t) == SSA_NAME
1192 && !SSA_NAME_IS_DEFAULT_DEF (t))
1193 return true;
1194 else
1195 return false;
1198 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1199 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1200 indirect call graph edge. */
1202 static struct cgraph_edge *
1203 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
1205 struct cgraph_edge *cs;
1207 cs = cgraph_edge (node, stmt);
1208 cs->indirect_info->param_index = param_index;
1209 cs->indirect_info->anc_offset = 0;
1210 cs->indirect_info->polymorphic = 0;
1211 return cs;
1214 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1215 (described by INFO). PARMS_INFO is a pointer to a vector containing
1216 intermediate information about each formal parameter. Currently it checks
1217 whether the call is made through a pointer that is a formal parameter; if so, the
1218 parameter is marked with the called flag and an indirect call graph edge
1219 describing the call is created. This is very simple for ordinary pointers
1220 represented in SSA but not-so-nice when it comes to member pointers. The
1221 ugly part of this function does nothing more than trying to match the
1222 pattern of such a call. An example of such a pattern is the gimple dump
1223 below, the call is on the last line:
1225 <bb 2>:
1226 f$__delta_5 = f.__delta;
1227 f$__pfn_24 = f.__pfn;
1229 or
1230 <bb 2>:
1231 f$__delta_5 = MEM[(struct *)&f];
1232 f$__pfn_24 = MEM[(struct *)&f + 4B];
1234 and a few lines below:
1236 <bb 5>
1237 D.2496_3 = (int) f$__pfn_24;
1238 D.2497_4 = D.2496_3 & 1;
1239 if (D.2497_4 != 0)
1240 goto <bb 3>;
1241 else
1242 goto <bb 4>;
1244 <bb 6>:
1245 D.2500_7 = (unsigned int) f$__delta_5;
1246 D.2501_8 = &S + D.2500_7;
1247 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1248 D.2503_10 = *D.2502_9;
1249 D.2504_12 = f$__pfn_24 + -1;
1250 D.2505_13 = (unsigned int) D.2504_12;
1251 D.2506_14 = D.2503_10 + D.2505_13;
1252 D.2507_15 = *D.2506_14;
1253 iftmp.11_16 = (String:: *) D.2507_15;
1255 <bb 7>:
1256 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1257 D.2500_19 = (unsigned int) f$__delta_5;
1258 D.2508_20 = &S + D.2500_19;
1259 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1261 Such patterns are results of simple calls to a member pointer:
1263 int doprinting (int (MyString::* f)(int) const)
1265 MyString S ("somestring");
1267 return (S.*f)(4);
1271 static void
1272 ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1273 struct ipa_node_params *info,
1274 struct param_analysis_info *parms_info,
1275 gimple call, tree target)
1277 gimple def;
1278 tree n1, n2;
1279 gimple d1, d2;
1280 tree rec, rec2, cond;
1281 gimple branch;
1282 int index;
1283 basic_block bb, virt_bb, join;
1285 if (SSA_NAME_IS_DEFAULT_DEF (target))
1287 tree var = SSA_NAME_VAR (target);
1288 index = ipa_get_param_decl_index (info, var);
1289 if (index >= 0)
1290 ipa_note_param_call (node, index, call);
1291 return;
1294 /* Now we need to try to match the complex pattern of calling a member
1295 pointer. */
1297 if (!POINTER_TYPE_P (TREE_TYPE (target))
1298 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1299 return;
1301 def = SSA_NAME_DEF_STMT (target);
1302 if (gimple_code (def) != GIMPLE_PHI)
1303 return;
1305 if (gimple_phi_num_args (def) != 2)
1306 return;
1308 /* First, we need to check whether one of these is a load from a member
1309 pointer that is a parameter to this function. */
1310 n1 = PHI_ARG_DEF (def, 0);
1311 n2 = PHI_ARG_DEF (def, 1);
1312 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1313 return;
1314 d1 = SSA_NAME_DEF_STMT (n1);
1315 d2 = SSA_NAME_DEF_STMT (n2);
1317 join = gimple_bb (def);
1318 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false)))
1320 if (ipa_get_stmt_member_ptr_load_param (d2, false))
1321 return;
1323 bb = EDGE_PRED (join, 0)->src;
1324 virt_bb = gimple_bb (d2);
1326 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false)))
1328 bb = EDGE_PRED (join, 1)->src;
1329 virt_bb = gimple_bb (d1);
1331 else
1332 return;
1334 /* Second, we need to check that the basic blocks are laid out in the way
1335 corresponding to the pattern. */
1337 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1338 || single_pred (virt_bb) != bb
1339 || single_succ (virt_bb) != join)
1340 return;
1342 /* Third, let's see that the branching is done depending on the least
1343 significant bit of the pfn. */
1345 branch = last_stmt (bb);
1346 if (!branch || gimple_code (branch) != GIMPLE_COND)
1347 return;
1349 if (gimple_cond_code (branch) != NE_EXPR
1350 || !integer_zerop (gimple_cond_rhs (branch)))
1351 return;
1353 cond = gimple_cond_lhs (branch);
1354 if (!ipa_is_ssa_with_stmt_def (cond))
1355 return;
1357 def = SSA_NAME_DEF_STMT (cond);
1358 if (!is_gimple_assign (def)
1359 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1360 || !integer_onep (gimple_assign_rhs2 (def)))
1361 return;
1363 cond = gimple_assign_rhs1 (def);
1364 if (!ipa_is_ssa_with_stmt_def (cond))
1365 return;
1367 def = SSA_NAME_DEF_STMT (cond);
1369 if (is_gimple_assign (def)
1370 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
1372 cond = gimple_assign_rhs1 (def);
1373 if (!ipa_is_ssa_with_stmt_def (cond))
1374 return;
1375 def = SSA_NAME_DEF_STMT (cond);
1378 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1379 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1380 == ptrmemfunc_vbit_in_delta));
1382 if (rec != rec2)
1383 return;
1385 index = ipa_get_param_decl_index (info, rec);
1386 if (index >= 0 && !is_parm_modified_before_call (&parms_info[index],
1387 call, rec))
1388 ipa_note_param_call (node, index, call);
1390 return;
1393 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1394 object referenced in the expression is a formal parameter of the caller
1395 (described by INFO), create a call note for the statement. */
1397 static void
1398 ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1399 struct ipa_node_params *info, gimple call,
1400 tree target)
1402 struct cgraph_edge *cs;
1403 struct cgraph_indirect_call_info *ii;
1404 struct ipa_jump_func jfunc;
1405 tree obj = OBJ_TYPE_REF_OBJECT (target);
1406 int index;
1407 HOST_WIDE_INT anc_offset;
1409 if (!flag_devirtualize)
1410 return;
1412 if (TREE_CODE (obj) != SSA_NAME)
1413 return;
1415 if (SSA_NAME_IS_DEFAULT_DEF (obj))
1417 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
1418 return;
1420 anc_offset = 0;
1421 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
1422 gcc_assert (index >= 0);
1423 if (detect_type_change_ssa (obj, call, &jfunc))
1424 return;
1426 else
1428 gimple stmt = SSA_NAME_DEF_STMT (obj);
1429 tree expr;
1431 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
1432 if (!expr)
1433 return;
1434 index = ipa_get_param_decl_index (info,
1435 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
1436 gcc_assert (index >= 0);
1437 if (detect_type_change (obj, expr, call, &jfunc, anc_offset))
1438 return;
1441 cs = ipa_note_param_call (node, index, call);
1442 ii = cs->indirect_info;
1443 ii->anc_offset = anc_offset;
1444 ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
1445 ii->otr_type = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (target)));
1446 ii->polymorphic = 1;
1449 /* Analyze a call statement CALL whether and how it utilizes formal parameters
1450 of the caller (described by INFO). PARMS_INFO is a pointer to a vector
1451 containing intermediate information about each formal parameter. */
1453 static void
1454 ipa_analyze_call_uses (struct cgraph_node *node,
1455 struct ipa_node_params *info,
1456 struct param_analysis_info *parms_info, gimple call)
1458 tree target = gimple_call_fn (call);
1460 if (!target)
1461 return;
1462 if (TREE_CODE (target) == SSA_NAME)
1463 ipa_analyze_indirect_call_uses (node, info, parms_info, call, target);
1464 else if (TREE_CODE (target) == OBJ_TYPE_REF)
1465 ipa_analyze_virtual_call_uses (node, info, call, target);
1469 /* Analyze the call statement STMT with respect to formal parameters (described
1470 in INFO) of caller given by NODE. Currently it only checks whether formal
1471 parameters are called. PARMS_INFO is a pointer to a vector containing
1472 intermediate information about each formal parameter. */
1474 static void
1475 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
1476 struct param_analysis_info *parms_info, gimple stmt)
1478 if (is_gimple_call (stmt))
1479 ipa_analyze_call_uses (node, info, parms_info, stmt);
1482 /* Callback of walk_stmt_load_store_addr_ops, used for loads, stores and
1483 addresses alike. If OP is a parameter declaration, mark it as used in the
1484 info structure passed in DATA. */
1486 static bool
1487 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
1488 tree op, void *data)
1490 struct ipa_node_params *info = (struct ipa_node_params *) data;
1492 op = get_base_address (op);
1493 if (op
1494 && TREE_CODE (op) == PARM_DECL)
1496 int index = ipa_get_param_decl_index (info, op);
1497 gcc_assert (index >= 0);
1498 info->params[index].used = true;
1501 return false;
1504 /* Scan the function body of NODE and inspect the uses of formal parameters.
1505 Store the findings in various structures of the associated ipa_node_params
1506 structure, such as parameter flags, notes etc. PARMS_INFO is a pointer to a
1507 vector containing intermediate information about each formal parameter. */
1509 static void
1510 ipa_analyze_params_uses (struct cgraph_node *node,
1511 struct param_analysis_info *parms_info)
1513 tree decl = node->decl;
1514 basic_block bb;
1515 struct function *func;
1516 gimple_stmt_iterator gsi;
1517 struct ipa_node_params *info = IPA_NODE_REF (node);
1518 int i;
1520 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
1521 return;
1523 for (i = 0; i < ipa_get_param_count (info); i++)
1525 tree parm = ipa_get_param (info, i);
1526 /* For SSA regs see if parameter is used. For non-SSA we compute
1527 the flag during modification analysis. */
1528 if (is_gimple_reg (parm)
1529 && gimple_default_def (DECL_STRUCT_FUNCTION (node->decl), parm))
1530 info->params[i].used = true;
1533 func = DECL_STRUCT_FUNCTION (decl);
1534 FOR_EACH_BB_FN (bb, func)
1536 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1538 gimple stmt = gsi_stmt (gsi);
1540 if (is_gimple_debug (stmt))
1541 continue;
1543 ipa_analyze_stmt_uses (node, info, parms_info, stmt);
1544 walk_stmt_load_store_addr_ops (stmt, info,
1545 visit_ref_for_mod_analysis,
1546 visit_ref_for_mod_analysis,
1547 visit_ref_for_mod_analysis);
1549 for (gsi = gsi_start (phi_nodes (bb)); !gsi_end_p (gsi); gsi_next (&gsi))
1550 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
1551 visit_ref_for_mod_analysis,
1552 visit_ref_for_mod_analysis,
1553 visit_ref_for_mod_analysis);
1556 info->uses_analysis_done = 1;
1559 /* Initialize the array describing properties of formal parameters
1560 of NODE, analyze their uses and compute jump functions associated
1561 with actual arguments of calls from within NODE. */
1563 void
1564 ipa_analyze_node (struct cgraph_node *node)
1566 struct ipa_node_params *info;
1567 struct param_analysis_info *parms_info;
1568 int i, param_count;
1570 ipa_check_create_node_params ();
1571 ipa_check_create_edge_args ();
1572 info = IPA_NODE_REF (node);
1573 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
1574 current_function_decl = node->decl;
1575 ipa_initialize_node_params (node);
1577 param_count = ipa_get_param_count (info);
1578 parms_info = XALLOCAVEC (struct param_analysis_info, param_count);
1579 memset (parms_info, 0, sizeof (struct param_analysis_info) * param_count);
1581 ipa_analyze_params_uses (node, parms_info);
1582 ipa_compute_jump_functions (node, parms_info);
1584 for (i = 0; i < param_count; i++)
1585 if (parms_info[i].visited_statements)
1586 BITMAP_FREE (parms_info[i].visited_statements);
1588 current_function_decl = NULL;
1589 pop_cfun ();
1593 /* Update the jump function DST when the call graph edge corresponding to SRC
1594 is being inlined, knowing that DST is of type ancestor and SRC of known
1595 type. */
1597 static void
1598 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
1599 struct ipa_jump_func *dst)
1601 tree new_binfo;
1603 new_binfo = get_binfo_at_offset (src->value.base_binfo,
1604 dst->value.ancestor.offset,
1605 dst->value.ancestor.type);
1606 if (new_binfo)
1608 dst->type = IPA_JF_KNOWN_TYPE;
1609 dst->value.base_binfo = new_binfo;
1611 else
1612 dst->type = IPA_JF_UNKNOWN;
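/* For example (illustrative), if SRC records that the argument is known to be
   of type D and DST is an ancestor jump function converting the value to a
   base class B at some offset within D, the combined jump function is a
   KNOWN_TYPE one whose binfo describes the B sub-object of D; if no such base
   exists at that offset, the result is IPA_JF_UNKNOWN. */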
1615 /* Update the jump functions associated with call graph edge E when the call
1616 graph edge CS is being inlined, assuming that E->caller is already (possibly
1617 indirectly) inlined into CS->callee and that E has not been inlined. */
1619 static void
1620 update_jump_functions_after_inlining (struct cgraph_edge *cs,
1621 struct cgraph_edge *e)
1623 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
1624 struct ipa_edge_args *args = IPA_EDGE_REF (e);
1625 int count = ipa_get_cs_argument_count (args);
1626 int i;
1628 for (i = 0; i < count; i++)
1630 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
1632 if (dst->type == IPA_JF_ANCESTOR)
1634 struct ipa_jump_func *src;
1636 /* Variable number of arguments can cause havoc if we try to access
1637 one that does not exist in the inlined edge. So make sure we
1638 don't. */
1639 if (dst->value.ancestor.formal_id >= ipa_get_cs_argument_count (top))
1641 dst->type = IPA_JF_UNKNOWN;
1642 continue;
1645 src = ipa_get_ith_jump_func (top, dst->value.ancestor.formal_id);
1646 if (src->type == IPA_JF_KNOWN_TYPE)
1647 combine_known_type_and_ancestor_jfs (src, dst);
1648 else if (src->type == IPA_JF_PASS_THROUGH
1649 && src->value.pass_through.operation == NOP_EXPR)
1650 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
1651 else if (src->type == IPA_JF_ANCESTOR)
1653 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
1654 dst->value.ancestor.offset += src->value.ancestor.offset;
1656 else
1657 dst->type = IPA_JF_UNKNOWN;
1659 else if (dst->type == IPA_JF_PASS_THROUGH)
1661 struct ipa_jump_func *src;
1662 /* We must check range due to calls with variable number of arguments
1663 and we cannot combine jump functions with operations. */
1664 if (dst->value.pass_through.operation == NOP_EXPR
1665 && (dst->value.pass_through.formal_id
1666 < ipa_get_cs_argument_count (top)))
1668 src = ipa_get_ith_jump_func (top,
1669 dst->value.pass_through.formal_id);
1670 *dst = *src;
1672 else
1673 dst->type = IPA_JF_UNKNOWN;
1678 /* If TARGET is an addr_expr of a function declaration, make it the destination
1679 of an indirect edge IE and return the edge. Otherwise, return NULL. Delta,
1680 if non-NULL, is an integer constant that must be added to this pointer
1681 (first parameter). */
1683 struct cgraph_edge *
1684 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target, tree delta)
1686 struct cgraph_node *callee;
1688 if (TREE_CODE (target) == ADDR_EXPR)
1689 target = TREE_OPERAND (target, 0);
1690 if (TREE_CODE (target) != FUNCTION_DECL)
1691 return NULL;
1692 callee = cgraph_get_node (target);
1693 if (!callee)
1694 return NULL;
1695 ipa_check_create_node_params ();
1697 /* We cannot make edges to inline clones. It is a bug that someone removed
1698 the cgraph node too early. */
1699 gcc_assert (!callee->global.inlined_to);
1701 cgraph_make_edge_direct (ie, callee, delta ? tree_low_cst (delta, 0) : 0);
1702 if (dump_file)
1704 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
1705 "(%s/%i -> %s/%i), for stmt ",
1706 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
1707 cgraph_node_name (ie->caller), ie->caller->uid,
1708 cgraph_node_name (ie->callee), ie->callee->uid);
1709 if (ie->call_stmt)
1710 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
1711 else
1712 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
1714 if (delta)
1716 fprintf (dump_file, " Thunk delta is ");
1717 print_generic_expr (dump_file, delta, 0);
1718 fprintf (dump_file, "\n");
1722 if (ipa_get_cs_argument_count (IPA_EDGE_REF (ie))
1723 != ipa_get_param_count (IPA_NODE_REF (callee)))
1724 ipa_set_called_with_variable_arg (IPA_NODE_REF (callee));
1726 return ie;
1729 /* Try to find a destination for indirect edge IE that corresponds to a simple
1730 call or a call of a member function pointer and where the destination is a
1731 pointer formal parameter described by jump function JFUNC. If it can be
1732 determined, return the newly direct edge, otherwise return NULL. */
1734 static struct cgraph_edge *
1735 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
1736 struct ipa_jump_func *jfunc)
1738 tree target;
1740 if (jfunc->type == IPA_JF_CONST)
1741 target = jfunc->value.constant;
1742 else if (jfunc->type == IPA_JF_CONST_MEMBER_PTR)
1743 target = jfunc->value.member_cst.pfn;
1744 else
1745 return NULL;
1747 return ipa_make_edge_direct_to_target (ie, target, NULL_TREE);
1750 /* Try to find a destination for indirect edge IE that corresponds to a
1751 virtual call based on a formal parameter which is described by jump
1752 function JFUNC and if it can be determined, make it direct and return the
1753 direct edge. Otherwise, return NULL. */
1755 static struct cgraph_edge *
1756 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
1757 struct ipa_jump_func *jfunc)
1759 tree binfo, type, target, delta;
1760 HOST_WIDE_INT token;
1762 if (jfunc->type == IPA_JF_KNOWN_TYPE)
1763 binfo = jfunc->value.base_binfo;
1764 else
1765 return NULL;
1767 if (!binfo)
1768 return NULL;
1770 token = ie->indirect_info->otr_token;
1771 type = ie->indirect_info->otr_type;
1772 binfo = get_binfo_at_offset (binfo, ie->indirect_info->anc_offset, type);
1773 if (binfo)
1774 target = gimple_get_virt_method_for_binfo (token, binfo, &delta, true);
1775 else
1776 return NULL;
1778 if (target)
1779 return ipa_make_edge_direct_to_target (ie, target, delta);
1780 else
1781 return NULL;
1784 /* Update the param called notes associated with NODE when CS is being inlined,
1785 assuming NODE is (potentially indirectly) inlined into CS->callee.
1786 Moreover, if the callee is discovered to be constant, create a new cgraph
1787 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
1788 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
1790 static bool
1791 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
1792 struct cgraph_node *node,
1793 VEC (cgraph_edge_p, heap) **new_edges)
1795 struct ipa_edge_args *top;
1796 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
1797 bool res = false;
1799 ipa_check_create_edge_args ();
1800 top = IPA_EDGE_REF (cs);
1802 for (ie = node->indirect_calls; ie; ie = next_ie)
1804 struct cgraph_indirect_call_info *ici = ie->indirect_info;
1805 struct ipa_jump_func *jfunc;
1807 next_ie = ie->next_callee;
1808 if (bitmap_bit_p (iinlining_processed_edges, ie->uid))
1809 continue;
1811 /* If we ever use indirect edges for anything other than indirect
1812 inlining, we will need to skip those with negative param_indices. */
1813 if (ici->param_index == -1)
1814 continue;
1816 /* We must check range due to calls with variable number of arguments: */
1817 if (ici->param_index >= ipa_get_cs_argument_count (top))
1819 bitmap_set_bit (iinlining_processed_edges, ie->uid);
1820 continue;
1823 jfunc = ipa_get_ith_jump_func (top, ici->param_index);
1824 if (jfunc->type == IPA_JF_PASS_THROUGH
1825 && jfunc->value.pass_through.operation == NOP_EXPR)
1826 ici->param_index = jfunc->value.pass_through.formal_id;
1827 else if (jfunc->type == IPA_JF_ANCESTOR)
1829 ici->param_index = jfunc->value.ancestor.formal_id;
1830 ici->anc_offset += jfunc->value.ancestor.offset;
1832 else
1833 /* Either we can find a destination for this edge now or never. */
1834 bitmap_set_bit (iinlining_processed_edges, ie->uid);
1836 if (ici->polymorphic)
1837 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc);
1838 else
1839 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc);
1841 if (new_direct_edge)
1843 new_direct_edge->indirect_inlining_edge = 1;
1844 if (new_edges)
1846 VEC_safe_push (cgraph_edge_p, heap, *new_edges,
1847 new_direct_edge);
1848 top = IPA_EDGE_REF (cs);
1849 res = true;
1854 return res;
1857 /* Recursively traverse subtree of NODE (including node) made of inlined
1858 cgraph_edges when CS has been inlined and invoke
1859 update_indirect_edges_after_inlining on all nodes and
1860 update_jump_functions_after_inlining on all non-inlined edges that lead out
1861 of this subtree. Newly discovered indirect edges will be added to
1862 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
1863 created. */
1865 static bool
1866 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
1867 struct cgraph_node *node,
1868 VEC (cgraph_edge_p, heap) **new_edges)
1870 struct cgraph_edge *e;
1871 bool res;
1873 res = update_indirect_edges_after_inlining (cs, node, new_edges);
1875 for (e = node->callees; e; e = e->next_callee)
1876 if (!e->inline_failed)
1877 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
1878 else
1879 update_jump_functions_after_inlining (cs, e);
1881 return res;
1884 /* Update jump functions and call note functions on inlining the call site CS.
1885 CS is expected to lead to a node already cloned by
1886 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
1887 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
1888 created. */
1890 bool
1891 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
1892 VEC (cgraph_edge_p, heap) **new_edges)
1894 /* Do nothing if the preparation phase has not been carried out yet
1895 (i.e. during early inlining). */
1896 if (!ipa_node_params_vector)
1897 return false;
1898 gcc_assert (ipa_edge_args_vector);
1900 return propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
1903 /* Frees all dynamically allocated structures that the argument info points
1904 to. */
1906 void
1907 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
1909 if (args->jump_functions)
1910 ggc_free (args->jump_functions);
1912 memset (args, 0, sizeof (*args));
1915 /* Free all ipa_edge structures. */
1917 void
1918 ipa_free_all_edge_args (void)
1920 int i;
1921 struct ipa_edge_args *args;
1923 FOR_EACH_VEC_ELT (ipa_edge_args_t, ipa_edge_args_vector, i, args)
1924 ipa_free_edge_args_substructures (args);
1926 VEC_free (ipa_edge_args_t, gc, ipa_edge_args_vector);
1927 ipa_edge_args_vector = NULL;
1930 /* Frees all dynamically allocated structures that the param info points
1931 to. */
1933 void
1934 ipa_free_node_params_substructures (struct ipa_node_params *info)
1936 free (info->params);
1938 memset (info, 0, sizeof (*info));
1941 /* Free all ipa_node_params structures. */
1943 void
1944 ipa_free_all_node_params (void)
1946 int i;
1947 struct ipa_node_params *info;
1949 FOR_EACH_VEC_ELT (ipa_node_params_t, ipa_node_params_vector, i, info)
1950 ipa_free_node_params_substructures (info);
1952 VEC_free (ipa_node_params_t, heap, ipa_node_params_vector);
1953 ipa_node_params_vector = NULL;
1956 /* Hook that is called by cgraph.c when an edge is removed. */
1958 static void
1959 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
1961 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
1962 if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
1963 <= (unsigned)cs->uid)
1964 return;
1965 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
1968 /* Hook that is called by cgraph.c when a node is removed. */
1970 static void
1971 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
1973 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
1974 if (VEC_length (ipa_node_params_t, ipa_node_params_vector)
1975 <= (unsigned)node->uid)
1976 return;
1977 ipa_free_node_params_substructures (IPA_NODE_REF (node));
1980 /* Helper function to duplicate an array of N bytes located at SRC.  Return a
1981 pointer to the copy, or NULL if SRC is NULL. */
1983 static void *
1984 duplicate_array (void *src, size_t n)
1986 void *p;
1988 if (!src)
1989 return NULL;
1991 p = xmalloc (n);
1992 memcpy (p, src, n);
1993 return p;
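/* Like duplicate_array, but the copy is allocated in GC memory and N is the
   number of ipa_jump_func elements rather than a size in bytes.  Return NULL
   if SRC is NULL.  */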
1996 static struct ipa_jump_func *
1997 duplicate_ipa_jump_func_array (const struct ipa_jump_func * src, size_t n)
1999 struct ipa_jump_func *p;
2001 if (!src)
2002 return NULL;
2004 p = ggc_alloc_vec_ipa_jump_func (n);
2005 memcpy (p, src, n * sizeof (struct ipa_jump_func));
2006 return p;
2009 /* Hook that is called by cgraph.c when an edge is duplicated. */
2011 static void
2012 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
2013 __attribute__((unused)) void *data)
2015 struct ipa_edge_args *old_args, *new_args;
2016 int arg_count;
2018 ipa_check_create_edge_args ();
2020 old_args = IPA_EDGE_REF (src);
2021 new_args = IPA_EDGE_REF (dst);
2023 arg_count = ipa_get_cs_argument_count (old_args);
2024 ipa_set_cs_argument_count (new_args, arg_count);
2025 new_args->jump_functions =
2026 duplicate_ipa_jump_func_array (old_args->jump_functions, arg_count);
2028 if (iinlining_processed_edges
2029 && bitmap_bit_p (iinlining_processed_edges, src->uid))
2030 bitmap_set_bit (iinlining_processed_edges, dst->uid);
2033 /* Hook that is called by cgraph.c when a node is duplicated. */
2035 static void
2036 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
2037 ATTRIBUTE_UNUSED void *data)
2039 struct ipa_node_params *old_info, *new_info;
2040 int param_count, i;
2042 ipa_check_create_node_params ();
2043 old_info = IPA_NODE_REF (src);
2044 new_info = IPA_NODE_REF (dst);
2045 param_count = ipa_get_param_count (old_info);
2047 ipa_set_param_count (new_info, param_count);
2048 new_info->params = (struct ipa_param_descriptor *)
2049 duplicate_array (old_info->params,
2050 sizeof (struct ipa_param_descriptor) * param_count);
2051 for (i = 0; i < param_count; i++)
2052 new_info->params[i].types = VEC_copy (tree, heap,
2053 old_info->params[i].types);
2054 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
2055 new_info->count_scale = old_info->count_scale;
2057 new_info->called_with_var_arguments = old_info->called_with_var_arguments;
2058 new_info->uses_analysis_done = old_info->uses_analysis_done;
2059 new_info->node_enqueued = old_info->node_enqueued;
2063 /* Analyze a function that has been newly added to the call graph. */
2065 static void
2066 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2068 ipa_analyze_node (node);
2071 /* Register our cgraph hooks if they are not already there. */
2073 void
2074 ipa_register_cgraph_hooks (void)
2076 if (!edge_removal_hook_holder)
2077 edge_removal_hook_holder =
2078 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
2079 if (!node_removal_hook_holder)
2080 node_removal_hook_holder =
2081 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
2082 if (!edge_duplication_hook_holder)
2083 edge_duplication_hook_holder =
2084 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
2085 if (!node_duplication_hook_holder)
2086 node_duplication_hook_holder =
2087 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
2088 function_insertion_hook_holder =
2089 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
2092 /* Unregister our cgraph hooks. */
2094 static void
2095 ipa_unregister_cgraph_hooks (void)
2097 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
2098 edge_removal_hook_holder = NULL;
2099 cgraph_remove_node_removal_hook (node_removal_hook_holder);
2100 node_removal_hook_holder = NULL;
2101 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
2102 edge_duplication_hook_holder = NULL;
2103 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
2104 node_duplication_hook_holder = NULL;
2105 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
2106 function_insertion_hook_holder = NULL;
2109 /* Allocate the data structures necessary for indirect inlining. */
2111 void
2112 ipa_create_all_structures_for_iinln (void)
2114 iinlining_processed_edges = BITMAP_ALLOC (NULL);
2117 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2118 longer needed after ipa-cp. */
2120 void
2121 ipa_free_all_structures_after_ipa_cp (void)
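/* When indirect inlining is enabled the edge and node structures are still
   needed by the inliner, so they are only released afterwards by
   ipa_free_all_structures_after_iinln.  */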
2123 if (!flag_indirect_inlining)
2125 ipa_free_all_edge_args ();
2126 ipa_free_all_node_params ();
2127 ipa_unregister_cgraph_hooks ();
2131 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2132 longer needed after indirect inlining. */
2134 void
2135 ipa_free_all_structures_after_iinln (void)
2137 BITMAP_FREE (iinlining_processed_edges);
2139 ipa_free_all_edge_args ();
2140 ipa_free_all_node_params ();
2141 ipa_unregister_cgraph_hooks ();
2144 /* Print the parameter descriptors of the function represented by NODE
2145 to F. */
2147 void
2148 ipa_print_node_params (FILE * f, struct cgraph_node *node)
2150 int i, count;
2151 tree temp;
2152 struct ipa_node_params *info;
2154 if (!node->analyzed)
2155 return;
2156 info = IPA_NODE_REF (node);
2157 fprintf (f, " function %s parameter descriptors:\n",
2158 cgraph_node_name (node));
2159 count = ipa_get_param_count (info);
2160 for (i = 0; i < count; i++)
2162 temp = ipa_get_param (info, i);
2163 if (TREE_CODE (temp) == PARM_DECL)
2164 fprintf (f, " param %d : %s", i,
2165 (DECL_NAME (temp)
2166 ? (*lang_hooks.decl_printable_name) (temp, 2)
2167 : "(unnamed)"));
2168 if (ipa_is_param_used (info, i))
2169 fprintf (f, " used");
2170 fprintf (f, "\n");
2174 /* Print the parameter descriptors of all functions in the
2175 callgraph to F. */
2177 void
2178 ipa_print_all_params (FILE * f)
2180 struct cgraph_node *node;
2182 fprintf (f, "\nFunction parameters:\n");
2183 for (node = cgraph_nodes; node; node = node->next)
2184 ipa_print_node_params (f, node);
2187 /* Return a heap allocated vector containing formal parameters of FNDECL. */
2189 VEC(tree, heap) *
2190 ipa_get_vector_of_formal_parms (tree fndecl)
2192 VEC(tree, heap) *args;
2193 int count;
2194 tree parm;
2196 count = count_formal_params_1 (fndecl);
2197 args = VEC_alloc (tree, heap, count);
2198 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
2199 VEC_quick_push (tree, args, parm);
2201 return args;
2204 /* Return a heap allocated vector containing types of formal parameters of
2205 function type FNTYPE. */
2207 static inline VEC(tree, heap) *
2208 get_vector_of_formal_parm_types (tree fntype)
2210 VEC(tree, heap) *types;
2211 int count = 0;
2212 tree t;
2214 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2215 count++;
2217 types = VEC_alloc (tree, heap, count);
2218 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2219 VEC_quick_push (tree, types, TREE_VALUE (t));
2221 return types;
2224 /* Modify the function declaration FNDECL and its type according to the plan in
2225 ADJUSTMENTS.  Also set the base field of each individual adjustment to the
2226 actual parameter being modified, which is determined by the adjustment's
2227 base_index field. */
2229 void
2230 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
2231 const char *synth_parm_prefix)
2233 VEC(tree, heap) *oparms, *otypes;
2234 tree orig_type, new_type = NULL;
2235 tree old_arg_types, t, new_arg_types = NULL;
2236 tree parm, *link = &DECL_ARGUMENTS (fndecl);
2237 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2238 tree new_reversed = NULL;
2239 bool care_for_types, last_parm_void;
2241 if (!synth_parm_prefix)
2242 synth_parm_prefix = "SYNTH";
2244 oparms = ipa_get_vector_of_formal_parms (fndecl);
2245 orig_type = TREE_TYPE (fndecl);
2246 old_arg_types = TYPE_ARG_TYPES (orig_type);
2248 /* The following test is an ugly hack: some functions simply don't have any
2249 arguments in their type.  This is probably a bug, but well... */
2250 care_for_types = (old_arg_types != NULL_TREE);
2251 if (care_for_types)
2253 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
2254 == void_type_node);
2255 otypes = get_vector_of_formal_parm_types (orig_type);
2256 if (last_parm_void)
2257 gcc_assert (VEC_length (tree, oparms) + 1 == VEC_length (tree, otypes));
2258 else
2259 gcc_assert (VEC_length (tree, oparms) == VEC_length (tree, otypes));
2261 else
2263 last_parm_void = false;
2264 otypes = NULL;
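/* Walk the adjustments in order, rebuilding both the DECL_ARGUMENTS chain of
   FNDECL (through LINK) and, when CARE_FOR_TYPES, a new TYPE_ARG_TYPES list.
   The type list is constructed in reverse and reversed again below.  */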
2267 for (i = 0; i < len; i++)
2269 struct ipa_parm_adjustment *adj;
2270 gcc_assert (link);
2272 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2273 parm = VEC_index (tree, oparms, adj->base_index);
2274 adj->base = parm;
2276 if (adj->copy_param)
2278 if (care_for_types)
2279 new_arg_types = tree_cons (NULL_TREE, VEC_index (tree, otypes,
2280 adj->base_index),
2281 new_arg_types);
2282 *link = parm;
2283 link = &DECL_CHAIN (parm);
2285 else if (!adj->remove_param)
2287 tree new_parm;
2288 tree ptype;
2290 if (adj->by_ref)
2291 ptype = build_pointer_type (adj->type);
2292 else
2293 ptype = adj->type;
2295 if (care_for_types)
2296 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
2298 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
2299 ptype);
2300 DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);
2302 DECL_ARTIFICIAL (new_parm) = 1;
2303 DECL_ARG_TYPE (new_parm) = ptype;
2304 DECL_CONTEXT (new_parm) = fndecl;
2305 TREE_USED (new_parm) = 1;
2306 DECL_IGNORED_P (new_parm) = 1;
2307 layout_decl (new_parm, 0);
2309 add_referenced_var (new_parm);
2310 mark_sym_for_renaming (new_parm);
2311 adj->base = parm;
2312 adj->reduction = new_parm;
2314 *link = new_parm;
2316 link = &DECL_CHAIN (new_parm);
2320 *link = NULL_TREE;
2322 if (care_for_types)
2324 new_reversed = nreverse (new_arg_types);
2325 if (last_parm_void)
2327 if (new_reversed)
2328 TREE_CHAIN (new_arg_types) = void_list_node;
2329 else
2330 new_reversed = void_list_node;
2334 /* Use copy_node to preserve as much as possible from the original type
2335 (debug info, attribute lists etc.).
2336 The exception is METHOD_TYPEs, which must have a THIS argument.
2337 When we are asked to remove it, we need to build a new FUNCTION_TYPE
2338 instead. */
2339 if (TREE_CODE (orig_type) != METHOD_TYPE
2340 || (VEC_index (ipa_parm_adjustment_t, adjustments, 0)->copy_param
2341 && VEC_index (ipa_parm_adjustment_t, adjustments, 0)->base_index == 0))
2343 new_type = build_distinct_type_copy (orig_type);
2344 TYPE_ARG_TYPES (new_type) = new_reversed;
2346 else
2348 new_type
2349 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
2350 new_reversed));
2351 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
2352 DECL_VINDEX (fndecl) = NULL_TREE;
2355 /* When signature changes, we need to clear builtin info. */
2356 if (DECL_BUILT_IN (fndecl))
2358 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
2359 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
2362 /* This is a new type, not a copy of an old type. Need to reassociate
2363 variants. We can handle everything except the main variant lazily. */
2364 t = TYPE_MAIN_VARIANT (orig_type);
2365 if (orig_type != t)
2367 TYPE_MAIN_VARIANT (new_type) = t;
2368 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
2369 TYPE_NEXT_VARIANT (t) = new_type;
2371 else
2373 TYPE_MAIN_VARIANT (new_type) = new_type;
2374 TYPE_NEXT_VARIANT (new_type) = NULL;
2377 TREE_TYPE (fndecl) = new_type;
2378 DECL_VIRTUAL_P (fndecl) = 0;
2379 if (otypes)
2380 VEC_free (tree, heap, otypes);
2381 VEC_free (tree, heap, oparms);
2384 /* Modify the actual arguments of the call statement STMT as indicated in
2385 ADJUSTMENTS.  If this is a directly recursive call, CS must be NULL;
2386 otherwise it must be the corresponding call graph edge. */
2388 void
2389 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
2390 ipa_parm_adjustment_vec adjustments)
2392 VEC(tree, heap) *vargs;
2393 gimple new_stmt;
2394 gimple_stmt_iterator gsi;
2395 tree callee_decl;
2396 int i, len;
2398 len = VEC_length (ipa_parm_adjustment_t, adjustments);
2399 vargs = VEC_alloc (tree, heap, len);
2401 gsi = gsi_for_stmt (stmt);
2402 for (i = 0; i < len; i++)
2404 struct ipa_parm_adjustment *adj;
2406 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2408 if (adj->copy_param)
2410 tree arg = gimple_call_arg (stmt, adj->base_index);
2412 VEC_quick_push (tree, vargs, arg);
2414 else if (!adj->remove_param)
2416 tree expr, base, off;
2417 location_t loc;
2419 /* When we create a new parameter out of the value of the old one, we can
2420 do the following kinds of transformations:
2422 - A scalar passed by reference is converted to a scalar passed by
2423 value. (adj->by_ref is false and the type of the original
2424 actual argument is a pointer to a scalar).
2426 - A part of an aggregate is passed instead of the whole aggregate.
2427 The part can be passed either by value or by reference, this is
2428 determined by value of adj->by_ref. Moreover, the code below
2429 handles both situations when the original aggregate is passed by
2430 value (its type is not a pointer) and when it is passed by
2431 reference (it is a pointer to an aggregate).
2433 When the new argument is passed by reference (adj->by_ref is true)
2434 it must be a part of an aggregate and therefore we form it by
2435 simply taking the address of a reference inside the original
2436 aggregate. */
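/* For instance (a hypothetical example, not taken from a particular caller):
   if the original argument is a struct S *p and the new parameter is the
   field at byte offset 8 of *p passed by value, the code below builds
   MEM_REF (p, 8); if that field were instead passed by reference, the
   address &MEM_REF (p, 8) would be taken.  When the original argument is not
   already a pointer, it is first decomposed into an invariant base address
   and a constant byte offset.  */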
2438 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
2439 base = gimple_call_arg (stmt, adj->base_index);
2440 loc = EXPR_LOCATION (base);
2442 if (TREE_CODE (base) != ADDR_EXPR
2443 && POINTER_TYPE_P (TREE_TYPE (base)))
2444 off = build_int_cst (adj->alias_ptr_type,
2445 adj->offset / BITS_PER_UNIT);
2446 else
2448 HOST_WIDE_INT base_offset;
2449 tree prev_base;
2451 if (TREE_CODE (base) == ADDR_EXPR)
2452 base = TREE_OPERAND (base, 0);
2453 prev_base = base;
2454 base = get_addr_base_and_unit_offset (base, &base_offset);
2455 /* Aggregate arguments can have non-invariant addresses. */
2456 if (!base)
2458 base = build_fold_addr_expr (prev_base);
2459 off = build_int_cst (adj->alias_ptr_type,
2460 adj->offset / BITS_PER_UNIT);
2462 else if (TREE_CODE (base) == MEM_REF)
2464 off = build_int_cst (adj->alias_ptr_type,
2465 base_offset
2466 + adj->offset / BITS_PER_UNIT);
2467 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
2468 off, 0);
2469 base = TREE_OPERAND (base, 0);
2471 else
2473 off = build_int_cst (adj->alias_ptr_type,
2474 base_offset
2475 + adj->offset / BITS_PER_UNIT);
2476 base = build_fold_addr_expr (base);
2480 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
2481 if (adj->by_ref)
2482 expr = build_fold_addr_expr (expr);
2484 expr = force_gimple_operand_gsi (&gsi, expr,
2485 adj->by_ref
2486 || is_gimple_reg_type (adj->type),
2487 NULL, true, GSI_SAME_STMT);
2488 VEC_quick_push (tree, vargs, expr);
2492 if (dump_file && (dump_flags & TDF_DETAILS))
2494 fprintf (dump_file, "replacing stmt:");
2495 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
2498 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
2499 new_stmt = gimple_build_call_vec (callee_decl, vargs);
2500 VEC_free (tree, heap, vargs);
2501 if (gimple_call_lhs (stmt))
2502 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
2504 gimple_set_block (new_stmt, gimple_block (stmt));
2505 if (gimple_has_location (stmt))
2506 gimple_set_location (new_stmt, gimple_location (stmt));
2507 gimple_call_copy_flags (new_stmt, stmt);
2508 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
2510 if (dump_file && (dump_flags & TDF_DETAILS))
2512 fprintf (dump_file, "with stmt:");
2513 print_gimple_stmt (dump_file, new_stmt, 0, 0);
2514 fprintf (dump_file, "\n");
2516 gsi_replace (&gsi, new_stmt, true);
2517 if (cs)
2518 cgraph_set_call_stmt (cs, new_stmt);
2519 update_ssa (TODO_update_ssa);
2520 free_dominance_info (CDI_DOMINATORS);
2523 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
2525 static bool
2526 index_in_adjustments_multiple_times_p (int base_index,
2527 ipa_parm_adjustment_vec adjustments)
2529 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2530 bool one = false;
2532 for (i = 0; i < len; i++)
2534 struct ipa_parm_adjustment *adj;
2535 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2537 if (adj->base_index == base_index)
2539 if (one)
2540 return true;
2541 else
2542 one = true;
2545 return false;
2549 /* Return adjustments that should have the same effect on function parameters
2550 and call arguments as if they were first changed according to adjustments in
2551 INNER and then by adjustments in OUTER. */
2553 ipa_parm_adjustment_vec
2554 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
2555 ipa_parm_adjustment_vec outer)
2557 int i, outlen = VEC_length (ipa_parm_adjustment_t, outer);
2558 int inlen = VEC_length (ipa_parm_adjustment_t, inner);
2559 int removals = 0;
2560 ipa_parm_adjustment_vec adjustments, tmp;
2562 tmp = VEC_alloc (ipa_parm_adjustment_t, heap, inlen);
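/* First collect the inner adjustments that are not removals into TMP, so that
   the base_index of an outer adjustment can be used to index the inner
   adjustment it composes with.  */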
2563 for (i = 0; i < inlen; i++)
2565 struct ipa_parm_adjustment *n;
2566 n = VEC_index (ipa_parm_adjustment_t, inner, i);
2568 if (n->remove_param)
2569 removals++;
2570 else
2571 VEC_quick_push (ipa_parm_adjustment_t, tmp, n);
2574 adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, outlen + removals);
2575 for (i = 0; i < outlen; i++)
2577 struct ipa_parm_adjustment *r;
2578 struct ipa_parm_adjustment *out = VEC_index (ipa_parm_adjustment_t,
2579 outer, i);
2580 struct ipa_parm_adjustment *in = VEC_index (ipa_parm_adjustment_t, tmp,
2581 out->base_index);
2583 gcc_assert (!in->remove_param);
2584 if (out->remove_param)
2586 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
2588 r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
2589 memset (r, 0, sizeof (*r));
2590 r->remove_param = true;
2592 continue;
2595 r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
2596 memset (r, 0, sizeof (*r));
2597 r->base_index = in->base_index;
2598 r->type = out->type;
2600 /* FIXME: Create nonlocal value too. */
2602 if (in->copy_param && out->copy_param)
2603 r->copy_param = true;
2604 else if (in->copy_param)
2605 r->offset = out->offset;
2606 else if (out->copy_param)
2607 r->offset = in->offset;
2608 else
2609 r->offset = in->offset + out->offset;
2612 for (i = 0; i < inlen; i++)
2614 struct ipa_parm_adjustment *n = VEC_index (ipa_parm_adjustment_t,
2615 inner, i);
2617 if (n->remove_param)
2618 VEC_quick_push (ipa_parm_adjustment_t, adjustments, n);
2621 VEC_free (ipa_parm_adjustment_t, heap, tmp);
2622 return adjustments;
2625 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a human-friendly
2626 way, assuming they are meant to be applied to FNDECL. */
2628 void
2629 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
2630 tree fndecl)
2632 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2633 bool first = true;
2634 VEC(tree, heap) *parms = ipa_get_vector_of_formal_parms (fndecl);
2636 fprintf (file, "IPA param adjustments: ");
2637 for (i = 0; i < len; i++)
2639 struct ipa_parm_adjustment *adj;
2640 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2642 if (!first)
2643 fprintf (file, " ");
2644 else
2645 first = false;
2647 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
2648 print_generic_expr (file, VEC_index (tree, parms, adj->base_index), 0);
2649 if (adj->base)
2651 fprintf (file, ", base: ");
2652 print_generic_expr (file, adj->base, 0);
2654 if (adj->reduction)
2656 fprintf (file, ", reduction: ");
2657 print_generic_expr (file, adj->reduction, 0);
2659 if (adj->new_ssa_base)
2661 fprintf (file, ", new_ssa_base: ");
2662 print_generic_expr (file, adj->new_ssa_base, 0);
2665 if (adj->copy_param)
2666 fprintf (file, ", copy_param");
2667 else if (adj->remove_param)
2668 fprintf (file, ", remove_param");
2669 else
2670 fprintf (file, ", offset %li", (long) adj->offset);
2671 if (adj->by_ref)
2672 fprintf (file, ", by_ref");
2673 print_node_brief (file, ", type: ", adj->type, 0);
2674 fprintf (file, "\n");
2676 VEC_free (tree, heap, parms);
2679 /* Stream out jump function JUMP_FUNC to OB. */
2681 static void
2682 ipa_write_jump_function (struct output_block *ob,
2683 struct ipa_jump_func *jump_func)
2685 lto_output_uleb128_stream (ob->main_stream,
2686 jump_func->type);
2688 switch (jump_func->type)
2690 case IPA_JF_UNKNOWN:
2691 break;
2692 case IPA_JF_KNOWN_TYPE:
2693 lto_output_tree (ob, jump_func->value.base_binfo, true);
2694 break;
2695 case IPA_JF_CONST:
2696 lto_output_tree (ob, jump_func->value.constant, true);
2697 break;
2698 case IPA_JF_PASS_THROUGH:
2699 lto_output_tree (ob, jump_func->value.pass_through.operand, true);
2700 lto_output_uleb128_stream (ob->main_stream,
2701 jump_func->value.pass_through.formal_id);
2702 lto_output_uleb128_stream (ob->main_stream,
2703 jump_func->value.pass_through.operation);
2704 break;
2705 case IPA_JF_ANCESTOR:
2706 lto_output_uleb128_stream (ob->main_stream,
2707 jump_func->value.ancestor.offset);
2708 lto_output_tree (ob, jump_func->value.ancestor.type, true);
2709 lto_output_uleb128_stream (ob->main_stream,
2710 jump_func->value.ancestor.formal_id);
2711 break;
2712 case IPA_JF_CONST_MEMBER_PTR:
2713 lto_output_tree (ob, jump_func->value.member_cst.pfn, true);
2714 lto_output_tree (ob, jump_func->value.member_cst.delta, false);
2715 break;
2719 /* Read in jump function JUMP_FUNC from IB. */
2721 static void
2722 ipa_read_jump_function (struct lto_input_block *ib,
2723 struct ipa_jump_func *jump_func,
2724 struct data_in *data_in)
2726 jump_func->type = (enum jump_func_type) lto_input_uleb128 (ib);
2728 switch (jump_func->type)
2730 case IPA_JF_UNKNOWN:
2731 break;
2732 case IPA_JF_KNOWN_TYPE:
2733 jump_func->value.base_binfo = lto_input_tree (ib, data_in);
2734 break;
2735 case IPA_JF_CONST:
2736 jump_func->value.constant = lto_input_tree (ib, data_in);
2737 break;
2738 case IPA_JF_PASS_THROUGH:
2739 jump_func->value.pass_through.operand = lto_input_tree (ib, data_in);
2740 jump_func->value.pass_through.formal_id = lto_input_uleb128 (ib);
2741 jump_func->value.pass_through.operation = (enum tree_code) lto_input_uleb128 (ib);
2742 break;
2743 case IPA_JF_ANCESTOR:
2744 jump_func->value.ancestor.offset = lto_input_uleb128 (ib);
2745 jump_func->value.ancestor.type = lto_input_tree (ib, data_in);
2746 jump_func->value.ancestor.formal_id = lto_input_uleb128 (ib);
2747 break;
2748 case IPA_JF_CONST_MEMBER_PTR:
2749 jump_func->value.member_cst.pfn = lto_input_tree (ib, data_in);
2750 jump_func->value.member_cst.delta = lto_input_tree (ib, data_in);
2751 break;
2755 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
2756 relevant to indirect inlining to OB. */
2758 static void
2759 ipa_write_indirect_edge_info (struct output_block *ob,
2760 struct cgraph_edge *cs)
2762 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2763 struct bitpack_d bp;
2765 lto_output_sleb128_stream (ob->main_stream, ii->param_index);
2766 lto_output_sleb128_stream (ob->main_stream, ii->anc_offset);
2767 bp = bitpack_create (ob->main_stream);
2768 bp_pack_value (&bp, ii->polymorphic, 1);
2769 lto_output_bitpack (&bp);
2771 if (ii->polymorphic)
2773 lto_output_sleb128_stream (ob->main_stream, ii->otr_token);
2774 lto_output_tree (ob, ii->otr_type, true);
2778 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
2779 relevant to indirect inlining from IB. */
2781 static void
2782 ipa_read_indirect_edge_info (struct lto_input_block *ib,
2783 struct data_in *data_in ATTRIBUTE_UNUSED,
2784 struct cgraph_edge *cs)
2786 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2787 struct bitpack_d bp;
2789 ii->param_index = (int) lto_input_sleb128 (ib);
2790 ii->anc_offset = (HOST_WIDE_INT) lto_input_sleb128 (ib);
2791 bp = lto_input_bitpack (ib);
2792 ii->polymorphic = bp_unpack_value (&bp, 1);
2793 if (ii->polymorphic)
2795 ii->otr_token = (HOST_WIDE_INT) lto_input_sleb128 (ib);
2796 ii->otr_type = lto_input_tree (ib, data_in);
2800 /* Stream out NODE info to OB. */
2802 static void
2803 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
2805 int node_ref;
2806 lto_cgraph_encoder_t encoder;
2807 struct ipa_node_params *info = IPA_NODE_REF (node);
2808 int j;
2809 struct cgraph_edge *e;
2810 struct bitpack_d bp;
2812 encoder = ob->decl_state->cgraph_node_encoder;
2813 node_ref = lto_cgraph_encoder_encode (encoder, node);
2814 lto_output_uleb128_stream (ob->main_stream, node_ref);
2816 bp = bitpack_create (ob->main_stream);
2817 bp_pack_value (&bp, info->called_with_var_arguments, 1);
2818 gcc_assert (info->uses_analysis_done
2819 || ipa_get_param_count (info) == 0);
2820 gcc_assert (!info->node_enqueued);
2821 gcc_assert (!info->ipcp_orig_node);
2822 for (j = 0; j < ipa_get_param_count (info); j++)
2823 bp_pack_value (&bp, info->params[j].used, 1);
2824 lto_output_bitpack (&bp);
2825 for (e = node->callees; e; e = e->next_callee)
2827 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2829 lto_output_uleb128_stream (ob->main_stream,
2830 ipa_get_cs_argument_count (args));
2831 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
2832 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
2834 for (e = node->indirect_calls; e; e = e->next_callee)
2835 ipa_write_indirect_edge_info (ob, e);
2838 /* Stream in NODE info from IB. */
2840 static void
2841 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
2842 struct data_in *data_in)
2844 struct ipa_node_params *info = IPA_NODE_REF (node);
2845 int k;
2846 struct cgraph_edge *e;
2847 struct bitpack_d bp;
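/* The layout read here must mirror ipa_write_node_info exactly: a bitpack
   with the variable-argument flag and one "used" bit per parameter, then for
   each outgoing call edge its argument count followed by that many jump
   functions, and finally the info of the indirect call edges.  */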
2849 ipa_initialize_node_params (node);
2851 bp = lto_input_bitpack (ib);
2852 info->called_with_var_arguments = bp_unpack_value (&bp, 1);
2853 if (ipa_get_param_count (info) != 0)
2854 info->uses_analysis_done = true;
2855 info->node_enqueued = false;
2856 for (k = 0; k < ipa_get_param_count (info); k++)
2857 info->params[k].used = bp_unpack_value (&bp, 1);
2858 for (e = node->callees; e; e = e->next_callee)
2860 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2861 int count = lto_input_uleb128 (ib);
2863 ipa_set_cs_argument_count (args, count);
2864 if (!count)
2865 continue;
2867 args->jump_functions = ggc_alloc_cleared_vec_ipa_jump_func
2868 (ipa_get_cs_argument_count (args));
2869 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
2870 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), data_in);
2872 for (e = node->indirect_calls; e; e = e->next_callee)
2873 ipa_read_indirect_edge_info (ib, data_in, e);
2876 /* Write jump functions for nodes in SET. */
2878 void
2879 ipa_prop_write_jump_functions (cgraph_node_set set)
2881 struct cgraph_node *node;
2882 struct output_block *ob = create_output_block (LTO_section_jump_functions);
2883 unsigned int count = 0;
2884 cgraph_node_set_iterator csi;
2886 ob->cgraph_node = NULL;
2888 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
2890 node = csi_node (csi);
2891 if (node->analyzed && IPA_NODE_REF (node) != NULL)
2892 count++;
2895 lto_output_uleb128_stream (ob->main_stream, count);
2897 /* Process all of the functions. */
2898 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
2900 node = csi_node (csi);
2901 if (node->analyzed && IPA_NODE_REF (node) != NULL)
2902 ipa_write_node_info (ob, node);
2904 lto_output_1_stream (ob->main_stream, 0);
2905 produce_asm (ob, NULL);
2906 destroy_output_block (ob);
2909 /* Read section in file FILE_DATA of length LEN with data DATA. */
2911 static void
2912 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
2913 size_t len)
2915 const struct lto_function_header *header =
2916 (const struct lto_function_header *) data;
2917 const int32_t cfg_offset = sizeof (struct lto_function_header);
2918 const int32_t main_offset = cfg_offset + header->cfg_size;
2919 const int32_t string_offset = main_offset + header->main_size;
2920 struct data_in *data_in;
2921 struct lto_input_block ib_main;
2922 unsigned int i;
2923 unsigned int count;
2925 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
2926 header->main_size);
2928 data_in =
2929 lto_data_in_create (file_data, (const char *) data + string_offset,
2930 header->string_size, NULL);
2931 count = lto_input_uleb128 (&ib_main);
2933 for (i = 0; i < count; i++)
2935 unsigned int index;
2936 struct cgraph_node *node;
2937 lto_cgraph_encoder_t encoder;
2939 index = lto_input_uleb128 (&ib_main);
2940 encoder = file_data->cgraph_node_encoder;
2941 node = lto_cgraph_encoder_deref (encoder, index);
2942 gcc_assert (node->analyzed);
2943 ipa_read_node_info (&ib_main, node, data_in);
2945 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
2946 len);
2947 lto_data_in_delete (data_in);
2950 /* Read ipcp jump functions. */
2952 void
2953 ipa_prop_read_jump_functions (void)
2955 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
2956 struct lto_file_decl_data *file_data;
2957 unsigned int j = 0;
2959 ipa_check_create_node_params ();
2960 ipa_check_create_edge_args ();
2961 ipa_register_cgraph_hooks ();
2963 while ((file_data = file_data_vec[j++]))
2965 size_t len;
2966 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
2968 if (data)
2969 ipa_prop_read_section (file_data, data, len);
2973 /* After merging units, we can get a mismatch in argument counts.
2974 Decl merging might also have rendered parameter lists obsolete.
2975 Compute the called_with_var_arguments info as well. */
2977 void
2978 ipa_update_after_lto_read (void)
2980 struct cgraph_node *node;
2981 struct cgraph_edge *cs;
2983 ipa_check_create_node_params ();
2984 ipa_check_create_edge_args ();
2986 for (node = cgraph_nodes; node; node = node->next)
2987 if (node->analyzed)
2988 ipa_initialize_node_params (node);
2990 for (node = cgraph_nodes; node; node = node->next)
2991 if (node->analyzed)
2992 for (cs = node->callees; cs; cs = cs->next_callee)
2994 if (ipa_get_cs_argument_count (IPA_EDGE_REF (cs))
2995 != ipa_get_param_count (IPA_NODE_REF (cs->callee)))
2996 ipa_set_called_with_variable_arg (IPA_NODE_REF (cs->callee));
3000 /* Given the jump function JFUNC, compute the lattice LAT that describes the
3001 value coming down the callsite. INFO describes the caller node so that
3002 pass-through jump functions can be evaluated. */
3003 void
3004 ipa_lattice_from_jfunc (struct ipa_node_params *info, struct ipcp_lattice *lat,
3005 struct ipa_jump_func *jfunc)
3007 if (jfunc->type == IPA_JF_CONST)
3009 lat->type = IPA_CONST_VALUE;
3010 lat->constant = jfunc->value.constant;
3012 else if (jfunc->type == IPA_JF_PASS_THROUGH)
3014 struct ipcp_lattice *caller_lat;
3015 tree cst;
3017 caller_lat = ipa_get_lattice (info, jfunc->value.pass_through.formal_id);
3018 lat->type = caller_lat->type;
3019 if (caller_lat->type != IPA_CONST_VALUE)
3020 return;
3021 cst = caller_lat->constant;
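/* An arithmetic pass-through applies the recorded binary operation to the
   caller's constant and the recorded operand; comparison operations yield a
   boolean result.  If folding does not produce an interprocedural invariant,
   the lattice drops to bottom.  */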
3023 if (jfunc->value.pass_through.operation != NOP_EXPR)
3025 tree restype;
3026 if (TREE_CODE_CLASS (jfunc->value.pass_through.operation)
3027 == tcc_comparison)
3028 restype = boolean_type_node;
3029 else
3030 restype = TREE_TYPE (cst);
3031 cst = fold_binary (jfunc->value.pass_through.operation,
3032 restype, cst, jfunc->value.pass_through.operand);
3034 if (!cst || !is_gimple_ip_invariant (cst))
3035 lat->type = IPA_BOTTOM;
3036 lat->constant = cst;
3038 else if (jfunc->type == IPA_JF_ANCESTOR)
3040 struct ipcp_lattice *caller_lat;
3041 tree t;
3043 caller_lat = ipa_get_lattice (info, jfunc->value.ancestor.formal_id);
3044 lat->type = caller_lat->type;
3045 if (caller_lat->type != IPA_CONST_VALUE)
3046 return;
3047 if (TREE_CODE (caller_lat->constant) != ADDR_EXPR)
3049 /* This can happen when the constant is a NULL pointer. */
3050 lat->type = IPA_BOTTOM;
3051 return;
3053 t = TREE_OPERAND (caller_lat->constant, 0);
3054 t = build_ref_for_offset (EXPR_LOCATION (t), t,
3055 jfunc->value.ancestor.offset,
3056 jfunc->value.ancestor.type, NULL, false);
3057 lat->constant = build_fold_addr_expr (t);
3059 else
3060 lat->type = IPA_BOTTOM;