gcc/ipa-prop.c
1 /* Interprocedural analyses.
2 Copyright (C) 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree.h"
25 #include "langhooks.h"
26 #include "ggc.h"
27 #include "target.h"
28 #include "cgraph.h"
29 #include "ipa-prop.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-inline.h"
33 #include "gimple.h"
34 #include "flags.h"
35 #include "timevar.h"
36 #include "flags.h"
37 #include "diagnostic.h"
38 #include "tree-pretty-print.h"
39 #include "gimple-pretty-print.h"
40 #include "lto-streamer.h"
43 /* Intermediate information about a parameter that is only useful during the
44 run of ipa_analyze_node and is not kept afterwards. */
struct param_analysis_info
{
  bool modified;
  bitmap visited_statements;
};
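/* MODIFIED is a cache flag set once the parameter is known to be potentially
   modified before the analyzed statement; VISITED_STATEMENTS remembers which
   statements walk_aliased_vdefs has already examined for this parameter, so
   repeated queries in is_parm_modified_before_call do not rescan them.  */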
52 /* Vector where the parameter infos are actually stored. */
53 VEC (ipa_node_params_t, heap) *ipa_node_params_vector;
/* Vector where the argument infos of call graph edges are actually stored. */
55 VEC (ipa_edge_args_t, gc) *ipa_edge_args_vector;
57 /* Bitmap with all UIDs of call graph edges that have been already processed
58 by indirect inlining. */
59 static bitmap iinlining_processed_edges;
61 /* Holders of ipa cgraph hooks: */
62 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
63 static struct cgraph_node_hook_list *node_removal_hook_holder;
64 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
65 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
67 /* Add cgraph NODE described by INFO to the worklist WL regardless of whether
68 it is in one or not. It should almost never be used directly, as opposed to
69 ipa_push_func_to_list. */
71 void
72 ipa_push_func_to_list_1 (struct ipa_func_list **wl,
73 struct cgraph_node *node,
74 struct ipa_node_params *info)
76 struct ipa_func_list *temp;
78 info->node_enqueued = 1;
79 temp = XCNEW (struct ipa_func_list);
80 temp->node = node;
81 temp->next = *wl;
82 *wl = temp;
85 /* Initialize worklist to contain all functions. */
87 struct ipa_func_list *
88 ipa_init_func_list (void)
90 struct cgraph_node *node;
91 struct ipa_func_list * wl;
93 wl = NULL;
94 for (node = cgraph_nodes; node; node = node->next)
95 if (node->analyzed)
97 struct ipa_node_params *info = IPA_NODE_REF (node);
98 /* Unreachable nodes should have been eliminated before ipcp and
99 inlining. */
100 gcc_assert (node->needed || node->reachable);
101 ipa_push_func_to_list_1 (&wl, node, info);
104 return wl;
107 /* Remove a function from the worklist WL and return it. */
109 struct cgraph_node *
110 ipa_pop_func_from_list (struct ipa_func_list **wl)
112 struct ipa_node_params *info;
113 struct ipa_func_list *first;
114 struct cgraph_node *node;
116 first = *wl;
117 *wl = (*wl)->next;
118 node = first->node;
119 free (first);
121 info = IPA_NODE_REF (node);
122 info->node_enqueued = 0;
123 return node;
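/* A quick illustration of how the worklist above is meant to be used (a
   sketch only, not lifted from any particular caller):

     struct ipa_func_list *wl = ipa_init_func_list ();
     while (wl)
       {
         struct cgraph_node *node = ipa_pop_func_from_list (&wl);
         ... process NODE, re-queueing nodes with ipa_push_func_to_list
             when necessary ...
       }  */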
126 /* Return index of the formal whose tree is PTREE in function which corresponds
127 to INFO. */
129 static int
130 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
132 int i, count;
134 count = ipa_get_param_count (info);
135 for (i = 0; i < count; i++)
if (ipa_get_param (info, i) == ptree)
137 return i;
139 return -1;
142 /* Populate the param_decl field in parameter descriptors of INFO that
143 corresponds to NODE. */
145 static void
146 ipa_populate_param_decls (struct cgraph_node *node,
147 struct ipa_node_params *info)
149 tree fndecl;
150 tree fnargs;
151 tree parm;
152 int param_num;
154 fndecl = node->decl;
155 fnargs = DECL_ARGUMENTS (fndecl);
156 param_num = 0;
157 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
159 info->params[param_num].decl = parm;
160 param_num++;
164 /* Return how many formal parameters FNDECL has. */
166 static inline int
167 count_formal_params_1 (tree fndecl)
169 tree parm;
170 int count = 0;
172 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
173 count++;
175 return count;
/* Count the number of formal parameters of NODE.  Store the result in the
   appropriate field of INFO.  */
181 static void
182 ipa_count_formal_params (struct cgraph_node *node,
183 struct ipa_node_params *info)
185 int param_num;
187 param_num = count_formal_params_1 (node->decl);
188 ipa_set_param_count (info, param_num);
191 /* Initialize the ipa_node_params structure associated with NODE by counting
192 the function parameters, creating the descriptors and populating their
193 param_decls. */
195 void
196 ipa_initialize_node_params (struct cgraph_node *node)
198 struct ipa_node_params *info = IPA_NODE_REF (node);
200 if (!info->params)
202 ipa_count_formal_params (node, info);
203 info->params = XCNEWVEC (struct ipa_param_descriptor,
204 ipa_get_param_count (info));
205 ipa_populate_param_decls (node, info);
/* Count the actual arguments of call site CS and store the number in the
   ipa_edge_args structure corresponding to this call site.  */
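/* Note that ipa_edge_args_vector is indexed by call graph edge uid (this is
   what IPA_EDGE_REF relies on), so the vector is grown lazily here up to
   cgraph_edge_max_uid before the argument count is recorded.  */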
212 static void
213 ipa_count_arguments (struct cgraph_edge *cs)
215 gimple stmt;
216 int arg_num;
218 stmt = cs->call_stmt;
219 gcc_assert (is_gimple_call (stmt));
220 arg_num = gimple_call_num_args (stmt);
221 if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
222 <= (unsigned) cgraph_edge_max_uid)
223 VEC_safe_grow_cleared (ipa_edge_args_t, gc,
224 ipa_edge_args_vector, cgraph_edge_max_uid + 1);
225 ipa_set_cs_argument_count (IPA_EDGE_REF (cs), arg_num);
228 /* Print the jump functions associated with call graph edge CS to file F. */
230 static void
231 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
233 int i, count;
235 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
236 for (i = 0; i < count; i++)
238 struct ipa_jump_func *jump_func;
239 enum jump_func_type type;
241 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
242 type = jump_func->type;
244 fprintf (f, " param %d: ", i);
245 if (type == IPA_JF_UNKNOWN)
246 fprintf (f, "UNKNOWN\n");
247 else if (type == IPA_JF_KNOWN_TYPE)
249 tree binfo_type = TREE_TYPE (jump_func->value.base_binfo);
250 fprintf (f, "KNOWN TYPE, type in binfo is: ");
251 print_generic_expr (f, binfo_type, 0);
252 fprintf (f, " (%u)\n", TYPE_UID (binfo_type));
254 else if (type == IPA_JF_CONST)
256 tree val = jump_func->value.constant;
257 fprintf (f, "CONST: ");
258 print_generic_expr (f, val, 0);
259 if (TREE_CODE (val) == ADDR_EXPR
260 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
262 fprintf (f, " -> ");
263 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
266 fprintf (f, "\n");
268 else if (type == IPA_JF_CONST_MEMBER_PTR)
270 fprintf (f, "CONST MEMBER PTR: ");
271 print_generic_expr (f, jump_func->value.member_cst.pfn, 0);
272 fprintf (f, ", ");
273 print_generic_expr (f, jump_func->value.member_cst.delta, 0);
274 fprintf (f, "\n");
276 else if (type == IPA_JF_PASS_THROUGH)
278 fprintf (f, "PASS THROUGH: ");
279 fprintf (f, "%d, op %s ",
280 jump_func->value.pass_through.formal_id,
281 tree_code_name[(int)
282 jump_func->value.pass_through.operation]);
283 if (jump_func->value.pass_through.operation != NOP_EXPR)
print_generic_expr (f,
                    jump_func->value.pass_through.operand, 0);
fprintf (f, "\n");
288 else if (type == IPA_JF_ANCESTOR)
290 fprintf (f, "ANCESTOR: ");
291 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
292 jump_func->value.ancestor.formal_id,
293 jump_func->value.ancestor.offset);
294 print_generic_expr (f, jump_func->value.ancestor.type, 0);
fprintf (f, "\n");
301 /* Print the jump functions of all arguments on all call graph edges going from
302 NODE to file F. */
304 void
305 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
307 struct cgraph_edge *cs;
308 int i;
310 fprintf (f, " Jump functions of caller %s:\n", cgraph_node_name (node));
311 for (cs = node->callees; cs; cs = cs->next_callee)
313 if (!ipa_edge_args_info_available_for_edge_p (cs))
314 continue;
316 fprintf (f, " callsite %s/%i -> %s/%i : \n",
317 cgraph_node_name (node), node->uid,
318 cgraph_node_name (cs->callee), cs->callee->uid);
319 ipa_print_node_jump_functions_for_edge (f, cs);
322 for (cs = node->indirect_calls, i = 0; cs; cs = cs->next_callee, i++)
324 if (!ipa_edge_args_info_available_for_edge_p (cs))
325 continue;
327 if (cs->call_stmt)
329 fprintf (f, " indirect callsite %d for stmt ", i);
330 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
332 else
333 fprintf (f, " indirect callsite %d :\n", i);
334 ipa_print_node_jump_functions_for_edge (f, cs);
339 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
341 void
342 ipa_print_all_jump_functions (FILE *f)
344 struct cgraph_node *node;
346 fprintf (f, "\nJump functions:\n");
347 for (node = cgraph_nodes; node; node = node->next)
349 ipa_print_node_jump_functions (f, node);
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to find out whether NAME can be
   described by a (possibly polynomial) pass-through jump function or an
   ancestor jump function and if so, write the appropriate function into
   JFUNC.  */
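/* Two shapes of STMT that the function below recognizes, as an illustration
   only (the SSA names are made up):

     a_3 = parm_1(D) + 4;          <- pass-through with operation PLUS_EXPR
     p_5 = &parm_2(D)->D.1234;     <- ancestor with the offset of the field  */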
359 static void
360 compute_complex_assign_jump_func (struct ipa_node_params *info,
361 struct ipa_jump_func *jfunc,
362 gimple stmt, tree name)
364 HOST_WIDE_INT offset, size, max_size;
365 tree op1, op2, type;
366 int index;
368 op1 = gimple_assign_rhs1 (stmt);
369 op2 = gimple_assign_rhs2 (stmt);
371 if (TREE_CODE (op1) == SSA_NAME
372 && SSA_NAME_IS_DEFAULT_DEF (op1))
374 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
375 if (index < 0)
376 return;
378 if (op2)
380 if (!is_gimple_ip_invariant (op2)
381 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
382 && !useless_type_conversion_p (TREE_TYPE (name),
383 TREE_TYPE (op1))))
384 return;
386 jfunc->type = IPA_JF_PASS_THROUGH;
387 jfunc->value.pass_through.formal_id = index;
388 jfunc->value.pass_through.operation = gimple_assign_rhs_code (stmt);
389 jfunc->value.pass_through.operand = op2;
391 else if (gimple_assign_unary_nop_p (stmt))
393 jfunc->type = IPA_JF_PASS_THROUGH;
394 jfunc->value.pass_through.formal_id = index;
395 jfunc->value.pass_through.operation = NOP_EXPR;
397 return;
400 if (TREE_CODE (op1) != ADDR_EXPR)
401 return;
403 op1 = TREE_OPERAND (op1, 0);
404 type = TREE_TYPE (op1);
405 if (TREE_CODE (type) != RECORD_TYPE)
406 return;
407 op1 = get_ref_base_and_extent (op1, &offset, &size, &max_size);
408 if (TREE_CODE (op1) != MEM_REF
409 /* If this is a varying address, punt. */
410 || max_size == -1
411 || max_size != size)
412 return;
413 offset += mem_ref_offset (op1).low * BITS_PER_UNIT;
414 op1 = TREE_OPERAND (op1, 0);
415 if (TREE_CODE (op1) != SSA_NAME
416 || !SSA_NAME_IS_DEFAULT_DEF (op1)
417 || offset < 0)
418 return;
420 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
421 if (index >= 0)
423 jfunc->type = IPA_JF_ANCESTOR;
424 jfunc->value.ancestor.formal_id = index;
425 jfunc->value.ancestor.offset = offset;
426 jfunc->value.ancestor.type = type;
431 /* Given that an actual argument is an SSA_NAME that is a result of a phi
432 statement PHI, try to find out whether NAME is in fact a
433 multiple-inheritance typecast from a descendant into an ancestor of a formal
434 parameter and thus can be described by an ancestor jump function and if so,
435 write the appropriate function into JFUNC.
437 Essentially we want to match the following pattern:
439 if (obj_2(D) != 0B)
440 goto <bb 3>;
441 else
442 goto <bb 4>;
444 <bb 3>:
445 iftmp.1_3 = &obj_2(D)->D.1762;
447 <bb 4>:
448 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
449 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
450 return D.1879_6; */
452 static void
453 compute_complex_ancestor_jump_func (struct ipa_node_params *info,
454 struct ipa_jump_func *jfunc,
455 gimple phi)
457 HOST_WIDE_INT offset, size, max_size;
458 gimple assign, cond;
459 basic_block phi_bb, assign_bb, cond_bb;
460 tree tmp, parm, expr;
461 int index, i;
463 if (gimple_phi_num_args (phi) != 2
464 || !integer_zerop (PHI_ARG_DEF (phi, 1)))
465 return;
467 tmp = PHI_ARG_DEF (phi, 0);
468 if (TREE_CODE (tmp) != SSA_NAME
469 || SSA_NAME_IS_DEFAULT_DEF (tmp)
470 || !POINTER_TYPE_P (TREE_TYPE (tmp))
471 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
472 return;
474 assign = SSA_NAME_DEF_STMT (tmp);
475 assign_bb = gimple_bb (assign);
476 if (!single_pred_p (assign_bb)
477 || !gimple_assign_single_p (assign))
478 return;
479 expr = gimple_assign_rhs1 (assign);
481 if (TREE_CODE (expr) != ADDR_EXPR)
482 return;
483 expr = TREE_OPERAND (expr, 0);
484 expr = get_ref_base_and_extent (expr, &offset, &size, &max_size);
486 if (TREE_CODE (expr) != MEM_REF
487 /* If this is a varying address, punt. */
488 || max_size == -1
489 || max_size != size)
490 return;
491 offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
492 parm = TREE_OPERAND (expr, 0);
493 if (TREE_CODE (parm) != SSA_NAME
494 || !SSA_NAME_IS_DEFAULT_DEF (parm)
495 || offset < 0)
496 return;
498 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
499 if (index < 0)
500 return;
502 cond_bb = single_pred (assign_bb);
503 cond = last_stmt (cond_bb);
504 if (!cond
505 || gimple_code (cond) != GIMPLE_COND
506 || gimple_cond_code (cond) != NE_EXPR
507 || gimple_cond_lhs (cond) != parm
508 || !integer_zerop (gimple_cond_rhs (cond)))
509 return;
512 phi_bb = gimple_bb (phi);
513 for (i = 0; i < 2; i++)
515 basic_block pred = EDGE_PRED (phi_bb, i)->src;
516 if (pred != assign_bb && pred != cond_bb)
517 return;
520 jfunc->type = IPA_JF_ANCESTOR;
521 jfunc->value.ancestor.formal_id = index;
522 jfunc->value.ancestor.offset = offset;
523 jfunc->value.ancestor.type = TREE_TYPE (TREE_TYPE (tmp));
/* Given OP which is passed as an actual argument to a called function,
   determine if it is possible to construct a KNOWN_TYPE jump function for it
   and if so, create one and store it to JFUNC.  */
530 static void
531 compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc)
533 tree binfo;
535 if (TREE_CODE (op) != ADDR_EXPR)
536 return;
538 op = TREE_OPERAND (op, 0);
539 binfo = gimple_get_relevant_ref_binfo (op, NULL_TREE);
540 if (binfo)
542 jfunc->type = IPA_JF_KNOWN_TYPE;
543 jfunc->value.base_binfo = binfo;
548 /* Determine the jump functions of scalar arguments. Scalar means SSA names
549 and constants of a number of selected types. INFO is the ipa_node_params
550 structure associated with the caller, FUNCTIONS is a pointer to an array of
551 jump function structures associated with CALL which is the call statement
being examined.  */
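/* Summarizing the dispatch below: IP-invariant arguments yield IPA_JF_CONST,
   an SSA name that is a default definition of a parameter yields a simple
   pass-through, other SSA names are handed to the complex assign/ancestor
   helpers above, and remaining arguments that are ADDR_EXPRs of objects with
   a determinable type yield IPA_JF_KNOWN_TYPE.  */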
554 static void
555 compute_scalar_jump_functions (struct ipa_node_params *info,
556 struct ipa_jump_func *functions,
557 gimple call)
559 tree arg;
560 unsigned num = 0;
562 for (num = 0; num < gimple_call_num_args (call); num++)
564 arg = gimple_call_arg (call, num);
566 if (is_gimple_ip_invariant (arg))
568 functions[num].type = IPA_JF_CONST;
569 functions[num].value.constant = arg;
571 else if (TREE_CODE (arg) == SSA_NAME)
573 if (SSA_NAME_IS_DEFAULT_DEF (arg))
575 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
577 if (index >= 0)
579 functions[num].type = IPA_JF_PASS_THROUGH;
580 functions[num].value.pass_through.formal_id = index;
581 functions[num].value.pass_through.operation = NOP_EXPR;
584 else
586 gimple stmt = SSA_NAME_DEF_STMT (arg);
587 if (is_gimple_assign (stmt))
588 compute_complex_assign_jump_func (info, &functions[num],
589 stmt, arg);
590 else if (gimple_code (stmt) == GIMPLE_PHI)
591 compute_complex_ancestor_jump_func (info, &functions[num],
592 stmt);
595 else
596 compute_known_type_jump_func (arg, &functions[num]);
600 /* Inspect the given TYPE and return true iff it has the same structure (the
601 same number of fields of the same types) as a C++ member pointer. If
602 METHOD_PTR and DELTA are non-NULL, store the trees representing the
603 corresponding fields there. */
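/* For reference, the record being matched is the usual C++ representation of
   a pointer to member function, i.e. a structure whose first field (__pfn in
   the dumps) is a pointer to a METHOD_TYPE and whose second field (__delta)
   holds the this-pointer adjustment.  */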
605 static bool
606 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
608 tree fld;
610 if (TREE_CODE (type) != RECORD_TYPE)
611 return false;
613 fld = TYPE_FIELDS (type);
614 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
615 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE)
616 return false;
618 if (method_ptr)
619 *method_ptr = fld;
621 fld = DECL_CHAIN (fld);
622 if (!fld || INTEGRAL_TYPE_P (fld))
623 return false;
624 if (delta)
625 *delta = fld;
627 if (DECL_CHAIN (fld))
628 return false;
630 return true;
/* Callback of walk_aliased_vdefs.  Flag in the boolean variable pointed to by
   DATA that it has been invoked.  */
636 static bool
637 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
638 void *data)
640 bool *b = (bool *) data;
641 *b = true;
642 return true;
645 /* Return true if the formal parameter PARM might have been modified in this
646 function before reaching the statement CALL. PARM_INFO is a pointer to a
647 structure containing intermediate information about PARM. */
649 static bool
650 is_parm_modified_before_call (struct param_analysis_info *parm_info,
651 gimple call, tree parm)
653 bool modified = false;
654 ao_ref refd;
656 if (parm_info->modified)
657 return true;
659 ao_ref_init (&refd, parm);
660 walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
661 &modified, &parm_info->visited_statements);
662 if (modified)
664 parm_info->modified = true;
665 return true;
667 return false;
670 /* Go through arguments of the CALL and for every one that looks like a member
671 pointer, check whether it can be safely declared pass-through and if so,
672 mark that to the corresponding item of jump FUNCTIONS. Return true iff
673 there are non-pass-through member pointers within the arguments. INFO
674 describes formal parameters of the caller. PARMS_INFO is a pointer to a
675 vector containing intermediate information about each formal parameter. */
677 static bool
678 compute_pass_through_member_ptrs (struct ipa_node_params *info,
679 struct param_analysis_info *parms_info,
680 struct ipa_jump_func *functions,
681 gimple call)
683 bool undecided_members = false;
684 unsigned num;
685 tree arg;
687 for (num = 0; num < gimple_call_num_args (call); num++)
689 arg = gimple_call_arg (call, num);
691 if (type_like_member_ptr_p (TREE_TYPE (arg), NULL, NULL))
693 if (TREE_CODE (arg) == PARM_DECL)
695 int index = ipa_get_param_decl_index (info, arg);
gcc_assert (index >= 0);
698 if (!is_parm_modified_before_call (&parms_info[index], call, arg))
700 functions[num].type = IPA_JF_PASS_THROUGH;
701 functions[num].value.pass_through.formal_id = index;
702 functions[num].value.pass_through.operation = NOP_EXPR;
704 else
705 undecided_members = true;
707 else
708 undecided_members = true;
712 return undecided_members;
715 /* Simple function filling in a member pointer constant jump function (with PFN
716 and DELTA as the constant value) into JFUNC. */
718 static void
719 fill_member_ptr_cst_jump_function (struct ipa_jump_func *jfunc,
720 tree pfn, tree delta)
722 jfunc->type = IPA_JF_CONST_MEMBER_PTR;
723 jfunc->value.member_cst.pfn = pfn;
724 jfunc->value.member_cst.delta = delta;
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  */
730 static inline tree
731 get_ssa_def_if_simple_copy (tree rhs)
733 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
735 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
737 if (gimple_assign_single_p (def_stmt))
738 rhs = gimple_assign_rhs1 (def_stmt);
739 else
740 break;
742 return rhs;
745 /* Traverse statements from CALL backwards, scanning whether the argument ARG
746 which is a member pointer is filled in with constant values. If it is, fill
747 the jump function JFUNC in appropriately. METHOD_FIELD and DELTA_FIELD are
748 fields of the record type of the member pointer. To give an example, we
749 look for a pattern looking like the following:
751 D.2515.__pfn ={v} printStuff;
752 D.2515.__delta ={v} 0;
753 i_1 = doprinting (D.2515); */
755 static void
756 determine_cst_member_ptr (gimple call, tree arg, tree method_field,
757 tree delta_field, struct ipa_jump_func *jfunc)
759 gimple_stmt_iterator gsi;
760 tree method = NULL_TREE;
761 tree delta = NULL_TREE;
763 gsi = gsi_for_stmt (call);
765 gsi_prev (&gsi);
766 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
768 gimple stmt = gsi_stmt (gsi);
769 tree lhs, rhs, fld;
771 if (!stmt_may_clobber_ref_p (stmt, arg))
772 continue;
773 if (!gimple_assign_single_p (stmt))
774 return;
776 lhs = gimple_assign_lhs (stmt);
777 rhs = gimple_assign_rhs1 (stmt);
779 if (TREE_CODE (lhs) != COMPONENT_REF
780 || TREE_OPERAND (lhs, 0) != arg)
781 return;
783 fld = TREE_OPERAND (lhs, 1);
784 if (!method && fld == method_field)
786 rhs = get_ssa_def_if_simple_copy (rhs);
787 if (TREE_CODE (rhs) == ADDR_EXPR
788 && TREE_CODE (TREE_OPERAND (rhs, 0)) == FUNCTION_DECL
789 && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs, 0))) == METHOD_TYPE)
791 method = TREE_OPERAND (rhs, 0);
792 if (delta)
794 fill_member_ptr_cst_jump_function (jfunc, rhs, delta);
795 return;
798 else
799 return;
802 if (!delta && fld == delta_field)
804 rhs = get_ssa_def_if_simple_copy (rhs);
805 if (TREE_CODE (rhs) == INTEGER_CST)
807 delta = rhs;
808 if (method)
810 fill_member_ptr_cst_jump_function (jfunc, rhs, delta);
811 return;
814 else
815 return;
819 return;
/* Go through the arguments of the CALL and for every member pointer within,
   try to determine whether it is a constant.  If it is, create a corresponding
   constant jump function in FUNCTIONS which is an array of jump functions
   associated with the call.  */
827 static void
828 compute_cst_member_ptr_arguments (struct ipa_jump_func *functions,
829 gimple call)
831 unsigned num;
832 tree arg, method_field, delta_field;
834 for (num = 0; num < gimple_call_num_args (call); num++)
836 arg = gimple_call_arg (call, num);
838 if (functions[num].type == IPA_JF_UNKNOWN
839 && type_like_member_ptr_p (TREE_TYPE (arg), &method_field,
840 &delta_field))
841 determine_cst_member_ptr (call, arg, method_field, delta_field,
842 &functions[num]);
/* Compute jump functions for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */
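/* The helpers are applied in a fixed order: scalar jump functions first, then
   member pointers that can be declared pass-through, and, only if some member
   pointer arguments remain undecided, a final scan for constant member
   pointers that fills in slots which are still IPA_JF_UNKNOWN.  */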
850 static void
851 ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_info,
852 struct cgraph_edge *cs)
854 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
855 struct ipa_edge_args *arguments = IPA_EDGE_REF (cs);
856 gimple call;
858 if (ipa_get_cs_argument_count (arguments) == 0 || arguments->jump_functions)
859 return;
860 arguments->jump_functions = ggc_alloc_cleared_vec_ipa_jump_func
861 (ipa_get_cs_argument_count (arguments));
863 call = cs->call_stmt;
864 gcc_assert (is_gimple_call (call));
866 /* We will deal with constants and SSA scalars first: */
867 compute_scalar_jump_functions (info, arguments->jump_functions, call);
869 /* Let's check whether there are any potential member pointers and if so,
870 whether we can determine their functions as pass_through. */
871 if (!compute_pass_through_member_ptrs (info, parms_info,
872 arguments->jump_functions, call))
873 return;
875 /* Finally, let's check whether we actually pass a new constant member
876 pointer here... */
877 compute_cst_member_ptr_arguments (arguments->jump_functions, call);
880 /* Compute jump functions for all edges - both direct and indirect - outgoing
881 from NODE. Also count the actual arguments in the process. */
883 static void
884 ipa_compute_jump_functions (struct cgraph_node *node,
885 struct param_analysis_info *parms_info)
887 struct cgraph_edge *cs;
889 for (cs = node->callees; cs; cs = cs->next_callee)
891 /* We do not need to bother analyzing calls to unknown
892 functions unless they may become known during lto/whopr. */
893 if (!cs->callee->analyzed && !flag_lto)
894 continue;
895 ipa_count_arguments (cs);
896 /* If the descriptor of the callee is not initialized yet, we have to do
897 it now. */
898 if (cs->callee->analyzed)
899 ipa_initialize_node_params (cs->callee);
900 if (ipa_get_cs_argument_count (IPA_EDGE_REF (cs))
901 != ipa_get_param_count (IPA_NODE_REF (cs->callee)))
902 ipa_set_called_with_variable_arg (IPA_NODE_REF (cs->callee));
903 ipa_compute_jump_functions_for_edge (parms_info, cs);
906 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
908 ipa_count_arguments (cs);
909 ipa_compute_jump_functions_for_edge (parms_info, cs);
913 /* If RHS looks like a rhs of a statement loading pfn from a member
914 pointer formal parameter, return the parameter, otherwise return
915 NULL. If USE_DELTA, then we look for a use of the delta field
916 rather than the pfn. */
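/* Concretely, RHS must be a MEM_REF whose base is the address of a PARM_DECL
   with a member-pointer-like type and whose constant offset equals the byte
   position of the requested field (__delta or __pfn).  */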
918 static tree
919 ipa_get_member_ptr_load_param (tree rhs, bool use_delta)
921 tree rec, ref_offset, fld_offset;
922 tree ptr_field;
923 tree delta_field;
925 if (TREE_CODE (rhs) != MEM_REF)
926 return NULL_TREE;
927 rec = TREE_OPERAND (rhs, 0);
928 if (TREE_CODE (rec) != ADDR_EXPR)
929 return NULL_TREE;
930 rec = TREE_OPERAND (rec, 0);
931 if (TREE_CODE (rec) != PARM_DECL
932 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
933 return NULL_TREE;
935 ref_offset = TREE_OPERAND (rhs, 1);
936 if (use_delta)
937 fld_offset = byte_position (delta_field);
938 else
939 fld_offset = byte_position (ptr_field);
941 return tree_int_cst_equal (ref_offset, fld_offset) ? rec : NULL_TREE;
944 /* If STMT looks like a statement loading a value from a member pointer formal
945 parameter, this function returns that parameter. */
947 static tree
948 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta)
950 tree rhs;
952 if (!gimple_assign_single_p (stmt))
953 return NULL_TREE;
955 rhs = gimple_assign_rhs1 (stmt);
956 return ipa_get_member_ptr_load_param (rhs, use_delta);
959 /* Returns true iff T is an SSA_NAME defined by a statement. */
961 static bool
962 ipa_is_ssa_with_stmt_def (tree t)
964 if (TREE_CODE (t) == SSA_NAME
965 && !SSA_NAME_IS_DEFAULT_DEF (t))
966 return true;
967 else
968 return false;
971 /* Find the indirect call graph edge corresponding to STMT and add to it all
972 information necessary to describe a call to a parameter number PARAM_INDEX.
973 NODE is the caller. POLYMORPHIC should be set to true iff the call is a
974 virtual one. */
976 static void
977 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt,
978 bool polymorphic)
980 struct cgraph_edge *cs;
982 cs = cgraph_edge (node, stmt);
983 cs->indirect_info->param_index = param_index;
984 cs->indirect_info->anc_offset = 0;
985 cs->indirect_info->polymorphic = polymorphic;
986 if (polymorphic)
988 tree otr = gimple_call_fn (stmt);
989 tree type, token = OBJ_TYPE_REF_TOKEN (otr);
990 cs->indirect_info->otr_token = tree_low_cst (token, 1);
991 type = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (otr)));
992 cs->indirect_info->otr_type = type;
996 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
997 (described by INFO). PARMS_INFO is a pointer to a vector containing
998 intermediate information about each formal parameter. Currently it checks
999 whether the call calls a pointer that is a formal parameter and if so, the
1000 parameter is marked with the called flag and an indirect call graph edge
1001 describing the call is created. This is very simple for ordinary pointers
1002 represented in SSA but not-so-nice when it comes to member pointers. The
1003 ugly part of this function does nothing more than trying to match the
1004 pattern of such a call. An example of such a pattern is the gimple dump
1005 below, the call is on the last line:
1007 <bb 2>:
1008 f$__delta_5 = MEM[(struct *)&f];
1009 f$__pfn_24 = MEM[(struct *)&f + 4B];
1013 <bb 5>
1014 D.2496_3 = (int) f$__pfn_24;
1015 D.2497_4 = D.2496_3 & 1;
1016 if (D.2497_4 != 0)
1017 goto <bb 3>;
1018 else
1019 goto <bb 4>;
1021 <bb 6>:
1022 D.2500_7 = (unsigned int) f$__delta_5;
1023 D.2501_8 = &S + D.2500_7;
1024 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1025 D.2503_10 = *D.2502_9;
1026 D.2504_12 = f$__pfn_24 + -1;
1027 D.2505_13 = (unsigned int) D.2504_12;
1028 D.2506_14 = D.2503_10 + D.2505_13;
1029 D.2507_15 = *D.2506_14;
1030 iftmp.11_16 = (String:: *) D.2507_15;
1032 <bb 7>:
1033 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1034 D.2500_19 = (unsigned int) f$__delta_5;
1035 D.2508_20 = &S + D.2500_19;
1036 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1038 Such patterns are results of simple calls to a member pointer:
   int doprinting (int (MyString::* f)(int) const)
   {
     MyString S ("somestring");

     return (S.*f)(4);
   }  */
1048 static void
1049 ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1050 struct ipa_node_params *info,
1051 struct param_analysis_info *parms_info,
1052 gimple call, tree target)
1054 gimple def;
1055 tree n1, n2;
1056 gimple d1, d2;
1057 tree rec, rec2, cond;
1058 gimple branch;
1059 int index;
1060 basic_block bb, virt_bb, join;
1062 if (SSA_NAME_IS_DEFAULT_DEF (target))
1064 tree var = SSA_NAME_VAR (target);
1065 index = ipa_get_param_decl_index (info, var);
1066 if (index >= 0)
1067 ipa_note_param_call (node, index, call, false);
1068 return;
1071 /* Now we need to try to match the complex pattern of calling a member
1072 pointer. */
1074 if (!POINTER_TYPE_P (TREE_TYPE (target))
1075 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1076 return;
1078 def = SSA_NAME_DEF_STMT (target);
1079 if (gimple_code (def) != GIMPLE_PHI)
1080 return;
1082 if (gimple_phi_num_args (def) != 2)
1083 return;
1085 /* First, we need to check whether one of these is a load from a member
1086 pointer that is a parameter to this function. */
1087 n1 = PHI_ARG_DEF (def, 0);
1088 n2 = PHI_ARG_DEF (def, 1);
1089 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1090 return;
1091 d1 = SSA_NAME_DEF_STMT (n1);
1092 d2 = SSA_NAME_DEF_STMT (n2);
1094 join = gimple_bb (def);
1095 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false)))
1097 if (ipa_get_stmt_member_ptr_load_param (d2, false))
1098 return;
1100 bb = EDGE_PRED (join, 0)->src;
1101 virt_bb = gimple_bb (d2);
1103 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false)))
1105 bb = EDGE_PRED (join, 1)->src;
1106 virt_bb = gimple_bb (d1);
1108 else
1109 return;
1111 /* Second, we need to check that the basic blocks are laid out in the way
1112 corresponding to the pattern. */
1114 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1115 || single_pred (virt_bb) != bb
1116 || single_succ (virt_bb) != join)
1117 return;
1119 /* Third, let's see that the branching is done depending on the least
1120 significant bit of the pfn. */
1122 branch = last_stmt (bb);
1123 if (!branch || gimple_code (branch) != GIMPLE_COND)
1124 return;
1126 if (gimple_cond_code (branch) != NE_EXPR
1127 || !integer_zerop (gimple_cond_rhs (branch)))
1128 return;
1130 cond = gimple_cond_lhs (branch);
1131 if (!ipa_is_ssa_with_stmt_def (cond))
1132 return;
1134 def = SSA_NAME_DEF_STMT (cond);
1135 if (!is_gimple_assign (def)
1136 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1137 || !integer_onep (gimple_assign_rhs2 (def)))
1138 return;
1140 cond = gimple_assign_rhs1 (def);
1141 if (!ipa_is_ssa_with_stmt_def (cond))
1142 return;
1144 def = SSA_NAME_DEF_STMT (cond);
1146 if (is_gimple_assign (def)
1147 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
1149 cond = gimple_assign_rhs1 (def);
1150 if (!ipa_is_ssa_with_stmt_def (cond))
1151 return;
1152 def = SSA_NAME_DEF_STMT (cond);
1155 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1156 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1157 == ptrmemfunc_vbit_in_delta));
1159 if (rec != rec2)
1160 return;
1162 index = ipa_get_param_decl_index (info, rec);
1163 if (index >= 0 && !is_parm_modified_before_call (&parms_info[index],
1164 call, rec))
1165 ipa_note_param_call (node, index, call, false);
1167 return;
1170 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1171 object referenced in the expression is a formal parameter of the caller
1172 (described by INFO), create a call note for the statement. */
1174 static void
1175 ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1176 struct ipa_node_params *info, gimple call,
1177 tree target)
1179 tree obj = OBJ_TYPE_REF_OBJECT (target);
1180 tree var;
1181 int index;
1183 if (TREE_CODE (obj) == ADDR_EXPR)
1187 obj = TREE_OPERAND (obj, 0);
1189 while (TREE_CODE (obj) == COMPONENT_REF);
1190 if (TREE_CODE (obj) != MEM_REF)
1191 return;
1192 obj = TREE_OPERAND (obj, 0);
1195 if (TREE_CODE (obj) != SSA_NAME
1196 || !SSA_NAME_IS_DEFAULT_DEF (obj))
1197 return;
1199 var = SSA_NAME_VAR (obj);
1200 index = ipa_get_param_decl_index (info, var);
1202 if (index >= 0)
1203 ipa_note_param_call (node, index, call, true);
1206 /* Analyze a call statement CALL whether and how it utilizes formal parameters
1207 of the caller (described by INFO). PARMS_INFO is a pointer to a vector
1208 containing intermediate information about each formal parameter. */
1210 static void
1211 ipa_analyze_call_uses (struct cgraph_node *node,
1212 struct ipa_node_params *info,
1213 struct param_analysis_info *parms_info, gimple call)
1215 tree target = gimple_call_fn (call);
1217 if (TREE_CODE (target) == SSA_NAME)
1218 ipa_analyze_indirect_call_uses (node, info, parms_info, call, target);
1219 else if (TREE_CODE (target) == OBJ_TYPE_REF)
1220 ipa_analyze_virtual_call_uses (node, info, call, target);
1224 /* Analyze the call statement STMT with respect to formal parameters (described
1225 in INFO) of caller given by NODE. Currently it only checks whether formal
1226 parameters are called. PARMS_INFO is a pointer to a vector containing
1227 intermediate information about each formal parameter. */
1229 static void
1230 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
1231 struct param_analysis_info *parms_info, gimple stmt)
1233 if (is_gimple_call (stmt))
1234 ipa_analyze_call_uses (node, info, parms_info, stmt);
/* Callback of walk_stmt_load_store_addr_ops, used for the visit_load,
   visit_store and visit_addr positions below.  If OP is a parameter
   declaration, mark it as used in the info structure passed in DATA.  */
1241 static bool
1242 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
1243 tree op, void *data)
1245 struct ipa_node_params *info = (struct ipa_node_params *) data;
1247 op = get_base_address (op);
1248 if (op
1249 && TREE_CODE (op) == PARM_DECL)
1251 int index = ipa_get_param_decl_index (info, op);
1252 gcc_assert (index >= 0);
1253 info->params[index].used = true;
1256 return false;
1259 /* Scan the function body of NODE and inspect the uses of formal parameters.
1260 Store the findings in various structures of the associated ipa_node_params
1261 structure, such as parameter flags, notes etc. PARMS_INFO is a pointer to a
1262 vector containing intermediate information about each formal parameter. */
1264 static void
1265 ipa_analyze_params_uses (struct cgraph_node *node,
1266 struct param_analysis_info *parms_info)
1268 tree decl = node->decl;
1269 basic_block bb;
1270 struct function *func;
1271 gimple_stmt_iterator gsi;
1272 struct ipa_node_params *info = IPA_NODE_REF (node);
1273 int i;
1275 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
1276 return;
1278 for (i = 0; i < ipa_get_param_count (info); i++)
1280 tree parm = ipa_get_param (info, i);
1281 /* For SSA regs see if parameter is used. For non-SSA we compute
1282 the flag during modification analysis. */
1283 if (is_gimple_reg (parm)
1284 && gimple_default_def (DECL_STRUCT_FUNCTION (node->decl), parm))
1285 info->params[i].used = true;
1288 func = DECL_STRUCT_FUNCTION (decl);
1289 FOR_EACH_BB_FN (bb, func)
1291 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1293 gimple stmt = gsi_stmt (gsi);
1295 if (is_gimple_debug (stmt))
1296 continue;
1298 ipa_analyze_stmt_uses (node, info, parms_info, stmt);
1299 walk_stmt_load_store_addr_ops (stmt, info,
1300 visit_ref_for_mod_analysis,
1301 visit_ref_for_mod_analysis,
1302 visit_ref_for_mod_analysis);
1304 for (gsi = gsi_start (phi_nodes (bb)); !gsi_end_p (gsi); gsi_next (&gsi))
1305 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
1306 visit_ref_for_mod_analysis,
1307 visit_ref_for_mod_analysis,
1308 visit_ref_for_mod_analysis);
1311 info->uses_analysis_done = 1;
/* Initialize the array describing properties of formal parameters of NODE,
   analyze their uses and compute jump functions associated with actual
   arguments of calls from within NODE.  */
1318 void
1319 ipa_analyze_node (struct cgraph_node *node)
1321 struct ipa_node_params *info = IPA_NODE_REF (node);
1322 struct param_analysis_info *parms_info;
1323 int i, param_count;
1325 ipa_initialize_node_params (node);
1327 param_count = ipa_get_param_count (info);
1328 parms_info = XALLOCAVEC (struct param_analysis_info, param_count);
1329 memset (parms_info, 0, sizeof (struct param_analysis_info) * param_count);
1331 ipa_analyze_params_uses (node, parms_info);
1332 ipa_compute_jump_functions (node, parms_info);
1334 for (i = 0; i < param_count; i++)
1335 if (parms_info[i].visited_statements)
1336 BITMAP_FREE (parms_info[i].visited_statements);
/* Update the jump function DST when the call graph edge corresponding to SRC
   is being inlined, knowing that DST is of type ancestor and src of known
   type.  */
1344 static void
1345 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
1346 struct ipa_jump_func *dst)
1348 tree new_binfo;
1350 new_binfo = get_binfo_at_offset (src->value.base_binfo,
1351 dst->value.ancestor.offset,
1352 dst->value.ancestor.type);
1353 if (new_binfo)
1355 dst->type = IPA_JF_KNOWN_TYPE;
1356 dst->value.base_binfo = new_binfo;
1358 else
1359 dst->type = IPA_JF_UNKNOWN;
1362 /* Update the jump functions associated with call graph edge E when the call
1363 graph edge CS is being inlined, assuming that E->caller is already (possibly
1364 indirectly) inlined into CS->callee and that E has not been inlined. */
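/* The combination rules implemented below, in short: for an ancestor DST,
   composing with a known_type SRC looks up the binfo at the ancestor offset,
   a constant SRC is first turned into a known_type if possible, a simple
   (NOP) pass-through SRC only remaps the formal id, an ancestor SRC adds its
   offset, and anything else makes DST unknown.  For a simple pass-through
   DST, the jump function of the corresponding argument of CS is copied
   verbatim.  */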
1366 static void
1367 update_jump_functions_after_inlining (struct cgraph_edge *cs,
1368 struct cgraph_edge *e)
1370 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
1371 struct ipa_edge_args *args = IPA_EDGE_REF (e);
1372 int count = ipa_get_cs_argument_count (args);
1373 int i;
1375 for (i = 0; i < count; i++)
1377 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
1379 if (dst->type == IPA_JF_ANCESTOR)
1381 struct ipa_jump_func *src;
1383 /* Variable number of arguments can cause havoc if we try to access
1384 one that does not exist in the inlined edge. So make sure we
1385 don't. */
1386 if (dst->value.ancestor.formal_id >= ipa_get_cs_argument_count (top))
1388 dst->type = IPA_JF_UNKNOWN;
1389 continue;
1392 src = ipa_get_ith_jump_func (top, dst->value.ancestor.formal_id);
1393 if (src->type == IPA_JF_KNOWN_TYPE)
1394 combine_known_type_and_ancestor_jfs (src, dst);
1395 else if (src->type == IPA_JF_CONST)
1397 struct ipa_jump_func kt_func;
1399 kt_func.type = IPA_JF_UNKNOWN;
1400 compute_known_type_jump_func (src->value.constant, &kt_func);
1401 if (kt_func.type == IPA_JF_KNOWN_TYPE)
1402 combine_known_type_and_ancestor_jfs (&kt_func, dst);
1403 else
1404 dst->type = IPA_JF_UNKNOWN;
1406 else if (src->type == IPA_JF_PASS_THROUGH
1407 && src->value.pass_through.operation == NOP_EXPR)
1408 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
1409 else if (src->type == IPA_JF_ANCESTOR)
1411 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
1412 dst->value.ancestor.offset += src->value.ancestor.offset;
1414 else
1415 dst->type = IPA_JF_UNKNOWN;
1417 else if (dst->type == IPA_JF_PASS_THROUGH)
1419 struct ipa_jump_func *src;
1420 /* We must check range due to calls with variable number of arguments
1421 and we cannot combine jump functions with operations. */
1422 if (dst->value.pass_through.operation == NOP_EXPR
1423 && (dst->value.pass_through.formal_id
1424 < ipa_get_cs_argument_count (top)))
1426 src = ipa_get_ith_jump_func (top,
1427 dst->value.pass_through.formal_id);
1428 *dst = *src;
1430 else
1431 dst->type = IPA_JF_UNKNOWN;
1436 /* If TARGET is an addr_expr of a function declaration, make it the destination
1437 of an indirect edge IE and return the edge. Otherwise, return NULL. */
1439 struct cgraph_edge *
1440 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
1442 struct cgraph_node *callee;
1444 if (TREE_CODE (target) != ADDR_EXPR)
1445 return NULL;
1446 target = TREE_OPERAND (target, 0);
1447 if (TREE_CODE (target) != FUNCTION_DECL)
1448 return NULL;
1449 callee = cgraph_node (target);
1450 if (!callee)
1451 return NULL;
1452 ipa_check_create_node_params ();
1453 cgraph_make_edge_direct (ie, callee);
1454 if (dump_file)
1456 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
1457 "(%s/%i -> %s/%i) for stmt ",
1458 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
1459 cgraph_node_name (ie->caller), ie->caller->uid,
1460 cgraph_node_name (ie->callee), ie->callee->uid);
1462 if (ie->call_stmt)
1463 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
1464 else
1465 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
1468 if (ipa_get_cs_argument_count (IPA_EDGE_REF (ie))
1469 != ipa_get_param_count (IPA_NODE_REF (callee)))
1470 ipa_set_called_with_variable_arg (IPA_NODE_REF (callee));
1472 return ie;
1475 /* Try to find a destination for indirect edge IE that corresponds to a simple
1476 call or a call of a member function pointer and where the destination is a
1477 pointer formal parameter described by jump function JFUNC. If it can be
1478 determined, return the newly direct edge, otherwise return NULL. */
1480 static struct cgraph_edge *
1481 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
1482 struct ipa_jump_func *jfunc)
1484 tree target;
1486 if (jfunc->type == IPA_JF_CONST)
1487 target = jfunc->value.constant;
1488 else if (jfunc->type == IPA_JF_CONST_MEMBER_PTR)
1489 target = jfunc->value.member_cst.pfn;
1490 else
1491 return NULL;
1493 return ipa_make_edge_direct_to_target (ie, target);
/* Try to find a destination for indirect edge IE that corresponds to a
   virtual call based on a formal parameter which is described by jump
   function JFUNC and if it can be determined, make it direct and return the
   direct edge.  Otherwise, return NULL.  */
1501 static struct cgraph_edge *
1502 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
1503 struct ipa_jump_func *jfunc)
1505 tree binfo, type, target;
1506 HOST_WIDE_INT token;
1508 if (jfunc->type == IPA_JF_KNOWN_TYPE)
1509 binfo = jfunc->value.base_binfo;
1510 else if (jfunc->type == IPA_JF_CONST)
1512 tree cst = jfunc->value.constant;
1513 if (TREE_CODE (cst) == ADDR_EXPR)
1514 binfo = gimple_get_relevant_ref_binfo (TREE_OPERAND (cst, 0),
1515 NULL_TREE);
1516 else
1517 return NULL;
1519 else
1520 return NULL;
1522 if (!binfo)
1523 return NULL;
1525 token = ie->indirect_info->otr_token;
1526 type = ie->indirect_info->otr_type;
1527 binfo = get_binfo_at_offset (binfo, ie->indirect_info->anc_offset, type);
1528 if (binfo)
1529 target = gimple_fold_obj_type_ref_known_binfo (token, binfo);
1530 else
1531 return NULL;
1533 if (target)
1534 return ipa_make_edge_direct_to_target (ie, target);
1535 else
1536 return NULL;
1539 /* Update the param called notes associated with NODE when CS is being inlined,
1540 assuming NODE is (potentially indirectly) inlined into CS->callee.
1541 Moreover, if the callee is discovered to be constant, create a new cgraph
1542 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
1543 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
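/* The loop below remaps the parameter index recorded in each still-indirect
   edge through the jump function of the corresponding argument of CS and,
   when the result pins the callee down, turns the edge into a direct one.
   Edges whose fate is already decided are marked in
   iinlining_processed_edges so that they are not reconsidered later.  */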
1545 static bool
1546 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
1547 struct cgraph_node *node,
1548 VEC (cgraph_edge_p, heap) **new_edges)
1550 struct ipa_edge_args *top;
1551 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
1552 bool res = false;
1554 ipa_check_create_edge_args ();
1555 top = IPA_EDGE_REF (cs);
1557 for (ie = node->indirect_calls; ie; ie = next_ie)
1559 struct cgraph_indirect_call_info *ici = ie->indirect_info;
1560 struct ipa_jump_func *jfunc;
1562 next_ie = ie->next_callee;
1563 if (bitmap_bit_p (iinlining_processed_edges, ie->uid))
1564 continue;
1566 /* If we ever use indirect edges for anything other than indirect
1567 inlining, we will need to skip those with negative param_indices. */
1568 if (ici->param_index == -1)
1569 continue;
1571 /* We must check range due to calls with variable number of arguments: */
1572 if (ici->param_index >= ipa_get_cs_argument_count (top))
1574 bitmap_set_bit (iinlining_processed_edges, ie->uid);
1575 continue;
1578 jfunc = ipa_get_ith_jump_func (top, ici->param_index);
1579 if (jfunc->type == IPA_JF_PASS_THROUGH
1580 && jfunc->value.pass_through.operation == NOP_EXPR)
1581 ici->param_index = jfunc->value.pass_through.formal_id;
1582 else if (jfunc->type == IPA_JF_ANCESTOR)
1584 ici->param_index = jfunc->value.ancestor.formal_id;
1585 ici->anc_offset += jfunc->value.ancestor.offset;
1587 else
1588 /* Either we can find a destination for this edge now or never. */
1589 bitmap_set_bit (iinlining_processed_edges, ie->uid);
1591 if (ici->polymorphic)
1592 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc);
1593 else
1594 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc);
1596 if (new_direct_edge)
1598 new_direct_edge->indirect_inlining_edge = 1;
1599 if (new_edges)
1601 VEC_safe_push (cgraph_edge_p, heap, *new_edges,
1602 new_direct_edge);
1603 top = IPA_EDGE_REF (cs);
1604 res = true;
1609 return res;
1612 /* Recursively traverse subtree of NODE (including node) made of inlined
1613 cgraph_edges when CS has been inlined and invoke
1614 update_indirect_edges_after_inlining on all nodes and
1615 update_jump_functions_after_inlining on all non-inlined edges that lead out
1616 of this subtree. Newly discovered indirect edges will be added to
1617 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
1618 created. */
1620 static bool
1621 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
1622 struct cgraph_node *node,
1623 VEC (cgraph_edge_p, heap) **new_edges)
1625 struct cgraph_edge *e;
1626 bool res;
1628 res = update_indirect_edges_after_inlining (cs, node, new_edges);
1630 for (e = node->callees; e; e = e->next_callee)
1631 if (!e->inline_failed)
1632 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
1633 else
1634 update_jump_functions_after_inlining (cs, e);
1636 return res;
1639 /* Update jump functions and call note functions on inlining the call site CS.
1640 CS is expected to lead to a node already cloned by
1641 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
*NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff new edge(s) were
   created.  */
1645 bool
1646 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
1647 VEC (cgraph_edge_p, heap) **new_edges)
1649 /* FIXME lto: We do not stream out indirect call information. */
1650 if (flag_wpa)
1651 return false;
1653 /* Do nothing if the preparation phase has not been carried out yet
1654 (i.e. during early inlining). */
1655 if (!ipa_node_params_vector)
1656 return false;
1657 gcc_assert (ipa_edge_args_vector);
1659 return propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
1662 /* Frees all dynamically allocated structures that the argument info points
1663 to. */
1665 void
1666 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
1668 if (args->jump_functions)
1669 ggc_free (args->jump_functions);
1671 memset (args, 0, sizeof (*args));
1674 /* Free all ipa_edge structures. */
1676 void
1677 ipa_free_all_edge_args (void)
1679 int i;
1680 struct ipa_edge_args *args;
1682 FOR_EACH_VEC_ELT (ipa_edge_args_t, ipa_edge_args_vector, i, args)
1683 ipa_free_edge_args_substructures (args);
1685 VEC_free (ipa_edge_args_t, gc, ipa_edge_args_vector);
1686 ipa_edge_args_vector = NULL;
1689 /* Frees all dynamically allocated structures that the param info points
1690 to. */
1692 void
1693 ipa_free_node_params_substructures (struct ipa_node_params *info)
1695 if (info->params)
1696 free (info->params);
1698 memset (info, 0, sizeof (*info));
1701 /* Free all ipa_node_params structures. */
1703 void
1704 ipa_free_all_node_params (void)
1706 int i;
1707 struct ipa_node_params *info;
1709 FOR_EACH_VEC_ELT (ipa_node_params_t, ipa_node_params_vector, i, info)
1710 ipa_free_node_params_substructures (info);
1712 VEC_free (ipa_node_params_t, heap, ipa_node_params_vector);
1713 ipa_node_params_vector = NULL;
1716 /* Hook that is called by cgraph.c when an edge is removed. */
1718 static void
1719 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
/* During IPA-CP updating we can be called on not-yet analyzed clones.  */
1722 if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
1723 <= (unsigned)cs->uid)
1724 return;
1725 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
1728 /* Hook that is called by cgraph.c when a node is removed. */
1730 static void
1731 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
/* During IPA-CP updating we can be called on not-yet analyzed clones.  */
1734 if (VEC_length (ipa_node_params_t, ipa_node_params_vector)
1735 <= (unsigned)node->uid)
1736 return;
1737 ipa_free_node_params_substructures (IPA_NODE_REF (node));
1740 /* Helper function to duplicate an array of size N that is at SRC and store a
1741 pointer to it to DST. Nothing is done if SRC is NULL. */
1743 static void *
1744 duplicate_array (void *src, size_t n)
1746 void *p;
1748 if (!src)
1749 return NULL;
1751 p = xmalloc (n);
1752 memcpy (p, src, n);
1753 return p;
1756 static struct ipa_jump_func *
1757 duplicate_ipa_jump_func_array (const struct ipa_jump_func * src, size_t n)
1759 struct ipa_jump_func *p;
1761 if (!src)
1762 return NULL;
1764 p = ggc_alloc_vec_ipa_jump_func (n);
1765 memcpy (p, src, n * sizeof (struct ipa_jump_func));
1766 return p;
1769 /* Hook that is called by cgraph.c when a node is duplicated. */
1771 static void
1772 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
1773 __attribute__((unused)) void *data)
1775 struct ipa_edge_args *old_args, *new_args;
1776 int arg_count;
1778 ipa_check_create_edge_args ();
1780 old_args = IPA_EDGE_REF (src);
1781 new_args = IPA_EDGE_REF (dst);
1783 arg_count = ipa_get_cs_argument_count (old_args);
1784 ipa_set_cs_argument_count (new_args, arg_count);
1785 new_args->jump_functions =
1786 duplicate_ipa_jump_func_array (old_args->jump_functions, arg_count);
1788 if (iinlining_processed_edges
1789 && bitmap_bit_p (iinlining_processed_edges, src->uid))
1790 bitmap_set_bit (iinlining_processed_edges, dst->uid);
1793 /* Hook that is called by cgraph.c when a node is duplicated. */
1795 static void
1796 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
1797 __attribute__((unused)) void *data)
1799 struct ipa_node_params *old_info, *new_info;
1800 int param_count, i;
1802 ipa_check_create_node_params ();
1803 old_info = IPA_NODE_REF (src);
1804 new_info = IPA_NODE_REF (dst);
1805 param_count = ipa_get_param_count (old_info);
1807 ipa_set_param_count (new_info, param_count);
1808 new_info->params = (struct ipa_param_descriptor *)
1809 duplicate_array (old_info->params,
1810 sizeof (struct ipa_param_descriptor) * param_count);
1811 for (i = 0; i < param_count; i++)
1812 new_info->params[i].types = VEC_copy (tree, heap,
1813 old_info->params[i].types);
1814 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
1815 new_info->count_scale = old_info->count_scale;
1817 new_info->called_with_var_arguments = old_info->called_with_var_arguments;
1818 new_info->uses_analysis_done = old_info->uses_analysis_done;
1819 new_info->node_enqueued = old_info->node_enqueued;
1822 /* Register our cgraph hooks if they are not already there. */
1824 void
1825 ipa_register_cgraph_hooks (void)
1827 if (!edge_removal_hook_holder)
1828 edge_removal_hook_holder =
1829 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
1830 if (!node_removal_hook_holder)
1831 node_removal_hook_holder =
1832 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
1833 if (!edge_duplication_hook_holder)
1834 edge_duplication_hook_holder =
1835 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
1836 if (!node_duplication_hook_holder)
1837 node_duplication_hook_holder =
1838 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
/* Unregister our cgraph hooks.  */
1843 static void
1844 ipa_unregister_cgraph_hooks (void)
1846 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
1847 edge_removal_hook_holder = NULL;
1848 cgraph_remove_node_removal_hook (node_removal_hook_holder);
1849 node_removal_hook_holder = NULL;
1850 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
1851 edge_duplication_hook_holder = NULL;
1852 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
1853 node_duplication_hook_holder = NULL;
/* Allocate all data structures necessary for indirect inlining.  */
1858 void
1859 ipa_create_all_structures_for_iinln (void)
1861 iinlining_processed_edges = BITMAP_ALLOC (NULL);
1864 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
1865 longer needed after ipa-cp. */
1867 void
1868 ipa_free_all_structures_after_ipa_cp (void)
1870 if (!flag_indirect_inlining)
1872 ipa_free_all_edge_args ();
1873 ipa_free_all_node_params ();
1874 ipa_unregister_cgraph_hooks ();
1878 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
1879 longer needed after indirect inlining. */
1881 void
1882 ipa_free_all_structures_after_iinln (void)
1884 BITMAP_FREE (iinlining_processed_edges);
1886 ipa_free_all_edge_args ();
1887 ipa_free_all_node_params ();
1888 ipa_unregister_cgraph_hooks ();
/* Print the parameter descriptors (ipa_node_params data) of function NODE
   to F.  */
1894 void
1895 ipa_print_node_params (FILE * f, struct cgraph_node *node)
1897 int i, count;
1898 tree temp;
1899 struct ipa_node_params *info;
1901 if (!node->analyzed)
1902 return;
1903 info = IPA_NODE_REF (node);
1904 fprintf (f, " function %s parameter descriptors:\n",
1905 cgraph_node_name (node));
1906 count = ipa_get_param_count (info);
1907 for (i = 0; i < count; i++)
1909 temp = ipa_get_param (info, i);
1910 if (TREE_CODE (temp) == PARM_DECL)
1911 fprintf (f, " param %d : %s", i,
1912 (DECL_NAME (temp)
1913 ? (*lang_hooks.decl_printable_name) (temp, 2)
1914 : "(unnamed)"));
1915 if (ipa_is_param_used (info, i))
1916 fprintf (f, " used");
1917 fprintf (f, "\n");
/* Print the parameter descriptors of all functions in the
   callgraph to F.  */
1924 void
1925 ipa_print_all_params (FILE * f)
1927 struct cgraph_node *node;
1929 fprintf (f, "\nFunction parameters:\n");
1930 for (node = cgraph_nodes; node; node = node->next)
1931 ipa_print_node_params (f, node);
1934 /* Return a heap allocated vector containing formal parameters of FNDECL. */
1936 VEC(tree, heap) *
1937 ipa_get_vector_of_formal_parms (tree fndecl)
1939 VEC(tree, heap) *args;
1940 int count;
1941 tree parm;
1943 count = count_formal_params_1 (fndecl);
1944 args = VEC_alloc (tree, heap, count);
1945 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
1946 VEC_quick_push (tree, args, parm);
1948 return args;
1951 /* Return a heap allocated vector containing types of formal parameters of
1952 function type FNTYPE. */
1954 static inline VEC(tree, heap) *
1955 get_vector_of_formal_parm_types (tree fntype)
1957 VEC(tree, heap) *types;
1958 int count = 0;
1959 tree t;
1961 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
1962 count++;
1964 types = VEC_alloc (tree, heap, count);
1965 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
1966 VEC_quick_push (tree, types, TREE_VALUE (t));
1968 return types;
1971 /* Modify the function declaration FNDECL and its type according to the plan
1972 in ADJUSTMENTS. Also set the base field of the individual adjustment
1973 structures to the actual parameter being modified, which is determined by
1974 its base_index field. */
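/* A purely illustrative sketch (the function, struct and parameter names are
   hypothetical): given

     void foo (struct S *p, int i);

   an adjustment vector whose element 0 copies the parameter with base_index 1
   (I) and whose element 1 creates a new integer parameter from a piece of the
   aggregate pointed to by the parameter with base_index 0 (neither a copy nor
   a removal, by_ref false) would rewrite the declaration into something along
   the lines of

     void foo (int i, int SYNTH.3);

   where the second parameter is a synthesized PARM_DECL whose name is derived
   from SYNTH_PARM_PREFIX.  */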
1976 void
1977 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
1978 const char *synth_parm_prefix)
1980 VEC(tree, heap) *oparms, *otypes;
1981 tree orig_type, new_type = NULL;
1982 tree old_arg_types, t, new_arg_types = NULL;
1983 tree parm, *link = &DECL_ARGUMENTS (fndecl);
1984 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
1985 tree new_reversed = NULL;
1986 bool care_for_types, last_parm_void;
1988 if (!synth_parm_prefix)
1989 synth_parm_prefix = "SYNTH";
1991 oparms = ipa_get_vector_of_formal_parms (fndecl);
1992 orig_type = TREE_TYPE (fndecl);
1993 old_arg_types = TYPE_ARG_TYPES (orig_type);
1995 /* The following test is an ugly hack; some functions simply don't have any
1996 arguments in their type. This is probably a bug, but we have to cope with it. */
1997 care_for_types = (old_arg_types != NULL_TREE);
1998 if (care_for_types)
2000 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
2001 == void_type_node);
2002 otypes = get_vector_of_formal_parm_types (orig_type);
2003 if (last_parm_void)
2004 gcc_assert (VEC_length (tree, oparms) + 1 == VEC_length (tree, otypes));
2005 else
2006 gcc_assert (VEC_length (tree, oparms) == VEC_length (tree, otypes));
2008 else
2010 last_parm_void = false;
2011 otypes = NULL;
2014 for (i = 0; i < len; i++)
2016 struct ipa_parm_adjustment *adj;
2017 gcc_assert (link);
2019 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2020 parm = VEC_index (tree, oparms, adj->base_index);
2021 adj->base = parm;
2023 if (adj->copy_param)
2025 if (care_for_types)
2026 new_arg_types = tree_cons (NULL_TREE, VEC_index (tree, otypes,
2027 adj->base_index),
2028 new_arg_types);
2029 *link = parm;
2030 link = &DECL_CHAIN (parm);
2032 else if (!adj->remove_param)
2034 tree new_parm;
2035 tree ptype;
2037 if (adj->by_ref)
2038 ptype = build_pointer_type (adj->type);
2039 else
2040 ptype = adj->type;
2042 if (care_for_types)
2043 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
2045 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
2046 ptype);
2047 DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);
2049 DECL_ARTIFICIAL (new_parm) = 1;
2050 DECL_ARG_TYPE (new_parm) = ptype;
2051 DECL_CONTEXT (new_parm) = fndecl;
2052 TREE_USED (new_parm) = 1;
2053 DECL_IGNORED_P (new_parm) = 1;
2054 layout_decl (new_parm, 0);
2056 add_referenced_var (new_parm);
2057 mark_sym_for_renaming (new_parm);
2058 adj->base = parm;
2059 adj->reduction = new_parm;
2061 *link = new_parm;
2063 link = &DECL_CHAIN (new_parm);
2067 *link = NULL_TREE;
2069 if (care_for_types)
2071 new_reversed = nreverse (new_arg_types);
2072 if (last_parm_void)
2074 if (new_reversed)
2075 TREE_CHAIN (new_arg_types) = void_list_node;
2076 else
2077 new_reversed = void_list_node;
2081 /* Use build_distinct_type_copy to preserve as much as possible from the
2082 original type (debug info, attribute lists etc.).
2083 The exception is METHOD_TYPEs, which must have a THIS argument; when we
2084 are asked to remove it, we need to build a new FUNCTION_TYPE
2085 instead. */
2086 if (TREE_CODE (orig_type) != METHOD_TYPE
2087 || (VEC_index (ipa_parm_adjustment_t, adjustments, 0)->copy_param
2088 && VEC_index (ipa_parm_adjustment_t, adjustments, 0)->base_index == 0))
2090 new_type = build_distinct_type_copy (orig_type);
2091 TYPE_ARG_TYPES (new_type) = new_reversed;
2093 else
2095 new_type
2096 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
2097 new_reversed));
2098 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
2099 DECL_VINDEX (fndecl) = NULL_TREE;
2102 /* When the signature changes, we need to clear the builtin info. */
2103 if (DECL_BUILT_IN (fndecl))
2105 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
2106 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
2109 /* This is a new type, not a copy of an old type. We need to reassociate
2110 variants. We can handle everything except the main variant lazily. */
2111 t = TYPE_MAIN_VARIANT (orig_type);
2112 if (orig_type != t)
2114 TYPE_MAIN_VARIANT (new_type) = t;
2115 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
2116 TYPE_NEXT_VARIANT (t) = new_type;
2118 else
2120 TYPE_MAIN_VARIANT (new_type) = new_type;
2121 TYPE_NEXT_VARIANT (new_type) = NULL;
2124 TREE_TYPE (fndecl) = new_type;
2125 DECL_VIRTUAL_P (fndecl) = 0;
2126 if (otypes)
2127 VEC_free (tree, heap, otypes);
2128 VEC_free (tree, heap, oparms);
2131 /* Modify the actual arguments of the function call statement STMT as
2132 indicated in ADJUSTMENTS. If this is a directly recursive call, CS must be
2133 NULL; otherwise it must be the corresponding call graph edge. */
2135 void
2136 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
2137 ipa_parm_adjustment_vec adjustments)
2139 VEC(tree, heap) *vargs;
2140 gimple new_stmt;
2141 gimple_stmt_iterator gsi;
2142 tree callee_decl;
2143 int i, len;
2145 len = VEC_length (ipa_parm_adjustment_t, adjustments);
2146 vargs = VEC_alloc (tree, heap, len);
2148 gsi = gsi_for_stmt (stmt);
2149 for (i = 0; i < len; i++)
2151 struct ipa_parm_adjustment *adj;
2153 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2155 if (adj->copy_param)
2157 tree arg = gimple_call_arg (stmt, adj->base_index);
2159 VEC_quick_push (tree, vargs, arg);
2161 else if (!adj->remove_param)
2163 tree expr, base, off;
2164 location_t loc;
2166 /* We create a new parameter out of the value of the old one; we can
2167 do the following kinds of transformations:
2169 - A scalar passed by reference is converted to a scalar passed by
2170 value (adj->by_ref is false and the type of the original
2171 actual argument is a pointer to a scalar).
2173 - A part of an aggregate is passed instead of the whole aggregate.
2174 The part can be passed either by value or by reference, which is
2175 determined by the value of adj->by_ref. Moreover, the code below
2176 handles both the situation when the original aggregate is passed
2177 by value (its type is not a pointer) and when it is passed by
2178 reference (it is a pointer to an aggregate).
2180 When the new argument is passed by reference (adj->by_ref is true),
2181 it must be a part of an aggregate and therefore we form it by
2182 simply taking the address of a reference inside the original
2183 aggregate. */
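/* As an illustration only (the variable name and offset are hypothetical): if
   the original call passed &s and the adjustment describes an integer piece at
   byte offset 4 of S passed by value, the code below builds a MEM_REF roughly
   equivalent to *(int *) ((char *) &s + 4); when adj->by_ref is set, the
   address of that reference is passed instead.  */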
2185 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
2186 base = gimple_call_arg (stmt, adj->base_index);
2187 loc = EXPR_LOCATION (base);
2189 if (TREE_CODE (base) == ADDR_EXPR
2190 && DECL_P (TREE_OPERAND (base, 0)))
2191 off = build_int_cst (TREE_TYPE (base),
2192 adj->offset / BITS_PER_UNIT);
2193 else if (TREE_CODE (base) != ADDR_EXPR
2194 && POINTER_TYPE_P (TREE_TYPE (base)))
2195 off = build_int_cst (TREE_TYPE (base), adj->offset / BITS_PER_UNIT);
2196 else
2198 HOST_WIDE_INT base_offset;
2199 tree prev_base;
2201 if (TREE_CODE (base) == ADDR_EXPR)
2202 base = TREE_OPERAND (base, 0);
2203 prev_base = base;
2204 base = get_addr_base_and_unit_offset (base, &base_offset);
2205 /* Aggregate arguments can have non-invariant addresses. */
2206 if (!base)
2208 base = build_fold_addr_expr (prev_base);
2209 off = build_int_cst (reference_alias_ptr_type (prev_base),
2210 adj->offset / BITS_PER_UNIT);
2212 else if (TREE_CODE (base) == MEM_REF)
2214 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
2215 base_offset
2216 + adj->offset / BITS_PER_UNIT);
2217 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
2218 off, 0);
2219 base = TREE_OPERAND (base, 0);
2221 else
2223 off = build_int_cst (reference_alias_ptr_type (prev_base),
2224 base_offset
2225 + adj->offset / BITS_PER_UNIT);
2226 base = build_fold_addr_expr (base);
2230 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
2231 if (adj->by_ref)
2232 expr = build_fold_addr_expr (expr);
2234 expr = force_gimple_operand_gsi (&gsi, expr,
2235 adj->by_ref
2236 || is_gimple_reg_type (adj->type),
2237 NULL, true, GSI_SAME_STMT);
2238 VEC_quick_push (tree, vargs, expr);
2242 if (dump_file && (dump_flags & TDF_DETAILS))
2244 fprintf (dump_file, "replacing stmt:");
2245 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
2248 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
2249 new_stmt = gimple_build_call_vec (callee_decl, vargs);
2250 VEC_free (tree, heap, vargs);
2251 if (gimple_call_lhs (stmt))
2252 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
2254 gimple_set_block (new_stmt, gimple_block (stmt));
2255 if (gimple_has_location (stmt))
2256 gimple_set_location (new_stmt, gimple_location (stmt));
2257 gimple_call_copy_flags (new_stmt, stmt);
2258 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
2260 if (dump_file && (dump_flags & TDF_DETAILS))
2262 fprintf (dump_file, "with stmt:");
2263 print_gimple_stmt (dump_file, new_stmt, 0, 0);
2264 fprintf (dump_file, "\n");
2266 gsi_replace (&gsi, new_stmt, true);
2267 if (cs)
2268 cgraph_set_call_stmt (cs, new_stmt);
2269 update_ssa (TODO_update_ssa);
2270 free_dominance_info (CDI_DOMINATORS);
2273 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
2275 static bool
2276 index_in_adjustments_multiple_times_p (int base_index,
2277 ipa_parm_adjustment_vec adjustments)
2279 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2280 bool one = false;
2282 for (i = 0; i < len; i++)
2284 struct ipa_parm_adjustment *adj;
2285 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2287 if (adj->base_index == base_index)
2289 if (one)
2290 return true;
2291 else
2292 one = true;
2295 return false;
2299 /* Return adjustments that should have the same effect on function parameters
2300 and call arguments as if they were first changed according to adjustments in
2301 INNER and then by adjustments in OUTER. */
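/* For illustration (the shapes of the vectors are hypothetical): if INNER
   turns the original parameters (P0, P1, P2) into (P1, <piece of P2 at offset
   32>) and OUTER then keeps only its second parameter, the combined vector
   keeps only the piece of the original P2 at offset 32 and carries over any
   removal entries recorded in INNER.  */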
2303 ipa_parm_adjustment_vec
2304 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
2305 ipa_parm_adjustment_vec outer)
2307 int i, outlen = VEC_length (ipa_parm_adjustment_t, outer);
2308 int inlen = VEC_length (ipa_parm_adjustment_t, inner);
2309 int removals = 0;
2310 ipa_parm_adjustment_vec adjustments, tmp;
2312 tmp = VEC_alloc (ipa_parm_adjustment_t, heap, inlen);
2313 for (i = 0; i < inlen; i++)
2315 struct ipa_parm_adjustment *n;
2316 n = VEC_index (ipa_parm_adjustment_t, inner, i);
2318 if (n->remove_param)
2319 removals++;
2320 else
2321 VEC_quick_push (ipa_parm_adjustment_t, tmp, n);
2324 adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, outlen + removals);
2325 for (i = 0; i < outlen; i++)
2327 struct ipa_parm_adjustment *r;
2328 struct ipa_parm_adjustment *out = VEC_index (ipa_parm_adjustment_t,
2329 outer, i);
2330 struct ipa_parm_adjustment *in = VEC_index (ipa_parm_adjustment_t, tmp,
2331 out->base_index);
2333 gcc_assert (!in->remove_param);
2334 if (out->remove_param)
2336 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
2338 r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
2339 memset (r, 0, sizeof (*r));
2340 r->remove_param = true;
2342 continue;
2345 r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
2346 memset (r, 0, sizeof (*r));
2347 r->base_index = in->base_index;
2348 r->type = out->type;
2350 /* FIXME: Create nonlocal value too. */
2352 if (in->copy_param && out->copy_param)
2353 r->copy_param = true;
2354 else if (in->copy_param)
2355 r->offset = out->offset;
2356 else if (out->copy_param)
2357 r->offset = in->offset;
2358 else
2359 r->offset = in->offset + out->offset;
2362 for (i = 0; i < inlen; i++)
2364 struct ipa_parm_adjustment *n = VEC_index (ipa_parm_adjustment_t,
2365 inner, i);
2367 if (n->remove_param)
2368 VEC_quick_push (ipa_parm_adjustment_t, adjustments, n);
2371 VEC_free (ipa_parm_adjustment_t, heap, tmp);
2372 return adjustments;
2375 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a
2376 human-friendly way, assuming they are meant to be applied to FNDECL. */
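/* A hypothetical fragment of the output produced below, for a parameter that
   is copied verbatim, might read

     IPA param adjustments: 0. base_index: 0 - p, copy_param, type: <...>

   with offset and by_ref information printed instead for the other kinds of
   adjustments.  */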
2378 void
2379 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
2380 tree fndecl)
2382 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2383 bool first = true;
2384 VEC(tree, heap) *parms = ipa_get_vector_of_formal_parms (fndecl);
2386 fprintf (file, "IPA param adjustments: ");
2387 for (i = 0; i < len; i++)
2389 struct ipa_parm_adjustment *adj;
2390 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2392 if (!first)
2393 fprintf (file, " ");
2394 else
2395 first = false;
2397 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
2398 print_generic_expr (file, VEC_index (tree, parms, adj->base_index), 0);
2399 if (adj->base)
2401 fprintf (file, ", base: ");
2402 print_generic_expr (file, adj->base, 0);
2404 if (adj->reduction)
2406 fprintf (file, ", reduction: ");
2407 print_generic_expr (file, adj->reduction, 0);
2409 if (adj->new_ssa_base)
2411 fprintf (file, ", new_ssa_base: ");
2412 print_generic_expr (file, adj->new_ssa_base, 0);
2415 if (adj->copy_param)
2416 fprintf (file, ", copy_param");
2417 else if (adj->remove_param)
2418 fprintf (file, ", remove_param");
2419 else
2420 fprintf (file, ", offset %li", (long) adj->offset);
2421 if (adj->by_ref)
2422 fprintf (file, ", by_ref");
2423 print_node_brief (file, ", type: ", adj->type, 0);
2424 fprintf (file, "\n");
2426 VEC_free (tree, heap, parms);
2429 /* Stream out jump function JUMP_FUNC to OB. */
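/* The record emitted below consists of a uleb128 encoding of the jump function
   type followed by a type-specific payload, e.g. for IPA_JF_PASS_THROUGH the
   operand tree, the formal_id and the operation code.  ipa_read_jump_function
   must consume exactly the same sequence.  */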
2431 static void
2432 ipa_write_jump_function (struct output_block *ob,
2433 struct ipa_jump_func *jump_func)
2435 lto_output_uleb128_stream (ob->main_stream,
2436 jump_func->type);
2438 switch (jump_func->type)
2440 case IPA_JF_UNKNOWN:
2441 break;
2442 case IPA_JF_KNOWN_TYPE:
2443 lto_output_tree (ob, jump_func->value.base_binfo, true);
2444 break;
2445 case IPA_JF_CONST:
2446 lto_output_tree (ob, jump_func->value.constant, true);
2447 break;
2448 case IPA_JF_PASS_THROUGH:
2449 lto_output_tree (ob, jump_func->value.pass_through.operand, true);
2450 lto_output_uleb128_stream (ob->main_stream,
2451 jump_func->value.pass_through.formal_id);
2452 lto_output_uleb128_stream (ob->main_stream,
2453 jump_func->value.pass_through.operation);
2454 break;
2455 case IPA_JF_ANCESTOR:
2456 lto_output_uleb128_stream (ob->main_stream,
2457 jump_func->value.ancestor.offset);
2458 lto_output_tree (ob, jump_func->value.ancestor.type, true);
2459 lto_output_uleb128_stream (ob->main_stream,
2460 jump_func->value.ancestor.formal_id);
2461 break;
2462 case IPA_JF_CONST_MEMBER_PTR:
2463 lto_output_tree (ob, jump_func->value.member_cst.pfn, true);
2464 lto_output_tree (ob, jump_func->value.member_cst.delta, false);
2465 break;
2469 /* Read in jump function JUMP_FUNC from IB. */
2471 static void
2472 ipa_read_jump_function (struct lto_input_block *ib,
2473 struct ipa_jump_func *jump_func,
2474 struct data_in *data_in)
2476 jump_func->type = (enum jump_func_type) lto_input_uleb128 (ib);
2478 switch (jump_func->type)
2480 case IPA_JF_UNKNOWN:
2481 break;
2482 case IPA_JF_KNOWN_TYPE:
2483 jump_func->value.base_binfo = lto_input_tree (ib, data_in);
2484 break;
2485 case IPA_JF_CONST:
2486 jump_func->value.constant = lto_input_tree (ib, data_in);
2487 break;
2488 case IPA_JF_PASS_THROUGH:
2489 jump_func->value.pass_through.operand = lto_input_tree (ib, data_in);
2490 jump_func->value.pass_through.formal_id = lto_input_uleb128 (ib);
2491 jump_func->value.pass_through.operation = (enum tree_code) lto_input_uleb128 (ib);
2492 break;
2493 case IPA_JF_ANCESTOR:
2494 jump_func->value.ancestor.offset = lto_input_uleb128 (ib);
2495 jump_func->value.ancestor.type = lto_input_tree (ib, data_in);
2496 jump_func->value.ancestor.formal_id = lto_input_uleb128 (ib);
2497 break;
2498 case IPA_JF_CONST_MEMBER_PTR:
2499 jump_func->value.member_cst.pfn = lto_input_tree (ib, data_in);
2500 jump_func->value.member_cst.delta = lto_input_tree (ib, data_in);
2501 break;
2505 /* Stream out to OB the parts of cgraph_indirect_call_info corresponding to
2506 CS that are relevant to indirect inlining. */
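/* The layout written below is: the parameter index and the ancestor offset as
   sleb128 values, a bitpack holding the polymorphic flag and, for polymorphic
   calls only, the OBJ_TYPE_REF token and type.  ipa_read_indirect_edge_info
   mirrors this layout.  */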
2508 static void
2509 ipa_write_indirect_edge_info (struct output_block *ob,
2510 struct cgraph_edge *cs)
2512 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2513 struct bitpack_d bp;
2515 lto_output_sleb128_stream (ob->main_stream, ii->param_index);
2516 lto_output_sleb128_stream (ob->main_stream, ii->anc_offset);
2517 bp = bitpack_create (ob->main_stream);
2518 bp_pack_value (&bp, ii->polymorphic, 1);
2519 lto_output_bitpack (&bp);
2521 if (ii->polymorphic)
2523 lto_output_sleb128_stream (ob->main_stream, ii->otr_token);
2524 lto_output_tree (ob, ii->otr_type, true);
2528 /* Read in from IB the parts of cgraph_indirect_call_info corresponding to
2529 CS that are relevant to indirect inlining. */
2531 static void
2532 ipa_read_indirect_edge_info (struct lto_input_block *ib,
2533 struct data_in *data_in,
2534 struct cgraph_edge *cs)
2536 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2537 struct bitpack_d bp;
2539 ii->param_index = (int) lto_input_sleb128 (ib);
2540 ii->anc_offset = (HOST_WIDE_INT) lto_input_sleb128 (ib);
2541 bp = lto_input_bitpack (ib);
2542 ii->polymorphic = bp_unpack_value (&bp, 1);
2543 if (ii->polymorphic)
2545 ii->otr_token = (HOST_WIDE_INT) lto_input_sleb128 (ib);
2546 ii->otr_type = lto_input_tree (ib, data_in);
2550 /* Stream out NODE info to OB. */
2552 static void
2553 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
2555 int node_ref;
2556 lto_cgraph_encoder_t encoder;
2557 struct ipa_node_params *info = IPA_NODE_REF (node);
2558 int j;
2559 struct cgraph_edge *e;
2560 struct bitpack_d bp;
2562 encoder = ob->decl_state->cgraph_node_encoder;
2563 node_ref = lto_cgraph_encoder_encode (encoder, node);
2564 lto_output_uleb128_stream (ob->main_stream, node_ref);
2566 bp = bitpack_create (ob->main_stream);
2567 bp_pack_value (&bp, info->called_with_var_arguments, 1);
2568 gcc_assert (info->uses_analysis_done
2569 || ipa_get_param_count (info) == 0);
2570 gcc_assert (!info->node_enqueued);
2571 gcc_assert (!info->ipcp_orig_node);
2572 for (j = 0; j < ipa_get_param_count (info); j++)
2573 bp_pack_value (&bp, info->params[j].used, 1);
2574 lto_output_bitpack (&bp);
2575 for (e = node->callees; e; e = e->next_callee)
2577 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2579 lto_output_uleb128_stream (ob->main_stream,
2580 ipa_get_cs_argument_count (args));
2581 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
2582 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
2584 for (e = node->indirect_calls; e; e = e->next_callee)
2585 ipa_write_indirect_edge_info (ob, e);
2588 /* Stream in NODE info from IB. */
2590 static void
2591 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
2592 struct data_in *data_in)
2594 struct ipa_node_params *info = IPA_NODE_REF (node);
2595 int k;
2596 struct cgraph_edge *e;
2597 struct bitpack_d bp;
2599 ipa_initialize_node_params (node);
2601 bp = lto_input_bitpack (ib);
2602 info->called_with_var_arguments = bp_unpack_value (&bp, 1);
2603 if (ipa_get_param_count (info) != 0)
2604 info->uses_analysis_done = true;
2605 info->node_enqueued = false;
2606 for (k = 0; k < ipa_get_param_count (info); k++)
2607 info->params[k].used = bp_unpack_value (&bp, 1);
2608 for (e = node->callees; e; e = e->next_callee)
2610 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2611 int count = lto_input_uleb128 (ib);
2613 ipa_set_cs_argument_count (args, count);
2614 if (!count)
2615 continue;
2617 args->jump_functions = ggc_alloc_cleared_vec_ipa_jump_func
2618 (ipa_get_cs_argument_count (args));
2619 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
2620 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), data_in);
2622 for (e = node->indirect_calls; e; e = e->next_callee)
2623 ipa_read_indirect_edge_info (ib, data_in, e);
2626 /* Write jump functions for nodes in SET. */
2628 void
2629 ipa_prop_write_jump_functions (cgraph_node_set set)
2631 struct cgraph_node *node;
2632 struct output_block *ob = create_output_block (LTO_section_jump_functions);
2633 unsigned int count = 0;
2634 cgraph_node_set_iterator csi;
2636 ob->cgraph_node = NULL;
2638 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
2640 node = csi_node (csi);
2641 if (node->analyzed && IPA_NODE_REF (node) != NULL)
2642 count++;
2645 lto_output_uleb128_stream (ob->main_stream, count);
2647 /* Process all of the functions. */
2648 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
2650 node = csi_node (csi);
2651 if (node->analyzed && IPA_NODE_REF (node) != NULL)
2652 ipa_write_node_info (ob, node);
2654 lto_output_1_stream (ob->main_stream, 0);
2655 produce_asm (ob, NULL);
2656 destroy_output_block (ob);
2659 /* Read the jump function section, given as DATA of length LEN, belonging to file FILE_DATA. */
2661 static void
2662 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
2663 size_t len)
2665 const struct lto_function_header *header =
2666 (const struct lto_function_header *) data;
2667 const int32_t cfg_offset = sizeof (struct lto_function_header);
2668 const int32_t main_offset = cfg_offset + header->cfg_size;
2669 const int32_t string_offset = main_offset + header->main_size;
2670 struct data_in *data_in;
2671 struct lto_input_block ib_main;
2672 unsigned int i;
2673 unsigned int count;
2675 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
2676 header->main_size);
2678 data_in =
2679 lto_data_in_create (file_data, (const char *) data + string_offset,
2680 header->string_size, NULL);
2681 count = lto_input_uleb128 (&ib_main);
2683 for (i = 0; i < count; i++)
2685 unsigned int index;
2686 struct cgraph_node *node;
2687 lto_cgraph_encoder_t encoder;
2689 index = lto_input_uleb128 (&ib_main);
2690 encoder = file_data->cgraph_node_encoder;
2691 node = lto_cgraph_encoder_deref (encoder, index);
2692 gcc_assert (node->analyzed);
2693 ipa_read_node_info (&ib_main, node, data_in);
2695 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
2696 len);
2697 lto_data_in_delete (data_in);
2700 /* Read ipcp jump functions. */
2702 void
2703 ipa_prop_read_jump_functions (void)
2705 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
2706 struct lto_file_decl_data *file_data;
2707 unsigned int j = 0;
2709 ipa_check_create_node_params ();
2710 ipa_check_create_edge_args ();
2711 ipa_register_cgraph_hooks ();
2713 while ((file_data = file_data_vec[j++]))
2715 size_t len;
2716 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
2718 if (data)
2719 ipa_prop_read_section (file_data, data, len);
2723 /* After merging units, we can get mismatches in argument counts.
2724 Decl merging might also have rendered parameter lists obsolete.
2725 Also compute the called_with_variable_arg info. */
2727 void
2728 ipa_update_after_lto_read (void)
2730 struct cgraph_node *node;
2731 struct cgraph_edge *cs;
2733 ipa_check_create_node_params ();
2734 ipa_check_create_edge_args ();
2736 for (node = cgraph_nodes; node; node = node->next)
2737 if (node->analyzed)
2738 ipa_initialize_node_params (node);
2740 for (node = cgraph_nodes; node; node = node->next)
2741 if (node->analyzed)
2742 for (cs = node->callees; cs; cs = cs->next_callee)
2744 if (ipa_get_cs_argument_count (IPA_EDGE_REF (cs))
2745 != ipa_get_param_count (IPA_NODE_REF (cs->callee)))
2746 ipa_set_called_with_variable_arg (IPA_NODE_REF (cs->callee));