1 /* Interprocedural analyses.
2 Copyright (C) 2005, 2007, 2008, 2009, 2010, 2011, 2012
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree.h"
25 #include "langhooks.h"
26 #include "ggc.h"
27 #include "target.h"
28 #include "cgraph.h"
29 #include "ipa-prop.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-inline.h"
33 #include "gimple.h"
34 #include "flags.h"
35 #include "timevar.h"
36 #include "flags.h"
37 #include "diagnostic.h"
38 #include "tree-pretty-print.h"
39 #include "gimple-pretty-print.h"
40 #include "lto-streamer.h"
41 #include "data-streamer.h"
42 #include "tree-streamer.h"
45 /* Intermediate information about a parameter that is only useful during the
46 run of ipa_analyze_node and is not kept afterwards. */
48 struct param_analysis_info
50 bool modified;
51 bitmap visited_statements;
54 /* Vector where the parameter infos are actually stored. */
55 VEC (ipa_node_params_t, heap) *ipa_node_params_vector;
/* Vector where the edge argument infos are actually stored. */
57 VEC (ipa_edge_args_t, gc) *ipa_edge_args_vector;
59 /* Holders of ipa cgraph hooks: */
60 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
61 static struct cgraph_node_hook_list *node_removal_hook_holder;
62 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
63 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
64 static struct cgraph_node_hook_list *function_insertion_hook_holder;
/* Return the index of the formal parameter whose tree is PTREE in the
   function which corresponds to INFO. */
69 int
70 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
72 int i, count;
74 count = ipa_get_param_count (info);
75 for (i = 0; i < count; i++)
76 if (ipa_get_param (info, i) == ptree)
77 return i;
79 return -1;
/* Populate the param_decl fields in the parameter descriptors of INFO, which
   corresponds to NODE. */
85 static void
86 ipa_populate_param_decls (struct cgraph_node *node,
87 struct ipa_node_params *info)
89 tree fndecl;
90 tree fnargs;
91 tree parm;
92 int param_num;
94 fndecl = node->symbol.decl;
95 fnargs = DECL_ARGUMENTS (fndecl);
96 param_num = 0;
97 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
99 VEC_index (ipa_param_descriptor_t,
100 info->descriptors, param_num)->decl = parm;
101 param_num++;
105 /* Return how many formal parameters FNDECL has. */
107 static inline int
108 count_formal_params (tree fndecl)
110 tree parm;
111 int count = 0;
113 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
114 count++;
116 return count;
119 /* Initialize the ipa_node_params structure associated with NODE by counting
120 the function parameters, creating the descriptors and populating their
121 param_decls. */
123 void
124 ipa_initialize_node_params (struct cgraph_node *node)
126 struct ipa_node_params *info = IPA_NODE_REF (node);
128 if (!info->descriptors)
130 int param_count;
132 param_count = count_formal_params (node->symbol.decl);
133 if (param_count)
135 VEC_safe_grow_cleared (ipa_param_descriptor_t, heap,
136 info->descriptors, param_count);
137 ipa_populate_param_decls (node, info);
142 /* Print the jump functions associated with call graph edge CS to file F. */
144 static void
145 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
147 int i, count;
149 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
150 for (i = 0; i < count; i++)
152 struct ipa_jump_func *jump_func;
153 enum jump_func_type type;
155 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
156 type = jump_func->type;
158 fprintf (f, " param %d: ", i);
159 if (type == IPA_JF_UNKNOWN)
160 fprintf (f, "UNKNOWN\n");
161 else if (type == IPA_JF_KNOWN_TYPE)
163 fprintf (f, "KNOWN TYPE: base ");
164 print_generic_expr (f, jump_func->value.known_type.base_type, 0);
165 fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
166 jump_func->value.known_type.offset);
167 print_generic_expr (f, jump_func->value.known_type.component_type, 0);
168 fprintf (f, "\n");
170 else if (type == IPA_JF_CONST)
172 tree val = jump_func->value.constant;
173 fprintf (f, "CONST: ");
174 print_generic_expr (f, val, 0);
175 if (TREE_CODE (val) == ADDR_EXPR
176 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
178 fprintf (f, " -> ");
print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)), 0);
182 fprintf (f, "\n");
184 else if (type == IPA_JF_CONST_MEMBER_PTR)
186 fprintf (f, "CONST MEMBER PTR: ");
187 print_generic_expr (f, jump_func->value.member_cst.pfn, 0);
188 fprintf (f, ", ");
189 print_generic_expr (f, jump_func->value.member_cst.delta, 0);
190 fprintf (f, "\n");
192 else if (type == IPA_JF_PASS_THROUGH)
194 fprintf (f, "PASS THROUGH: ");
195 fprintf (f, "%d, op %s ",
196 jump_func->value.pass_through.formal_id,
197 tree_code_name[(int)
198 jump_func->value.pass_through.operation]);
199 if (jump_func->value.pass_through.operation != NOP_EXPR)
200 print_generic_expr (f,
201 jump_func->value.pass_through.operand, 0);
202 fprintf (f, "\n");
204 else if (type == IPA_JF_ANCESTOR)
206 fprintf (f, "ANCESTOR: ");
207 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
208 jump_func->value.ancestor.formal_id,
209 jump_func->value.ancestor.offset);
210 print_generic_expr (f, jump_func->value.ancestor.type, 0);
211 fprintf (f, "\n");
217 /* Print the jump functions of all arguments on all call graph edges going from
218 NODE to file F. */
220 void
221 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
223 struct cgraph_edge *cs;
224 int i;
226 fprintf (f, " Jump functions of caller %s:\n", cgraph_node_name (node));
227 for (cs = node->callees; cs; cs = cs->next_callee)
229 if (!ipa_edge_args_info_available_for_edge_p (cs))
230 continue;
232 fprintf (f, " callsite %s/%i -> %s/%i : \n",
233 xstrdup (cgraph_node_name (node)), node->uid,
234 xstrdup (cgraph_node_name (cs->callee)), cs->callee->uid);
235 ipa_print_node_jump_functions_for_edge (f, cs);
238 for (cs = node->indirect_calls, i = 0; cs; cs = cs->next_callee, i++)
240 if (!ipa_edge_args_info_available_for_edge_p (cs))
241 continue;
243 if (cs->call_stmt)
245 fprintf (f, " indirect callsite %d for stmt ", i);
246 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
248 else
249 fprintf (f, " indirect callsite %d :\n", i);
250 ipa_print_node_jump_functions_for_edge (f, cs);
255 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
257 void
258 ipa_print_all_jump_functions (FILE *f)
260 struct cgraph_node *node;
262 fprintf (f, "\nJump functions:\n");
263 FOR_EACH_FUNCTION (node)
265 ipa_print_node_jump_functions (f, node);
269 /* Set JFUNC to be a known type jump function. */
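/* A known type jump function records that the argument is a part, at offset
   OFFSET, of an object whose dynamic type is known to be BASE_TYPE;
   COMPONENT_TYPE is the type of the passed component itself. */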
271 static void
272 ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
273 tree base_type, tree component_type)
275 jfunc->type = IPA_JF_KNOWN_TYPE;
jfunc->value.known_type.offset = offset;
277 jfunc->value.known_type.base_type = base_type;
278 jfunc->value.known_type.component_type = component_type;
/* Set JFUNC to be a constant jump function. */
283 static void
284 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant)
286 jfunc->type = IPA_JF_CONST;
287 jfunc->value.constant = constant;
290 /* Set JFUNC to be a simple pass-through jump function. */
291 static void
292 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id)
294 jfunc->type = IPA_JF_PASS_THROUGH;
295 jfunc->value.pass_through.operand = NULL_TREE;
296 jfunc->value.pass_through.formal_id = formal_id;
297 jfunc->value.pass_through.operation = NOP_EXPR;
/* Set JFUNC to be an arithmetic pass-through jump function. */
302 static void
303 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
304 tree operand, enum tree_code operation)
306 jfunc->type = IPA_JF_PASS_THROUGH;
307 jfunc->value.pass_through.operand = operand;
308 jfunc->value.pass_through.formal_id = formal_id;
309 jfunc->value.pass_through.operation = operation;
312 /* Set JFUNC to be an ancestor jump function. */
314 static void
315 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
316 tree type, int formal_id)
318 jfunc->type = IPA_JF_ANCESTOR;
319 jfunc->value.ancestor.formal_id = formal_id;
320 jfunc->value.ancestor.offset = offset;
321 jfunc->value.ancestor.type = type;
/* Set JFUNC to be a constant member pointer jump function, with PFN and DELTA
   as the constant value. */
327 static void
328 ipa_set_jf_member_ptr_cst (struct ipa_jump_func *jfunc,
329 tree pfn, tree delta)
331 jfunc->type = IPA_JF_CONST_MEMBER_PTR;
332 jfunc->value.member_cst.pfn = pfn;
333 jfunc->value.member_cst.delta = delta;
336 /* Structure to be passed in between detect_type_change and
337 check_stmt_for_type_change. */
339 struct type_change_info
/* Offset into the object at which the virtual method table pointer that we
   are looking for resides. */
343 HOST_WIDE_INT offset;
344 /* The declaration or SSA_NAME pointer of the base that we are checking for
345 type change. */
346 tree object;
347 /* If we actually can tell the type that the object has changed to, it is
348 stored in this field. Otherwise it remains NULL_TREE. */
349 tree known_current_type;
350 /* Set to true if dynamic type change has been detected. */
351 bool type_maybe_changed;
352 /* Set to true if multiple types have been encountered. known_current_type
353 must be disregarded in that case. */
354 bool multiple_types_encountered;
357 /* Return true if STMT can modify a virtual method table pointer.
359 This function makes special assumptions about both constructors and
360 destructors which are all the functions that are allowed to alter the VMT
361 pointers. It assumes that destructors begin with assignment into all VMT
362 pointers and that constructors essentially look in the following way:
364 1) The very first thing they do is that they call constructors of ancestor
365 sub-objects that have them.
2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type of the constructor.
3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run. Only this may include calling
372 virtual functions, directly or indirectly.
374 There is no way to call a constructor of an ancestor sub-object in any
375 other way.
377 This means that we do not have to care whether constructors get the correct
378 type information because they will always change it (in fact, if we define
379 the type to be given by the VMT pointer, it is undefined).
381 The most important fact to derive from the above is that if, for some
statement in section 3, we try to detect whether the dynamic type has
383 changed, we can safely ignore all calls as we examine the function body
384 backwards until we reach statements in section 2 because these calls cannot
385 be ancestor constructors or destructors (if the input is not bogus) and so
386 do not change the dynamic type (this holds true only for automatically
387 allocated objects but at the moment we devirtualize only these). We then
388 must detect that statements in section 2 change the dynamic type and can try
389 to derive the new type. That is enough and we can stop, we will never see
390 the calls into constructors of sub-objects in this code. Therefore we can
391 safely ignore all call statements that we traverse.
394 static bool
395 stmt_may_be_vtbl_ptr_store (gimple stmt)
397 if (is_gimple_call (stmt))
398 return false;
399 else if (is_gimple_assign (stmt))
401 tree lhs = gimple_assign_lhs (stmt);
403 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
405 if (flag_strict_aliasing
406 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
407 return false;
409 if (TREE_CODE (lhs) == COMPONENT_REF
410 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
411 return false;
/* In the future we might want to use get_base_ref_and_offset to find
   if there is a field corresponding to the offset and if so, proceed
   almost as if it were a component ref. */
417 return true;
/* If STMT can be proved to be an assignment to the virtual method table
   pointer of the object described by TCI and the type associated with the new
   table can be identified, return that type. Otherwise return NULL_TREE. */
424 static tree
425 extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
427 HOST_WIDE_INT offset, size, max_size;
428 tree lhs, rhs, base;
430 if (!gimple_assign_single_p (stmt))
431 return NULL_TREE;
433 lhs = gimple_assign_lhs (stmt);
434 rhs = gimple_assign_rhs1 (stmt);
435 if (TREE_CODE (lhs) != COMPONENT_REF
436 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
437 || TREE_CODE (rhs) != ADDR_EXPR)
438 return NULL_TREE;
439 rhs = get_base_address (TREE_OPERAND (rhs, 0));
440 if (!rhs
441 || TREE_CODE (rhs) != VAR_DECL
442 || !DECL_VIRTUAL_P (rhs))
443 return NULL_TREE;
445 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
446 if (offset != tci->offset
447 || size != POINTER_SIZE
448 || max_size != POINTER_SIZE)
449 return NULL_TREE;
450 if (TREE_CODE (base) == MEM_REF)
452 if (TREE_CODE (tci->object) != MEM_REF
453 || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
454 || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
455 TREE_OPERAND (base, 1)))
456 return NULL_TREE;
458 else if (tci->object != base)
459 return NULL_TREE;
461 return DECL_CONTEXT (rhs);
464 /* Callback of walk_aliased_vdefs and a helper function for
465 detect_type_change to check whether a particular statement may modify
466 the virtual table pointer, and if possible also determine the new type of
467 the (sub-)object. It stores its result into DATA, which points to a
468 type_change_info structure. */
470 static bool
471 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
473 gimple stmt = SSA_NAME_DEF_STMT (vdef);
474 struct type_change_info *tci = (struct type_change_info *) data;
476 if (stmt_may_be_vtbl_ptr_store (stmt))
478 tree type;
479 type = extr_type_from_vtbl_ptr_store (stmt, tci);
480 if (tci->type_maybe_changed
481 && type != tci->known_current_type)
482 tci->multiple_types_encountered = true;
483 tci->known_current_type = type;
484 tci->type_maybe_changed = true;
485 return true;
487 else
488 return false;
/* Like detect_type_change but with an extra argument COMP_TYPE which will
   become the component type part of the new JFUNC if a dynamic type change is
   detected and the new base type is identified. */
497 static bool
498 detect_type_change_1 (tree arg, tree base, tree comp_type, gimple call,
499 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
501 struct type_change_info tci;
502 ao_ref ao;
504 gcc_checking_assert (DECL_P (arg)
505 || TREE_CODE (arg) == MEM_REF
506 || handled_component_p (arg));
507 /* Const calls cannot call virtual methods through VMT and so type changes do
508 not matter. */
509 if (!flag_devirtualize || !gimple_vuse (call))
510 return false;
512 ao_ref_init (&ao, arg);
513 ao.base = base;
514 ao.offset = offset;
515 ao.size = POINTER_SIZE;
516 ao.max_size = ao.size;
518 tci.offset = offset;
519 tci.object = get_base_address (arg);
520 tci.known_current_type = NULL_TREE;
521 tci.type_maybe_changed = false;
522 tci.multiple_types_encountered = false;
524 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
525 &tci, NULL);
526 if (!tci.type_maybe_changed)
527 return false;
529 if (!tci.known_current_type
530 || tci.multiple_types_encountered
531 || offset != 0)
532 jfunc->type = IPA_JF_UNKNOWN;
533 else
534 ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);
536 return true;
539 /* Detect whether the dynamic type of ARG has changed (before callsite CALL) by
looking for assignments to its virtual table pointer. If it has, return true
541 and fill in the jump function JFUNC with relevant type information or set it
542 to unknown. ARG is the object itself (not a pointer to it, unless
543 dereferenced). BASE is the base of the memory access as returned by
544 get_ref_base_and_extent, as is the offset. */
546 static bool
547 detect_type_change (tree arg, tree base, gimple call,
548 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
550 return detect_type_change_1 (arg, base, TREE_TYPE (arg), call, jfunc, offset);
553 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
554 SSA name (its dereference will become the base and the offset is assumed to
555 be zero). */
557 static bool
558 detect_type_change_ssa (tree arg, gimple call, struct ipa_jump_func *jfunc)
560 tree comp_type;
562 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
563 if (!flag_devirtualize
564 || !POINTER_TYPE_P (TREE_TYPE (arg))
565 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != RECORD_TYPE)
566 return false;
568 comp_type = TREE_TYPE (TREE_TYPE (arg));
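/* Build a MEM_REF representing the object that ARG points to; this
   dereference then serves as both the base and the accessed expression and
   the offset of the looked-for virtual table pointer within it is zero. */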
569 arg = build2 (MEM_REF, ptr_type_node, arg,
570 build_int_cst (ptr_type_node, 0));
572 return detect_type_change_1 (arg, arg, comp_type, call, jfunc, 0);
/* Callback of walk_aliased_vdefs. Flags that it has been invoked by setting
   the boolean variable pointed to by DATA to true. */
578 static bool
579 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
580 void *data)
582 bool *b = (bool *) data;
583 *b = true;
584 return true;
587 /* Return true if the formal parameter PARM might have been modified in this
588 function before reaching the statement STMT. PARM_AINFO is a pointer to a
589 structure containing temporary information about PARM. */
591 static bool
592 is_parm_modified_before_stmt (struct param_analysis_info *parm_ainfo,
593 gimple stmt, tree parm)
595 bool modified = false;
596 ao_ref refd;
598 if (parm_ainfo->modified)
599 return true;
601 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
602 ao_ref_init (&refd, parm);
603 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
604 &modified, &parm_ainfo->visited_statements);
605 if (modified)
607 parm_ainfo->modified = true;
608 return true;
610 return false;
/* If STMT is an assignment that loads a value from a parameter declaration
   that has not been modified, return the index of the parameter in
   ipa_node_params. Otherwise return -1. */
617 static int
618 load_from_unmodified_param (struct ipa_node_params *info,
619 struct param_analysis_info *parms_ainfo,
620 gimple stmt)
622 int index;
623 tree op1;
625 if (!gimple_assign_single_p (stmt))
626 return -1;
628 op1 = gimple_assign_rhs1 (stmt);
629 if (TREE_CODE (op1) != PARM_DECL)
630 return -1;
632 index = ipa_get_param_decl_index (info, op1);
633 if (index < 0
634 || is_parm_modified_before_stmt (&parms_ainfo[index], stmt, op1))
635 return -1;
637 return index;
640 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
641 of an assignment statement STMT, try to determine whether we are actually
642 handling any of the following cases and construct an appropriate jump
643 function into JFUNC if so:
645 1) The passed value is loaded from a formal parameter which is not a gimple
646 register (most probably because it is addressable, the value has to be
647 scalar) and we can guarantee the value has not changed. This case can
648 therefore be described by a simple pass-through jump function. For example:
650 foo (int a)
652 int a.0;
654 a.0_2 = a;
655 bar (a.0_2);
657 2) The passed value can be described by a simple arithmetic pass-through
658 jump function. E.g.
660 foo (int a)
662 int D.2064;
664 D.2064_4 = a.1(D) + 4;
665 bar (D.2064_4);
This case can also occur in combination with the previous one, e.g.:
669 foo (int a, int z)
671 int a.0;
672 int D.2064;
674 a.0_3 = a;
675 D.2064_4 = a.0_3 + 4;
676 foo (D.2064_4);
3) The passed value is an address of an object within another one (which is
   also passed by reference). Such situations are described by an ancestor
   jump function, for example:
682 B::foo() (struct B * const this)
684 struct A * D.1845;
686 D.1845_2 = &this_1(D)->D.1748;
687 A::bar (D.1845_2);
INFO is the structure describing individual parameters; it is used across
   different stages of IPA optimizations. PARMS_AINFO contains the information
   that is only needed for intraprocedural analysis. */
693 static void
694 compute_complex_assign_jump_func (struct ipa_node_params *info,
695 struct param_analysis_info *parms_ainfo,
696 struct ipa_jump_func *jfunc,
697 gimple call, gimple stmt, tree name)
699 HOST_WIDE_INT offset, size, max_size;
700 tree op1, tc_ssa, base, ssa;
701 int index;
703 op1 = gimple_assign_rhs1 (stmt);
705 if (TREE_CODE (op1) == SSA_NAME)
707 if (SSA_NAME_IS_DEFAULT_DEF (op1))
708 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
709 else
710 index = load_from_unmodified_param (info, parms_ainfo,
711 SSA_NAME_DEF_STMT (op1));
712 tc_ssa = op1;
714 else
716 index = load_from_unmodified_param (info, parms_ainfo, stmt);
717 tc_ssa = gimple_assign_lhs (stmt);
720 if (index >= 0)
722 tree op2 = gimple_assign_rhs2 (stmt);
724 if (op2)
726 if (!is_gimple_ip_invariant (op2)
727 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
728 && !useless_type_conversion_p (TREE_TYPE (name),
729 TREE_TYPE (op1))))
730 return;
732 ipa_set_jf_arith_pass_through (jfunc, index, op2,
733 gimple_assign_rhs_code (stmt));
735 else if (gimple_assign_single_p (stmt)
736 && !detect_type_change_ssa (tc_ssa, call, jfunc))
737 ipa_set_jf_simple_pass_through (jfunc, index);
738 return;
741 if (TREE_CODE (op1) != ADDR_EXPR)
742 return;
743 op1 = TREE_OPERAND (op1, 0);
744 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
745 return;
746 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
747 if (TREE_CODE (base) != MEM_REF
748 /* If this is a varying address, punt. */
749 || max_size == -1
750 || max_size != size)
751 return;
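/* get_ref_base_and_extent computed the offset relative to the MEM_REF base;
   add the byte offset encoded in the MEM_REF itself, scaled to bits. */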
752 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
753 ssa = TREE_OPERAND (base, 0);
754 if (TREE_CODE (ssa) != SSA_NAME
755 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
756 || offset < 0)
757 return;
/* Dynamic types are changed only in constructors and destructors. */
760 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
761 if (index >= 0
762 && !detect_type_change (op1, base, call, jfunc, offset))
763 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index);
766 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
767 it looks like:
769 iftmp.1_3 = &obj_2(D)->D.1762;
771 The base of the MEM_REF must be a default definition SSA NAME of a
parameter. Return NULL_TREE otherwise. In case of success, the
773 whole MEM_REF expression is returned and the offset calculated from any
774 handled components and the MEM_REF itself is stored into *OFFSET. The whole
775 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
777 static tree
778 get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
780 HOST_WIDE_INT size, max_size;
781 tree expr, parm, obj;
783 if (!gimple_assign_single_p (assign))
784 return NULL_TREE;
785 expr = gimple_assign_rhs1 (assign);
787 if (TREE_CODE (expr) != ADDR_EXPR)
788 return NULL_TREE;
789 expr = TREE_OPERAND (expr, 0);
790 obj = expr;
791 expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
793 if (TREE_CODE (expr) != MEM_REF
794 /* If this is a varying address, punt. */
795 || max_size == -1
796 || max_size != size
797 || *offset < 0)
798 return NULL_TREE;
799 parm = TREE_OPERAND (expr, 0);
800 if (TREE_CODE (parm) != SSA_NAME
801 || !SSA_NAME_IS_DEFAULT_DEF (parm)
802 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
803 return NULL_TREE;
805 *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
806 *obj_p = obj;
807 return expr;
811 /* Given that an actual argument is an SSA_NAME that is a result of a phi
812 statement PHI, try to find out whether NAME is in fact a
813 multiple-inheritance typecast from a descendant into an ancestor of a formal
814 parameter and thus can be described by an ancestor jump function and if so,
815 write the appropriate function into JFUNC.
817 Essentially we want to match the following pattern:
819 if (obj_2(D) != 0B)
820 goto <bb 3>;
821 else
822 goto <bb 4>;
824 <bb 3>:
825 iftmp.1_3 = &obj_2(D)->D.1762;
827 <bb 4>:
828 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
829 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
830 return D.1879_6; */
832 static void
833 compute_complex_ancestor_jump_func (struct ipa_node_params *info,
834 struct ipa_jump_func *jfunc,
835 gimple call, gimple phi)
837 HOST_WIDE_INT offset;
838 gimple assign, cond;
839 basic_block phi_bb, assign_bb, cond_bb;
840 tree tmp, parm, expr, obj;
841 int index, i;
843 if (gimple_phi_num_args (phi) != 2)
844 return;
846 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
847 tmp = PHI_ARG_DEF (phi, 0);
848 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
849 tmp = PHI_ARG_DEF (phi, 1);
850 else
851 return;
852 if (TREE_CODE (tmp) != SSA_NAME
853 || SSA_NAME_IS_DEFAULT_DEF (tmp)
854 || !POINTER_TYPE_P (TREE_TYPE (tmp))
855 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
856 return;
858 assign = SSA_NAME_DEF_STMT (tmp);
859 assign_bb = gimple_bb (assign);
860 if (!single_pred_p (assign_bb))
861 return;
862 expr = get_ancestor_addr_info (assign, &obj, &offset);
863 if (!expr)
864 return;
865 parm = TREE_OPERAND (expr, 0);
866 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
867 gcc_assert (index >= 0);
869 cond_bb = single_pred (assign_bb);
870 cond = last_stmt (cond_bb);
871 if (!cond
872 || gimple_code (cond) != GIMPLE_COND
873 || gimple_cond_code (cond) != NE_EXPR
874 || gimple_cond_lhs (cond) != parm
875 || !integer_zerop (gimple_cond_rhs (cond)))
876 return;
878 phi_bb = gimple_bb (phi);
879 for (i = 0; i < 2; i++)
881 basic_block pred = EDGE_PRED (phi_bb, i)->src;
882 if (pred != assign_bb && pred != cond_bb)
883 return;
886 if (!detect_type_change (obj, expr, call, jfunc, offset))
887 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index);
890 /* Given OP which is passed as an actual argument to a called function,
891 determine if it is possible to construct a KNOWN_TYPE jump function for it
892 and if so, create one and store it to JFUNC. */
894 static void
895 compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
896 gimple call)
898 HOST_WIDE_INT offset, size, max_size;
899 tree base;
901 if (!flag_devirtualize
902 || TREE_CODE (op) != ADDR_EXPR
903 || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE)
904 return;
906 op = TREE_OPERAND (op, 0);
907 base = get_ref_base_and_extent (op, &offset, &size, &max_size);
908 if (!DECL_P (base)
909 || max_size == -1
910 || max_size != size
911 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
912 || is_global_var (base))
913 return;
915 if (!TYPE_BINFO (TREE_TYPE (base))
916 || detect_type_change (op, base, call, jfunc, offset))
917 return;
919 ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base), TREE_TYPE (op));
923 /* Determine the jump functions of scalar arguments. Scalar means SSA names
924 and constants of a number of selected types. INFO is the ipa_node_params
925 structure associated with the caller, PARMS_AINFO describes state of
926 analysis with respect to individual formal parameters. ARGS is the
927 ipa_edge_args structure describing the callsite CALL which is the call
statement being examined. */
930 static void
931 compute_scalar_jump_functions (struct ipa_node_params *info,
932 struct param_analysis_info *parms_ainfo,
933 struct ipa_edge_args *args,
934 gimple call)
936 tree arg;
937 unsigned num = 0;
939 for (num = 0; num < gimple_call_num_args (call); num++)
941 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, num);
942 arg = gimple_call_arg (call, num);
944 if (is_gimple_ip_invariant (arg))
945 ipa_set_jf_constant (jfunc, arg);
946 else if (TREE_CODE (arg) == SSA_NAME)
948 if (SSA_NAME_IS_DEFAULT_DEF (arg))
950 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
952 if (index >= 0
953 && !detect_type_change_ssa (arg, call, jfunc))
954 ipa_set_jf_simple_pass_through (jfunc, index);
956 else
958 gimple stmt = SSA_NAME_DEF_STMT (arg);
959 if (is_gimple_assign (stmt))
960 compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
961 call, stmt, arg);
962 else if (gimple_code (stmt) == GIMPLE_PHI)
963 compute_complex_ancestor_jump_func (info, jfunc, call, stmt);
966 else
967 compute_known_type_jump_func (arg, jfunc, call);
971 /* Inspect the given TYPE and return true iff it has the same structure (the
972 same number of fields of the same types) as a C++ member pointer. If
973 METHOD_PTR and DELTA are non-NULL, store the trees representing the
974 corresponding fields there. */
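/* Note: a C++ pointer to member function is lowered into a RECORD_TYPE with
   two fields, conventionally called __pfn (a pointer to the method, or an
   encoded vtable index for virtual methods) and __delta (an integral
   adjustment of the this pointer); that is the shape checked for below. */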
976 static bool
977 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
979 tree fld;
981 if (TREE_CODE (type) != RECORD_TYPE)
982 return false;
984 fld = TYPE_FIELDS (type);
985 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
986 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE)
987 return false;
989 if (method_ptr)
990 *method_ptr = fld;
992 fld = DECL_CHAIN (fld);
if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld)))
994 return false;
995 if (delta)
996 *delta = fld;
998 if (DECL_CHAIN (fld))
999 return false;
1001 return true;
1004 /* Go through arguments of the CALL and for every one that looks like a member
1005 pointer, check whether it can be safely declared pass-through and if so,
mark that in the corresponding item of ARGS. Return true iff
   there are non-pass-through member pointers within the arguments. INFO
   describes formal parameters of the caller. PARMS_AINFO is a pointer to a
   vector containing intermediate information about each formal parameter. */
1011 static bool
1012 compute_pass_through_member_ptrs (struct ipa_node_params *info,
1013 struct param_analysis_info *parms_ainfo,
1014 struct ipa_edge_args *args,
1015 gimple call)
1017 bool undecided_members = false;
1018 unsigned num;
1019 tree arg;
1021 for (num = 0; num < gimple_call_num_args (call); num++)
1023 arg = gimple_call_arg (call, num);
1025 if (type_like_member_ptr_p (TREE_TYPE (arg), NULL, NULL))
1027 if (TREE_CODE (arg) == PARM_DECL)
1029 int index = ipa_get_param_decl_index (info, arg);
gcc_assert (index >= 0);
1032 if (!is_parm_modified_before_stmt (&parms_ainfo[index], call,
1033 arg))
1035 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args,
1036 num);
1037 ipa_set_jf_simple_pass_through (jfunc, index);
1039 else
1040 undecided_members = true;
1042 else
1043 undecided_members = true;
1047 return undecided_members;
1050 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1051 return the rhs of its defining statement. */
1053 static inline tree
1054 get_ssa_def_if_simple_copy (tree rhs)
1056 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1058 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1060 if (gimple_assign_single_p (def_stmt))
1061 rhs = gimple_assign_rhs1 (def_stmt);
1062 else
1063 break;
1065 return rhs;
1068 /* Traverse statements from CALL backwards, scanning whether the argument ARG
1069 which is a member pointer is filled in with constant values. If it is, fill
1070 the jump function JFUNC in appropriately. METHOD_FIELD and DELTA_FIELD are
1071 fields of the record type of the member pointer. To give an example, we
1072 look for a pattern looking like the following:
1074 D.2515.__pfn ={v} printStuff;
1075 D.2515.__delta ={v} 0;
1076 i_1 = doprinting (D.2515); */
1078 static void
1079 determine_cst_member_ptr (gimple call, tree arg, tree method_field,
1080 tree delta_field, struct ipa_jump_func *jfunc)
1082 gimple_stmt_iterator gsi;
1083 tree method = NULL_TREE;
1084 tree delta = NULL_TREE;
1086 gsi = gsi_for_stmt (call);
1088 gsi_prev (&gsi);
1089 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1091 gimple stmt = gsi_stmt (gsi);
1092 tree lhs, rhs, fld;
1094 if (!stmt_may_clobber_ref_p (stmt, arg))
1095 continue;
1096 if (!gimple_assign_single_p (stmt))
1097 return;
1099 lhs = gimple_assign_lhs (stmt);
1100 rhs = gimple_assign_rhs1 (stmt);
1102 if (TREE_CODE (lhs) != COMPONENT_REF
1103 || TREE_OPERAND (lhs, 0) != arg)
1104 return;
1106 fld = TREE_OPERAND (lhs, 1);
1107 if (!method && fld == method_field)
1109 rhs = get_ssa_def_if_simple_copy (rhs);
1110 if (TREE_CODE (rhs) == ADDR_EXPR
1111 && TREE_CODE (TREE_OPERAND (rhs, 0)) == FUNCTION_DECL
1112 && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs, 0))) == METHOD_TYPE)
1114 method = TREE_OPERAND (rhs, 0);
1115 if (delta)
1117 ipa_set_jf_member_ptr_cst (jfunc, rhs, delta);
1118 return;
1121 else
1122 return;
1125 if (!delta && fld == delta_field)
1127 rhs = get_ssa_def_if_simple_copy (rhs);
1128 if (TREE_CODE (rhs) == INTEGER_CST)
1130 delta = rhs;
1131 if (method)
ipa_set_jf_member_ptr_cst (jfunc, method, delta);
1134 return;
1137 else
1138 return;
1142 return;
/* Go through the arguments of the CALL and for every member pointer within,
   try to determine whether it is a constant. If it is, create a corresponding
   constant jump function in ARGS, the structure holding the jump functions
   associated with the call. */
1150 static void
1151 compute_cst_member_ptr_arguments (struct ipa_edge_args *args,
1152 gimple call)
1154 unsigned num;
1155 tree arg, method_field, delta_field;
1157 for (num = 0; num < gimple_call_num_args (call); num++)
1159 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, num);
1160 arg = gimple_call_arg (call, num);
1162 if (jfunc->type == IPA_JF_UNKNOWN
1163 && type_like_member_ptr_p (TREE_TYPE (arg), &method_field,
1164 &delta_field))
1165 determine_cst_member_ptr (call, arg, method_field, delta_field, jfunc);
/* Compute jump functions for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite. */
1173 static void
1174 ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
1175 struct cgraph_edge *cs)
1177 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1178 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1179 gimple call = cs->call_stmt;
1180 int arg_num = gimple_call_num_args (call);
1182 if (arg_num == 0 || args->jump_functions)
1183 return;
1184 VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions, arg_num);
1186 /* We will deal with constants and SSA scalars first: */
1187 compute_scalar_jump_functions (info, parms_ainfo, args, call);
1189 /* Let's check whether there are any potential member pointers and if so,
1190 whether we can determine their functions as pass_through. */
1191 if (!compute_pass_through_member_ptrs (info, parms_ainfo, args, call))
1192 return;
1194 /* Finally, let's check whether we actually pass a new constant member
1195 pointer here... */
1196 compute_cst_member_ptr_arguments (args, call);
1199 /* Compute jump functions for all edges - both direct and indirect - outgoing
1200 from NODE. Also count the actual arguments in the process. */
1202 static void
1203 ipa_compute_jump_functions (struct cgraph_node *node,
1204 struct param_analysis_info *parms_ainfo)
1206 struct cgraph_edge *cs;
1208 for (cs = node->callees; cs; cs = cs->next_callee)
1210 struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
1211 NULL);
1212 /* We do not need to bother analyzing calls to unknown
1213 functions unless they may become known during lto/whopr. */
1214 if (!callee->analyzed && !flag_lto)
1215 continue;
1216 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
1219 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
1220 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
1223 /* If RHS looks like a rhs of a statement loading pfn from a member
1224 pointer formal parameter, return the parameter, otherwise return
1225 NULL. If USE_DELTA, then we look for a use of the delta field
1226 rather than the pfn. */
1228 static tree
1229 ipa_get_member_ptr_load_param (tree rhs, bool use_delta)
1231 tree rec, ref_field, ref_offset, fld, fld_offset, ptr_field, delta_field;
1233 if (TREE_CODE (rhs) == COMPONENT_REF)
1235 ref_field = TREE_OPERAND (rhs, 1);
1236 rhs = TREE_OPERAND (rhs, 0);
1238 else
1239 ref_field = NULL_TREE;
1240 if (TREE_CODE (rhs) != MEM_REF)
1241 return NULL_TREE;
1242 rec = TREE_OPERAND (rhs, 0);
1243 if (TREE_CODE (rec) != ADDR_EXPR)
1244 return NULL_TREE;
1245 rec = TREE_OPERAND (rec, 0);
1246 if (TREE_CODE (rec) != PARM_DECL
1247 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1248 return NULL_TREE;
1250 ref_offset = TREE_OPERAND (rhs, 1);
1252 if (ref_field)
1254 if (integer_nonzerop (ref_offset))
1255 return NULL_TREE;
1257 if (use_delta)
1258 fld = delta_field;
1259 else
1260 fld = ptr_field;
1262 return ref_field == fld ? rec : NULL_TREE;
1265 if (use_delta)
1266 fld_offset = byte_position (delta_field);
1267 else
1268 fld_offset = byte_position (ptr_field);
1270 return tree_int_cst_equal (ref_offset, fld_offset) ? rec : NULL_TREE;
1273 /* If STMT looks like a statement loading a value from a member pointer formal
1274 parameter, this function returns that parameter. */
1276 static tree
1277 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta)
1279 tree rhs;
1281 if (!gimple_assign_single_p (stmt))
1282 return NULL_TREE;
1284 rhs = gimple_assign_rhs1 (stmt);
1285 return ipa_get_member_ptr_load_param (rhs, use_delta);
1288 /* Returns true iff T is an SSA_NAME defined by a statement. */
1290 static bool
1291 ipa_is_ssa_with_stmt_def (tree t)
1293 if (TREE_CODE (t) == SSA_NAME
1294 && !SSA_NAME_IS_DEFAULT_DEF (t))
1295 return true;
1296 else
1297 return false;
1300 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1301 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1302 indirect call graph edge. */
1304 static struct cgraph_edge *
1305 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
1307 struct cgraph_edge *cs;
1309 cs = cgraph_edge (node, stmt);
1310 cs->indirect_info->param_index = param_index;
1311 cs->indirect_info->anc_offset = 0;
1312 cs->indirect_info->polymorphic = 0;
1313 return cs;
1316 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1317 (described by INFO). PARMS_AINFO is a pointer to a vector containing
1318 intermediate information about each formal parameter. Currently it checks
whether the call is a call through a pointer that is a formal parameter and
   if so, the parameter is noted and an indirect call graph edge
1321 describing the call is created. This is very simple for ordinary pointers
1322 represented in SSA but not-so-nice when it comes to member pointers. The
1323 ugly part of this function does nothing more than trying to match the
1324 pattern of such a call. An example of such a pattern is the gimple dump
1325 below, the call is on the last line:
1327 <bb 2>:
1328 f$__delta_5 = f.__delta;
1329 f$__pfn_24 = f.__pfn;
or

<bb 2>:
1333 f$__delta_5 = MEM[(struct *)&f];
1334 f$__pfn_24 = MEM[(struct *)&f + 4B];
1336 and a few lines below:
1338 <bb 5>
1339 D.2496_3 = (int) f$__pfn_24;
1340 D.2497_4 = D.2496_3 & 1;
1341 if (D.2497_4 != 0)
1342 goto <bb 3>;
1343 else
1344 goto <bb 4>;
1346 <bb 6>:
1347 D.2500_7 = (unsigned int) f$__delta_5;
1348 D.2501_8 = &S + D.2500_7;
1349 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1350 D.2503_10 = *D.2502_9;
1351 D.2504_12 = f$__pfn_24 + -1;
1352 D.2505_13 = (unsigned int) D.2504_12;
1353 D.2506_14 = D.2503_10 + D.2505_13;
1354 D.2507_15 = *D.2506_14;
1355 iftmp.11_16 = (String:: *) D.2507_15;
1357 <bb 7>:
1358 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1359 D.2500_19 = (unsigned int) f$__delta_5;
1360 D.2508_20 = &S + D.2500_19;
1361 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1363 Such patterns are results of simple calls to a member pointer:
1365 int doprinting (int (MyString::* f)(int) const)
1367 MyString S ("somestring");
1369 return (S.*f)(4);
1373 static void
1374 ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1375 struct ipa_node_params *info,
1376 struct param_analysis_info *parms_ainfo,
1377 gimple call, tree target)
1379 gimple def;
1380 tree n1, n2;
1381 gimple d1, d2;
1382 tree rec, rec2, cond;
1383 gimple branch;
1384 int index;
1385 basic_block bb, virt_bb, join;
1387 if (SSA_NAME_IS_DEFAULT_DEF (target))
1389 tree var = SSA_NAME_VAR (target);
1390 index = ipa_get_param_decl_index (info, var);
1391 if (index >= 0)
1392 ipa_note_param_call (node, index, call);
1393 return;
1396 /* Now we need to try to match the complex pattern of calling a member
1397 pointer. */
1399 if (!POINTER_TYPE_P (TREE_TYPE (target))
1400 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1401 return;
1403 def = SSA_NAME_DEF_STMT (target);
1404 if (gimple_code (def) != GIMPLE_PHI)
1405 return;
1407 if (gimple_phi_num_args (def) != 2)
1408 return;
1410 /* First, we need to check whether one of these is a load from a member
1411 pointer that is a parameter to this function. */
1412 n1 = PHI_ARG_DEF (def, 0);
1413 n2 = PHI_ARG_DEF (def, 1);
1414 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1415 return;
1416 d1 = SSA_NAME_DEF_STMT (n1);
1417 d2 = SSA_NAME_DEF_STMT (n2);
1419 join = gimple_bb (def);
1420 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false)))
1422 if (ipa_get_stmt_member_ptr_load_param (d2, false))
1423 return;
1425 bb = EDGE_PRED (join, 0)->src;
1426 virt_bb = gimple_bb (d2);
1428 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false)))
1430 bb = EDGE_PRED (join, 1)->src;
1431 virt_bb = gimple_bb (d1);
1433 else
1434 return;
1436 /* Second, we need to check that the basic blocks are laid out in the way
1437 corresponding to the pattern. */
1439 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1440 || single_pred (virt_bb) != bb
1441 || single_succ (virt_bb) != join)
1442 return;
1444 /* Third, let's see that the branching is done depending on the least
1445 significant bit of the pfn. */
1447 branch = last_stmt (bb);
1448 if (!branch || gimple_code (branch) != GIMPLE_COND)
1449 return;
1451 if ((gimple_cond_code (branch) != NE_EXPR
1452 && gimple_cond_code (branch) != EQ_EXPR)
1453 || !integer_zerop (gimple_cond_rhs (branch)))
1454 return;
1456 cond = gimple_cond_lhs (branch);
1457 if (!ipa_is_ssa_with_stmt_def (cond))
1458 return;
1460 def = SSA_NAME_DEF_STMT (cond);
1461 if (!is_gimple_assign (def)
1462 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1463 || !integer_onep (gimple_assign_rhs2 (def)))
1464 return;
1466 cond = gimple_assign_rhs1 (def);
1467 if (!ipa_is_ssa_with_stmt_def (cond))
1468 return;
1470 def = SSA_NAME_DEF_STMT (cond);
1472 if (is_gimple_assign (def)
1473 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
1475 cond = gimple_assign_rhs1 (def);
1476 if (!ipa_is_ssa_with_stmt_def (cond))
1477 return;
1478 def = SSA_NAME_DEF_STMT (cond);
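/* Depending on the target ABI, the bit distinguishing virtual member
   functions is stored either in the pfn or in the delta field, so the load
   feeding the branch condition must come from whichever field holds it. */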
1481 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1482 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1483 == ptrmemfunc_vbit_in_delta));
1485 if (rec != rec2)
1486 return;
1488 index = ipa_get_param_decl_index (info, rec);
1489 if (index >= 0 && !is_parm_modified_before_stmt (&parms_ainfo[index],
1490 call, rec))
1491 ipa_note_param_call (node, index, call);
1493 return;
1496 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1497 object referenced in the expression is a formal parameter of the caller
1498 (described by INFO), create a call note for the statement. */
1500 static void
1501 ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1502 struct ipa_node_params *info, gimple call,
1503 tree target)
1505 struct cgraph_edge *cs;
1506 struct cgraph_indirect_call_info *ii;
1507 struct ipa_jump_func jfunc;
1508 tree obj = OBJ_TYPE_REF_OBJECT (target);
1509 int index;
1510 HOST_WIDE_INT anc_offset;
1512 if (!flag_devirtualize)
1513 return;
1515 if (TREE_CODE (obj) != SSA_NAME)
1516 return;
1518 if (SSA_NAME_IS_DEFAULT_DEF (obj))
1520 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
1521 return;
1523 anc_offset = 0;
1524 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
1525 gcc_assert (index >= 0);
1526 if (detect_type_change_ssa (obj, call, &jfunc))
1527 return;
1529 else
1531 gimple stmt = SSA_NAME_DEF_STMT (obj);
1532 tree expr;
1534 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
1535 if (!expr)
1536 return;
1537 index = ipa_get_param_decl_index (info,
1538 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
1539 gcc_assert (index >= 0);
1540 if (detect_type_change (obj, expr, call, &jfunc, anc_offset))
1541 return;
1544 cs = ipa_note_param_call (node, index, call);
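/* Record the details of the polymorphic call: ANC_OFFSET is the offset of
   the sub-object whose dynamic type determines the target, OTR_TOKEN is the
   index of the virtual method in the vtable and OTR_TYPE is the type in whose
   vtable it is looked up. */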
1545 ii = cs->indirect_info;
1546 ii->anc_offset = anc_offset;
1547 ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
1548 ii->otr_type = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (target)));
1549 ii->polymorphic = 1;
1552 /* Analyze a call statement CALL whether and how it utilizes formal parameters
1553 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
1554 containing intermediate information about each formal parameter. */
1556 static void
1557 ipa_analyze_call_uses (struct cgraph_node *node,
1558 struct ipa_node_params *info,
1559 struct param_analysis_info *parms_ainfo, gimple call)
1561 tree target = gimple_call_fn (call);
1563 if (!target)
1564 return;
1565 if (TREE_CODE (target) == SSA_NAME)
1566 ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
1567 else if (TREE_CODE (target) == OBJ_TYPE_REF)
1568 ipa_analyze_virtual_call_uses (node, info, call, target);
1572 /* Analyze the call statement STMT with respect to formal parameters (described
1573 in INFO) of caller given by NODE. Currently it only checks whether formal
1574 parameters are called. PARMS_AINFO is a pointer to a vector containing
1575 intermediate information about each formal parameter. */
1577 static void
1578 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
1579 struct param_analysis_info *parms_ainfo, gimple stmt)
1581 if (is_gimple_call (stmt))
1582 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
/* Callback of walk_stmt_load_store_addr_ops, used for loads, stores and
   address-taking operands alike. If OP is a parameter declaration, mark it
   as used in the info structure passed in DATA. */
1589 static bool
1590 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
1591 tree op, void *data)
1593 struct ipa_node_params *info = (struct ipa_node_params *) data;
1595 op = get_base_address (op);
1596 if (op
1597 && TREE_CODE (op) == PARM_DECL)
1599 int index = ipa_get_param_decl_index (info, op);
1600 gcc_assert (index >= 0);
1601 ipa_set_param_used (info, index, true);
1604 return false;
1607 /* Scan the function body of NODE and inspect the uses of formal parameters.
1608 Store the findings in various structures of the associated ipa_node_params
1609 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
1610 vector containing intermediate information about each formal parameter. */
1612 static void
1613 ipa_analyze_params_uses (struct cgraph_node *node,
1614 struct param_analysis_info *parms_ainfo)
1616 tree decl = node->symbol.decl;
1617 basic_block bb;
1618 struct function *func;
1619 gimple_stmt_iterator gsi;
1620 struct ipa_node_params *info = IPA_NODE_REF (node);
1621 int i;
1623 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
1624 return;
1626 for (i = 0; i < ipa_get_param_count (info); i++)
1628 tree parm = ipa_get_param (info, i);
1629 /* For SSA regs see if parameter is used. For non-SSA we compute
1630 the flag during modification analysis. */
1631 if (is_gimple_reg (parm)
1632 && gimple_default_def (DECL_STRUCT_FUNCTION (node->symbol.decl), parm))
1633 ipa_set_param_used (info, i, true);
1636 func = DECL_STRUCT_FUNCTION (decl);
1637 FOR_EACH_BB_FN (bb, func)
1639 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1641 gimple stmt = gsi_stmt (gsi);
1643 if (is_gimple_debug (stmt))
1644 continue;
1646 ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
1647 walk_stmt_load_store_addr_ops (stmt, info,
1648 visit_ref_for_mod_analysis,
1649 visit_ref_for_mod_analysis,
1650 visit_ref_for_mod_analysis);
1652 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1653 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
1654 visit_ref_for_mod_analysis,
1655 visit_ref_for_mod_analysis,
1656 visit_ref_for_mod_analysis);
1659 info->uses_analysis_done = 1;
/* Initialize the array describing properties of formal parameters
1663 of NODE, analyze their uses and compute jump functions associated
1664 with actual arguments of calls from within NODE. */
1666 void
1667 ipa_analyze_node (struct cgraph_node *node)
1669 struct ipa_node_params *info;
1670 struct param_analysis_info *parms_ainfo;
1671 int i, param_count;
1673 ipa_check_create_node_params ();
1674 ipa_check_create_edge_args ();
1675 info = IPA_NODE_REF (node);
1676 push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
1677 current_function_decl = node->symbol.decl;
1678 ipa_initialize_node_params (node);
1680 param_count = ipa_get_param_count (info);
1681 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
1682 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
1684 ipa_analyze_params_uses (node, parms_ainfo);
1685 ipa_compute_jump_functions (node, parms_ainfo);
1687 for (i = 0; i < param_count; i++)
1688 if (parms_ainfo[i].visited_statements)
1689 BITMAP_FREE (parms_ainfo[i].visited_statements);
1691 current_function_decl = NULL;
1692 pop_cfun ();
/* Update the jump function DST when the call graph edge corresponding to SRC
   is being inlined, knowing that DST is of type ancestor and SRC of known
   type. */
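/* For instance (an illustrative example only): if SRC says the argument is
   an object of base type B starting at offset 32 and DST walks to an ancestor
   of type A another 64 bits further, the combined jump function describes a
   component of type A at offset 96 within an object of known base type B. */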
1700 static void
1701 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
1702 struct ipa_jump_func *dst)
1704 HOST_WIDE_INT combined_offset;
1705 tree combined_type;
1707 combined_offset = ipa_get_jf_known_type_offset (src)
1708 + ipa_get_jf_ancestor_offset (dst);
1709 combined_type = ipa_get_jf_ancestor_type (dst);
1711 ipa_set_jf_known_type (dst, combined_offset,
1712 ipa_get_jf_known_type_base_type (src),
1713 combined_type);
1716 /* Update the jump functions associated with call graph edge E when the call
1717 graph edge CS is being inlined, assuming that E->caller is already (possibly
1718 indirectly) inlined into CS->callee and that E has not been inlined. */
1720 static void
1721 update_jump_functions_after_inlining (struct cgraph_edge *cs,
1722 struct cgraph_edge *e)
1724 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
1725 struct ipa_edge_args *args = IPA_EDGE_REF (e);
1726 int count = ipa_get_cs_argument_count (args);
1727 int i;
1729 for (i = 0; i < count; i++)
1731 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
1733 if (dst->type == IPA_JF_ANCESTOR)
1735 struct ipa_jump_func *src;
1737 /* Variable number of arguments can cause havoc if we try to access
1738 one that does not exist in the inlined edge. So make sure we
1739 don't. */
1740 if (dst->value.ancestor.formal_id >= ipa_get_cs_argument_count (top))
1742 dst->type = IPA_JF_UNKNOWN;
1743 continue;
1746 src = ipa_get_ith_jump_func (top, dst->value.ancestor.formal_id);
1747 if (src->type == IPA_JF_KNOWN_TYPE)
1748 combine_known_type_and_ancestor_jfs (src, dst);
1749 else if (src->type == IPA_JF_PASS_THROUGH
1750 && src->value.pass_through.operation == NOP_EXPR)
1751 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
1752 else if (src->type == IPA_JF_ANCESTOR)
1754 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
1755 dst->value.ancestor.offset += src->value.ancestor.offset;
1757 else
1758 dst->type = IPA_JF_UNKNOWN;
1760 else if (dst->type == IPA_JF_PASS_THROUGH)
1762 struct ipa_jump_func *src;
1763 /* We must check range due to calls with variable number of arguments
1764 and we cannot combine jump functions with operations. */
1765 if (dst->value.pass_through.operation == NOP_EXPR
1766 && (dst->value.pass_through.formal_id
1767 < ipa_get_cs_argument_count (top)))
1769 src = ipa_get_ith_jump_func (top,
1770 dst->value.pass_through.formal_id);
1771 *dst = *src;
1773 else
1774 dst->type = IPA_JF_UNKNOWN;
1779 /* If TARGET is an addr_expr of a function declaration, make it the destination
1780 of an indirect edge IE and return the edge. Otherwise, return NULL. */
1782 struct cgraph_edge *
1783 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
1785 struct cgraph_node *callee;
1787 if (TREE_CODE (target) == ADDR_EXPR)
1788 target = TREE_OPERAND (target, 0);
1789 if (TREE_CODE (target) != FUNCTION_DECL)
1790 return NULL;
1791 callee = cgraph_get_node (target);
1792 if (!callee)
1793 return NULL;
1794 ipa_check_create_node_params ();
/* We cannot make edges to inline clones. It is a bug that someone removed
   the cgraph node too early. */
1798 gcc_assert (!callee->global.inlined_to);
1800 cgraph_make_edge_direct (ie, callee);
1801 if (dump_file)
1803 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
1804 "(%s/%i -> %s/%i), for stmt ",
1805 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
1806 xstrdup (cgraph_node_name (ie->caller)), ie->caller->uid,
1807 xstrdup (cgraph_node_name (ie->callee)), ie->callee->uid);
1808 if (ie->call_stmt)
1809 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
1810 else
1811 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
1813 callee = cgraph_function_or_thunk_node (callee, NULL);
1815 return ie;
1818 /* Try to find a destination for indirect edge IE that corresponds to a simple
1819 call or a call of a member function pointer and where the destination is a
1820 pointer formal parameter described by jump function JFUNC. If it can be
1821 determined, return the newly direct edge, otherwise return NULL. */
1823 static struct cgraph_edge *
1824 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
1825 struct ipa_jump_func *jfunc)
1827 tree target;
1829 if (jfunc->type == IPA_JF_CONST)
1830 target = ipa_get_jf_constant (jfunc);
1831 else if (jfunc->type == IPA_JF_CONST_MEMBER_PTR)
1832 target = ipa_get_jf_member_ptr_pfn (jfunc);
1833 else
1834 return NULL;
1836 return ipa_make_edge_direct_to_target (ie, target);
1839 /* Try to find a destination for indirect edge IE that corresponds to a
1840 virtual call based on a formal parameter which is described by jump
1841 function JFUNC and if it can be determined, make it direct and return the
1842 direct edge. Otherwise, return NULL. */
1844 static struct cgraph_edge *
1845 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
1846 struct ipa_jump_func *jfunc)
1848 tree binfo, target;
1850 if (jfunc->type != IPA_JF_KNOWN_TYPE)
1851 return NULL;
1853 binfo = TYPE_BINFO (ipa_get_jf_known_type_base_type (jfunc));
1854 gcc_checking_assert (binfo);
1855 binfo = get_binfo_at_offset (binfo, ipa_get_jf_known_type_offset (jfunc)
1856 + ie->indirect_info->anc_offset,
1857 ie->indirect_info->otr_type);
1858 if (binfo)
1859 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
1860 binfo);
1861 else
1862 return NULL;
1864 if (target)
1865 return ipa_make_edge_direct_to_target (ie, target);
1866 else
1867 return NULL;
/* Update the indirect call notes associated with NODE when CS is being
   inlined, assuming NODE is (potentially indirectly) inlined into CS->callee.
   Moreover, if the callee is discovered to be constant, create a new cgraph
   edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
   unless NEW_EDGES is NULL. Return true iff a new edge (or edges) was
   created. */
1876 static bool
1877 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
1878 struct cgraph_node *node,
1879 VEC (cgraph_edge_p, heap) **new_edges)
1881 struct ipa_edge_args *top;
1882 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
1883 bool res = false;
1885 ipa_check_create_edge_args ();
1886 top = IPA_EDGE_REF (cs);
1888 for (ie = node->indirect_calls; ie; ie = next_ie)
1890 struct cgraph_indirect_call_info *ici = ie->indirect_info;
1891 struct ipa_jump_func *jfunc;
1893 next_ie = ie->next_callee;
1895 if (ici->param_index == -1)
1896 continue;
1898 /* We must check the range because of calls with a variable number of arguments: */
1899 if (ici->param_index >= ipa_get_cs_argument_count (top))
1901 ici->param_index = -1;
1902 continue;
1905 jfunc = ipa_get_ith_jump_func (top, ici->param_index);
1906 if (jfunc->type == IPA_JF_PASS_THROUGH
1907 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
1908 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
1909 else if (jfunc->type == IPA_JF_ANCESTOR)
1911 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
1912 ici->anc_offset += ipa_get_jf_ancestor_offset (jfunc);
1914 else
1915 /* Either we can find a destination for this edge now or never. */
1916 ici->param_index = -1;
1918 if (!flag_indirect_inlining)
1919 continue;
1921 if (ici->polymorphic)
1922 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc);
1923 else
1924 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc);
1926 if (new_direct_edge)
1928 new_direct_edge->indirect_inlining_edge = 1;
1929 if (new_direct_edge->call_stmt)
1930 new_direct_edge->call_stmt_cannot_inline_p
1931 = !gimple_check_call_matching_types (new_direct_edge->call_stmt,
1932 new_direct_edge->callee->symbol.decl);
1933 if (new_edges)
1935 VEC_safe_push (cgraph_edge_p, heap, *new_edges,
1936 new_direct_edge);
1937 top = IPA_EDGE_REF (cs);
1938 res = true;
1943 return res;
1946 /* Recursively traverse the subtree of NODE (including NODE) made of inlined
1947 cgraph_edges when CS has been inlined and invoke
1948 update_indirect_edges_after_inlining on all nodes and
1949 update_jump_functions_after_inlining on all non-inlined edges that lead out
1950 of this subtree.  Newly discovered indirect edges will be added to
1951 *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff any new edges were
1952 created. */
1954 static bool
1955 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
1956 struct cgraph_node *node,
1957 VEC (cgraph_edge_p, heap) **new_edges)
1959 struct cgraph_edge *e;
1960 bool res;
1962 res = update_indirect_edges_after_inlining (cs, node, new_edges);
1964 for (e = node->callees; e; e = e->next_callee)
1965 if (!e->inline_failed)
1966 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
1967 else
1968 update_jump_functions_after_inlining (cs, e);
1969 for (e = node->indirect_calls; e; e = e->next_callee)
1970 update_jump_functions_after_inlining (cs, e);
1972 return res;
1975 /* Update jump functions and call note functions on inlining the call site CS.
1976 CS is expected to lead to a node already cloned by
1977 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
1978 *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff any new edges were
1979 created. */
1981 bool
1982 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
1983 VEC (cgraph_edge_p, heap) **new_edges)
1985 bool changed;
1986 /* Do nothing if the preparation phase has not been carried out yet
1987 (i.e. during early inlining). */
1988 if (!ipa_node_params_vector)
1989 return false;
1990 gcc_assert (ipa_edge_args_vector);
1992 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
1994 /* We do not keep jump functions of inlined edges up to date. Better to free
1995 them so we do not access them accidentally. */
1996 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
1997 return changed;
2000 /* Frees all dynamically allocated structures that the argument info points
2001 to. */
2003 void
2004 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
2006 if (args->jump_functions)
2007 ggc_free (args->jump_functions);
2009 memset (args, 0, sizeof (*args));
2012 /* Free all ipa_edge_args structures. */
2014 void
2015 ipa_free_all_edge_args (void)
2017 int i;
2018 struct ipa_edge_args *args;
2020 FOR_EACH_VEC_ELT (ipa_edge_args_t, ipa_edge_args_vector, i, args)
2021 ipa_free_edge_args_substructures (args);
2023 VEC_free (ipa_edge_args_t, gc, ipa_edge_args_vector);
2024 ipa_edge_args_vector = NULL;
2027 /* Frees all dynamically allocated structures that the param info points
2028 to. */
2030 void
2031 ipa_free_node_params_substructures (struct ipa_node_params *info)
2033 VEC_free (ipa_param_descriptor_t, heap, info->descriptors);
2034 free (info->lattices);
2035 /* Lattice values and their sources are deallocated with their allocation
2036 pool. */
2037 VEC_free (tree, heap, info->known_vals);
2038 memset (info, 0, sizeof (*info));
2041 /* Free all ipa_node_params structures. */
2043 void
2044 ipa_free_all_node_params (void)
2046 int i;
2047 struct ipa_node_params *info;
2049 FOR_EACH_VEC_ELT (ipa_node_params_t, ipa_node_params_vector, i, info)
2050 ipa_free_node_params_substructures (info);
2052 VEC_free (ipa_node_params_t, heap, ipa_node_params_vector);
2053 ipa_node_params_vector = NULL;
2056 /* Hook that is called by cgraph.c when an edge is removed. */
2058 static void
2059 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
2061 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
2062 if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
2063 <= (unsigned)cs->uid)
2064 return;
2065 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
2068 /* Hook that is called by cgraph.c when a node is removed. */
2070 static void
2071 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2073 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
2074 if (VEC_length (ipa_node_params_t, ipa_node_params_vector)
2075 <= (unsigned)node->uid)
2076 return;
2077 ipa_free_node_params_substructures (IPA_NODE_REF (node));
2080 /* Hook that is called by cgraph.c when an edge is duplicated. */
2082 static void
2083 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
2084 __attribute__((unused)) void *data)
2086 struct ipa_edge_args *old_args, *new_args;
2088 ipa_check_create_edge_args ();
2090 old_args = IPA_EDGE_REF (src);
2091 new_args = IPA_EDGE_REF (dst);
2093 new_args->jump_functions = VEC_copy (ipa_jump_func_t, gc,
2094 old_args->jump_functions);
2097 /* Hook that is called by cgraph.c when a node is duplicated. */
2099 static void
2100 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
2101 ATTRIBUTE_UNUSED void *data)
2103 struct ipa_node_params *old_info, *new_info;
2105 ipa_check_create_node_params ();
2106 old_info = IPA_NODE_REF (src);
2107 new_info = IPA_NODE_REF (dst);
2109 new_info->descriptors = VEC_copy (ipa_param_descriptor_t, heap,
2110 old_info->descriptors);
2111 new_info->lattices = NULL;
2112 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
2114 new_info->uses_analysis_done = old_info->uses_analysis_done;
2115 new_info->node_enqueued = old_info->node_enqueued;
2119 /* Analyze a function newly added to the callgraph. */
2121 static void
2122 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2124 ipa_analyze_node (node);
2127 /* Register our cgraph hooks if they are not already there. */
2129 void
2130 ipa_register_cgraph_hooks (void)
2132 if (!edge_removal_hook_holder)
2133 edge_removal_hook_holder =
2134 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
2135 if (!node_removal_hook_holder)
2136 node_removal_hook_holder =
2137 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
2138 if (!edge_duplication_hook_holder)
2139 edge_duplication_hook_holder =
2140 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
2141 if (!node_duplication_hook_holder)
2142 node_duplication_hook_holder =
2143 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
2144 function_insertion_hook_holder =
2145 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
2148 /* Unregister our cgraph hooks. */
2150 static void
2151 ipa_unregister_cgraph_hooks (void)
2153 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
2154 edge_removal_hook_holder = NULL;
2155 cgraph_remove_node_removal_hook (node_removal_hook_holder);
2156 node_removal_hook_holder = NULL;
2157 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
2158 edge_duplication_hook_holder = NULL;
2159 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
2160 node_duplication_hook_holder = NULL;
2161 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
2162 function_insertion_hook_holder = NULL;
2165 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2166 longer needed after ipa-cp. */
2168 void
2169 ipa_free_all_structures_after_ipa_cp (void)
2171 if (!optimize)
2173 ipa_free_all_edge_args ();
2174 ipa_free_all_node_params ();
2175 free_alloc_pool (ipcp_sources_pool);
2176 free_alloc_pool (ipcp_values_pool);
2177 ipa_unregister_cgraph_hooks ();
2181 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2182 longer needed after indirect inlining. */
2184 void
2185 ipa_free_all_structures_after_iinln (void)
2187 ipa_free_all_edge_args ();
2188 ipa_free_all_node_params ();
2189 ipa_unregister_cgraph_hooks ();
2190 if (ipcp_sources_pool)
2191 free_alloc_pool (ipcp_sources_pool);
2192 if (ipcp_values_pool)
2193 free_alloc_pool (ipcp_values_pool);
2196 /* Print the parameter descriptors of the function corresponding to NODE
2197 to F. */
2199 void
2200 ipa_print_node_params (FILE * f, struct cgraph_node *node)
2202 int i, count;
2203 tree temp;
2204 struct ipa_node_params *info;
2206 if (!node->analyzed)
2207 return;
2208 info = IPA_NODE_REF (node);
2209 fprintf (f, " function %s parameter descriptors:\n",
2210 cgraph_node_name (node));
2211 count = ipa_get_param_count (info);
2212 for (i = 0; i < count; i++)
2214 temp = ipa_get_param (info, i);
2215 if (TREE_CODE (temp) == PARM_DECL)
2216 fprintf (f, " param %d : %s", i,
2217 (DECL_NAME (temp)
2218 ? (*lang_hooks.decl_printable_name) (temp, 2)
2219 : "(unnamed)"));
2220 if (ipa_is_param_used (info, i))
2221 fprintf (f, " used");
2222 fprintf (f, "\n");
2226 /* Print the parameter descriptors of all functions in the
2227 callgraph to F. */
2229 void
2230 ipa_print_all_params (FILE * f)
2232 struct cgraph_node *node;
2234 fprintf (f, "\nFunction parameters:\n");
2235 FOR_EACH_FUNCTION (node)
2236 ipa_print_node_params (f, node);
2239 /* Return a heap allocated vector containing formal parameters of FNDECL. */
2241 VEC(tree, heap) *
2242 ipa_get_vector_of_formal_parms (tree fndecl)
2244 VEC(tree, heap) *args;
2245 int count;
2246 tree parm;
2248 count = count_formal_params (fndecl);
2249 args = VEC_alloc (tree, heap, count);
2250 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
2251 VEC_quick_push (tree, args, parm);
2253 return args;
2256 /* Return a heap allocated vector containing types of formal parameters of
2257 function type FNTYPE. */
2259 static inline VEC(tree, heap) *
2260 get_vector_of_formal_parm_types (tree fntype)
2262 VEC(tree, heap) *types;
2263 int count = 0;
2264 tree t;
2266 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2267 count++;
2269 types = VEC_alloc (tree, heap, count);
2270 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2271 VEC_quick_push (tree, types, TREE_VALUE (t));
2273 return types;
2276 /* Modify the function declaration FNDECL and its type according to the plan in
2277 ADJUSTMENTS.  It also sets the base fields of individual adjustment structures
2278 to reflect the actual parameters being modified, which are determined by the
2279 base_index field. */
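/* Illustrative sketch, assuming a typical IPA-SRA use (types and names are
   made up): for

     void foo (struct S *p);     /- only p->x is ever read -/

   ADJUSTMENTS may contain a single non-copy entry with base_index 0, so foo
   is rewritten to take one synthesized scalar parameter (named with the
   SYNTH prefix unless another synth_parm_prefix is given) instead of the
   pointer.  */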
2281 void
2282 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
2283 const char *synth_parm_prefix)
2285 VEC(tree, heap) *oparms, *otypes;
2286 tree orig_type, new_type = NULL;
2287 tree old_arg_types, t, new_arg_types = NULL;
2288 tree parm, *link = &DECL_ARGUMENTS (fndecl);
2289 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2290 tree new_reversed = NULL;
2291 bool care_for_types, last_parm_void;
2293 if (!synth_parm_prefix)
2294 synth_parm_prefix = "SYNTH";
2296 oparms = ipa_get_vector_of_formal_parms (fndecl);
2297 orig_type = TREE_TYPE (fndecl);
2298 old_arg_types = TYPE_ARG_TYPES (orig_type);
2300 /* The following test is an ugly hack; some functions simply do not have
2301 any arguments in their type.  This is probably a bug but well... */
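/* For instance, a function declared without a prototype, such as
   "int foo ();", has no argument list in its type at all.  */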
2302 care_for_types = (old_arg_types != NULL_TREE);
2303 if (care_for_types)
2305 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
2306 == void_type_node);
2307 otypes = get_vector_of_formal_parm_types (orig_type);
2308 if (last_parm_void)
2309 gcc_assert (VEC_length (tree, oparms) + 1 == VEC_length (tree, otypes));
2310 else
2311 gcc_assert (VEC_length (tree, oparms) == VEC_length (tree, otypes));
2313 else
2315 last_parm_void = false;
2316 otypes = NULL;
2319 for (i = 0; i < len; i++)
2321 struct ipa_parm_adjustment *adj;
2322 gcc_assert (link);
2324 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2325 parm = VEC_index (tree, oparms, adj->base_index);
2326 adj->base = parm;
2328 if (adj->copy_param)
2330 if (care_for_types)
2331 new_arg_types = tree_cons (NULL_TREE, VEC_index (tree, otypes,
2332 adj->base_index),
2333 new_arg_types);
2334 *link = parm;
2335 link = &DECL_CHAIN (parm);
2337 else if (!adj->remove_param)
2339 tree new_parm;
2340 tree ptype;
2342 if (adj->by_ref)
2343 ptype = build_pointer_type (adj->type);
2344 else
2345 ptype = adj->type;
2347 if (care_for_types)
2348 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
2350 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
2351 ptype);
2352 DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);
2354 DECL_ARTIFICIAL (new_parm) = 1;
2355 DECL_ARG_TYPE (new_parm) = ptype;
2356 DECL_CONTEXT (new_parm) = fndecl;
2357 TREE_USED (new_parm) = 1;
2358 DECL_IGNORED_P (new_parm) = 1;
2359 layout_decl (new_parm, 0);
2361 add_referenced_var (new_parm);
2362 mark_sym_for_renaming (new_parm);
2363 adj->base = parm;
2364 adj->reduction = new_parm;
2366 *link = new_parm;
2368 link = &DECL_CHAIN (new_parm);
2372 *link = NULL_TREE;
2374 if (care_for_types)
2376 new_reversed = nreverse (new_arg_types);
2377 if (last_parm_void)
2379 if (new_reversed)
2380 TREE_CHAIN (new_arg_types) = void_list_node;
2381 else
2382 new_reversed = void_list_node;
2386 /* Use copy_node to preserve as much as possible from the original type
2387 (debug info, attribute lists etc.).
2388 The exception is that METHOD_TYPEs must have a THIS argument; when we
2389 are asked to remove it, we need to build a new FUNCTION_TYPE
2390 instead. */
2391 if (TREE_CODE (orig_type) != METHOD_TYPE
2392 || (VEC_index (ipa_parm_adjustment_t, adjustments, 0)->copy_param
2393 && VEC_index (ipa_parm_adjustment_t, adjustments, 0)->base_index == 0))
2395 new_type = build_distinct_type_copy (orig_type);
2396 TYPE_ARG_TYPES (new_type) = new_reversed;
2398 else
2400 new_type
2401 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
2402 new_reversed));
2403 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
2404 DECL_VINDEX (fndecl) = NULL_TREE;
2407 /* When signature changes, we need to clear builtin info. */
2408 if (DECL_BUILT_IN (fndecl))
2410 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
2411 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
2414 /* This is a new type, not a copy of an old type. Need to reassociate
2415 variants. We can handle everything except the main variant lazily. */
2416 t = TYPE_MAIN_VARIANT (orig_type);
2417 if (orig_type != t)
2419 TYPE_MAIN_VARIANT (new_type) = t;
2420 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
2421 TYPE_NEXT_VARIANT (t) = new_type;
2423 else
2425 TYPE_MAIN_VARIANT (new_type) = new_type;
2426 TYPE_NEXT_VARIANT (new_type) = NULL;
2429 TREE_TYPE (fndecl) = new_type;
2430 DECL_VIRTUAL_P (fndecl) = 0;
2431 if (otypes)
2432 VEC_free (tree, heap, otypes);
2433 VEC_free (tree, heap, oparms);
2436 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
2437 If this is a directly recursive call, CS must be NULL. Otherwise it must
2438 contain the corresponding call graph edge. */
2440 void
2441 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
2442 ipa_parm_adjustment_vec adjustments)
2444 VEC(tree, heap) *vargs;
2445 VEC(tree, gc) **debug_args = NULL;
2446 gimple new_stmt;
2447 gimple_stmt_iterator gsi;
2448 tree callee_decl;
2449 int i, len;
2451 len = VEC_length (ipa_parm_adjustment_t, adjustments);
2452 vargs = VEC_alloc (tree, heap, len);
2453 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->symbol.decl;
2455 gsi = gsi_for_stmt (stmt);
2456 for (i = 0; i < len; i++)
2458 struct ipa_parm_adjustment *adj;
2460 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2462 if (adj->copy_param)
2464 tree arg = gimple_call_arg (stmt, adj->base_index);
2466 VEC_quick_push (tree, vargs, arg);
2468 else if (!adj->remove_param)
2470 tree expr, base, off;
2471 location_t loc;
2473 /* Since we create a new parameter out of the value of the old one, we
2474 can do the following kinds of transformations:
2476 - A scalar passed by reference is converted to a scalar passed by
2477 value. (adj->by_ref is false and the type of the original
2478 actual argument is a pointer to a scalar).
2480 - A part of an aggregate is passed instead of the whole aggregate.
2481 The part can be passed either by value or by reference, this is
2482 determined by value of adj->by_ref. Moreover, the code below
2483 handles both situations when the original aggregate is passed by
2484 value (its type is not a pointer) and when it is passed by
2485 reference (it is a pointer to an aggregate).
2487 When the new argument is passed by reference (adj->by_ref is true)
2488 it must be a part of an aggregate and therefore we form it by
2489 simply taking the address of a reference inside the original
2490 aggregate. */
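/* For illustration only (hypothetical aggregate s with scalar field x):
   with an adjustment that replaces the aggregate parameter by the piece at
   the offset of field x, a call

     foo (&s);

   is rewritten to pass that piece by value, roughly

     tmp = s.x;
     foo (tmp);

   or, when the new argument is by reference, to pass &s.x instead.  */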
2492 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
2493 base = gimple_call_arg (stmt, adj->base_index);
2494 loc = EXPR_LOCATION (base);
2496 if (TREE_CODE (base) != ADDR_EXPR
2497 && POINTER_TYPE_P (TREE_TYPE (base)))
2498 off = build_int_cst (adj->alias_ptr_type,
2499 adj->offset / BITS_PER_UNIT);
2500 else
2502 HOST_WIDE_INT base_offset;
2503 tree prev_base;
2505 if (TREE_CODE (base) == ADDR_EXPR)
2506 base = TREE_OPERAND (base, 0);
2507 prev_base = base;
2508 base = get_addr_base_and_unit_offset (base, &base_offset);
2509 /* Aggregate arguments can have non-invariant addresses. */
2510 if (!base)
2512 base = build_fold_addr_expr (prev_base);
2513 off = build_int_cst (adj->alias_ptr_type,
2514 adj->offset / BITS_PER_UNIT);
2516 else if (TREE_CODE (base) == MEM_REF)
2518 off = build_int_cst (adj->alias_ptr_type,
2519 base_offset
2520 + adj->offset / BITS_PER_UNIT);
2521 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
2522 off);
2523 base = TREE_OPERAND (base, 0);
2525 else
2527 off = build_int_cst (adj->alias_ptr_type,
2528 base_offset
2529 + adj->offset / BITS_PER_UNIT);
2530 base = build_fold_addr_expr (base);
2534 if (!adj->by_ref)
2536 tree type = adj->type;
2537 unsigned int align;
2538 unsigned HOST_WIDE_INT misalign;
2540 get_pointer_alignment_1 (base, &align, &misalign);
2541 misalign += (double_int_sext (tree_to_double_int (off),
2542 TYPE_PRECISION (TREE_TYPE (off))).low
2543 * BITS_PER_UNIT);
2544 misalign = misalign & (align - 1);
2545 if (misalign != 0)
2546 align = (misalign & -misalign);
2547 if (align < TYPE_ALIGN (type))
2548 type = build_aligned_type (type, align);
2549 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
2551 else
2553 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
2554 expr = build_fold_addr_expr (expr);
2557 expr = force_gimple_operand_gsi (&gsi, expr,
2558 adj->by_ref
2559 || is_gimple_reg_type (adj->type),
2560 NULL, true, GSI_SAME_STMT);
2561 VEC_quick_push (tree, vargs, expr);
2563 if (!adj->copy_param && MAY_HAVE_DEBUG_STMTS)
2565 unsigned int ix;
2566 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
2567 gimple def_temp;
2569 arg = gimple_call_arg (stmt, adj->base_index);
2570 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
2572 if (!fold_convertible_p (TREE_TYPE (origin), arg))
2573 continue;
2574 arg = fold_convert_loc (gimple_location (stmt),
2575 TREE_TYPE (origin), arg);
2577 if (debug_args == NULL)
2578 debug_args = decl_debug_args_insert (callee_decl);
2579 for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl); ix += 2)
2580 if (ddecl == origin)
2582 ddecl = VEC_index (tree, *debug_args, ix + 1);
2583 break;
2585 if (ddecl == NULL)
2587 ddecl = make_node (DEBUG_EXPR_DECL);
2588 DECL_ARTIFICIAL (ddecl) = 1;
2589 TREE_TYPE (ddecl) = TREE_TYPE (origin);
2590 DECL_MODE (ddecl) = DECL_MODE (origin);
2592 VEC_safe_push (tree, gc, *debug_args, origin);
2593 VEC_safe_push (tree, gc, *debug_args, ddecl);
2595 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg),
2596 stmt);
2597 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
2601 if (dump_file && (dump_flags & TDF_DETAILS))
2603 fprintf (dump_file, "replacing stmt:");
2604 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
2607 new_stmt = gimple_build_call_vec (callee_decl, vargs);
2608 VEC_free (tree, heap, vargs);
2609 if (gimple_call_lhs (stmt))
2610 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
2612 gimple_set_block (new_stmt, gimple_block (stmt));
2613 if (gimple_has_location (stmt))
2614 gimple_set_location (new_stmt, gimple_location (stmt));
2615 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
2616 gimple_call_copy_flags (new_stmt, stmt);
2618 if (dump_file && (dump_flags & TDF_DETAILS))
2620 fprintf (dump_file, "with stmt:");
2621 print_gimple_stmt (dump_file, new_stmt, 0, 0);
2622 fprintf (dump_file, "\n");
2624 gsi_replace (&gsi, new_stmt, true);
2625 if (cs)
2626 cgraph_set_call_stmt (cs, new_stmt);
2627 update_ssa (TODO_update_ssa);
2628 free_dominance_info (CDI_DOMINATORS);
2631 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
2633 static bool
2634 index_in_adjustments_multiple_times_p (int base_index,
2635 ipa_parm_adjustment_vec adjustments)
2637 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2638 bool one = false;
2640 for (i = 0; i < len; i++)
2642 struct ipa_parm_adjustment *adj;
2643 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2645 if (adj->base_index == base_index)
2647 if (one)
2648 return true;
2649 else
2650 one = true;
2653 return false;
2657 /* Return adjustments that should have the same effect on function parameters
2658 and call arguments as if they were first changed according to adjustments in
2659 INNER and then by adjustments in OUTER. */
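/* A worked illustrative example (not from the sources): if INNER replaces
   original parameter 0 by the piece at bit offset 32 and OUTER then takes
   the piece at bit offset 16 of that new parameter, the combined adjustment
   describes the piece at bit offset 48 of original parameter 0; a plain
   copy on either side contributes an offset of 0.  */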
2661 ipa_parm_adjustment_vec
2662 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
2663 ipa_parm_adjustment_vec outer)
2665 int i, outlen = VEC_length (ipa_parm_adjustment_t, outer);
2666 int inlen = VEC_length (ipa_parm_adjustment_t, inner);
2667 int removals = 0;
2668 ipa_parm_adjustment_vec adjustments, tmp;
2670 tmp = VEC_alloc (ipa_parm_adjustment_t, heap, inlen);
2671 for (i = 0; i < inlen; i++)
2673 struct ipa_parm_adjustment *n;
2674 n = VEC_index (ipa_parm_adjustment_t, inner, i);
2676 if (n->remove_param)
2677 removals++;
2678 else
2679 VEC_quick_push (ipa_parm_adjustment_t, tmp, n);
2682 adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, outlen + removals);
2683 for (i = 0; i < outlen; i++)
2685 struct ipa_parm_adjustment *r;
2686 struct ipa_parm_adjustment *out = VEC_index (ipa_parm_adjustment_t,
2687 outer, i);
2688 struct ipa_parm_adjustment *in = VEC_index (ipa_parm_adjustment_t, tmp,
2689 out->base_index);
2691 gcc_assert (!in->remove_param);
2692 if (out->remove_param)
2694 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
2696 r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
2697 memset (r, 0, sizeof (*r));
2698 r->remove_param = true;
2700 continue;
2703 r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
2704 memset (r, 0, sizeof (*r));
2705 r->base_index = in->base_index;
2706 r->type = out->type;
2708 /* FIXME: Create nonlocal value too. */
2710 if (in->copy_param && out->copy_param)
2711 r->copy_param = true;
2712 else if (in->copy_param)
2713 r->offset = out->offset;
2714 else if (out->copy_param)
2715 r->offset = in->offset;
2716 else
2717 r->offset = in->offset + out->offset;
2720 for (i = 0; i < inlen; i++)
2722 struct ipa_parm_adjustment *n = VEC_index (ipa_parm_adjustment_t,
2723 inner, i);
2725 if (n->remove_param)
2726 VEC_quick_push (ipa_parm_adjustment_t, adjustments, n);
2729 VEC_free (ipa_parm_adjustment_t, heap, tmp);
2730 return adjustments;
2733 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a human
2734 friendly way, assuming they are meant to be applied to FNDECL. */
2736 void
2737 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
2738 tree fndecl)
2740 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2741 bool first = true;
2742 VEC(tree, heap) *parms = ipa_get_vector_of_formal_parms (fndecl);
2744 fprintf (file, "IPA param adjustments: ");
2745 for (i = 0; i < len; i++)
2747 struct ipa_parm_adjustment *adj;
2748 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2750 if (!first)
2751 fprintf (file, " ");
2752 else
2753 first = false;
2755 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
2756 print_generic_expr (file, VEC_index (tree, parms, adj->base_index), 0);
2757 if (adj->base)
2759 fprintf (file, ", base: ");
2760 print_generic_expr (file, adj->base, 0);
2762 if (adj->reduction)
2764 fprintf (file, ", reduction: ");
2765 print_generic_expr (file, adj->reduction, 0);
2767 if (adj->new_ssa_base)
2769 fprintf (file, ", new_ssa_base: ");
2770 print_generic_expr (file, adj->new_ssa_base, 0);
2773 if (adj->copy_param)
2774 fprintf (file, ", copy_param");
2775 else if (adj->remove_param)
2776 fprintf (file, ", remove_param");
2777 else
2778 fprintf (file, ", offset %li", (long) adj->offset);
2779 if (adj->by_ref)
2780 fprintf (file, ", by_ref");
2781 print_node_brief (file, ", type: ", adj->type, 0);
2782 fprintf (file, "\n");
2784 VEC_free (tree, heap, parms);
2787 /* Stream out jump function JUMP_FUNC to OB. */
2789 static void
2790 ipa_write_jump_function (struct output_block *ob,
2791 struct ipa_jump_func *jump_func)
2793 streamer_write_uhwi (ob, jump_func->type);
2795 switch (jump_func->type)
2797 case IPA_JF_UNKNOWN:
2798 break;
2799 case IPA_JF_KNOWN_TYPE:
2800 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
2801 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
2802 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
2803 break;
2804 case IPA_JF_CONST:
2805 stream_write_tree (ob, jump_func->value.constant, true);
2806 break;
2807 case IPA_JF_PASS_THROUGH:
2808 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
2809 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
2810 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
2811 break;
2812 case IPA_JF_ANCESTOR:
2813 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
2814 stream_write_tree (ob, jump_func->value.ancestor.type, true);
2815 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
2816 break;
2817 case IPA_JF_CONST_MEMBER_PTR:
2818 stream_write_tree (ob, jump_func->value.member_cst.pfn, true);
2819 stream_write_tree (ob, jump_func->value.member_cst.delta, false);
2820 break;
2824 /* Read in jump function JUMP_FUNC from IB. */
2826 static void
2827 ipa_read_jump_function (struct lto_input_block *ib,
2828 struct ipa_jump_func *jump_func,
2829 struct data_in *data_in)
2831 jump_func->type = (enum jump_func_type) streamer_read_uhwi (ib);
2833 switch (jump_func->type)
2835 case IPA_JF_UNKNOWN:
2836 break;
2837 case IPA_JF_KNOWN_TYPE:
2838 jump_func->value.known_type.offset = streamer_read_uhwi (ib);
2839 jump_func->value.known_type.base_type = stream_read_tree (ib, data_in);
2840 jump_func->value.known_type.component_type = stream_read_tree (ib,
2841 data_in);
2842 break;
2843 case IPA_JF_CONST:
2844 jump_func->value.constant = stream_read_tree (ib, data_in);
2845 break;
2846 case IPA_JF_PASS_THROUGH:
2847 jump_func->value.pass_through.operand = stream_read_tree (ib, data_in);
2848 jump_func->value.pass_through.formal_id = streamer_read_uhwi (ib);
2849 jump_func->value.pass_through.operation
2850 = (enum tree_code) streamer_read_uhwi (ib);
2851 break;
2852 case IPA_JF_ANCESTOR:
2853 jump_func->value.ancestor.offset = streamer_read_uhwi (ib);
2854 jump_func->value.ancestor.type = stream_read_tree (ib, data_in);
2855 jump_func->value.ancestor.formal_id = streamer_read_uhwi (ib);
2856 break;
2857 case IPA_JF_CONST_MEMBER_PTR:
2858 jump_func->value.member_cst.pfn = stream_read_tree (ib, data_in);
2859 jump_func->value.member_cst.delta = stream_read_tree (ib, data_in);
2860 break;
2864 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
2865 relevant to indirect inlining to OB. */
2867 static void
2868 ipa_write_indirect_edge_info (struct output_block *ob,
2869 struct cgraph_edge *cs)
2871 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2872 struct bitpack_d bp;
2874 streamer_write_hwi (ob, ii->param_index);
2875 streamer_write_hwi (ob, ii->anc_offset);
2876 bp = bitpack_create (ob->main_stream);
2877 bp_pack_value (&bp, ii->polymorphic, 1);
2878 streamer_write_bitpack (&bp);
2880 if (ii->polymorphic)
2882 streamer_write_hwi (ob, ii->otr_token);
2883 stream_write_tree (ob, ii->otr_type, true);
2887 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
2888 relevant to indirect inlining from IB. */
2890 static void
2891 ipa_read_indirect_edge_info (struct lto_input_block *ib,
2892 struct data_in *data_in ATTRIBUTE_UNUSED,
2893 struct cgraph_edge *cs)
2895 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2896 struct bitpack_d bp;
2898 ii->param_index = (int) streamer_read_hwi (ib);
2899 ii->anc_offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
2900 bp = streamer_read_bitpack (ib);
2901 ii->polymorphic = bp_unpack_value (&bp, 1);
2902 if (ii->polymorphic)
2904 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
2905 ii->otr_type = stream_read_tree (ib, data_in);
2909 /* Stream out NODE info to OB. */
2911 static void
2912 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
2914 int node_ref;
2915 lto_cgraph_encoder_t encoder;
2916 struct ipa_node_params *info = IPA_NODE_REF (node);
2917 int j;
2918 struct cgraph_edge *e;
2919 struct bitpack_d bp;
2921 encoder = ob->decl_state->cgraph_node_encoder;
2922 node_ref = lto_cgraph_encoder_encode (encoder, node);
2923 streamer_write_uhwi (ob, node_ref);
2925 bp = bitpack_create (ob->main_stream);
2926 gcc_assert (info->uses_analysis_done
2927 || ipa_get_param_count (info) == 0);
2928 gcc_assert (!info->node_enqueued);
2929 gcc_assert (!info->ipcp_orig_node);
2930 for (j = 0; j < ipa_get_param_count (info); j++)
2931 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
2932 streamer_write_bitpack (&bp);
2933 for (e = node->callees; e; e = e->next_callee)
2935 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2937 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
2938 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
2939 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
2941 for (e = node->indirect_calls; e; e = e->next_callee)
2943 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2945 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
2946 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
2947 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
2948 ipa_write_indirect_edge_info (ob, e);
2952 /* Stream in NODE info from IB. */
2954 static void
2955 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
2956 struct data_in *data_in)
2958 struct ipa_node_params *info = IPA_NODE_REF (node);
2959 int k;
2960 struct cgraph_edge *e;
2961 struct bitpack_d bp;
2963 ipa_initialize_node_params (node);
2965 bp = streamer_read_bitpack (ib);
2966 if (ipa_get_param_count (info) != 0)
2967 info->uses_analysis_done = true;
2968 info->node_enqueued = false;
2969 for (k = 0; k < ipa_get_param_count (info); k++)
2970 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
2971 for (e = node->callees; e; e = e->next_callee)
2973 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2974 int count = streamer_read_uhwi (ib);
2976 if (!count)
2977 continue;
2978 VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions, count);
2980 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
2981 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), data_in);
2983 for (e = node->indirect_calls; e; e = e->next_callee)
2985 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2986 int count = streamer_read_uhwi (ib);
2988 if (count)
2990 VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions,
2991 count);
2992 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
2993 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k),
2994 data_in);
2996 ipa_read_indirect_edge_info (ib, data_in, e);
3000 /* Write jump functions for nodes in SET. */
3002 void
3003 ipa_prop_write_jump_functions (cgraph_node_set set)
3005 struct cgraph_node *node;
3006 struct output_block *ob;
3007 unsigned int count = 0;
3008 cgraph_node_set_iterator csi;
3010 if (!ipa_node_params_vector)
3011 return;
3013 ob = create_output_block (LTO_section_jump_functions);
3014 ob->cgraph_node = NULL;
3015 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
3017 node = csi_node (csi);
3018 if (cgraph_function_with_gimple_body_p (node)
3019 && IPA_NODE_REF (node) != NULL)
3020 count++;
3023 streamer_write_uhwi (ob, count);
3025 /* Process all of the functions. */
3026 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
3028 node = csi_node (csi);
3029 if (cgraph_function_with_gimple_body_p (node)
3030 && IPA_NODE_REF (node) != NULL)
3031 ipa_write_node_info (ob, node);
3033 streamer_write_char_stream (ob->main_stream, 0);
3034 produce_asm (ob, NULL);
3035 destroy_output_block (ob);
3038 /* Read section in file FILE_DATA of length LEN with data DATA. */
3040 static void
3041 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
3042 size_t len)
3044 const struct lto_function_header *header =
3045 (const struct lto_function_header *) data;
3046 const int cfg_offset = sizeof (struct lto_function_header);
3047 const int main_offset = cfg_offset + header->cfg_size;
3048 const int string_offset = main_offset + header->main_size;
3049 struct data_in *data_in;
3050 struct lto_input_block ib_main;
3051 unsigned int i;
3052 unsigned int count;
3054 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
3055 header->main_size);
3057 data_in =
3058 lto_data_in_create (file_data, (const char *) data + string_offset,
3059 header->string_size, NULL);
3060 count = streamer_read_uhwi (&ib_main);
3062 for (i = 0; i < count; i++)
3064 unsigned int index;
3065 struct cgraph_node *node;
3066 lto_cgraph_encoder_t encoder;
3068 index = streamer_read_uhwi (&ib_main);
3069 encoder = file_data->cgraph_node_encoder;
3070 node = lto_cgraph_encoder_deref (encoder, index);
3071 gcc_assert (node->analyzed);
3072 ipa_read_node_info (&ib_main, node, data_in);
3074 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
3075 len);
3076 lto_data_in_delete (data_in);
3079 /* Read ipcp jump functions. */
3081 void
3082 ipa_prop_read_jump_functions (void)
3084 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
3085 struct lto_file_decl_data *file_data;
3086 unsigned int j = 0;
3088 ipa_check_create_node_params ();
3089 ipa_check_create_edge_args ();
3090 ipa_register_cgraph_hooks ();
3092 while ((file_data = file_data_vec[j++]))
3094 size_t len;
3095 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
3097 if (data)
3098 ipa_prop_read_section (file_data, data, len);
3102 /* After merging units, we can get a mismatch in argument counts.
3103 Decl merging might also have rendered parameter lists obsolete.
3104 Also compute called_with_variable_arg info. */
3106 void
3107 ipa_update_after_lto_read (void)
3109 struct cgraph_node *node;
3111 ipa_check_create_node_params ();
3112 ipa_check_create_edge_args ();
3114 FOR_EACH_DEFINED_FUNCTION (node)
3115 if (node->analyzed)
3116 ipa_initialize_node_params (node);