/* Interprocedural analyses.
   Copyright (C) 2005, 2007, 2008, 2009, 2010, 2011, 2012
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "langhooks.h"
#include "ggc.h"
#include "target.h"
#include "cgraph.h"
#include "ipa-prop.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "gimple.h"
#include "flags.h"
#include "timevar.h"
#include "flags.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"

/* Intermediate information about a parameter that is only useful during the
   run of ipa_analyze_node and is not kept afterwards.  */

struct param_analysis_info
{
  bool modified;
  bitmap visited_statements;
};
/* Vector where the parameter infos are actually stored.  */
VEC (ipa_node_params_t, heap) *ipa_node_params_vector;
/* Vector where the edge argument infos (jump functions) are actually
   stored.  */
VEC (ipa_edge_args_t, gc) *ipa_edge_args_vector;
/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  int i, count;

  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    if (ipa_get_param (info, i) == ptree)
      return i;

  return -1;
}
/* Populate the param_decl field in parameter descriptors of INFO that
   corresponds to NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  struct ipa_node_params *info)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      VEC_index (ipa_param_descriptor_t,
		 info->descriptors, param_num)->decl = parm;
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

static inline int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors)
    {
      int param_count;

      param_count = count_formal_params (node->decl);
      if (param_count)
	{
	  VEC_safe_grow_cleared (ipa_param_descriptor_t, heap,
				 info->descriptors, param_count);
	  ipa_populate_param_decls (node, info);
	}
    }
}
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_KNOWN_TYPE)
	{
	  fprintf (f, "KNOWN TYPE: base ");
	  print_generic_expr (f, jump_func->value.known_type.base_type, 0);
	  fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
		   jump_func->value.known_type.offset);
	  print_generic_expr (f, jump_func->value.known_type.component_type, 0);
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val, 0);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
				  0);
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_CONST_MEMBER_PTR)
	{
	  fprintf (f, "CONST MEMBER PTR: ");
	  print_generic_expr (f, jump_func->value.member_cst.pfn, 0);
	  fprintf (f, ", ");
	  print_generic_expr (f, jump_func->value.member_cst.delta, 0);
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s ",
		   jump_func->value.pass_through.formal_id,
		   tree_code_name[(int)
				  jump_func->value.pass_through.operation]);
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    print_generic_expr (f,
				jump_func->value.pass_through.operand, 0);
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  print_generic_expr (f, jump_func->value.ancestor.type, 0);
	  fprintf (f, "\n");
	}
    }
}
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;
  int i;

  fprintf (f, "  Jump functions of caller %s:\n", cgraph_node_name (node));
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, "    callsite %s/%i -> %s/%i : \n",
	       cgraph_node_name (node), node->uid,
	       cgraph_node_name (cs->callee), cs->callee->uid);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls, i = 0; cs; cs = cs->next_callee, i++)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      if (cs->call_stmt)
	{
	  fprintf (f, "    indirect callsite %d for stmt ", i);
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "    indirect callsite %d :\n", i);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  for (node = cgraph_nodes; node; node = node->next)
    {
      ipa_print_node_jump_functions (f, node);
    }
}
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type of the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
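
/* For illustration only, the constructor shape assumed above roughly
   corresponds to C++ code like the following (a hypothetical example, not
   taken from any particular test case):

     struct A { virtual void f (); };
     struct B : A { B (); virtual void f (); };

     B::B ()
     {
       ... the compiler first calls A::A () for the ancestor sub-object,
       then stores the address of B's virtual table into this->_vptr,
       and only afterwards runs user-written code, which may call
       virtual functions.
     }  */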
/* If STMT can be proved to be an assignment to the virtual method table
   pointer of ANALYZED_OBJ and the type associated with the new table
   identified, return the type.  Otherwise return NULL_TREE.  */

static tree
extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
{
  HOST_WIDE_INT offset, size, max_size;
  tree lhs, rhs, base;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
      || TREE_CODE (rhs) != ADDR_EXPR)
    return NULL_TREE;
  rhs = get_base_address (TREE_OPERAND (rhs, 0));
  if (!rhs
      || TREE_CODE (rhs) != VAR_DECL
      || !DECL_VIRTUAL_P (rhs))
    return NULL_TREE;

  base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
  if (offset != tci->offset
      || size != POINTER_SIZE
      || max_size != POINTER_SIZE)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (TREE_CODE (tci->object) != MEM_REF
	  || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
	  || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
				  TREE_OPERAND (base, 1)))
	return NULL_TREE;
    }
  else if (tci->object != base)
    return NULL_TREE;

  return DECL_CONTEXT (rhs);
}
/* Callback of walk_aliased_vdefs and a helper function for
   detect_type_change to check whether a particular statement may modify
   the virtual table pointer, and if possible also determine the new type of
   the (sub-)object.  It stores its result into DATA, which points to a
   type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct type_change_info *tci = (struct type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tree type;
      type = extr_type_from_vtbl_ptr_store (stmt, tci);
      if (tci->type_maybe_changed
	  && type != tci->known_current_type)
	tci->multiple_types_encountered = true;
      tci->known_current_type = type;
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* Like detect_type_change but with extra argument COMP_TYPE which will become
   the component type part of new JFUNC if dynamic type change is detected and
   the new base type is identified.  */

static bool
detect_type_change_1 (tree arg, tree base, tree comp_type, gimple call,
		      struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  struct type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));
  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call))
    return false;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.known_current_type = NULL_TREE;
  tci.type_maybe_changed = false;
  tci.multiple_types_encountered = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL);
  if (!tci.type_maybe_changed)
    return false;

  if (!tci.known_current_type
      || tci.multiple_types_encountered
      || offset != 0)
    jfunc->type = IPA_JF_UNKNOWN;
  else
    {
      jfunc->type = IPA_JF_KNOWN_TYPE;
      jfunc->value.known_type.base_type = tci.known_current_type;
      jfunc->value.known_type.component_type = comp_type;
    }

  return true;
}
/* Detect whether the dynamic type of ARG has changed (before callsite CALL) by
   looking for assignments to its virtual table pointer.  If it has, return
   true and fill in the jump function JFUNC with relevant type information or
   set it to unknown.  ARG is the object itself (not a pointer to it, unless
   dereferenced).  BASE is the base of the memory access as returned by
   get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, gimple call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  return detect_type_change_1 (arg, base, TREE_TYPE (arg), call, jfunc, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, gimple call, struct ipa_jump_func *jfunc)
{
  tree comp_type;

  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != RECORD_TYPE)
    return false;

  comp_type = TREE_TYPE (TREE_TYPE (arg));
  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_1 (arg, arg, comp_type, call, jfunc, 0);
}
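
/* As a rough sketch of what the detection above looks for: a dynamic type
   change of the object pointed to by this_1(D) would show up on the vdef
   chain as a virtual table pointer store such as (identifiers purely
   illustrative)

     this_1(D)->_vptr.A = &_ZTV1B[2];

   which extr_type_from_vtbl_ptr_store maps back to the class B via the
   DECL_CONTEXT of the virtual table VAR_DECL.  */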
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if the formal parameter PARM might have been modified in this
   function before reaching the statement STMT.  PARM_AINFO is a pointer to a
   structure containing temporary information about PARM.  */

static bool
is_parm_modified_before_stmt (struct param_analysis_info *parm_ainfo,
			      gimple stmt, tree parm)
{
  bool modified = false;
  ao_ref refd;

  if (parm_ainfo->modified)
    return true;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm);
  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
		      &modified, &parm_ainfo->visited_statements);
  if (modified)
    {
      parm_ainfo->modified = true;
      return true;
    }
  return false;
}
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_node_params *info,
			    struct param_analysis_info *parms_ainfo,
			    gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index (info, op1);
  if (index < 0
      || is_parm_modified_before_stmt (&parms_ainfo[index], stmt, op1))
    return -1;

  return index;
}
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

     foo (int a)
     {
       int a.0;

       a.0_2 = a;
       bar (a.0_2);
     }

   2) The passed value can be described by a simple arithmetic pass-through
   jump function. E.g.

     foo (int a)
     {
       int D.2064;

       D.2064_4 = a.1(D) + 4;
       bar (D.2064_4);
     }

   This case can also occur in combination with the previous one, e.g.:

     foo (int a, int z)
     {
       int a.0;
       int D.2064;

       a.0_3 = a;
       D.2064_4 = a.0_3 + 4;
       foo (D.2064_4);
     }

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);
     }

   INFO is the structure describing individual parameters across different
   stages of IPA optimizations.  PARMS_AINFO contains the information that is
   only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_node_params *info,
				  struct param_analysis_info *parms_ainfo,
				  struct ipa_jump_func *jfunc,
				  gimple call, gimple stmt, tree name)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (info, parms_ainfo,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (info, parms_ainfo, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
	{
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  jfunc->type = IPA_JF_PASS_THROUGH;
	  jfunc->value.pass_through.formal_id = index;
	  jfunc->value.pass_through.operation = gimple_assign_rhs_code (stmt);
	  jfunc->value.pass_through.operand = op2;
	}
      else if (gimple_assign_single_p (stmt)
	       && !detect_type_change_ssa (tc_ssa, call, jfunc))
	{
	  jfunc->type = IPA_JF_PASS_THROUGH;
	  jfunc->value.pass_through.formal_id = index;
	  jfunc->value.pass_through.operation = NOP_EXPR;
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).low * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed only in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0
      && !detect_type_change (op1, base, call, jfunc, offset))
    {
      jfunc->type = IPA_JF_ANCESTOR;
      jfunc->value.ancestor.formal_id = index;
      jfunc->value.ancestor.offset = offset;
      jfunc->value.ancestor.type = TREE_TYPE (op1);
    }
}
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gimple call, gimple phi)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  gcc_assert (index >= 0);

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  if (!detect_type_change (obj, expr, call, jfunc, offset))
    {
      jfunc->type = IPA_JF_ANCESTOR;
      jfunc->value.ancestor.formal_id = index;
      jfunc->value.ancestor.offset = offset;
      jfunc->value.ancestor.type = TREE_TYPE (obj);
    }
}
/* Given OP which is passed as an actual argument to a called function,
   determine if it is possible to construct a KNOWN_TYPE jump function for it
   and if so, create one and store it to JFUNC.  */

static void
compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
			      gimple call)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  if (!flag_devirtualize
      || TREE_CODE (op) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE)
    return;

  op = TREE_OPERAND (op, 0);
  base = get_ref_base_and_extent (op, &offset, &size, &max_size);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
      || is_global_var (base))
    return;

  if (detect_type_change (op, base, call, jfunc, offset)
      || !TYPE_BINFO (TREE_TYPE (base)))
    return;

  jfunc->type = IPA_JF_KNOWN_TYPE;
  jfunc->value.known_type.base_type = TREE_TYPE (base);
  jfunc->value.known_type.offset = offset;
  jfunc->value.known_type.component_type = TREE_TYPE (op);
}
/* Determine the jump functions of scalar arguments.  Scalar means SSA names
   and constants of a number of selected types.  INFO is the ipa_node_params
   structure associated with the caller, PARMS_AINFO describes state of
   analysis with respect to individual formal parameters.  ARGS is the
   ipa_edge_args structure describing the callsite CALL which is the call
   statement being examined.  */

static void
compute_scalar_jump_functions (struct ipa_node_params *info,
			       struct param_analysis_info *parms_ainfo,
			       struct ipa_edge_args *args,
			       gimple call)
{
  tree arg;
  unsigned num = 0;

  for (num = 0; num < gimple_call_num_args (call); num++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, num);
      arg = gimple_call_arg (call, num);

      if (is_gimple_ip_invariant (arg))
	{
	  jfunc->type = IPA_JF_CONST;
	  jfunc->value.constant = arg;
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));

	      if (index >= 0
		  && !detect_type_change_ssa (arg, call, jfunc))
		{
		  jfunc->type = IPA_JF_PASS_THROUGH;
		  jfunc->value.pass_through.formal_id = index;
		  jfunc->value.pass_through.operation = NOP_EXPR;
		}
	    }
	  else
	    {
	      gimple stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
						  call, stmt, arg);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (info, jfunc, call, stmt);
	    }
	}
      else
	compute_known_type_jump_func (arg, jfunc, call);
    }
}
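
/* A small, purely hypothetical illustration of the cases handled above:

     void caller (int i, struct S *p)
     {
       callee (i, 7, i + 4, &p->inner);
     }

   would typically produce, for the four arguments, a NOP_EXPR pass-through
   of parameter 0, a constant, an arithmetic (PLUS_EXPR) pass-through of
   parameter 0 and an ancestor jump function based on parameter 1.  */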
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE)
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
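
/* The shape tested above corresponds to the usual C++ ABI representation of
   a pointer to member function: a first field whose type is a pointer to a
   METHOD_TYPE (conventionally called __pfn) followed by an integral delta
   field (conventionally __delta) holding the this-pointer adjustment.  Only
   the structure is relied upon here, not the field names.  */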
/* Go through arguments of the CALL and for every one that looks like a member
   pointer, check whether it can be safely declared pass-through and if so,
   mark that to the corresponding item of jump FUNCTIONS.  Return true iff
   there are non-pass-through member pointers within the arguments.  INFO
   describes formal parameters of the caller.  PARMS_AINFO is a pointer to a
   vector containing intermediate information about each formal parameter.  */

static bool
compute_pass_through_member_ptrs (struct ipa_node_params *info,
				  struct param_analysis_info *parms_ainfo,
				  struct ipa_edge_args *args,
				  gimple call)
{
  bool undecided_members = false;
  unsigned num;
  tree arg;

  for (num = 0; num < gimple_call_num_args (call); num++)
    {
      arg = gimple_call_arg (call, num);

      if (type_like_member_ptr_p (TREE_TYPE (arg), NULL, NULL))
	{
	  if (TREE_CODE (arg) == PARM_DECL)
	    {
	      int index = ipa_get_param_decl_index (info, arg);

	      gcc_assert (index >= 0);
	      if (!is_parm_modified_before_stmt (&parms_ainfo[index], call,
						 arg))
		{
		  struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args,
								       num);
		  jfunc->type = IPA_JF_PASS_THROUGH;
		  jfunc->value.pass_through.formal_id = index;
		  jfunc->value.pass_through.operation = NOP_EXPR;
		}
	      else
		undecided_members = true;
	    }
	  else
	    undecided_members = true;
	}
    }

  return undecided_members;
}
/* Simple function filling in a member pointer constant jump function (with PFN
   and DELTA as the constant value) into JFUNC.  */

static void
fill_member_ptr_cst_jump_function (struct ipa_jump_func *jfunc,
				   tree pfn, tree delta)
{
  jfunc->type = IPA_JF_CONST_MEMBER_PTR;
  jfunc->value.member_cst.pfn = pfn;
  jfunc->value.member_cst.delta = delta;
}

/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}
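
/* For instance, given a chain of simple copies such as (illustrative SSA
   names)

     b_3 = a_2;
     c_4 = b_3;

   get_ssa_def_if_simple_copy (c_4) walks back through the copies and
   returns a_2, stopping at a default definition or at the first statement
   that is not a single-rhs assignment.  */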
/* Traverse statements from CALL backwards, scanning whether the argument ARG
   which is a member pointer is filled in with constant values.  If it is, fill
   the jump function JFUNC in appropriately.  METHOD_FIELD and DELTA_FIELD are
   fields of the record type of the member pointer.  To give an example, we
   look for a pattern looking like the following:

     D.2515.__pfn ={v} printStuff;
     D.2515.__delta ={v} 0;
     i_1 = doprinting (D.2515);  */

static void
determine_cst_member_ptr (gimple call, tree arg, tree method_field,
			  tree delta_field, struct ipa_jump_func *jfunc)
{
  gimple_stmt_iterator gsi;
  tree method = NULL_TREE;
  tree delta = NULL_TREE;

  gsi = gsi_for_stmt (call);

  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      tree lhs, rhs, fld;

      if (!stmt_may_clobber_ref_p (stmt, arg))
	continue;
      if (!gimple_assign_single_p (stmt))
	return;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);

      if (TREE_CODE (lhs) != COMPONENT_REF
	  || TREE_OPERAND (lhs, 0) != arg)
	return;

      fld = TREE_OPERAND (lhs, 1);
      if (!method && fld == method_field)
	{
	  rhs = get_ssa_def_if_simple_copy (rhs);
	  if (TREE_CODE (rhs) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (rhs, 0)) == FUNCTION_DECL
	      && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs, 0))) == METHOD_TYPE)
	    {
	      method = TREE_OPERAND (rhs, 0);
	      if (delta)
		{
		  fill_member_ptr_cst_jump_function (jfunc, rhs, delta);
		  return;
		}
	    }
	  else
	    return;
	}

      if (!delta && fld == delta_field)
	{
	  rhs = get_ssa_def_if_simple_copy (rhs);
	  if (TREE_CODE (rhs) == INTEGER_CST)
	    {
	      delta = rhs;
	      if (method)
		{
		  fill_member_ptr_cst_jump_function (jfunc, rhs, delta);
		  return;
		}
	    }
	  else
	    return;
	}
    }

  return;
}
/* Go through the arguments of the CALL and for every member pointer within
   try to determine whether it is a constant.  If it is, create a corresponding
   constant jump function in the jump functions of ARGS which are associated
   with the call.  */

static void
compute_cst_member_ptr_arguments (struct ipa_edge_args *args,
				  gimple call)
{
  unsigned num;
  tree arg, method_field, delta_field;

  for (num = 0; num < gimple_call_num_args (call); num++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, num);
      arg = gimple_call_arg (call, num);

      if (jfunc->type == IPA_JF_UNKNOWN
	  && type_like_member_ptr_p (TREE_TYPE (arg), &method_field,
				     &delta_field))
	determine_cst_member_ptr (call, arg, method_field, delta_field, jfunc);
    }
}
/* Compute jump functions for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gimple call = cs->call_stmt;
  int arg_num = gimple_call_num_args (call);

  if (arg_num == 0 || args->jump_functions)
    return;
  VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions, arg_num);

  /* We will deal with constants and SSA scalars first:  */
  compute_scalar_jump_functions (info, parms_ainfo, args, call);

  /* Let's check whether there are any potential member pointers and if so,
     whether we can determine their functions as pass_through.  */
  if (!compute_pass_through_member_ptrs (info, parms_ainfo, args, call))
    return;

  /* Finally, let's check whether we actually pass a new constant member
     pointer here...  */
  compute_cst_member_ptr_arguments (args, call);
}
/* Compute jump functions for all edges - both direct and indirect - outgoing
   from NODE.  Also count the actual arguments in the process.  */

static void
ipa_compute_jump_functions (struct cgraph_node *node,
			    struct param_analysis_info *parms_ainfo)
{
  struct cgraph_edge *cs;

  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
								  NULL);
      /* We do not need to bother analyzing calls to unknown
	 functions unless they may become known during lto/whopr.  */
      if (!callee->analyzed && !flag_lto)
	continue;
      ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
}
/* If RHS looks like a rhs of a statement loading pfn from a member
   pointer formal parameter, return the parameter, otherwise return
   NULL.  If USE_DELTA, then we look for a use of the delta field
   rather than the pfn.  */

static tree
ipa_get_member_ptr_load_param (tree rhs, bool use_delta)
{
  tree rec, ref_field, ref_offset, fld, fld_offset, ptr_field, delta_field;

  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;

  ref_offset = TREE_OPERAND (rhs, 1);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;

      if (use_delta)
	fld = delta_field;
      else
	fld = ptr_field;

      return ref_field == fld ? rec : NULL_TREE;
    }

  if (use_delta)
    fld_offset = byte_position (delta_field);
  else
    fld_offset = byte_position (ptr_field);

  return tree_int_cst_equal (ref_offset, fld_offset) ? rec : NULL_TREE;
}
/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, this function returns that parameter.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta)
{
  tree rhs;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  return ipa_get_member_ptr_load_param (rhs, use_delta);
}

/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}

/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
{
  struct cgraph_edge *cs;

  cs = cgraph_edge (node, stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->anc_offset = 0;
  cs->indirect_info->polymorphic = 0;
  return cs;
}
/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

     <bb 2>:
       f$__delta_5 = f.__delta;
       f$__pfn_24 = f.__pfn;

   or
     <bb 2>:
       f$__delta_5 = MEM[(struct  *)&f];
       f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and a few lines below:

     <bb 5>
       D.2496_3 = (int) f$__pfn_24;
       D.2497_4 = D.2496_3 & 1;
       if (D.2497_4 != 0)
         goto <bb 3>;
       else
         goto <bb 4>;

     <bb 6>:
       D.2500_7 = (unsigned int) f$__delta_5;
       D.2501_8 = &S + D.2500_7;
       D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
       D.2503_10 = *D.2502_9;
       D.2504_12 = f$__pfn_24 + -1;
       D.2505_13 = (unsigned int) D.2504_12;
       D.2506_14 = D.2503_10 + D.2505_13;
       D.2507_15 = *D.2506_14;
       iftmp.11_16 = (String:: *) D.2507_15;

     <bb 7>:
       # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
       D.2500_19 = (unsigned int) f$__delta_5;
       D.2508_20 = &S + D.2500_19;
       D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }  */

static void
ipa_analyze_indirect_call_uses (struct cgraph_node *node,
				struct ipa_node_params *info,
				struct param_analysis_info *parms_ainfo,
				gimple call, tree target)
{
  gimple def;
  tree n1, n2;
  gimple d1, d2;
  tree rec, rec2, cond;
  gimple branch;
  int index;
  basic_block bb, virt_bb, join;

  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
	ipa_note_param_call (node, index, call);
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer. */
  if (!POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  def = SSA_NAME_DEF_STMT (target);
  if (gimple_code (def) != GIMPLE_PHI)
    return;

  if (gimple_phi_num_args (def) != 2)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function. */
  n1 = PHI_ARG_DEF (def, 0);
  n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  d1 = SSA_NAME_DEF_STMT (n1);
  d2 = SSA_NAME_DEF_STMT (n2);

  join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false)))
    {
      if (ipa_get_stmt_member_ptr_load_param (d2, false))
	return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern. */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn. */

  branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta));

  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0 && !is_parm_modified_before_stmt (&parms_ainfo[index],
						   call, rec))
    ipa_note_param_call (node, index, call);

  return;
}
/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
   object referenced in the expression is a formal parameter of the caller
   (described by INFO), create a call note for the statement.  */

static void
ipa_analyze_virtual_call_uses (struct cgraph_node *node,
			       struct ipa_node_params *info, gimple call,
			       tree target)
{
  struct cgraph_edge *cs;
  struct cgraph_indirect_call_info *ii;
  struct ipa_jump_func jfunc;
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
	return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      if (detect_type_change_ssa (obj, call, &jfunc))
	return;
    }
  else
    {
      gimple stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
	return;
      index = ipa_get_param_decl_index (info,
					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (obj, expr, call, &jfunc, anc_offset))
	return;
    }

  cs = ipa_note_param_call (node, index, call);
  ii = cs->indirect_info;
  ii->anc_offset = anc_offset;
  ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
  ii->otr_type = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (target)));
  ii->polymorphic = 1;
}
/* Analyze a call statement CALL whether and how it utilizes formal parameters
   of the caller (described by INFO).  PARMS_AINFO is a pointer to a vector
   containing intermediate information about each formal parameter.  */

static void
ipa_analyze_call_uses (struct cgraph_node *node,
		       struct ipa_node_params *info,
		       struct param_analysis_info *parms_ainfo, gimple call)
{
  tree target = gimple_call_fn (call);

  if (!target)
    return;
  if (TREE_CODE (target) == SSA_NAME)
    ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
  else if (TREE_CODE (target) == OBJ_TYPE_REF)
    ipa_analyze_virtual_call_uses (node, info, call, target);
}

/* Analyze the call statement STMT with respect to formal parameters (described
   in INFO) of caller given by NODE.  Currently it only checks whether formal
   parameters are called.  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  */

static void
ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
		       struct param_analysis_info *parms_ainfo, gimple stmt)
{
  if (is_gimple_call (stmt))
    ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
}

/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
   If OP is a parameter declaration, mark it as used in the info structure
   passed in DATA.  */

static bool
visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
			    tree op, void *data)
{
  struct ipa_node_params *info = (struct ipa_node_params *) data;

  op = get_base_address (op);
  if (op
      && TREE_CODE (op) == PARM_DECL)
    {
      int index = ipa_get_param_decl_index (info, op);
      gcc_assert (index >= 0);
      ipa_set_param_used (info, index, true);
    }

  return false;
}
/* Scan the function body of NODE and inspect the uses of formal parameters.
   Store the findings in various structures of the associated ipa_node_params
   structure, such as parameter flags, notes etc.  PARMS_AINFO is a pointer to a
   vector containing intermediate information about each formal parameter.  */

static void
ipa_analyze_params_uses (struct cgraph_node *node,
			 struct param_analysis_info *parms_ainfo)
{
  tree decl = node->decl;
  basic_block bb;
  struct function *func;
  gimple_stmt_iterator gsi;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int i;

  if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
    return;

  for (i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      /* For SSA regs see if parameter is used.  For non-SSA we compute
	 the flag during modification analysis.  */
      if (is_gimple_reg (parm)
	  && gimple_default_def (DECL_STRUCT_FUNCTION (node->decl), parm))
	ipa_set_param_used (info, i, true);
    }

  func = DECL_STRUCT_FUNCTION (decl);
  FOR_EACH_BB_FN (bb, func)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);

	  if (is_gimple_debug (stmt))
	    continue;

	  ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
	  walk_stmt_load_store_addr_ops (stmt, info,
					 visit_ref_for_mod_analysis,
					 visit_ref_for_mod_analysis,
					 visit_ref_for_mod_analysis);
	}
      for (gsi = gsi_start (phi_nodes (bb)); !gsi_end_p (gsi); gsi_next (&gsi))
	walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
				       visit_ref_for_mod_analysis,
				       visit_ref_for_mod_analysis,
				       visit_ref_for_mod_analysis);
    }

  info->uses_analysis_done = 1;
}
/* Initialize the array describing properties of formal parameters
   of NODE, analyze their uses and compute jump functions associated
   with actual arguments of calls from within NODE.  */

void
ipa_analyze_node (struct cgraph_node *node)
{
  struct ipa_node_params *info;
  struct param_analysis_info *parms_ainfo;
  int i, param_count;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = IPA_NODE_REF (node);
  push_cfun (DECL_STRUCT_FUNCTION (node->decl));
  current_function_decl = node->decl;
  ipa_initialize_node_params (node);

  param_count = ipa_get_param_count (info);
  parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
  memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);

  ipa_analyze_params_uses (node, parms_ainfo);
  ipa_compute_jump_functions (node, parms_ainfo);

  for (i = 0; i < param_count; i++)
    if (parms_ainfo[i].visited_statements)
      BITMAP_FREE (parms_ainfo[i].visited_statements);

  current_function_decl = NULL;
  pop_cfun ();
}
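
/* The sequence above is the per-function driver of this analysis: parameter
   descriptors are created first, parameter uses are scanned next and only
   then are jump functions computed for the outgoing call sites, so that
   is_parm_modified_before_stmt can rely on the temporary
   param_analysis_info records allocated here.  */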
/* Update the jump function DST when the call graph edge corresponding to SRC
   is being inlined, knowing that DST is of type ancestor and SRC of known
   type.  */

static void
combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
				     struct ipa_jump_func *dst)
{
  HOST_WIDE_INT combined_offset;
  tree combined_type;

  combined_offset = src->value.known_type.offset + dst->value.ancestor.offset;
  combined_type = dst->value.ancestor.type;

  dst->type = IPA_JF_KNOWN_TYPE;
  dst->value.known_type.base_type = src->value.known_type.base_type;
  dst->value.known_type.offset = combined_offset;
  dst->value.known_type.component_type = combined_type;
}
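
/* For example (numbers purely illustrative): if SRC says the object is known
   to be of type D at offset 0 and DST is an ancestor jump function with
   offset 64 and type B, the result is a KNOWN_TYPE jump function with base
   type D, offset 64 and component type B.  */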
/* Update the jump functions associated with call graph edge E when the call
   graph edge CS is being inlined, assuming that E->caller is already (possibly
   indirectly) inlined into CS->callee and that E has not been inlined.  */

static void
update_jump_functions_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_edge *e)
{
  struct ipa_edge_args *top = IPA_EDGE_REF (cs);
  struct ipa_edge_args *args = IPA_EDGE_REF (e);
  int count = ipa_get_cs_argument_count (args);
  int i;

  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);

      if (dst->type == IPA_JF_ANCESTOR)
	{
	  struct ipa_jump_func *src;

	  /* Variable number of arguments can cause havoc if we try to access
	     one that does not exist in the inlined edge.  So make sure we
	     don't.  */
	  if (dst->value.ancestor.formal_id >= ipa_get_cs_argument_count (top))
	    {
	      dst->type = IPA_JF_UNKNOWN;
	      continue;
	    }

	  src = ipa_get_ith_jump_func (top, dst->value.ancestor.formal_id);
	  if (src->type == IPA_JF_KNOWN_TYPE)
	    combine_known_type_and_ancestor_jfs (src, dst);
	  else if (src->type == IPA_JF_PASS_THROUGH
		   && src->value.pass_through.operation == NOP_EXPR)
	    dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
	  else if (src->type == IPA_JF_ANCESTOR)
	    {
	      dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
	      dst->value.ancestor.offset += src->value.ancestor.offset;
	    }
	  else
	    dst->type = IPA_JF_UNKNOWN;
	}
      else if (dst->type == IPA_JF_PASS_THROUGH)
	{
	  struct ipa_jump_func *src;
	  /* We must check range due to calls with variable number of arguments
	     and we cannot combine jump functions with operations.  */
	  if (dst->value.pass_through.operation == NOP_EXPR
	      && (dst->value.pass_through.formal_id
		  < ipa_get_cs_argument_count (top)))
	    {
	      src = ipa_get_ith_jump_func (top,
					   dst->value.pass_through.formal_id);
	      *dst = *src;
	    }
	  else
	    dst->type = IPA_JF_UNKNOWN;
	}
    }
}
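
/* Concretely: a jump function of E that was a NOP_EXPR pass-through of the
   callee's formal 0 is simply replaced by whatever jump function the inlined
   call site CS computed for its argument 0, while the ancestor case
   additionally accumulates the offsets (an illustrative summary of the code
   above).  */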
/* If TARGET is an addr_expr of a function declaration, make it the destination
   of an indirect edge IE and return the edge.  Otherwise, return NULL.  */

struct cgraph_edge *
ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
{
  struct cgraph_node *callee;

  if (TREE_CODE (target) == ADDR_EXPR)
    target = TREE_OPERAND (target, 0);
  if (TREE_CODE (target) != FUNCTION_DECL)
    return NULL;
  callee = cgraph_get_node (target);
  if (!callee)
    return NULL;
  ipa_check_create_node_params ();

  /* We can not make edges to inline clones.  It is a bug that someone removed
     the cgraph node too early.  */
  gcc_assert (!callee->global.inlined_to);

  cgraph_make_edge_direct (ie, callee);
  if (dump_file)
    {
      fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
	       "(%s/%i -> %s/%i), for stmt ",
	       ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
	       cgraph_node_name (ie->caller), ie->caller->uid,
	       cgraph_node_name (ie->callee), ie->callee->uid);
      if (ie->call_stmt)
	print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
      else
	fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
    }
  callee = cgraph_function_or_thunk_node (callee, NULL);

  return ie;
}
/* Try to find a destination for indirect edge IE that corresponds to a simple
   call or a call of a member function pointer and where the destination is a
   pointer formal parameter described by jump function JFUNC.  If it can be
   determined, return the newly direct edge, otherwise return NULL.  */

static struct cgraph_edge *
try_make_edge_direct_simple_call (struct cgraph_edge *ie,
				  struct ipa_jump_func *jfunc)
{
  tree target;

  if (jfunc->type == IPA_JF_CONST)
    target = jfunc->value.constant;
  else if (jfunc->type == IPA_JF_CONST_MEMBER_PTR)
    target = jfunc->value.member_cst.pfn;
  else
    return NULL;

  return ipa_make_edge_direct_to_target (ie, target);
}

/* Try to find a destination for indirect edge IE that corresponds to a
   virtual call based on a formal parameter which is described by jump
   function JFUNC.  If it can be determined, make it direct and return the
   direct edge.  Otherwise, return NULL.  */

static struct cgraph_edge *
try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
				   struct ipa_jump_func *jfunc)
{
  tree binfo, target;

  if (jfunc->type != IPA_JF_KNOWN_TYPE)
    return NULL;

  binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
  gcc_checking_assert (binfo);
  binfo = get_binfo_at_offset (binfo, jfunc->value.known_type.offset
			       + ie->indirect_info->anc_offset,
			       ie->indirect_info->otr_type);
  if (binfo)
    target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
					       binfo);
  else
    return NULL;

  if (target)
    return ipa_make_edge_direct_to_target (ie, target);
  else
    return NULL;
}
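
/* Put differently: once the base type, its offset and the OBJ_TYPE_REF token
   are all known, the BINFO lookup above can resolve the virtual call to a
   single method, e.g. a call made through a B * that is known to point into
   a D object may be redirected directly to D's implementation (an
   illustrative scenario; when no binfo or target is found the edge is left
   indirect).  */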
1847 /* Update the param called notes associated with NODE when CS is being inlined,
1848 assuming NODE is (potentially indirectly) inlined into CS->callee.
1849 Moreover, if the callee is discovered to be constant, create a new cgraph
1850 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
1851 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
1853 static bool
1854 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
1855 struct cgraph_node *node,
1856 VEC (cgraph_edge_p, heap) **new_edges)
1858 struct ipa_edge_args *top;
1859 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
1860 bool res = false;
1862 ipa_check_create_edge_args ();
1863 top = IPA_EDGE_REF (cs);
1865 for (ie = node->indirect_calls; ie; ie = next_ie)
1867 struct cgraph_indirect_call_info *ici = ie->indirect_info;
1868 struct ipa_jump_func *jfunc;
1870 next_ie = ie->next_callee;
1872 if (ici->param_index == -1)
1873 continue;
1875 /* We must check range due to calls with variable number of arguments: */
1876 if (ici->param_index >= ipa_get_cs_argument_count (top))
1878 ici->param_index = -1;
1879 continue;
1882 jfunc = ipa_get_ith_jump_func (top, ici->param_index);
1883 if (jfunc->type == IPA_JF_PASS_THROUGH
1884 && jfunc->value.pass_through.operation == NOP_EXPR)
1885 ici->param_index = jfunc->value.pass_through.formal_id;
1886 else if (jfunc->type == IPA_JF_ANCESTOR)
1888 ici->param_index = jfunc->value.ancestor.formal_id;
1889 ici->anc_offset += jfunc->value.ancestor.offset;
1891 else
1892 /* Either we can find a destination for this edge now or never. */
1893 ici->param_index = -1;
1895 if (!flag_indirect_inlining)
1896 continue;
1898 if (ici->polymorphic)
1899 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc);
1900 else
1901 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc);
1903 if (new_direct_edge)
1905 new_direct_edge->indirect_inlining_edge = 1;
1906 if (new_direct_edge->call_stmt)
1907 new_direct_edge->call_stmt_cannot_inline_p
1908 = !gimple_check_call_matching_types (new_direct_edge->call_stmt,
1909 new_direct_edge->callee->decl);
1910 if (new_edges)
1912 VEC_safe_push (cgraph_edge_p, heap, *new_edges,
1913 new_direct_edge);
1914 top = IPA_EDGE_REF (cs);
1915 res = true;
1920 return res;
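/* A small worked example of the remapping above (the indices are purely
   illustrative): suppose NODE contains an indirect call through its formal
   parameter 2, and the call site CS passes its own parameter 0 straight
   through, unchanged, as that argument.  The jump function for argument 2
   of CS is then a NOP_EXPR pass-through with formal_id 0, so the loop above
   rewrites the description of the indirect call as

     jfunc = ipa_get_ith_jump_func (top, 2);
     ici->param_index = jfunc->value.pass_through.formal_id;   (now 0)

   i.e. after inlining the call is tracked as going through parameter 0 of
   CS's caller.  */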
1923 /* Recursively traverse the subtree of NODE (including NODE itself) made of
1924 inlined cgraph_edges when CS has been inlined, and invoke
1925 update_indirect_edges_after_inlining on all nodes and
1926 update_jump_functions_after_inlining on all non-inlined edges that lead out
1927 of this subtree. Newly discovered indirect edges will be added to
1928 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
1929 created. */
1931 static bool
1932 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
1933 struct cgraph_node *node,
1934 VEC (cgraph_edge_p, heap) **new_edges)
1936 struct cgraph_edge *e;
1937 bool res;
1939 res = update_indirect_edges_after_inlining (cs, node, new_edges);
1941 for (e = node->callees; e; e = e->next_callee)
1942 if (!e->inline_failed)
1943 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
1944 else
1945 update_jump_functions_after_inlining (cs, e);
1946 for (e = node->indirect_calls; e; e = e->next_callee)
1947 update_jump_functions_after_inlining (cs, e);
1949 return res;
1952 /* Update jump functions and indirect call information when the call site CS
1953 is being inlined. CS is expected to lead to a node already cloned by
1954 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
1955 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
1956 created. */
1958 bool
1959 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
1960 VEC (cgraph_edge_p, heap) **new_edges)
1962 bool changed;
1963 /* Do nothing if the preparation phase has not been carried out yet
1964 (i.e. during early inlining). */
1965 if (!ipa_node_params_vector)
1966 return false;
1967 gcc_assert (ipa_edge_args_vector);
1969 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
1971 /* We do not keep jump functions of inlined edges up to date. Better to free
1972 them so we do not access them accidentally. */
1973 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
1974 return changed;
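/* Illustrative sketch of the expected use from the inliner (error handling
   and the actual inlining bookkeeping are omitted; the variable names are
   made up):

     VEC (cgraph_edge_p, heap) *new_edges = NULL;

     if (ipa_propagate_indirect_call_infos (cs, &new_edges))
       {
         ...consider the edges collected in new_edges for further inlining...
       }
     VEC_free (cgraph_edge_p, heap, new_edges);

   When NEW_EDGES is NULL, newly discovered calls are still made direct,
   they are just not collected for the caller.  */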
1977 /* Frees all dynamically allocated structures that the argument info points
1978 to. */
1980 void
1981 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
1983 if (args->jump_functions)
1984 ggc_free (args->jump_functions);
1986 memset (args, 0, sizeof (*args));
1989 /* Free all ipa_edge_args structures. */
1991 void
1992 ipa_free_all_edge_args (void)
1994 int i;
1995 struct ipa_edge_args *args;
1997 FOR_EACH_VEC_ELT (ipa_edge_args_t, ipa_edge_args_vector, i, args)
1998 ipa_free_edge_args_substructures (args);
2000 VEC_free (ipa_edge_args_t, gc, ipa_edge_args_vector);
2001 ipa_edge_args_vector = NULL;
2004 /* Frees all dynamically allocated structures that the param info points
2005 to. */
2007 void
2008 ipa_free_node_params_substructures (struct ipa_node_params *info)
2010 VEC_free (ipa_param_descriptor_t, heap, info->descriptors);
2011 free (info->lattices);
2012 /* Lattice values and their sources are deallocated with their allocation
2013 pool. */
2014 VEC_free (tree, heap, info->known_vals);
2015 memset (info, 0, sizeof (*info));
2018 /* Free all ipa_node_params structures. */
2020 void
2021 ipa_free_all_node_params (void)
2023 int i;
2024 struct ipa_node_params *info;
2026 FOR_EACH_VEC_ELT (ipa_node_params_t, ipa_node_params_vector, i, info)
2027 ipa_free_node_params_substructures (info);
2029 VEC_free (ipa_node_params_t, heap, ipa_node_params_vector);
2030 ipa_node_params_vector = NULL;
2033 /* Hook that is called by cgraph.c when an edge is removed. */
2035 static void
2036 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
2038 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
2039 if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
2040 <= (unsigned)cs->uid)
2041 return;
2042 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
2045 /* Hook that is called by cgraph.c when a node is removed. */
2047 static void
2048 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2050 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
2051 if (VEC_length (ipa_node_params_t, ipa_node_params_vector)
2052 <= (unsigned)node->uid)
2053 return;
2054 ipa_free_node_params_substructures (IPA_NODE_REF (node));
2057 /* Hook that is called by cgraph.c when an edge is duplicated. */
2059 static void
2060 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
2061 __attribute__((unused)) void *data)
2063 struct ipa_edge_args *old_args, *new_args;
2065 ipa_check_create_edge_args ();
2067 old_args = IPA_EDGE_REF (src);
2068 new_args = IPA_EDGE_REF (dst);
2070 new_args->jump_functions = VEC_copy (ipa_jump_func_t, gc,
2071 old_args->jump_functions);
2074 /* Hook that is called by cgraph.c when a node is duplicated. */
2076 static void
2077 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
2078 ATTRIBUTE_UNUSED void *data)
2080 struct ipa_node_params *old_info, *new_info;
2082 ipa_check_create_node_params ();
2083 old_info = IPA_NODE_REF (src);
2084 new_info = IPA_NODE_REF (dst);
2086 new_info->descriptors = VEC_copy (ipa_param_descriptor_t, heap,
2087 old_info->descriptors);
2088 new_info->lattices = NULL;
2089 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
2091 new_info->uses_analysis_done = old_info->uses_analysis_done;
2092 new_info->node_enqueued = old_info->node_enqueued;
2096 /* Analyze a function that has been newly added to the callgraph. */
2098 static void
2099 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2101 ipa_analyze_node (node);
2104 /* Register our cgraph hooks if they are not already there. */
2106 void
2107 ipa_register_cgraph_hooks (void)
2109 if (!edge_removal_hook_holder)
2110 edge_removal_hook_holder =
2111 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
2112 if (!node_removal_hook_holder)
2113 node_removal_hook_holder =
2114 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
2115 if (!edge_duplication_hook_holder)
2116 edge_duplication_hook_holder =
2117 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
2118 if (!node_duplication_hook_holder)
2119 node_duplication_hook_holder =
2120 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
2121 function_insertion_hook_holder =
2122 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
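/* Sketch of the usual lifecycle of these data structures in an IPA pass
   (only an outline; the real driver code lives in the individual passes):

     ipa_check_create_node_params ();
     ipa_check_create_edge_args ();
     ipa_register_cgraph_hooks ();
     ...analysis, propagation, transformation...
     ipa_free_all_structures_after_iinln ();

   where the final call also unregisters the hooks again (see below).  */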
2125 /* Unregister our cgraph hooks. */
2127 static void
2128 ipa_unregister_cgraph_hooks (void)
2130 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
2131 edge_removal_hook_holder = NULL;
2132 cgraph_remove_node_removal_hook (node_removal_hook_holder);
2133 node_removal_hook_holder = NULL;
2134 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
2135 edge_duplication_hook_holder = NULL;
2136 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
2137 node_duplication_hook_holder = NULL;
2138 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
2139 function_insertion_hook_holder = NULL;
2142 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2143 longer needed after ipa-cp. */
2145 void
2146 ipa_free_all_structures_after_ipa_cp (void)
2148 if (!optimize)
2150 ipa_free_all_edge_args ();
2151 ipa_free_all_node_params ();
2152 free_alloc_pool (ipcp_sources_pool);
2153 free_alloc_pool (ipcp_values_pool);
2154 ipa_unregister_cgraph_hooks ();
2158 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2159 longer needed after indirect inlining. */
2161 void
2162 ipa_free_all_structures_after_iinln (void)
2164 ipa_free_all_edge_args ();
2165 ipa_free_all_node_params ();
2166 ipa_unregister_cgraph_hooks ();
2167 if (ipcp_sources_pool)
2168 free_alloc_pool (ipcp_sources_pool);
2169 if (ipcp_values_pool)
2170 free_alloc_pool (ipcp_values_pool);
2173 /* Print the parameter descriptors of function NODE (including whether each
2174 parameter is used) to F. */
2176 void
2177 ipa_print_node_params (FILE * f, struct cgraph_node *node)
2179 int i, count;
2180 tree temp;
2181 struct ipa_node_params *info;
2183 if (!node->analyzed)
2184 return;
2185 info = IPA_NODE_REF (node);
2186 fprintf (f, " function %s parameter descriptors:\n",
2187 cgraph_node_name (node));
2188 count = ipa_get_param_count (info);
2189 for (i = 0; i < count; i++)
2191 temp = ipa_get_param (info, i);
2192 if (TREE_CODE (temp) == PARM_DECL)
2193 fprintf (f, " param %d : %s", i,
2194 (DECL_NAME (temp)
2195 ? (*lang_hooks.decl_printable_name) (temp, 2)
2196 : "(unnamed)"));
2197 if (ipa_is_param_used (info, i))
2198 fprintf (f, " used");
2199 fprintf (f, "\n");
2203 /* Print parameter descriptors of all analyzed functions in the
2204 callgraph to F. */
2206 void
2207 ipa_print_all_params (FILE * f)
2209 struct cgraph_node *node;
2211 fprintf (f, "\nFunction parameters:\n");
2212 for (node = cgraph_nodes; node; node = node->next)
2213 ipa_print_node_params (f, node);
2216 /* Return a heap allocated vector containing formal parameters of FNDECL. */
2218 VEC(tree, heap) *
2219 ipa_get_vector_of_formal_parms (tree fndecl)
2221 VEC(tree, heap) *args;
2222 int count;
2223 tree parm;
2225 count = count_formal_params (fndecl);
2226 args = VEC_alloc (tree, heap, count);
2227 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
2228 VEC_quick_push (tree, args, parm);
2230 return args;
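/* The returned vector is heap allocated and owned by the caller, so typical
   use looks like the following sketch:

     VEC (tree, heap) *parms = ipa_get_vector_of_formal_parms (fndecl);
     tree parm = VEC_index (tree, parms, i);
     ...
     VEC_free (tree, heap, parms);

   (ipa_dump_param_adjustments below follows exactly this pattern).  */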
2233 /* Return a heap allocated vector containing types of formal parameters of
2234 function type FNTYPE. */
2236 static inline VEC(tree, heap) *
2237 get_vector_of_formal_parm_types (tree fntype)
2239 VEC(tree, heap) *types;
2240 int count = 0;
2241 tree t;
2243 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2244 count++;
2246 types = VEC_alloc (tree, heap, count);
2247 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2248 VEC_quick_push (tree, types, TREE_VALUE (t));
2250 return types;
2253 /* Modify the function declaration FNDECL and its type according to the plan in
2254 ADJUSTMENTS. Also set the base field of each adjustment structure to the
2255 actual parameter being modified, as determined by its base_index field.
2256 SYNTH_PARM_PREFIX is used to name newly synthesized parameters. */
2258 void
2259 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
2260 const char *synth_parm_prefix)
2262 VEC(tree, heap) *oparms, *otypes;
2263 tree orig_type, new_type = NULL;
2264 tree old_arg_types, t, new_arg_types = NULL;
2265 tree parm, *link = &DECL_ARGUMENTS (fndecl);
2266 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2267 tree new_reversed = NULL;
2268 bool care_for_types, last_parm_void;
2270 if (!synth_parm_prefix)
2271 synth_parm_prefix = "SYNTH";
2273 oparms = ipa_get_vector_of_formal_parms (fndecl);
2274 orig_type = TREE_TYPE (fndecl);
2275 old_arg_types = TYPE_ARG_TYPES (orig_type);
2277 /* The following test is an ugly hack; some functions simply don't have any
2278 arguments in their type. This is probably a bug but we cope with it here. */
2279 care_for_types = (old_arg_types != NULL_TREE);
2280 if (care_for_types)
2282 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
2283 == void_type_node);
2284 otypes = get_vector_of_formal_parm_types (orig_type);
2285 if (last_parm_void)
2286 gcc_assert (VEC_length (tree, oparms) + 1 == VEC_length (tree, otypes));
2287 else
2288 gcc_assert (VEC_length (tree, oparms) == VEC_length (tree, otypes));
2290 else
2292 last_parm_void = false;
2293 otypes = NULL;
2296 for (i = 0; i < len; i++)
2298 struct ipa_parm_adjustment *adj;
2299 gcc_assert (link);
2301 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2302 parm = VEC_index (tree, oparms, adj->base_index);
2303 adj->base = parm;
2305 if (adj->copy_param)
2307 if (care_for_types)
2308 new_arg_types = tree_cons (NULL_TREE, VEC_index (tree, otypes,
2309 adj->base_index),
2310 new_arg_types);
2311 *link = parm;
2312 link = &DECL_CHAIN (parm);
2314 else if (!adj->remove_param)
2316 tree new_parm;
2317 tree ptype;
2319 if (adj->by_ref)
2320 ptype = build_pointer_type (adj->type);
2321 else
2322 ptype = adj->type;
2324 if (care_for_types)
2325 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
2327 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
2328 ptype);
2329 DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);
2331 DECL_ARTIFICIAL (new_parm) = 1;
2332 DECL_ARG_TYPE (new_parm) = ptype;
2333 DECL_CONTEXT (new_parm) = fndecl;
2334 TREE_USED (new_parm) = 1;
2335 DECL_IGNORED_P (new_parm) = 1;
2336 layout_decl (new_parm, 0);
2338 add_referenced_var (new_parm);
2339 mark_sym_for_renaming (new_parm);
2340 adj->base = parm;
2341 adj->reduction = new_parm;
2343 *link = new_parm;
2345 link = &DECL_CHAIN (new_parm);
2349 *link = NULL_TREE;
2351 if (care_for_types)
2353 new_reversed = nreverse (new_arg_types);
2354 if (last_parm_void)
2356 if (new_reversed)
2357 TREE_CHAIN (new_arg_types) = void_list_node;
2358 else
2359 new_reversed = void_list_node;
2363 /* Use a distinct copy of the original type to preserve as much as possible
2364 from it (debug info, attribute lists etc.).
2365 The exception is that a METHOD_TYPE must have a THIS argument; when we
2366 are asked to remove it, we need to build a new FUNCTION_TYPE
2367 instead. */
2368 if (TREE_CODE (orig_type) != METHOD_TYPE
2369 || (VEC_index (ipa_parm_adjustment_t, adjustments, 0)->copy_param
2370 && VEC_index (ipa_parm_adjustment_t, adjustments, 0)->base_index == 0))
2372 new_type = build_distinct_type_copy (orig_type);
2373 TYPE_ARG_TYPES (new_type) = new_reversed;
2375 else
2377 new_type
2378 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
2379 new_reversed));
2380 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
2381 DECL_VINDEX (fndecl) = NULL_TREE;
2384 /* When the signature changes, we need to clear the builtin info. */
2385 if (DECL_BUILT_IN (fndecl))
2387 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
2388 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
2391 /* This is a new type, not a copy of an old type. Need to reassociate
2392 variants. We can handle everything except the main variant lazily. */
2393 t = TYPE_MAIN_VARIANT (orig_type);
2394 if (orig_type != t)
2396 TYPE_MAIN_VARIANT (new_type) = t;
2397 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
2398 TYPE_NEXT_VARIANT (t) = new_type;
2400 else
2402 TYPE_MAIN_VARIANT (new_type) = new_type;
2403 TYPE_NEXT_VARIANT (new_type) = NULL;
2406 TREE_TYPE (fndecl) = new_type;
2407 DECL_VIRTUAL_P (fndecl) = 0;
2408 if (otypes)
2409 VEC_free (tree, heap, otypes);
2410 VEC_free (tree, heap, oparms);
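/* Illustrative sketch of how an ADJUSTMENTS vector for this function can be
   built (indices are made up; a real caller must also rewrite the function
   body and all call sites, see ipa_modify_call_arguments below).  To keep
   the first original parameter and remove the second one:

     ipa_parm_adjustment_vec adjustments
       = VEC_alloc (ipa_parm_adjustment_t, heap, 2);
     struct ipa_parm_adjustment *adj;

     adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
     memset (adj, 0, sizeof (*adj));
     adj->base_index = 0;
     adj->copy_param = true;

     adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
     memset (adj, 0, sizeof (*adj));
     adj->base_index = 1;
     adj->remove_param = true;

     ipa_modify_formal_parameters (fndecl, adjustments, NULL);

   Passing NULL as the prefix makes any newly synthesized parameters (none
   in this example) use the default "SYNTH" prefix.  */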
2413 /* Modify the actual arguments of the call statement STMT as indicated in
2414 ADJUSTMENTS. If this is a directly recursive call, CS must be NULL;
2415 otherwise it must be the corresponding call graph edge. */
2417 void
2418 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
2419 ipa_parm_adjustment_vec adjustments)
2421 VEC(tree, heap) *vargs;
2422 VEC(tree, gc) **debug_args = NULL;
2423 gimple new_stmt;
2424 gimple_stmt_iterator gsi;
2425 tree callee_decl;
2426 int i, len;
2428 len = VEC_length (ipa_parm_adjustment_t, adjustments);
2429 vargs = VEC_alloc (tree, heap, len);
2430 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
2432 gsi = gsi_for_stmt (stmt);
2433 for (i = 0; i < len; i++)
2435 struct ipa_parm_adjustment *adj;
2437 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2439 if (adj->copy_param)
2441 tree arg = gimple_call_arg (stmt, adj->base_index);
2443 VEC_quick_push (tree, vargs, arg);
2445 else if (!adj->remove_param)
2447 tree expr, base, off;
2448 location_t loc;
2450 /* We create a new parameter out of the value of the old one; we can
2451 do the following kinds of transformations:
2453 - A scalar passed by reference is converted to a scalar passed by
2454 value. (adj->by_ref is false and the type of the original
2455 actual argument is a pointer to a scalar).
2457 - A part of an aggregate is passed instead of the whole aggregate.
2458 The part can be passed either by value or by reference, this is
2459 determined by value of adj->by_ref. Moreover, the code below
2460 handles both situations when the original aggregate is passed by
2461 value (its type is not a pointer) and when it is passed by
2462 reference (it is a pointer to an aggregate).
2464 When the new argument is passed by reference (adj->by_ref is true)
2465 it must be a part of an aggregate and therefore we form it by
2466 simply taking the address of a reference inside the original
2467 aggregate. */
2469 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
2470 base = gimple_call_arg (stmt, adj->base_index);
2471 loc = EXPR_LOCATION (base);
2473 if (TREE_CODE (base) != ADDR_EXPR
2474 && POINTER_TYPE_P (TREE_TYPE (base)))
2475 off = build_int_cst (adj->alias_ptr_type,
2476 adj->offset / BITS_PER_UNIT);
2477 else
2479 HOST_WIDE_INT base_offset;
2480 tree prev_base;
2482 if (TREE_CODE (base) == ADDR_EXPR)
2483 base = TREE_OPERAND (base, 0);
2484 prev_base = base;
2485 base = get_addr_base_and_unit_offset (base, &base_offset);
2486 /* Aggregate arguments can have non-invariant addresses. */
2487 if (!base)
2489 base = build_fold_addr_expr (prev_base);
2490 off = build_int_cst (adj->alias_ptr_type,
2491 adj->offset / BITS_PER_UNIT);
2493 else if (TREE_CODE (base) == MEM_REF)
2495 off = build_int_cst (adj->alias_ptr_type,
2496 base_offset
2497 + adj->offset / BITS_PER_UNIT);
2498 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
2499 off);
2500 base = TREE_OPERAND (base, 0);
2502 else
2504 off = build_int_cst (adj->alias_ptr_type,
2505 base_offset
2506 + adj->offset / BITS_PER_UNIT);
2507 base = build_fold_addr_expr (base);
2511 if (!adj->by_ref)
2513 tree type = adj->type;
2514 unsigned int align;
2515 unsigned HOST_WIDE_INT misalign;
2516 align = get_pointer_alignment_1 (base, &misalign);
2517 misalign += (double_int_sext (tree_to_double_int (off),
2518 TYPE_PRECISION (TREE_TYPE (off))).low
2519 * BITS_PER_UNIT);
2520 misalign = misalign & (align - 1);
2521 if (misalign != 0)
2522 align = (misalign & -misalign);
2523 if (align < TYPE_ALIGN (type))
2524 type = build_aligned_type (type, align);
2525 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
2527 else
2529 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
2530 expr = build_fold_addr_expr (expr);
2533 expr = force_gimple_operand_gsi (&gsi, expr,
2534 adj->by_ref
2535 || is_gimple_reg_type (adj->type),
2536 NULL, true, GSI_SAME_STMT);
2537 VEC_quick_push (tree, vargs, expr);
2539 if (!adj->copy_param && MAY_HAVE_DEBUG_STMTS)
2541 unsigned int ix;
2542 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
2543 gimple def_temp;
2545 arg = gimple_call_arg (stmt, adj->base_index);
2546 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
2548 if (!fold_convertible_p (TREE_TYPE (origin), arg))
2549 continue;
2550 arg = fold_convert_loc (gimple_location (stmt),
2551 TREE_TYPE (origin), arg);
2553 if (debug_args == NULL)
2554 debug_args = decl_debug_args_insert (callee_decl);
2555 for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl); ix += 2)
2556 if (ddecl == origin)
2558 ddecl = VEC_index (tree, *debug_args, ix + 1);
2559 break;
2561 if (ddecl == NULL)
2563 ddecl = make_node (DEBUG_EXPR_DECL);
2564 DECL_ARTIFICIAL (ddecl) = 1;
2565 TREE_TYPE (ddecl) = TREE_TYPE (origin);
2566 DECL_MODE (ddecl) = DECL_MODE (origin);
2568 VEC_safe_push (tree, gc, *debug_args, origin);
2569 VEC_safe_push (tree, gc, *debug_args, ddecl);
2571 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg),
2572 stmt);
2573 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
2577 if (dump_file && (dump_flags & TDF_DETAILS))
2579 fprintf (dump_file, "replacing stmt:");
2580 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
2583 new_stmt = gimple_build_call_vec (callee_decl, vargs);
2584 VEC_free (tree, heap, vargs);
2585 if (gimple_call_lhs (stmt))
2586 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
2588 gimple_set_block (new_stmt, gimple_block (stmt));
2589 if (gimple_has_location (stmt))
2590 gimple_set_location (new_stmt, gimple_location (stmt));
2591 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
2592 gimple_call_copy_flags (new_stmt, stmt);
2594 if (dump_file && (dump_flags & TDF_DETAILS))
2596 fprintf (dump_file, "with stmt:");
2597 print_gimple_stmt (dump_file, new_stmt, 0, 0);
2598 fprintf (dump_file, "\n");
2600 gsi_replace (&gsi, new_stmt, true);
2601 if (cs)
2602 cgraph_set_call_stmt (cs, new_stmt);
2603 update_ssa (TODO_update_ssa);
2604 free_dominance_info (CDI_DOMINATORS);
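/* Sketch of how this pairs with ipa_modify_formal_parameters (heavily
   simplified; in particular a real caller has to operate inside each
   caller's function body and handle directly recursive edges by passing
   a NULL CS, as documented above):

     ipa_modify_formal_parameters (node->decl, adjustments, NULL);
     for (cs = node->callers; cs; cs = cs->next_caller)
       ipa_modify_call_arguments (cs, cs->call_stmt, adjustments);

   so that the declaration and every call site are changed according to the
   same ADJUSTMENTS vector.  */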
2607 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
2609 static bool
2610 index_in_adjustments_multiple_times_p (int base_index,
2611 ipa_parm_adjustment_vec adjustments)
2613 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2614 bool one = false;
2616 for (i = 0; i < len; i++)
2618 struct ipa_parm_adjustment *adj;
2619 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2621 if (adj->base_index == base_index)
2623 if (one)
2624 return true;
2625 else
2626 one = true;
2629 return false;
2633 /* Return adjustments that should have the same effect on function parameters
2634 and call arguments as if they were first changed according to adjustments in
2635 INNER and then by adjustments in OUTER. */
2637 ipa_parm_adjustment_vec
2638 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
2639 ipa_parm_adjustment_vec outer)
2641 int i, outlen = VEC_length (ipa_parm_adjustment_t, outer);
2642 int inlen = VEC_length (ipa_parm_adjustment_t, inner);
2643 int removals = 0;
2644 ipa_parm_adjustment_vec adjustments, tmp;
2646 tmp = VEC_alloc (ipa_parm_adjustment_t, heap, inlen);
2647 for (i = 0; i < inlen; i++)
2649 struct ipa_parm_adjustment *n;
2650 n = VEC_index (ipa_parm_adjustment_t, inner, i);
2652 if (n->remove_param)
2653 removals++;
2654 else
2655 VEC_quick_push (ipa_parm_adjustment_t, tmp, n);
2658 adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, outlen + removals);
2659 for (i = 0; i < outlen; i++)
2661 struct ipa_parm_adjustment *r;
2662 struct ipa_parm_adjustment *out = VEC_index (ipa_parm_adjustment_t,
2663 outer, i);
2664 struct ipa_parm_adjustment *in = VEC_index (ipa_parm_adjustment_t, tmp,
2665 out->base_index);
2667 gcc_assert (!in->remove_param);
2668 if (out->remove_param)
2670 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
2672 r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
2673 memset (r, 0, sizeof (*r));
2674 r->remove_param = true;
2676 continue;
2679 r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
2680 memset (r, 0, sizeof (*r));
2681 r->base_index = in->base_index;
2682 r->type = out->type;
2684 /* FIXME: Create nonlocal value too. */
2686 if (in->copy_param && out->copy_param)
2687 r->copy_param = true;
2688 else if (in->copy_param)
2689 r->offset = out->offset;
2690 else if (out->copy_param)
2691 r->offset = in->offset;
2692 else
2693 r->offset = in->offset + out->offset;
2696 for (i = 0; i < inlen; i++)
2698 struct ipa_parm_adjustment *n = VEC_index (ipa_parm_adjustment_t,
2699 inner, i);
2701 if (n->remove_param)
2702 VEC_quick_push (ipa_parm_adjustment_t, adjustments, n);
2705 VEC_free (ipa_parm_adjustment_t, heap, tmp);
2706 return adjustments;
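/* Worked example (indices and offsets are illustrative): let the original
   function have three parameters.  INNER keeps parameters 0 and 2 and
   removes parameter 1, so the intermediate clone has two parameters whose
   base_index values are 0 and 2.  If OUTER then keeps its parameter 0 and
   replaces its parameter 1 (i.e. original parameter 2) by a piece at
   offset 32, the combined vector describes, in terms of the original
   parameters:

     0: base_index 0, copy_param
     1: base_index 2, offset 32
     2: base_index 1, remove_param

   which is what applying INNER first and then OUTER would do directly.  */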
2709 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a human-friendly
2710 way, assuming they are meant to be applied to FNDECL. */
2712 void
2713 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
2714 tree fndecl)
2716 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2717 bool first = true;
2718 VEC(tree, heap) *parms = ipa_get_vector_of_formal_parms (fndecl);
2720 fprintf (file, "IPA param adjustments: ");
2721 for (i = 0; i < len; i++)
2723 struct ipa_parm_adjustment *adj;
2724 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2726 if (!first)
2727 fprintf (file, " ");
2728 else
2729 first = false;
2731 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
2732 print_generic_expr (file, VEC_index (tree, parms, adj->base_index), 0);
2733 if (adj->base)
2735 fprintf (file, ", base: ");
2736 print_generic_expr (file, adj->base, 0);
2738 if (adj->reduction)
2740 fprintf (file, ", reduction: ");
2741 print_generic_expr (file, adj->reduction, 0);
2743 if (adj->new_ssa_base)
2745 fprintf (file, ", new_ssa_base: ");
2746 print_generic_expr (file, adj->new_ssa_base, 0);
2749 if (adj->copy_param)
2750 fprintf (file, ", copy_param");
2751 else if (adj->remove_param)
2752 fprintf (file, ", remove_param");
2753 else
2754 fprintf (file, ", offset %li", (long) adj->offset);
2755 if (adj->by_ref)
2756 fprintf (file, ", by_ref");
2757 print_node_brief (file, ", type: ", adj->type, 0);
2758 fprintf (file, "\n");
2760 VEC_free (tree, heap, parms);
2763 /* Stream out jump function JUMP_FUNC to OB. */
2765 static void
2766 ipa_write_jump_function (struct output_block *ob,
2767 struct ipa_jump_func *jump_func)
2769 streamer_write_uhwi (ob, jump_func->type);
2771 switch (jump_func->type)
2773 case IPA_JF_UNKNOWN:
2774 break;
2775 case IPA_JF_KNOWN_TYPE:
2776 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
2777 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
2778 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
2779 break;
2780 case IPA_JF_CONST:
2781 stream_write_tree (ob, jump_func->value.constant, true);
2782 break;
2783 case IPA_JF_PASS_THROUGH:
2784 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
2785 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
2786 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
2787 break;
2788 case IPA_JF_ANCESTOR:
2789 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
2790 stream_write_tree (ob, jump_func->value.ancestor.type, true);
2791 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
2792 break;
2793 case IPA_JF_CONST_MEMBER_PTR:
2794 stream_write_tree (ob, jump_func->value.member_cst.pfn, true);
2795 stream_write_tree (ob, jump_func->value.member_cst.delta, false);
2796 break;
2800 /* Read in jump function JUMP_FUNC from IB. */
2802 static void
2803 ipa_read_jump_function (struct lto_input_block *ib,
2804 struct ipa_jump_func *jump_func,
2805 struct data_in *data_in)
2807 jump_func->type = (enum jump_func_type) streamer_read_uhwi (ib);
2809 switch (jump_func->type)
2811 case IPA_JF_UNKNOWN:
2812 break;
2813 case IPA_JF_KNOWN_TYPE:
2814 jump_func->value.known_type.offset = streamer_read_uhwi (ib);
2815 jump_func->value.known_type.base_type = stream_read_tree (ib, data_in);
2816 jump_func->value.known_type.component_type = stream_read_tree (ib,
2817 data_in);
2818 break;
2819 case IPA_JF_CONST:
2820 jump_func->value.constant = stream_read_tree (ib, data_in);
2821 break;
2822 case IPA_JF_PASS_THROUGH:
2823 jump_func->value.pass_through.operand = stream_read_tree (ib, data_in);
2824 jump_func->value.pass_through.formal_id = streamer_read_uhwi (ib);
2825 jump_func->value.pass_through.operation
2826 = (enum tree_code) streamer_read_uhwi (ib);
2827 break;
2828 case IPA_JF_ANCESTOR:
2829 jump_func->value.ancestor.offset = streamer_read_uhwi (ib);
2830 jump_func->value.ancestor.type = stream_read_tree (ib, data_in);
2831 jump_func->value.ancestor.formal_id = streamer_read_uhwi (ib);
2832 break;
2833 case IPA_JF_CONST_MEMBER_PTR:
2834 jump_func->value.member_cst.pfn = stream_read_tree (ib, data_in);
2835 jump_func->value.member_cst.delta = stream_read_tree (ib, data_in);
2836 break;
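/* For reference, the per-jump-function stream layout that the writer above
   (ipa_write_jump_function) produces and this reader consumes is, by type:

     always:                  uhwi type
     IPA_JF_UNKNOWN:          nothing further
     IPA_JF_KNOWN_TYPE:       uhwi offset, tree base_type, tree component_type
     IPA_JF_CONST:            tree constant
     IPA_JF_PASS_THROUGH:     tree operand, uhwi formal_id, uhwi operation
     IPA_JF_ANCESTOR:         uhwi offset, tree type, uhwi formal_id
     IPA_JF_CONST_MEMBER_PTR: tree pfn, tree delta

   Any change here must keep the two functions in sync.  */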
2840 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
2841 relevant to indirect inlining to OB. */
2843 static void
2844 ipa_write_indirect_edge_info (struct output_block *ob,
2845 struct cgraph_edge *cs)
2847 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2848 struct bitpack_d bp;
2850 streamer_write_hwi (ob, ii->param_index);
2851 streamer_write_hwi (ob, ii->anc_offset);
2852 bp = bitpack_create (ob->main_stream);
2853 bp_pack_value (&bp, ii->polymorphic, 1);
2854 streamer_write_bitpack (&bp);
2856 if (ii->polymorphic)
2858 streamer_write_hwi (ob, ii->otr_token);
2859 stream_write_tree (ob, ii->otr_type, true);
2863 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
2864 relevant to indirect inlining from IB. */
2866 static void
2867 ipa_read_indirect_edge_info (struct lto_input_block *ib,
2868 struct data_in *data_in ATTRIBUTE_UNUSED,
2869 struct cgraph_edge *cs)
2871 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2872 struct bitpack_d bp;
2874 ii->param_index = (int) streamer_read_hwi (ib);
2875 ii->anc_offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
2876 bp = streamer_read_bitpack (ib);
2877 ii->polymorphic = bp_unpack_value (&bp, 1);
2878 if (ii->polymorphic)
2880 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
2881 ii->otr_type = stream_read_tree (ib, data_in);
2885 /* Stream out NODE info to OB. */
2887 static void
2888 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
2890 int node_ref;
2891 lto_cgraph_encoder_t encoder;
2892 struct ipa_node_params *info = IPA_NODE_REF (node);
2893 int j;
2894 struct cgraph_edge *e;
2895 struct bitpack_d bp;
2897 encoder = ob->decl_state->cgraph_node_encoder;
2898 node_ref = lto_cgraph_encoder_encode (encoder, node);
2899 streamer_write_uhwi (ob, node_ref);
2901 bp = bitpack_create (ob->main_stream);
2902 gcc_assert (info->uses_analysis_done
2903 || ipa_get_param_count (info) == 0);
2904 gcc_assert (!info->node_enqueued);
2905 gcc_assert (!info->ipcp_orig_node);
2906 for (j = 0; j < ipa_get_param_count (info); j++)
2907 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
2908 streamer_write_bitpack (&bp);
2909 for (e = node->callees; e; e = e->next_callee)
2911 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2913 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
2914 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
2915 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
2917 for (e = node->indirect_calls; e; e = e->next_callee)
2919 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2921 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
2922 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
2923 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
2924 ipa_write_indirect_edge_info (ob, e);
2928 /* Stream in NODE info from IB. */
2930 static void
2931 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
2932 struct data_in *data_in)
2934 struct ipa_node_params *info = IPA_NODE_REF (node);
2935 int k;
2936 struct cgraph_edge *e;
2937 struct bitpack_d bp;
2939 ipa_initialize_node_params (node);
2941 bp = streamer_read_bitpack (ib);
2942 if (ipa_get_param_count (info) != 0)
2943 info->uses_analysis_done = true;
2944 info->node_enqueued = false;
2945 for (k = 0; k < ipa_get_param_count (info); k++)
2946 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
2947 for (e = node->callees; e; e = e->next_callee)
2949 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2950 int count = streamer_read_uhwi (ib);
2952 if (!count)
2953 continue;
2954 VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions, count);
2956 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
2957 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), data_in);
2959 for (e = node->indirect_calls; e; e = e->next_callee)
2961 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2962 int count = streamer_read_uhwi (ib);
2964 if (count)
2966 VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions,
2967 count);
2968 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
2969 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k),
2970 data_in);
2972 ipa_read_indirect_edge_info (ib, data_in, e);
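/* Schematically, the per-node record written by ipa_write_node_info and read
   above is (a summary of the two functions, not a separate specification):

     uhwi      reference of the node in the cgraph encoder
     bitpack   one "parameter used" bit per formal parameter
     for each direct callee edge:
       uhwi argument count, followed by that many jump functions
     for each indirect edge:
       uhwi argument count, followed by that many jump functions,
       followed by the indirect edge info (param_index, anc_offset,
       a polymorphic bit and, if polymorphic, otr_token and otr_type).  */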
2976 /* Write jump functions for nodes in SET. */
2978 void
2979 ipa_prop_write_jump_functions (cgraph_node_set set)
2981 struct cgraph_node *node;
2982 struct output_block *ob;
2983 unsigned int count = 0;
2984 cgraph_node_set_iterator csi;
2986 if (!ipa_node_params_vector)
2987 return;
2989 ob = create_output_block (LTO_section_jump_functions);
2990 ob->cgraph_node = NULL;
2991 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
2993 node = csi_node (csi);
2994 if (cgraph_function_with_gimple_body_p (node)
2995 && IPA_NODE_REF (node) != NULL)
2996 count++;
2999 streamer_write_uhwi (ob, count);
3001 /* Process all of the functions. */
3002 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
3004 node = csi_node (csi);
3005 if (cgraph_function_with_gimple_body_p (node)
3006 && IPA_NODE_REF (node) != NULL)
3007 ipa_write_node_info (ob, node);
3009 streamer_write_char_stream (ob->main_stream, 0);
3010 produce_asm (ob, NULL);
3011 destroy_output_block (ob);
3014 /* Read section in file FILE_DATA of length LEN with data DATA. */
3016 static void
3017 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
3018 size_t len)
3020 const struct lto_function_header *header =
3021 (const struct lto_function_header *) data;
3022 const int cfg_offset = sizeof (struct lto_function_header);
3023 const int main_offset = cfg_offset + header->cfg_size;
3024 const int string_offset = main_offset + header->main_size;
3025 struct data_in *data_in;
3026 struct lto_input_block ib_main;
3027 unsigned int i;
3028 unsigned int count;
3030 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
3031 header->main_size);
3033 data_in =
3034 lto_data_in_create (file_data, (const char *) data + string_offset,
3035 header->string_size, NULL);
3036 count = streamer_read_uhwi (&ib_main);
3038 for (i = 0; i < count; i++)
3040 unsigned int index;
3041 struct cgraph_node *node;
3042 lto_cgraph_encoder_t encoder;
3044 index = streamer_read_uhwi (&ib_main);
3045 encoder = file_data->cgraph_node_encoder;
3046 node = lto_cgraph_encoder_deref (encoder, index);
3047 gcc_assert (node->analyzed);
3048 ipa_read_node_info (&ib_main, node, data_in);
3050 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
3051 len);
3052 lto_data_in_delete (data_in);
3055 /* Read ipcp jump functions. */
3057 void
3058 ipa_prop_read_jump_functions (void)
3060 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
3061 struct lto_file_decl_data *file_data;
3062 unsigned int j = 0;
3064 ipa_check_create_node_params ();
3065 ipa_check_create_edge_args ();
3066 ipa_register_cgraph_hooks ();
3068 while ((file_data = file_data_vec[j++]))
3070 size_t len;
3071 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
3073 if (data)
3074 ipa_prop_read_section (file_data, data, len);
3078 /* After merging units, we can get a mismatch in argument counts.
3079 Also, decl merging might have rendered parameter lists obsolete,
3080 so reinitialize the parameter descriptors here. */
3082 void
3083 ipa_update_after_lto_read (void)
3085 struct cgraph_node *node;
3087 ipa_check_create_node_params ();
3088 ipa_check_create_edge_args ();
3090 for (node = cgraph_nodes; node; node = node->next)
3091 if (node->analyzed)
3092 ipa_initialize_node_params (node);