1 /* Interprocedural analyses.
2 Copyright (C) 2005, 2007, 2008, 2009, 2010, 2011, 2012
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree.h"
25 #include "langhooks.h"
26 #include "ggc.h"
27 #include "target.h"
28 #include "cgraph.h"
29 #include "ipa-prop.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-inline.h"
33 #include "gimple.h"
34 #include "flags.h"
35 #include "timevar.h"
37 #include "diagnostic.h"
38 #include "tree-pretty-print.h"
39 #include "gimple-pretty-print.h"
40 #include "lto-streamer.h"
41 #include "data-streamer.h"
42 #include "tree-streamer.h"
45 /* Intermediate information about a parameter that is only useful during the
46 run of ipa_analyze_node and is not kept afterwards. */
48 struct param_analysis_info
50 bool modified;
51 bitmap visited_statements;
54 /* Vector where the parameter infos are actually stored. */
55 VEC (ipa_node_params_t, heap) *ipa_node_params_vector;
56 /* Vector where the argument infos are actually stored. */
57 VEC (ipa_edge_args_t, gc) *ipa_edge_args_vector;
59 /* Holders of ipa cgraph hooks: */
60 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
61 static struct cgraph_node_hook_list *node_removal_hook_holder;
62 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
63 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
64 static struct cgraph_node_hook_list *function_insertion_hook_holder;
66 /* Return the index of the formal parameter whose tree is PTREE in the function
67 which corresponds to INFO. */
69 int
70 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
72 int i, count;
74 count = ipa_get_param_count (info);
75 for (i = 0; i < count; i++)
76 if (ipa_get_param (info, i) == ptree)
77 return i;
79 return -1;
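/* A minimal usage sketch (variable names hypothetical): an SSA name that is
   a default definition of a PARM_DECL can be mapped back to its formal index
   like this, which is the pattern compute_scalar_jump_functions uses below:

     if (TREE_CODE (arg) == SSA_NAME && SSA_NAME_IS_DEFAULT_DEF (arg))
       {
         int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
         if (index >= 0)
           ... ARG is a direct, unmodified use of formal parameter INDEX ...
       }  */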
82 /* Populate the param_decl field in parameter descriptors of INFO that
83 corresponds to NODE. */
85 static void
86 ipa_populate_param_decls (struct cgraph_node *node,
87 struct ipa_node_params *info)
89 tree fndecl;
90 tree fnargs;
91 tree parm;
92 int param_num;
94 fndecl = node->decl;
95 fnargs = DECL_ARGUMENTS (fndecl);
96 param_num = 0;
97 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
99 VEC_index (ipa_param_descriptor_t,
100 info->descriptors, param_num)->decl = parm;
101 param_num++;
105 /* Return how many formal parameters FNDECL has. */
107 static inline int
108 count_formal_params (tree fndecl)
110 tree parm;
111 int count = 0;
113 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
114 count++;
116 return count;
119 /* Initialize the ipa_node_params structure associated with NODE by counting
120 the function parameters, creating the descriptors and populating their
121 param_decls. */
123 void
124 ipa_initialize_node_params (struct cgraph_node *node)
126 struct ipa_node_params *info = IPA_NODE_REF (node);
128 if (!info->descriptors)
130 int param_count;
132 param_count = count_formal_params (node->decl);
133 if (param_count)
135 VEC_safe_grow_cleared (ipa_param_descriptor_t, heap,
136 info->descriptors, param_count);
137 ipa_populate_param_decls (node, info);
142 /* Print the jump functions associated with call graph edge CS to file F. */
144 static void
145 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
147 int i, count;
149 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
150 for (i = 0; i < count; i++)
152 struct ipa_jump_func *jump_func;
153 enum jump_func_type type;
155 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
156 type = jump_func->type;
158 fprintf (f, " param %d: ", i);
159 if (type == IPA_JF_UNKNOWN)
160 fprintf (f, "UNKNOWN\n");
161 else if (type == IPA_JF_KNOWN_TYPE)
163 fprintf (f, "KNOWN TYPE: base ");
164 print_generic_expr (f, jump_func->value.known_type.base_type, 0);
165 fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
166 jump_func->value.known_type.offset);
167 print_generic_expr (f, jump_func->value.known_type.component_type, 0);
168 fprintf (f, "\n");
170 else if (type == IPA_JF_CONST)
172 tree val = jump_func->value.constant;
173 fprintf (f, "CONST: ");
174 print_generic_expr (f, val, 0);
175 if (TREE_CODE (val) == ADDR_EXPR
176 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
178 fprintf (f, " -> ");
179 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)), 0);
182 fprintf (f, "\n");
184 else if (type == IPA_JF_CONST_MEMBER_PTR)
186 fprintf (f, "CONST MEMBER PTR: ");
187 print_generic_expr (f, jump_func->value.member_cst.pfn, 0);
188 fprintf (f, ", ");
189 print_generic_expr (f, jump_func->value.member_cst.delta, 0);
190 fprintf (f, "\n");
192 else if (type == IPA_JF_PASS_THROUGH)
194 fprintf (f, "PASS THROUGH: ");
195 fprintf (f, "%d, op %s ",
196 jump_func->value.pass_through.formal_id,
197 tree_code_name[(int)
198 jump_func->value.pass_through.operation]);
199 if (jump_func->value.pass_through.operation != NOP_EXPR)
200 print_generic_expr (f,
201 jump_func->value.pass_through.operand, 0);
202 fprintf (f, "\n");
204 else if (type == IPA_JF_ANCESTOR)
206 fprintf (f, "ANCESTOR: ");
207 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
208 jump_func->value.ancestor.formal_id,
209 jump_func->value.ancestor.offset);
210 print_generic_expr (f, jump_func->value.ancestor.type, 0);
211 fprintf (f, "\n");
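/* For illustration, the dump produced above for a call passing the integer
   constant 4 and a plain copy of the caller's first formal parameter looks
   roughly like this (exact spacing and numbers are hypothetical):

       param 0: CONST: 4
       param 1: PASS THROUGH: 0, op nop_expr  */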
217 /* Print the jump functions of all arguments on all call graph edges going from
218 NODE to file F. */
220 void
221 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
223 struct cgraph_edge *cs;
224 int i;
226 fprintf (f, " Jump functions of caller %s:\n", cgraph_node_name (node));
227 for (cs = node->callees; cs; cs = cs->next_callee)
229 if (!ipa_edge_args_info_available_for_edge_p (cs))
230 continue;
232 fprintf (f, " callsite %s/%i -> %s/%i : \n",
233 xstrdup (cgraph_node_name (node)), node->uid,
234 xstrdup (cgraph_node_name (cs->callee)), cs->callee->uid);
235 ipa_print_node_jump_functions_for_edge (f, cs);
238 for (cs = node->indirect_calls, i = 0; cs; cs = cs->next_callee, i++)
240 if (!ipa_edge_args_info_available_for_edge_p (cs))
241 continue;
243 if (cs->call_stmt)
245 fprintf (f, " indirect callsite %d for stmt ", i);
246 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
248 else
249 fprintf (f, " indirect callsite %d :\n", i);
250 ipa_print_node_jump_functions_for_edge (f, cs);
255 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
257 void
258 ipa_print_all_jump_functions (FILE *f)
260 struct cgraph_node *node;
262 fprintf (f, "\nJump functions:\n");
263 for (node = cgraph_nodes; node; node = node->next)
265 ipa_print_node_jump_functions (f, node);
269 /* Structure to be passed in between detect_type_change and
270 check_stmt_for_type_change. */
272 struct type_change_info
274 /* Offset into the object where there is the virtual method pointer we are
275 looking for. */
276 HOST_WIDE_INT offset;
277 /* The declaration or SSA_NAME pointer of the base that we are checking for
278 type change. */
279 tree object;
280 /* If we actually can tell the type that the object has changed to, it is
281 stored in this field. Otherwise it remains NULL_TREE. */
282 tree known_current_type;
283 /* Set to true if dynamic type change has been detected. */
284 bool type_maybe_changed;
285 /* Set to true if multiple types have been encountered. known_current_type
286 must be disregarded in that case. */
287 bool multiple_types_encountered;
290 /* Return true if STMT can modify a virtual method table pointer.
292 This function makes special assumptions about both constructors and
293 destructors which are all the functions that are allowed to alter the VMT
294 pointers. It assumes that destructors begin with assignment into all VMT
295 pointers and that constructors essentially look in the following way:
297 1) The very first thing they do is that they call constructors of ancestor
298 sub-objects that have them.
300 2) Then VMT pointers of this and all its ancestors are set to new values
301 corresponding to the type corresponding to the constructor.
303 3) Only afterwards, other stuff such as constructor of member sub-objects
304 and the code written by the user is run. Only this may include calling
305 virtual functions, directly or indirectly.
307 There is no way to call a constructor of an ancestor sub-object in any
308 other way.
310 This means that we do not have to care whether constructors get the correct
311 type information because they will always change it (in fact, if we define
312 the type to be given by the VMT pointer, it is undefined).
314 The most important fact to derive from the above is that if, for some
315 statement in the section 3, we try to detect whether the dynamic type has
316 changed, we can safely ignore all calls as we examine the function body
317 backwards until we reach statements in section 2 because these calls cannot
318 be ancestor constructors or destructors (if the input is not bogus) and so
319 do not change the dynamic type (this holds true only for automatically
320 allocated objects but at the moment we devirtualize only these). We then
321 must detect that statements in section 2 change the dynamic type and can try
322 to derive the new type. That is enough and we can stop, we will never see
323 the calls into constructors of sub-objects in this code. Therefore we can
324 safely ignore all call statements that we traverse.
327 static bool
328 stmt_may_be_vtbl_ptr_store (gimple stmt)
330 if (is_gimple_call (stmt))
331 return false;
332 else if (is_gimple_assign (stmt))
334 tree lhs = gimple_assign_lhs (stmt);
336 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
338 if (flag_strict_aliasing
339 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
340 return false;
342 if (TREE_CODE (lhs) == COMPONENT_REF
343 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
344 return false;
345 /* In the future we might want to use get_base_ref_and_offset to find
346 if there is a field corresponding to the offset and if so, proceed
347 almost like if it was a component ref. */
350 return true;
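/* For illustration, the virtual method table pointer store that this
   predicate (and extr_type_from_vtbl_ptr_store below) is interested in looks
   roughly like the following GIMPLE in a constructor of a class A
   (identifiers hypothetical):

     this_2(D)->_vptr.A = &_ZTV1A[2];

   The left-hand side is a COMPONENT_REF whose FIELD_DECL has DECL_VIRTUAL_P
   set and the right-hand side is an ADDR_EXPR whose base is the virtual
   table VAR_DECL _ZTV1A.  */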
353 /* If STMT can be proved to be an assignment to the virtual method table
354 pointer of the object described by TCI and the type associated with the new
355 table can be identified, return that type. Otherwise return NULL_TREE. */
357 static tree
358 extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
360 HOST_WIDE_INT offset, size, max_size;
361 tree lhs, rhs, base;
363 if (!gimple_assign_single_p (stmt))
364 return NULL_TREE;
366 lhs = gimple_assign_lhs (stmt);
367 rhs = gimple_assign_rhs1 (stmt);
368 if (TREE_CODE (lhs) != COMPONENT_REF
369 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
370 || TREE_CODE (rhs) != ADDR_EXPR)
371 return NULL_TREE;
372 rhs = get_base_address (TREE_OPERAND (rhs, 0));
373 if (!rhs
374 || TREE_CODE (rhs) != VAR_DECL
375 || !DECL_VIRTUAL_P (rhs))
376 return NULL_TREE;
378 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
379 if (offset != tci->offset
380 || size != POINTER_SIZE
381 || max_size != POINTER_SIZE)
382 return NULL_TREE;
383 if (TREE_CODE (base) == MEM_REF)
385 if (TREE_CODE (tci->object) != MEM_REF
386 || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
387 || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
388 TREE_OPERAND (base, 1)))
389 return NULL_TREE;
391 else if (tci->object != base)
392 return NULL_TREE;
394 return DECL_CONTEXT (rhs);
397 /* Callback of walk_aliased_vdefs and a helper function for
398 detect_type_change to check whether a particular statement may modify
399 the virtual table pointer, and if possible also determine the new type of
400 the (sub-)object. It stores its result into DATA, which points to a
401 type_change_info structure. */
403 static bool
404 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
406 gimple stmt = SSA_NAME_DEF_STMT (vdef);
407 struct type_change_info *tci = (struct type_change_info *) data;
409 if (stmt_may_be_vtbl_ptr_store (stmt))
411 tree type;
412 type = extr_type_from_vtbl_ptr_store (stmt, tci);
413 if (tci->type_maybe_changed
414 && type != tci->known_current_type)
415 tci->multiple_types_encountered = true;
416 tci->known_current_type = type;
417 tci->type_maybe_changed = true;
418 return true;
420 else
421 return false;
426 /* Like detect_type_change but with extra argument COMP_TYPE which will become
427 the component type part of new JFUNC if a dynamic type change is detected and
428 the new base type is identified. */
430 static bool
431 detect_type_change_1 (tree arg, tree base, tree comp_type, gimple call,
432 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
434 struct type_change_info tci;
435 ao_ref ao;
437 gcc_checking_assert (DECL_P (arg)
438 || TREE_CODE (arg) == MEM_REF
439 || handled_component_p (arg));
440 /* Const calls cannot call virtual methods through VMT and so type changes do
441 not matter. */
442 if (!flag_devirtualize || !gimple_vuse (call))
443 return false;
445 ao_ref_init (&ao, arg);
446 ao.base = base;
447 ao.offset = offset;
448 ao.size = POINTER_SIZE;
449 ao.max_size = ao.size;
451 tci.offset = offset;
452 tci.object = get_base_address (arg);
453 tci.known_current_type = NULL_TREE;
454 tci.type_maybe_changed = false;
455 tci.multiple_types_encountered = false;
457 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
458 &tci, NULL);
459 if (!tci.type_maybe_changed)
460 return false;
462 if (!tci.known_current_type
463 || tci.multiple_types_encountered
464 || offset != 0)
465 jfunc->type = IPA_JF_UNKNOWN;
466 else
468 jfunc->type = IPA_JF_KNOWN_TYPE;
469 jfunc->value.known_type.base_type = tci.known_current_type;
470 jfunc->value.known_type.component_type = comp_type;
473 return true;
476 /* Detect whether the dynamic type of ARG has changed (before callsite CALL) by
477 looking for assignments to its virtual table pointer. If it has, return true
478 and fill in the jump function JFUNC with relevant type information or set it
479 to unknown. ARG is the object itself (not a pointer to it, unless
480 dereferenced). BASE is the base of the memory access as returned by
481 get_ref_base_and_extent, as is the offset. */
483 static bool
484 detect_type_change (tree arg, tree base, gimple call,
485 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
487 return detect_type_change_1 (arg, base, TREE_TYPE (arg), call, jfunc, offset);
490 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
491 SSA name (its dereference will become the base and the offset is assumed to
492 be zero). */
494 static bool
495 detect_type_change_ssa (tree arg, gimple call, struct ipa_jump_func *jfunc)
497 tree comp_type;
499 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
500 if (!flag_devirtualize
501 || !POINTER_TYPE_P (TREE_TYPE (arg))
502 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != RECORD_TYPE)
503 return false;
505 comp_type = TREE_TYPE (TREE_TYPE (arg));
506 arg = build2 (MEM_REF, ptr_type_node, arg,
507 build_int_cst (ptr_type_node, 0));
509 return detect_type_change_1 (arg, arg, comp_type, call, jfunc, 0);
512 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
513 boolean variable pointed to by DATA. */
515 static bool
516 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
517 void *data)
519 bool *b = (bool *) data;
520 *b = true;
521 return true;
524 /* Return true if the formal parameter PARM might have been modified in this
525 function before reaching the statement STMT. PARM_AINFO is a pointer to a
526 structure containing temporary information about PARM. */
528 static bool
529 is_parm_modified_before_stmt (struct param_analysis_info *parm_ainfo,
530 gimple stmt, tree parm)
532 bool modified = false;
533 ao_ref refd;
535 if (parm_ainfo->modified)
536 return true;
538 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
539 ao_ref_init (&refd, parm);
540 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
541 &modified, &parm_ainfo->visited_statements);
542 if (modified)
544 parm_ainfo->modified = true;
545 return true;
547 return false;
550 /* If STMT is an assignment that loads a value from a parameter declaration,
551 return the index of the parameter in ipa_node_params which has not been
552 modified. Otherwise return -1. */
554 static int
555 load_from_unmodified_param (struct ipa_node_params *info,
556 struct param_analysis_info *parms_ainfo,
557 gimple stmt)
559 int index;
560 tree op1;
562 if (!gimple_assign_single_p (stmt))
563 return -1;
565 op1 = gimple_assign_rhs1 (stmt);
566 if (TREE_CODE (op1) != PARM_DECL)
567 return -1;
569 index = ipa_get_param_decl_index (info, op1);
570 if (index < 0
571 || is_parm_modified_before_stmt (&parms_ainfo[index], stmt, op1))
572 return -1;
574 return index;
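/* As a concrete illustration (SSA names hypothetical), the statement matched
   by the function above is a plain load from an addressable formal parameter,
   e.g.

     a.0_2 = a;

   where A is a PARM_DECL.  The index of A is returned only if no aliasing
   store could have modified A before this statement.  */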
577 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
578 of an assignment statement STMT, try to determine whether we are actually
579 handling any of the following cases and construct an appropriate jump
580 function into JFUNC if so:
582 1) The passed value is loaded from a formal parameter which is not a gimple
583 register (most probably because it is addressable, the value has to be
584 scalar) and we can guarantee the value has not changed. This case can
585 therefore be described by a simple pass-through jump function. For example:
587 foo (int a)
589 int a.0;
591 a.0_2 = a;
592 bar (a.0_2);
594 2) The passed value can be described by a simple arithmetic pass-through
595 jump function. E.g.
597 foo (int a)
599 int D.2064;
601 D.2064_4 = a.1(D) + 4;
602 bar (D.2064_4);
604 This case can also occur in combination with the previous one, e.g.:
606 foo (int a, int z)
608 int a.0;
609 int D.2064;
611 a.0_3 = a;
612 D.2064_4 = a.0_3 + 4;
613 foo (D.2064_4);
615 3) The passed value is an address of an object within another one (which
616 is also passed by reference). Such situations are described by an ancestor
617 jump function and describe situations such as:
619 B::foo() (struct B * const this)
621 struct A * D.1845;
623 D.1845_2 = &this_1(D)->D.1748;
624 A::bar (D.1845_2);
626 INFO is the structure describing individual parameters across different
627 stages of IPA optimizations. PARMS_AINFO contains the information that is
628 only needed for intraprocedural analysis. */
630 static void
631 compute_complex_assign_jump_func (struct ipa_node_params *info,
632 struct param_analysis_info *parms_ainfo,
633 struct ipa_jump_func *jfunc,
634 gimple call, gimple stmt, tree name)
636 HOST_WIDE_INT offset, size, max_size;
637 tree op1, tc_ssa, base, ssa;
638 int index;
640 op1 = gimple_assign_rhs1 (stmt);
642 if (TREE_CODE (op1) == SSA_NAME)
644 if (SSA_NAME_IS_DEFAULT_DEF (op1))
645 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
646 else
647 index = load_from_unmodified_param (info, parms_ainfo,
648 SSA_NAME_DEF_STMT (op1));
649 tc_ssa = op1;
651 else
653 index = load_from_unmodified_param (info, parms_ainfo, stmt);
654 tc_ssa = gimple_assign_lhs (stmt);
657 if (index >= 0)
659 tree op2 = gimple_assign_rhs2 (stmt);
661 if (op2)
663 if (!is_gimple_ip_invariant (op2)
664 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
665 && !useless_type_conversion_p (TREE_TYPE (name),
666 TREE_TYPE (op1))))
667 return;
669 jfunc->type = IPA_JF_PASS_THROUGH;
670 jfunc->value.pass_through.formal_id = index;
671 jfunc->value.pass_through.operation = gimple_assign_rhs_code (stmt);
672 jfunc->value.pass_through.operand = op2;
674 else if (gimple_assign_single_p (stmt)
675 && !detect_type_change_ssa (tc_ssa, call, jfunc))
677 jfunc->type = IPA_JF_PASS_THROUGH;
678 jfunc->value.pass_through.formal_id = index;
679 jfunc->value.pass_through.operation = NOP_EXPR;
681 return;
684 if (TREE_CODE (op1) != ADDR_EXPR)
685 return;
686 op1 = TREE_OPERAND (op1, 0);
687 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
688 return;
689 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
690 if (TREE_CODE (base) != MEM_REF
691 /* If this is a varying address, punt. */
692 || max_size == -1
693 || max_size != size)
694 return;
695 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
696 ssa = TREE_OPERAND (base, 0);
697 if (TREE_CODE (ssa) != SSA_NAME
698 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
699 || offset < 0)
700 return;
702 /* Dynamic types are changed only in constructors and destructors. */
703 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
704 if (index >= 0
705 && !detect_type_change (op1, base, call, jfunc, offset))
707 jfunc->type = IPA_JF_ANCESTOR;
708 jfunc->value.ancestor.formal_id = index;
709 jfunc->value.ancestor.offset = offset;
710 jfunc->value.ancestor.type = TREE_TYPE (op1);
714 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
715 it looks like:
717 iftmp.1_3 = &obj_2(D)->D.1762;
719 The base of the MEM_REF must be a default definition SSA NAME of a
720 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
721 whole MEM_REF expression is returned and the offset calculated from any
722 handled components and the MEM_REF itself is stored into *OFFSET. The whole
723 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
725 static tree
726 get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
728 HOST_WIDE_INT size, max_size;
729 tree expr, parm, obj;
731 if (!gimple_assign_single_p (assign))
732 return NULL_TREE;
733 expr = gimple_assign_rhs1 (assign);
735 if (TREE_CODE (expr) != ADDR_EXPR)
736 return NULL_TREE;
737 expr = TREE_OPERAND (expr, 0);
738 obj = expr;
739 expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
741 if (TREE_CODE (expr) != MEM_REF
742 /* If this is a varying address, punt. */
743 || max_size == -1
744 || max_size != size
745 || *offset < 0)
746 return NULL_TREE;
747 parm = TREE_OPERAND (expr, 0);
748 if (TREE_CODE (parm) != SSA_NAME
749 || !SSA_NAME_IS_DEFAULT_DEF (parm)
750 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
751 return NULL_TREE;
753 *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
754 *obj_p = obj;
755 return expr;
759 /* Given that an actual argument is an SSA_NAME that is a result of a phi
760 statement PHI, try to find out whether NAME is in fact a
761 multiple-inheritance typecast from a descendant into an ancestor of a formal
762 parameter and thus can be described by an ancestor jump function and if so,
763 write the appropriate function into JFUNC.
765 Essentially we want to match the following pattern:
767 if (obj_2(D) != 0B)
768 goto <bb 3>;
769 else
770 goto <bb 4>;
772 <bb 3>:
773 iftmp.1_3 = &obj_2(D)->D.1762;
775 <bb 4>:
776 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
777 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
778 return D.1879_6; */
780 static void
781 compute_complex_ancestor_jump_func (struct ipa_node_params *info,
782 struct ipa_jump_func *jfunc,
783 gimple call, gimple phi)
785 HOST_WIDE_INT offset;
786 gimple assign, cond;
787 basic_block phi_bb, assign_bb, cond_bb;
788 tree tmp, parm, expr, obj;
789 int index, i;
791 if (gimple_phi_num_args (phi) != 2)
792 return;
794 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
795 tmp = PHI_ARG_DEF (phi, 0);
796 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
797 tmp = PHI_ARG_DEF (phi, 1);
798 else
799 return;
800 if (TREE_CODE (tmp) != SSA_NAME
801 || SSA_NAME_IS_DEFAULT_DEF (tmp)
802 || !POINTER_TYPE_P (TREE_TYPE (tmp))
803 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
804 return;
806 assign = SSA_NAME_DEF_STMT (tmp);
807 assign_bb = gimple_bb (assign);
808 if (!single_pred_p (assign_bb))
809 return;
810 expr = get_ancestor_addr_info (assign, &obj, &offset);
811 if (!expr)
812 return;
813 parm = TREE_OPERAND (expr, 0);
814 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
815 gcc_assert (index >= 0);
817 cond_bb = single_pred (assign_bb);
818 cond = last_stmt (cond_bb);
819 if (!cond
820 || gimple_code (cond) != GIMPLE_COND
821 || gimple_cond_code (cond) != NE_EXPR
822 || gimple_cond_lhs (cond) != parm
823 || !integer_zerop (gimple_cond_rhs (cond)))
824 return;
826 phi_bb = gimple_bb (phi);
827 for (i = 0; i < 2; i++)
829 basic_block pred = EDGE_PRED (phi_bb, i)->src;
830 if (pred != assign_bb && pred != cond_bb)
831 return;
834 if (!detect_type_change (obj, expr, call, jfunc, offset))
836 jfunc->type = IPA_JF_ANCESTOR;
837 jfunc->value.ancestor.formal_id = index;
838 jfunc->value.ancestor.offset = offset;
839 jfunc->value.ancestor.type = TREE_TYPE (obj);
843 /* Given OP which is passed as an actual argument to a called function,
844 determine if it is possible to construct a KNOWN_TYPE jump function for it
845 and if so, create one and store it to JFUNC. */
847 static void
848 compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
849 gimple call)
851 HOST_WIDE_INT offset, size, max_size;
852 tree base;
854 if (!flag_devirtualize
855 || TREE_CODE (op) != ADDR_EXPR
856 || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE)
857 return;
859 op = TREE_OPERAND (op, 0);
860 base = get_ref_base_and_extent (op, &offset, &size, &max_size);
861 if (!DECL_P (base)
862 || max_size == -1
863 || max_size != size
864 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
865 || is_global_var (base))
866 return;
868 if (!TYPE_BINFO (TREE_TYPE (base))
869 || detect_type_change (op, base, call, jfunc, offset))
870 return;
872 jfunc->type = IPA_JF_KNOWN_TYPE;
873 jfunc->value.known_type.base_type = TREE_TYPE (base);
874 jfunc->value.known_type.offset = offset;
875 jfunc->value.known_type.component_type = TREE_TYPE (op);
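/* A short example of the situation handled above (source names hypothetical):

     S s;
     foo (&s);

   where S is a class with virtual functions and s is an automatic variable.
   OP is the ADDR_EXPR &s and, unless a dynamic type change is detected before
   the call, the result is a KNOWN_TYPE jump function with base_type S,
   offset 0 and component_type S.  */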
879 /* Determine the jump functions of scalar arguments. Scalar means SSA names
880 and constants of a number of selected types. INFO is the ipa_node_params
881 structure associated with the caller, PARMS_AINFO describes state of
882 analysis with respect to individual formal parameters. ARGS is the
883 ipa_edge_args structure describing the callsite CALL which is the call
884 statement being examined. */
886 static void
887 compute_scalar_jump_functions (struct ipa_node_params *info,
888 struct param_analysis_info *parms_ainfo,
889 struct ipa_edge_args *args,
890 gimple call)
892 tree arg;
893 unsigned num = 0;
895 for (num = 0; num < gimple_call_num_args (call); num++)
897 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, num);
898 arg = gimple_call_arg (call, num);
900 if (is_gimple_ip_invariant (arg))
902 tree constant = unshare_expr (arg);
903 if (constant && EXPR_P (constant))
904 SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
905 jfunc->type = IPA_JF_CONST;
906 jfunc->value.constant = constant;
908 else if (TREE_CODE (arg) == SSA_NAME)
910 if (SSA_NAME_IS_DEFAULT_DEF (arg))
912 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
914 if (index >= 0
915 && !detect_type_change_ssa (arg, call, jfunc))
917 jfunc->type = IPA_JF_PASS_THROUGH;
918 jfunc->value.pass_through.formal_id = index;
919 jfunc->value.pass_through.operation = NOP_EXPR;
922 else
924 gimple stmt = SSA_NAME_DEF_STMT (arg);
925 if (is_gimple_assign (stmt))
926 compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
927 call, stmt, arg);
928 else if (gimple_code (stmt) == GIMPLE_PHI)
929 compute_complex_ancestor_jump_func (info, jfunc, call, stmt);
932 else
933 compute_known_type_jump_func (arg, jfunc, call);
937 /* Inspect the given TYPE and return true iff it has the same structure (the
938 same number of fields of the same types) as a C++ member pointer. If
939 METHOD_PTR and DELTA are non-NULL, store the trees representing the
940 corresponding fields there. */
942 static bool
943 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
945 tree fld;
947 if (TREE_CODE (type) != RECORD_TYPE)
948 return false;
950 fld = TYPE_FIELDS (type);
951 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
952 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE)
953 return false;
955 if (method_ptr)
956 *method_ptr = fld;
958 fld = DECL_CHAIN (fld);
959 if (!fld || INTEGRAL_TYPE_P (fld))
960 return false;
961 if (delta)
962 *delta = fld;
964 if (DECL_CHAIN (fld))
965 return false;
967 return true;
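/* For reference, the record this predicate is meant to recognize is GCC's
   representation of a C++ pointer to member function under the common
   Itanium ABI: its first field (__pfn) has a pointer-to-METHOD_TYPE type and
   its second and last field (__delta) holds the this-pointer adjustment,
   roughly:

     struct
     {
       void (S::* __pfn) ();
       ptrdiff_t __delta;
     };

   METHOD_PTR then receives the __pfn FIELD_DECL and DELTA the __delta one.  */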
970 /* Go through arguments of the CALL and for every one that looks like a member
971 pointer, check whether it can be safely declared pass-through and if so,
972 mark that in the corresponding jump function of ARGS. Return true iff
973 there are non-pass-through member pointers within the arguments. INFO
974 describes formal parameters of the caller. PARMS_AINFO is a pointer to a
975 vector containing intermediate information about each formal parameter. */
977 static bool
978 compute_pass_through_member_ptrs (struct ipa_node_params *info,
979 struct param_analysis_info *parms_ainfo,
980 struct ipa_edge_args *args,
981 gimple call)
983 bool undecided_members = false;
984 unsigned num;
985 tree arg;
987 for (num = 0; num < gimple_call_num_args (call); num++)
989 arg = gimple_call_arg (call, num);
991 if (type_like_member_ptr_p (TREE_TYPE (arg), NULL, NULL))
993 if (TREE_CODE (arg) == PARM_DECL)
995 int index = ipa_get_param_decl_index (info, arg);
997 gcc_assert (index >= 0);
998 if (!is_parm_modified_before_stmt (&parms_ainfo[index], call,
999 arg))
1001 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args,
1002 num);
1003 jfunc->type = IPA_JF_PASS_THROUGH;
1004 jfunc->value.pass_through.formal_id = index;
1005 jfunc->value.pass_through.operation = NOP_EXPR;
1007 else
1008 undecided_members = true;
1010 else
1011 undecided_members = true;
1015 return undecided_members;
1018 /* Simple function filling in a member pointer constant jump function (with PFN
1019 and DELTA as the constant value) into JFUNC. */
1021 static void
1022 fill_member_ptr_cst_jump_function (struct ipa_jump_func *jfunc,
1023 tree pfn, tree delta)
1025 jfunc->type = IPA_JF_CONST_MEMBER_PTR;
1026 jfunc->value.member_cst.pfn = pfn;
1027 jfunc->value.member_cst.delta = delta;
1030 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1031 return the rhs of its defining statement. */
1033 static inline tree
1034 get_ssa_def_if_simple_copy (tree rhs)
1036 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1038 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1040 if (gimple_assign_single_p (def_stmt))
1041 rhs = gimple_assign_rhs1 (def_stmt);
1042 else
1043 break;
1045 return rhs;
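/* For example (SSA names hypothetical), given the chain

     x_2 = y_1(D);
     rhs_3 = x_2;

   calling the function above on rhs_3 walks the single-rhs copies and
   returns y_1(D), the default definition at the head of the chain.  */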
1048 /* Traverse statements from CALL backwards, scanning whether the argument ARG
1049 which is a member pointer is filled in with constant values. If it is, fill
1050 the jump function JFUNC in appropriately. METHOD_FIELD and DELTA_FIELD are
1051 fields of the record type of the member pointer. To give an example, we
1052 look for a pattern looking like the following:
1054 D.2515.__pfn ={v} printStuff;
1055 D.2515.__delta ={v} 0;
1056 i_1 = doprinting (D.2515); */
1058 static void
1059 determine_cst_member_ptr (gimple call, tree arg, tree method_field,
1060 tree delta_field, struct ipa_jump_func *jfunc)
1062 gimple_stmt_iterator gsi;
1063 tree method = NULL_TREE;
1064 tree delta = NULL_TREE;
1066 gsi = gsi_for_stmt (call);
1068 gsi_prev (&gsi);
1069 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1071 gimple stmt = gsi_stmt (gsi);
1072 tree lhs, rhs, fld;
1074 if (!stmt_may_clobber_ref_p (stmt, arg))
1075 continue;
1076 if (!gimple_assign_single_p (stmt))
1077 return;
1079 lhs = gimple_assign_lhs (stmt);
1080 rhs = gimple_assign_rhs1 (stmt);
1082 if (TREE_CODE (lhs) != COMPONENT_REF
1083 || TREE_OPERAND (lhs, 0) != arg)
1084 return;
1086 fld = TREE_OPERAND (lhs, 1);
1087 if (!method && fld == method_field)
1089 rhs = get_ssa_def_if_simple_copy (rhs);
1090 if (TREE_CODE (rhs) == ADDR_EXPR
1091 && TREE_CODE (TREE_OPERAND (rhs, 0)) == FUNCTION_DECL
1092 && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs, 0))) == METHOD_TYPE)
1094 method = TREE_OPERAND (rhs, 0);
1095 if (delta)
1097 fill_member_ptr_cst_jump_function (jfunc, rhs, delta);
1098 return;
1101 else
1102 return;
1105 if (!delta && fld == delta_field)
1107 rhs = get_ssa_def_if_simple_copy (rhs);
1108 if (TREE_CODE (rhs) == INTEGER_CST)
1110 delta = rhs;
1111 if (method)
1113 fill_member_ptr_cst_jump_function (jfunc, rhs, delta);
1114 return;
1117 else
1118 return;
1122 return;
1125 /* Go through the arguments of the CALL and for every member pointer among
1126 them try to determine whether it is a constant. If it is, create a
1127 corresponding constant jump function in ARGS, the ipa_edge_args structure
1128 associated with the call. */
1130 static void
1131 compute_cst_member_ptr_arguments (struct ipa_edge_args *args,
1132 gimple call)
1134 unsigned num;
1135 tree arg, method_field, delta_field;
1137 for (num = 0; num < gimple_call_num_args (call); num++)
1139 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, num);
1140 arg = gimple_call_arg (call, num);
1142 if (jfunc->type == IPA_JF_UNKNOWN
1143 && type_like_member_ptr_p (TREE_TYPE (arg), &method_field,
1144 &delta_field))
1145 determine_cst_member_ptr (call, arg, method_field, delta_field, jfunc);
1149 /* Compute jump function for all arguments of callsite CS and insert the
1150 information in the jump_functions array in the ipa_edge_args corresponding
1151 to this callsite. */
1153 static void
1154 ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
1155 struct cgraph_edge *cs)
1157 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1158 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1159 gimple call = cs->call_stmt;
1160 int arg_num = gimple_call_num_args (call);
1162 if (arg_num == 0 || args->jump_functions)
1163 return;
1164 VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions, arg_num);
1166 /* We will deal with constants and SSA scalars first: */
1167 compute_scalar_jump_functions (info, parms_ainfo, args, call);
1169 /* Let's check whether there are any potential member pointers and if so,
1170 whether we can determine their functions as pass_through. */
1171 if (!compute_pass_through_member_ptrs (info, parms_ainfo, args, call))
1172 return;
1174 /* Finally, let's check whether we actually pass a new constant member
1175 pointer here... */
1176 compute_cst_member_ptr_arguments (args, call);
1179 /* Compute jump functions for all edges - both direct and indirect - outgoing
1180 from NODE. Also count the actual arguments in the process. */
1182 static void
1183 ipa_compute_jump_functions (struct cgraph_node *node,
1184 struct param_analysis_info *parms_ainfo)
1186 struct cgraph_edge *cs;
1188 for (cs = node->callees; cs; cs = cs->next_callee)
1190 struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
1191 NULL);
1192 /* We do not need to bother analyzing calls to unknown
1193 functions unless they may become known during lto/whopr. */
1194 if (!callee->analyzed && !flag_lto)
1195 continue;
1196 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
1199 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
1200 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
1203 /* If RHS looks like a rhs of a statement loading pfn from a member
1204 pointer formal parameter, return the parameter, otherwise return
1205 NULL. If USE_DELTA, then we look for a use of the delta field
1206 rather than the pfn. */
1208 static tree
1209 ipa_get_member_ptr_load_param (tree rhs, bool use_delta)
1211 tree rec, ref_field, ref_offset, fld, fld_offset, ptr_field, delta_field;
1213 if (TREE_CODE (rhs) == COMPONENT_REF)
1215 ref_field = TREE_OPERAND (rhs, 1);
1216 rhs = TREE_OPERAND (rhs, 0);
1218 else
1219 ref_field = NULL_TREE;
1220 if (TREE_CODE (rhs) != MEM_REF)
1221 return NULL_TREE;
1222 rec = TREE_OPERAND (rhs, 0);
1223 if (TREE_CODE (rec) != ADDR_EXPR)
1224 return NULL_TREE;
1225 rec = TREE_OPERAND (rec, 0);
1226 if (TREE_CODE (rec) != PARM_DECL
1227 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1228 return NULL_TREE;
1230 ref_offset = TREE_OPERAND (rhs, 1);
1232 if (ref_field)
1234 if (integer_nonzerop (ref_offset))
1235 return NULL_TREE;
1237 if (use_delta)
1238 fld = delta_field;
1239 else
1240 fld = ptr_field;
1242 return ref_field == fld ? rec : NULL_TREE;
1245 if (use_delta)
1246 fld_offset = byte_position (delta_field);
1247 else
1248 fld_offset = byte_position (ptr_field);
1250 return tree_int_cst_equal (ref_offset, fld_offset) ? rec : NULL_TREE;
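/* For illustration, the loads recognized above appear in gimple dumps
   roughly as (identifiers hypothetical, see also the larger example in the
   comment before ipa_analyze_indirect_call_uses below)

     f$__delta_5 = MEM[(struct  *)&f];
     f$__pfn_24 = MEM[(struct  *)&f + 4B];

   where f is a member pointer PARM_DECL.  With no COMPONENT_REF wrapper, the
   constant MEM_REF offset is compared against byte_position of the requested
   field; otherwise the wrapped COMPONENT_REF's field must match it directly.  */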
1253 /* If STMT looks like a statement loading a value from a member pointer formal
1254 parameter, this function returns that parameter. */
1256 static tree
1257 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta)
1259 tree rhs;
1261 if (!gimple_assign_single_p (stmt))
1262 return NULL_TREE;
1264 rhs = gimple_assign_rhs1 (stmt);
1265 return ipa_get_member_ptr_load_param (rhs, use_delta);
1268 /* Returns true iff T is an SSA_NAME defined by a statement. */
1270 static bool
1271 ipa_is_ssa_with_stmt_def (tree t)
1273 if (TREE_CODE (t) == SSA_NAME
1274 && !SSA_NAME_IS_DEFAULT_DEF (t))
1275 return true;
1276 else
1277 return false;
1280 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1281 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1282 indirect call graph edge. */
1284 static struct cgraph_edge *
1285 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
1287 struct cgraph_edge *cs;
1289 cs = cgraph_edge (node, stmt);
1290 cs->indirect_info->param_index = param_index;
1291 cs->indirect_info->anc_offset = 0;
1292 cs->indirect_info->polymorphic = 0;
1293 return cs;
1296 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1297 (described by INFO). PARMS_AINFO is a pointer to a vector containing
1298 intermediate information about each formal parameter. Currently it checks
1299 whether the call calls a pointer that is a formal parameter and if so, the
1300 parameter is marked with the called flag and an indirect call graph edge
1301 describing the call is created. This is very simple for ordinary pointers
1302 represented in SSA but not-so-nice when it comes to member pointers. The
1303 ugly part of this function does nothing more than trying to match the
1304 pattern of such a call. An example of such a pattern is the gimple dump
1305 below, the call is on the last line:
1307 <bb 2>:
1308 f$__delta_5 = f.__delta;
1309 f$__pfn_24 = f.__pfn;
1311 or
1312 <bb 2>:
1313 f$__delta_5 = MEM[(struct *)&f];
1314 f$__pfn_24 = MEM[(struct *)&f + 4B];
1316 and a few lines below:
1318 <bb 5>
1319 D.2496_3 = (int) f$__pfn_24;
1320 D.2497_4 = D.2496_3 & 1;
1321 if (D.2497_4 != 0)
1322 goto <bb 3>;
1323 else
1324 goto <bb 4>;
1326 <bb 6>:
1327 D.2500_7 = (unsigned int) f$__delta_5;
1328 D.2501_8 = &S + D.2500_7;
1329 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1330 D.2503_10 = *D.2502_9;
1331 D.2504_12 = f$__pfn_24 + -1;
1332 D.2505_13 = (unsigned int) D.2504_12;
1333 D.2506_14 = D.2503_10 + D.2505_13;
1334 D.2507_15 = *D.2506_14;
1335 iftmp.11_16 = (String:: *) D.2507_15;
1337 <bb 7>:
1338 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1339 D.2500_19 = (unsigned int) f$__delta_5;
1340 D.2508_20 = &S + D.2500_19;
1341 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1343 Such patterns are results of simple calls to a member pointer:
1345 int doprinting (int (MyString::* f)(int) const)
1347 MyString S ("somestring");
1349 return (S.*f)(4);
1353 static void
1354 ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1355 struct ipa_node_params *info,
1356 struct param_analysis_info *parms_ainfo,
1357 gimple call, tree target)
1359 gimple def;
1360 tree n1, n2;
1361 gimple d1, d2;
1362 tree rec, rec2, cond;
1363 gimple branch;
1364 int index;
1365 basic_block bb, virt_bb, join;
1367 if (SSA_NAME_IS_DEFAULT_DEF (target))
1369 tree var = SSA_NAME_VAR (target);
1370 index = ipa_get_param_decl_index (info, var);
1371 if (index >= 0)
1372 ipa_note_param_call (node, index, call);
1373 return;
1376 /* Now we need to try to match the complex pattern of calling a member
1377 pointer. */
1379 if (!POINTER_TYPE_P (TREE_TYPE (target))
1380 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1381 return;
1383 def = SSA_NAME_DEF_STMT (target);
1384 if (gimple_code (def) != GIMPLE_PHI)
1385 return;
1387 if (gimple_phi_num_args (def) != 2)
1388 return;
1390 /* First, we need to check whether one of these is a load from a member
1391 pointer that is a parameter to this function. */
1392 n1 = PHI_ARG_DEF (def, 0);
1393 n2 = PHI_ARG_DEF (def, 1);
1394 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1395 return;
1396 d1 = SSA_NAME_DEF_STMT (n1);
1397 d2 = SSA_NAME_DEF_STMT (n2);
1399 join = gimple_bb (def);
1400 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false)))
1402 if (ipa_get_stmt_member_ptr_load_param (d2, false))
1403 return;
1405 bb = EDGE_PRED (join, 0)->src;
1406 virt_bb = gimple_bb (d2);
1408 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false)))
1410 bb = EDGE_PRED (join, 1)->src;
1411 virt_bb = gimple_bb (d1);
1413 else
1414 return;
1416 /* Second, we need to check that the basic blocks are laid out in the way
1417 corresponding to the pattern. */
1419 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1420 || single_pred (virt_bb) != bb
1421 || single_succ (virt_bb) != join)
1422 return;
1424 /* Third, let's see that the branching is done depending on the least
1425 significant bit of the pfn. */
1427 branch = last_stmt (bb);
1428 if (!branch || gimple_code (branch) != GIMPLE_COND)
1429 return;
1431 if ((gimple_cond_code (branch) != NE_EXPR
1432 && gimple_cond_code (branch) != EQ_EXPR)
1433 || !integer_zerop (gimple_cond_rhs (branch)))
1434 return;
1436 cond = gimple_cond_lhs (branch);
1437 if (!ipa_is_ssa_with_stmt_def (cond))
1438 return;
1440 def = SSA_NAME_DEF_STMT (cond);
1441 if (!is_gimple_assign (def)
1442 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1443 || !integer_onep (gimple_assign_rhs2 (def)))
1444 return;
1446 cond = gimple_assign_rhs1 (def);
1447 if (!ipa_is_ssa_with_stmt_def (cond))
1448 return;
1450 def = SSA_NAME_DEF_STMT (cond);
1452 if (is_gimple_assign (def)
1453 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
1455 cond = gimple_assign_rhs1 (def);
1456 if (!ipa_is_ssa_with_stmt_def (cond))
1457 return;
1458 def = SSA_NAME_DEF_STMT (cond);
1461 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1462 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1463 == ptrmemfunc_vbit_in_delta));
1465 if (rec != rec2)
1466 return;
1468 index = ipa_get_param_decl_index (info, rec);
1469 if (index >= 0 && !is_parm_modified_before_stmt (&parms_ainfo[index],
1470 call, rec))
1471 ipa_note_param_call (node, index, call);
1473 return;
1476 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1477 object referenced in the expression is a formal parameter of the caller
1478 (described by INFO), create a call note for the statement. */
1480 static void
1481 ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1482 struct ipa_node_params *info, gimple call,
1483 tree target)
1485 struct cgraph_edge *cs;
1486 struct cgraph_indirect_call_info *ii;
1487 struct ipa_jump_func jfunc;
1488 tree obj = OBJ_TYPE_REF_OBJECT (target);
1489 int index;
1490 HOST_WIDE_INT anc_offset;
1492 if (!flag_devirtualize)
1493 return;
1495 if (TREE_CODE (obj) != SSA_NAME)
1496 return;
1498 if (SSA_NAME_IS_DEFAULT_DEF (obj))
1500 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
1501 return;
1503 anc_offset = 0;
1504 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
1505 gcc_assert (index >= 0);
1506 if (detect_type_change_ssa (obj, call, &jfunc))
1507 return;
1509 else
1511 gimple stmt = SSA_NAME_DEF_STMT (obj);
1512 tree expr;
1514 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
1515 if (!expr)
1516 return;
1517 index = ipa_get_param_decl_index (info,
1518 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
1519 gcc_assert (index >= 0);
1520 if (detect_type_change (obj, expr, call, &jfunc, anc_offset))
1521 return;
1524 cs = ipa_note_param_call (node, index, call);
1525 ii = cs->indirect_info;
1526 ii->anc_offset = anc_offset;
1527 ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
1528 ii->otr_type = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (target)));
1529 ii->polymorphic = 1;
1532 /* Analyze a call statement CALL whether and how it utilizes formal parameters
1533 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
1534 containing intermediate information about each formal parameter. */
1536 static void
1537 ipa_analyze_call_uses (struct cgraph_node *node,
1538 struct ipa_node_params *info,
1539 struct param_analysis_info *parms_ainfo, gimple call)
1541 tree target = gimple_call_fn (call);
1543 if (!target)
1544 return;
1545 if (TREE_CODE (target) == SSA_NAME)
1546 ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
1547 else if (TREE_CODE (target) == OBJ_TYPE_REF)
1548 ipa_analyze_virtual_call_uses (node, info, call, target);
1552 /* Analyze the call statement STMT with respect to formal parameters (described
1553 in INFO) of caller given by NODE. Currently it only checks whether formal
1554 parameters are called. PARMS_AINFO is a pointer to a vector containing
1555 intermediate information about each formal parameter. */
1557 static void
1558 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
1559 struct param_analysis_info *parms_ainfo, gimple stmt)
1561 if (is_gimple_call (stmt))
1562 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
1565 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
1566 If OP is a parameter declaration, mark it as used in the info structure
1567 passed in DATA. */
1569 static bool
1570 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
1571 tree op, void *data)
1573 struct ipa_node_params *info = (struct ipa_node_params *) data;
1575 op = get_base_address (op);
1576 if (op
1577 && TREE_CODE (op) == PARM_DECL)
1579 int index = ipa_get_param_decl_index (info, op);
1580 gcc_assert (index >= 0);
1581 ipa_set_param_used (info, index, true);
1584 return false;
1587 /* Scan the function body of NODE and inspect the uses of formal parameters.
1588 Store the findings in various structures of the associated ipa_node_params
1589 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
1590 vector containing intermediate information about each formal parameter. */
1592 static void
1593 ipa_analyze_params_uses (struct cgraph_node *node,
1594 struct param_analysis_info *parms_ainfo)
1596 tree decl = node->decl;
1597 basic_block bb;
1598 struct function *func;
1599 gimple_stmt_iterator gsi;
1600 struct ipa_node_params *info = IPA_NODE_REF (node);
1601 int i;
1603 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
1604 return;
1606 for (i = 0; i < ipa_get_param_count (info); i++)
1608 tree parm = ipa_get_param (info, i);
1609 /* For SSA regs see if parameter is used. For non-SSA we compute
1610 the flag during modification analysis. */
1611 if (is_gimple_reg (parm)
1612 && gimple_default_def (DECL_STRUCT_FUNCTION (node->decl), parm))
1613 ipa_set_param_used (info, i, true);
1616 func = DECL_STRUCT_FUNCTION (decl);
1617 FOR_EACH_BB_FN (bb, func)
1619 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1621 gimple stmt = gsi_stmt (gsi);
1623 if (is_gimple_debug (stmt))
1624 continue;
1626 ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
1627 walk_stmt_load_store_addr_ops (stmt, info,
1628 visit_ref_for_mod_analysis,
1629 visit_ref_for_mod_analysis,
1630 visit_ref_for_mod_analysis);
1632 for (gsi = gsi_start (phi_nodes (bb)); !gsi_end_p (gsi); gsi_next (&gsi))
1633 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
1634 visit_ref_for_mod_analysis,
1635 visit_ref_for_mod_analysis,
1636 visit_ref_for_mod_analysis);
1639 info->uses_analysis_done = 1;
1642 /* Initialize the array describing properties of formal parameters
1643 of NODE, analyze their uses and compute jump functions associated
1644 with actual arguments of calls from within NODE. */
1646 void
1647 ipa_analyze_node (struct cgraph_node *node)
1649 struct ipa_node_params *info;
1650 struct param_analysis_info *parms_ainfo;
1651 int i, param_count;
1653 ipa_check_create_node_params ();
1654 ipa_check_create_edge_args ();
1655 info = IPA_NODE_REF (node);
1656 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
1657 current_function_decl = node->decl;
1658 ipa_initialize_node_params (node);
1660 param_count = ipa_get_param_count (info);
1661 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
1662 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
1664 ipa_analyze_params_uses (node, parms_ainfo);
1665 ipa_compute_jump_functions (node, parms_ainfo);
1667 for (i = 0; i < param_count; i++)
1668 if (parms_ainfo[i].visited_statements)
1669 BITMAP_FREE (parms_ainfo[i].visited_statements);
1671 current_function_decl = NULL;
1672 pop_cfun ();
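/* A minimal sketch of how a pass drives the analysis above; this mirrors
   what IPA-CP summary generation does, though the exact guard on which nodes
   are analyzed is illustrative rather than verbatim:

     struct cgraph_node *node;

     for (node = cgraph_nodes; node; node = node->next)
       if (node->analyzed)
         ipa_analyze_node (node);  */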
1676 /* Update the jump function DST when the call graph edge corresponding to SRC
1677 is being inlined, knowing that DST is of type ancestor and SRC of known
1678 type. */
1680 static void
1681 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
1682 struct ipa_jump_func *dst)
1684 HOST_WIDE_INT combined_offset;
1685 tree combined_type;
1687 combined_offset = src->value.known_type.offset + dst->value.ancestor.offset;
1688 combined_type = dst->value.ancestor.type;
1690 dst->type = IPA_JF_KNOWN_TYPE;
1691 dst->value.known_type.base_type = src->value.known_type.base_type;
1692 dst->value.known_type.offset = combined_offset;
1693 dst->value.known_type.component_type = combined_type;
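/* A worked example of the combination above (numbers hypothetical): if SRC
   says the argument is known to point into an object of type B at offset 32
   bits and DST is an ancestor jump function adding another 64 bits, the
   result is a KNOWN_TYPE jump function with base_type B and offset
   32 + 64 = 96 bits, while the component type is taken from the ancestor's
   type.  */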
1696 /* Update the jump functions associated with call graph edge E when the call
1697 graph edge CS is being inlined, assuming that E->caller is already (possibly
1698 indirectly) inlined into CS->callee and that E has not been inlined. */
1700 static void
1701 update_jump_functions_after_inlining (struct cgraph_edge *cs,
1702 struct cgraph_edge *e)
1704 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
1705 struct ipa_edge_args *args = IPA_EDGE_REF (e);
1706 int count = ipa_get_cs_argument_count (args);
1707 int i;
1709 for (i = 0; i < count; i++)
1711 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
1713 if (dst->type == IPA_JF_ANCESTOR)
1715 struct ipa_jump_func *src;
1717 /* Variable number of arguments can cause havoc if we try to access
1718 one that does not exist in the inlined edge. So make sure we
1719 don't. */
1720 if (dst->value.ancestor.formal_id >= ipa_get_cs_argument_count (top))
1722 dst->type = IPA_JF_UNKNOWN;
1723 continue;
1726 src = ipa_get_ith_jump_func (top, dst->value.ancestor.formal_id);
1727 if (src->type == IPA_JF_KNOWN_TYPE)
1728 combine_known_type_and_ancestor_jfs (src, dst);
1729 else if (src->type == IPA_JF_PASS_THROUGH
1730 && src->value.pass_through.operation == NOP_EXPR)
1731 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
1732 else if (src->type == IPA_JF_ANCESTOR)
1734 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
1735 dst->value.ancestor.offset += src->value.ancestor.offset;
1737 else
1738 dst->type = IPA_JF_UNKNOWN;
1740 else if (dst->type == IPA_JF_PASS_THROUGH)
1742 struct ipa_jump_func *src;
1743 /* We must check range due to calls with variable number of arguments
1744 and we cannot combine jump functions with operations. */
1745 if (dst->value.pass_through.operation == NOP_EXPR
1746 && (dst->value.pass_through.formal_id
1747 < ipa_get_cs_argument_count (top)))
1749 src = ipa_get_ith_jump_func (top,
1750 dst->value.pass_through.formal_id);
1751 *dst = *src;
1753 else
1754 dst->type = IPA_JF_UNKNOWN;
1759 /* If TARGET is an addr_expr of a function declaration, make it the destination
1760 of an indirect edge IE and return the edge. Otherwise, return NULL. */
1762 struct cgraph_edge *
1763 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
1765 struct cgraph_node *callee;
1767 if (TREE_CODE (target) == ADDR_EXPR)
1768 target = TREE_OPERAND (target, 0);
1769 if (TREE_CODE (target) != FUNCTION_DECL)
1770 return NULL;
1771 callee = cgraph_get_node (target);
1772 if (!callee)
1773 return NULL;
1774 ipa_check_create_node_params ();
1776 /* We cannot make edges to inline clones. It is a bug that someone removed
1777 the cgraph node too early. */
1778 gcc_assert (!callee->global.inlined_to);
1780 cgraph_make_edge_direct (ie, callee);
1781 if (dump_file)
1783 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
1784 "(%s/%i -> %s/%i), for stmt ",
1785 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
1786 xstrdup (cgraph_node_name (ie->caller)), ie->caller->uid,
1787 xstrdup (cgraph_node_name (ie->callee)), ie->callee->uid);
1788 if (ie->call_stmt)
1789 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
1790 else
1791 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
1793 callee = cgraph_function_or_thunk_node (callee, NULL);
1795 return ie;
1798 /* Try to find a destination for indirect edge IE that corresponds to a simple
1799 call or a call of a member function pointer and where the destination is a
1800 pointer formal parameter described by jump function JFUNC. If it can be
1801 determined, return the newly direct edge, otherwise return NULL. */
1803 static struct cgraph_edge *
1804 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
1805 struct ipa_jump_func *jfunc)
1807 tree target;
1809 if (jfunc->type == IPA_JF_CONST)
1810 target = jfunc->value.constant;
1811 else if (jfunc->type == IPA_JF_CONST_MEMBER_PTR)
1812 target = jfunc->value.member_cst.pfn;
1813 else
1814 return NULL;
1816 return ipa_make_edge_direct_to_target (ie, target);
1819 /* Try to find a destination for indirect edge IE that corresponds to a
1820 virtual call based on a formal parameter which is described by jump
1821 function JFUNC and if it can be determined, make it direct and return the
1822 direct edge. Otherwise, return NULL. */
1824 static struct cgraph_edge *
1825 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
1826 struct ipa_jump_func *jfunc)
1828 tree binfo, target;
1830 if (jfunc->type != IPA_JF_KNOWN_TYPE)
1831 return NULL;
1833 binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
1834 gcc_checking_assert (binfo);
1835 binfo = get_binfo_at_offset (binfo, jfunc->value.known_type.offset
1836 + ie->indirect_info->anc_offset,
1837 ie->indirect_info->otr_type);
1838 if (binfo)
1839 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
1840 binfo);
1841 else
1842 return NULL;
1844 if (target)
1845 return ipa_make_edge_direct_to_target (ie, target);
1846 else
1847 return NULL;
1850 /* Update the param called notes associated with NODE when CS is being inlined,
1851 assuming NODE is (potentially indirectly) inlined into CS->callee.
1852 Moreover, if the callee is discovered to be constant, create a new cgraph
1853 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
1854 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
1856 static bool
1857 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
1858 struct cgraph_node *node,
1859 VEC (cgraph_edge_p, heap) **new_edges)
1861 struct ipa_edge_args *top;
1862 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
1863 bool res = false;
1865 ipa_check_create_edge_args ();
1866 top = IPA_EDGE_REF (cs);
1868 for (ie = node->indirect_calls; ie; ie = next_ie)
1870 struct cgraph_indirect_call_info *ici = ie->indirect_info;
1871 struct ipa_jump_func *jfunc;
1873 next_ie = ie->next_callee;
1875 if (ici->param_index == -1)
1876 continue;
1878 /* We must check range due to calls with variable number of arguments: */
1879 if (ici->param_index >= ipa_get_cs_argument_count (top))
1881 ici->param_index = -1;
1882 continue;
1885 jfunc = ipa_get_ith_jump_func (top, ici->param_index);
1886 if (jfunc->type == IPA_JF_PASS_THROUGH
1887 && jfunc->value.pass_through.operation == NOP_EXPR)
1888 ici->param_index = jfunc->value.pass_through.formal_id;
1889 else if (jfunc->type == IPA_JF_ANCESTOR)
1891 ici->param_index = jfunc->value.ancestor.formal_id;
1892 ici->anc_offset += jfunc->value.ancestor.offset;
1894 else
1895 /* Either we can find a destination for this edge now or never. */
1896 ici->param_index = -1;
1898 if (!flag_indirect_inlining)
1899 continue;
1901 if (ici->polymorphic)
1902 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc);
1903 else
1904 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc);
1906 if (new_direct_edge)
1908 new_direct_edge->indirect_inlining_edge = 1;
1909 if (new_direct_edge->call_stmt)
1910 new_direct_edge->call_stmt_cannot_inline_p
1911 = !gimple_check_call_matching_types (new_direct_edge->call_stmt,
1912 new_direct_edge->callee->decl);
1913 if (new_edges)
1915 VEC_safe_push (cgraph_edge_p, heap, *new_edges,
1916 new_direct_edge);
1917 top = IPA_EDGE_REF (cs);
1918 res = true;
1923 return res;
1926 /* Recursively traverse subtree of NODE (including node) made of inlined
1927 cgraph_edges when CS has been inlined and invoke
1928 update_indirect_edges_after_inlining on all nodes and
1929 update_jump_functions_after_inlining on all non-inlined edges that lead out
1930 of this subtree. Newly discovered indirect edges will be added to
1931 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
1932 created. */
1934 static bool
1935 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
1936 struct cgraph_node *node,
1937 VEC (cgraph_edge_p, heap) **new_edges)
1939 struct cgraph_edge *e;
1940 bool res;
1942 res = update_indirect_edges_after_inlining (cs, node, new_edges);
1944 for (e = node->callees; e; e = e->next_callee)
1945 if (!e->inline_failed)
1946 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
1947 else
1948 update_jump_functions_after_inlining (cs, e);
1949 for (e = node->indirect_calls; e; e = e->next_callee)
1950 update_jump_functions_after_inlining (cs, e);
1952 return res;
1955 /* Update jump functions and call note functions on inlining the call site CS.
1956 CS is expected to lead to a node already cloned by
1957 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
1958 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
1959 created. */
1961 bool
1962 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
1963 VEC (cgraph_edge_p, heap) **new_edges)
1965 bool changed;
1966 /* Do nothing if the preparation phase has not been carried out yet
1967 (i.e. during early inlining). */
1968 if (!ipa_node_params_vector)
1969 return false;
1970 gcc_assert (ipa_edge_args_vector);
1972 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
1974 /* We do not keep jump functions of inlined edges up to date. Better to free
1975 them so we do not access them accidentally. */
1976 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
1977 return changed;
1980 /* Frees all dynamically allocated structures that the argument info points
1981 to. */
1983 void
1984 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
1986 if (args->jump_functions)
1987 ggc_free (args->jump_functions);
1989 memset (args, 0, sizeof (*args));
1992 /* Free all ipa_edge_args structures. */
1994 void
1995 ipa_free_all_edge_args (void)
1997 int i;
1998 struct ipa_edge_args *args;
2000 FOR_EACH_VEC_ELT (ipa_edge_args_t, ipa_edge_args_vector, i, args)
2001 ipa_free_edge_args_substructures (args);
2003 VEC_free (ipa_edge_args_t, gc, ipa_edge_args_vector);
2004 ipa_edge_args_vector = NULL;
2007 /* Frees all dynamically allocated structures that the param info points
2008 to. */
2010 void
2011 ipa_free_node_params_substructures (struct ipa_node_params *info)
2013 VEC_free (ipa_param_descriptor_t, heap, info->descriptors);
2014 free (info->lattices);
2015 /* Lattice values and their sources are deallocated with their allocation
2016 pool. */
2017 VEC_free (tree, heap, info->known_vals);
2018 memset (info, 0, sizeof (*info));
2021 /* Free all ipa_node_params structures. */
2023 void
2024 ipa_free_all_node_params (void)
2026 int i;
2027 struct ipa_node_params *info;
2029 FOR_EACH_VEC_ELT (ipa_node_params_t, ipa_node_params_vector, i, info)
2030 ipa_free_node_params_substructures (info);
2032 VEC_free (ipa_node_params_t, heap, ipa_node_params_vector);
2033 ipa_node_params_vector = NULL;
2036 /* Hook that is called by cgraph.c when an edge is removed. */
2038 static void
2039 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
2041 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
2042 if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
2043 <= (unsigned)cs->uid)
2044 return;
2045 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
2048 /* Hook that is called by cgraph.c when a node is removed. */
2050 static void
2051 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2053 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
2054 if (VEC_length (ipa_node_params_t, ipa_node_params_vector)
2055 <= (unsigned)node->uid)
2056 return;
2057 ipa_free_node_params_substructures (IPA_NODE_REF (node));
2060 /* Hook that is called by cgraph.c when an edge is duplicated. */
2062 static void
2063 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
2064 __attribute__((unused)) void *data)
2066 struct ipa_edge_args *old_args, *new_args;
2068 ipa_check_create_edge_args ();
2070 old_args = IPA_EDGE_REF (src);
2071 new_args = IPA_EDGE_REF (dst);
2073 new_args->jump_functions = VEC_copy (ipa_jump_func_t, gc,
2074 old_args->jump_functions);
2077 /* Hook that is called by cgraph.c when a node is duplicated. */
2079 static void
2080 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
2081 ATTRIBUTE_UNUSED void *data)
2083 struct ipa_node_params *old_info, *new_info;
2085 ipa_check_create_node_params ();
2086 old_info = IPA_NODE_REF (src);
2087 new_info = IPA_NODE_REF (dst);
2089 new_info->descriptors = VEC_copy (ipa_param_descriptor_t, heap,
2090 old_info->descriptors);
2091 new_info->lattices = NULL;
2092 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
2094 new_info->uses_analysis_done = old_info->uses_analysis_done;
2095 new_info->node_enqueued = old_info->node_enqueued;
2099 /* Analyze a function newly added to the callgraph. */
2101 static void
2102 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2104 ipa_analyze_node (node);
2107 /* Register our cgraph hooks if they are not already there. */
2109 void
2110 ipa_register_cgraph_hooks (void)
2112 if (!edge_removal_hook_holder)
2113 edge_removal_hook_holder =
2114 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
2115 if (!node_removal_hook_holder)
2116 node_removal_hook_holder =
2117 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
2118 if (!edge_duplication_hook_holder)
2119 edge_duplication_hook_holder =
2120 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
2121 if (!node_duplication_hook_holder)
2122 node_duplication_hook_holder =
2123 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
2124 function_insertion_hook_holder =
2125 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
2128 /* Unregister our cgraph hooks. */
2130 static void
2131 ipa_unregister_cgraph_hooks (void)
2133 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
2134 edge_removal_hook_holder = NULL;
2135 cgraph_remove_node_removal_hook (node_removal_hook_holder);
2136 node_removal_hook_holder = NULL;
2137 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
2138 edge_duplication_hook_holder = NULL;
2139 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
2140 node_duplication_hook_holder = NULL;
2141 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
2142 function_insertion_hook_holder = NULL;
2145 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2146 longer needed after ipa-cp. */
2148 void
2149 ipa_free_all_structures_after_ipa_cp (void)
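/* When optimizing, the same structures are still needed by indirect
   inlining; in that case they are released later by
   ipa_free_all_structures_after_iinln. */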
2151 if (!optimize)
2153 ipa_free_all_edge_args ();
2154 ipa_free_all_node_params ();
2155 free_alloc_pool (ipcp_sources_pool);
2156 free_alloc_pool (ipcp_values_pool);
2157 ipa_unregister_cgraph_hooks ();
2161 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2162 longer needed after indirect inlining. */
2164 void
2165 ipa_free_all_structures_after_iinln (void)
2167 ipa_free_all_edge_args ();
2168 ipa_free_all_node_params ();
2169 ipa_unregister_cgraph_hooks ();
2170 if (ipcp_sources_pool)
2171 free_alloc_pool (ipcp_sources_pool);
2172 if (ipcp_values_pool)
2173 free_alloc_pool (ipcp_values_pool);
2176 /* Print the ipa_tree_map data structure (parameter descriptors) of the
2177 function corresponding to NODE to F. */
2179 void
2180 ipa_print_node_params (FILE * f, struct cgraph_node *node)
2182 int i, count;
2183 tree temp;
2184 struct ipa_node_params *info;
2186 if (!node->analyzed)
2187 return;
2188 info = IPA_NODE_REF (node);
2189 fprintf (f, " function %s parameter descriptors:\n",
2190 cgraph_node_name (node));
2191 count = ipa_get_param_count (info);
2192 for (i = 0; i < count; i++)
2194 temp = ipa_get_param (info, i);
2195 if (TREE_CODE (temp) == PARM_DECL)
2196 fprintf (f, " param %d : %s", i,
2197 (DECL_NAME (temp)
2198 ? (*lang_hooks.decl_printable_name) (temp, 2)
2199 : "(unnamed)"));
2200 if (ipa_is_param_used (info, i))
2201 fprintf (f, " used");
2202 fprintf (f, "\n");
2206 /* Print ipa_tree_map data structures of all functions in the
2207 callgraph to F. */
2209 void
2210 ipa_print_all_params (FILE * f)
2212 struct cgraph_node *node;
2214 fprintf (f, "\nFunction parameters:\n");
2215 for (node = cgraph_nodes; node; node = node->next)
2216 ipa_print_node_params (f, node);
2219 /* Return a heap allocated vector containing formal parameters of FNDECL. */
2221 VEC(tree, heap) *
2222 ipa_get_vector_of_formal_parms (tree fndecl)
2224 VEC(tree, heap) *args;
2225 int count;
2226 tree parm;
2228 count = count_formal_params (fndecl);
2229 args = VEC_alloc (tree, heap, count);
2230 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
2231 VEC_quick_push (tree, args, parm);
2233 return args;
2236 /* Return a heap allocated vector containing types of formal parameters of
2237 function type FNTYPE. */
2239 static inline VEC(tree, heap) *
2240 get_vector_of_formal_parm_types (tree fntype)
2242 VEC(tree, heap) *types;
2243 int count = 0;
2244 tree t;
2246 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2247 count++;
2249 types = VEC_alloc (tree, heap, count);
2250 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2251 VEC_quick_push (tree, types, TREE_VALUE (t));
2253 return types;
2256 /* Modify the function declaration FNDECL and its type according to the plan in
2257 ADJUSTMENTS. Also set the base field of each individual adjustment structure
2258 to the actual parameter being modified, which is determined by its
2259 base_index field. */
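/* A hypothetical example of such a plan (not actual compiler output): for

     int foo (struct S *s, int unused);

   an ADJUSTMENTS vector whose first entry has base_index 0, type int and
   by_ref false (replace the pointer by the scalar that is loaded from it)
   and whose second entry has base_index 1 with remove_param set would
   rewrite the declaration to

     int foo (int <synthesized parm>);

   where the new PARM_DECL gets its name from SYNTH_PARM_PREFIX. */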
2261 void
2262 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
2263 const char *synth_parm_prefix)
2265 VEC(tree, heap) *oparms, *otypes;
2266 tree orig_type, new_type = NULL;
2267 tree old_arg_types, t, new_arg_types = NULL;
2268 tree parm, *link = &DECL_ARGUMENTS (fndecl);
2269 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2270 tree new_reversed = NULL;
2271 bool care_for_types, last_parm_void;
2273 if (!synth_parm_prefix)
2274 synth_parm_prefix = "SYNTH";
2276 oparms = ipa_get_vector_of_formal_parms (fndecl);
2277 orig_type = TREE_TYPE (fndecl);
2278 old_arg_types = TYPE_ARG_TYPES (orig_type);
2280 /* The following test is an ugly hack; some functions simply do not have any
2281 arguments in their type. This is probably a bug but we have to cope with it. */
2282 care_for_types = (old_arg_types != NULL_TREE);
2283 if (care_for_types)
2285 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
2286 == void_type_node);
2287 otypes = get_vector_of_formal_parm_types (orig_type);
2288 if (last_parm_void)
2289 gcc_assert (VEC_length (tree, oparms) + 1 == VEC_length (tree, otypes));
2290 else
2291 gcc_assert (VEC_length (tree, oparms) == VEC_length (tree, otypes));
2293 else
2295 last_parm_void = false;
2296 otypes = NULL;
2299 for (i = 0; i < len; i++)
2301 struct ipa_parm_adjustment *adj;
2302 gcc_assert (link);
2304 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2305 parm = VEC_index (tree, oparms, adj->base_index);
2306 adj->base = parm;
2308 if (adj->copy_param)
2310 if (care_for_types)
2311 new_arg_types = tree_cons (NULL_TREE, VEC_index (tree, otypes,
2312 adj->base_index),
2313 new_arg_types);
2314 *link = parm;
2315 link = &DECL_CHAIN (parm);
2317 else if (!adj->remove_param)
2319 tree new_parm;
2320 tree ptype;
2322 if (adj->by_ref)
2323 ptype = build_pointer_type (adj->type);
2324 else
2325 ptype = adj->type;
2327 if (care_for_types)
2328 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
2330 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
2331 ptype);
2332 DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);
2334 DECL_ARTIFICIAL (new_parm) = 1;
2335 DECL_ARG_TYPE (new_parm) = ptype;
2336 DECL_CONTEXT (new_parm) = fndecl;
2337 TREE_USED (new_parm) = 1;
2338 DECL_IGNORED_P (new_parm) = 1;
2339 layout_decl (new_parm, 0);
2341 add_referenced_var (new_parm);
2342 mark_sym_for_renaming (new_parm);
2343 adj->base = parm;
2344 adj->reduction = new_parm;
2346 *link = new_parm;
2348 link = &DECL_CHAIN (new_parm);
2352 *link = NULL_TREE;
2354 if (care_for_types)
2356 new_reversed = nreverse (new_arg_types);
2357 if (last_parm_void)
2359 if (new_reversed)
2360 TREE_CHAIN (new_arg_types) = void_list_node;
2361 else
2362 new_reversed = void_list_node;
2366 /* Copy the original type so as to preserve as much as possible from it
2367 (debug info, attribute lists etc.).
2368 The exception is METHOD_TYPEs, which must have a THIS argument;
2369 when we are asked to remove it, we need to build a new FUNCTION_TYPE
2370 instead. */
2371 if (TREE_CODE (orig_type) != METHOD_TYPE
2372 || (VEC_index (ipa_parm_adjustment_t, adjustments, 0)->copy_param
2373 && VEC_index (ipa_parm_adjustment_t, adjustments, 0)->base_index == 0))
2375 new_type = build_distinct_type_copy (orig_type);
2376 TYPE_ARG_TYPES (new_type) = new_reversed;
2378 else
2380 new_type
2381 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
2382 new_reversed));
2383 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
2384 DECL_VINDEX (fndecl) = NULL_TREE;
2387 /* When signature changes, we need to clear builtin info. */
2388 if (DECL_BUILT_IN (fndecl))
2390 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
2391 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
2394 /* This is a new type, not a copy of an old type. Need to reassociate
2395 variants. We can handle everything except the main variant lazily. */
2396 t = TYPE_MAIN_VARIANT (orig_type);
2397 if (orig_type != t)
2399 TYPE_MAIN_VARIANT (new_type) = t;
2400 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
2401 TYPE_NEXT_VARIANT (t) = new_type;
2403 else
2405 TYPE_MAIN_VARIANT (new_type) = new_type;
2406 TYPE_NEXT_VARIANT (new_type) = NULL;
2409 TREE_TYPE (fndecl) = new_type;
2410 DECL_VIRTUAL_P (fndecl) = 0;
2411 if (otypes)
2412 VEC_free (tree, heap, otypes);
2413 VEC_free (tree, heap, oparms);
2416 /* Modify the actual arguments of the call statement STMT as indicated in
2417 ADJUSTMENTS. If this is a directly recursive call, CS must be NULL. Otherwise
2418 it must be the corresponding call graph edge. */
2420 void
2421 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
2422 ipa_parm_adjustment_vec adjustments)
2424 VEC(tree, heap) *vargs;
2425 VEC(tree, gc) **debug_args = NULL;
2426 gimple new_stmt;
2427 gimple_stmt_iterator gsi;
2428 tree callee_decl;
2429 int i, len;
2431 len = VEC_length (ipa_parm_adjustment_t, adjustments);
2432 vargs = VEC_alloc (tree, heap, len);
2433 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
2435 gsi = gsi_for_stmt (stmt);
2436 for (i = 0; i < len; i++)
2438 struct ipa_parm_adjustment *adj;
2440 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2442 if (adj->copy_param)
2444 tree arg = gimple_call_arg (stmt, adj->base_index);
2446 VEC_quick_push (tree, vargs, arg);
2448 else if (!adj->remove_param)
2450 tree expr, base, off;
2451 location_t loc;
2453 /* We create a new parameter out of the value of the old one; we can
2454 perform the following kinds of transformations:
2456 - A scalar passed by reference is converted to a scalar passed by
2457 value. (adj->by_ref is false and the type of the original
2458 actual argument is a pointer to a scalar).
2460 - A part of an aggregate is passed instead of the whole aggregate.
2461 The part can be passed either by value or by reference, this is
2462 determined by value of adj->by_ref. Moreover, the code below
2463 handles both situations when the original aggregate is passed by
2464 value (its type is not a pointer) and when it is passed by
2465 reference (it is a pointer to an aggregate).
2467 When the new argument is passed by reference (adj->by_ref is true)
2468 it must be a part of an aggregate and therefore we form it by
2469 simply taking the address of a reference inside the original
2470 aggregate. */
2472 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
2473 base = gimple_call_arg (stmt, adj->base_index);
2474 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
2475 : EXPR_LOCATION (base);
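/* If the original argument is a pointer (and not merely the address of a
   decl), we can dereference it directly at the adjusted offset. Otherwise
   strip the ADDR_EXPR, find the base of the object, fold the constant
   offsets together and take its address again. */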
2477 if (TREE_CODE (base) != ADDR_EXPR
2478 && POINTER_TYPE_P (TREE_TYPE (base)))
2479 off = build_int_cst (adj->alias_ptr_type,
2480 adj->offset / BITS_PER_UNIT);
2481 else
2483 HOST_WIDE_INT base_offset;
2484 tree prev_base;
2486 if (TREE_CODE (base) == ADDR_EXPR)
2487 base = TREE_OPERAND (base, 0);
2488 prev_base = base;
2489 base = get_addr_base_and_unit_offset (base, &base_offset);
2490 /* Aggregate arguments can have non-invariant addresses. */
2491 if (!base)
2493 base = build_fold_addr_expr (prev_base);
2494 off = build_int_cst (adj->alias_ptr_type,
2495 adj->offset / BITS_PER_UNIT);
2497 else if (TREE_CODE (base) == MEM_REF)
2499 off = build_int_cst (adj->alias_ptr_type,
2500 base_offset
2501 + adj->offset / BITS_PER_UNIT);
2502 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
2503 off);
2504 base = TREE_OPERAND (base, 0);
2506 else
2508 off = build_int_cst (adj->alias_ptr_type,
2509 base_offset
2510 + adj->offset / BITS_PER_UNIT);
2511 base = build_fold_addr_expr (base);
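/* When the new argument is passed by value we dereference BASE at OFF
   directly; make sure the resulting MEM_REF does not claim more alignment
   than is actually known about that memory location. */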
2515 if (!adj->by_ref)
2517 tree type = adj->type;
2518 unsigned int align;
2519 unsigned HOST_WIDE_INT misalign;
2520 align = get_pointer_alignment_1 (base, &misalign);
2521 misalign += (double_int_sext (tree_to_double_int (off),
2522 TYPE_PRECISION (TREE_TYPE (off))).low
2523 * BITS_PER_UNIT);
2524 misalign = misalign & (align - 1);
2525 if (misalign != 0)
2526 align = (misalign & -misalign);
2527 if (align < TYPE_ALIGN (type))
2528 type = build_aligned_type (type, align);
2529 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
2531 else
2533 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
2534 expr = build_fold_addr_expr (expr);
2537 expr = force_gimple_operand_gsi (&gsi, expr,
2538 adj->by_ref
2539 || is_gimple_reg_type (adj->type),
2540 NULL, true, GSI_SAME_STMT);
2541 VEC_quick_push (tree, vargs, expr);
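/* Arguments that are not plain copies lose their original PARM_DECL in the
   callee; bind the incoming value to a DEBUG_EXPR_DECL recorded in the
   callee's debug arguments so that debug info can still refer to it. */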
2543 if (!adj->copy_param && MAY_HAVE_DEBUG_STMTS)
2545 unsigned int ix;
2546 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
2547 gimple def_temp;
2549 arg = gimple_call_arg (stmt, adj->base_index);
2550 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
2552 if (!fold_convertible_p (TREE_TYPE (origin), arg))
2553 continue;
2554 arg = fold_convert_loc (gimple_location (stmt),
2555 TREE_TYPE (origin), arg);
2557 if (debug_args == NULL)
2558 debug_args = decl_debug_args_insert (callee_decl);
2559 for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl); ix += 2)
2560 if (ddecl == origin)
2562 ddecl = VEC_index (tree, *debug_args, ix + 1);
2563 break;
2565 if (ddecl == NULL)
2567 ddecl = make_node (DEBUG_EXPR_DECL);
2568 DECL_ARTIFICIAL (ddecl) = 1;
2569 TREE_TYPE (ddecl) = TREE_TYPE (origin);
2570 DECL_MODE (ddecl) = DECL_MODE (origin);
2572 VEC_safe_push (tree, gc, *debug_args, origin);
2573 VEC_safe_push (tree, gc, *debug_args, ddecl);
2575 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg),
2576 stmt);
2577 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
2581 if (dump_file && (dump_flags & TDF_DETAILS))
2583 fprintf (dump_file, "replacing stmt:");
2584 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
2587 new_stmt = gimple_build_call_vec (callee_decl, vargs);
2588 VEC_free (tree, heap, vargs);
2589 if (gimple_call_lhs (stmt))
2590 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
2592 gimple_set_block (new_stmt, gimple_block (stmt));
2593 if (gimple_has_location (stmt))
2594 gimple_set_location (new_stmt, gimple_location (stmt));
2595 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
2596 gimple_call_copy_flags (new_stmt, stmt);
2598 if (dump_file && (dump_flags & TDF_DETAILS))
2600 fprintf (dump_file, "with stmt:");
2601 print_gimple_stmt (dump_file, new_stmt, 0, 0);
2602 fprintf (dump_file, "\n");
2604 gsi_replace (&gsi, new_stmt, true);
2605 if (cs)
2606 cgraph_set_call_stmt (cs, new_stmt);
2607 update_ssa (TODO_update_ssa);
2608 free_dominance_info (CDI_DOMINATORS);
2611 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
2613 static bool
2614 index_in_adjustments_multiple_times_p (int base_index,
2615 ipa_parm_adjustment_vec adjustments)
2617 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2618 bool one = false;
2620 for (i = 0; i < len; i++)
2622 struct ipa_parm_adjustment *adj;
2623 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2625 if (adj->base_index == base_index)
2627 if (one)
2628 return true;
2629 else
2630 one = true;
2633 return false;
2637 /* Return adjustments that should have the same effect on function parameters
2638 and call arguments as if they were first changed according to adjustments in
2639 INNER and then by adjustments in OUTER. */
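/* A hypothetical illustration: if INNER turns (A, B, C) into (A, C) by
   removing B, and OUTER then turns (A, C) into (C) by removing its first
   parameter, the combined vector describes going from (A, B, C) directly
   to (C), i.e. the removal of both A and B. */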
2641 ipa_parm_adjustment_vec
2642 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
2643 ipa_parm_adjustment_vec outer)
2645 int i, outlen = VEC_length (ipa_parm_adjustment_t, outer);
2646 int inlen = VEC_length (ipa_parm_adjustment_t, inner);
2647 int removals = 0;
2648 ipa_parm_adjustment_vec adjustments, tmp;
2650 tmp = VEC_alloc (ipa_parm_adjustment_t, heap, inlen);
2651 for (i = 0; i < inlen; i++)
2653 struct ipa_parm_adjustment *n;
2654 n = VEC_index (ipa_parm_adjustment_t, inner, i);
2656 if (n->remove_param)
2657 removals++;
2658 else
2659 VEC_quick_push (ipa_parm_adjustment_t, tmp, n);
2662 adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, outlen + removals);
2663 for (i = 0; i < outlen; i++)
2665 struct ipa_parm_adjustment *r;
2666 struct ipa_parm_adjustment *out = VEC_index (ipa_parm_adjustment_t,
2667 outer, i);
2668 struct ipa_parm_adjustment *in = VEC_index (ipa_parm_adjustment_t, tmp,
2669 out->base_index);
2671 gcc_assert (!in->remove_param);
2672 if (out->remove_param)
2674 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
2676 r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
2677 memset (r, 0, sizeof (*r));
2678 r->remove_param = true;
2680 continue;
2683 r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
2684 memset (r, 0, sizeof (*r));
2685 r->base_index = in->base_index;
2686 r->type = out->type;
2688 /* FIXME: Create nonlocal value too. */
2690 if (in->copy_param && out->copy_param)
2691 r->copy_param = true;
2692 else if (in->copy_param)
2693 r->offset = out->offset;
2694 else if (out->copy_param)
2695 r->offset = in->offset;
2696 else
2697 r->offset = in->offset + out->offset;
2700 for (i = 0; i < inlen; i++)
2702 struct ipa_parm_adjustment *n = VEC_index (ipa_parm_adjustment_t,
2703 inner, i);
2705 if (n->remove_param)
2706 VEC_quick_push (ipa_parm_adjustment_t, adjustments, n);
2709 VEC_free (ipa_parm_adjustment_t, heap, tmp);
2710 return adjustments;
2713 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a human-friendly
2714 way, assuming they are meant to be applied to FNDECL. */
2716 void
2717 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
2718 tree fndecl)
2720 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2721 bool first = true;
2722 VEC(tree, heap) *parms = ipa_get_vector_of_formal_parms (fndecl);
2724 fprintf (file, "IPA param adjustments: ");
2725 for (i = 0; i < len; i++)
2727 struct ipa_parm_adjustment *adj;
2728 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
2730 if (!first)
2731 fprintf (file, " ");
2732 else
2733 first = false;
2735 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
2736 print_generic_expr (file, VEC_index (tree, parms, adj->base_index), 0);
2737 if (adj->base)
2739 fprintf (file, ", base: ");
2740 print_generic_expr (file, adj->base, 0);
2742 if (adj->reduction)
2744 fprintf (file, ", reduction: ");
2745 print_generic_expr (file, adj->reduction, 0);
2747 if (adj->new_ssa_base)
2749 fprintf (file, ", new_ssa_base: ");
2750 print_generic_expr (file, adj->new_ssa_base, 0);
2753 if (adj->copy_param)
2754 fprintf (file, ", copy_param");
2755 else if (adj->remove_param)
2756 fprintf (file, ", remove_param");
2757 else
2758 fprintf (file, ", offset %li", (long) adj->offset);
2759 if (adj->by_ref)
2760 fprintf (file, ", by_ref");
2761 print_node_brief (file, ", type: ", adj->type, 0);
2762 fprintf (file, "\n");
2764 VEC_free (tree, heap, parms);
2767 /* Stream out jump function JUMP_FUNC to OB. */
2769 static void
2770 ipa_write_jump_function (struct output_block *ob,
2771 struct ipa_jump_func *jump_func)
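/* The fields must be streamed in exactly the order in which
   ipa_read_jump_function below reads them back. */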
2773 streamer_write_uhwi (ob, jump_func->type);
2775 switch (jump_func->type)
2777 case IPA_JF_UNKNOWN:
2778 break;
2779 case IPA_JF_KNOWN_TYPE:
2780 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
2781 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
2782 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
2783 break;
2784 case IPA_JF_CONST:
2785 gcc_assert (
2786 EXPR_LOCATION (jump_func->value.constant) == UNKNOWN_LOCATION);
2787 stream_write_tree (ob, jump_func->value.constant, true);
2788 break;
2789 case IPA_JF_PASS_THROUGH:
2790 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
2791 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
2792 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
2793 break;
2794 case IPA_JF_ANCESTOR:
2795 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
2796 stream_write_tree (ob, jump_func->value.ancestor.type, true);
2797 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
2798 break;
2799 case IPA_JF_CONST_MEMBER_PTR:
2800 stream_write_tree (ob, jump_func->value.member_cst.pfn, true);
2801 stream_write_tree (ob, jump_func->value.member_cst.delta, false);
2802 break;
2806 /* Read in jump function JUMP_FUNC from IB. */
2808 static void
2809 ipa_read_jump_function (struct lto_input_block *ib,
2810 struct ipa_jump_func *jump_func,
2811 struct data_in *data_in)
2813 jump_func->type = (enum jump_func_type) streamer_read_uhwi (ib);
2815 switch (jump_func->type)
2817 case IPA_JF_UNKNOWN:
2818 break;
2819 case IPA_JF_KNOWN_TYPE:
2820 jump_func->value.known_type.offset = streamer_read_uhwi (ib);
2821 jump_func->value.known_type.base_type = stream_read_tree (ib, data_in);
2822 jump_func->value.known_type.component_type = stream_read_tree (ib,
2823 data_in);
2824 break;
2825 case IPA_JF_CONST:
2826 jump_func->value.constant = stream_read_tree (ib, data_in);
2827 break;
2828 case IPA_JF_PASS_THROUGH:
2829 jump_func->value.pass_through.operand = stream_read_tree (ib, data_in);
2830 jump_func->value.pass_through.formal_id = streamer_read_uhwi (ib);
2831 jump_func->value.pass_through.operation
2832 = (enum tree_code) streamer_read_uhwi (ib);
2833 break;
2834 case IPA_JF_ANCESTOR:
2835 jump_func->value.ancestor.offset = streamer_read_uhwi (ib);
2836 jump_func->value.ancestor.type = stream_read_tree (ib, data_in);
2837 jump_func->value.ancestor.formal_id = streamer_read_uhwi (ib);
2838 break;
2839 case IPA_JF_CONST_MEMBER_PTR:
2840 jump_func->value.member_cst.pfn = stream_read_tree (ib, data_in);
2841 jump_func->value.member_cst.delta = stream_read_tree (ib, data_in);
2842 break;
2846 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
2847 relevant to indirect inlining to OB. */
2849 static void
2850 ipa_write_indirect_edge_info (struct output_block *ob,
2851 struct cgraph_edge *cs)
2853 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2854 struct bitpack_d bp;
2856 streamer_write_hwi (ob, ii->param_index);
2857 streamer_write_hwi (ob, ii->anc_offset);
2858 bp = bitpack_create (ob->main_stream);
2859 bp_pack_value (&bp, ii->polymorphic, 1);
2860 streamer_write_bitpack (&bp);
2862 if (ii->polymorphic)
2864 streamer_write_hwi (ob, ii->otr_token);
2865 stream_write_tree (ob, ii->otr_type, true);
2869 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
2870 relevant to indirect inlining from IB. */
2872 static void
2873 ipa_read_indirect_edge_info (struct lto_input_block *ib,
2874 struct data_in *data_in ATTRIBUTE_UNUSED,
2875 struct cgraph_edge *cs)
2877 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2878 struct bitpack_d bp;
2880 ii->param_index = (int) streamer_read_hwi (ib);
2881 ii->anc_offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
2882 bp = streamer_read_bitpack (ib);
2883 ii->polymorphic = bp_unpack_value (&bp, 1);
2884 if (ii->polymorphic)
2886 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
2887 ii->otr_type = stream_read_tree (ib, data_in);
2891 /* Stream out NODE info to OB. */
2893 static void
2894 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
2896 int node_ref;
2897 lto_cgraph_encoder_t encoder;
2898 struct ipa_node_params *info = IPA_NODE_REF (node);
2899 int j;
2900 struct cgraph_edge *e;
2901 struct bitpack_d bp;
2903 encoder = ob->decl_state->cgraph_node_encoder;
2904 node_ref = lto_cgraph_encoder_encode (encoder, node);
2905 streamer_write_uhwi (ob, node_ref);
2907 bp = bitpack_create (ob->main_stream);
2908 gcc_assert (info->uses_analysis_done
2909 || ipa_get_param_count (info) == 0);
2910 gcc_assert (!info->node_enqueued);
2911 gcc_assert (!info->ipcp_orig_node);
2912 for (j = 0; j < ipa_get_param_count (info); j++)
2913 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
2914 streamer_write_bitpack (&bp);
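/* Jump functions are streamed for ordinary and then for indirect call
   sites in edge order; the reader walks NODE's edges in the same order, so
   no explicit edge identification is necessary. */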
2915 for (e = node->callees; e; e = e->next_callee)
2917 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2919 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
2920 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
2921 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
2923 for (e = node->indirect_calls; e; e = e->next_callee)
2925 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2927 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
2928 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
2929 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
2930 ipa_write_indirect_edge_info (ob, e);
2934 /* Stream in NODE info from IB. */
2936 static void
2937 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
2938 struct data_in *data_in)
2940 struct ipa_node_params *info = IPA_NODE_REF (node);
2941 int k;
2942 struct cgraph_edge *e;
2943 struct bitpack_d bp;
2945 ipa_initialize_node_params (node);
2947 bp = streamer_read_bitpack (ib);
2948 if (ipa_get_param_count (info) != 0)
2949 info->uses_analysis_done = true;
2950 info->node_enqueued = false;
2951 for (k = 0; k < ipa_get_param_count (info); k++)
2952 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
2953 for (e = node->callees; e; e = e->next_callee)
2955 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2956 int count = streamer_read_uhwi (ib);
2958 if (!count)
2959 continue;
2960 VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions, count);
2962 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
2963 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), data_in);
2965 for (e = node->indirect_calls; e; e = e->next_callee)
2967 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2968 int count = streamer_read_uhwi (ib);
2970 if (count)
2972 VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions,
2973 count);
2974 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
2975 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k),
2976 data_in);
2978 ipa_read_indirect_edge_info (ib, data_in, e);
2982 /* Write jump functions for nodes in SET. */
2984 void
2985 ipa_prop_write_jump_functions (cgraph_node_set set)
2987 struct cgraph_node *node;
2988 struct output_block *ob;
2989 unsigned int count = 0;
2990 cgraph_node_set_iterator csi;
2992 if (!ipa_node_params_vector)
2993 return;
2995 ob = create_output_block (LTO_section_jump_functions);
2996 ob->cgraph_node = NULL;
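/* Count the nodes with IPA information first so that the reader knows how
   many records to expect, then stream the per-node data itself. */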
2997 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
2999 node = csi_node (csi);
3000 if (cgraph_function_with_gimple_body_p (node)
3001 && IPA_NODE_REF (node) != NULL)
3002 count++;
3005 streamer_write_uhwi (ob, count);
3007 /* Process all of the functions. */
3008 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
3010 node = csi_node (csi);
3011 if (cgraph_function_with_gimple_body_p (node)
3012 && IPA_NODE_REF (node) != NULL)
3013 ipa_write_node_info (ob, node);
3015 streamer_write_char_stream (ob->main_stream, 0);
3016 produce_asm (ob, NULL);
3017 destroy_output_block (ob);
3020 /* Read section in file FILE_DATA of length LEN with data DATA. */
3022 static void
3023 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
3024 size_t len)
3026 const struct lto_function_header *header =
3027 (const struct lto_function_header *) data;
3028 const int cfg_offset = sizeof (struct lto_function_header);
3029 const int main_offset = cfg_offset + header->cfg_size;
3030 const int string_offset = main_offset + header->main_size;
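/* The offsets of the main stream (which holds the jump function records)
   and of the string table are computed from the part sizes recorded in the
   standard LTO function header at the start of the section. */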
3031 struct data_in *data_in;
3032 struct lto_input_block ib_main;
3033 unsigned int i;
3034 unsigned int count;
3036 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
3037 header->main_size);
3039 data_in =
3040 lto_data_in_create (file_data, (const char *) data + string_offset,
3041 header->string_size, NULL);
3042 count = streamer_read_uhwi (&ib_main);
3044 for (i = 0; i < count; i++)
3046 unsigned int index;
3047 struct cgraph_node *node;
3048 lto_cgraph_encoder_t encoder;
3050 index = streamer_read_uhwi (&ib_main);
3051 encoder = file_data->cgraph_node_encoder;
3052 node = lto_cgraph_encoder_deref (encoder, index);
3053 gcc_assert (node->analyzed);
3054 ipa_read_node_info (&ib_main, node, data_in);
3056 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
3057 len);
3058 lto_data_in_delete (data_in);
3061 /* Read ipcp jump functions. */
3063 void
3064 ipa_prop_read_jump_functions (void)
3066 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
3067 struct lto_file_decl_data *file_data;
3068 unsigned int j = 0;
3070 ipa_check_create_node_params ();
3071 ipa_check_create_edge_args ();
3072 ipa_register_cgraph_hooks ();
3074 while ((file_data = file_data_vec[j++]))
3076 size_t len;
3077 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
3079 if (data)
3080 ipa_prop_read_section (file_data, data, len);
3084 /* After merging units, we can get a mismatch in argument counts.
3085 Decl merging might also have rendered parameter lists obsolete.
3086 Also compute called_with_variable_arg info. */
3088 void
3089 ipa_update_after_lto_read (void)
3091 struct cgraph_node *node;
3093 ipa_check_create_node_params ();
3094 ipa_check_create_edge_args ();
3096 for (node = cgraph_nodes; node; node = node->next)
3097 if (node->analyzed)
3098 ipa_initialize_node_params (node);