/* gcc/ipa-prop.c — GCC 4.9 development sources.
   Extracted from official-gcc.git, blob 2fbc9d41541c87ead65c6026acebec2f3d980402,
   commit subject: "re PR c++/19476 (Missed null checking elimination with new)".  */
1 /* Interprocedural analyses.
2 Copyright (C) 2005-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tree.h"
24 #include "langhooks.h"
25 #include "ggc.h"
26 #include "target.h"
27 #include "cgraph.h"
28 #include "ipa-prop.h"
29 #include "tree-ssa.h"
30 #include "tree-pass.h"
31 #include "tree-inline.h"
32 #include "ipa-inline.h"
33 #include "gimple.h"
34 #include "flags.h"
35 #include "diagnostic.h"
36 #include "gimple-pretty-print.h"
37 #include "lto-streamer.h"
38 #include "data-streamer.h"
39 #include "tree-streamer.h"
40 #include "params.h"
41 #include "ipa-utils.h"
43 /* Intermediate information about a parameter that is only useful during the
44 run of ipa_analyze_node and is not kept afterwards. */
46 struct param_analysis_info
48 bool parm_modified, ref_modified, pt_modified;
49 bitmap parm_visited_statements, pt_visited_statements;
52 /* Vector where the parameter infos are actually stored. */
53 vec<ipa_node_params_t> ipa_node_params_vector;
54 /* Vector of known aggregate values in cloned nodes. */
55 vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
56 /* Vector where the parameter infos are actually stored. */
57 vec<ipa_edge_args_t, va_gc> *ipa_edge_args_vector;
59 /* Holders of ipa cgraph hooks: */
60 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
61 static struct cgraph_node_hook_list *node_removal_hook_holder;
62 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
63 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
64 static struct cgraph_node_hook_list *function_insertion_hook_holder;
66 /* Description of a reference to an IPA constant. */
67 struct ipa_cst_ref_desc
69 /* Edge that corresponds to the statement which took the reference. */
70 struct cgraph_edge *cs;
71 /* Linked list of duplicates created when call graph edges are cloned. */
72 struct ipa_cst_ref_desc *next_duplicate;
73 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
74 if out of control. */
75 int refcount;
78 /* Allocation pool for reference descriptions. */
80 static alloc_pool ipa_refdesc_pool;
82 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
83 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
85 static bool
86 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
88 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->symbol.decl);
89 struct cl_optimization *os;
91 if (!fs_opts)
92 return false;
93 os = TREE_OPTIMIZATION (fs_opts);
94 return !os->x_optimize || !os->x_flag_ipa_cp;
97 /* Return index of the formal whose tree is PTREE in function which corresponds
98 to INFO. */
100 static int
101 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor_t> descriptors, tree ptree)
103 int i, count;
105 count = descriptors.length ();
106 for (i = 0; i < count; i++)
107 if (descriptors[i].decl == ptree)
108 return i;
110 return -1;
113 /* Return index of the formal whose tree is PTREE in function which corresponds
114 to INFO. */
117 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
119 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
122 /* Populate the param_decl field in parameter DESCRIPTORS that correspond to
123 NODE. */
125 static void
126 ipa_populate_param_decls (struct cgraph_node *node,
127 vec<ipa_param_descriptor_t> &descriptors)
129 tree fndecl;
130 tree fnargs;
131 tree parm;
132 int param_num;
134 fndecl = node->symbol.decl;
135 gcc_assert (gimple_has_body_p (fndecl));
136 fnargs = DECL_ARGUMENTS (fndecl);
137 param_num = 0;
138 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
140 descriptors[param_num].decl = parm;
141 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm));
142 param_num++;
146 /* Return how many formal parameters FNDECL has. */
148 static inline int
149 count_formal_params (tree fndecl)
151 tree parm;
152 int count = 0;
153 gcc_assert (gimple_has_body_p (fndecl));
155 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
156 count++;
158 return count;
161 /* Return the declaration of Ith formal parameter of the function corresponding
162 to INFO. Note there is no setter function as this array is built just once
163 using ipa_initialize_node_params. */
165 void
166 ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
168 fprintf (file, "param #%i", i);
169 if (info->descriptors[i].decl)
171 fprintf (file, " ");
172 print_generic_expr (file, info->descriptors[i].decl, 0);
176 /* Initialize the ipa_node_params structure associated with NODE
177 to hold PARAM_COUNT parameters. */
179 void
180 ipa_alloc_node_params (struct cgraph_node *node, int param_count)
182 struct ipa_node_params *info = IPA_NODE_REF (node);
184 if (!info->descriptors.exists () && param_count)
185 info->descriptors.safe_grow_cleared (param_count);
188 /* Initialize the ipa_node_params structure associated with NODE by counting
189 the function parameters, creating the descriptors and populating their
190 param_decls. */
192 void
193 ipa_initialize_node_params (struct cgraph_node *node)
195 struct ipa_node_params *info = IPA_NODE_REF (node);
197 if (!info->descriptors.exists ())
199 ipa_alloc_node_params (node, count_formal_params (node->symbol.decl));
200 ipa_populate_param_decls (node, info->descriptors);
204 /* Print the jump functions associated with call graph edge CS to file F. */
206 static void
207 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
209 int i, count;
211 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
212 for (i = 0; i < count; i++)
214 struct ipa_jump_func *jump_func;
215 enum jump_func_type type;
217 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
218 type = jump_func->type;
220 fprintf (f, " param %d: ", i);
221 if (type == IPA_JF_UNKNOWN)
222 fprintf (f, "UNKNOWN\n");
223 else if (type == IPA_JF_KNOWN_TYPE)
225 fprintf (f, "KNOWN TYPE: base ");
226 print_generic_expr (f, jump_func->value.known_type.base_type, 0);
227 fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
228 jump_func->value.known_type.offset);
229 print_generic_expr (f, jump_func->value.known_type.component_type, 0);
230 fprintf (f, "\n");
232 else if (type == IPA_JF_CONST)
234 tree val = jump_func->value.constant.value;
235 fprintf (f, "CONST: ");
236 print_generic_expr (f, val, 0);
237 if (TREE_CODE (val) == ADDR_EXPR
238 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
240 fprintf (f, " -> ");
241 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
244 fprintf (f, "\n");
246 else if (type == IPA_JF_PASS_THROUGH)
248 fprintf (f, "PASS THROUGH: ");
249 fprintf (f, "%d, op %s",
250 jump_func->value.pass_through.formal_id,
251 tree_code_name[(int)
252 jump_func->value.pass_through.operation]);
253 if (jump_func->value.pass_through.operation != NOP_EXPR)
255 fprintf (f, " ");
256 print_generic_expr (f,
257 jump_func->value.pass_through.operand, 0);
259 if (jump_func->value.pass_through.agg_preserved)
260 fprintf (f, ", agg_preserved");
261 if (jump_func->value.pass_through.type_preserved)
262 fprintf (f, ", type_preserved");
263 fprintf (f, "\n");
265 else if (type == IPA_JF_ANCESTOR)
267 fprintf (f, "ANCESTOR: ");
268 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
269 jump_func->value.ancestor.formal_id,
270 jump_func->value.ancestor.offset);
271 print_generic_expr (f, jump_func->value.ancestor.type, 0);
272 if (jump_func->value.ancestor.agg_preserved)
273 fprintf (f, ", agg_preserved");
274 if (jump_func->value.ancestor.type_preserved)
275 fprintf (f, ", type_preserved");
276 fprintf (f, "\n");
279 if (jump_func->agg.items)
281 struct ipa_agg_jf_item *item;
282 int j;
284 fprintf (f, " Aggregate passed by %s:\n",
285 jump_func->agg.by_ref ? "reference" : "value");
286 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
288 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
289 item->offset);
290 if (TYPE_P (item->value))
291 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
292 tree_low_cst (TYPE_SIZE (item->value), 1));
293 else
295 fprintf (f, "cst: ");
296 print_generic_expr (f, item->value, 0);
298 fprintf (f, "\n");
305 /* Print the jump functions of all arguments on all call graph edges going from
306 NODE to file F. */
308 void
309 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
311 struct cgraph_edge *cs;
313 fprintf (f, " Jump functions of caller %s/%i:\n", cgraph_node_name (node),
314 node->symbol.order);
315 for (cs = node->callees; cs; cs = cs->next_callee)
317 if (!ipa_edge_args_info_available_for_edge_p (cs))
318 continue;
320 fprintf (f, " callsite %s/%i -> %s/%i : \n",
321 xstrdup (cgraph_node_name (node)), node->symbol.order,
322 xstrdup (cgraph_node_name (cs->callee)),
323 cs->callee->symbol.order);
324 ipa_print_node_jump_functions_for_edge (f, cs);
327 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
329 struct cgraph_indirect_call_info *ii;
330 if (!ipa_edge_args_info_available_for_edge_p (cs))
331 continue;
333 ii = cs->indirect_info;
334 if (ii->agg_contents)
335 fprintf (f, " indirect %s callsite, calling param %i, "
336 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
337 ii->member_ptr ? "member ptr" : "aggregate",
338 ii->param_index, ii->offset,
339 ii->by_ref ? "by reference" : "by_value");
340 else
341 fprintf (f, " indirect %s callsite, calling param %i",
342 ii->polymorphic ? "polymorphic" : "simple", ii->param_index);
344 if (cs->call_stmt)
346 fprintf (f, ", for stmt ");
347 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
349 else
350 fprintf (f, "\n");
351 ipa_print_node_jump_functions_for_edge (f, cs);
355 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
357 void
358 ipa_print_all_jump_functions (FILE *f)
360 struct cgraph_node *node;
362 fprintf (f, "\nJump functions:\n");
363 FOR_EACH_FUNCTION (node)
365 ipa_print_node_jump_functions (f, node);
369 /* Set JFUNC to be a known type jump function. */
371 static void
372 ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
373 tree base_type, tree component_type)
375 gcc_assert (TREE_CODE (component_type) == RECORD_TYPE
376 && TYPE_BINFO (component_type));
377 jfunc->type = IPA_JF_KNOWN_TYPE;
378 jfunc->value.known_type.offset = offset,
379 jfunc->value.known_type.base_type = base_type;
380 jfunc->value.known_type.component_type = component_type;
383 /* Set JFUNC to be a copy of another jmp (to be used by jump function
384 combination code). The two functions will share their rdesc. */
386 static void
387 ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
388 struct ipa_jump_func *src)
391 gcc_checking_assert (src->type == IPA_JF_CONST);
392 dst->type = IPA_JF_CONST;
393 dst->value.constant = src->value.constant;
396 /* Set JFUNC to be a constant jmp function. */
398 static void
399 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
400 struct cgraph_edge *cs)
402 constant = unshare_expr (constant);
403 if (constant && EXPR_P (constant))
404 SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
405 jfunc->type = IPA_JF_CONST;
406 jfunc->value.constant.value = unshare_expr_without_location (constant);
408 if (TREE_CODE (constant) == ADDR_EXPR
409 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
411 struct ipa_cst_ref_desc *rdesc;
412 if (!ipa_refdesc_pool)
413 ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
414 sizeof (struct ipa_cst_ref_desc), 32);
416 rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
417 rdesc->cs = cs;
418 rdesc->next_duplicate = NULL;
419 rdesc->refcount = 1;
420 jfunc->value.constant.rdesc = rdesc;
422 else
423 jfunc->value.constant.rdesc = NULL;
426 /* Set JFUNC to be a simple pass-through jump function. */
427 static void
428 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
429 bool agg_preserved, bool type_preserved)
431 jfunc->type = IPA_JF_PASS_THROUGH;
432 jfunc->value.pass_through.operand = NULL_TREE;
433 jfunc->value.pass_through.formal_id = formal_id;
434 jfunc->value.pass_through.operation = NOP_EXPR;
435 jfunc->value.pass_through.agg_preserved = agg_preserved;
436 jfunc->value.pass_through.type_preserved = type_preserved;
439 /* Set JFUNC to be an arithmetic pass through jump function. */
441 static void
442 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
443 tree operand, enum tree_code operation)
445 jfunc->type = IPA_JF_PASS_THROUGH;
446 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
447 jfunc->value.pass_through.formal_id = formal_id;
448 jfunc->value.pass_through.operation = operation;
449 jfunc->value.pass_through.agg_preserved = false;
450 jfunc->value.pass_through.type_preserved = false;
453 /* Set JFUNC to be an ancestor jump function. */
455 static void
456 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
457 tree type, int formal_id, bool agg_preserved,
458 bool type_preserved)
460 jfunc->type = IPA_JF_ANCESTOR;
461 jfunc->value.ancestor.formal_id = formal_id;
462 jfunc->value.ancestor.offset = offset;
463 jfunc->value.ancestor.type = type;
464 jfunc->value.ancestor.agg_preserved = agg_preserved;
465 jfunc->value.ancestor.type_preserved = type_preserved;
468 /* Extract the acual BINFO being described by JFUNC which must be a known type
469 jump function. */
471 tree
472 ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
474 tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
475 if (!base_binfo)
476 return NULL_TREE;
477 return get_binfo_at_offset (base_binfo,
478 jfunc->value.known_type.offset,
479 jfunc->value.known_type.component_type);
482 /* Structure to be passed in between detect_type_change and
483 check_stmt_for_type_change. */
485 struct type_change_info
487 /* Offset into the object where there is the virtual method pointer we are
488 looking for. */
489 HOST_WIDE_INT offset;
490 /* The declaration or SSA_NAME pointer of the base that we are checking for
491 type change. */
492 tree object;
493 /* If we actually can tell the type that the object has changed to, it is
494 stored in this field. Otherwise it remains NULL_TREE. */
495 tree known_current_type;
496 /* Set to true if dynamic type change has been detected. */
497 bool type_maybe_changed;
498 /* Set to true if multiple types have been encountered. known_current_type
499 must be disregarded in that case. */
500 bool multiple_types_encountered;
503 /* Return true if STMT can modify a virtual method table pointer.
505 This function makes special assumptions about both constructors and
506 destructors which are all the functions that are allowed to alter the VMT
507 pointers. It assumes that destructors begin with assignment into all VMT
508 pointers and that constructors essentially look in the following way:
510 1) The very first thing they do is that they call constructors of ancestor
511 sub-objects that have them.
513 2) Then VMT pointers of this and all its ancestors is set to new values
514 corresponding to the type corresponding to the constructor.
516 3) Only afterwards, other stuff such as constructor of member sub-objects
517 and the code written by the user is run. Only this may include calling
518 virtual functions, directly or indirectly.
520 There is no way to call a constructor of an ancestor sub-object in any
521 other way.
523 This means that we do not have to care whether constructors get the correct
524 type information because they will always change it (in fact, if we define
525 the type to be given by the VMT pointer, it is undefined).
527 The most important fact to derive from the above is that if, for some
528 statement in the section 3, we try to detect whether the dynamic type has
529 changed, we can safely ignore all calls as we examine the function body
530 backwards until we reach statements in section 2 because these calls cannot
531 be ancestor constructors or destructors (if the input is not bogus) and so
532 do not change the dynamic type (this holds true only for automatically
533 allocated objects but at the moment we devirtualize only these). We then
534 must detect that statements in section 2 change the dynamic type and can try
535 to derive the new type. That is enough and we can stop, we will never see
536 the calls into constructors of sub-objects in this code. Therefore we can
537 safely ignore all call statements that we traverse.
540 static bool
541 stmt_may_be_vtbl_ptr_store (gimple stmt)
543 if (is_gimple_call (stmt))
544 return false;
545 else if (is_gimple_assign (stmt))
547 tree lhs = gimple_assign_lhs (stmt);
549 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
551 if (flag_strict_aliasing
552 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
553 return false;
555 if (TREE_CODE (lhs) == COMPONENT_REF
556 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
557 return false;
558 /* In the future we might want to use get_base_ref_and_offset to find
559 if there is a field corresponding to the offset and if so, proceed
560 almost like if it was a component ref. */
563 return true;
566 /* If STMT can be proved to be an assignment to the virtual method table
567 pointer of ANALYZED_OBJ and the type associated with the new table
568 identified, return the type. Otherwise return NULL_TREE. */
570 static tree
571 extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
573 HOST_WIDE_INT offset, size, max_size;
574 tree lhs, rhs, base;
576 if (!gimple_assign_single_p (stmt))
577 return NULL_TREE;
579 lhs = gimple_assign_lhs (stmt);
580 rhs = gimple_assign_rhs1 (stmt);
581 if (TREE_CODE (lhs) != COMPONENT_REF
582 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
583 || TREE_CODE (rhs) != ADDR_EXPR)
584 return NULL_TREE;
585 rhs = get_base_address (TREE_OPERAND (rhs, 0));
586 if (!rhs
587 || TREE_CODE (rhs) != VAR_DECL
588 || !DECL_VIRTUAL_P (rhs))
589 return NULL_TREE;
591 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
592 if (offset != tci->offset
593 || size != POINTER_SIZE
594 || max_size != POINTER_SIZE)
595 return NULL_TREE;
596 if (TREE_CODE (base) == MEM_REF)
598 if (TREE_CODE (tci->object) != MEM_REF
599 || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
600 || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
601 TREE_OPERAND (base, 1)))
602 return NULL_TREE;
604 else if (tci->object != base)
605 return NULL_TREE;
607 return DECL_CONTEXT (rhs);
610 /* Callback of walk_aliased_vdefs and a helper function for
611 detect_type_change to check whether a particular statement may modify
612 the virtual table pointer, and if possible also determine the new type of
613 the (sub-)object. It stores its result into DATA, which points to a
614 type_change_info structure. */
616 static bool
617 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
619 gimple stmt = SSA_NAME_DEF_STMT (vdef);
620 struct type_change_info *tci = (struct type_change_info *) data;
622 if (stmt_may_be_vtbl_ptr_store (stmt))
624 tree type;
625 type = extr_type_from_vtbl_ptr_store (stmt, tci);
626 if (tci->type_maybe_changed
627 && type != tci->known_current_type)
628 tci->multiple_types_encountered = true;
629 tci->known_current_type = type;
630 tci->type_maybe_changed = true;
631 return true;
633 else
634 return false;
639 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
640 callsite CALL) by looking for assignments to its virtual table pointer. If
641 it is, return true and fill in the jump function JFUNC with relevant type
642 information or set it to unknown. ARG is the object itself (not a pointer
643 to it, unless dereferenced). BASE is the base of the memory access as
644 returned by get_ref_base_and_extent, as is the offset. */
646 static bool
647 detect_type_change (tree arg, tree base, tree comp_type, gimple call,
648 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
650 struct type_change_info tci;
651 ao_ref ao;
653 gcc_checking_assert (DECL_P (arg)
654 || TREE_CODE (arg) == MEM_REF
655 || handled_component_p (arg));
656 /* Const calls cannot call virtual methods through VMT and so type changes do
657 not matter. */
658 if (!flag_devirtualize || !gimple_vuse (call)
659 /* Be sure expected_type is polymorphic. */
660 || !comp_type
661 || TREE_CODE (comp_type) != RECORD_TYPE
662 || !TYPE_BINFO (comp_type)
663 || !BINFO_VTABLE (TYPE_BINFO (comp_type)))
664 return false;
666 ao_ref_init (&ao, arg);
667 ao.base = base;
668 ao.offset = offset;
669 ao.size = POINTER_SIZE;
670 ao.max_size = ao.size;
672 tci.offset = offset;
673 tci.object = get_base_address (arg);
674 tci.known_current_type = NULL_TREE;
675 tci.type_maybe_changed = false;
676 tci.multiple_types_encountered = false;
678 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
679 &tci, NULL);
680 if (!tci.type_maybe_changed)
681 return false;
683 if (!tci.known_current_type
684 || tci.multiple_types_encountered
685 || offset != 0)
686 jfunc->type = IPA_JF_UNKNOWN;
687 else
688 ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);
690 return true;
693 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
694 SSA name (its dereference will become the base and the offset is assumed to
695 be zero). */
697 static bool
698 detect_type_change_ssa (tree arg, tree comp_type,
699 gimple call, struct ipa_jump_func *jfunc)
701 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
702 if (!flag_devirtualize
703 || !POINTER_TYPE_P (TREE_TYPE (arg)))
704 return false;
706 arg = build2 (MEM_REF, ptr_type_node, arg,
707 build_int_cst (ptr_type_node, 0));
709 return detect_type_change (arg, arg, comp_type, call, jfunc, 0);
712 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
713 boolean variable pointed to by DATA. */
715 static bool
716 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
717 void *data)
719 bool *b = (bool *) data;
720 *b = true;
721 return true;
724 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
725 a value known not to be modified in this function before reaching the
726 statement STMT. PARM_AINFO is a pointer to a structure containing temporary
727 information about the parameter. */
729 static bool
730 parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
731 gimple stmt, tree parm_load)
733 bool modified = false;
734 bitmap *visited_stmts;
735 ao_ref refd;
737 if (parm_ainfo && parm_ainfo->parm_modified)
738 return false;
740 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
741 ao_ref_init (&refd, parm_load);
742 /* We can cache visited statements only when parm_ainfo is available and when
743 we are looking at a naked load of the whole parameter. */
744 if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
745 visited_stmts = NULL;
746 else
747 visited_stmts = &parm_ainfo->parm_visited_statements;
748 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
749 visited_stmts);
750 if (parm_ainfo && modified)
751 parm_ainfo->parm_modified = true;
752 return !modified;
755 /* If STMT is an assignment that loads a value from an parameter declaration,
756 return the index of the parameter in ipa_node_params which has not been
757 modified. Otherwise return -1. */
759 static int
760 load_from_unmodified_param (vec<ipa_param_descriptor_t> descriptors,
761 struct param_analysis_info *parms_ainfo,
762 gimple stmt)
764 int index;
765 tree op1;
767 if (!gimple_assign_single_p (stmt))
768 return -1;
770 op1 = gimple_assign_rhs1 (stmt);
771 if (TREE_CODE (op1) != PARM_DECL)
772 return -1;
774 index = ipa_get_param_decl_index_1 (descriptors, op1);
775 if (index < 0
776 || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
777 : NULL, stmt, op1))
778 return -1;
780 return index;
783 /* Return true if memory reference REF loads data that are known to be
784 unmodified in this function before reaching statement STMT. PARM_AINFO, if
785 non-NULL, is a pointer to a structure containing temporary information about
786 PARM. */
788 static bool
789 parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
790 gimple stmt, tree ref)
792 bool modified = false;
793 ao_ref refd;
795 gcc_checking_assert (gimple_vuse (stmt));
796 if (parm_ainfo && parm_ainfo->ref_modified)
797 return false;
799 ao_ref_init (&refd, ref);
800 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
801 NULL);
802 if (parm_ainfo && modified)
803 parm_ainfo->ref_modified = true;
804 return !modified;
807 /* Return true if the data pointed to by PARM is known to be unmodified in this
808 function before reaching call statement CALL into which it is passed.
809 PARM_AINFO is a pointer to a structure containing temporary information
810 about PARM. */
812 static bool
813 parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
814 gimple call, tree parm)
816 bool modified = false;
817 ao_ref refd;
819 /* It's unnecessary to calculate anything about memory contnets for a const
820 function because it is not goin to use it. But do not cache the result
821 either. Also, no such calculations for non-pointers. */
822 if (!gimple_vuse (call)
823 || !POINTER_TYPE_P (TREE_TYPE (parm)))
824 return false;
826 if (parm_ainfo->pt_modified)
827 return false;
829 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
830 walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
831 parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
832 if (modified)
833 parm_ainfo->pt_modified = true;
834 return !modified;
837 /* Return true if we can prove that OP is a memory reference loading unmodified
838 data from an aggregate passed as a parameter and if the aggregate is passed
839 by reference, that the alias type of the load corresponds to the type of the
840 formal parameter (so that we can rely on this type for TBAA in callers).
841 INFO and PARMS_AINFO describe parameters of the current function (but the
842 latter can be NULL), STMT is the load statement. If function returns true,
843 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
844 within the aggregate and whether it is a load from a value passed by
845 reference respectively. */
847 static bool
848 ipa_load_from_parm_agg_1 (vec<ipa_param_descriptor_t> descriptors,
849 struct param_analysis_info *parms_ainfo, gimple stmt,
850 tree op, int *index_p, HOST_WIDE_INT *offset_p,
851 bool *by_ref_p)
853 int index;
854 HOST_WIDE_INT size, max_size;
855 tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);
857 if (max_size == -1 || max_size != size || *offset_p < 0)
858 return false;
860 if (DECL_P (base))
862 int index = ipa_get_param_decl_index_1 (descriptors, base);
863 if (index >= 0
864 && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
865 : NULL, stmt, op))
867 *index_p = index;
868 *by_ref_p = false;
869 return true;
871 return false;
874 if (TREE_CODE (base) != MEM_REF
875 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
876 || !integer_zerop (TREE_OPERAND (base, 1)))
877 return false;
879 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
881 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
882 index = ipa_get_param_decl_index_1 (descriptors, parm);
884 else
886 /* This branch catches situations where a pointer parameter is not a
887 gimple register, for example:
889 void hip7(S*) (struct S * p)
891 void (*<T2e4>) (struct S *) D.1867;
892 struct S * p.1;
894 <bb 2>:
895 p.1_1 = p;
896 D.1867_2 = p.1_1->f;
897 D.1867_2 ();
898 gdp = &p;
901 gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
902 index = load_from_unmodified_param (descriptors, parms_ainfo, def);
905 if (index >= 0
906 && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
907 stmt, op))
909 *index_p = index;
910 *by_ref_p = true;
911 return true;
913 return false;
916 /* Just like the previous function, just without the param_analysis_info
917 pointer, for users outside of this file. */
919 bool
920 ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
921 tree op, int *index_p, HOST_WIDE_INT *offset_p,
922 bool *by_ref_p)
924 return ipa_load_from_parm_agg_1 (info->descriptors, NULL, stmt, op, index_p,
925 offset_p, by_ref_p);
928 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
929 of an assignment statement STMT, try to determine whether we are actually
930 handling any of the following cases and construct an appropriate jump
931 function into JFUNC if so:
933 1) The passed value is loaded from a formal parameter which is not a gimple
934 register (most probably because it is addressable, the value has to be
935 scalar) and we can guarantee the value has not changed. This case can
936 therefore be described by a simple pass-through jump function. For example:
938 foo (int a)
940 int a.0;
942 a.0_2 = a;
943 bar (a.0_2);
945 2) The passed value can be described by a simple arithmetic pass-through
946 jump function. E.g.
948 foo (int a)
950 int D.2064;
952 D.2064_4 = a.1(D) + 4;
953 bar (D.2064_4);
955 This case can also occur in combination of the previous one, e.g.:
957 foo (int a, int z)
959 int a.0;
960 int D.2064;
962 a.0_3 = a;
963 D.2064_4 = a.0_3 + 4;
964 foo (D.2064_4);
966 3) The passed value is an address of an object within another one (which
967 also passed by reference). Such situations are described by an ancestor
968 jump function and describe situations such as:
970 B::foo() (struct B * const this)
972 struct A * D.1845;
974 D.1845_2 = &this_1(D)->D.1748;
975 A::bar (D.1845_2);
977 INFO is the structure describing individual parameters access different
978 stages of IPA optimizations. PARMS_AINFO contains the information that is
979 only needed for intraprocedural analysis. */
981 static void
982 compute_complex_assign_jump_func (struct ipa_node_params *info,
983 struct param_analysis_info *parms_ainfo,
984 struct ipa_jump_func *jfunc,
985 gimple call, gimple stmt, tree name,
986 tree param_type)
988 HOST_WIDE_INT offset, size, max_size;
989 tree op1, tc_ssa, base, ssa;
990 int index;
992 op1 = gimple_assign_rhs1 (stmt);
994 if (TREE_CODE (op1) == SSA_NAME)
996 if (SSA_NAME_IS_DEFAULT_DEF (op1))
997 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
998 else
999 index = load_from_unmodified_param (info->descriptors, parms_ainfo,
1000 SSA_NAME_DEF_STMT (op1));
1001 tc_ssa = op1;
1003 else
1005 index = load_from_unmodified_param (info->descriptors, parms_ainfo, stmt);
1006 tc_ssa = gimple_assign_lhs (stmt);
1009 if (index >= 0)
1011 tree op2 = gimple_assign_rhs2 (stmt);
1013 if (op2)
1015 if (!is_gimple_ip_invariant (op2)
1016 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
1017 && !useless_type_conversion_p (TREE_TYPE (name),
1018 TREE_TYPE (op1))))
1019 return;
1021 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1022 gimple_assign_rhs_code (stmt));
1024 else if (gimple_assign_single_p (stmt))
1026 bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
1027 call, tc_ssa);
1028 bool type_p = false;
1030 if (param_type && POINTER_TYPE_P (param_type))
1031 type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
1032 call, jfunc);
1033 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1034 ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
1036 return;
1039 if (TREE_CODE (op1) != ADDR_EXPR)
1040 return;
1041 op1 = TREE_OPERAND (op1, 0);
1042 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
1043 return;
1044 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
1045 if (TREE_CODE (base) != MEM_REF
1046 /* If this is a varying address, punt. */
1047 || max_size == -1
1048 || max_size != size)
1049 return;
1050 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
1051 ssa = TREE_OPERAND (base, 0);
1052 if (TREE_CODE (ssa) != SSA_NAME
1053 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
1054 || offset < 0)
1055 return;
1057 /* Dynamic types are changed in constructors and destructors. */
1058 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
1059 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
1061 bool type_p = !detect_type_change (op1, base, TREE_TYPE (param_type),
1062 call, jfunc, offset);
1063 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1064 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index,
1065 parm_ref_data_pass_through_p (&parms_ainfo[index],
1066 call, ssa), type_p);
1070 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1071 it looks like:
1073 iftmp.1_3 = &obj_2(D)->D.1762;
1075 The base of the MEM_REF must be a default definition SSA NAME of a
1076 parameter. Return NULL_TREE if it looks otherwise. If case of success, the
1077 whole MEM_REF expression is returned and the offset calculated from any
1078 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1079 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1081 static tree
1082 get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
1084 HOST_WIDE_INT size, max_size;
1085 tree expr, parm, obj;
1087 if (!gimple_assign_single_p (assign))
1088 return NULL_TREE;
1089 expr = gimple_assign_rhs1 (assign);
1091 if (TREE_CODE (expr) != ADDR_EXPR)
1092 return NULL_TREE;
1093 expr = TREE_OPERAND (expr, 0);
1094 obj = expr;
1095 expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
1097 if (TREE_CODE (expr) != MEM_REF
1098 /* If this is a varying address, punt. */
1099 || max_size == -1
1100 || max_size != size
1101 || *offset < 0)
1102 return NULL_TREE;
1103 parm = TREE_OPERAND (expr, 0);
1104 if (TREE_CODE (parm) != SSA_NAME
1105 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1106 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1107 return NULL_TREE;
1109 *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
1110 *obj_p = obj;
1111 return expr;
1115 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1116 statement PHI, try to find out whether NAME is in fact a
1117 multiple-inheritance typecast from a descendant into an ancestor of a formal
1118 parameter and thus can be described by an ancestor jump function and if so,
1119 write the appropriate function into JFUNC.
1121 Essentially we want to match the following pattern:
1123 if (obj_2(D) != 0B)
1124 goto <bb 3>;
1125 else
1126 goto <bb 4>;
1128 <bb 3>:
1129 iftmp.1_3 = &obj_2(D)->D.1762;
1131 <bb 4>:
1132 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1133 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1134 return D.1879_6; */
1136 static void
1137 compute_complex_ancestor_jump_func (struct ipa_node_params *info,
1138 struct param_analysis_info *parms_ainfo,
1139 struct ipa_jump_func *jfunc,
1140 gimple call, gimple phi, tree param_type)
1142 HOST_WIDE_INT offset;
1143 gimple assign, cond;
1144 basic_block phi_bb, assign_bb, cond_bb;
1145 tree tmp, parm, expr, obj;
1146 int index, i;
1148 if (gimple_phi_num_args (phi) != 2)
1149 return;
1151 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1152 tmp = PHI_ARG_DEF (phi, 0);
1153 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1154 tmp = PHI_ARG_DEF (phi, 1);
1155 else
1156 return;
1157 if (TREE_CODE (tmp) != SSA_NAME
1158 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1159 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1160 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1161 return;
1163 assign = SSA_NAME_DEF_STMT (tmp);
1164 assign_bb = gimple_bb (assign);
1165 if (!single_pred_p (assign_bb))
1166 return;
1167 expr = get_ancestor_addr_info (assign, &obj, &offset);
1168 if (!expr)
1169 return;
1170 parm = TREE_OPERAND (expr, 0);
1171 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1172 gcc_assert (index >= 0);
1174 cond_bb = single_pred (assign_bb);
1175 cond = last_stmt (cond_bb);
1176 if (!cond
1177 || gimple_code (cond) != GIMPLE_COND
1178 || gimple_cond_code (cond) != NE_EXPR
1179 || gimple_cond_lhs (cond) != parm
1180 || !integer_zerop (gimple_cond_rhs (cond)))
1181 return;
1183 phi_bb = gimple_bb (phi);
1184 for (i = 0; i < 2; i++)
1186 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1187 if (pred != assign_bb && pred != cond_bb)
1188 return;
1191 bool type_p = false;
1192 if (param_type && POINTER_TYPE_P (param_type))
1193 type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
1194 call, jfunc, offset);
1195 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1196 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index,
1197 parm_ref_data_pass_through_p (&parms_ainfo[index],
1198 call, parm), type_p);
1201 /* Given OP which is passed as an actual argument to a called function,
1202 determine if it is possible to construct a KNOWN_TYPE jump function for it
1203 and if so, create one and store it to JFUNC.
1204 EXPECTED_TYPE represents a type the argument should be in */
1206 static void
1207 compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
1208 gimple call, tree expected_type)
1210 HOST_WIDE_INT offset, size, max_size;
1211 tree base;
1213 if (!flag_devirtualize
1214 || TREE_CODE (op) != ADDR_EXPR
1215 || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE
1216 /* Be sure expected_type is polymorphic. */
1217 || !expected_type
1218 || TREE_CODE (expected_type) != RECORD_TYPE
1219 || !TYPE_BINFO (expected_type)
1220 || !BINFO_VTABLE (TYPE_BINFO (expected_type)))
1221 return;
1223 op = TREE_OPERAND (op, 0);
1224 base = get_ref_base_and_extent (op, &offset, &size, &max_size);
1225 if (!DECL_P (base)
1226 || max_size == -1
1227 || max_size != size
1228 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
1229 || is_global_var (base))
1230 return;
1232 if (detect_type_change (op, base, expected_type, call, jfunc, offset))
1233 return;
1235 ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
1236 expected_type);
1239 /* Inspect the given TYPE and return true iff it has the same structure (the
1240 same number of fields of the same types) as a C++ member pointer. If
1241 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1242 corresponding fields there. */
1244 static bool
1245 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1247 tree fld;
1249 if (TREE_CODE (type) != RECORD_TYPE)
1250 return false;
1252 fld = TYPE_FIELDS (type);
1253 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1254 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1255 || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
1256 return false;
1258 if (method_ptr)
1259 *method_ptr = fld;
1261 fld = DECL_CHAIN (fld);
1262 if (!fld || INTEGRAL_TYPE_P (fld)
1263 || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
1264 return false;
1265 if (delta)
1266 *delta = fld;
1268 if (DECL_CHAIN (fld))
1269 return false;
1271 return true;
1274 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1275 return the rhs of its defining statement. Otherwise return RHS as it
1276 is. */
1278 static inline tree
1279 get_ssa_def_if_simple_copy (tree rhs)
1281 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1283 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1285 if (gimple_assign_single_p (def_stmt))
1286 rhs = gimple_assign_rhs1 (def_stmt);
1287 else
1288 break;
1290 return rhs;
1293 /* Simple linked list, describing known contents of an aggregate beforere
1294 call. */
1296 struct ipa_known_agg_contents_list
1298 /* Offset and size of the described part of the aggregate. */
1299 HOST_WIDE_INT offset, size;
1300 /* Known constant value or NULL if the contents is known to be unknown. */
1301 tree constant;
1302 /* Pointer to the next structure in the list. */
1303 struct ipa_known_agg_contents_list *next;
1306 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1307 in ARG is filled in with constant values. ARG can either be an aggregate
1308 expression or a pointer to an aggregate. JFUNC is the jump function into
1309 which the constants are subsequently stored. */
1311 static void
1312 determine_known_aggregate_parts (gimple call, tree arg,
1313 struct ipa_jump_func *jfunc)
1315 struct ipa_known_agg_contents_list *list = NULL;
1316 int item_count = 0, const_count = 0;
1317 HOST_WIDE_INT arg_offset, arg_size;
1318 gimple_stmt_iterator gsi;
1319 tree arg_base;
1320 bool check_ref, by_ref;
1321 ao_ref r;
1323 /* The function operates in three stages. First, we prepare check_ref, r,
1324 arg_base and arg_offset based on what is actually passed as an actual
1325 argument. */
1327 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1329 by_ref = true;
1330 if (TREE_CODE (arg) == SSA_NAME)
1332 tree type_size;
1333 if (!host_integerp (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg))), 1))
1334 return;
1335 check_ref = true;
1336 arg_base = arg;
1337 arg_offset = 0;
1338 type_size = TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)));
1339 arg_size = tree_low_cst (type_size, 1);
1340 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1342 else if (TREE_CODE (arg) == ADDR_EXPR)
1344 HOST_WIDE_INT arg_max_size;
1346 arg = TREE_OPERAND (arg, 0);
1347 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1348 &arg_max_size);
1349 if (arg_max_size == -1
1350 || arg_max_size != arg_size
1351 || arg_offset < 0)
1352 return;
1353 if (DECL_P (arg_base))
1355 tree size;
1356 check_ref = false;
1357 size = build_int_cst (integer_type_node, arg_size);
1358 ao_ref_init_from_ptr_and_size (&r, arg_base, size);
1360 else
1361 return;
1363 else
1364 return;
1366 else
1368 HOST_WIDE_INT arg_max_size;
1370 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1372 by_ref = false;
1373 check_ref = false;
1374 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1375 &arg_max_size);
1376 if (arg_max_size == -1
1377 || arg_max_size != arg_size
1378 || arg_offset < 0)
1379 return;
1381 ao_ref_init (&r, arg);
1384 /* Second stage walks back the BB, looks at individual statements and as long
1385 as it is confident of how the statements affect contents of the
1386 aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
1387 describing it. */
1388 gsi = gsi_for_stmt (call);
1389 gsi_prev (&gsi);
1390 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1392 struct ipa_known_agg_contents_list *n, **p;
1393 gimple stmt = gsi_stmt (gsi);
1394 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1395 tree lhs, rhs, lhs_base;
1396 bool partial_overlap;
1398 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1399 continue;
1400 if (!gimple_assign_single_p (stmt))
1401 break;
1403 lhs = gimple_assign_lhs (stmt);
1404 rhs = gimple_assign_rhs1 (stmt);
1405 if (!is_gimple_reg_type (rhs)
1406 || TREE_CODE (lhs) == BIT_FIELD_REF
1407 || contains_bitfld_component_ref_p (lhs))
1408 break;
1410 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1411 &lhs_max_size);
1412 if (lhs_max_size == -1
1413 || lhs_max_size != lhs_size
1414 || (lhs_offset < arg_offset
1415 && lhs_offset + lhs_size > arg_offset)
1416 || (lhs_offset < arg_offset + arg_size
1417 && lhs_offset + lhs_size > arg_offset + arg_size))
1418 break;
1420 if (check_ref)
1422 if (TREE_CODE (lhs_base) != MEM_REF
1423 || TREE_OPERAND (lhs_base, 0) != arg_base
1424 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1425 break;
1427 else if (lhs_base != arg_base)
1429 if (DECL_P (lhs_base))
1430 continue;
1431 else
1432 break;
1435 if (lhs_offset + lhs_size < arg_offset
1436 || lhs_offset >= (arg_offset + arg_size))
1437 continue;
1439 partial_overlap = false;
1440 p = &list;
1441 while (*p && (*p)->offset < lhs_offset)
1443 if ((*p)->offset + (*p)->size > lhs_offset)
1445 partial_overlap = true;
1446 break;
1448 p = &(*p)->next;
1450 if (partial_overlap)
1451 break;
1452 if (*p && (*p)->offset < lhs_offset + lhs_size)
1454 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1455 /* We already know this value is subsequently overwritten with
1456 something else. */
1457 continue;
1458 else
1459 /* Otherwise this is a partial overlap which we cannot
1460 represent. */
1461 break;
1464 rhs = get_ssa_def_if_simple_copy (rhs);
1465 n = XALLOCA (struct ipa_known_agg_contents_list);
1466 n->size = lhs_size;
1467 n->offset = lhs_offset;
1468 if (is_gimple_ip_invariant (rhs))
1470 n->constant = rhs;
1471 const_count++;
1473 else
1474 n->constant = NULL_TREE;
1475 n->next = *p;
1476 *p = n;
1478 item_count++;
1479 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1480 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1481 break;
1484 /* Third stage just goes over the list and creates an appropriate vector of
1485 ipa_agg_jf_item structures out of it, of sourse only if there are
1486 any known constants to begin with. */
1488 if (const_count)
1490 jfunc->agg.by_ref = by_ref;
1491 vec_alloc (jfunc->agg.items, const_count);
1492 while (list)
1494 if (list->constant)
1496 struct ipa_agg_jf_item item;
1497 item.offset = list->offset - arg_offset;
1498 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1499 item.value = unshare_expr_without_location (list->constant);
1500 jfunc->agg.items->quick_push (item);
1502 list = list->next;
1507 static tree
1508 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1510 int n;
1511 tree type = (e->callee
1512 ? TREE_TYPE (e->callee->symbol.decl)
1513 : gimple_call_fntype (e->call_stmt));
1514 tree t = TYPE_ARG_TYPES (type);
1516 for (n = 0; n < i; n++)
1518 if (!t)
1519 break;
1520 t = TREE_CHAIN (t);
1522 if (t)
1523 return TREE_VALUE (t);
1524 if (!e->callee)
1525 return NULL;
1526 t = DECL_ARGUMENTS (e->callee->symbol.decl);
1527 for (n = 0; n < i; n++)
1529 if (!t)
1530 return NULL;
1531 t = TREE_CHAIN (t);
1533 if (t)
1534 return TREE_TYPE (t);
1535 return NULL;
1538 /* Compute jump function for all arguments of callsite CS and insert the
1539 information in the jump_functions array in the ipa_edge_args corresponding
1540 to this callsite. */
1542 static void
1543 ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
1544 struct cgraph_edge *cs)
1546 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1547 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1548 gimple call = cs->call_stmt;
1549 int n, arg_num = gimple_call_num_args (call);
1551 if (arg_num == 0 || args->jump_functions)
1552 return;
1553 vec_safe_grow_cleared (args->jump_functions, arg_num);
1555 if (gimple_call_internal_p (call))
1556 return;
1557 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1558 return;
1560 for (n = 0; n < arg_num; n++)
1562 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1563 tree arg = gimple_call_arg (call, n);
1564 tree param_type = ipa_get_callee_param_type (cs, n);
1566 if (is_gimple_ip_invariant (arg))
1567 ipa_set_jf_constant (jfunc, arg, cs);
1568 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1569 && TREE_CODE (arg) == PARM_DECL)
1571 int index = ipa_get_param_decl_index (info, arg);
1573 gcc_assert (index >=0);
1574 /* Aggregate passed by value, check for pass-through, otherwise we
1575 will attempt to fill in aggregate contents later in this
1576 for cycle. */
1577 if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
1579 ipa_set_jf_simple_pass_through (jfunc, index, false, false);
1580 continue;
1583 else if (TREE_CODE (arg) == SSA_NAME)
1585 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1587 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1588 if (index >= 0)
1590 bool agg_p, type_p;
1591 agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
1592 call, arg);
1593 if (param_type && POINTER_TYPE_P (param_type))
1594 type_p = !detect_type_change_ssa (arg, TREE_TYPE (param_type),
1595 call, jfunc);
1596 else
1597 type_p = false;
1598 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1599 ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
1600 type_p);
1603 else
1605 gimple stmt = SSA_NAME_DEF_STMT (arg);
1606 if (is_gimple_assign (stmt))
1607 compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
1608 call, stmt, arg, param_type);
1609 else if (gimple_code (stmt) == GIMPLE_PHI)
1610 compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
1611 call, stmt, param_type);
1614 else
1615 compute_known_type_jump_func (arg, jfunc, call,
1616 param_type
1617 && POINTER_TYPE_P (param_type)
1618 ? TREE_TYPE (param_type)
1619 : NULL);
1621 if ((jfunc->type != IPA_JF_PASS_THROUGH
1622 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1623 && (jfunc->type != IPA_JF_ANCESTOR
1624 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1625 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1626 || (POINTER_TYPE_P (TREE_TYPE (arg)))))
1627 determine_known_aggregate_parts (call, arg, jfunc);
1631 /* Compute jump functions for all edges - both direct and indirect - outgoing
1632 from NODE. Also count the actual arguments in the process. */
1634 static void
1635 ipa_compute_jump_functions (struct cgraph_node *node,
1636 struct param_analysis_info *parms_ainfo)
1638 struct cgraph_edge *cs;
1640 for (cs = node->callees; cs; cs = cs->next_callee)
1642 struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
1643 NULL);
1644 /* We do not need to bother analyzing calls to unknown
1645 functions unless they may become known during lto/whopr. */
1646 if (!callee->symbol.definition && !flag_lto)
1647 continue;
1648 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
1651 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
1652 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
1655 /* If STMT looks like a statement loading a value from a member pointer formal
1656 parameter, return that parameter and store the offset of the field to
1657 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1658 might be clobbered). If USE_DELTA, then we look for a use of the delta
1659 field rather than the pfn. */
1661 static tree
1662 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
1663 HOST_WIDE_INT *offset_p)
1665 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1667 if (!gimple_assign_single_p (stmt))
1668 return NULL_TREE;
1670 rhs = gimple_assign_rhs1 (stmt);
1671 if (TREE_CODE (rhs) == COMPONENT_REF)
1673 ref_field = TREE_OPERAND (rhs, 1);
1674 rhs = TREE_OPERAND (rhs, 0);
1676 else
1677 ref_field = NULL_TREE;
1678 if (TREE_CODE (rhs) != MEM_REF)
1679 return NULL_TREE;
1680 rec = TREE_OPERAND (rhs, 0);
1681 if (TREE_CODE (rec) != ADDR_EXPR)
1682 return NULL_TREE;
1683 rec = TREE_OPERAND (rec, 0);
1684 if (TREE_CODE (rec) != PARM_DECL
1685 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1686 return NULL_TREE;
1687 ref_offset = TREE_OPERAND (rhs, 1);
1689 if (use_delta)
1690 fld = delta_field;
1691 else
1692 fld = ptr_field;
1693 if (offset_p)
1694 *offset_p = int_bit_position (fld);
1696 if (ref_field)
1698 if (integer_nonzerop (ref_offset))
1699 return NULL_TREE;
1700 return ref_field == fld ? rec : NULL_TREE;
1702 else
1703 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1704 : NULL_TREE;
1707 /* Returns true iff T is an SSA_NAME defined by a statement. */
1709 static bool
1710 ipa_is_ssa_with_stmt_def (tree t)
1712 if (TREE_CODE (t) == SSA_NAME
1713 && !SSA_NAME_IS_DEFAULT_DEF (t))
1714 return true;
1715 else
1716 return false;
1719 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1720 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1721 indirect call graph edge. */
1723 static struct cgraph_edge *
1724 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
1726 struct cgraph_edge *cs;
1728 cs = cgraph_edge (node, stmt);
1729 cs->indirect_info->param_index = param_index;
1730 cs->indirect_info->offset = 0;
1731 cs->indirect_info->polymorphic = 0;
1732 cs->indirect_info->agg_contents = 0;
1733 cs->indirect_info->member_ptr = 0;
1734 return cs;
1737 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1738 (described by INFO). PARMS_AINFO is a pointer to a vector containing
1739 intermediate information about each formal parameter. Currently it checks
1740 whether the call calls a pointer that is a formal parameter and if so, the
1741 parameter is marked with the called flag and an indirect call graph edge
1742 describing the call is created. This is very simple for ordinary pointers
1743 represented in SSA but not-so-nice when it comes to member pointers. The
1744 ugly part of this function does nothing more than trying to match the
1745 pattern of such a call. An example of such a pattern is the gimple dump
1746 below, the call is on the last line:
1748 <bb 2>:
1749 f$__delta_5 = f.__delta;
1750 f$__pfn_24 = f.__pfn;
1753 <bb 2>:
1754 f$__delta_5 = MEM[(struct *)&f];
1755 f$__pfn_24 = MEM[(struct *)&f + 4B];
1757 and a few lines below:
1759 <bb 5>
1760 D.2496_3 = (int) f$__pfn_24;
1761 D.2497_4 = D.2496_3 & 1;
1762 if (D.2497_4 != 0)
1763 goto <bb 3>;
1764 else
1765 goto <bb 4>;
1767 <bb 6>:
1768 D.2500_7 = (unsigned int) f$__delta_5;
1769 D.2501_8 = &S + D.2500_7;
1770 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1771 D.2503_10 = *D.2502_9;
1772 D.2504_12 = f$__pfn_24 + -1;
1773 D.2505_13 = (unsigned int) D.2504_12;
1774 D.2506_14 = D.2503_10 + D.2505_13;
1775 D.2507_15 = *D.2506_14;
1776 iftmp.11_16 = (String:: *) D.2507_15;
1778 <bb 7>:
1779 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1780 D.2500_19 = (unsigned int) f$__delta_5;
1781 D.2508_20 = &S + D.2500_19;
1782 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1784 Such patterns are results of simple calls to a member pointer:
1786 int doprinting (int (MyString::* f)(int) const)
1788 MyString S ("somestring");
1790 return (S.*f)(4);
1793 Moreover, the function also looks for called pointers loaded from aggregates
1794 passed by value or reference. */
1796 static void
1797 ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1798 struct ipa_node_params *info,
1799 struct param_analysis_info *parms_ainfo,
1800 gimple call, tree target)
1802 gimple def;
1803 tree n1, n2;
1804 gimple d1, d2;
1805 tree rec, rec2, cond;
1806 gimple branch;
1807 int index;
1808 basic_block bb, virt_bb, join;
1809 HOST_WIDE_INT offset;
1810 bool by_ref;
1812 if (SSA_NAME_IS_DEFAULT_DEF (target))
1814 tree var = SSA_NAME_VAR (target);
1815 index = ipa_get_param_decl_index (info, var);
1816 if (index >= 0)
1817 ipa_note_param_call (node, index, call);
1818 return;
1821 def = SSA_NAME_DEF_STMT (target);
1822 if (gimple_assign_single_p (def)
1823 && ipa_load_from_parm_agg_1 (info->descriptors, parms_ainfo, def,
1824 gimple_assign_rhs1 (def), &index, &offset,
1825 &by_ref))
1827 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1828 cs->indirect_info->offset = offset;
1829 cs->indirect_info->agg_contents = 1;
1830 cs->indirect_info->by_ref = by_ref;
1831 return;
1834 /* Now we need to try to match the complex pattern of calling a member
1835 pointer. */
1836 if (gimple_code (def) != GIMPLE_PHI
1837 || gimple_phi_num_args (def) != 2
1838 || !POINTER_TYPE_P (TREE_TYPE (target))
1839 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1840 return;
1842 /* First, we need to check whether one of these is a load from a member
1843 pointer that is a parameter to this function. */
1844 n1 = PHI_ARG_DEF (def, 0);
1845 n2 = PHI_ARG_DEF (def, 1);
1846 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1847 return;
1848 d1 = SSA_NAME_DEF_STMT (n1);
1849 d2 = SSA_NAME_DEF_STMT (n2);
1851 join = gimple_bb (def);
1852 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
1854 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
1855 return;
1857 bb = EDGE_PRED (join, 0)->src;
1858 virt_bb = gimple_bb (d2);
1860 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
1862 bb = EDGE_PRED (join, 1)->src;
1863 virt_bb = gimple_bb (d1);
1865 else
1866 return;
1868 /* Second, we need to check that the basic blocks are laid out in the way
1869 corresponding to the pattern. */
1871 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1872 || single_pred (virt_bb) != bb
1873 || single_succ (virt_bb) != join)
1874 return;
1876 /* Third, let's see that the branching is done depending on the least
1877 significant bit of the pfn. */
1879 branch = last_stmt (bb);
1880 if (!branch || gimple_code (branch) != GIMPLE_COND)
1881 return;
1883 if ((gimple_cond_code (branch) != NE_EXPR
1884 && gimple_cond_code (branch) != EQ_EXPR)
1885 || !integer_zerop (gimple_cond_rhs (branch)))
1886 return;
1888 cond = gimple_cond_lhs (branch);
1889 if (!ipa_is_ssa_with_stmt_def (cond))
1890 return;
1892 def = SSA_NAME_DEF_STMT (cond);
1893 if (!is_gimple_assign (def)
1894 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1895 || !integer_onep (gimple_assign_rhs2 (def)))
1896 return;
1898 cond = gimple_assign_rhs1 (def);
1899 if (!ipa_is_ssa_with_stmt_def (cond))
1900 return;
1902 def = SSA_NAME_DEF_STMT (cond);
1904 if (is_gimple_assign (def)
1905 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
1907 cond = gimple_assign_rhs1 (def);
1908 if (!ipa_is_ssa_with_stmt_def (cond))
1909 return;
1910 def = SSA_NAME_DEF_STMT (cond);
1913 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1914 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1915 == ptrmemfunc_vbit_in_delta),
1916 NULL);
1917 if (rec != rec2)
1918 return;
1920 index = ipa_get_param_decl_index (info, rec);
1921 if (index >= 0
1922 && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
1924 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1925 cs->indirect_info->offset = offset;
1926 cs->indirect_info->agg_contents = 1;
1927 cs->indirect_info->member_ptr = 1;
1930 return;
1933 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1934 object referenced in the expression is a formal parameter of the caller
1935 (described by INFO), create a call note for the statement. */
1937 static void
1938 ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1939 struct ipa_node_params *info, gimple call,
1940 tree target)
1942 struct cgraph_edge *cs;
1943 struct cgraph_indirect_call_info *ii;
1944 struct ipa_jump_func jfunc;
1945 tree obj = OBJ_TYPE_REF_OBJECT (target);
1946 int index;
1947 HOST_WIDE_INT anc_offset;
1949 if (!flag_devirtualize)
1950 return;
1952 if (TREE_CODE (obj) != SSA_NAME)
1953 return;
1955 if (SSA_NAME_IS_DEFAULT_DEF (obj))
1957 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
1958 return;
1960 anc_offset = 0;
1961 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
1962 gcc_assert (index >= 0);
1963 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
1964 call, &jfunc))
1965 return;
1967 else
1969 gimple stmt = SSA_NAME_DEF_STMT (obj);
1970 tree expr;
1972 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
1973 if (!expr)
1974 return;
1975 index = ipa_get_param_decl_index (info,
1976 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
1977 gcc_assert (index >= 0);
1978 if (detect_type_change (obj, expr, obj_type_ref_class (target),
1979 call, &jfunc, anc_offset))
1980 return;
1983 cs = ipa_note_param_call (node, index, call);
1984 ii = cs->indirect_info;
1985 ii->offset = anc_offset;
1986 ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
1987 ii->otr_type = obj_type_ref_class (target);
1988 ii->polymorphic = 1;
1991 /* Analyze a call statement CALL whether and how it utilizes formal parameters
1992 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
1993 containing intermediate information about each formal parameter. */
1995 static void
1996 ipa_analyze_call_uses (struct cgraph_node *node,
1997 struct ipa_node_params *info,
1998 struct param_analysis_info *parms_ainfo, gimple call)
2000 tree target = gimple_call_fn (call);
2002 if (!target)
2003 return;
2004 if (TREE_CODE (target) == SSA_NAME)
2005 ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
2006 else if (virtual_method_call_p (target))
2007 ipa_analyze_virtual_call_uses (node, info, call, target);
2011 /* Analyze the call statement STMT with respect to formal parameters (described
2012 in INFO) of caller given by NODE. Currently it only checks whether formal
2013 parameters are called. PARMS_AINFO is a pointer to a vector containing
2014 intermediate information about each formal parameter. */
2016 static void
2017 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
2018 struct param_analysis_info *parms_ainfo, gimple stmt)
2020 if (is_gimple_call (stmt))
2021 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
2024 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2025 If OP is a parameter declaration, mark it as used in the info structure
2026 passed in DATA. */
2028 static bool
2029 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
2030 tree op, void *data)
2032 struct ipa_node_params *info = (struct ipa_node_params *) data;
2034 op = get_base_address (op);
2035 if (op
2036 && TREE_CODE (op) == PARM_DECL)
2038 int index = ipa_get_param_decl_index (info, op);
2039 gcc_assert (index >= 0);
2040 ipa_set_param_used (info, index, true);
2043 return false;
2046 /* Scan the function body of NODE and inspect the uses of formal parameters.
2047 Store the findings in various structures of the associated ipa_node_params
2048 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
2049 vector containing intermediate information about each formal parameter. */
2051 static void
2052 ipa_analyze_params_uses (struct cgraph_node *node,
2053 struct param_analysis_info *parms_ainfo)
2055 tree decl = node->symbol.decl;
2056 basic_block bb;
2057 struct function *func;
2058 gimple_stmt_iterator gsi;
2059 struct ipa_node_params *info = IPA_NODE_REF (node);
2060 int i;
2062 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
2063 return;
2065 info->uses_analysis_done = 1;
2066 if (ipa_func_spec_opts_forbid_analysis_p (node))
2068 for (i = 0; i < ipa_get_param_count (info); i++)
2070 ipa_set_param_used (info, i, true);
2071 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2073 return;
2076 for (i = 0; i < ipa_get_param_count (info); i++)
2078 tree parm = ipa_get_param (info, i);
2079 int controlled_uses = 0;
2081 /* For SSA regs see if parameter is used. For non-SSA we compute
2082 the flag during modification analysis. */
2083 if (is_gimple_reg (parm))
2085 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->symbol.decl),
2086 parm);
2087 if (ddef && !has_zero_uses (ddef))
2089 imm_use_iterator imm_iter;
2090 use_operand_p use_p;
2092 ipa_set_param_used (info, i, true);
2093 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2094 if (!is_gimple_call (USE_STMT (use_p)))
2096 controlled_uses = IPA_UNDESCRIBED_USE;
2097 break;
2099 else
2100 controlled_uses++;
2102 else
2103 controlled_uses = 0;
2105 else
2106 controlled_uses = IPA_UNDESCRIBED_USE;
2107 ipa_set_controlled_uses (info, i, controlled_uses);
2110 func = DECL_STRUCT_FUNCTION (decl);
2111 FOR_EACH_BB_FN (bb, func)
2113 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2115 gimple stmt = gsi_stmt (gsi);
2117 if (is_gimple_debug (stmt))
2118 continue;
2120 ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
2121 walk_stmt_load_store_addr_ops (stmt, info,
2122 visit_ref_for_mod_analysis,
2123 visit_ref_for_mod_analysis,
2124 visit_ref_for_mod_analysis);
2126 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2127 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
2128 visit_ref_for_mod_analysis,
2129 visit_ref_for_mod_analysis,
2130 visit_ref_for_mod_analysis);
2134 /* Free stuff in PARMS_AINFO, assume there are PARAM_COUNT parameters. */
2136 static void
2137 free_parms_ainfo (struct param_analysis_info *parms_ainfo, int param_count)
2139 int i;
2141 for (i = 0; i < param_count; i++)
2143 if (parms_ainfo[i].parm_visited_statements)
2144 BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
2145 if (parms_ainfo[i].pt_visited_statements)
2146 BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
2150 /* Initialize the array describing properties of of formal parameters
2151 of NODE, analyze their uses and compute jump functions associated
2152 with actual arguments of calls from within NODE. */
2154 void
2155 ipa_analyze_node (struct cgraph_node *node)
2157 struct ipa_node_params *info;
2158 struct param_analysis_info *parms_ainfo;
2159 int param_count;
2161 ipa_check_create_node_params ();
2162 ipa_check_create_edge_args ();
2163 info = IPA_NODE_REF (node);
2164 push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
2165 ipa_initialize_node_params (node);
2167 param_count = ipa_get_param_count (info);
2168 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
2169 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
2171 ipa_analyze_params_uses (node, parms_ainfo);
2172 ipa_compute_jump_functions (node, parms_ainfo);
2174 free_parms_ainfo (parms_ainfo, param_count);
2175 pop_cfun ();
2178 /* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF
2179 attempt a type-based devirtualization. If successful, return the
2180 target function declaration, otherwise return NULL. */
2182 tree
2183 ipa_intraprocedural_devirtualization (gimple call)
2185 tree binfo, token, fndecl;
2186 struct ipa_jump_func jfunc;
2187 tree otr = gimple_call_fn (call);
2189 jfunc.type = IPA_JF_UNKNOWN;
2190 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
2191 call, obj_type_ref_class (otr));
2192 if (jfunc.type != IPA_JF_KNOWN_TYPE)
2193 return NULL_TREE;
2194 binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
2195 if (!binfo)
2196 return NULL_TREE;
2197 token = OBJ_TYPE_REF_TOKEN (otr);
2198 fndecl = gimple_get_virt_method_for_binfo (tree_low_cst (token, 1),
2199 binfo);
2200 #ifdef ENABLE_CHECKING
2201 if (fndecl)
2202 gcc_assert (possible_polymorphic_call_target_p
2203 (otr, cgraph_get_node (fndecl)));
2204 #endif
2205 return fndecl;
2208 /* Update the jump function DST when the call graph edge corresponding to SRC is
2209 is being inlined, knowing that DST is of type ancestor and src of known
2210 type. */
2212 static void
2213 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2214 struct ipa_jump_func *dst)
2216 HOST_WIDE_INT combined_offset;
2217 tree combined_type;
2219 if (!ipa_get_jf_ancestor_type_preserved (dst))
2221 dst->type = IPA_JF_UNKNOWN;
2222 return;
2225 combined_offset = ipa_get_jf_known_type_offset (src)
2226 + ipa_get_jf_ancestor_offset (dst);
2227 combined_type = ipa_get_jf_ancestor_type (dst);
2229 ipa_set_jf_known_type (dst, combined_offset,
2230 ipa_get_jf_known_type_base_type (src),
2231 combined_type);
2234 /* Update the jump functions associated with call graph edge E when the call
2235 graph edge CS is being inlined, assuming that E->caller is already (possibly
2236 indirectly) inlined into CS->callee and that E has not been inlined. */
2238 static void
2239 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2240 struct cgraph_edge *e)
2242 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2243 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2244 int count = ipa_get_cs_argument_count (args);
2245 int i;
2247 for (i = 0; i < count; i++)
2249 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2251 if (dst->type == IPA_JF_ANCESTOR)
2253 struct ipa_jump_func *src;
2254 int dst_fid = dst->value.ancestor.formal_id;
2256 /* Variable number of arguments can cause havoc if we try to access
2257 one that does not exist in the inlined edge. So make sure we
2258 don't. */
2259 if (dst_fid >= ipa_get_cs_argument_count (top))
2261 dst->type = IPA_JF_UNKNOWN;
2262 continue;
2265 src = ipa_get_ith_jump_func (top, dst_fid);
2267 if (src->agg.items
2268 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2270 struct ipa_agg_jf_item *item;
2271 int j;
2273 /* Currently we do not produce clobber aggregate jump functions,
2274 replace with merging when we do. */
2275 gcc_assert (!dst->agg.items);
2277 dst->agg.items = vec_safe_copy (src->agg.items);
2278 dst->agg.by_ref = src->agg.by_ref;
2279 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2280 item->offset -= dst->value.ancestor.offset;
2283 if (src->type == IPA_JF_KNOWN_TYPE)
2284 combine_known_type_and_ancestor_jfs (src, dst);
2285 else if (src->type == IPA_JF_PASS_THROUGH
2286 && src->value.pass_through.operation == NOP_EXPR)
2288 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2289 dst->value.ancestor.agg_preserved &=
2290 src->value.pass_through.agg_preserved;
2291 dst->value.ancestor.type_preserved &=
2292 src->value.pass_through.type_preserved;
2294 else if (src->type == IPA_JF_ANCESTOR)
2296 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2297 dst->value.ancestor.offset += src->value.ancestor.offset;
2298 dst->value.ancestor.agg_preserved &=
2299 src->value.ancestor.agg_preserved;
2300 dst->value.ancestor.type_preserved &=
2301 src->value.ancestor.type_preserved;
2303 else
2304 dst->type = IPA_JF_UNKNOWN;
2306 else if (dst->type == IPA_JF_PASS_THROUGH)
2308 struct ipa_jump_func *src;
2309 /* We must check range due to calls with variable number of arguments
2310 and we cannot combine jump functions with operations. */
2311 if (dst->value.pass_through.operation == NOP_EXPR
2312 && (dst->value.pass_through.formal_id
2313 < ipa_get_cs_argument_count (top)))
2315 int dst_fid = dst->value.pass_through.formal_id;
2316 src = ipa_get_ith_jump_func (top, dst_fid);
2317 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2319 switch (src->type)
2321 case IPA_JF_UNKNOWN:
2322 dst->type = IPA_JF_UNKNOWN;
2323 break;
2324 case IPA_JF_KNOWN_TYPE:
2325 ipa_set_jf_known_type (dst,
2326 ipa_get_jf_known_type_offset (src),
2327 ipa_get_jf_known_type_base_type (src),
2328 ipa_get_jf_known_type_base_type (src));
2329 break;
2330 case IPA_JF_CONST:
2331 ipa_set_jf_cst_copy (dst, src);
2332 break;
2334 case IPA_JF_PASS_THROUGH:
2336 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2337 enum tree_code operation;
2338 operation = ipa_get_jf_pass_through_operation (src);
2340 if (operation == NOP_EXPR)
2342 bool agg_p, type_p;
2343 agg_p = dst_agg_p
2344 && ipa_get_jf_pass_through_agg_preserved (src);
2345 type_p = ipa_get_jf_pass_through_type_preserved (src)
2346 && ipa_get_jf_pass_through_type_preserved (dst);
2347 ipa_set_jf_simple_pass_through (dst, formal_id,
2348 agg_p, type_p);
2350 else
2352 tree operand = ipa_get_jf_pass_through_operand (src);
2353 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2354 operation);
2356 break;
2358 case IPA_JF_ANCESTOR:
2360 bool agg_p, type_p;
2361 agg_p = dst_agg_p
2362 && ipa_get_jf_ancestor_agg_preserved (src);
2363 type_p = ipa_get_jf_ancestor_type_preserved (src)
2364 && ipa_get_jf_pass_through_type_preserved (dst);
2365 ipa_set_ancestor_jf (dst,
2366 ipa_get_jf_ancestor_offset (src),
2367 ipa_get_jf_ancestor_type (src),
2368 ipa_get_jf_ancestor_formal_id (src),
2369 agg_p, type_p);
2370 break;
2372 default:
2373 gcc_unreachable ();
2376 if (src->agg.items
2377 && (dst_agg_p || !src->agg.by_ref))
2379 /* Currently we do not produce clobber aggregate jump
2380 functions, replace with merging when we do. */
2381 gcc_assert (!dst->agg.items);
2383 dst->agg.by_ref = src->agg.by_ref;
2384 dst->agg.items = vec_safe_copy (src->agg.items);
2387 else
2388 dst->type = IPA_JF_UNKNOWN;
2393 /* If TARGET is an addr_expr of a function declaration, make it the destination
2394 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2396 struct cgraph_edge *
2397 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
2399 struct cgraph_node *callee;
2400 struct inline_edge_summary *es = inline_edge_summary (ie);
2401 bool unreachable = false;
2403 if (TREE_CODE (target) == ADDR_EXPR)
2404 target = TREE_OPERAND (target, 0);
2405 if (TREE_CODE (target) != FUNCTION_DECL)
2407 target = canonicalize_constructor_val (target, NULL);
2408 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2410 if (ie->indirect_info->member_ptr)
2411 /* Member pointer call that goes through a VMT lookup. */
2412 return NULL;
2414 if (dump_file)
2415 fprintf (dump_file, "ipa-prop: Discovered direct call to non-function"
2416 " in %s/%i, making it unreachable.\n",
2417 cgraph_node_name (ie->caller), ie->caller->symbol.order);
2418 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2419 callee = cgraph_get_create_node (target);
2420 unreachable = true;
2422 else
2423 callee = cgraph_get_node (target);
2425 else
2426 callee = cgraph_get_node (target);
2428 /* Because may-edges are not explicitely represented and vtable may be external,
2429 we may create the first reference to the object in the unit. */
2430 if (!callee || callee->global.inlined_to)
2433 /* We are better to ensure we can refer to it.
2434 In the case of static functions we are out of luck, since we already
2435 removed its body. In the case of public functions we may or may
2436 not introduce the reference. */
2437 if (!canonicalize_constructor_val (target, NULL)
2438 || !TREE_PUBLIC (target))
2440 if (dump_file)
2441 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2442 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2443 xstrdup (cgraph_node_name (ie->caller)),
2444 ie->caller->symbol.order,
2445 xstrdup (cgraph_node_name (ie->callee)),
2446 ie->callee->symbol.order);
2447 return NULL;
2449 callee = cgraph_get_create_real_symbol_node (target);
2451 ipa_check_create_node_params ();
2453 /* We can not make edges to inline clones. It is bug that someone removed
2454 the cgraph node too early. */
2455 gcc_assert (!callee->global.inlined_to);
2457 if (dump_file && !unreachable)
2459 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
2460 "(%s/%i -> %s/%i), for stmt ",
2461 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2462 xstrdup (cgraph_node_name (ie->caller)),
2463 ie->caller->symbol.order,
2464 xstrdup (cgraph_node_name (callee)),
2465 callee->symbol.order);
2466 if (ie->call_stmt)
2467 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2468 else
2469 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2471 ie = cgraph_make_edge_direct (ie, callee);
2472 es = inline_edge_summary (ie);
2473 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2474 - eni_size_weights.call_cost);
2475 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2476 - eni_time_weights.call_cost);
2478 return ie;
2481 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2482 return NULL if there is not any. BY_REF specifies whether the value has to
2483 be passed by reference or by value. */
2485 tree
2486 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2487 HOST_WIDE_INT offset, bool by_ref)
2489 struct ipa_agg_jf_item *item;
2490 int i;
2492 if (by_ref != agg->by_ref)
2493 return NULL;
2495 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2496 if (item->offset == offset)
2498 /* Currently we do not have clobber values, return NULL for them once
2499 we do. */
2500 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2501 return item->value;
2503 return NULL;
2506 /* Remove a reference to SYMBOL from the list of references of a node given by
2507 reference description RDESC. Return true if the reference has been
2508 successfully found and removed. */
2510 static bool
2511 remove_described_reference (symtab_node symbol, struct ipa_cst_ref_desc *rdesc)
2513 struct ipa_ref *to_del;
2514 struct cgraph_edge *origin;
2516 origin = rdesc->cs;
2517 if (!origin)
2518 return false;
2519 to_del = ipa_find_reference ((symtab_node) origin->caller, symbol,
2520 origin->call_stmt, origin->lto_stmt_uid);
2521 if (!to_del)
2522 return false;
2524 ipa_remove_reference (to_del);
2525 if (dump_file)
2526 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2527 xstrdup (cgraph_node_name (origin->caller)),
2528 origin->caller->symbol.order, xstrdup (symtab_node_name (symbol)));
2529 return true;
2532 /* If JFUNC has a reference description with refcount different from
2533 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2534 NULL. JFUNC must be a constant jump function. */
2536 static struct ipa_cst_ref_desc *
2537 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2539 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2540 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2541 return rdesc;
2542 else
2543 return NULL;
2546 /* If the value of constant jump function JFUNC is an address of a function
2547 declaration, return the associated call graph node. Otherwise return
2548 NULL. */
2550 static cgraph_node *
2551 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2553 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2554 tree cst = ipa_get_jf_constant (jfunc);
2555 if (TREE_CODE (cst) != ADDR_EXPR
2556 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2557 return NULL;
2559 return cgraph_get_node (TREE_OPERAND (cst, 0));
2563 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2564 refcount and if it hits zero, remove reference to SYMBOL from the caller of
2565 the edge specified in the rdesc. Return false if either the symbol or the
2566 reference could not be found, otherwise return true. */
2568 static bool
2569 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2571 struct ipa_cst_ref_desc *rdesc;
2572 if (jfunc->type == IPA_JF_CONST
2573 && (rdesc = jfunc_rdesc_usable (jfunc))
2574 && --rdesc->refcount == 0)
2576 symtab_node symbol = (symtab_node) cgraph_node_for_jfunc (jfunc);
2577 if (!symbol)
2578 return false;
2580 return remove_described_reference (symbol, rdesc);
2582 return true;
2585 /* Try to find a destination for indirect edge IE that corresponds to a simple
2586 call or a call of a member function pointer and where the destination is a
2587 pointer formal parameter described by jump function JFUNC. If it can be
2588 determined, return the newly direct edge, otherwise return NULL.
2589 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2591 static struct cgraph_edge *
2592 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2593 struct ipa_jump_func *jfunc,
2594 struct ipa_node_params *new_root_info)
2596 struct cgraph_edge *cs;
2597 tree target;
2598 bool agg_contents = ie->indirect_info->agg_contents;
2600 if (ie->indirect_info->agg_contents)
2601 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2602 ie->indirect_info->offset,
2603 ie->indirect_info->by_ref);
2604 else
2605 target = ipa_value_from_jfunc (new_root_info, jfunc);
2606 if (!target)
2607 return NULL;
2608 cs = ipa_make_edge_direct_to_target (ie, target);
2610 if (cs && !agg_contents)
2612 bool ok;
2613 gcc_checking_assert (cs->callee
2614 && (cs != ie
2615 || jfunc->type != IPA_JF_CONST
2616 || !cgraph_node_for_jfunc (jfunc)
2617 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2618 ok = try_decrement_rdesc_refcount (jfunc);
2619 gcc_checking_assert (ok);
2622 return cs;
2625 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2626 call based on a formal parameter which is described by jump function JFUNC
2627 and if it can be determined, make it direct and return the direct edge.
2628 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
2629 are relative to. */
2631 static struct cgraph_edge *
2632 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2633 struct ipa_jump_func *jfunc,
2634 struct ipa_node_params *new_root_info)
2636 tree binfo, target;
2638 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
2640 if (!binfo)
2641 return NULL;
2643 if (TREE_CODE (binfo) != TREE_BINFO)
2645 binfo = gimple_extract_devirt_binfo_from_cst
2646 (binfo, ie->indirect_info->otr_type);
2647 if (!binfo)
2648 return NULL;
2651 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
2652 ie->indirect_info->otr_type);
2653 if (binfo)
2654 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2655 binfo);
2656 else
2657 return NULL;
2659 if (target)
2661 #ifdef ENABLE_CHECKING
2662 gcc_assert (possible_polymorphic_call_target_p
2663 (ie, cgraph_get_node (target)));
2664 #endif
2665 return ipa_make_edge_direct_to_target (ie, target);
2667 else
2668 return NULL;
2671 /* Update the param called notes associated with NODE when CS is being inlined,
2672 assuming NODE is (potentially indirectly) inlined into CS->callee.
2673 Moreover, if the callee is discovered to be constant, create a new cgraph
2674 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2675 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
2677 static bool
2678 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2679 struct cgraph_node *node,
2680 vec<cgraph_edge_p> *new_edges)
2682 struct ipa_edge_args *top;
2683 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
2684 struct ipa_node_params *new_root_info;
2685 bool res = false;
2687 ipa_check_create_edge_args ();
2688 top = IPA_EDGE_REF (cs);
2689 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
2690 ? cs->caller->global.inlined_to
2691 : cs->caller);
2693 for (ie = node->indirect_calls; ie; ie = next_ie)
2695 struct cgraph_indirect_call_info *ici = ie->indirect_info;
2696 struct ipa_jump_func *jfunc;
2697 int param_index;
2699 next_ie = ie->next_callee;
2701 if (ici->param_index == -1)
2702 continue;
2704 /* We must check range due to calls with variable number of arguments: */
2705 if (ici->param_index >= ipa_get_cs_argument_count (top))
2707 ici->param_index = -1;
2708 continue;
2711 param_index = ici->param_index;
2712 jfunc = ipa_get_ith_jump_func (top, param_index);
2714 if (!flag_indirect_inlining)
2715 new_direct_edge = NULL;
2716 else if (ici->polymorphic)
2717 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
2718 new_root_info);
2719 else
2720 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
2721 new_root_info);
2722 /* If speculation was removed, then we need to do nothing. */
2723 if (new_direct_edge && new_direct_edge != ie)
2725 new_direct_edge->indirect_inlining_edge = 1;
2726 top = IPA_EDGE_REF (cs);
2727 res = true;
2729 else if (new_direct_edge)
2731 new_direct_edge->indirect_inlining_edge = 1;
2732 if (new_direct_edge->call_stmt)
2733 new_direct_edge->call_stmt_cannot_inline_p
2734 = !gimple_check_call_matching_types (
2735 new_direct_edge->call_stmt,
2736 new_direct_edge->callee->symbol.decl, false);
2737 if (new_edges)
2739 new_edges->safe_push (new_direct_edge);
2740 res = true;
2742 top = IPA_EDGE_REF (cs);
2744 else if (jfunc->type == IPA_JF_PASS_THROUGH
2745 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2747 if (ici->agg_contents
2748 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
2749 ici->param_index = -1;
2750 else
2751 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
2753 else if (jfunc->type == IPA_JF_ANCESTOR)
2755 if (ici->agg_contents
2756 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
2757 ici->param_index = -1;
2758 else
2760 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
2761 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
2764 else
2765 /* Either we can find a destination for this edge now or never. */
2766 ici->param_index = -1;
2769 return res;
2772 /* Recursively traverse subtree of NODE (including node) made of inlined
2773 cgraph_edges when CS has been inlined and invoke
2774 update_indirect_edges_after_inlining on all nodes and
2775 update_jump_functions_after_inlining on all non-inlined edges that lead out
2776 of this subtree. Newly discovered indirect edges will be added to
2777 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
2778 created. */
2780 static bool
2781 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
2782 struct cgraph_node *node,
2783 vec<cgraph_edge_p> *new_edges)
2785 struct cgraph_edge *e;
2786 bool res;
2788 res = update_indirect_edges_after_inlining (cs, node, new_edges);
2790 for (e = node->callees; e; e = e->next_callee)
2791 if (!e->inline_failed)
2792 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
2793 else
2794 update_jump_functions_after_inlining (cs, e);
2795 for (e = node->indirect_calls; e; e = e->next_callee)
2796 update_jump_functions_after_inlining (cs, e);
2798 return res;
2801 /* Combine two controlled uses counts as done during inlining. */
2803 static int
2804 combine_controlled_uses_counters (int c, int d)
2806 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
2807 return IPA_UNDESCRIBED_USE;
2808 else
2809 return c + d - 1;
2812 /* Propagate number of controlled users from CS->caleee to the new root of the
2813 tree of inlined nodes. */
2815 static void
2816 propagate_controlled_uses (struct cgraph_edge *cs)
2818 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
2819 struct cgraph_node *new_root = cs->caller->global.inlined_to
2820 ? cs->caller->global.inlined_to : cs->caller;
2821 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
2822 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
2823 int count, i;
2825 count = MIN (ipa_get_cs_argument_count (args),
2826 ipa_get_param_count (old_root_info));
2827 for (i = 0; i < count; i++)
2829 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2830 struct ipa_cst_ref_desc *rdesc;
2832 if (jf->type == IPA_JF_PASS_THROUGH)
2834 int src_idx, c, d;
2835 src_idx = ipa_get_jf_pass_through_formal_id (jf);
2836 c = ipa_get_controlled_uses (new_root_info, src_idx);
2837 d = ipa_get_controlled_uses (old_root_info, i);
2839 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
2840 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
2841 c = combine_controlled_uses_counters (c, d);
2842 ipa_set_controlled_uses (new_root_info, src_idx, c);
2843 if (c == 0 && new_root_info->ipcp_orig_node)
2845 struct cgraph_node *n;
2846 struct ipa_ref *ref;
2847 tree t = new_root_info->known_vals[src_idx];
2849 if (t && TREE_CODE (t) == ADDR_EXPR
2850 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
2851 && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
2852 && (ref = ipa_find_reference ((symtab_node) new_root,
2853 (symtab_node) n, NULL, 0)))
2855 if (dump_file)
2856 fprintf (dump_file, "ipa-prop: Removing cloning-created "
2857 "reference from %s/%i to %s/%i.\n",
2858 xstrdup (cgraph_node_name (new_root)),
2859 new_root->symbol.order,
2860 xstrdup (cgraph_node_name (n)), n->symbol.order);
2861 ipa_remove_reference (ref);
2865 else if (jf->type == IPA_JF_CONST
2866 && (rdesc = jfunc_rdesc_usable (jf)))
2868 int d = ipa_get_controlled_uses (old_root_info, i);
2869 int c = rdesc->refcount;
2870 rdesc->refcount = combine_controlled_uses_counters (c, d);
2871 if (rdesc->refcount == 0)
2873 tree cst = ipa_get_jf_constant (jf);
2874 struct cgraph_node *n;
2875 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
2876 && TREE_CODE (TREE_OPERAND (cst, 0))
2877 == FUNCTION_DECL);
2878 n = cgraph_get_node (TREE_OPERAND (cst, 0));
2879 if (n)
2881 struct cgraph_node *clone;
2882 bool ok;
2883 ok = remove_described_reference ((symtab_node) n, rdesc);
2884 gcc_checking_assert (ok);
2886 clone = cs->caller;
2887 while (clone->global.inlined_to
2888 && clone != rdesc->cs->caller
2889 && IPA_NODE_REF (clone)->ipcp_orig_node)
2891 struct ipa_ref *ref;
2892 ref = ipa_find_reference ((symtab_node) clone,
2893 (symtab_node) n, NULL, 0);
2894 if (ref)
2896 if (dump_file)
2897 fprintf (dump_file, "ipa-prop: Removing "
2898 "cloning-created reference "
2899 "from %s/%i to %s/%i.\n",
2900 xstrdup (cgraph_node_name (clone)),
2901 clone->symbol.order,
2902 xstrdup (cgraph_node_name (n)),
2903 n->symbol.order);
2904 ipa_remove_reference (ref);
2906 clone = clone->callers->caller;
2913 for (i = ipa_get_param_count (old_root_info);
2914 i < ipa_get_cs_argument_count (args);
2915 i++)
2917 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2919 if (jf->type == IPA_JF_CONST)
2921 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
2922 if (rdesc)
2923 rdesc->refcount = IPA_UNDESCRIBED_USE;
2925 else if (jf->type == IPA_JF_PASS_THROUGH)
2926 ipa_set_controlled_uses (new_root_info,
2927 jf->value.pass_through.formal_id,
2928 IPA_UNDESCRIBED_USE);
2932 /* Update jump functions and call note functions on inlining the call site CS.
2933 CS is expected to lead to a node already cloned by
2934 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
2935 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were +
2936 created. */
2938 bool
2939 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
2940 vec<cgraph_edge_p> *new_edges)
2942 bool changed;
2943 /* Do nothing if the preparation phase has not been carried out yet
2944 (i.e. during early inlining). */
2945 if (!ipa_node_params_vector.exists ())
2946 return false;
2947 gcc_assert (ipa_edge_args_vector);
2949 propagate_controlled_uses (cs);
2950 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
2952 return changed;
2955 /* Frees all dynamically allocated structures that the argument info points
2956 to. */
2958 void
2959 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
2961 vec_free (args->jump_functions);
2962 memset (args, 0, sizeof (*args));
2965 /* Free all ipa_edge structures. */
2967 void
2968 ipa_free_all_edge_args (void)
2970 int i;
2971 struct ipa_edge_args *args;
2973 if (!ipa_edge_args_vector)
2974 return;
2976 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
2977 ipa_free_edge_args_substructures (args);
2979 vec_free (ipa_edge_args_vector);
2982 /* Frees all dynamically allocated structures that the param info points
2983 to. */
2985 void
2986 ipa_free_node_params_substructures (struct ipa_node_params *info)
2988 info->descriptors.release ();
2989 free (info->lattices);
2990 /* Lattice values and their sources are deallocated with their alocation
2991 pool. */
2992 info->known_vals.release ();
2993 memset (info, 0, sizeof (*info));
2996 /* Free all ipa_node_params structures. */
2998 void
2999 ipa_free_all_node_params (void)
3001 int i;
3002 struct ipa_node_params *info;
3004 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
3005 ipa_free_node_params_substructures (info);
3007 ipa_node_params_vector.release ();
3010 /* Set the aggregate replacements of NODE to be AGGVALS. */
3012 void
3013 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3014 struct ipa_agg_replacement_value *aggvals)
3016 if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
3017 vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);
3019 (*ipa_node_agg_replacements)[node->uid] = aggvals;
3022 /* Hook that is called by cgraph.c when an edge is removed. */
3024 static void
3025 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3027 struct ipa_edge_args *args;
3029 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3030 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3031 return;
3033 args = IPA_EDGE_REF (cs);
3034 if (args->jump_functions)
3036 struct ipa_jump_func *jf;
3037 int i;
3038 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3040 struct ipa_cst_ref_desc *rdesc;
3041 try_decrement_rdesc_refcount (jf);
3042 if (jf->type == IPA_JF_CONST
3043 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3044 && rdesc->cs == cs)
3045 rdesc->cs = NULL;
3049 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3052 /* Hook that is called by cgraph.c when a node is removed. */
3054 static void
3055 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3057 /* During IPA-CP updating we can be called on not-yet analyze clones. */
3058 if (ipa_node_params_vector.length () > (unsigned)node->uid)
3059 ipa_free_node_params_substructures (IPA_NODE_REF (node));
3060 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3061 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
3064 /* Hook that is called by cgraph.c when an edge is duplicated. */
3066 static void
3067 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3068 __attribute__((unused)) void *data)
3070 struct ipa_edge_args *old_args, *new_args;
3071 unsigned int i;
3073 ipa_check_create_edge_args ();
3075 old_args = IPA_EDGE_REF (src);
3076 new_args = IPA_EDGE_REF (dst);
3078 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3080 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3082 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3083 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3085 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3087 if (src_jf->type == IPA_JF_CONST)
3089 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3091 if (!src_rdesc)
3092 dst_jf->value.constant.rdesc = NULL;
3093 else if (src->caller == dst->caller)
3095 struct ipa_ref *ref;
3096 symtab_node n = (symtab_node) cgraph_node_for_jfunc (src_jf);
3097 gcc_checking_assert (n);
3098 ref = ipa_find_reference ((symtab_node) src->caller, n,
3099 src->call_stmt, src->lto_stmt_uid);
3100 gcc_checking_assert (ref);
3101 ipa_clone_ref (ref, (symtab_node) dst->caller, ref->stmt);
3103 gcc_checking_assert (ipa_refdesc_pool);
3104 struct ipa_cst_ref_desc *dst_rdesc
3105 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3106 dst_rdesc->cs = dst;
3107 dst_rdesc->refcount = src_rdesc->refcount;
3108 dst_rdesc->next_duplicate = NULL;
3109 dst_jf->value.constant.rdesc = dst_rdesc;
3111 else if (src_rdesc->cs == src)
3113 struct ipa_cst_ref_desc *dst_rdesc;
3114 gcc_checking_assert (ipa_refdesc_pool);
3115 dst_rdesc
3116 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3117 dst_rdesc->cs = dst;
3118 dst_rdesc->refcount = src_rdesc->refcount;
3119 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3120 src_rdesc->next_duplicate = dst_rdesc;
3121 dst_jf->value.constant.rdesc = dst_rdesc;
3123 else
3125 struct ipa_cst_ref_desc *dst_rdesc;
3126 /* This can happen during inlining, when a JFUNC can refer to a
3127 reference taken in a function up in the tree of inline clones.
3128 We need to find the duplicate that refers to our tree of
3129 inline clones. */
3131 gcc_assert (dst->caller->global.inlined_to);
3132 for (dst_rdesc = src_rdesc->next_duplicate;
3133 dst_rdesc;
3134 dst_rdesc = dst_rdesc->next_duplicate)
3136 struct cgraph_node *top;
3137 top = dst_rdesc->cs->caller->global.inlined_to
3138 ? dst_rdesc->cs->caller->global.inlined_to
3139 : dst_rdesc->cs->caller;
3140 if (dst->caller->global.inlined_to == top)
3141 break;
3143 gcc_assert (dst_rdesc);
3144 dst_jf->value.constant.rdesc = dst_rdesc;
/* Hook that is called by cgraph.c when a node is duplicated.  Copies the
   ipa_node_params summary from SRC to DST and clones SRC's aggregate
   replacement chain, if it has one.  DATA is unused.  */

static void
ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
                           ATTRIBUTE_UNUSED void *data)
{
  struct ipa_node_params *old_info, *new_info;
  struct ipa_agg_replacement_value *old_av, *new_av;

  ipa_check_create_node_params ();
  old_info = IPA_NODE_REF (src);
  new_info = IPA_NODE_REF (dst);

  /* Descriptors are copied; lattices are transient propagation state and are
     deliberately not carried over to the clone.  */
  new_info->descriptors = old_info->descriptors.copy ();
  new_info->lattices = NULL;
  new_info->ipcp_orig_node = old_info->ipcp_orig_node;

  new_info->uses_analysis_done = old_info->uses_analysis_done;
  new_info->node_enqueued = old_info->node_enqueued;

  old_av = ipa_get_agg_replacements_for_node (src);
  if (!old_av)
    return;

  /* Duplicate the chain by prepending each copied element; this reverses the
     order of the values relative to SRC (NOTE(review): assumed consumers are
     order-insensitive — confirm).  */
  new_av = NULL;
  while (old_av)
    {
      struct ipa_agg_replacement_value *v;

      v = ggc_alloc_ipa_agg_replacement_value ();
      memcpy (v, old_av, sizeof (*v));
      v->next = new_av;
      new_av = v;
      old_av = old_av->next;
    }
  ipa_set_node_agg_value_chain (dst, new_av);
}
/* Analyze newly added function into callgraph.  Hook registered with
   cgraph_add_function_insertion_hook; NODE is the new function, DATA is
   unused.  */

static void
ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  ipa_analyze_node (node);
}
/* Register our cgraph hooks if they are not already there.  */

void
ipa_register_cgraph_hooks (void)
{
  if (!edge_removal_hook_holder)
    edge_removal_hook_holder =
      cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
  if (!node_removal_hook_holder)
    node_removal_hook_holder =
      cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
  if (!edge_duplication_hook_holder)
    edge_duplication_hook_holder =
      cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
  if (!node_duplication_hook_holder)
    node_duplication_hook_holder =
      cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
  /* NOTE(review): unlike the four holders above, the function insertion hook
     is registered unconditionally — a second call would overwrite the
     previous holder.  Confirm callers never register twice.  */
  function_insertion_hook_holder =
      cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
}
/* Unregister all of our cgraph hooks and clear the holders so that
   ipa_register_cgraph_hooks can re-register them later.  */

static void
ipa_unregister_cgraph_hooks (void)
{
  cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
  edge_removal_hook_holder = NULL;
  cgraph_remove_node_removal_hook (node_removal_hook_holder);
  node_removal_hook_holder = NULL;
  cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
  edge_duplication_hook_holder = NULL;
  cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
  node_duplication_hook_holder = NULL;
  cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
  function_insertion_hook_holder = NULL;
}
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after ipa-cp.  When optimizing, the structures are kept
   around because indirect inlining still needs them; they are only torn down
   here for the !optimize case.  */

void
ipa_free_all_structures_after_ipa_cp (void)
{
  if (!optimize)
    {
      ipa_free_all_edge_args ();
      ipa_free_all_node_params ();
      free_alloc_pool (ipcp_sources_pool);
      free_alloc_pool (ipcp_values_pool);
      free_alloc_pool (ipcp_agg_lattice_pool);
      ipa_unregister_cgraph_hooks ();
      /* The refdesc pool is created lazily, so it may not exist.  */
      if (ipa_refdesc_pool)
        free_alloc_pool (ipa_refdesc_pool);
    }
}
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after indirect inlining.  Unlike the ipa-cp variant, this
   always frees; the ipcp pools are guarded because ipa-cp may not have run
   and created them.  */

void
ipa_free_all_structures_after_iinln (void)
{
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
  if (ipcp_sources_pool)
    free_alloc_pool (ipcp_sources_pool);
  if (ipcp_values_pool)
    free_alloc_pool (ipcp_values_pool);
  if (ipcp_agg_lattice_pool)
    free_alloc_pool (ipcp_agg_lattice_pool);
  if (ipa_refdesc_pool)
    free_alloc_pool (ipa_refdesc_pool);
}
/* Print the parameter descriptors (ipa_tree_map data) of function NODE to
   file F.  Does nothing for nodes without a definition.  */

void
ipa_print_node_params (FILE *f, struct cgraph_node *node)
{
  int i, count;
  struct ipa_node_params *info;

  if (!node->symbol.definition)
    return;
  info = IPA_NODE_REF (node);
  fprintf (f, " function %s/%i parameter descriptors:\n",
           cgraph_node_name (node), node->symbol.order);
  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    {
      int c;

      ipa_dump_param (f, info, i);
      if (ipa_is_param_used (info, i))
        fprintf (f, " used");
      /* Controlled-uses count, or a marker when the analysis could not
         describe all uses of the parameter.  */
      c = ipa_get_controlled_uses (info, i);
      if (c == IPA_UNDESCRIBED_USE)
        fprintf (f, " undescribed_use");
      else
        fprintf (f, " controlled_uses=%i", c);
      fprintf (f, "\n");
    }
}
/* Print ipa_tree_map data structures of all functions in the
   callgraph to F.  */

void
ipa_print_all_params (FILE * f)
{
  struct cgraph_node *node;

  fprintf (f, "\nFunction parameters:\n");
  FOR_EACH_FUNCTION (node)
    ipa_print_node_params (f, node);
}
3317 /* Return a heap allocated vector containing formal parameters of FNDECL. */
3319 vec<tree>
3320 ipa_get_vector_of_formal_parms (tree fndecl)
3322 vec<tree> args;
3323 int count;
3324 tree parm;
3326 gcc_assert (!flag_wpa);
3327 count = count_formal_params (fndecl);
3328 args.create (count);
3329 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3330 args.quick_push (parm);
3332 return args;
3335 /* Return a heap allocated vector containing types of formal parameters of
3336 function type FNTYPE. */
3338 static inline vec<tree>
3339 get_vector_of_formal_parm_types (tree fntype)
3341 vec<tree> types;
3342 int count = 0;
3343 tree t;
3345 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3346 count++;
3348 types.create (count);
3349 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3350 types.quick_push (TREE_VALUE (t));
3352 return types;
/* Modify the function declaration FNDECL and its type according to the plan in
   ADJUSTMENTS.  It also sets base fields of individual adjustments structures
   to reflect the actual parameters being modified which are determined by the
   base_index field.  SYNTH_PARM_PREFIX (default "SYNTH") names synthesized
   replacement parameters.  */

void
ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
                              const char *synth_parm_prefix)
{
  vec<tree> oparms, otypes;
  tree orig_type, new_type = NULL;
  tree old_arg_types, t, new_arg_types = NULL;
  tree parm, *link = &DECL_ARGUMENTS (fndecl);
  int i, len = adjustments.length ();
  tree new_reversed = NULL;
  bool care_for_types, last_parm_void;

  if (!synth_parm_prefix)
    synth_parm_prefix = "SYNTH";

  oparms = ipa_get_vector_of_formal_parms (fndecl);
  orig_type = TREE_TYPE (fndecl);
  old_arg_types = TYPE_ARG_TYPES (orig_type);

  /* The following test is an ugly hack, some functions simply don't have any
     arguments in their type.  This is probably a bug but well... */
  care_for_types = (old_arg_types != NULL_TREE);
  if (care_for_types)
    {
      /* A trailing void_type_node marks a non-varargs (prototyped) list, in
         which case the type list has one more entry than there are decls.  */
      last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
                        == void_type_node);
      otypes = get_vector_of_formal_parm_types (orig_type);
      if (last_parm_void)
        gcc_assert (oparms.length () + 1 == otypes.length ());
      else
        gcc_assert (oparms.length () == otypes.length ());
    }
  else
    {
      last_parm_void = false;
      otypes.create (0);
    }

  /* Rebuild the DECL_ARGUMENTS chain through LINK while accumulating the new
     TYPE_ARG_TYPES list (built in reverse, fixed up below).  */
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      gcc_assert (link);

      adj = &adjustments[i];
      parm = oparms[adj->base_index];
      adj->base = parm;

      if (adj->copy_param)
        {
          /* Parameter kept as-is: re-link the original decl.  */
          if (care_for_types)
            new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
                                       new_arg_types);
          *link = parm;
          link = &DECL_CHAIN (parm);
        }
      else if (!adj->remove_param)
        {
          /* Parameter replaced by a synthesized one (possibly a pointer to
             the component when passed by reference).  */
          tree new_parm;
          tree ptype;

          if (adj->by_ref)
            ptype = build_pointer_type (adj->type);
          else
            ptype = adj->type;

          if (care_for_types)
            new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);

          new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
                                 ptype);
          DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);

          DECL_ARTIFICIAL (new_parm) = 1;
          DECL_ARG_TYPE (new_parm) = ptype;
          DECL_CONTEXT (new_parm) = fndecl;
          TREE_USED (new_parm) = 1;
          DECL_IGNORED_P (new_parm) = 1;
          layout_decl (new_parm, 0);

          adj->base = parm;
          adj->reduction = new_parm;

          *link = new_parm;

          link = &DECL_CHAIN (new_parm);
        }
      /* Removed parameters are simply skipped and never re-linked.  */
    }

  *link = NULL_TREE;

  if (care_for_types)
    {
      new_reversed = nreverse (new_arg_types);
      if (last_parm_void)
        {
          /* Restore the terminating void entry of a prototyped list; before
             nreverse, NEW_ARG_TYPES was the last cons cell.  */
          if (new_reversed)
            TREE_CHAIN (new_arg_types) = void_list_node;
          else
            new_reversed = void_list_node;
        }
    }

  /* Use copy_node to preserve as much as possible from original type
     (debug info, attribute lists etc.)
     Exception is METHOD_TYPEs must have THIS argument.
     When we are asked to remove it, we need to build new FUNCTION_TYPE
     instead.  */
  if (TREE_CODE (orig_type) != METHOD_TYPE
      || (adjustments[0].copy_param
          && adjustments[0].base_index == 0))
    {
      new_type = build_distinct_type_copy (orig_type);
      TYPE_ARG_TYPES (new_type) = new_reversed;
    }
  else
    {
      new_type
        = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
                                                         new_reversed));
      TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
      DECL_VINDEX (fndecl) = NULL_TREE;
    }

  /* When signature changes, we need to clear builtin info.  */
  if (DECL_BUILT_IN (fndecl))
    {
      DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
      DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
    }

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (orig_type);
  if (orig_type != t)
    {
      TYPE_MAIN_VARIANT (new_type) = t;
      TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_type;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_type) = new_type;
      TYPE_NEXT_VARIANT (new_type) = NULL;
    }

  TREE_TYPE (fndecl) = new_type;
  DECL_VIRTUAL_P (fndecl) = 0;
  otypes.release ();
  oparms.release ();
}
3511 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
3512 If this is a directly recursive call, CS must be NULL. Otherwise it must
3513 contain the corresponding call graph edge. */
3515 void
3516 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
3517 ipa_parm_adjustment_vec adjustments)
3519 struct cgraph_node *current_node = cgraph_get_node (current_function_decl);
3520 vec<tree> vargs;
3521 vec<tree, va_gc> **debug_args = NULL;
3522 gimple new_stmt;
3523 gimple_stmt_iterator gsi, prev_gsi;
3524 tree callee_decl;
3525 int i, len;
3527 len = adjustments.length ();
3528 vargs.create (len);
3529 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->symbol.decl;
3530 ipa_remove_stmt_references ((symtab_node) current_node, stmt);
3532 gsi = gsi_for_stmt (stmt);
3533 prev_gsi = gsi;
3534 gsi_prev (&prev_gsi);
3535 for (i = 0; i < len; i++)
3537 struct ipa_parm_adjustment *adj;
3539 adj = &adjustments[i];
3541 if (adj->copy_param)
3543 tree arg = gimple_call_arg (stmt, adj->base_index);
3545 vargs.quick_push (arg);
3547 else if (!adj->remove_param)
3549 tree expr, base, off;
3550 location_t loc;
3551 unsigned int deref_align = 0;
3552 bool deref_base = false;
3554 /* We create a new parameter out of the value of the old one, we can
3555 do the following kind of transformations:
3557 - A scalar passed by reference is converted to a scalar passed by
3558 value. (adj->by_ref is false and the type of the original
3559 actual argument is a pointer to a scalar).
3561 - A part of an aggregate is passed instead of the whole aggregate.
3562 The part can be passed either by value or by reference, this is
3563 determined by value of adj->by_ref. Moreover, the code below
3564 handles both situations when the original aggregate is passed by
3565 value (its type is not a pointer) and when it is passed by
3566 reference (it is a pointer to an aggregate).
3568 When the new argument is passed by reference (adj->by_ref is true)
3569 it must be a part of an aggregate and therefore we form it by
3570 simply taking the address of a reference inside the original
3571 aggregate. */
3573 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3574 base = gimple_call_arg (stmt, adj->base_index);
3575 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3576 : EXPR_LOCATION (base);
3578 if (TREE_CODE (base) != ADDR_EXPR
3579 && POINTER_TYPE_P (TREE_TYPE (base)))
3580 off = build_int_cst (adj->alias_ptr_type,
3581 adj->offset / BITS_PER_UNIT);
3582 else
3584 HOST_WIDE_INT base_offset;
3585 tree prev_base;
3586 bool addrof;
3588 if (TREE_CODE (base) == ADDR_EXPR)
3590 base = TREE_OPERAND (base, 0);
3591 addrof = true;
3593 else
3594 addrof = false;
3595 prev_base = base;
3596 base = get_addr_base_and_unit_offset (base, &base_offset);
3597 /* Aggregate arguments can have non-invariant addresses. */
3598 if (!base)
3600 base = build_fold_addr_expr (prev_base);
3601 off = build_int_cst (adj->alias_ptr_type,
3602 adj->offset / BITS_PER_UNIT);
3604 else if (TREE_CODE (base) == MEM_REF)
3606 if (!addrof)
3608 deref_base = true;
3609 deref_align = TYPE_ALIGN (TREE_TYPE (base));
3611 off = build_int_cst (adj->alias_ptr_type,
3612 base_offset
3613 + adj->offset / BITS_PER_UNIT);
3614 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
3615 off);
3616 base = TREE_OPERAND (base, 0);
3618 else
3620 off = build_int_cst (adj->alias_ptr_type,
3621 base_offset
3622 + adj->offset / BITS_PER_UNIT);
3623 base = build_fold_addr_expr (base);
3627 if (!adj->by_ref)
3629 tree type = adj->type;
3630 unsigned int align;
3631 unsigned HOST_WIDE_INT misalign;
3633 if (deref_base)
3635 align = deref_align;
3636 misalign = 0;
3638 else
3640 get_pointer_alignment_1 (base, &align, &misalign);
3641 if (TYPE_ALIGN (type) > align)
3642 align = TYPE_ALIGN (type);
3644 misalign += (tree_to_double_int (off)
3645 .sext (TYPE_PRECISION (TREE_TYPE (off))).low
3646 * BITS_PER_UNIT);
3647 misalign = misalign & (align - 1);
3648 if (misalign != 0)
3649 align = (misalign & -misalign);
3650 if (align < TYPE_ALIGN (type))
3651 type = build_aligned_type (type, align);
3652 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
3654 else
3656 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
3657 expr = build_fold_addr_expr (expr);
3660 expr = force_gimple_operand_gsi (&gsi, expr,
3661 adj->by_ref
3662 || is_gimple_reg_type (adj->type),
3663 NULL, true, GSI_SAME_STMT);
3664 vargs.quick_push (expr);
3666 if (!adj->copy_param && MAY_HAVE_DEBUG_STMTS)
3668 unsigned int ix;
3669 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
3670 gimple def_temp;
3672 arg = gimple_call_arg (stmt, adj->base_index);
3673 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
3675 if (!fold_convertible_p (TREE_TYPE (origin), arg))
3676 continue;
3677 arg = fold_convert_loc (gimple_location (stmt),
3678 TREE_TYPE (origin), arg);
3680 if (debug_args == NULL)
3681 debug_args = decl_debug_args_insert (callee_decl);
3682 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
3683 if (ddecl == origin)
3685 ddecl = (**debug_args)[ix + 1];
3686 break;
3688 if (ddecl == NULL)
3690 ddecl = make_node (DEBUG_EXPR_DECL);
3691 DECL_ARTIFICIAL (ddecl) = 1;
3692 TREE_TYPE (ddecl) = TREE_TYPE (origin);
3693 DECL_MODE (ddecl) = DECL_MODE (origin);
3695 vec_safe_push (*debug_args, origin);
3696 vec_safe_push (*debug_args, ddecl);
3698 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
3699 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
3703 if (dump_file && (dump_flags & TDF_DETAILS))
3705 fprintf (dump_file, "replacing stmt:");
3706 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
3709 new_stmt = gimple_build_call_vec (callee_decl, vargs);
3710 vargs.release ();
3711 if (gimple_call_lhs (stmt))
3712 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3714 gimple_set_block (new_stmt, gimple_block (stmt));
3715 if (gimple_has_location (stmt))
3716 gimple_set_location (new_stmt, gimple_location (stmt));
3717 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3718 gimple_call_copy_flags (new_stmt, stmt);
3720 if (dump_file && (dump_flags & TDF_DETAILS))
3722 fprintf (dump_file, "with stmt:");
3723 print_gimple_stmt (dump_file, new_stmt, 0, 0);
3724 fprintf (dump_file, "\n");
3726 gsi_replace (&gsi, new_stmt, true);
3727 if (cs)
3728 cgraph_set_call_stmt (cs, new_stmt);
3731 ipa_record_stmt_references (current_node, gsi_stmt (gsi));
3732 gsi_prev (&gsi);
3734 while ((gsi_end_p (prev_gsi) && !gsi_end_p (gsi))
3735 || (!gsi_end_p (prev_gsi) && gsi_stmt (gsi) == gsi_stmt (prev_gsi)));
3737 update_ssa (TODO_update_ssa);
3738 free_dominance_info (CDI_DOMINATORS);
3741 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
3743 static bool
3744 index_in_adjustments_multiple_times_p (int base_index,
3745 ipa_parm_adjustment_vec adjustments)
3747 int i, len = adjustments.length ();
3748 bool one = false;
3750 for (i = 0; i < len; i++)
3752 struct ipa_parm_adjustment *adj;
3753 adj = &adjustments[i];
3755 if (adj->base_index == base_index)
3757 if (one)
3758 return true;
3759 else
3760 one = true;
3763 return false;
/* Return adjustments that should have the same effect on function parameters
   and call arguments as if they were first changed according to adjustments in
   INNER and then by adjustments in OUTER.  The returned vector is newly
   allocated; TMP holds INNER with removals filtered out so that OUTER's
   base_index values can be used to index it directly.  */

ipa_parm_adjustment_vec
ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
                         ipa_parm_adjustment_vec outer)
{
  int i, outlen = outer.length ();
  int inlen = inner.length ();
  int removals = 0;
  ipa_parm_adjustment_vec adjustments, tmp;

  /* TMP = INNER without the removed parameters; OUTER indexes into the
     post-INNER parameter list, which is exactly TMP.  */
  tmp.create (inlen);
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n;
      n = &inner[i];

      if (n->remove_param)
        removals++;
      else
        tmp.quick_push (*n);
    }

  adjustments.create (outlen + removals);
  for (i = 0; i < outlen; i++)
    {
      struct ipa_parm_adjustment r;
      struct ipa_parm_adjustment *out = &outer[i];
      struct ipa_parm_adjustment *in = &tmp[out->base_index];

      memset (&r, 0, sizeof (r));
      gcc_assert (!in->remove_param);
      if (out->remove_param)
        {
          /* Only emit a removal once per original parameter; if the inner
             adjustments reference the parameter multiple times, the other
             references keep it alive.  */
          if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
            {
              r.remove_param = true;
              adjustments.quick_push (r);
            }
          continue;
        }

      r.base_index = in->base_index;
      r.type = out->type;

      /* FIXME:  Create nonlocal value too.  */

      /* Offsets compose additively; a copy on either side contributes
         offset zero.  */
      if (in->copy_param && out->copy_param)
        r.copy_param = true;
      else if (in->copy_param)
        r.offset = out->offset;
      else if (out->copy_param)
        r.offset = in->offset;
      else
        r.offset = in->offset + out->offset;
      adjustments.quick_push (r);
    }

  /* Parameters removed by INNER stay removed regardless of OUTER.  */
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n = &inner[i];

      if (n->remove_param)
        adjustments.quick_push (*n);
    }

  tmp.release ();
  return adjustments;
}
/* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
   friendly way, assuming they are meant to be applied to FNDECL.  */

void
ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
                            tree fndecl)
{
  int i, len = adjustments.length ();
  bool first = true;
  vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);

  fprintf (file, "IPA param adjustments: ");
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      adj = &adjustments[i];

      if (!first)
        fprintf (file, " ");
      else
        first = false;

      fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
      print_generic_expr (file, parms[adj->base_index], 0);
      if (adj->base)
        {
          fprintf (file, ", base: ");
          print_generic_expr (file, adj->base, 0);
        }
      if (adj->reduction)
        {
          fprintf (file, ", reduction: ");
          print_generic_expr (file, adj->reduction, 0);
        }
      if (adj->new_ssa_base)
        {
          fprintf (file, ", new_ssa_base: ");
          print_generic_expr (file, adj->new_ssa_base, 0);
        }

      /* The three adjustment kinds are mutually exclusive: copy, removal,
         or replacement at a given offset.  */
      if (adj->copy_param)
        fprintf (file, ", copy_param");
      else if (adj->remove_param)
        fprintf (file, ", remove_param");
      else
        fprintf (file, ", offset %li", (long) adj->offset);
      if (adj->by_ref)
        fprintf (file, ", by_ref");
      print_node_brief (file, ", type: ", adj->type, 0);
      fprintf (file, "\n");
    }
  parms.release ();
}
/* Dump the AV linked list of aggregate replacement values to F, as
   comma-separated "index[offset]=value" entries on one line.  */

void
ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
{
  bool comma = false;
  fprintf (f, " Aggregate replacements:");
  for (; av; av = av->next)
    {
      fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
               av->index, av->offset);
      print_generic_expr (f, av->value, 0);
      comma = true;
    }
  fprintf (f, "\n");
}
/* Stream out jump function JUMP_FUNC to OB.  The layout written here must be
   kept in exact sync with ipa_read_jump_function.  */

static void
ipa_write_jump_function (struct output_block *ob,
                         struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;

  streamer_write_uhwi (ob, jump_func->type);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_KNOWN_TYPE:
      streamer_write_uhwi (ob, jump_func->value.known_type.offset);
      stream_write_tree (ob, jump_func->value.known_type.base_type, true);
      stream_write_tree (ob, jump_func->value.known_type.component_type, true);
      break;
    case IPA_JF_CONST:
      /* Constants with locations cannot be streamed (locations are not
         preserved across LTO here).  */
      gcc_assert (
          EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
      stream_write_tree (ob, jump_func->value.constant.value, true);
      break;
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      if (jump_func->value.pass_through.operation == NOP_EXPR)
        {
          /* Simple pass-through: formal id plus two preservation bits.  */
          streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
          bp = bitpack_create (ob->main_stream);
          bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
          bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
          streamer_write_bitpack (&bp);
        }
      else
        {
          /* Arithmetic pass-through: the constant operand and formal id.  */
          stream_write_tree (ob, jump_func->value.pass_through.operand, true);
          streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
        }
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      stream_write_tree (ob, jump_func->value.ancestor.type, true);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
      streamer_write_bitpack (&bp);
      break;
    }

  /* Aggregate part: item count, a by_ref bit (only when non-empty), then
     the offset/value pairs.  */
  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);
  if (count)
    {
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->agg.by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
    {
      streamer_write_uhwi (ob, item->offset);
      stream_write_tree (ob, item->value, true);
    }
}
/* Read in jump function JUMP_FUNC from IB.  CS is the edge the jump function
   belongs to (needed for constant reference descriptors) and DATA_IN supplies
   the tree streaming context.  Must mirror ipa_write_jump_function.  */

static void
ipa_read_jump_function (struct lto_input_block *ib,
                        struct ipa_jump_func *jump_func,
                        struct cgraph_edge *cs,
                        struct data_in *data_in)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;

  jftype = (enum jump_func_type) streamer_read_uhwi (ib);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      jump_func->type = IPA_JF_UNKNOWN;
      break;
    case IPA_JF_KNOWN_TYPE:
      {
        HOST_WIDE_INT offset = streamer_read_uhwi (ib);
        tree base_type = stream_read_tree (ib, data_in);
        tree component_type = stream_read_tree (ib, data_in);

        ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
        break;
      }
    case IPA_JF_CONST:
      ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
      break;
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
        {
          /* Simple pass-through: formal id plus two preservation bits.  */
          int formal_id = streamer_read_uhwi (ib);
          struct bitpack_d bp = streamer_read_bitpack (ib);
          bool agg_preserved = bp_unpack_value (&bp, 1);
          bool type_preserved = bp_unpack_value (&bp, 1);
          ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
                                          type_preserved);
        }
      else
        {
          /* Arithmetic pass-through: constant operand then formal id.  */
          tree operand = stream_read_tree (ib, data_in);
          int formal_id = streamer_read_uhwi (ib);
          ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
                                         operation);
        }
      break;
    case IPA_JF_ANCESTOR:
      {
        HOST_WIDE_INT offset = streamer_read_uhwi (ib);
        tree type = stream_read_tree (ib, data_in);
        int formal_id = streamer_read_uhwi (ib);
        struct bitpack_d bp = streamer_read_bitpack (ib);
        bool agg_preserved = bp_unpack_value (&bp, 1);
        bool type_preserved = bp_unpack_value (&bp, 1);

        ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
                             type_preserved);
        break;
      }
    }

  /* Aggregate part: count, by_ref bit (only present when count > 0), then
     the offset/value items.  */
  count = streamer_read_uhwi (ib);
  vec_alloc (jump_func->agg.items, count);
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item item;
      item.offset = streamer_read_uhwi (ib);
      item.value = stream_read_tree (ib, data_in);
      jump_func->agg.items->quick_push (item);
    }
}
/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  Must stay in sync with
   ipa_read_indirect_edge_info.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
                              struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  streamer_write_hwi (ob, ii->offset);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->member_ptr, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  streamer_write_bitpack (&bp);

  /* OBJ_TYPE_REF data is only present for polymorphic calls.  */
  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
    }
}
/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining from IB.  Mirrors
   ipa_write_indirect_edge_info.  */

static void
ipa_read_indirect_edge_info (struct lto_input_block *ib,
                             struct data_in *data_in ATTRIBUTE_UNUSED,
                             struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  ii->param_index = (int) streamer_read_hwi (ib);
  ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  bp = streamer_read_bitpack (ib);
  ii->polymorphic = bp_unpack_value (&bp, 1);
  ii->agg_contents = bp_unpack_value (&bp, 1);
  ii->member_ptr = bp_unpack_value (&bp, 1);
  ii->by_ref = bp_unpack_value (&bp, 1);
  /* OBJ_TYPE_REF data is only present for polymorphic calls.  */
  if (ii->polymorphic)
    {
      ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
      ii->otr_type = stream_read_tree (ib, data_in);
    }
}
/* Stream out NODE info to OB: an encoder reference to the node, its parameter
   descriptors, and the jump functions of all its (direct and indirect) call
   sites.  Must stay in sync with ipa_read_node_info.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, ipa_get_param_count (info));
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
  bp = bitpack_create (ob->main_stream);
  /* Only fully analyzed, non-clone nodes are expected at stream-out time.  */
  gcc_assert (info->uses_analysis_done
              || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
  streamer_write_bitpack (&bp);
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
  /* Jump functions of direct call sites.  */
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
        ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
    }
  /* Indirect call sites additionally carry indirect-call info.  */
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
        ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
      ipa_write_indirect_edge_info (ob, e);
    }
}
/* Stream in NODE info from IB.  The node reference itself has already been
   consumed by the caller (ipa_prop_read_section); this reads parameter
   descriptors and jump functions for all call sites.  Mirrors
   ipa_write_node_info.  */

static void
ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
                    struct data_in *data_in)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  ipa_alloc_node_params (node, streamer_read_uhwi (ib));

  for (k = 0; k < ipa_get_param_count (info); k++)
    info->descriptors[k].move_cost = streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);
  /* The writer asserted uses_analysis_done (unless there were no params), so
     restore the flag accordingly.  */
  if (ipa_get_param_count (info) != 0)
    info->uses_analysis_done = true;
  info->node_enqueued = false;
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);

      if (!count)
        continue;
      vec_safe_grow_cleared (args->jump_functions, count);

      for (k = 0; k < ipa_get_cs_argument_count (args); k++)
        ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
                                data_in);
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);

      if (count)
        {
          vec_safe_grow_cleared (args->jump_functions, count);
          for (k = 0; k < ipa_get_cs_argument_count (args); k++)
            ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
                                    data_in);
        }
      /* Indirect-call info is present even when there are no arguments.  */
      ipa_read_indirect_edge_info (ib, data_in, e);
    }
}
/* Write jump functions for nodes in SET (the current LTO partition) into the
   jump-functions section.  Does nothing if no IPA analysis has been done.  */

void
ipa_prop_write_jump_functions (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  if (!ipa_node_params_vector.exists ())
    return;

  ob = create_output_block (LTO_section_jump_functions);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->cgraph_node = NULL;
  /* First pass only counts the nodes that will be written so the count can
     precede them in the stream.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (cgraph_function_with_gimple_body_p (node)
          && IPA_NODE_REF (node) != NULL)
        count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (cgraph_function_with_gimple_body_p (node)
          && IPA_NODE_REF (node) != NULL)
        ipa_write_node_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
/* Read section in file FILE_DATA of length LEN with data DATA.  Decodes the
   jump-functions section header, then reads per-node info for every node
   recorded by ipa_prop_write_jump_functions.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
                       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  struct lto_input_block ib_main;
  unsigned int i;
  unsigned int count;

  LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
                        header->main_size);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
                        header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      /* Node references were encoded by ipa_write_node_info.  */
      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = cgraph (lto_symtab_encoder_deref (encoder, index));
      gcc_assert (node->symbol.definition);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
                         len);
  lto_data_in_delete (data_in);
}
4294 /* Read ipcp jump functions. */
4296 void
4297 ipa_prop_read_jump_functions (void)
4299 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4300 struct lto_file_decl_data *file_data;
4301 unsigned int j = 0;
4303 ipa_check_create_node_params ();
4304 ipa_check_create_edge_args ();
4305 ipa_register_cgraph_hooks ();
4307 while ((file_data = file_data_vec[j++]))
4309 size_t len;
4310 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4312 if (data)
4313 ipa_prop_read_section (file_data, data, len);
/* After merging units, we can get mismatch in argument counts.
   Also decl merging might've rendered parameter lists obsolete.
   Also compute called_with_variable_arg info.  */

void
ipa_update_after_lto_read (void)
{
  /* Make sure the per-node and per-edge IPA summary vectors exist and are
     large enough to cover all symbols present after LTO symbol merging.  */
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
}
4328 void
4329 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
4331 int node_ref;
4332 unsigned int count = 0;
4333 lto_symtab_encoder_t encoder;
4334 struct ipa_agg_replacement_value *aggvals, *av;
4336 aggvals = ipa_get_agg_replacements_for_node (node);
4337 encoder = ob->decl_state->symtab_node_encoder;
4338 node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
4339 streamer_write_uhwi (ob, node_ref);
4341 for (av = aggvals; av; av = av->next)
4342 count++;
4343 streamer_write_uhwi (ob, count);
4345 for (av = aggvals; av; av = av->next)
4347 struct bitpack_d bp;
4349 streamer_write_uhwi (ob, av->offset);
4350 streamer_write_uhwi (ob, av->index);
4351 stream_write_tree (ob, av->value, true);
4353 bp = bitpack_create (ob->main_stream);
4354 bp_pack_value (&bp, av->by_ref, 1);
4355 streamer_write_bitpack (&bp);
4359 /* Stream in the aggregate value replacement chain for NODE from IB. */
4361 static void
4362 read_agg_replacement_chain (struct lto_input_block *ib,
4363 struct cgraph_node *node,
4364 struct data_in *data_in)
4366 struct ipa_agg_replacement_value *aggvals = NULL;
4367 unsigned int count, i;
4369 count = streamer_read_uhwi (ib);
4370 for (i = 0; i <count; i++)
4372 struct ipa_agg_replacement_value *av;
4373 struct bitpack_d bp;
4375 av = ggc_alloc_ipa_agg_replacement_value ();
4376 av->offset = streamer_read_uhwi (ib);
4377 av->index = streamer_read_uhwi (ib);
4378 av->value = stream_read_tree (ib, data_in);
4379 bp = streamer_read_bitpack (ib);
4380 av->by_ref = bp_unpack_value (&bp, 1);
4381 av->next = aggvals;
4382 aggvals = av;
4384 ipa_set_node_agg_value_chain (node, aggvals);
4387 /* Write all aggregate replacement for nodes in set. */
4389 void
4390 ipa_prop_write_all_agg_replacement (void)
4392 struct cgraph_node *node;
4393 struct output_block *ob;
4394 unsigned int count = 0;
4395 lto_symtab_encoder_iterator lsei;
4396 lto_symtab_encoder_t encoder;
4398 if (!ipa_node_agg_replacements)
4399 return;
4401 ob = create_output_block (LTO_section_ipcp_transform);
4402 encoder = ob->decl_state->symtab_node_encoder;
4403 ob->cgraph_node = NULL;
4404 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4405 lsei_next_function_in_partition (&lsei))
4407 node = lsei_cgraph_node (lsei);
4408 if (cgraph_function_with_gimple_body_p (node)
4409 && ipa_get_agg_replacements_for_node (node) != NULL)
4410 count++;
4413 streamer_write_uhwi (ob, count);
4415 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4416 lsei_next_function_in_partition (&lsei))
4418 node = lsei_cgraph_node (lsei);
4419 if (cgraph_function_with_gimple_body_p (node)
4420 && ipa_get_agg_replacements_for_node (node) != NULL)
4421 write_agg_replacement_chain (ob, node);
4423 streamer_write_char_stream (ob->main_stream, 0);
4424 produce_asm (ob, NULL);
4425 destroy_output_block (ob);
4428 /* Read replacements section in file FILE_DATA of length LEN with data
4429 DATA. */
4431 static void
4432 read_replacements_section (struct lto_file_decl_data *file_data,
4433 const char *data,
4434 size_t len)
4436 const struct lto_function_header *header =
4437 (const struct lto_function_header *) data;
4438 const int cfg_offset = sizeof (struct lto_function_header);
4439 const int main_offset = cfg_offset + header->cfg_size;
4440 const int string_offset = main_offset + header->main_size;
4441 struct data_in *data_in;
4442 struct lto_input_block ib_main;
4443 unsigned int i;
4444 unsigned int count;
4446 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4447 header->main_size);
4449 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
4450 header->string_size, vNULL);
4451 count = streamer_read_uhwi (&ib_main);
4453 for (i = 0; i < count; i++)
4455 unsigned int index;
4456 struct cgraph_node *node;
4457 lto_symtab_encoder_t encoder;
4459 index = streamer_read_uhwi (&ib_main);
4460 encoder = file_data->symtab_node_encoder;
4461 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4462 gcc_assert (node->symbol.definition);
4463 read_agg_replacement_chain (&ib_main, node, data_in);
4465 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4466 len);
4467 lto_data_in_delete (data_in);
4470 /* Read IPA-CP aggregate replacements. */
4472 void
4473 ipa_prop_read_all_agg_replacement (void)
4475 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4476 struct lto_file_decl_data *file_data;
4477 unsigned int j = 0;
4479 while ((file_data = file_data_vec[j++]))
4481 size_t len;
4482 const char *data = lto_get_section_data (file_data,
4483 LTO_section_ipcp_transform,
4484 NULL, &len);
4485 if (data)
4486 read_replacements_section (file_data, data, len);
4490 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4491 NODE. */
4493 static void
4494 adjust_agg_replacement_values (struct cgraph_node *node,
4495 struct ipa_agg_replacement_value *aggval)
4497 struct ipa_agg_replacement_value *v;
4498 int i, c = 0, d = 0, *adj;
4500 if (!node->clone.combined_args_to_skip)
4501 return;
4503 for (v = aggval; v; v = v->next)
4505 gcc_assert (v->index >= 0);
4506 if (c < v->index)
4507 c = v->index;
4509 c++;
4511 adj = XALLOCAVEC (int, c);
4512 for (i = 0; i < c; i++)
4513 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
4515 adj[i] = -1;
4516 d++;
4518 else
4519 adj[i] = i - d;
4521 for (v = aggval; v; v = v->next)
4522 v->index = adj[v->index];
/* Function body transformation phase.  Replace loads from aggregate
   parameters of NODE with the constants IPA-CP discovered for them, as
   recorded in the node's aggregate replacement chain.  Returns a TODO mask
   for the pass manager (0 when nothing changed).  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor_t> descriptors = vNULL;
  struct param_analysis_info *parms_ainfo;
  struct ipa_agg_replacement_value *aggval;
  gimple_stmt_iterator gsi;
  basic_block bb;
  int param_count;
  bool cfg_changed = false, something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s/%i\n",
	     cgraph_node_name (node), node->symbol.order);

  /* Nothing to do unless IPA-CP recorded aggregate constants for this node
     and the function actually has parameters.  */
  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->symbol.decl);
  if (param_count == 0)
    return 0;
  /* Remap replacement indices to account for parameters removed when this
     clone was materialized.  */
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);
  parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
  memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
  descriptors.safe_grow_cleared (param_count);
  ipa_populate_param_decls (node, descriptors);

  /* Scan every statement; any load from a parameter aggregate whose
     index/offset matches a recorded replacement value is rewritten to use
     the constant directly.  */
  FOR_EACH_BB (bb)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
	struct ipa_agg_replacement_value *v;
	gimple stmt = gsi_stmt (gsi);
	tree rhs, val, t;
	HOST_WIDE_INT offset;
	int index;
	bool by_ref, vce;

	/* Only scalar loads are candidates.  */
	if (!gimple_assign_load_p (stmt))
	  continue;
	rhs = gimple_assign_rhs1 (stmt);
	if (!is_gimple_reg_type (TREE_TYPE (rhs)))
	  continue;

	vce = false;
	t = rhs;
	while (handled_component_p (t))
	  {
	    /* V_C_E can do things like convert an array of integers to one
	       bigger integer and similar things we do not handle below.  */
	    if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
	      {
		vce = true;
		break;
	      }
	    t = TREE_OPERAND (t, 0);
	  }
	if (vce)
	  continue;

	/* Determine which parameter and offset the load reads from; bail
	   if it is not a load from a tracked parameter aggregate.  */
	if (!ipa_load_from_parm_agg_1 (descriptors, parms_ainfo, stmt,
				       rhs, &index, &offset, &by_ref))
	  continue;
	/* Look for a replacement value at exactly this index/offset; the
	   pass-by-reference kind must agree too.  */
	for (v = aggval; v; v = v->next)
	  if (v->index == index
	      && v->offset == offset)
	    break;
	if (!v || v->by_ref != by_ref)
	  continue;

	gcc_checking_assert (is_gimple_ip_invariant (v->value));
	if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
	  {
	    if (fold_convertible_p (TREE_TYPE (rhs), v->value))
	      val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
	    /* NOTE(review): pointer comparison of TYPE_SIZE trees — relies
	       on size trees being shared for equal sizes; confirm against
	       tree sharing guarantees.  */
	    else if (TYPE_SIZE (TREE_TYPE (rhs))
		     == TYPE_SIZE (TREE_TYPE (v->value)))
	      val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
	    else
	      {
		if (dump_file)
		  {
		    fprintf (dump_file, "    const ");
		    print_generic_expr (dump_file, v->value, 0);
		    fprintf (dump_file, "  can't be converted to type of ");
		    print_generic_expr (dump_file, rhs, 0);
		    fprintf (dump_file, "\n");
		  }
		continue;
	      }
	  }
	else
	  val = v->value;

	if (dump_file && (dump_flags & TDF_DETAILS))
	  {
	    fprintf (dump_file, "Modifying stmt:\n  ");
	    print_gimple_stmt (dump_file, stmt, 0, 0);
	  }
	gimple_assign_set_rhs_from_tree (&gsi, val);
	update_stmt (stmt);

	if (dump_file && (dump_flags & TDF_DETAILS))
	  {
	    fprintf (dump_file, "into:\n  ");
	    print_gimple_stmt (dump_file, stmt, 0, 0);
	    fprintf (dump_file, "\n");
	  }

	something_changed = true;
	/* Replacing a load with a constant may make an EH edge dead.  */
	if (maybe_clean_eh_stmt (stmt)
	    && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
	  cfg_changed = true;
      }

  /* The replacements have been consumed; drop them for this node.  */
  (*ipa_node_agg_replacements)[node->uid] = NULL;
  free_parms_ainfo (parms_ainfo, param_count);
  descriptors.release ();

  if (!something_changed)
    return 0;
  else if (cfg_changed)
    return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
  else
    return TODO_update_ssa_only_virtuals;
}