/* gcc/ipa-prop.c — Interprocedural analyses (IPA-CP jump functions).  */
/* Interprocedural analyses.
   Copyright (C) 2005-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tree.h"
24 #include "langhooks.h"
25 #include "ggc.h"
26 #include "target.h"
27 #include "cgraph.h"
28 #include "ipa-prop.h"
29 #include "tree-ssa.h"
30 #include "tree-pass.h"
31 #include "tree-inline.h"
32 #include "ipa-inline.h"
33 #include "gimple.h"
34 #include "flags.h"
35 #include "diagnostic.h"
36 #include "gimple-pretty-print.h"
37 #include "lto-streamer.h"
38 #include "data-streamer.h"
39 #include "tree-streamer.h"
40 #include "params.h"
41 #include "ipa-utils.h"
43 /* Intermediate information about a parameter that is only useful during the
44 run of ipa_analyze_node and is not kept afterwards. */
46 struct param_analysis_info
48 bool parm_modified, ref_modified, pt_modified;
49 bitmap parm_visited_statements, pt_visited_statements;
52 /* Vector where the parameter infos are actually stored. */
53 vec<ipa_node_params_t> ipa_node_params_vector;
54 /* Vector of known aggregate values in cloned nodes. */
55 vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
56 /* Vector where the parameter infos are actually stored. */
57 vec<ipa_edge_args_t, va_gc> *ipa_edge_args_vector;
59 /* Holders of ipa cgraph hooks: */
60 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
61 static struct cgraph_node_hook_list *node_removal_hook_holder;
62 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
63 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
64 static struct cgraph_node_hook_list *function_insertion_hook_holder;
66 /* Description of a reference to an IPA constant. */
67 struct ipa_cst_ref_desc
69 /* Edge that corresponds to the statement which took the reference. */
70 struct cgraph_edge *cs;
71 /* Linked list of duplicates created when call graph edges are cloned. */
72 struct ipa_cst_ref_desc *next_duplicate;
73 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
74 if out of control. */
75 int refcount;
78 /* Allocation pool for reference descriptions. */
80 static alloc_pool ipa_refdesc_pool;
82 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
83 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
85 static bool
86 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
88 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->symbol.decl);
89 struct cl_optimization *os;
91 if (!fs_opts)
92 return false;
93 os = TREE_OPTIMIZATION (fs_opts);
94 return !os->x_optimize || !os->x_flag_ipa_cp;
97 /* Return index of the formal whose tree is PTREE in function which corresponds
98 to INFO. */
100 static int
101 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor_t> descriptors, tree ptree)
103 int i, count;
105 count = descriptors.length ();
106 for (i = 0; i < count; i++)
107 if (descriptors[i].decl == ptree)
108 return i;
110 return -1;
113 /* Return index of the formal whose tree is PTREE in function which corresponds
114 to INFO. */
117 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
119 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
122 /* Populate the param_decl field in parameter DESCRIPTORS that correspond to
123 NODE. */
125 static void
126 ipa_populate_param_decls (struct cgraph_node *node,
127 vec<ipa_param_descriptor_t> &descriptors)
129 tree fndecl;
130 tree fnargs;
131 tree parm;
132 int param_num;
134 fndecl = node->symbol.decl;
135 gcc_assert (gimple_has_body_p (fndecl));
136 fnargs = DECL_ARGUMENTS (fndecl);
137 param_num = 0;
138 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
140 descriptors[param_num].decl = parm;
141 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm));
142 param_num++;
146 /* Return how many formal parameters FNDECL has. */
148 static inline int
149 count_formal_params (tree fndecl)
151 tree parm;
152 int count = 0;
153 gcc_assert (gimple_has_body_p (fndecl));
155 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
156 count++;
158 return count;
161 /* Return the declaration of Ith formal parameter of the function corresponding
162 to INFO. Note there is no setter function as this array is built just once
163 using ipa_initialize_node_params. */
165 void
166 ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
168 fprintf (file, "param #%i", i);
169 if (info->descriptors[i].decl)
171 fprintf (file, " ");
172 print_generic_expr (file, info->descriptors[i].decl, 0);
176 /* Initialize the ipa_node_params structure associated with NODE
177 to hold PARAM_COUNT parameters. */
179 void
180 ipa_alloc_node_params (struct cgraph_node *node, int param_count)
182 struct ipa_node_params *info = IPA_NODE_REF (node);
184 if (!info->descriptors.exists () && param_count)
185 info->descriptors.safe_grow_cleared (param_count);
188 /* Initialize the ipa_node_params structure associated with NODE by counting
189 the function parameters, creating the descriptors and populating their
190 param_decls. */
192 void
193 ipa_initialize_node_params (struct cgraph_node *node)
195 struct ipa_node_params *info = IPA_NODE_REF (node);
197 if (!info->descriptors.exists ())
199 ipa_alloc_node_params (node, count_formal_params (node->symbol.decl));
200 ipa_populate_param_decls (node, info->descriptors);
204 /* Print the jump functions associated with call graph edge CS to file F. */
206 static void
207 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
209 int i, count;
211 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
212 for (i = 0; i < count; i++)
214 struct ipa_jump_func *jump_func;
215 enum jump_func_type type;
217 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
218 type = jump_func->type;
220 fprintf (f, " param %d: ", i);
221 if (type == IPA_JF_UNKNOWN)
222 fprintf (f, "UNKNOWN\n");
223 else if (type == IPA_JF_KNOWN_TYPE)
225 fprintf (f, "KNOWN TYPE: base ");
226 print_generic_expr (f, jump_func->value.known_type.base_type, 0);
227 fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
228 jump_func->value.known_type.offset);
229 print_generic_expr (f, jump_func->value.known_type.component_type, 0);
230 fprintf (f, "\n");
232 else if (type == IPA_JF_CONST)
234 tree val = jump_func->value.constant.value;
235 fprintf (f, "CONST: ");
236 print_generic_expr (f, val, 0);
237 if (TREE_CODE (val) == ADDR_EXPR
238 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
240 fprintf (f, " -> ");
241 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
244 fprintf (f, "\n");
246 else if (type == IPA_JF_PASS_THROUGH)
248 fprintf (f, "PASS THROUGH: ");
249 fprintf (f, "%d, op %s",
250 jump_func->value.pass_through.formal_id,
251 get_tree_code_name(jump_func->value.pass_through.operation));
252 if (jump_func->value.pass_through.operation != NOP_EXPR)
254 fprintf (f, " ");
255 print_generic_expr (f,
256 jump_func->value.pass_through.operand, 0);
258 if (jump_func->value.pass_through.agg_preserved)
259 fprintf (f, ", agg_preserved");
260 if (jump_func->value.pass_through.type_preserved)
261 fprintf (f, ", type_preserved");
262 fprintf (f, "\n");
264 else if (type == IPA_JF_ANCESTOR)
266 fprintf (f, "ANCESTOR: ");
267 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
268 jump_func->value.ancestor.formal_id,
269 jump_func->value.ancestor.offset);
270 print_generic_expr (f, jump_func->value.ancestor.type, 0);
271 if (jump_func->value.ancestor.agg_preserved)
272 fprintf (f, ", agg_preserved");
273 if (jump_func->value.ancestor.type_preserved)
274 fprintf (f, ", type_preserved");
275 fprintf (f, "\n");
278 if (jump_func->agg.items)
280 struct ipa_agg_jf_item *item;
281 int j;
283 fprintf (f, " Aggregate passed by %s:\n",
284 jump_func->agg.by_ref ? "reference" : "value");
285 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
287 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
288 item->offset);
289 if (TYPE_P (item->value))
290 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
291 tree_low_cst (TYPE_SIZE (item->value), 1));
292 else
294 fprintf (f, "cst: ");
295 print_generic_expr (f, item->value, 0);
297 fprintf (f, "\n");
304 /* Print the jump functions of all arguments on all call graph edges going from
305 NODE to file F. */
307 void
308 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
310 struct cgraph_edge *cs;
312 fprintf (f, " Jump functions of caller %s/%i:\n", cgraph_node_name (node),
313 node->symbol.order);
314 for (cs = node->callees; cs; cs = cs->next_callee)
316 if (!ipa_edge_args_info_available_for_edge_p (cs))
317 continue;
319 fprintf (f, " callsite %s/%i -> %s/%i : \n",
320 xstrdup (cgraph_node_name (node)), node->symbol.order,
321 xstrdup (cgraph_node_name (cs->callee)),
322 cs->callee->symbol.order);
323 ipa_print_node_jump_functions_for_edge (f, cs);
326 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
328 struct cgraph_indirect_call_info *ii;
329 if (!ipa_edge_args_info_available_for_edge_p (cs))
330 continue;
332 ii = cs->indirect_info;
333 if (ii->agg_contents)
334 fprintf (f, " indirect %s callsite, calling param %i, "
335 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
336 ii->member_ptr ? "member ptr" : "aggregate",
337 ii->param_index, ii->offset,
338 ii->by_ref ? "by reference" : "by_value");
339 else
340 fprintf (f, " indirect %s callsite, calling param %i",
341 ii->polymorphic ? "polymorphic" : "simple", ii->param_index);
343 if (cs->call_stmt)
345 fprintf (f, ", for stmt ");
346 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
348 else
349 fprintf (f, "\n");
350 ipa_print_node_jump_functions_for_edge (f, cs);
354 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
356 void
357 ipa_print_all_jump_functions (FILE *f)
359 struct cgraph_node *node;
361 fprintf (f, "\nJump functions:\n");
362 FOR_EACH_FUNCTION (node)
364 ipa_print_node_jump_functions (f, node);
368 /* Set JFUNC to be a known type jump function. */
370 static void
371 ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
372 tree base_type, tree component_type)
374 gcc_assert (TREE_CODE (component_type) == RECORD_TYPE
375 && TYPE_BINFO (component_type));
376 jfunc->type = IPA_JF_KNOWN_TYPE;
377 jfunc->value.known_type.offset = offset,
378 jfunc->value.known_type.base_type = base_type;
379 jfunc->value.known_type.component_type = component_type;
382 /* Set JFUNC to be a copy of another jmp (to be used by jump function
383 combination code). The two functions will share their rdesc. */
385 static void
386 ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
387 struct ipa_jump_func *src)
390 gcc_checking_assert (src->type == IPA_JF_CONST);
391 dst->type = IPA_JF_CONST;
392 dst->value.constant = src->value.constant;
395 /* Set JFUNC to be a constant jmp function. */
397 static void
398 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
399 struct cgraph_edge *cs)
401 constant = unshare_expr (constant);
402 if (constant && EXPR_P (constant))
403 SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
404 jfunc->type = IPA_JF_CONST;
405 jfunc->value.constant.value = unshare_expr_without_location (constant);
407 if (TREE_CODE (constant) == ADDR_EXPR
408 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
410 struct ipa_cst_ref_desc *rdesc;
411 if (!ipa_refdesc_pool)
412 ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
413 sizeof (struct ipa_cst_ref_desc), 32);
415 rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
416 rdesc->cs = cs;
417 rdesc->next_duplicate = NULL;
418 rdesc->refcount = 1;
419 jfunc->value.constant.rdesc = rdesc;
421 else
422 jfunc->value.constant.rdesc = NULL;
425 /* Set JFUNC to be a simple pass-through jump function. */
426 static void
427 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
428 bool agg_preserved, bool type_preserved)
430 jfunc->type = IPA_JF_PASS_THROUGH;
431 jfunc->value.pass_through.operand = NULL_TREE;
432 jfunc->value.pass_through.formal_id = formal_id;
433 jfunc->value.pass_through.operation = NOP_EXPR;
434 jfunc->value.pass_through.agg_preserved = agg_preserved;
435 jfunc->value.pass_through.type_preserved = type_preserved;
438 /* Set JFUNC to be an arithmetic pass through jump function. */
440 static void
441 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
442 tree operand, enum tree_code operation)
444 jfunc->type = IPA_JF_PASS_THROUGH;
445 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
446 jfunc->value.pass_through.formal_id = formal_id;
447 jfunc->value.pass_through.operation = operation;
448 jfunc->value.pass_through.agg_preserved = false;
449 jfunc->value.pass_through.type_preserved = false;
452 /* Set JFUNC to be an ancestor jump function. */
454 static void
455 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
456 tree type, int formal_id, bool agg_preserved,
457 bool type_preserved)
459 jfunc->type = IPA_JF_ANCESTOR;
460 jfunc->value.ancestor.formal_id = formal_id;
461 jfunc->value.ancestor.offset = offset;
462 jfunc->value.ancestor.type = type;
463 jfunc->value.ancestor.agg_preserved = agg_preserved;
464 jfunc->value.ancestor.type_preserved = type_preserved;
467 /* Extract the acual BINFO being described by JFUNC which must be a known type
468 jump function. */
470 tree
471 ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
473 tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
474 if (!base_binfo)
475 return NULL_TREE;
476 return get_binfo_at_offset (base_binfo,
477 jfunc->value.known_type.offset,
478 jfunc->value.known_type.component_type);
481 /* Structure to be passed in between detect_type_change and
482 check_stmt_for_type_change. */
484 struct type_change_info
486 /* Offset into the object where there is the virtual method pointer we are
487 looking for. */
488 HOST_WIDE_INT offset;
489 /* The declaration or SSA_NAME pointer of the base that we are checking for
490 type change. */
491 tree object;
492 /* If we actually can tell the type that the object has changed to, it is
493 stored in this field. Otherwise it remains NULL_TREE. */
494 tree known_current_type;
495 /* Set to true if dynamic type change has been detected. */
496 bool type_maybe_changed;
497 /* Set to true if multiple types have been encountered. known_current_type
498 must be disregarded in that case. */
499 bool multiple_types_encountered;
502 /* Return true if STMT can modify a virtual method table pointer.
504 This function makes special assumptions about both constructors and
505 destructors which are all the functions that are allowed to alter the VMT
506 pointers. It assumes that destructors begin with assignment into all VMT
507 pointers and that constructors essentially look in the following way:
509 1) The very first thing they do is that they call constructors of ancestor
510 sub-objects that have them.
512 2) Then VMT pointers of this and all its ancestors is set to new values
513 corresponding to the type corresponding to the constructor.
515 3) Only afterwards, other stuff such as constructor of member sub-objects
516 and the code written by the user is run. Only this may include calling
517 virtual functions, directly or indirectly.
519 There is no way to call a constructor of an ancestor sub-object in any
520 other way.
522 This means that we do not have to care whether constructors get the correct
523 type information because they will always change it (in fact, if we define
524 the type to be given by the VMT pointer, it is undefined).
526 The most important fact to derive from the above is that if, for some
527 statement in the section 3, we try to detect whether the dynamic type has
528 changed, we can safely ignore all calls as we examine the function body
529 backwards until we reach statements in section 2 because these calls cannot
530 be ancestor constructors or destructors (if the input is not bogus) and so
531 do not change the dynamic type (this holds true only for automatically
532 allocated objects but at the moment we devirtualize only these). We then
533 must detect that statements in section 2 change the dynamic type and can try
534 to derive the new type. That is enough and we can stop, we will never see
535 the calls into constructors of sub-objects in this code. Therefore we can
536 safely ignore all call statements that we traverse.
539 static bool
540 stmt_may_be_vtbl_ptr_store (gimple stmt)
542 if (is_gimple_call (stmt))
543 return false;
544 else if (is_gimple_assign (stmt))
546 tree lhs = gimple_assign_lhs (stmt);
548 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
550 if (flag_strict_aliasing
551 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
552 return false;
554 if (TREE_CODE (lhs) == COMPONENT_REF
555 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
556 return false;
557 /* In the future we might want to use get_base_ref_and_offset to find
558 if there is a field corresponding to the offset and if so, proceed
559 almost like if it was a component ref. */
562 return true;
565 /* If STMT can be proved to be an assignment to the virtual method table
566 pointer of ANALYZED_OBJ and the type associated with the new table
567 identified, return the type. Otherwise return NULL_TREE. */
569 static tree
570 extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
572 HOST_WIDE_INT offset, size, max_size;
573 tree lhs, rhs, base;
575 if (!gimple_assign_single_p (stmt))
576 return NULL_TREE;
578 lhs = gimple_assign_lhs (stmt);
579 rhs = gimple_assign_rhs1 (stmt);
580 if (TREE_CODE (lhs) != COMPONENT_REF
581 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
582 || TREE_CODE (rhs) != ADDR_EXPR)
583 return NULL_TREE;
584 rhs = get_base_address (TREE_OPERAND (rhs, 0));
585 if (!rhs
586 || TREE_CODE (rhs) != VAR_DECL
587 || !DECL_VIRTUAL_P (rhs))
588 return NULL_TREE;
590 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
591 if (offset != tci->offset
592 || size != POINTER_SIZE
593 || max_size != POINTER_SIZE)
594 return NULL_TREE;
595 if (TREE_CODE (base) == MEM_REF)
597 if (TREE_CODE (tci->object) != MEM_REF
598 || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
599 || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
600 TREE_OPERAND (base, 1)))
601 return NULL_TREE;
603 else if (tci->object != base)
604 return NULL_TREE;
606 return DECL_CONTEXT (rhs);
609 /* Callback of walk_aliased_vdefs and a helper function for
610 detect_type_change to check whether a particular statement may modify
611 the virtual table pointer, and if possible also determine the new type of
612 the (sub-)object. It stores its result into DATA, which points to a
613 type_change_info structure. */
615 static bool
616 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
618 gimple stmt = SSA_NAME_DEF_STMT (vdef);
619 struct type_change_info *tci = (struct type_change_info *) data;
621 if (stmt_may_be_vtbl_ptr_store (stmt))
623 tree type;
624 type = extr_type_from_vtbl_ptr_store (stmt, tci);
625 if (tci->type_maybe_changed
626 && type != tci->known_current_type)
627 tci->multiple_types_encountered = true;
628 tci->known_current_type = type;
629 tci->type_maybe_changed = true;
630 return true;
632 else
633 return false;
638 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
639 callsite CALL) by looking for assignments to its virtual table pointer. If
640 it is, return true and fill in the jump function JFUNC with relevant type
641 information or set it to unknown. ARG is the object itself (not a pointer
642 to it, unless dereferenced). BASE is the base of the memory access as
643 returned by get_ref_base_and_extent, as is the offset. */
645 static bool
646 detect_type_change (tree arg, tree base, tree comp_type, gimple call,
647 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
649 struct type_change_info tci;
650 ao_ref ao;
652 gcc_checking_assert (DECL_P (arg)
653 || TREE_CODE (arg) == MEM_REF
654 || handled_component_p (arg));
655 /* Const calls cannot call virtual methods through VMT and so type changes do
656 not matter. */
657 if (!flag_devirtualize || !gimple_vuse (call)
658 /* Be sure expected_type is polymorphic. */
659 || !comp_type
660 || TREE_CODE (comp_type) != RECORD_TYPE
661 || !TYPE_BINFO (comp_type)
662 || !BINFO_VTABLE (TYPE_BINFO (comp_type)))
663 return false;
665 ao_ref_init (&ao, arg);
666 ao.base = base;
667 ao.offset = offset;
668 ao.size = POINTER_SIZE;
669 ao.max_size = ao.size;
671 tci.offset = offset;
672 tci.object = get_base_address (arg);
673 tci.known_current_type = NULL_TREE;
674 tci.type_maybe_changed = false;
675 tci.multiple_types_encountered = false;
677 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
678 &tci, NULL);
679 if (!tci.type_maybe_changed)
680 return false;
682 if (!tci.known_current_type
683 || tci.multiple_types_encountered
684 || offset != 0)
685 jfunc->type = IPA_JF_UNKNOWN;
686 else
687 ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);
689 return true;
692 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
693 SSA name (its dereference will become the base and the offset is assumed to
694 be zero). */
696 static bool
697 detect_type_change_ssa (tree arg, tree comp_type,
698 gimple call, struct ipa_jump_func *jfunc)
700 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
701 if (!flag_devirtualize
702 || !POINTER_TYPE_P (TREE_TYPE (arg)))
703 return false;
705 arg = build2 (MEM_REF, ptr_type_node, arg,
706 build_int_cst (ptr_type_node, 0));
708 return detect_type_change (arg, arg, comp_type, call, jfunc, 0);
711 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
712 boolean variable pointed to by DATA. */
714 static bool
715 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
716 void *data)
718 bool *b = (bool *) data;
719 *b = true;
720 return true;
723 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
724 a value known not to be modified in this function before reaching the
725 statement STMT. PARM_AINFO is a pointer to a structure containing temporary
726 information about the parameter. */
728 static bool
729 parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
730 gimple stmt, tree parm_load)
732 bool modified = false;
733 bitmap *visited_stmts;
734 ao_ref refd;
736 if (parm_ainfo && parm_ainfo->parm_modified)
737 return false;
739 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
740 ao_ref_init (&refd, parm_load);
741 /* We can cache visited statements only when parm_ainfo is available and when
742 we are looking at a naked load of the whole parameter. */
743 if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
744 visited_stmts = NULL;
745 else
746 visited_stmts = &parm_ainfo->parm_visited_statements;
747 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
748 visited_stmts);
749 if (parm_ainfo && modified)
750 parm_ainfo->parm_modified = true;
751 return !modified;
754 /* If STMT is an assignment that loads a value from an parameter declaration,
755 return the index of the parameter in ipa_node_params which has not been
756 modified. Otherwise return -1. */
758 static int
759 load_from_unmodified_param (vec<ipa_param_descriptor_t> descriptors,
760 struct param_analysis_info *parms_ainfo,
761 gimple stmt)
763 int index;
764 tree op1;
766 if (!gimple_assign_single_p (stmt))
767 return -1;
769 op1 = gimple_assign_rhs1 (stmt);
770 if (TREE_CODE (op1) != PARM_DECL)
771 return -1;
773 index = ipa_get_param_decl_index_1 (descriptors, op1);
774 if (index < 0
775 || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
776 : NULL, stmt, op1))
777 return -1;
779 return index;
782 /* Return true if memory reference REF loads data that are known to be
783 unmodified in this function before reaching statement STMT. PARM_AINFO, if
784 non-NULL, is a pointer to a structure containing temporary information about
785 PARM. */
787 static bool
788 parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
789 gimple stmt, tree ref)
791 bool modified = false;
792 ao_ref refd;
794 gcc_checking_assert (gimple_vuse (stmt));
795 if (parm_ainfo && parm_ainfo->ref_modified)
796 return false;
798 ao_ref_init (&refd, ref);
799 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
800 NULL);
801 if (parm_ainfo && modified)
802 parm_ainfo->ref_modified = true;
803 return !modified;
806 /* Return true if the data pointed to by PARM is known to be unmodified in this
807 function before reaching call statement CALL into which it is passed.
808 PARM_AINFO is a pointer to a structure containing temporary information
809 about PARM. */
811 static bool
812 parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
813 gimple call, tree parm)
815 bool modified = false;
816 ao_ref refd;
818 /* It's unnecessary to calculate anything about memory contnets for a const
819 function because it is not goin to use it. But do not cache the result
820 either. Also, no such calculations for non-pointers. */
821 if (!gimple_vuse (call)
822 || !POINTER_TYPE_P (TREE_TYPE (parm)))
823 return false;
825 if (parm_ainfo->pt_modified)
826 return false;
828 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
829 walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
830 parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
831 if (modified)
832 parm_ainfo->pt_modified = true;
833 return !modified;
836 /* Return true if we can prove that OP is a memory reference loading unmodified
837 data from an aggregate passed as a parameter and if the aggregate is passed
838 by reference, that the alias type of the load corresponds to the type of the
839 formal parameter (so that we can rely on this type for TBAA in callers).
840 INFO and PARMS_AINFO describe parameters of the current function (but the
841 latter can be NULL), STMT is the load statement. If function returns true,
842 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
843 within the aggregate and whether it is a load from a value passed by
844 reference respectively. */
846 static bool
847 ipa_load_from_parm_agg_1 (vec<ipa_param_descriptor_t> descriptors,
848 struct param_analysis_info *parms_ainfo, gimple stmt,
849 tree op, int *index_p, HOST_WIDE_INT *offset_p,
850 bool *by_ref_p)
852 int index;
853 HOST_WIDE_INT size, max_size;
854 tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);
856 if (max_size == -1 || max_size != size || *offset_p < 0)
857 return false;
859 if (DECL_P (base))
861 int index = ipa_get_param_decl_index_1 (descriptors, base);
862 if (index >= 0
863 && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
864 : NULL, stmt, op))
866 *index_p = index;
867 *by_ref_p = false;
868 return true;
870 return false;
873 if (TREE_CODE (base) != MEM_REF
874 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
875 || !integer_zerop (TREE_OPERAND (base, 1)))
876 return false;
878 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
880 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
881 index = ipa_get_param_decl_index_1 (descriptors, parm);
883 else
885 /* This branch catches situations where a pointer parameter is not a
886 gimple register, for example:
888 void hip7(S*) (struct S * p)
890 void (*<T2e4>) (struct S *) D.1867;
891 struct S * p.1;
893 <bb 2>:
894 p.1_1 = p;
895 D.1867_2 = p.1_1->f;
896 D.1867_2 ();
897 gdp = &p;
900 gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
901 index = load_from_unmodified_param (descriptors, parms_ainfo, def);
904 if (index >= 0
905 && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
906 stmt, op))
908 *index_p = index;
909 *by_ref_p = true;
910 return true;
912 return false;
915 /* Just like the previous function, just without the param_analysis_info
916 pointer, for users outside of this file. */
918 bool
919 ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
920 tree op, int *index_p, HOST_WIDE_INT *offset_p,
921 bool *by_ref_p)
923 return ipa_load_from_parm_agg_1 (info->descriptors, NULL, stmt, op, index_p,
924 offset_p, by_ref_p);
927 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
928 of an assignment statement STMT, try to determine whether we are actually
929 handling any of the following cases and construct an appropriate jump
930 function into JFUNC if so:
932 1) The passed value is loaded from a formal parameter which is not a gimple
933 register (most probably because it is addressable, the value has to be
934 scalar) and we can guarantee the value has not changed. This case can
935 therefore be described by a simple pass-through jump function. For example:
937 foo (int a)
939 int a.0;
941 a.0_2 = a;
942 bar (a.0_2);
944 2) The passed value can be described by a simple arithmetic pass-through
945 jump function. E.g.
947 foo (int a)
949 int D.2064;
951 D.2064_4 = a.1(D) + 4;
952 bar (D.2064_4);
954 This case can also occur in combination of the previous one, e.g.:
956 foo (int a, int z)
958 int a.0;
959 int D.2064;
961 a.0_3 = a;
962 D.2064_4 = a.0_3 + 4;
963 foo (D.2064_4);
965 3) The passed value is an address of an object within another one (which
966 also passed by reference). Such situations are described by an ancestor
967 jump function and describe situations such as:
969 B::foo() (struct B * const this)
971 struct A * D.1845;
973 D.1845_2 = &this_1(D)->D.1748;
974 A::bar (D.1845_2);
976 INFO is the structure describing individual parameters access different
977 stages of IPA optimizations. PARMS_AINFO contains the information that is
978 only needed for intraprocedural analysis. */
980 static void
981 compute_complex_assign_jump_func (struct ipa_node_params *info,
982 struct param_analysis_info *parms_ainfo,
983 struct ipa_jump_func *jfunc,
984 gimple call, gimple stmt, tree name,
985 tree param_type)
987 HOST_WIDE_INT offset, size, max_size;
988 tree op1, tc_ssa, base, ssa;
989 int index;
991 op1 = gimple_assign_rhs1 (stmt);
993 if (TREE_CODE (op1) == SSA_NAME)
995 if (SSA_NAME_IS_DEFAULT_DEF (op1))
996 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
997 else
998 index = load_from_unmodified_param (info->descriptors, parms_ainfo,
999 SSA_NAME_DEF_STMT (op1));
1000 tc_ssa = op1;
1002 else
1004 index = load_from_unmodified_param (info->descriptors, parms_ainfo, stmt);
1005 tc_ssa = gimple_assign_lhs (stmt);
1008 if (index >= 0)
1010 tree op2 = gimple_assign_rhs2 (stmt);
1012 if (op2)
1014 if (!is_gimple_ip_invariant (op2)
1015 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
1016 && !useless_type_conversion_p (TREE_TYPE (name),
1017 TREE_TYPE (op1))))
1018 return;
1020 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1021 gimple_assign_rhs_code (stmt));
1023 else if (gimple_assign_single_p (stmt))
1025 bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
1026 call, tc_ssa);
1027 bool type_p = false;
1029 if (param_type && POINTER_TYPE_P (param_type))
1030 type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
1031 call, jfunc);
1032 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1033 ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
1035 return;
1038 if (TREE_CODE (op1) != ADDR_EXPR)
1039 return;
1040 op1 = TREE_OPERAND (op1, 0);
1041 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
1042 return;
1043 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
1044 if (TREE_CODE (base) != MEM_REF
1045 /* If this is a varying address, punt. */
1046 || max_size == -1
1047 || max_size != size)
1048 return;
1049 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
1050 ssa = TREE_OPERAND (base, 0);
1051 if (TREE_CODE (ssa) != SSA_NAME
1052 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
1053 || offset < 0)
1054 return;
1056 /* Dynamic types are changed in constructors and destructors. */
1057 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
1058 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
1060 bool type_p = !detect_type_change (op1, base, TREE_TYPE (param_type),
1061 call, jfunc, offset);
1062 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1063 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index,
1064 parm_ref_data_pass_through_p (&parms_ainfo[index],
1065 call, ssa), type_p);
1069 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1070 it looks like:
1072 iftmp.1_3 = &obj_2(D)->D.1762;
1074 The base of the MEM_REF must be a default definition SSA NAME of a
1075 parameter. Return NULL_TREE if it looks otherwise. If case of success, the
1076 whole MEM_REF expression is returned and the offset calculated from any
1077 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1078 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1080 static tree
1081 get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
1083 HOST_WIDE_INT size, max_size;
1084 tree expr, parm, obj;
1086 if (!gimple_assign_single_p (assign))
1087 return NULL_TREE;
1088 expr = gimple_assign_rhs1 (assign);
1090 if (TREE_CODE (expr) != ADDR_EXPR)
1091 return NULL_TREE;
1092 expr = TREE_OPERAND (expr, 0);
1093 obj = expr;
1094 expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
1096 if (TREE_CODE (expr) != MEM_REF
1097 /* If this is a varying address, punt. */
1098 || max_size == -1
1099 || max_size != size
1100 || *offset < 0)
1101 return NULL_TREE;
1102 parm = TREE_OPERAND (expr, 0);
1103 if (TREE_CODE (parm) != SSA_NAME
1104 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1105 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1106 return NULL_TREE;
1108 *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
1109 *obj_p = obj;
1110 return expr;
1114 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1115 statement PHI, try to find out whether NAME is in fact a
1116 multiple-inheritance typecast from a descendant into an ancestor of a formal
1117 parameter and thus can be described by an ancestor jump function and if so,
1118 write the appropriate function into JFUNC.
1120 Essentially we want to match the following pattern:
1122 if (obj_2(D) != 0B)
1123 goto <bb 3>;
1124 else
1125 goto <bb 4>;
1127 <bb 3>:
1128 iftmp.1_3 = &obj_2(D)->D.1762;
1130 <bb 4>:
1131 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1132 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1133 return D.1879_6; */
1135 static void
1136 compute_complex_ancestor_jump_func (struct ipa_node_params *info,
1137 struct param_analysis_info *parms_ainfo,
1138 struct ipa_jump_func *jfunc,
1139 gimple call, gimple phi, tree param_type)
1141 HOST_WIDE_INT offset;
1142 gimple assign, cond;
1143 basic_block phi_bb, assign_bb, cond_bb;
1144 tree tmp, parm, expr, obj;
1145 int index, i;
1147 if (gimple_phi_num_args (phi) != 2)
1148 return;
1150 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1151 tmp = PHI_ARG_DEF (phi, 0);
1152 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1153 tmp = PHI_ARG_DEF (phi, 1);
1154 else
1155 return;
1156 if (TREE_CODE (tmp) != SSA_NAME
1157 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1158 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1159 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1160 return;
1162 assign = SSA_NAME_DEF_STMT (tmp);
1163 assign_bb = gimple_bb (assign);
1164 if (!single_pred_p (assign_bb))
1165 return;
1166 expr = get_ancestor_addr_info (assign, &obj, &offset);
1167 if (!expr)
1168 return;
1169 parm = TREE_OPERAND (expr, 0);
1170 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1171 gcc_assert (index >= 0);
1173 cond_bb = single_pred (assign_bb);
1174 cond = last_stmt (cond_bb);
1175 if (!cond
1176 || gimple_code (cond) != GIMPLE_COND
1177 || gimple_cond_code (cond) != NE_EXPR
1178 || gimple_cond_lhs (cond) != parm
1179 || !integer_zerop (gimple_cond_rhs (cond)))
1180 return;
1182 phi_bb = gimple_bb (phi);
1183 for (i = 0; i < 2; i++)
1185 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1186 if (pred != assign_bb && pred != cond_bb)
1187 return;
1190 bool type_p = false;
1191 if (param_type && POINTER_TYPE_P (param_type))
1192 type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
1193 call, jfunc, offset);
1194 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1195 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index,
1196 parm_ref_data_pass_through_p (&parms_ainfo[index],
1197 call, parm), type_p);
1200 /* Given OP which is passed as an actual argument to a called function,
1201 determine if it is possible to construct a KNOWN_TYPE jump function for it
1202 and if so, create one and store it to JFUNC.
1203 EXPECTED_TYPE represents a type the argument should be in */
1205 static void
1206 compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
1207 gimple call, tree expected_type)
1209 HOST_WIDE_INT offset, size, max_size;
1210 tree base;
1212 if (!flag_devirtualize
1213 || TREE_CODE (op) != ADDR_EXPR
1214 || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE
1215 /* Be sure expected_type is polymorphic. */
1216 || !expected_type
1217 || TREE_CODE (expected_type) != RECORD_TYPE
1218 || !TYPE_BINFO (expected_type)
1219 || !BINFO_VTABLE (TYPE_BINFO (expected_type)))
1220 return;
1222 op = TREE_OPERAND (op, 0);
1223 base = get_ref_base_and_extent (op, &offset, &size, &max_size);
1224 if (!DECL_P (base)
1225 || max_size == -1
1226 || max_size != size
1227 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
1228 || is_global_var (base))
1229 return;
1231 if (detect_type_change (op, base, expected_type, call, jfunc, offset))
1232 return;
1234 ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
1235 expected_type);
1238 /* Inspect the given TYPE and return true iff it has the same structure (the
1239 same number of fields of the same types) as a C++ member pointer. If
1240 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1241 corresponding fields there. */
1243 static bool
1244 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1246 tree fld;
1248 if (TREE_CODE (type) != RECORD_TYPE)
1249 return false;
1251 fld = TYPE_FIELDS (type);
1252 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1253 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1254 || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
1255 return false;
1257 if (method_ptr)
1258 *method_ptr = fld;
1260 fld = DECL_CHAIN (fld);
1261 if (!fld || INTEGRAL_TYPE_P (fld)
1262 || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
1263 return false;
1264 if (delta)
1265 *delta = fld;
1267 if (DECL_CHAIN (fld))
1268 return false;
1270 return true;
1273 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1274 return the rhs of its defining statement. Otherwise return RHS as it
1275 is. */
1277 static inline tree
1278 get_ssa_def_if_simple_copy (tree rhs)
1280 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1282 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1284 if (gimple_assign_single_p (def_stmt))
1285 rhs = gimple_assign_rhs1 (def_stmt);
1286 else
1287 break;
1289 return rhs;
1292 /* Simple linked list, describing known contents of an aggregate beforere
1293 call. */
1295 struct ipa_known_agg_contents_list
1297 /* Offset and size of the described part of the aggregate. */
1298 HOST_WIDE_INT offset, size;
1299 /* Known constant value or NULL if the contents is known to be unknown. */
1300 tree constant;
1301 /* Pointer to the next structure in the list. */
1302 struct ipa_known_agg_contents_list *next;
1305 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1306 in ARG is filled in with constant values. ARG can either be an aggregate
1307 expression or a pointer to an aggregate. JFUNC is the jump function into
1308 which the constants are subsequently stored. */
1310 static void
1311 determine_known_aggregate_parts (gimple call, tree arg,
1312 struct ipa_jump_func *jfunc)
1314 struct ipa_known_agg_contents_list *list = NULL;
1315 int item_count = 0, const_count = 0;
1316 HOST_WIDE_INT arg_offset, arg_size;
1317 gimple_stmt_iterator gsi;
1318 tree arg_base;
1319 bool check_ref, by_ref;
1320 ao_ref r;
1322 /* The function operates in three stages. First, we prepare check_ref, r,
1323 arg_base and arg_offset based on what is actually passed as an actual
1324 argument. */
1326 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1328 by_ref = true;
1329 if (TREE_CODE (arg) == SSA_NAME)
1331 tree type_size;
1332 if (!host_integerp (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg))), 1))
1333 return;
1334 check_ref = true;
1335 arg_base = arg;
1336 arg_offset = 0;
1337 type_size = TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)));
1338 arg_size = tree_low_cst (type_size, 1);
1339 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1341 else if (TREE_CODE (arg) == ADDR_EXPR)
1343 HOST_WIDE_INT arg_max_size;
1345 arg = TREE_OPERAND (arg, 0);
1346 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1347 &arg_max_size);
1348 if (arg_max_size == -1
1349 || arg_max_size != arg_size
1350 || arg_offset < 0)
1351 return;
1352 if (DECL_P (arg_base))
1354 tree size;
1355 check_ref = false;
1356 size = build_int_cst (integer_type_node, arg_size);
1357 ao_ref_init_from_ptr_and_size (&r, arg_base, size);
1359 else
1360 return;
1362 else
1363 return;
1365 else
1367 HOST_WIDE_INT arg_max_size;
1369 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1371 by_ref = false;
1372 check_ref = false;
1373 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1374 &arg_max_size);
1375 if (arg_max_size == -1
1376 || arg_max_size != arg_size
1377 || arg_offset < 0)
1378 return;
1380 ao_ref_init (&r, arg);
1383 /* Second stage walks back the BB, looks at individual statements and as long
1384 as it is confident of how the statements affect contents of the
1385 aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
1386 describing it. */
1387 gsi = gsi_for_stmt (call);
1388 gsi_prev (&gsi);
1389 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1391 struct ipa_known_agg_contents_list *n, **p;
1392 gimple stmt = gsi_stmt (gsi);
1393 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1394 tree lhs, rhs, lhs_base;
1395 bool partial_overlap;
1397 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1398 continue;
1399 if (!gimple_assign_single_p (stmt))
1400 break;
1402 lhs = gimple_assign_lhs (stmt);
1403 rhs = gimple_assign_rhs1 (stmt);
1404 if (!is_gimple_reg_type (rhs)
1405 || TREE_CODE (lhs) == BIT_FIELD_REF
1406 || contains_bitfld_component_ref_p (lhs))
1407 break;
1409 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1410 &lhs_max_size);
1411 if (lhs_max_size == -1
1412 || lhs_max_size != lhs_size
1413 || (lhs_offset < arg_offset
1414 && lhs_offset + lhs_size > arg_offset)
1415 || (lhs_offset < arg_offset + arg_size
1416 && lhs_offset + lhs_size > arg_offset + arg_size))
1417 break;
1419 if (check_ref)
1421 if (TREE_CODE (lhs_base) != MEM_REF
1422 || TREE_OPERAND (lhs_base, 0) != arg_base
1423 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1424 break;
1426 else if (lhs_base != arg_base)
1428 if (DECL_P (lhs_base))
1429 continue;
1430 else
1431 break;
1434 if (lhs_offset + lhs_size < arg_offset
1435 || lhs_offset >= (arg_offset + arg_size))
1436 continue;
1438 partial_overlap = false;
1439 p = &list;
1440 while (*p && (*p)->offset < lhs_offset)
1442 if ((*p)->offset + (*p)->size > lhs_offset)
1444 partial_overlap = true;
1445 break;
1447 p = &(*p)->next;
1449 if (partial_overlap)
1450 break;
1451 if (*p && (*p)->offset < lhs_offset + lhs_size)
1453 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1454 /* We already know this value is subsequently overwritten with
1455 something else. */
1456 continue;
1457 else
1458 /* Otherwise this is a partial overlap which we cannot
1459 represent. */
1460 break;
1463 rhs = get_ssa_def_if_simple_copy (rhs);
1464 n = XALLOCA (struct ipa_known_agg_contents_list);
1465 n->size = lhs_size;
1466 n->offset = lhs_offset;
1467 if (is_gimple_ip_invariant (rhs))
1469 n->constant = rhs;
1470 const_count++;
1472 else
1473 n->constant = NULL_TREE;
1474 n->next = *p;
1475 *p = n;
1477 item_count++;
1478 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1479 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1480 break;
1483 /* Third stage just goes over the list and creates an appropriate vector of
1484 ipa_agg_jf_item structures out of it, of sourse only if there are
1485 any known constants to begin with. */
1487 if (const_count)
1489 jfunc->agg.by_ref = by_ref;
1490 vec_alloc (jfunc->agg.items, const_count);
1491 while (list)
1493 if (list->constant)
1495 struct ipa_agg_jf_item item;
1496 item.offset = list->offset - arg_offset;
1497 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1498 item.value = unshare_expr_without_location (list->constant);
1499 jfunc->agg.items->quick_push (item);
1501 list = list->next;
1506 static tree
1507 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1509 int n;
1510 tree type = (e->callee
1511 ? TREE_TYPE (e->callee->symbol.decl)
1512 : gimple_call_fntype (e->call_stmt));
1513 tree t = TYPE_ARG_TYPES (type);
1515 for (n = 0; n < i; n++)
1517 if (!t)
1518 break;
1519 t = TREE_CHAIN (t);
1521 if (t)
1522 return TREE_VALUE (t);
1523 if (!e->callee)
1524 return NULL;
1525 t = DECL_ARGUMENTS (e->callee->symbol.decl);
1526 for (n = 0; n < i; n++)
1528 if (!t)
1529 return NULL;
1530 t = TREE_CHAIN (t);
1532 if (t)
1533 return TREE_TYPE (t);
1534 return NULL;
1537 /* Compute jump function for all arguments of callsite CS and insert the
1538 information in the jump_functions array in the ipa_edge_args corresponding
1539 to this callsite. */
1541 static void
1542 ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
1543 struct cgraph_edge *cs)
1545 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1546 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1547 gimple call = cs->call_stmt;
1548 int n, arg_num = gimple_call_num_args (call);
1550 if (arg_num == 0 || args->jump_functions)
1551 return;
1552 vec_safe_grow_cleared (args->jump_functions, arg_num);
1554 if (gimple_call_internal_p (call))
1555 return;
1556 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1557 return;
1559 for (n = 0; n < arg_num; n++)
1561 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1562 tree arg = gimple_call_arg (call, n);
1563 tree param_type = ipa_get_callee_param_type (cs, n);
1565 if (is_gimple_ip_invariant (arg))
1566 ipa_set_jf_constant (jfunc, arg, cs);
1567 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1568 && TREE_CODE (arg) == PARM_DECL)
1570 int index = ipa_get_param_decl_index (info, arg);
1572 gcc_assert (index >=0);
1573 /* Aggregate passed by value, check for pass-through, otherwise we
1574 will attempt to fill in aggregate contents later in this
1575 for cycle. */
1576 if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
1578 ipa_set_jf_simple_pass_through (jfunc, index, false, false);
1579 continue;
1582 else if (TREE_CODE (arg) == SSA_NAME)
1584 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1586 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1587 if (index >= 0)
1589 bool agg_p, type_p;
1590 agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
1591 call, arg);
1592 if (param_type && POINTER_TYPE_P (param_type))
1593 type_p = !detect_type_change_ssa (arg, TREE_TYPE (param_type),
1594 call, jfunc);
1595 else
1596 type_p = false;
1597 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1598 ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
1599 type_p);
1602 else
1604 gimple stmt = SSA_NAME_DEF_STMT (arg);
1605 if (is_gimple_assign (stmt))
1606 compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
1607 call, stmt, arg, param_type);
1608 else if (gimple_code (stmt) == GIMPLE_PHI)
1609 compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
1610 call, stmt, param_type);
1613 else
1614 compute_known_type_jump_func (arg, jfunc, call,
1615 param_type
1616 && POINTER_TYPE_P (param_type)
1617 ? TREE_TYPE (param_type)
1618 : NULL);
1620 if ((jfunc->type != IPA_JF_PASS_THROUGH
1621 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1622 && (jfunc->type != IPA_JF_ANCESTOR
1623 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1624 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1625 || (POINTER_TYPE_P (TREE_TYPE (arg)))))
1626 determine_known_aggregate_parts (call, arg, jfunc);
1630 /* Compute jump functions for all edges - both direct and indirect - outgoing
1631 from NODE. Also count the actual arguments in the process. */
1633 static void
1634 ipa_compute_jump_functions (struct cgraph_node *node,
1635 struct param_analysis_info *parms_ainfo)
1637 struct cgraph_edge *cs;
1639 for (cs = node->callees; cs; cs = cs->next_callee)
1641 struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
1642 NULL);
1643 /* We do not need to bother analyzing calls to unknown
1644 functions unless they may become known during lto/whopr. */
1645 if (!callee->symbol.definition && !flag_lto)
1646 continue;
1647 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
1650 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
1651 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
1654 /* If STMT looks like a statement loading a value from a member pointer formal
1655 parameter, return that parameter and store the offset of the field to
1656 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1657 might be clobbered). If USE_DELTA, then we look for a use of the delta
1658 field rather than the pfn. */
1660 static tree
1661 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
1662 HOST_WIDE_INT *offset_p)
1664 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1666 if (!gimple_assign_single_p (stmt))
1667 return NULL_TREE;
1669 rhs = gimple_assign_rhs1 (stmt);
1670 if (TREE_CODE (rhs) == COMPONENT_REF)
1672 ref_field = TREE_OPERAND (rhs, 1);
1673 rhs = TREE_OPERAND (rhs, 0);
1675 else
1676 ref_field = NULL_TREE;
1677 if (TREE_CODE (rhs) != MEM_REF)
1678 return NULL_TREE;
1679 rec = TREE_OPERAND (rhs, 0);
1680 if (TREE_CODE (rec) != ADDR_EXPR)
1681 return NULL_TREE;
1682 rec = TREE_OPERAND (rec, 0);
1683 if (TREE_CODE (rec) != PARM_DECL
1684 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1685 return NULL_TREE;
1686 ref_offset = TREE_OPERAND (rhs, 1);
1688 if (use_delta)
1689 fld = delta_field;
1690 else
1691 fld = ptr_field;
1692 if (offset_p)
1693 *offset_p = int_bit_position (fld);
1695 if (ref_field)
1697 if (integer_nonzerop (ref_offset))
1698 return NULL_TREE;
1699 return ref_field == fld ? rec : NULL_TREE;
1701 else
1702 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1703 : NULL_TREE;
1706 /* Returns true iff T is an SSA_NAME defined by a statement. */
1708 static bool
1709 ipa_is_ssa_with_stmt_def (tree t)
1711 if (TREE_CODE (t) == SSA_NAME
1712 && !SSA_NAME_IS_DEFAULT_DEF (t))
1713 return true;
1714 else
1715 return false;
1718 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1719 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1720 indirect call graph edge. */
1722 static struct cgraph_edge *
1723 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
1725 struct cgraph_edge *cs;
1727 cs = cgraph_edge (node, stmt);
1728 cs->indirect_info->param_index = param_index;
1729 cs->indirect_info->offset = 0;
1730 cs->indirect_info->polymorphic = 0;
1731 cs->indirect_info->agg_contents = 0;
1732 cs->indirect_info->member_ptr = 0;
1733 return cs;
1736 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1737 (described by INFO). PARMS_AINFO is a pointer to a vector containing
1738 intermediate information about each formal parameter. Currently it checks
1739 whether the call calls a pointer that is a formal parameter and if so, the
1740 parameter is marked with the called flag and an indirect call graph edge
1741 describing the call is created. This is very simple for ordinary pointers
1742 represented in SSA but not-so-nice when it comes to member pointers. The
1743 ugly part of this function does nothing more than trying to match the
1744 pattern of such a call. An example of such a pattern is the gimple dump
1745 below, the call is on the last line:
1747 <bb 2>:
1748 f$__delta_5 = f.__delta;
1749 f$__pfn_24 = f.__pfn;
1752 <bb 2>:
1753 f$__delta_5 = MEM[(struct *)&f];
1754 f$__pfn_24 = MEM[(struct *)&f + 4B];
1756 and a few lines below:
1758 <bb 5>
1759 D.2496_3 = (int) f$__pfn_24;
1760 D.2497_4 = D.2496_3 & 1;
1761 if (D.2497_4 != 0)
1762 goto <bb 3>;
1763 else
1764 goto <bb 4>;
1766 <bb 6>:
1767 D.2500_7 = (unsigned int) f$__delta_5;
1768 D.2501_8 = &S + D.2500_7;
1769 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1770 D.2503_10 = *D.2502_9;
1771 D.2504_12 = f$__pfn_24 + -1;
1772 D.2505_13 = (unsigned int) D.2504_12;
1773 D.2506_14 = D.2503_10 + D.2505_13;
1774 D.2507_15 = *D.2506_14;
1775 iftmp.11_16 = (String:: *) D.2507_15;
1777 <bb 7>:
1778 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1779 D.2500_19 = (unsigned int) f$__delta_5;
1780 D.2508_20 = &S + D.2500_19;
1781 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1783 Such patterns are results of simple calls to a member pointer:
1785 int doprinting (int (MyString::* f)(int) const)
1787 MyString S ("somestring");
1789 return (S.*f)(4);
1792 Moreover, the function also looks for called pointers loaded from aggregates
1793 passed by value or reference. */
1795 static void
1796 ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1797 struct ipa_node_params *info,
1798 struct param_analysis_info *parms_ainfo,
1799 gimple call, tree target)
1801 gimple def;
1802 tree n1, n2;
1803 gimple d1, d2;
1804 tree rec, rec2, cond;
1805 gimple branch;
1806 int index;
1807 basic_block bb, virt_bb, join;
1808 HOST_WIDE_INT offset;
1809 bool by_ref;
1811 if (SSA_NAME_IS_DEFAULT_DEF (target))
1813 tree var = SSA_NAME_VAR (target);
1814 index = ipa_get_param_decl_index (info, var);
1815 if (index >= 0)
1816 ipa_note_param_call (node, index, call);
1817 return;
1820 def = SSA_NAME_DEF_STMT (target);
1821 if (gimple_assign_single_p (def)
1822 && ipa_load_from_parm_agg_1 (info->descriptors, parms_ainfo, def,
1823 gimple_assign_rhs1 (def), &index, &offset,
1824 &by_ref))
1826 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1827 cs->indirect_info->offset = offset;
1828 cs->indirect_info->agg_contents = 1;
1829 cs->indirect_info->by_ref = by_ref;
1830 return;
1833 /* Now we need to try to match the complex pattern of calling a member
1834 pointer. */
1835 if (gimple_code (def) != GIMPLE_PHI
1836 || gimple_phi_num_args (def) != 2
1837 || !POINTER_TYPE_P (TREE_TYPE (target))
1838 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1839 return;
1841 /* First, we need to check whether one of these is a load from a member
1842 pointer that is a parameter to this function. */
1843 n1 = PHI_ARG_DEF (def, 0);
1844 n2 = PHI_ARG_DEF (def, 1);
1845 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1846 return;
1847 d1 = SSA_NAME_DEF_STMT (n1);
1848 d2 = SSA_NAME_DEF_STMT (n2);
1850 join = gimple_bb (def);
1851 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
1853 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
1854 return;
1856 bb = EDGE_PRED (join, 0)->src;
1857 virt_bb = gimple_bb (d2);
1859 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
1861 bb = EDGE_PRED (join, 1)->src;
1862 virt_bb = gimple_bb (d1);
1864 else
1865 return;
1867 /* Second, we need to check that the basic blocks are laid out in the way
1868 corresponding to the pattern. */
1870 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1871 || single_pred (virt_bb) != bb
1872 || single_succ (virt_bb) != join)
1873 return;
1875 /* Third, let's see that the branching is done depending on the least
1876 significant bit of the pfn. */
1878 branch = last_stmt (bb);
1879 if (!branch || gimple_code (branch) != GIMPLE_COND)
1880 return;
1882 if ((gimple_cond_code (branch) != NE_EXPR
1883 && gimple_cond_code (branch) != EQ_EXPR)
1884 || !integer_zerop (gimple_cond_rhs (branch)))
1885 return;
1887 cond = gimple_cond_lhs (branch);
1888 if (!ipa_is_ssa_with_stmt_def (cond))
1889 return;
1891 def = SSA_NAME_DEF_STMT (cond);
1892 if (!is_gimple_assign (def)
1893 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1894 || !integer_onep (gimple_assign_rhs2 (def)))
1895 return;
1897 cond = gimple_assign_rhs1 (def);
1898 if (!ipa_is_ssa_with_stmt_def (cond))
1899 return;
1901 def = SSA_NAME_DEF_STMT (cond);
1903 if (is_gimple_assign (def)
1904 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
1906 cond = gimple_assign_rhs1 (def);
1907 if (!ipa_is_ssa_with_stmt_def (cond))
1908 return;
1909 def = SSA_NAME_DEF_STMT (cond);
1912 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1913 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1914 == ptrmemfunc_vbit_in_delta),
1915 NULL);
1916 if (rec != rec2)
1917 return;
1919 index = ipa_get_param_decl_index (info, rec);
1920 if (index >= 0
1921 && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
1923 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1924 cs->indirect_info->offset = offset;
1925 cs->indirect_info->agg_contents = 1;
1926 cs->indirect_info->member_ptr = 1;
1929 return;
1932 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1933 object referenced in the expression is a formal parameter of the caller
1934 (described by INFO), create a call note for the statement. */
1936 static void
1937 ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1938 struct ipa_node_params *info, gimple call,
1939 tree target)
1941 struct cgraph_edge *cs;
1942 struct cgraph_indirect_call_info *ii;
1943 struct ipa_jump_func jfunc;
1944 tree obj = OBJ_TYPE_REF_OBJECT (target);
1945 int index;
1946 HOST_WIDE_INT anc_offset;
1948 if (!flag_devirtualize)
1949 return;
1951 if (TREE_CODE (obj) != SSA_NAME)
1952 return;
1954 if (SSA_NAME_IS_DEFAULT_DEF (obj))
1956 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
1957 return;
1959 anc_offset = 0;
1960 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
1961 gcc_assert (index >= 0);
1962 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
1963 call, &jfunc))
1964 return;
1966 else
1968 gimple stmt = SSA_NAME_DEF_STMT (obj);
1969 tree expr;
1971 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
1972 if (!expr)
1973 return;
1974 index = ipa_get_param_decl_index (info,
1975 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
1976 gcc_assert (index >= 0);
1977 if (detect_type_change (obj, expr, obj_type_ref_class (target),
1978 call, &jfunc, anc_offset))
1979 return;
1982 cs = ipa_note_param_call (node, index, call);
1983 ii = cs->indirect_info;
1984 ii->offset = anc_offset;
1985 ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
1986 ii->otr_type = obj_type_ref_class (target);
1987 ii->polymorphic = 1;
1990 /* Analyze a call statement CALL whether and how it utilizes formal parameters
1991 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
1992 containing intermediate information about each formal parameter. */
1994 static void
1995 ipa_analyze_call_uses (struct cgraph_node *node,
1996 struct ipa_node_params *info,
1997 struct param_analysis_info *parms_ainfo, gimple call)
1999 tree target = gimple_call_fn (call);
2001 if (!target)
2002 return;
2003 if (TREE_CODE (target) == SSA_NAME)
2004 ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
2005 else if (virtual_method_call_p (target))
2006 ipa_analyze_virtual_call_uses (node, info, call, target);
2010 /* Analyze the call statement STMT with respect to formal parameters (described
2011 in INFO) of caller given by NODE. Currently it only checks whether formal
2012 parameters are called. PARMS_AINFO is a pointer to a vector containing
2013 intermediate information about each formal parameter. */
2015 static void
2016 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
2017 struct param_analysis_info *parms_ainfo, gimple stmt)
2019 if (is_gimple_call (stmt))
2020 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
2023 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2024 If OP is a parameter declaration, mark it as used in the info structure
2025 passed in DATA. */
2027 static bool
2028 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
2029 tree op, void *data)
2031 struct ipa_node_params *info = (struct ipa_node_params *) data;
2033 op = get_base_address (op);
2034 if (op
2035 && TREE_CODE (op) == PARM_DECL)
2037 int index = ipa_get_param_decl_index (info, op);
2038 gcc_assert (index >= 0);
2039 ipa_set_param_used (info, index, true);
2042 return false;
2045 /* Scan the function body of NODE and inspect the uses of formal parameters.
2046 Store the findings in various structures of the associated ipa_node_params
2047 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
2048 vector containing intermediate information about each formal parameter. */
/* Compute use and controlled-use information for every formal parameter of
   NODE, storing the results in NODE's ipa_node_params.  PARMS_AINFO holds
   per-parameter scratch data used by the modification analysis.  */
2050 static void
2051 ipa_analyze_params_uses (struct cgraph_node *node,
2052 struct param_analysis_info *parms_ainfo)
2054 tree decl = node->symbol.decl;
2055 basic_block bb;
2056 struct function *func;
2057 gimple_stmt_iterator gsi;
2058 struct ipa_node_params *info = IPA_NODE_REF (node);
2059 int i;
/* Nothing to do for parameterless functions or if already computed.  */
2061 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
2062 return;
2064 info->uses_analysis_done = 1;
/* When options forbid the analysis for this node, conservatively mark every
   parameter as used with an undescribed number of uses.  */
2065 if (ipa_func_spec_opts_forbid_analysis_p (node))
2067 for (i = 0; i < ipa_get_param_count (info); i++)
2069 ipa_set_param_used (info, i, true);
2070 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2072 return;
2075 for (i = 0; i < ipa_get_param_count (info); i++)
2077 tree parm = ipa_get_param (info, i);
2078 int controlled_uses = 0;
2080 /* For SSA regs see if parameter is used. For non-SSA we compute
2081 the flag during modification analysis. */
2082 if (is_gimple_reg (parm))
2084 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->symbol.decl),
2085 parm);
2086 if (ddef && !has_zero_uses (ddef))
2088 imm_use_iterator imm_iter;
2089 use_operand_p use_p;
/* Count immediate uses of the default definition; any use that is not a
   call argument makes the number of controlled uses undescribable.  */
2091 ipa_set_param_used (info, i, true);
2092 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2093 if (!is_gimple_call (USE_STMT (use_p)))
2095 controlled_uses = IPA_UNDESCRIBED_USE;
2096 break;
2098 else
2099 controlled_uses++;
2101 else
2102 controlled_uses = 0;
2104 else
2105 controlled_uses = IPA_UNDESCRIBED_USE;
2106 ipa_set_controlled_uses (info, i, controlled_uses);
/* Walk all statements and PHIs, recording loads, stores and address-taking
   operations on parameters (debug statements are skipped).  */
2109 func = DECL_STRUCT_FUNCTION (decl);
2110 FOR_EACH_BB_FN (bb, func)
2112 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2114 gimple stmt = gsi_stmt (gsi);
2116 if (is_gimple_debug (stmt))
2117 continue;
2119 ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
2120 walk_stmt_load_store_addr_ops (stmt, info,
2121 visit_ref_for_mod_analysis,
2122 visit_ref_for_mod_analysis,
2123 visit_ref_for_mod_analysis);
2125 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2126 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
2127 visit_ref_for_mod_analysis,
2128 visit_ref_for_mod_analysis,
2129 visit_ref_for_mod_analysis);
2133 /* Free stuff in PARMS_AINFO, assume there are PARAM_COUNT parameters. */
2135 static void
2136 free_parms_ainfo (struct param_analysis_info *parms_ainfo, int param_count)
2138 int i;
2140 for (i = 0; i < param_count; i++)
2142 if (parms_ainfo[i].parm_visited_statements)
2143 BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
2144 if (parms_ainfo[i].pt_visited_statements)
2145 BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
2149 /* Initialize the array describing properties of of formal parameters
2150 of NODE, analyze their uses and compute jump functions associated
2151 with actual arguments of calls from within NODE. */
2153 void
2154 ipa_analyze_node (struct cgraph_node *node)
2156 struct ipa_node_params *info;
2157 struct param_analysis_info *parms_ainfo;
2158 int param_count;
2160 ipa_check_create_node_params ();
2161 ipa_check_create_edge_args ();
2162 info = IPA_NODE_REF (node);
2163 push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
2164 ipa_initialize_node_params (node);
2166 param_count = ipa_get_param_count (info);
2167 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
2168 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
2170 ipa_analyze_params_uses (node, parms_ainfo);
2171 ipa_compute_jump_functions (node, parms_ainfo);
2173 free_parms_ainfo (parms_ainfo, param_count);
2174 pop_cfun ();
2177 /* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF
2178 attempt a type-based devirtualization. If successful, return the
2179 target function declaration, otherwise return NULL. */
2181 tree
2182 ipa_intraprocedural_devirtualization (gimple call)
2184 tree binfo, token, fndecl;
2185 struct ipa_jump_func jfunc;
2186 tree otr = gimple_call_fn (call);
/* Build a known-type jump function for the OBJ_TYPE_REF object; if the type
   cannot be determined, give up.  */
2188 jfunc.type = IPA_JF_UNKNOWN;
2189 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
2190 call, obj_type_ref_class (otr));
2191 if (jfunc.type != IPA_JF_KNOWN_TYPE)
2192 return NULL_TREE;
2193 binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
2194 if (!binfo)
2195 return NULL_TREE;
/* Look the virtual method up in the binfo using the vtable slot token.  */
2196 token = OBJ_TYPE_REF_TOKEN (otr);
2197 fndecl = gimple_get_virt_method_for_binfo (tree_low_cst (token, 1),
2198 binfo);
/* In checking builds, verify the answer against the set of possible
   polymorphic call targets.  */
2199 #ifdef ENABLE_CHECKING
2200 if (fndecl)
2201 gcc_assert (possible_polymorphic_call_target_p
2202 (otr, cgraph_get_node (fndecl)));
2203 #endif
2204 return fndecl;
2207 /* Update the jump function DST when the call graph edge corresponding to SRC is
2208 is being inlined, knowing that DST is of type ancestor and src of known
2209 type. */
2211 static void
2212 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2213 struct ipa_jump_func *dst)
2215 HOST_WIDE_INT combined_offset;
2216 tree combined_type;
2218 if (!ipa_get_jf_ancestor_type_preserved (dst))
2220 dst->type = IPA_JF_UNKNOWN;
2221 return;
2224 combined_offset = ipa_get_jf_known_type_offset (src)
2225 + ipa_get_jf_ancestor_offset (dst);
2226 combined_type = ipa_get_jf_ancestor_type (dst);
2228 ipa_set_jf_known_type (dst, combined_offset,
2229 ipa_get_jf_known_type_base_type (src),
2230 combined_type);
2233 /* Update the jump functions associated with call graph edge E when the call
2234 graph edge CS is being inlined, assuming that E->caller is already (possibly
2235 indirectly) inlined into CS->callee and that E has not been inlined. */
2237 static void
2238 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2239 struct cgraph_edge *e)
2241 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2242 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2243 int count = ipa_get_cs_argument_count (args);
2244 int i;
/* For each argument of E, compose its jump function DST with the jump
   function SRC of the corresponding argument of the inlined edge CS.  */
2246 for (i = 0; i < count; i++)
2248 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2250 if (dst->type == IPA_JF_ANCESTOR)
2252 struct ipa_jump_func *src;
2253 int dst_fid = dst->value.ancestor.formal_id;
2255 /* Variable number of arguments can cause havoc if we try to access
2256 one that does not exist in the inlined edge. So make sure we
2257 don't. */
2258 if (dst_fid >= ipa_get_cs_argument_count (top))
2260 dst->type = IPA_JF_UNKNOWN;
2261 continue;
2264 src = ipa_get_ith_jump_func (top, dst_fid);
/* Copy SRC's aggregate items when they survive the ancestor adjustment,
   re-basing their offsets by the ancestor offset.  */
2266 if (src->agg.items
2267 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2269 struct ipa_agg_jf_item *item;
2270 int j;
2272 /* Currently we do not produce clobber aggregate jump functions,
2273 replace with merging when we do. */
2274 gcc_assert (!dst->agg.items);
2276 dst->agg.items = vec_safe_copy (src->agg.items);
2277 dst->agg.by_ref = src->agg.by_ref;
2278 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2279 item->offset -= dst->value.ancestor.offset;
/* Compose ancestor DST with SRC depending on SRC's kind; the preserved
   flags are only kept when both functions preserve them.  */
2282 if (src->type == IPA_JF_KNOWN_TYPE)
2283 combine_known_type_and_ancestor_jfs (src, dst);
2284 else if (src->type == IPA_JF_PASS_THROUGH
2285 && src->value.pass_through.operation == NOP_EXPR)
2287 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2288 dst->value.ancestor.agg_preserved &=
2289 src->value.pass_through.agg_preserved;
2290 dst->value.ancestor.type_preserved &=
2291 src->value.pass_through.type_preserved;
2293 else if (src->type == IPA_JF_ANCESTOR)
2295 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2296 dst->value.ancestor.offset += src->value.ancestor.offset;
2297 dst->value.ancestor.agg_preserved &=
2298 src->value.ancestor.agg_preserved;
2299 dst->value.ancestor.type_preserved &=
2300 src->value.ancestor.type_preserved;
2302 else
2303 dst->type = IPA_JF_UNKNOWN;
2305 else if (dst->type == IPA_JF_PASS_THROUGH)
2307 struct ipa_jump_func *src;
2308 /* We must check range due to calls with variable number of arguments
2309 and we cannot combine jump functions with operations. */
2310 if (dst->value.pass_through.operation == NOP_EXPR
2311 && (dst->value.pass_through.formal_id
2312 < ipa_get_cs_argument_count (top)))
2314 int dst_fid = dst->value.pass_through.formal_id;
2315 src = ipa_get_ith_jump_func (top, dst_fid);
2316 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
/* A simple pass-through composes to a copy of SRC, with preserved
   flags weakened by DST's own flags.  */
2318 switch (src->type)
2320 case IPA_JF_UNKNOWN:
2321 dst->type = IPA_JF_UNKNOWN;
2322 break;
2323 case IPA_JF_KNOWN_TYPE:
2324 ipa_set_jf_known_type (dst,
2325 ipa_get_jf_known_type_offset (src),
2326 ipa_get_jf_known_type_base_type (src),
2327 ipa_get_jf_known_type_base_type (src));
2328 break;
2329 case IPA_JF_CONST:
2330 ipa_set_jf_cst_copy (dst, src);
2331 break;
2333 case IPA_JF_PASS_THROUGH:
2335 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2336 enum tree_code operation;
2337 operation = ipa_get_jf_pass_through_operation (src);
2339 if (operation == NOP_EXPR)
2341 bool agg_p, type_p;
2342 agg_p = dst_agg_p
2343 && ipa_get_jf_pass_through_agg_preserved (src);
2344 type_p = ipa_get_jf_pass_through_type_preserved (src)
2345 && ipa_get_jf_pass_through_type_preserved (dst);
2346 ipa_set_jf_simple_pass_through (dst, formal_id,
2347 agg_p, type_p);
2349 else
2351 tree operand = ipa_get_jf_pass_through_operand (src);
2352 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2353 operation);
2355 break;
2357 case IPA_JF_ANCESTOR:
2359 bool agg_p, type_p;
2360 agg_p = dst_agg_p
2361 && ipa_get_jf_ancestor_agg_preserved (src);
2362 type_p = ipa_get_jf_ancestor_type_preserved (src)
2363 && ipa_get_jf_pass_through_type_preserved (dst);
2364 ipa_set_ancestor_jf (dst,
2365 ipa_get_jf_ancestor_offset (src),
2366 ipa_get_jf_ancestor_type (src),
2367 ipa_get_jf_ancestor_formal_id (src),
2368 agg_p, type_p);
2369 break;
2371 default:
2372 gcc_unreachable ();
/* Inherit SRC's aggregate items where the pass-through preserves them.  */
2375 if (src->agg.items
2376 && (dst_agg_p || !src->agg.by_ref))
2378 /* Currently we do not produce clobber aggregate jump
2379 functions, replace with merging when we do. */
2380 gcc_assert (!dst->agg.items);
2382 dst->agg.by_ref = src->agg.by_ref;
2383 dst->agg.items = vec_safe_copy (src->agg.items);
2386 else
2387 dst->type = IPA_JF_UNKNOWN;
2392 /* If TARGET is an addr_expr of a function declaration, make it the destination
2393 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2395 struct cgraph_edge *
2396 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
2398 struct cgraph_node *callee;
2399 struct inline_edge_summary *es = inline_edge_summary (ie);
2400 bool unreachable = false;
/* Strip the ADDR_EXPR and try to canonicalize anything that is not yet a
   FUNCTION_DECL.  A call to a non-function target is replaced by a call to
   __builtin_unreachable.  */
2402 if (TREE_CODE (target) == ADDR_EXPR)
2403 target = TREE_OPERAND (target, 0);
2404 if (TREE_CODE (target) != FUNCTION_DECL)
2406 target = canonicalize_constructor_val (target, NULL);
2407 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2409 if (ie->indirect_info->member_ptr)
2410 /* Member pointer call that goes through a VMT lookup. */
2411 return NULL;
2413 if (dump_file)
2414 fprintf (dump_file, "ipa-prop: Discovered direct call to non-function"
2415 " in %s/%i, making it unreachable.\n",
2416 cgraph_node_name (ie->caller), ie->caller->symbol.order);
2417 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2418 callee = cgraph_get_create_node (target);
2419 unreachable = true;
2421 else
2422 callee = cgraph_get_node (target);
2424 else
2425 callee = cgraph_get_node (target);
2427 /* Because may-edges are not explicitely represented and vtable may be external,
2428 we may create the first reference to the object in the unit. */
2429 if (!callee || callee->global.inlined_to)
2432 /* We are better to ensure we can refer to it.
2433 In the case of static functions we are out of luck, since we already
2434 removed its body. In the case of public functions we may or may
2435 not introduce the reference. */
2436 if (!canonicalize_constructor_val (target, NULL)
2437 || !TREE_PUBLIC (target))
2439 if (dump_file)
2440 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2441 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2442 xstrdup (cgraph_node_name (ie->caller)),
2443 ie->caller->symbol.order,
2444 xstrdup (cgraph_node_name (ie->callee)),
2445 ie->callee->symbol.order);
2446 return NULL;
2448 callee = cgraph_get_create_real_symbol_node (target);
2450 ipa_check_create_node_params ();
2452 /* We can not make edges to inline clones. It is bug that someone removed
2453 the cgraph node too early. */
2454 gcc_assert (!callee->global.inlined_to);
2456 if (dump_file && !unreachable)
2458 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
2459 "(%s/%i -> %s/%i), for stmt ",
2460 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2461 xstrdup (cgraph_node_name (ie->caller)),
2462 ie->caller->symbol.order,
2463 xstrdup (cgraph_node_name (callee)),
2464 callee->symbol.order);
2465 if (ie->call_stmt)
2466 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2467 else
2468 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
/* Turn the indirect edge into a direct one and account for the cheaper
   direct-call cost in the inline summary.  */
2470 ie = cgraph_make_edge_direct (ie, callee);
2471 es = inline_edge_summary (ie);
2472 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2473 - eni_size_weights.call_cost);
2474 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2475 - eni_time_weights.call_cost);
2477 return ie;
2480 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2481 return NULL if there is not any. BY_REF specifies whether the value has to
2482 be passed by reference or by value. */
2484 tree
2485 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2486 HOST_WIDE_INT offset, bool by_ref)
2488 struct ipa_agg_jf_item *item;
2489 int i;
2491 if (by_ref != agg->by_ref)
2492 return NULL;
2494 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2495 if (item->offset == offset)
2497 /* Currently we do not have clobber values, return NULL for them once
2498 we do. */
2499 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2500 return item->value;
2502 return NULL;
2505 /* Remove a reference to SYMBOL from the list of references of a node given by
2506 reference description RDESC. Return true if the reference has been
2507 successfully found and removed. */
2509 static bool
2510 remove_described_reference (symtab_node symbol, struct ipa_cst_ref_desc *rdesc)
2512 struct ipa_ref *to_del;
2513 struct cgraph_edge *origin;
2515 origin = rdesc->cs;
2516 if (!origin)
2517 return false;
2518 to_del = ipa_find_reference ((symtab_node) origin->caller, symbol,
2519 origin->call_stmt, origin->lto_stmt_uid);
2520 if (!to_del)
2521 return false;
2523 ipa_remove_reference (to_del);
2524 if (dump_file)
2525 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2526 xstrdup (cgraph_node_name (origin->caller)),
2527 origin->caller->symbol.order, xstrdup (symtab_node_name (symbol)));
2528 return true;
2531 /* If JFUNC has a reference description with refcount different from
2532 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2533 NULL. JFUNC must be a constant jump function. */
2535 static struct ipa_cst_ref_desc *
2536 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2538 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2539 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2540 return rdesc;
2541 else
2542 return NULL;
2545 /* If the value of constant jump function JFUNC is an address of a function
2546 declaration, return the associated call graph node. Otherwise return
2547 NULL. */
2549 static cgraph_node *
2550 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2552 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2553 tree cst = ipa_get_jf_constant (jfunc);
2554 if (TREE_CODE (cst) != ADDR_EXPR
2555 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2556 return NULL;
2558 return cgraph_get_node (TREE_OPERAND (cst, 0));
/* If JFUNC is a constant jump function with a usable rdesc, decrement its
   refcount and, if it hits zero, remove the reference to the symbol the
   constant refers to from the caller of the edge specified in the rdesc.
   Return false if either the symbol or the reference could not be found,
   otherwise return true.  */
2567 static bool
2568 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2570 struct ipa_cst_ref_desc *rdesc;
/* Note the side effect: --rdesc->refcount happens whenever the first two
   conditions hold, even if it does not reach zero.  */
2571 if (jfunc->type == IPA_JF_CONST
2572 && (rdesc = jfunc_rdesc_usable (jfunc))
2573 && --rdesc->refcount == 0)
2575 symtab_node symbol = (symtab_node) cgraph_node_for_jfunc (jfunc);
2576 if (!symbol)
2577 return false;
2579 return remove_described_reference (symbol, rdesc);
2581 return true;
2584 /* Try to find a destination for indirect edge IE that corresponds to a simple
2585 call or a call of a member function pointer and where the destination is a
2586 pointer formal parameter described by jump function JFUNC. If it can be
2587 determined, return the newly direct edge, otherwise return NULL.
2588 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2590 static struct cgraph_edge *
2591 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2592 struct ipa_jump_func *jfunc,
2593 struct ipa_node_params *new_root_info)
2595 struct cgraph_edge *cs;
2596 tree target;
2597 bool agg_contents = ie->indirect_info->agg_contents;
/* The target is either stored in the aggregate passed in the parameter or
   is the value of the parameter itself.  */
2599 if (ie->indirect_info->agg_contents)
2600 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2601 ie->indirect_info->offset,
2602 ie->indirect_info->by_ref);
2603 else
2604 target = ipa_value_from_jfunc (new_root_info, jfunc);
2605 if (!target)
2606 return NULL;
2607 cs = ipa_make_edge_direct_to_target (ie, target);
/* A direct edge consumed one controlled use of the constant; drop the
   corresponding rdesc refcount.  */
2609 if (cs && !agg_contents)
2611 bool ok;
2612 gcc_checking_assert (cs->callee
2613 && (cs != ie
2614 || jfunc->type != IPA_JF_CONST
2615 || !cgraph_node_for_jfunc (jfunc)
2616 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2617 ok = try_decrement_rdesc_refcount (jfunc);
2618 gcc_checking_assert (ok);
2621 return cs;
2624 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2625 call based on a formal parameter which is described by jump function JFUNC
2626 and if it can be determined, make it direct and return the direct edge.
2627 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
2628 are relative to. */
2630 static struct cgraph_edge *
2631 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2632 struct ipa_jump_func *jfunc,
2633 struct ipa_node_params *new_root_info)
2635 tree binfo, target;
2637 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
2639 if (!binfo)
2640 return NULL;
/* A non-BINFO value may still be a constant from which a binfo can be
   extracted (e.g. a vtable address).  */
2642 if (TREE_CODE (binfo) != TREE_BINFO)
2644 binfo = gimple_extract_devirt_binfo_from_cst
2645 (binfo, ie->indirect_info->otr_type);
2646 if (!binfo)
2647 return NULL;
/* Walk down to the binfo at the call's offset and look the method up by
   its OBJ_TYPE_REF token.  */
2650 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
2651 ie->indirect_info->otr_type);
2652 if (binfo)
2653 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2654 binfo);
2655 else
2656 return NULL;
2658 if (target)
2660 #ifdef ENABLE_CHECKING
2661 gcc_assert (possible_polymorphic_call_target_p
2662 (ie, cgraph_get_node (target)));
2663 #endif
2664 return ipa_make_edge_direct_to_target (ie, target);
2666 else
2667 return NULL;
2670 /* Update the param called notes associated with NODE when CS is being inlined,
2671 assuming NODE is (potentially indirectly) inlined into CS->callee.
2672 Moreover, if the callee is discovered to be constant, create a new cgraph
2673 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2674 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
2676 static bool
2677 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2678 struct cgraph_node *node,
2679 vec<cgraph_edge_p> *new_edges)
2681 struct ipa_edge_args *top;
2682 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
2683 struct ipa_node_params *new_root_info;
2684 bool res = false;
2686 ipa_check_create_edge_args ();
2687 top = IPA_EDGE_REF (cs);
/* Jump function lattices are relative to the new root of the inline tree.  */
2688 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
2689 ? cs->caller->global.inlined_to
2690 : cs->caller);
/* NEXT_IE is cached because IE may be turned into a direct edge and thus
   removed from the indirect-call list below.  */
2692 for (ie = node->indirect_calls; ie; ie = next_ie)
2694 struct cgraph_indirect_call_info *ici = ie->indirect_info;
2695 struct ipa_jump_func *jfunc;
2696 int param_index;
2698 next_ie = ie->next_callee;
2700 if (ici->param_index == -1)
2701 continue;
2703 /* We must check range due to calls with variable number of arguments: */
2704 if (ici->param_index >= ipa_get_cs_argument_count (top))
2706 ici->param_index = -1;
2707 continue;
2710 param_index = ici->param_index;
2711 jfunc = ipa_get_ith_jump_func (top, param_index);
2713 if (!flag_indirect_inlining)
2714 new_direct_edge = NULL;
2715 else if (ici->polymorphic)
2716 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
2717 new_root_info);
2718 else
2719 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
2720 new_root_info);
2721 /* If speculation was removed, then we need to do nothing. */
2722 if (new_direct_edge && new_direct_edge != ie)
2724 new_direct_edge->indirect_inlining_edge = 1;
/* Re-fetch TOP; edge summaries may have been reallocated by the edge
   manipulation above.  */
2725 top = IPA_EDGE_REF (cs);
2726 res = true;
2728 else if (new_direct_edge)
2730 new_direct_edge->indirect_inlining_edge = 1;
2731 if (new_direct_edge->call_stmt)
2732 new_direct_edge->call_stmt_cannot_inline_p
2733 = !gimple_check_call_matching_types (
2734 new_direct_edge->call_stmt,
2735 new_direct_edge->callee->symbol.decl, false);
2736 if (new_edges)
2738 new_edges->safe_push (new_direct_edge);
2739 res = true;
2741 top = IPA_EDGE_REF (cs);
/* No direct target yet: translate the parameter index through pass-through
   and ancestor jump functions so it is relative to the new root.  */
2743 else if (jfunc->type == IPA_JF_PASS_THROUGH
2744 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2746 if (ici->agg_contents
2747 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
2748 ici->param_index = -1;
2749 else
2750 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
2752 else if (jfunc->type == IPA_JF_ANCESTOR)
2754 if (ici->agg_contents
2755 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
2756 ici->param_index = -1;
2757 else
2759 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
2760 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
2763 else
2764 /* Either we can find a destination for this edge now or never. */
2765 ici->param_index = -1;
2768 return res;
2771 /* Recursively traverse subtree of NODE (including node) made of inlined
2772 cgraph_edges when CS has been inlined and invoke
2773 update_indirect_edges_after_inlining on all nodes and
2774 update_jump_functions_after_inlining on all non-inlined edges that lead out
2775 of this subtree. Newly discovered indirect edges will be added to
2776 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
2777 created. */
2779 static bool
2780 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
2781 struct cgraph_node *node,
2782 vec<cgraph_edge_p> *new_edges)
2784 struct cgraph_edge *e;
2785 bool res;
2787 res = update_indirect_edges_after_inlining (cs, node, new_edges);
2789 for (e = node->callees; e; e = e->next_callee)
2790 if (!e->inline_failed)
2791 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
2792 else
2793 update_jump_functions_after_inlining (cs, e);
2794 for (e = node->indirect_calls; e; e = e->next_callee)
2795 update_jump_functions_after_inlining (cs, e);
2797 return res;
2800 /* Combine two controlled uses counts as done during inlining. */
2802 static int
2803 combine_controlled_uses_counters (int c, int d)
2805 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
2806 return IPA_UNDESCRIBED_USE;
2807 else
2808 return c + d - 1;
2811 /* Propagate number of controlled users from CS->callee to the new root of the
2812 tree of inlined nodes. */
2814 static void
2815 propagate_controlled_uses (struct cgraph_edge *cs)
2817 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
2818 struct cgraph_node *new_root = cs->caller->global.inlined_to
2819 ? cs->caller->global.inlined_to : cs->caller;
2820 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
2821 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
2822 int count, i;
/* Process arguments that have a matching formal parameter in the callee.  */
2824 count = MIN (ipa_get_cs_argument_count (args),
2825 ipa_get_param_count (old_root_info));
2826 for (i = 0; i < count; i++)
2828 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2829 struct ipa_cst_ref_desc *rdesc;
/* A pass-through moves the callee's count onto the corresponding parameter
   of the new root.  */
2831 if (jf->type == IPA_JF_PASS_THROUGH)
2833 int src_idx, c, d;
2834 src_idx = ipa_get_jf_pass_through_formal_id (jf);
2835 c = ipa_get_controlled_uses (new_root_info, src_idx);
2836 d = ipa_get_controlled_uses (old_root_info, i);
2838 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
2839 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
2840 c = combine_controlled_uses_counters (c, d);
2841 ipa_set_controlled_uses (new_root_info, src_idx, c);
/* If no controlled uses remain in an IPA-CP clone, the cloning-created
   reference to the known function value can be removed.  */
2842 if (c == 0 && new_root_info->ipcp_orig_node)
2844 struct cgraph_node *n;
2845 struct ipa_ref *ref;
2846 tree t = new_root_info->known_vals[src_idx];
2848 if (t && TREE_CODE (t) == ADDR_EXPR
2849 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
2850 && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
2851 && (ref = ipa_find_reference ((symtab_node) new_root,
2852 (symtab_node) n, NULL, 0)))
2854 if (dump_file)
2855 fprintf (dump_file, "ipa-prop: Removing cloning-created "
2856 "reference from %s/%i to %s/%i.\n",
2857 xstrdup (cgraph_node_name (new_root)),
2858 new_root->symbol.order,
2859 xstrdup (cgraph_node_name (n)), n->symbol.order);
2860 ipa_remove_reference (ref);
/* A constant combines the rdesc refcount with the callee's count; when it
   drops to zero, the described reference and any references created for
   intermediate inline clones are removed.  */
2864 else if (jf->type == IPA_JF_CONST
2865 && (rdesc = jfunc_rdesc_usable (jf)))
2867 int d = ipa_get_controlled_uses (old_root_info, i);
2868 int c = rdesc->refcount;
2869 rdesc->refcount = combine_controlled_uses_counters (c, d);
2870 if (rdesc->refcount == 0)
2872 tree cst = ipa_get_jf_constant (jf);
2873 struct cgraph_node *n;
2874 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
2875 && TREE_CODE (TREE_OPERAND (cst, 0))
2876 == FUNCTION_DECL);
2877 n = cgraph_get_node (TREE_OPERAND (cst, 0));
2878 if (n)
2880 struct cgraph_node *clone;
2881 bool ok;
2882 ok = remove_described_reference ((symtab_node) n, rdesc);
2883 gcc_checking_assert (ok);
/* Walk up the chain of inline clones towards the edge that owns the
   rdesc, removing the duplicated references on the way.  */
2885 clone = cs->caller;
2886 while (clone->global.inlined_to
2887 && clone != rdesc->cs->caller
2888 && IPA_NODE_REF (clone)->ipcp_orig_node)
2890 struct ipa_ref *ref;
2891 ref = ipa_find_reference ((symtab_node) clone,
2892 (symtab_node) n, NULL, 0);
2893 if (ref)
2895 if (dump_file)
2896 fprintf (dump_file, "ipa-prop: Removing "
2897 "cloning-created reference "
2898 "from %s/%i to %s/%i.\n",
2899 xstrdup (cgraph_node_name (clone)),
2900 clone->symbol.order,
2901 xstrdup (cgraph_node_name (n)),
2902 n->symbol.order);
2903 ipa_remove_reference (ref);
2905 clone = clone->callers->caller;
/* Arguments beyond the callee's parameter count cannot be described; mark
   their counts as unknown.  */
2912 for (i = ipa_get_param_count (old_root_info);
2913 i < ipa_get_cs_argument_count (args);
2914 i++)
2916 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2918 if (jf->type == IPA_JF_CONST)
2920 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
2921 if (rdesc)
2922 rdesc->refcount = IPA_UNDESCRIBED_USE;
2924 else if (jf->type == IPA_JF_PASS_THROUGH)
2925 ipa_set_controlled_uses (new_root_info,
2926 jf->value.pass_through.formal_id,
2927 IPA_UNDESCRIBED_USE);
2931 /* Update jump functions and call note functions on inlining the call site CS.
2932 CS is expected to lead to a node already cloned by
2933 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
2934 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were +
2935 created. */
2937 bool
2938 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
2939 vec<cgraph_edge_p> *new_edges)
2941 bool changed;
2942 /* Do nothing if the preparation phase has not been carried out yet
2943 (i.e. during early inlining). */
2944 if (!ipa_node_params_vector.exists ())
2945 return false;
2946 gcc_assert (ipa_edge_args_vector);
2948 propagate_controlled_uses (cs);
2949 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
2951 return changed;
2954 /* Frees all dynamically allocated structures that the argument info points
2955 to. */
2957 void
2958 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
2960 vec_free (args->jump_functions);
2961 memset (args, 0, sizeof (*args));
2964 /* Free all ipa_edge structures. */
2966 void
2967 ipa_free_all_edge_args (void)
2969 int i;
2970 struct ipa_edge_args *args;
2972 if (!ipa_edge_args_vector)
2973 return;
2975 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
2976 ipa_free_edge_args_substructures (args);
2978 vec_free (ipa_edge_args_vector);
2981 /* Frees all dynamically allocated structures that the param info points
2982 to. */
2984 void
2985 ipa_free_node_params_substructures (struct ipa_node_params *info)
2987 info->descriptors.release ();
2988 free (info->lattices);
2989 /* Lattice values and their sources are deallocated with their alocation
2990 pool. */
2991 info->known_vals.release ();
2992 memset (info, 0, sizeof (*info));
2995 /* Free all ipa_node_params structures. */
2997 void
2998 ipa_free_all_node_params (void)
3000 int i;
3001 struct ipa_node_params *info;
3003 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
3004 ipa_free_node_params_substructures (info);
3006 ipa_node_params_vector.release ();
3009 /* Set the aggregate replacements of NODE to be AGGVALS. */
3011 void
3012 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3013 struct ipa_agg_replacement_value *aggvals)
3015 if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
3016 vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);
3018 (*ipa_node_agg_replacements)[node->uid] = aggvals;
3021 /* Hook that is called by cgraph.c when an edge is removed. */
3023 static void
3024 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3026 struct ipa_edge_args *args;
3028 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3029 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3030 return;
3032 args = IPA_EDGE_REF (cs);
3033 if (args->jump_functions)
3035 struct ipa_jump_func *jf;
3036 int i;
3037 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3039 struct ipa_cst_ref_desc *rdesc;
3040 try_decrement_rdesc_refcount (jf);
3041 if (jf->type == IPA_JF_CONST
3042 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3043 && rdesc->cs == cs)
3044 rdesc->cs = NULL;
3048 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3051 /* Hook that is called by cgraph.c when a node is removed. */
3053 static void
3054 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3056 /* During IPA-CP updating we can be called on not-yet analyze clones. */
3057 if (ipa_node_params_vector.length () > (unsigned)node->uid)
3058 ipa_free_node_params_substructures (IPA_NODE_REF (node));
3059 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3060 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
3063 /* Hook that is called by cgraph.c when an edge is duplicated. */
3065 static void
3066 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3067 __attribute__((unused)) void *data)
3069 struct ipa_edge_args *old_args, *new_args;
3070 unsigned int i;
3072 ipa_check_create_edge_args ();
3074 old_args = IPA_EDGE_REF (src);
3075 new_args = IPA_EDGE_REF (dst);
3077 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
/* Deep-copy each jump function: the aggregate items and, for constants,
   the reference description.  */
3079 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3081 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3082 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3084 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3086 if (src_jf->type == IPA_JF_CONST)
3088 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3090 if (!src_rdesc)
3091 dst_jf->value.constant.rdesc = NULL;
/* Same caller means the statement itself was duplicated (e.g.
   speculation); clone the IPA reference and make a fresh rdesc that is
   not linked into any duplicate chain.  */
3092 else if (src->caller == dst->caller)
3094 struct ipa_ref *ref;
3095 symtab_node n = (symtab_node) cgraph_node_for_jfunc (src_jf);
3096 gcc_checking_assert (n);
3097 ref = ipa_find_reference ((symtab_node) src->caller, n,
3098 src->call_stmt, src->lto_stmt_uid);
3099 gcc_checking_assert (ref);
3100 ipa_clone_ref (ref, (symtab_node) dst->caller, ref->stmt);
3102 gcc_checking_assert (ipa_refdesc_pool);
3103 struct ipa_cst_ref_desc *dst_rdesc
3104 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3105 dst_rdesc->cs = dst;
3106 dst_rdesc->refcount = src_rdesc->refcount;
3107 dst_rdesc->next_duplicate = NULL;
3108 dst_jf->value.constant.rdesc = dst_rdesc;
/* SRC owns its rdesc: make a duplicate rooted at DST and chain it after
   the original.  */
3110 else if (src_rdesc->cs == src)
3112 struct ipa_cst_ref_desc *dst_rdesc;
3113 gcc_checking_assert (ipa_refdesc_pool);
3114 dst_rdesc
3115 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3116 dst_rdesc->cs = dst;
3117 dst_rdesc->refcount = src_rdesc->refcount;
3118 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3119 src_rdesc->next_duplicate = dst_rdesc;
3120 dst_jf->value.constant.rdesc = dst_rdesc;
3122 else
3124 struct ipa_cst_ref_desc *dst_rdesc;
3125 /* This can happen during inlining, when a JFUNC can refer to a
3126 reference taken in a function up in the tree of inline clones.
3127 We need to find the duplicate that refers to our tree of
3128 inline clones. */
3130 gcc_assert (dst->caller->global.inlined_to);
3131 for (dst_rdesc = src_rdesc->next_duplicate;
3132 dst_rdesc;
3133 dst_rdesc = dst_rdesc->next_duplicate)
3135 struct cgraph_node *top;
3136 top = dst_rdesc->cs->caller->global.inlined_to
3137 ? dst_rdesc->cs->caller->global.inlined_to
3138 : dst_rdesc->cs->caller;
3139 if (dst->caller->global.inlined_to == top)
3140 break;
3142 gcc_assert (dst_rdesc);
3143 dst_jf->value.constant.rdesc = dst_rdesc;
3149 /* Hook that is called by cgraph.c when a node is duplicated. */
3151 static void
3152 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
3153 ATTRIBUTE_UNUSED void *data)
3155 struct ipa_node_params *old_info, *new_info;
3156 struct ipa_agg_replacement_value *old_av, *new_av;
3158 ipa_check_create_node_params ();
3159 old_info = IPA_NODE_REF (src);
3160 new_info = IPA_NODE_REF (dst);
/* Copy the parameter information; lattices are transient and not copied.  */
3162 new_info->descriptors = old_info->descriptors.copy ();
3163 new_info->lattices = NULL;
3164 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3166 new_info->uses_analysis_done = old_info->uses_analysis_done;
3167 new_info->node_enqueued = old_info->node_enqueued;
3169 old_av = ipa_get_agg_replacements_for_node (src);
3170 if (!old_av)
3171 return;
/* Duplicate the chain of aggregate replacement values; note that the copy
   is built by prepending and therefore comes out in reverse order.  */
3173 new_av = NULL;
3174 while (old_av)
3176 struct ipa_agg_replacement_value *v;
3178 v = ggc_alloc_ipa_agg_replacement_value ();
3179 memcpy (v, old_av, sizeof (*v));
3180 v->next = new_av;
3181 new_av = v;
3182 old_av = old_av->next;
3184 ipa_set_node_agg_value_chain (dst, new_av);
3188 /* Analyze newly added function into callgraph. */
3190 static void
3191 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3193 ipa_analyze_node (node);
3196 /* Register our cgraph hooks if they are not already there. */
3198 void
3199 ipa_register_cgraph_hooks (void)
3201 if (!edge_removal_hook_holder)
3202 edge_removal_hook_holder =
3203 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3204 if (!node_removal_hook_holder)
3205 node_removal_hook_holder =
3206 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
3207 if (!edge_duplication_hook_holder)
3208 edge_duplication_hook_holder =
3209 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3210 if (!node_duplication_hook_holder)
3211 node_duplication_hook_holder =
3212 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
3213 function_insertion_hook_holder =
3214 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
3217 /* Unregister our cgraph hooks if they are not already there. */
3219 static void
3220 ipa_unregister_cgraph_hooks (void)
3222 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
3223 edge_removal_hook_holder = NULL;
3224 cgraph_remove_node_removal_hook (node_removal_hook_holder);
3225 node_removal_hook_holder = NULL;
3226 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
3227 edge_duplication_hook_holder = NULL;
3228 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
3229 node_duplication_hook_holder = NULL;
3230 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
3231 function_insertion_hook_holder = NULL;
3234 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3235 longer needed after ipa-cp. */
3237 void
3238 ipa_free_all_structures_after_ipa_cp (void)
3240 if (!optimize)
3242 ipa_free_all_edge_args ();
3243 ipa_free_all_node_params ();
3244 free_alloc_pool (ipcp_sources_pool);
3245 free_alloc_pool (ipcp_values_pool);
3246 free_alloc_pool (ipcp_agg_lattice_pool);
3247 ipa_unregister_cgraph_hooks ();
3248 if (ipa_refdesc_pool)
3249 free_alloc_pool (ipa_refdesc_pool);
3253 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3254 longer needed after indirect inlining. */
3256 void
3257 ipa_free_all_structures_after_iinln (void)
3259 ipa_free_all_edge_args ();
3260 ipa_free_all_node_params ();
3261 ipa_unregister_cgraph_hooks ();
3262 if (ipcp_sources_pool)
3263 free_alloc_pool (ipcp_sources_pool);
3264 if (ipcp_values_pool)
3265 free_alloc_pool (ipcp_values_pool);
3266 if (ipcp_agg_lattice_pool)
3267 free_alloc_pool (ipcp_agg_lattice_pool);
3268 if (ipa_refdesc_pool)
3269 free_alloc_pool (ipa_refdesc_pool);
3272 /* Print ipa_tree_map data structures of all functions in the
3273 callgraph to F. */
3275 void
3276 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3278 int i, count;
3279 struct ipa_node_params *info;
3281 if (!node->symbol.definition)
3282 return;
3283 info = IPA_NODE_REF (node);
3284 fprintf (f, " function %s/%i parameter descriptors:\n",
3285 cgraph_node_name (node), node->symbol.order);
3286 count = ipa_get_param_count (info);
3287 for (i = 0; i < count; i++)
3289 int c;
3291 ipa_dump_param (f, info, i);
3292 if (ipa_is_param_used (info, i))
3293 fprintf (f, " used");
3294 c = ipa_get_controlled_uses (info, i);
3295 if (c == IPA_UNDESCRIBED_USE)
3296 fprintf (f, " undescribed_use");
3297 else
3298 fprintf (f, " controlled_uses=%i", c);
3299 fprintf (f, "\n");
3303 /* Print ipa_tree_map data structures of all functions in the
3304 callgraph to F. */
3306 void
3307 ipa_print_all_params (FILE * f)
3309 struct cgraph_node *node;
3311 fprintf (f, "\nFunction parameters:\n");
3312 FOR_EACH_FUNCTION (node)
3313 ipa_print_node_params (f, node);
3316 /* Return a heap allocated vector containing formal parameters of FNDECL. */
3318 vec<tree>
3319 ipa_get_vector_of_formal_parms (tree fndecl)
3321 vec<tree> args;
3322 int count;
3323 tree parm;
3325 gcc_assert (!flag_wpa);
3326 count = count_formal_params (fndecl);
3327 args.create (count);
3328 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3329 args.quick_push (parm);
3331 return args;
3334 /* Return a heap allocated vector containing types of formal parameters of
3335 function type FNTYPE. */
3337 static inline vec<tree>
3338 get_vector_of_formal_parm_types (tree fntype)
3340 vec<tree> types;
3341 int count = 0;
3342 tree t;
3344 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3345 count++;
3347 types.create (count);
3348 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3349 types.quick_push (TREE_VALUE (t));
3351 return types;
3354 /* Modify the function declaration FNDECL and its type according to the plan in
3355 ADJUSTMENTS. It also sets base fields of individual adjustments structures
3356 to reflect the actual parameters being modified which are determined by the
3357 base_index field. */
3359 void
3360 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
3361 const char *synth_parm_prefix)
3363 vec<tree> oparms, otypes;
3364 tree orig_type, new_type = NULL;
3365 tree old_arg_types, t, new_arg_types = NULL;
3366 tree parm, *link = &DECL_ARGUMENTS (fndecl);
3367 int i, len = adjustments.length ();
3368 tree new_reversed = NULL;
3369 bool care_for_types, last_parm_void;
3371 if (!synth_parm_prefix)
3372 synth_parm_prefix = "SYNTH";
3374 oparms = ipa_get_vector_of_formal_parms (fndecl);
3375 orig_type = TREE_TYPE (fndecl);
3376 old_arg_types = TYPE_ARG_TYPES (orig_type);
3378 /* The following test is an ugly hack, some functions simply don't have any
3379 arguments in their type. This is probably a bug but well... */
3380 care_for_types = (old_arg_types != NULL_TREE);
3381 if (care_for_types)
3383 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3384 == void_type_node);
3385 otypes = get_vector_of_formal_parm_types (orig_type);
3386 if (last_parm_void)
3387 gcc_assert (oparms.length () + 1 == otypes.length ());
3388 else
3389 gcc_assert (oparms.length () == otypes.length ());
3391 else
3393 last_parm_void = false;
3394 otypes.create (0);
3397 for (i = 0; i < len; i++)
3399 struct ipa_parm_adjustment *adj;
3400 gcc_assert (link);
3402 adj = &adjustments[i];
3403 parm = oparms[adj->base_index];
3404 adj->base = parm;
3406 if (adj->copy_param)
3408 if (care_for_types)
3409 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3410 new_arg_types);
3411 *link = parm;
3412 link = &DECL_CHAIN (parm);
3414 else if (!adj->remove_param)
3416 tree new_parm;
3417 tree ptype;
3419 if (adj->by_ref)
3420 ptype = build_pointer_type (adj->type);
3421 else
3422 ptype = adj->type;
3424 if (care_for_types)
3425 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3427 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3428 ptype);
3429 DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);
3431 DECL_ARTIFICIAL (new_parm) = 1;
3432 DECL_ARG_TYPE (new_parm) = ptype;
3433 DECL_CONTEXT (new_parm) = fndecl;
3434 TREE_USED (new_parm) = 1;
3435 DECL_IGNORED_P (new_parm) = 1;
3436 layout_decl (new_parm, 0);
3438 adj->base = parm;
3439 adj->reduction = new_parm;
3441 *link = new_parm;
3443 link = &DECL_CHAIN (new_parm);
3447 *link = NULL_TREE;
3449 if (care_for_types)
3451 new_reversed = nreverse (new_arg_types);
3452 if (last_parm_void)
3454 if (new_reversed)
3455 TREE_CHAIN (new_arg_types) = void_list_node;
3456 else
3457 new_reversed = void_list_node;
3461 /* Use copy_node to preserve as much as possible from original type
3462 (debug info, attribute lists etc.)
3463 Exception is METHOD_TYPEs must have THIS argument.
3464 When we are asked to remove it, we need to build new FUNCTION_TYPE
3465 instead. */
3466 if (TREE_CODE (orig_type) != METHOD_TYPE
3467 || (adjustments[0].copy_param
3468 && adjustments[0].base_index == 0))
3470 new_type = build_distinct_type_copy (orig_type);
3471 TYPE_ARG_TYPES (new_type) = new_reversed;
3473 else
3475 new_type
3476 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3477 new_reversed));
3478 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3479 DECL_VINDEX (fndecl) = NULL_TREE;
3482 /* When signature changes, we need to clear builtin info. */
3483 if (DECL_BUILT_IN (fndecl))
3485 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3486 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3489 /* This is a new type, not a copy of an old type. Need to reassociate
3490 variants. We can handle everything except the main variant lazily. */
3491 t = TYPE_MAIN_VARIANT (orig_type);
3492 if (orig_type != t)
3494 TYPE_MAIN_VARIANT (new_type) = t;
3495 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
3496 TYPE_NEXT_VARIANT (t) = new_type;
3498 else
3500 TYPE_MAIN_VARIANT (new_type) = new_type;
3501 TYPE_NEXT_VARIANT (new_type) = NULL;
3504 TREE_TYPE (fndecl) = new_type;
3505 DECL_VIRTUAL_P (fndecl) = 0;
3506 otypes.release ();
3507 oparms.release ();
3510 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
3511 If this is a directly recursive call, CS must be NULL. Otherwise it must
3512 contain the corresponding call graph edge. */
3514 void
3515 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
3516 ipa_parm_adjustment_vec adjustments)
3518 struct cgraph_node *current_node = cgraph_get_node (current_function_decl);
3519 vec<tree> vargs;
3520 vec<tree, va_gc> **debug_args = NULL;
3521 gimple new_stmt;
3522 gimple_stmt_iterator gsi, prev_gsi;
3523 tree callee_decl;
3524 int i, len;
3526 len = adjustments.length ();
3527 vargs.create (len);
3528 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->symbol.decl;
3529 ipa_remove_stmt_references ((symtab_node) current_node, stmt);
3531 gsi = gsi_for_stmt (stmt);
3532 prev_gsi = gsi;
3533 gsi_prev (&prev_gsi);
3534 for (i = 0; i < len; i++)
3536 struct ipa_parm_adjustment *adj;
3538 adj = &adjustments[i];
3540 if (adj->copy_param)
3542 tree arg = gimple_call_arg (stmt, adj->base_index);
3544 vargs.quick_push (arg);
3546 else if (!adj->remove_param)
3548 tree expr, base, off;
3549 location_t loc;
3550 unsigned int deref_align = 0;
3551 bool deref_base = false;
3553 /* We create a new parameter out of the value of the old one, we can
3554 do the following kind of transformations:
3556 - A scalar passed by reference is converted to a scalar passed by
3557 value. (adj->by_ref is false and the type of the original
3558 actual argument is a pointer to a scalar).
3560 - A part of an aggregate is passed instead of the whole aggregate.
3561 The part can be passed either by value or by reference, this is
3562 determined by value of adj->by_ref. Moreover, the code below
3563 handles both situations when the original aggregate is passed by
3564 value (its type is not a pointer) and when it is passed by
3565 reference (it is a pointer to an aggregate).
3567 When the new argument is passed by reference (adj->by_ref is true)
3568 it must be a part of an aggregate and therefore we form it by
3569 simply taking the address of a reference inside the original
3570 aggregate. */
3572 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3573 base = gimple_call_arg (stmt, adj->base_index);
3574 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3575 : EXPR_LOCATION (base);
3577 if (TREE_CODE (base) != ADDR_EXPR
3578 && POINTER_TYPE_P (TREE_TYPE (base)))
3579 off = build_int_cst (adj->alias_ptr_type,
3580 adj->offset / BITS_PER_UNIT);
3581 else
3583 HOST_WIDE_INT base_offset;
3584 tree prev_base;
3585 bool addrof;
3587 if (TREE_CODE (base) == ADDR_EXPR)
3589 base = TREE_OPERAND (base, 0);
3590 addrof = true;
3592 else
3593 addrof = false;
3594 prev_base = base;
3595 base = get_addr_base_and_unit_offset (base, &base_offset);
3596 /* Aggregate arguments can have non-invariant addresses. */
3597 if (!base)
3599 base = build_fold_addr_expr (prev_base);
3600 off = build_int_cst (adj->alias_ptr_type,
3601 adj->offset / BITS_PER_UNIT);
3603 else if (TREE_CODE (base) == MEM_REF)
3605 if (!addrof)
3607 deref_base = true;
3608 deref_align = TYPE_ALIGN (TREE_TYPE (base));
3610 off = build_int_cst (adj->alias_ptr_type,
3611 base_offset
3612 + adj->offset / BITS_PER_UNIT);
3613 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
3614 off);
3615 base = TREE_OPERAND (base, 0);
3617 else
3619 off = build_int_cst (adj->alias_ptr_type,
3620 base_offset
3621 + adj->offset / BITS_PER_UNIT);
3622 base = build_fold_addr_expr (base);
3626 if (!adj->by_ref)
3628 tree type = adj->type;
3629 unsigned int align;
3630 unsigned HOST_WIDE_INT misalign;
3632 if (deref_base)
3634 align = deref_align;
3635 misalign = 0;
3637 else
3639 get_pointer_alignment_1 (base, &align, &misalign);
3640 if (TYPE_ALIGN (type) > align)
3641 align = TYPE_ALIGN (type);
3643 misalign += (tree_to_double_int (off)
3644 .sext (TYPE_PRECISION (TREE_TYPE (off))).low
3645 * BITS_PER_UNIT);
3646 misalign = misalign & (align - 1);
3647 if (misalign != 0)
3648 align = (misalign & -misalign);
3649 if (align < TYPE_ALIGN (type))
3650 type = build_aligned_type (type, align);
3651 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
3653 else
3655 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
3656 expr = build_fold_addr_expr (expr);
3659 expr = force_gimple_operand_gsi (&gsi, expr,
3660 adj->by_ref
3661 || is_gimple_reg_type (adj->type),
3662 NULL, true, GSI_SAME_STMT);
3663 vargs.quick_push (expr);
3665 if (!adj->copy_param && MAY_HAVE_DEBUG_STMTS)
3667 unsigned int ix;
3668 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
3669 gimple def_temp;
3671 arg = gimple_call_arg (stmt, adj->base_index);
3672 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
3674 if (!fold_convertible_p (TREE_TYPE (origin), arg))
3675 continue;
3676 arg = fold_convert_loc (gimple_location (stmt),
3677 TREE_TYPE (origin), arg);
3679 if (debug_args == NULL)
3680 debug_args = decl_debug_args_insert (callee_decl);
3681 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
3682 if (ddecl == origin)
3684 ddecl = (**debug_args)[ix + 1];
3685 break;
3687 if (ddecl == NULL)
3689 ddecl = make_node (DEBUG_EXPR_DECL);
3690 DECL_ARTIFICIAL (ddecl) = 1;
3691 TREE_TYPE (ddecl) = TREE_TYPE (origin);
3692 DECL_MODE (ddecl) = DECL_MODE (origin);
3694 vec_safe_push (*debug_args, origin);
3695 vec_safe_push (*debug_args, ddecl);
3697 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
3698 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
3702 if (dump_file && (dump_flags & TDF_DETAILS))
3704 fprintf (dump_file, "replacing stmt:");
3705 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
3708 new_stmt = gimple_build_call_vec (callee_decl, vargs);
3709 vargs.release ();
3710 if (gimple_call_lhs (stmt))
3711 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3713 gimple_set_block (new_stmt, gimple_block (stmt));
3714 if (gimple_has_location (stmt))
3715 gimple_set_location (new_stmt, gimple_location (stmt));
3716 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3717 gimple_call_copy_flags (new_stmt, stmt);
3719 if (dump_file && (dump_flags & TDF_DETAILS))
3721 fprintf (dump_file, "with stmt:");
3722 print_gimple_stmt (dump_file, new_stmt, 0, 0);
3723 fprintf (dump_file, "\n");
3725 gsi_replace (&gsi, new_stmt, true);
3726 if (cs)
3727 cgraph_set_call_stmt (cs, new_stmt);
3730 ipa_record_stmt_references (current_node, gsi_stmt (gsi));
3731 gsi_prev (&gsi);
3733 while ((gsi_end_p (prev_gsi) && !gsi_end_p (gsi))
3734 || (!gsi_end_p (prev_gsi) && gsi_stmt (gsi) == gsi_stmt (prev_gsi)));
3736 update_ssa (TODO_update_ssa);
3737 free_dominance_info (CDI_DOMINATORS);
3740 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
3742 static bool
3743 index_in_adjustments_multiple_times_p (int base_index,
3744 ipa_parm_adjustment_vec adjustments)
3746 int i, len = adjustments.length ();
3747 bool one = false;
3749 for (i = 0; i < len; i++)
3751 struct ipa_parm_adjustment *adj;
3752 adj = &adjustments[i];
3754 if (adj->base_index == base_index)
3756 if (one)
3757 return true;
3758 else
3759 one = true;
3762 return false;
3766 /* Return adjustments that should have the same effect on function parameters
3767 and call arguments as if they were first changed according to adjustments in
3768 INNER and then by adjustments in OUTER. */
3770 ipa_parm_adjustment_vec
3771 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
3772 ipa_parm_adjustment_vec outer)
3774 int i, outlen = outer.length ();
3775 int inlen = inner.length ();
3776 int removals = 0;
3777 ipa_parm_adjustment_vec adjustments, tmp;
3779 tmp.create (inlen);
3780 for (i = 0; i < inlen; i++)
3782 struct ipa_parm_adjustment *n;
3783 n = &inner[i];
3785 if (n->remove_param)
3786 removals++;
3787 else
3788 tmp.quick_push (*n);
3791 adjustments.create (outlen + removals);
3792 for (i = 0; i < outlen; i++)
3794 struct ipa_parm_adjustment r;
3795 struct ipa_parm_adjustment *out = &outer[i];
3796 struct ipa_parm_adjustment *in = &tmp[out->base_index];
3798 memset (&r, 0, sizeof (r));
3799 gcc_assert (!in->remove_param);
3800 if (out->remove_param)
3802 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
3804 r.remove_param = true;
3805 adjustments.quick_push (r);
3807 continue;
3810 r.base_index = in->base_index;
3811 r.type = out->type;
3813 /* FIXME: Create nonlocal value too. */
3815 if (in->copy_param && out->copy_param)
3816 r.copy_param = true;
3817 else if (in->copy_param)
3818 r.offset = out->offset;
3819 else if (out->copy_param)
3820 r.offset = in->offset;
3821 else
3822 r.offset = in->offset + out->offset;
3823 adjustments.quick_push (r);
3826 for (i = 0; i < inlen; i++)
3828 struct ipa_parm_adjustment *n = &inner[i];
3830 if (n->remove_param)
3831 adjustments.quick_push (*n);
3834 tmp.release ();
3835 return adjustments;
3838 /* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
3839 friendly way, assuming they are meant to be applied to FNDECL. */
3841 void
3842 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
3843 tree fndecl)
3845 int i, len = adjustments.length ();
3846 bool first = true;
3847 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
3849 fprintf (file, "IPA param adjustments: ");
3850 for (i = 0; i < len; i++)
3852 struct ipa_parm_adjustment *adj;
3853 adj = &adjustments[i];
3855 if (!first)
3856 fprintf (file, " ");
3857 else
3858 first = false;
3860 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
3861 print_generic_expr (file, parms[adj->base_index], 0);
3862 if (adj->base)
3864 fprintf (file, ", base: ");
3865 print_generic_expr (file, adj->base, 0);
3867 if (adj->reduction)
3869 fprintf (file, ", reduction: ");
3870 print_generic_expr (file, adj->reduction, 0);
3872 if (adj->new_ssa_base)
3874 fprintf (file, ", new_ssa_base: ");
3875 print_generic_expr (file, adj->new_ssa_base, 0);
3878 if (adj->copy_param)
3879 fprintf (file, ", copy_param");
3880 else if (adj->remove_param)
3881 fprintf (file, ", remove_param");
3882 else
3883 fprintf (file, ", offset %li", (long) adj->offset);
3884 if (adj->by_ref)
3885 fprintf (file, ", by_ref");
3886 print_node_brief (file, ", type: ", adj->type, 0);
3887 fprintf (file, "\n");
3889 parms.release ();
3892 /* Dump the AV linked list. */
3894 void
3895 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
3897 bool comma = false;
3898 fprintf (f, " Aggregate replacements:");
3899 for (; av; av = av->next)
3901 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
3902 av->index, av->offset);
3903 print_generic_expr (f, av->value, 0);
3904 comma = true;
3906 fprintf (f, "\n");
3909 /* Stream out jump function JUMP_FUNC to OB. */
3911 static void
3912 ipa_write_jump_function (struct output_block *ob,
3913 struct ipa_jump_func *jump_func)
3915 struct ipa_agg_jf_item *item;
3916 struct bitpack_d bp;
3917 int i, count;
3919 streamer_write_uhwi (ob, jump_func->type);
3920 switch (jump_func->type)
3922 case IPA_JF_UNKNOWN:
3923 break;
3924 case IPA_JF_KNOWN_TYPE:
3925 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
3926 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
3927 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
3928 break;
3929 case IPA_JF_CONST:
3930 gcc_assert (
3931 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
3932 stream_write_tree (ob, jump_func->value.constant.value, true);
3933 break;
3934 case IPA_JF_PASS_THROUGH:
3935 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
3936 if (jump_func->value.pass_through.operation == NOP_EXPR)
3938 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
3939 bp = bitpack_create (ob->main_stream);
3940 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
3941 bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
3942 streamer_write_bitpack (&bp);
3944 else
3946 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
3947 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
3949 break;
3950 case IPA_JF_ANCESTOR:
3951 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
3952 stream_write_tree (ob, jump_func->value.ancestor.type, true);
3953 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
3954 bp = bitpack_create (ob->main_stream);
3955 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
3956 bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
3957 streamer_write_bitpack (&bp);
3958 break;
3961 count = vec_safe_length (jump_func->agg.items);
3962 streamer_write_uhwi (ob, count);
3963 if (count)
3965 bp = bitpack_create (ob->main_stream);
3966 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
3967 streamer_write_bitpack (&bp);
3970 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
3972 streamer_write_uhwi (ob, item->offset);
3973 stream_write_tree (ob, item->value, true);
3977 /* Read in jump function JUMP_FUNC from IB. */
3979 static void
3980 ipa_read_jump_function (struct lto_input_block *ib,
3981 struct ipa_jump_func *jump_func,
3982 struct cgraph_edge *cs,
3983 struct data_in *data_in)
3985 enum jump_func_type jftype;
3986 enum tree_code operation;
3987 int i, count;
3989 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
3990 switch (jftype)
3992 case IPA_JF_UNKNOWN:
3993 jump_func->type = IPA_JF_UNKNOWN;
3994 break;
3995 case IPA_JF_KNOWN_TYPE:
3997 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
3998 tree base_type = stream_read_tree (ib, data_in);
3999 tree component_type = stream_read_tree (ib, data_in);
4001 ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
4002 break;
4004 case IPA_JF_CONST:
4005 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4006 break;
4007 case IPA_JF_PASS_THROUGH:
4008 operation = (enum tree_code) streamer_read_uhwi (ib);
4009 if (operation == NOP_EXPR)
4011 int formal_id = streamer_read_uhwi (ib);
4012 struct bitpack_d bp = streamer_read_bitpack (ib);
4013 bool agg_preserved = bp_unpack_value (&bp, 1);
4014 bool type_preserved = bp_unpack_value (&bp, 1);
4015 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
4016 type_preserved);
4018 else
4020 tree operand = stream_read_tree (ib, data_in);
4021 int formal_id = streamer_read_uhwi (ib);
4022 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4023 operation);
4025 break;
4026 case IPA_JF_ANCESTOR:
4028 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4029 tree type = stream_read_tree (ib, data_in);
4030 int formal_id = streamer_read_uhwi (ib);
4031 struct bitpack_d bp = streamer_read_bitpack (ib);
4032 bool agg_preserved = bp_unpack_value (&bp, 1);
4033 bool type_preserved = bp_unpack_value (&bp, 1);
4035 ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
4036 type_preserved);
4037 break;
4041 count = streamer_read_uhwi (ib);
4042 vec_alloc (jump_func->agg.items, count);
4043 if (count)
4045 struct bitpack_d bp = streamer_read_bitpack (ib);
4046 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4048 for (i = 0; i < count; i++)
4050 struct ipa_agg_jf_item item;
4051 item.offset = streamer_read_uhwi (ib);
4052 item.value = stream_read_tree (ib, data_in);
4053 jump_func->agg.items->quick_push (item);
4057 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4058 relevant to indirect inlining to OB. */
4060 static void
4061 ipa_write_indirect_edge_info (struct output_block *ob,
4062 struct cgraph_edge *cs)
4064 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4065 struct bitpack_d bp;
4067 streamer_write_hwi (ob, ii->param_index);
4068 streamer_write_hwi (ob, ii->offset);
4069 bp = bitpack_create (ob->main_stream);
4070 bp_pack_value (&bp, ii->polymorphic, 1);
4071 bp_pack_value (&bp, ii->agg_contents, 1);
4072 bp_pack_value (&bp, ii->member_ptr, 1);
4073 bp_pack_value (&bp, ii->by_ref, 1);
4074 streamer_write_bitpack (&bp);
4076 if (ii->polymorphic)
4078 streamer_write_hwi (ob, ii->otr_token);
4079 stream_write_tree (ob, ii->otr_type, true);
4083 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4084 relevant to indirect inlining from IB. */
4086 static void
4087 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4088 struct data_in *data_in ATTRIBUTE_UNUSED,
4089 struct cgraph_edge *cs)
4091 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4092 struct bitpack_d bp;
4094 ii->param_index = (int) streamer_read_hwi (ib);
4095 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4096 bp = streamer_read_bitpack (ib);
4097 ii->polymorphic = bp_unpack_value (&bp, 1);
4098 ii->agg_contents = bp_unpack_value (&bp, 1);
4099 ii->member_ptr = bp_unpack_value (&bp, 1);
4100 ii->by_ref = bp_unpack_value (&bp, 1);
4101 if (ii->polymorphic)
4103 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4104 ii->otr_type = stream_read_tree (ib, data_in);
4108 /* Stream out NODE info to OB. */
4110 static void
4111 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4113 int node_ref;
4114 lto_symtab_encoder_t encoder;
4115 struct ipa_node_params *info = IPA_NODE_REF (node);
4116 int j;
4117 struct cgraph_edge *e;
4118 struct bitpack_d bp;
4120 encoder = ob->decl_state->symtab_node_encoder;
4121 node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
4122 streamer_write_uhwi (ob, node_ref);
4124 streamer_write_uhwi (ob, ipa_get_param_count (info));
4125 for (j = 0; j < ipa_get_param_count (info); j++)
4126 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4127 bp = bitpack_create (ob->main_stream);
4128 gcc_assert (info->uses_analysis_done
4129 || ipa_get_param_count (info) == 0);
4130 gcc_assert (!info->node_enqueued);
4131 gcc_assert (!info->ipcp_orig_node);
4132 for (j = 0; j < ipa_get_param_count (info); j++)
4133 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4134 streamer_write_bitpack (&bp);
4135 for (j = 0; j < ipa_get_param_count (info); j++)
4136 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4137 for (e = node->callees; e; e = e->next_callee)
4139 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4141 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4142 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4143 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4145 for (e = node->indirect_calls; e; e = e->next_callee)
4147 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4149 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4150 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4151 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4152 ipa_write_indirect_edge_info (ob, e);
4156 /* Stream in NODE info from IB. */
4158 static void
4159 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4160 struct data_in *data_in)
4162 struct ipa_node_params *info = IPA_NODE_REF (node);
4163 int k;
4164 struct cgraph_edge *e;
4165 struct bitpack_d bp;
4167 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4169 for (k = 0; k < ipa_get_param_count (info); k++)
4170 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4172 bp = streamer_read_bitpack (ib);
4173 if (ipa_get_param_count (info) != 0)
4174 info->uses_analysis_done = true;
4175 info->node_enqueued = false;
4176 for (k = 0; k < ipa_get_param_count (info); k++)
4177 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4178 for (k = 0; k < ipa_get_param_count (info); k++)
4179 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4180 for (e = node->callees; e; e = e->next_callee)
4182 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4183 int count = streamer_read_uhwi (ib);
4185 if (!count)
4186 continue;
4187 vec_safe_grow_cleared (args->jump_functions, count);
4189 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4190 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4191 data_in);
4193 for (e = node->indirect_calls; e; e = e->next_callee)
4195 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4196 int count = streamer_read_uhwi (ib);
4198 if (count)
4200 vec_safe_grow_cleared (args->jump_functions, count);
4201 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4202 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4203 data_in);
4205 ipa_read_indirect_edge_info (ib, data_in, e);
4209 /* Write jump functions for nodes in SET. */
4211 void
4212 ipa_prop_write_jump_functions (void)
4214 struct cgraph_node *node;
4215 struct output_block *ob;
4216 unsigned int count = 0;
4217 lto_symtab_encoder_iterator lsei;
4218 lto_symtab_encoder_t encoder;
4221 if (!ipa_node_params_vector.exists ())
4222 return;
4224 ob = create_output_block (LTO_section_jump_functions);
4225 encoder = ob->decl_state->symtab_node_encoder;
4226 ob->cgraph_node = NULL;
4227 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4228 lsei_next_function_in_partition (&lsei))
4230 node = lsei_cgraph_node (lsei);
4231 if (cgraph_function_with_gimple_body_p (node)
4232 && IPA_NODE_REF (node) != NULL)
4233 count++;
4236 streamer_write_uhwi (ob, count);
4238 /* Process all of the functions. */
4239 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4240 lsei_next_function_in_partition (&lsei))
4242 node = lsei_cgraph_node (lsei);
4243 if (cgraph_function_with_gimple_body_p (node)
4244 && IPA_NODE_REF (node) != NULL)
4245 ipa_write_node_info (ob, node);
4247 streamer_write_char_stream (ob->main_stream, 0);
4248 produce_asm (ob, NULL);
4249 destroy_output_block (ob);
4252 /* Read section in file FILE_DATA of length LEN with data DATA. */
4254 static void
4255 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4256 size_t len)
4258 const struct lto_function_header *header =
4259 (const struct lto_function_header *) data;
4260 const int cfg_offset = sizeof (struct lto_function_header);
4261 const int main_offset = cfg_offset + header->cfg_size;
4262 const int string_offset = main_offset + header->main_size;
4263 struct data_in *data_in;
4264 struct lto_input_block ib_main;
4265 unsigned int i;
4266 unsigned int count;
4268 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4269 header->main_size);
4271 data_in =
4272 lto_data_in_create (file_data, (const char *) data + string_offset,
4273 header->string_size, vNULL);
4274 count = streamer_read_uhwi (&ib_main);
4276 for (i = 0; i < count; i++)
4278 unsigned int index;
4279 struct cgraph_node *node;
4280 lto_symtab_encoder_t encoder;
4282 index = streamer_read_uhwi (&ib_main);
4283 encoder = file_data->symtab_node_encoder;
4284 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4285 gcc_assert (node->symbol.definition);
4286 ipa_read_node_info (&ib_main, node, data_in);
4288 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4289 len);
4290 lto_data_in_delete (data_in);
4293 /* Read ipcp jump functions. */
4295 void
4296 ipa_prop_read_jump_functions (void)
4298 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4299 struct lto_file_decl_data *file_data;
4300 unsigned int j = 0;
4302 ipa_check_create_node_params ();
4303 ipa_check_create_edge_args ();
4304 ipa_register_cgraph_hooks ();
4306 while ((file_data = file_data_vec[j++]))
4308 size_t len;
4309 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4311 if (data)
4312 ipa_prop_read_section (file_data, data, len);
/* After merging units, we can get mismatch in argument counts.
   Also decl merging might've rendered parameter lists obsolete.
   Also compute called_with_variable_arg info.  */

void
ipa_update_after_lto_read (void)
{
  /* Make sure the lazily created per-node and per-edge IPA summary
     vectors exist so later passes can rely on them.  */
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
}
4327 void
4328 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
4330 int node_ref;
4331 unsigned int count = 0;
4332 lto_symtab_encoder_t encoder;
4333 struct ipa_agg_replacement_value *aggvals, *av;
4335 aggvals = ipa_get_agg_replacements_for_node (node);
4336 encoder = ob->decl_state->symtab_node_encoder;
4337 node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
4338 streamer_write_uhwi (ob, node_ref);
4340 for (av = aggvals; av; av = av->next)
4341 count++;
4342 streamer_write_uhwi (ob, count);
4344 for (av = aggvals; av; av = av->next)
4346 struct bitpack_d bp;
4348 streamer_write_uhwi (ob, av->offset);
4349 streamer_write_uhwi (ob, av->index);
4350 stream_write_tree (ob, av->value, true);
4352 bp = bitpack_create (ob->main_stream);
4353 bp_pack_value (&bp, av->by_ref, 1);
4354 streamer_write_bitpack (&bp);
4358 /* Stream in the aggregate value replacement chain for NODE from IB. */
4360 static void
4361 read_agg_replacement_chain (struct lto_input_block *ib,
4362 struct cgraph_node *node,
4363 struct data_in *data_in)
4365 struct ipa_agg_replacement_value *aggvals = NULL;
4366 unsigned int count, i;
4368 count = streamer_read_uhwi (ib);
4369 for (i = 0; i <count; i++)
4371 struct ipa_agg_replacement_value *av;
4372 struct bitpack_d bp;
4374 av = ggc_alloc_ipa_agg_replacement_value ();
4375 av->offset = streamer_read_uhwi (ib);
4376 av->index = streamer_read_uhwi (ib);
4377 av->value = stream_read_tree (ib, data_in);
4378 bp = streamer_read_bitpack (ib);
4379 av->by_ref = bp_unpack_value (&bp, 1);
4380 av->next = aggvals;
4381 aggvals = av;
4383 ipa_set_node_agg_value_chain (node, aggvals);
4386 /* Write all aggregate replacement for nodes in set. */
4388 void
4389 ipa_prop_write_all_agg_replacement (void)
4391 struct cgraph_node *node;
4392 struct output_block *ob;
4393 unsigned int count = 0;
4394 lto_symtab_encoder_iterator lsei;
4395 lto_symtab_encoder_t encoder;
4397 if (!ipa_node_agg_replacements)
4398 return;
4400 ob = create_output_block (LTO_section_ipcp_transform);
4401 encoder = ob->decl_state->symtab_node_encoder;
4402 ob->cgraph_node = NULL;
4403 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4404 lsei_next_function_in_partition (&lsei))
4406 node = lsei_cgraph_node (lsei);
4407 if (cgraph_function_with_gimple_body_p (node)
4408 && ipa_get_agg_replacements_for_node (node) != NULL)
4409 count++;
4412 streamer_write_uhwi (ob, count);
4414 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4415 lsei_next_function_in_partition (&lsei))
4417 node = lsei_cgraph_node (lsei);
4418 if (cgraph_function_with_gimple_body_p (node)
4419 && ipa_get_agg_replacements_for_node (node) != NULL)
4420 write_agg_replacement_chain (ob, node);
4422 streamer_write_char_stream (ob->main_stream, 0);
4423 produce_asm (ob, NULL);
4424 destroy_output_block (ob);
4427 /* Read replacements section in file FILE_DATA of length LEN with data
4428 DATA. */
4430 static void
4431 read_replacements_section (struct lto_file_decl_data *file_data,
4432 const char *data,
4433 size_t len)
4435 const struct lto_function_header *header =
4436 (const struct lto_function_header *) data;
4437 const int cfg_offset = sizeof (struct lto_function_header);
4438 const int main_offset = cfg_offset + header->cfg_size;
4439 const int string_offset = main_offset + header->main_size;
4440 struct data_in *data_in;
4441 struct lto_input_block ib_main;
4442 unsigned int i;
4443 unsigned int count;
4445 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4446 header->main_size);
4448 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
4449 header->string_size, vNULL);
4450 count = streamer_read_uhwi (&ib_main);
4452 for (i = 0; i < count; i++)
4454 unsigned int index;
4455 struct cgraph_node *node;
4456 lto_symtab_encoder_t encoder;
4458 index = streamer_read_uhwi (&ib_main);
4459 encoder = file_data->symtab_node_encoder;
4460 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4461 gcc_assert (node->symbol.definition);
4462 read_agg_replacement_chain (&ib_main, node, data_in);
4464 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4465 len);
4466 lto_data_in_delete (data_in);
4469 /* Read IPA-CP aggregate replacements. */
4471 void
4472 ipa_prop_read_all_agg_replacement (void)
4474 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4475 struct lto_file_decl_data *file_data;
4476 unsigned int j = 0;
4478 while ((file_data = file_data_vec[j++]))
4480 size_t len;
4481 const char *data = lto_get_section_data (file_data,
4482 LTO_section_ipcp_transform,
4483 NULL, &len);
4484 if (data)
4485 read_replacements_section (file_data, data, len);
4489 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4490 NODE. */
4492 static void
4493 adjust_agg_replacement_values (struct cgraph_node *node,
4494 struct ipa_agg_replacement_value *aggval)
4496 struct ipa_agg_replacement_value *v;
4497 int i, c = 0, d = 0, *adj;
4499 if (!node->clone.combined_args_to_skip)
4500 return;
4502 for (v = aggval; v; v = v->next)
4504 gcc_assert (v->index >= 0);
4505 if (c < v->index)
4506 c = v->index;
4508 c++;
4510 adj = XALLOCAVEC (int, c);
4511 for (i = 0; i < c; i++)
4512 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
4514 adj[i] = -1;
4515 d++;
4517 else
4518 adj[i] = i - d;
4520 for (v = aggval; v; v = v->next)
4521 v->index = adj[v->index];
4525 /* Function body transformation phase. */
4527 unsigned int
4528 ipcp_transform_function (struct cgraph_node *node)
4530 vec<ipa_param_descriptor_t> descriptors = vNULL;
4531 struct param_analysis_info *parms_ainfo;
4532 struct ipa_agg_replacement_value *aggval;
4533 gimple_stmt_iterator gsi;
4534 basic_block bb;
4535 int param_count;
4536 bool cfg_changed = false, something_changed = false;
4538 gcc_checking_assert (cfun);
4539 gcc_checking_assert (current_function_decl);
4541 if (dump_file)
4542 fprintf (dump_file, "Modification phase of node %s/%i\n",
4543 cgraph_node_name (node), node->symbol.order);
4545 aggval = ipa_get_agg_replacements_for_node (node);
4546 if (!aggval)
4547 return 0;
4548 param_count = count_formal_params (node->symbol.decl);
4549 if (param_count == 0)
4550 return 0;
4551 adjust_agg_replacement_values (node, aggval);
4552 if (dump_file)
4553 ipa_dump_agg_replacement_values (dump_file, aggval);
4554 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
4555 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
4556 descriptors.safe_grow_cleared (param_count);
4557 ipa_populate_param_decls (node, descriptors);
4559 FOR_EACH_BB (bb)
4560 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4562 struct ipa_agg_replacement_value *v;
4563 gimple stmt = gsi_stmt (gsi);
4564 tree rhs, val, t;
4565 HOST_WIDE_INT offset;
4566 int index;
4567 bool by_ref, vce;
4569 if (!gimple_assign_load_p (stmt))
4570 continue;
4571 rhs = gimple_assign_rhs1 (stmt);
4572 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
4573 continue;
4575 vce = false;
4576 t = rhs;
4577 while (handled_component_p (t))
4579 /* V_C_E can do things like convert an array of integers to one
4580 bigger integer and similar things we do not handle below. */
4581 if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
4583 vce = true;
4584 break;
4586 t = TREE_OPERAND (t, 0);
4588 if (vce)
4589 continue;
4591 if (!ipa_load_from_parm_agg_1 (descriptors, parms_ainfo, stmt,
4592 rhs, &index, &offset, &by_ref))
4593 continue;
4594 for (v = aggval; v; v = v->next)
4595 if (v->index == index
4596 && v->offset == offset)
4597 break;
4598 if (!v || v->by_ref != by_ref)
4599 continue;
4601 gcc_checking_assert (is_gimple_ip_invariant (v->value));
4602 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
4604 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
4605 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
4606 else if (TYPE_SIZE (TREE_TYPE (rhs))
4607 == TYPE_SIZE (TREE_TYPE (v->value)))
4608 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
4609 else
4611 if (dump_file)
4613 fprintf (dump_file, " const ");
4614 print_generic_expr (dump_file, v->value, 0);
4615 fprintf (dump_file, " can't be converted to type of ");
4616 print_generic_expr (dump_file, rhs, 0);
4617 fprintf (dump_file, "\n");
4619 continue;
4622 else
4623 val = v->value;
4625 if (dump_file && (dump_flags & TDF_DETAILS))
4627 fprintf (dump_file, "Modifying stmt:\n ");
4628 print_gimple_stmt (dump_file, stmt, 0, 0);
4630 gimple_assign_set_rhs_from_tree (&gsi, val);
4631 update_stmt (stmt);
4633 if (dump_file && (dump_flags & TDF_DETAILS))
4635 fprintf (dump_file, "into:\n ");
4636 print_gimple_stmt (dump_file, stmt, 0, 0);
4637 fprintf (dump_file, "\n");
4640 something_changed = true;
4641 if (maybe_clean_eh_stmt (stmt)
4642 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4643 cfg_changed = true;
4646 (*ipa_node_agg_replacements)[node->uid] = NULL;
4647 free_parms_ainfo (parms_ainfo, param_count);
4648 descriptors.release ();
4650 if (!something_changed)
4651 return 0;
4652 else if (cfg_changed)
4653 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
4654 else
4655 return TODO_update_ssa_only_virtuals;