gcc/ChangeLog
[official-gcc.git] / gcc / ipa-prop.c
blob7129b302156aecbcdc398b08b03460fcbd93579b
1 /* Interprocedural analyses.
2 Copyright (C) 2005-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tree.h"
24 #include "langhooks.h"
25 #include "ggc.h"
26 #include "target.h"
27 #include "cgraph.h"
28 #include "ipa-prop.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-inline.h"
32 #include "ipa-inline.h"
33 #include "gimple.h"
34 #include "flags.h"
35 #include "diagnostic.h"
36 #include "gimple-pretty-print.h"
37 #include "lto-streamer.h"
38 #include "data-streamer.h"
39 #include "tree-streamer.h"
40 #include "params.h"
42 /* Intermediate information about a parameter that is only useful during the
43 run of ipa_analyze_node and is not kept afterwards. */
45 struct param_analysis_info
47 bool parm_modified, ref_modified, pt_modified;
48 bitmap parm_visited_statements, pt_visited_statements;
51 /* Vector where the parameter infos are actually stored. */
52 vec<ipa_node_params_t> ipa_node_params_vector;
53 /* Vector of known aggregate values in cloned nodes. */
54 vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
55 /* Vector where the parameter infos are actually stored. */
56 vec<ipa_edge_args_t, va_gc> *ipa_edge_args_vector;
/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;
65 /* Description of a reference to an IPA constant. */
66 struct ipa_cst_ref_desc
68 /* Edge that corresponds to the statement which took the reference. */
69 struct cgraph_edge *cs;
70 /* Linked list of duplicates created when call graph edges are cloned. */
71 struct ipa_cst_ref_desc *next_duplicate;
72 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
73 if out of control. */
74 int refcount;
77 /* Allocation pool for reference descriptions. */
79 static alloc_pool ipa_refdesc_pool;
81 /* Return index of the formal whose tree is PTREE in function which corresponds
82 to INFO. */
84 static int
85 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor_t> descriptors, tree ptree)
87 int i, count;
89 count = descriptors.length ();
90 for (i = 0; i < count; i++)
91 if (descriptors[i].decl == ptree)
92 return i;
94 return -1;
97 /* Return index of the formal whose tree is PTREE in function which corresponds
98 to INFO. */
101 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
103 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
106 /* Populate the param_decl field in parameter DESCRIPTORS that correspond to
107 NODE. */
109 static void
110 ipa_populate_param_decls (struct cgraph_node *node,
111 vec<ipa_param_descriptor_t> &descriptors)
113 tree fndecl;
114 tree fnargs;
115 tree parm;
116 int param_num;
118 fndecl = node->symbol.decl;
119 fnargs = DECL_ARGUMENTS (fndecl);
120 param_num = 0;
121 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
123 descriptors[param_num].decl = parm;
124 param_num++;
128 /* Return how many formal parameters FNDECL has. */
130 static inline int
131 count_formal_params (tree fndecl)
133 tree parm;
134 int count = 0;
136 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
137 count++;
139 return count;
142 /* Initialize the ipa_node_params structure associated with NODE by counting
143 the function parameters, creating the descriptors and populating their
144 param_decls. */
146 void
147 ipa_initialize_node_params (struct cgraph_node *node)
149 struct ipa_node_params *info = IPA_NODE_REF (node);
151 if (!info->descriptors.exists ())
153 int param_count;
155 param_count = count_formal_params (node->symbol.decl);
156 if (param_count)
158 info->descriptors.safe_grow_cleared (param_count);
159 ipa_populate_param_decls (node, info->descriptors);
164 /* Print the jump functions associated with call graph edge CS to file F. */
166 static void
167 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
169 int i, count;
171 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
172 for (i = 0; i < count; i++)
174 struct ipa_jump_func *jump_func;
175 enum jump_func_type type;
177 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
178 type = jump_func->type;
180 fprintf (f, " param %d: ", i);
181 if (type == IPA_JF_UNKNOWN)
182 fprintf (f, "UNKNOWN\n");
183 else if (type == IPA_JF_KNOWN_TYPE)
185 fprintf (f, "KNOWN TYPE: base ");
186 print_generic_expr (f, jump_func->value.known_type.base_type, 0);
187 fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
188 jump_func->value.known_type.offset);
189 print_generic_expr (f, jump_func->value.known_type.component_type, 0);
190 fprintf (f, "\n");
192 else if (type == IPA_JF_CONST)
194 tree val = jump_func->value.constant.value;
195 fprintf (f, "CONST: ");
196 print_generic_expr (f, val, 0);
197 if (TREE_CODE (val) == ADDR_EXPR
198 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
200 fprintf (f, " -> ");
201 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
204 fprintf (f, "\n");
206 else if (type == IPA_JF_PASS_THROUGH)
208 fprintf (f, "PASS THROUGH: ");
209 fprintf (f, "%d, op %s",
210 jump_func->value.pass_through.formal_id,
211 tree_code_name[(int)
212 jump_func->value.pass_through.operation]);
213 if (jump_func->value.pass_through.operation != NOP_EXPR)
215 fprintf (f, " ");
216 print_generic_expr (f,
217 jump_func->value.pass_through.operand, 0);
219 if (jump_func->value.pass_through.agg_preserved)
220 fprintf (f, ", agg_preserved");
221 fprintf (f, "\n");
223 else if (type == IPA_JF_ANCESTOR)
225 fprintf (f, "ANCESTOR: ");
226 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
227 jump_func->value.ancestor.formal_id,
228 jump_func->value.ancestor.offset);
229 print_generic_expr (f, jump_func->value.ancestor.type, 0);
230 if (jump_func->value.ancestor.agg_preserved)
231 fprintf (f, ", agg_preserved");
232 fprintf (f, "\n");
235 if (jump_func->agg.items)
237 struct ipa_agg_jf_item *item;
238 int j;
240 fprintf (f, " Aggregate passed by %s:\n",
241 jump_func->agg.by_ref ? "reference" : "value");
242 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
244 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
245 item->offset);
246 if (TYPE_P (item->value))
247 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
248 tree_low_cst (TYPE_SIZE (item->value), 1));
249 else
251 fprintf (f, "cst: ");
252 print_generic_expr (f, item->value, 0);
254 fprintf (f, "\n");
261 /* Print the jump functions of all arguments on all call graph edges going from
262 NODE to file F. */
264 void
265 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
267 struct cgraph_edge *cs;
269 fprintf (f, " Jump functions of caller %s/%i:\n", cgraph_node_name (node),
270 node->symbol.order);
271 for (cs = node->callees; cs; cs = cs->next_callee)
273 if (!ipa_edge_args_info_available_for_edge_p (cs))
274 continue;
276 fprintf (f, " callsite %s/%i -> %s/%i : \n",
277 xstrdup (cgraph_node_name (node)), node->symbol.order,
278 xstrdup (cgraph_node_name (cs->callee)),
279 cs->callee->symbol.order);
280 ipa_print_node_jump_functions_for_edge (f, cs);
283 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
285 struct cgraph_indirect_call_info *ii;
286 if (!ipa_edge_args_info_available_for_edge_p (cs))
287 continue;
289 ii = cs->indirect_info;
290 if (ii->agg_contents)
291 fprintf (f, " indirect aggregate callsite, calling param %i, "
292 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
293 ii->param_index, ii->offset,
294 ii->by_ref ? "by reference" : "by_value");
295 else
296 fprintf (f, " indirect %s callsite, calling param %i",
297 ii->polymorphic ? "polymorphic" : "simple", ii->param_index);
299 if (cs->call_stmt)
301 fprintf (f, ", for stmt ");
302 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
304 else
305 fprintf (f, "\n");
306 ipa_print_node_jump_functions_for_edge (f, cs);
310 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
312 void
313 ipa_print_all_jump_functions (FILE *f)
315 struct cgraph_node *node;
317 fprintf (f, "\nJump functions:\n");
318 FOR_EACH_FUNCTION (node)
320 ipa_print_node_jump_functions (f, node);
324 /* Set JFUNC to be a known type jump function. */
326 static void
327 ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
328 tree base_type, tree component_type)
330 jfunc->type = IPA_JF_KNOWN_TYPE;
331 jfunc->value.known_type.offset = offset,
332 jfunc->value.known_type.base_type = base_type;
333 jfunc->value.known_type.component_type = component_type;
336 /* Set JFUNC to be a constant jmp function. */
338 static void
339 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
340 struct cgraph_edge *cs)
342 constant = unshare_expr (constant);
343 if (constant && EXPR_P (constant))
344 SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
345 jfunc->type = IPA_JF_CONST;
346 jfunc->value.constant.value = unshare_expr_without_location (constant);
348 if (TREE_CODE (constant) == ADDR_EXPR
349 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
351 struct ipa_cst_ref_desc *rdesc;
352 if (!ipa_refdesc_pool)
353 ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
354 sizeof (struct ipa_cst_ref_desc), 32);
356 rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
357 rdesc->cs = cs;
358 rdesc->next_duplicate = NULL;
359 rdesc->refcount = 1;
360 jfunc->value.constant.rdesc = rdesc;
362 else
363 jfunc->value.constant.rdesc = NULL;
366 /* Set JFUNC to be a simple pass-through jump function. */
367 static void
368 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
369 bool agg_preserved)
371 jfunc->type = IPA_JF_PASS_THROUGH;
372 jfunc->value.pass_through.operand = NULL_TREE;
373 jfunc->value.pass_through.formal_id = formal_id;
374 jfunc->value.pass_through.operation = NOP_EXPR;
375 jfunc->value.pass_through.agg_preserved = agg_preserved;
378 /* Set JFUNC to be an arithmetic pass through jump function. */
380 static void
381 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
382 tree operand, enum tree_code operation)
384 jfunc->type = IPA_JF_PASS_THROUGH;
385 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
386 jfunc->value.pass_through.formal_id = formal_id;
387 jfunc->value.pass_through.operation = operation;
388 jfunc->value.pass_through.agg_preserved = false;
391 /* Set JFUNC to be an ancestor jump function. */
393 static void
394 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
395 tree type, int formal_id, bool agg_preserved)
397 jfunc->type = IPA_JF_ANCESTOR;
398 jfunc->value.ancestor.formal_id = formal_id;
399 jfunc->value.ancestor.offset = offset;
400 jfunc->value.ancestor.type = type;
401 jfunc->value.ancestor.agg_preserved = agg_preserved;
404 /* Extract the acual BINFO being described by JFUNC which must be a known type
405 jump function. */
407 tree
408 ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
410 tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
411 if (!base_binfo)
412 return NULL_TREE;
413 return get_binfo_at_offset (base_binfo,
414 jfunc->value.known_type.offset,
415 jfunc->value.known_type.component_type);
418 /* Structure to be passed in between detect_type_change and
419 check_stmt_for_type_change. */
421 struct type_change_info
423 /* Offset into the object where there is the virtual method pointer we are
424 looking for. */
425 HOST_WIDE_INT offset;
426 /* The declaration or SSA_NAME pointer of the base that we are checking for
427 type change. */
428 tree object;
429 /* If we actually can tell the type that the object has changed to, it is
430 stored in this field. Otherwise it remains NULL_TREE. */
431 tree known_current_type;
432 /* Set to true if dynamic type change has been detected. */
433 bool type_maybe_changed;
434 /* Set to true if multiple types have been encountered. known_current_type
435 must be disregarded in that case. */
436 bool multiple_types_encountered;
439 /* Return true if STMT can modify a virtual method table pointer.
441 This function makes special assumptions about both constructors and
442 destructors which are all the functions that are allowed to alter the VMT
443 pointers. It assumes that destructors begin with assignment into all VMT
444 pointers and that constructors essentially look in the following way:
446 1) The very first thing they do is that they call constructors of ancestor
447 sub-objects that have them.
449 2) Then VMT pointers of this and all its ancestors is set to new values
450 corresponding to the type corresponding to the constructor.
452 3) Only afterwards, other stuff such as constructor of member sub-objects
453 and the code written by the user is run. Only this may include calling
454 virtual functions, directly or indirectly.
456 There is no way to call a constructor of an ancestor sub-object in any
457 other way.
459 This means that we do not have to care whether constructors get the correct
460 type information because they will always change it (in fact, if we define
461 the type to be given by the VMT pointer, it is undefined).
463 The most important fact to derive from the above is that if, for some
464 statement in the section 3, we try to detect whether the dynamic type has
465 changed, we can safely ignore all calls as we examine the function body
466 backwards until we reach statements in section 2 because these calls cannot
467 be ancestor constructors or destructors (if the input is not bogus) and so
468 do not change the dynamic type (this holds true only for automatically
469 allocated objects but at the moment we devirtualize only these). We then
470 must detect that statements in section 2 change the dynamic type and can try
471 to derive the new type. That is enough and we can stop, we will never see
472 the calls into constructors of sub-objects in this code. Therefore we can
473 safely ignore all call statements that we traverse.
476 static bool
477 stmt_may_be_vtbl_ptr_store (gimple stmt)
479 if (is_gimple_call (stmt))
480 return false;
481 else if (is_gimple_assign (stmt))
483 tree lhs = gimple_assign_lhs (stmt);
485 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
487 if (flag_strict_aliasing
488 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
489 return false;
491 if (TREE_CODE (lhs) == COMPONENT_REF
492 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
493 return false;
494 /* In the future we might want to use get_base_ref_and_offset to find
495 if there is a field corresponding to the offset and if so, proceed
496 almost like if it was a component ref. */
499 return true;
502 /* If STMT can be proved to be an assignment to the virtual method table
503 pointer of ANALYZED_OBJ and the type associated with the new table
504 identified, return the type. Otherwise return NULL_TREE. */
506 static tree
507 extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
509 HOST_WIDE_INT offset, size, max_size;
510 tree lhs, rhs, base;
512 if (!gimple_assign_single_p (stmt))
513 return NULL_TREE;
515 lhs = gimple_assign_lhs (stmt);
516 rhs = gimple_assign_rhs1 (stmt);
517 if (TREE_CODE (lhs) != COMPONENT_REF
518 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
519 || TREE_CODE (rhs) != ADDR_EXPR)
520 return NULL_TREE;
521 rhs = get_base_address (TREE_OPERAND (rhs, 0));
522 if (!rhs
523 || TREE_CODE (rhs) != VAR_DECL
524 || !DECL_VIRTUAL_P (rhs))
525 return NULL_TREE;
527 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
528 if (offset != tci->offset
529 || size != POINTER_SIZE
530 || max_size != POINTER_SIZE)
531 return NULL_TREE;
532 if (TREE_CODE (base) == MEM_REF)
534 if (TREE_CODE (tci->object) != MEM_REF
535 || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
536 || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
537 TREE_OPERAND (base, 1)))
538 return NULL_TREE;
540 else if (tci->object != base)
541 return NULL_TREE;
543 return DECL_CONTEXT (rhs);
546 /* Callback of walk_aliased_vdefs and a helper function for
547 detect_type_change to check whether a particular statement may modify
548 the virtual table pointer, and if possible also determine the new type of
549 the (sub-)object. It stores its result into DATA, which points to a
550 type_change_info structure. */
552 static bool
553 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
555 gimple stmt = SSA_NAME_DEF_STMT (vdef);
556 struct type_change_info *tci = (struct type_change_info *) data;
558 if (stmt_may_be_vtbl_ptr_store (stmt))
560 tree type;
561 type = extr_type_from_vtbl_ptr_store (stmt, tci);
562 if (tci->type_maybe_changed
563 && type != tci->known_current_type)
564 tci->multiple_types_encountered = true;
565 tci->known_current_type = type;
566 tci->type_maybe_changed = true;
567 return true;
569 else
570 return false;
575 /* Like detect_type_change but with extra argument COMP_TYPE which will become
576 the component type part of new JFUNC of dynamic type change is detected and
577 the new base type is identified. */
579 static bool
580 detect_type_change_1 (tree arg, tree base, tree comp_type, gimple call,
581 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
583 struct type_change_info tci;
584 ao_ref ao;
586 gcc_checking_assert (DECL_P (arg)
587 || TREE_CODE (arg) == MEM_REF
588 || handled_component_p (arg));
589 /* Const calls cannot call virtual methods through VMT and so type changes do
590 not matter. */
591 if (!flag_devirtualize || !gimple_vuse (call))
592 return false;
594 ao_ref_init (&ao, arg);
595 ao.base = base;
596 ao.offset = offset;
597 ao.size = POINTER_SIZE;
598 ao.max_size = ao.size;
600 tci.offset = offset;
601 tci.object = get_base_address (arg);
602 tci.known_current_type = NULL_TREE;
603 tci.type_maybe_changed = false;
604 tci.multiple_types_encountered = false;
606 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
607 &tci, NULL);
608 if (!tci.type_maybe_changed)
609 return false;
611 if (!tci.known_current_type
612 || tci.multiple_types_encountered
613 || offset != 0)
614 jfunc->type = IPA_JF_UNKNOWN;
615 else
616 ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);
618 return true;
621 /* Detect whether the dynamic type of ARG has changed (before callsite CALL) by
622 looking for assignments to its virtual table pointer. If it is, return true
623 and fill in the jump function JFUNC with relevant type information or set it
624 to unknown. ARG is the object itself (not a pointer to it, unless
625 dereferenced). BASE is the base of the memory access as returned by
626 get_ref_base_and_extent, as is the offset. */
628 static bool
629 detect_type_change (tree arg, tree base, gimple call,
630 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
632 return detect_type_change_1 (arg, base, TREE_TYPE (arg), call, jfunc, offset);
635 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
636 SSA name (its dereference will become the base and the offset is assumed to
637 be zero). */
639 static bool
640 detect_type_change_ssa (tree arg, gimple call, struct ipa_jump_func *jfunc)
642 tree comp_type;
644 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
645 if (!flag_devirtualize
646 || !POINTER_TYPE_P (TREE_TYPE (arg))
647 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != RECORD_TYPE)
648 return false;
650 comp_type = TREE_TYPE (TREE_TYPE (arg));
651 arg = build2 (MEM_REF, ptr_type_node, arg,
652 build_int_cst (ptr_type_node, 0));
654 return detect_type_change_1 (arg, arg, comp_type, call, jfunc, 0);
657 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
658 boolean variable pointed to by DATA. */
660 static bool
661 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
662 void *data)
664 bool *b = (bool *) data;
665 *b = true;
666 return true;
669 /* Return true if a load from a formal parameter PARM_LOAD is known to retreive
670 a value known not to be modified in this function before reaching the
671 statement STMT. PARM_AINFO is a pointer to a structure containing temporary
672 information about the parameter. */
674 static bool
675 parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
676 gimple stmt, tree parm_load)
678 bool modified = false;
679 bitmap *visited_stmts;
680 ao_ref refd;
682 if (parm_ainfo && parm_ainfo->parm_modified)
683 return false;
685 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
686 ao_ref_init (&refd, parm_load);
687 /* We can cache visited statements only when parm_ainfo is available and when
688 we are looking at a naked load of the whole parameter. */
689 if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
690 visited_stmts = NULL;
691 else
692 visited_stmts = &parm_ainfo->parm_visited_statements;
693 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
694 visited_stmts);
695 if (parm_ainfo && modified)
696 parm_ainfo->parm_modified = true;
697 return !modified;
700 /* If STMT is an assignment that loads a value from an parameter declaration,
701 return the index of the parameter in ipa_node_params which has not been
702 modified. Otherwise return -1. */
704 static int
705 load_from_unmodified_param (vec<ipa_param_descriptor_t> descriptors,
706 struct param_analysis_info *parms_ainfo,
707 gimple stmt)
709 int index;
710 tree op1;
712 if (!gimple_assign_single_p (stmt))
713 return -1;
715 op1 = gimple_assign_rhs1 (stmt);
716 if (TREE_CODE (op1) != PARM_DECL)
717 return -1;
719 index = ipa_get_param_decl_index_1 (descriptors, op1);
720 if (index < 0
721 || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
722 : NULL, stmt, op1))
723 return -1;
725 return index;
728 /* Return true if memory reference REF loads data that are known to be
729 unmodified in this function before reaching statement STMT. PARM_AINFO, if
730 non-NULL, is a pointer to a structure containing temporary information about
731 PARM. */
733 static bool
734 parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
735 gimple stmt, tree ref)
737 bool modified = false;
738 ao_ref refd;
740 gcc_checking_assert (gimple_vuse (stmt));
741 if (parm_ainfo && parm_ainfo->ref_modified)
742 return false;
744 ao_ref_init (&refd, ref);
745 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
746 NULL);
747 if (parm_ainfo && modified)
748 parm_ainfo->ref_modified = true;
749 return !modified;
752 /* Return true if the data pointed to by PARM is known to be unmodified in this
753 function before reaching call statement CALL into which it is passed.
754 PARM_AINFO is a pointer to a structure containing temporary information
755 about PARM. */
757 static bool
758 parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
759 gimple call, tree parm)
761 bool modified = false;
762 ao_ref refd;
764 /* It's unnecessary to calculate anything about memory contnets for a const
765 function because it is not goin to use it. But do not cache the result
766 either. Also, no such calculations for non-pointers. */
767 if (!gimple_vuse (call)
768 || !POINTER_TYPE_P (TREE_TYPE (parm)))
769 return false;
771 if (parm_ainfo->pt_modified)
772 return false;
774 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
775 walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
776 parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
777 if (modified)
778 parm_ainfo->pt_modified = true;
779 return !modified;
782 /* Return true if we can prove that OP is a memory reference loading unmodified
783 data from an aggregate passed as a parameter and if the aggregate is passed
784 by reference, that the alias type of the load corresponds to the type of the
785 formal parameter (so that we can rely on this type for TBAA in callers).
786 INFO and PARMS_AINFO describe parameters of the current function (but the
787 latter can be NULL), STMT is the load statement. If function returns true,
788 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
789 within the aggregate and whether it is a load from a value passed by
790 reference respectively. */
792 static bool
793 ipa_load_from_parm_agg_1 (vec<ipa_param_descriptor_t> descriptors,
794 struct param_analysis_info *parms_ainfo, gimple stmt,
795 tree op, int *index_p, HOST_WIDE_INT *offset_p,
796 bool *by_ref_p)
798 int index;
799 HOST_WIDE_INT size, max_size;
800 tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);
802 if (max_size == -1 || max_size != size || *offset_p < 0)
803 return false;
805 if (DECL_P (base))
807 int index = ipa_get_param_decl_index_1 (descriptors, base);
808 if (index >= 0
809 && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
810 : NULL, stmt, op))
812 *index_p = index;
813 *by_ref_p = false;
814 return true;
816 return false;
819 if (TREE_CODE (base) != MEM_REF
820 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
821 || !integer_zerop (TREE_OPERAND (base, 1)))
822 return false;
824 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
826 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
827 index = ipa_get_param_decl_index_1 (descriptors, parm);
829 else
831 /* This branch catches situations where a pointer parameter is not a
832 gimple register, for example:
834 void hip7(S*) (struct S * p)
836 void (*<T2e4>) (struct S *) D.1867;
837 struct S * p.1;
839 <bb 2>:
840 p.1_1 = p;
841 D.1867_2 = p.1_1->f;
842 D.1867_2 ();
843 gdp = &p;
846 gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
847 index = load_from_unmodified_param (descriptors, parms_ainfo, def);
850 if (index >= 0
851 && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
852 stmt, op))
854 *index_p = index;
855 *by_ref_p = true;
856 return true;
858 return false;
861 /* Just like the previous function, just without the param_analysis_info
862 pointer, for users outside of this file. */
864 bool
865 ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
866 tree op, int *index_p, HOST_WIDE_INT *offset_p,
867 bool *by_ref_p)
869 return ipa_load_from_parm_agg_1 (info->descriptors, NULL, stmt, op, index_p,
870 offset_p, by_ref_p);
873 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
874 of an assignment statement STMT, try to determine whether we are actually
875 handling any of the following cases and construct an appropriate jump
876 function into JFUNC if so:
878 1) The passed value is loaded from a formal parameter which is not a gimple
879 register (most probably because it is addressable, the value has to be
880 scalar) and we can guarantee the value has not changed. This case can
881 therefore be described by a simple pass-through jump function. For example:
883 foo (int a)
885 int a.0;
887 a.0_2 = a;
888 bar (a.0_2);
890 2) The passed value can be described by a simple arithmetic pass-through
891 jump function. E.g.
893 foo (int a)
895 int D.2064;
897 D.2064_4 = a.1(D) + 4;
898 bar (D.2064_4);
900 This case can also occur in combination of the previous one, e.g.:
902 foo (int a, int z)
904 int a.0;
905 int D.2064;
907 a.0_3 = a;
908 D.2064_4 = a.0_3 + 4;
909 foo (D.2064_4);
911 3) The passed value is an address of an object within another one (which
912 also passed by reference). Such situations are described by an ancestor
913 jump function and describe situations such as:
915 B::foo() (struct B * const this)
917 struct A * D.1845;
919 D.1845_2 = &this_1(D)->D.1748;
920 A::bar (D.1845_2);
922 INFO is the structure describing individual parameters access different
923 stages of IPA optimizations. PARMS_AINFO contains the information that is
924 only needed for intraprocedural analysis. */
926 static void
927 compute_complex_assign_jump_func (struct ipa_node_params *info,
928 struct param_analysis_info *parms_ainfo,
929 struct ipa_jump_func *jfunc,
930 gimple call, gimple stmt, tree name)
932 HOST_WIDE_INT offset, size, max_size;
933 tree op1, tc_ssa, base, ssa;
934 int index;
936 op1 = gimple_assign_rhs1 (stmt);
938 if (TREE_CODE (op1) == SSA_NAME)
940 if (SSA_NAME_IS_DEFAULT_DEF (op1))
941 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
942 else
943 index = load_from_unmodified_param (info->descriptors, parms_ainfo,
944 SSA_NAME_DEF_STMT (op1));
945 tc_ssa = op1;
947 else
949 index = load_from_unmodified_param (info->descriptors, parms_ainfo, stmt);
950 tc_ssa = gimple_assign_lhs (stmt);
953 if (index >= 0)
955 tree op2 = gimple_assign_rhs2 (stmt);
957 if (op2)
959 if (!is_gimple_ip_invariant (op2)
960 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
961 && !useless_type_conversion_p (TREE_TYPE (name),
962 TREE_TYPE (op1))))
963 return;
965 ipa_set_jf_arith_pass_through (jfunc, index, op2,
966 gimple_assign_rhs_code (stmt));
968 else if (gimple_assign_single_p (stmt)
969 && !detect_type_change_ssa (tc_ssa, call, jfunc))
971 bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
972 call, tc_ssa);
973 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
975 return;
978 if (TREE_CODE (op1) != ADDR_EXPR)
979 return;
980 op1 = TREE_OPERAND (op1, 0);
981 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
982 return;
983 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
984 if (TREE_CODE (base) != MEM_REF
985 /* If this is a varying address, punt. */
986 || max_size == -1
987 || max_size != size)
988 return;
989 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
990 ssa = TREE_OPERAND (base, 0);
991 if (TREE_CODE (ssa) != SSA_NAME
992 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
993 || offset < 0)
994 return;
996 /* Dynamic types are changed only in constructors and destructors and */
997 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
998 if (index >= 0
999 && !detect_type_change (op1, base, call, jfunc, offset))
1000 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index,
1001 parm_ref_data_pass_through_p (&parms_ainfo[index],
1002 call, ssa));
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  If case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  /* Only a plain single-rhs assignment can be the address computation we
     are looking for.  */
  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  /* Strip the ADDR_EXPR; OBJ keeps the full reference for the caller.  */
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  /* The base of the MEM_REF must be an unmodified (default-def) SSA name
     of a PARM_DECL, otherwise this is not an ancestor of a parameter.  */
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  /* Add the constant offset encoded in the MEM_REF itself (bytes) to the
     offset accumulated from handled components (bits).  */
  *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_node_params *info,
                                    struct param_analysis_info *parms_ainfo,
                                    struct ipa_jump_func *jfunc,
                                    gimple call, gimple phi)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  /* The pattern has exactly two PHI arguments: the ancestor address and a
     null pointer.  */
  if (gimple_phi_num_args (phi) != 2)
    return;

  /* TMP is whichever argument is not the zero; it must be the address.  */
  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  /* It must be a defined (non-parameter) SSA name pointing to a record.  */
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  /* The address must be computed in a block dominated by the condition.  */
  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  gcc_assert (index >= 0);

  /* The predecessor block must end with "if (parm != 0)".  */
  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  /* Both PHI predecessors must be the blocks we matched above; anything
     else means additional control flow we do not understand.  */
  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
        return;
    }

  /* Only record the ancestor jump function if no dynamic type change could
     have happened between the parameter and the call.  */
  if (!detect_type_change (obj, expr, call, jfunc, offset))
    ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index,
                         parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                       call, parm));
}
/* Given OP which is passed as an actual argument to a called function,
   determine if it is possible to construct a KNOWN_TYPE jump function for it
   and if so, create one and store it to JFUNC.  CALL is the call statement
   against which possible dynamic type changes are checked.  */

static void
compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
                              gimple call)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  /* Only addresses of records are interesting, and only when
     devirtualization is enabled at all.  */
  if (!flag_devirtualize
      || TREE_CODE (op) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE)
    return;

  op = TREE_OPERAND (op, 0);
  base = get_ref_base_and_extent (op, &offset, &size, &max_size);
  /* Require a constant-sized access rooted in a local declaration of record
     type; globals are punted on because other code may change their dynamic
     type.  */
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
      || is_global_var (base))
    return;

  /* Without a BINFO there is no virtual table info to devirtualize with,
     and a detected dynamic type change invalidates the known type.  */
  if (!TYPE_BINFO (TREE_TYPE (base))
      || detect_type_change (op, base, call, jfunc, offset))
    return;

  ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base), TREE_TYPE (op));
}
1164 /* Inspect the given TYPE and return true iff it has the same structure (the
1165 same number of fields of the same types) as a C++ member pointer. If
1166 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1167 corresponding fields there. */
1169 static bool
1170 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1172 tree fld;
1174 if (TREE_CODE (type) != RECORD_TYPE)
1175 return false;
1177 fld = TYPE_FIELDS (type);
1178 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1179 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1180 || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
1181 return false;
1183 if (method_ptr)
1184 *method_ptr = fld;
1186 fld = DECL_CHAIN (fld);
1187 if (!fld || INTEGRAL_TYPE_P (fld)
1188 || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
1189 return false;
1190 if (delta)
1191 *delta = fld;
1193 if (DECL_CHAIN (fld))
1194 return false;
1196 return true;
1199 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1200 return the rhs of its defining statement. Otherwise return RHS as it
1201 is. */
1203 static inline tree
1204 get_ssa_def_if_simple_copy (tree rhs)
1206 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1208 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1210 if (gimple_assign_single_p (def_stmt))
1211 rhs = gimple_assign_rhs1 (def_stmt);
1212 else
1213 break;
1215 return rhs;
/* Simple linked list, describing known contents of an aggregate before
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  JFUNC is the jump function into
   which the constants are subsequently stored.  */

static void
determine_known_aggregate_parts (gimple call, tree arg,
                                 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (TREE_TYPE (arg)))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
        {
          tree type_size;
          /* The pointed-to type must have a constant size.  */
          if (!host_integerp (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg))), 1))
            return;
          check_ref = true;
          arg_base = arg;
          arg_offset = 0;
          type_size = TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)));
          arg_size = tree_low_cst (type_size, 1);
          ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
        }
      else if (TREE_CODE (arg) == ADDR_EXPR)
        {
          HOST_WIDE_INT arg_max_size;

          arg = TREE_OPERAND (arg, 0);
          arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
                                          &arg_max_size);
          /* Punt on varying or incomplete accesses.  */
          if (arg_max_size == -1
              || arg_max_size != arg_size
              || arg_offset < 0)
            return;
          if (DECL_P (arg_base))
            {
              tree size;
              check_ref = false;
              size = build_int_cst (integer_type_node, arg_size);
              ao_ref_init_from_ptr_and_size (&r, arg_base, size);
            }
          else
            return;
        }
      else
        return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;

      /* An aggregate passed by value.  */
      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
                                          &arg_max_size);
      if (arg_max_size == -1
          || arg_max_size != arg_size
          || arg_offset < 0)
        return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
     describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;
      bool partial_overlap;

      /* Statements that cannot affect the tracked memory are ignored;
         anything else we cannot analyze stops the walk.  */
      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
        continue;
      if (!gimple_assign_single_p (stmt))
        break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      /* NOTE(review): is_gimple_reg_type expects a type but is handed the
         RHS value here, which makes this clause always true — confirm
         whether TREE_TYPE (rhs) was intended.  */
      if (!is_gimple_reg_type (rhs)
          || TREE_CODE (lhs) == BIT_FIELD_REF
          || contains_bitfld_component_ref_p (lhs))
        break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
                                          &lhs_max_size);
      /* The store must be constant-sized and must not straddle either
         boundary of the tracked region.  */
      if (lhs_max_size == -1
          || lhs_max_size != lhs_size
          || (lhs_offset < arg_offset
              && lhs_offset + lhs_size > arg_offset)
          || (lhs_offset < arg_offset + arg_size
              && lhs_offset + lhs_size > arg_offset + arg_size))
        break;

      if (check_ref)
        {
          /* For pointer arguments the store must go directly through the
             pointer with zero MEM_REF offset.  */
          if (TREE_CODE (lhs_base) != MEM_REF
              || TREE_OPERAND (lhs_base, 0) != arg_base
              || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
            break;
        }
      else if (lhs_base != arg_base)
        {
          /* A store to a different declaration cannot alias; anything else
             might.  */
          if (DECL_P (lhs_base))
            continue;
          else
            break;
        }

      /* Stores entirely outside the tracked region are irrelevant.  */
      if (lhs_offset + lhs_size < arg_offset
          || lhs_offset >= (arg_offset + arg_size))
        continue;

      /* Find the insertion point in the offset-sorted list, detecting
         partial overlaps with already recorded entries.  */
      partial_overlap = false;
      p = &list;
      while (*p && (*p)->offset < lhs_offset)
        {
          if ((*p)->offset + (*p)->size > lhs_offset)
            {
              partial_overlap = true;
              break;
            }
          p = &(*p)->next;
        }
      if (partial_overlap)
        break;
      if (*p && (*p)->offset < lhs_offset + lhs_size)
        {
          if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
            /* We already know this value is subsequently overwritten with
               something else.  */
            continue;
          else
            /* Otherwise this is a partial overlap which we cannot
               represent.  */
            break;
        }

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
        {
          n->constant = rhs;
          const_count++;
        }
      else
        n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      /* Bound the amount of work and the size of the produced jump
         function by PARAM_IPA_MAX_AGG_ITEMS.  */
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
          || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
        break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      vec_alloc (jfunc->agg.items, const_count);
      while (list)
        {
          if (list->constant)
            {
              struct ipa_agg_jf_item item;
              /* Offsets are recorded relative to the aggregate start.  */
              item.offset = list->offset - arg_offset;
              gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
              item.value = unshare_expr_without_location (list->constant);
              jfunc->agg.items->quick_push (item);
            }
          list = list->next;
        }
    }
}
/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  PARMS_AINFO holds per-parameter analysis intermediate
   data of the caller.  */

static void
ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
                                     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gimple call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);

  /* Nothing to do for argument-less calls or if the functions were already
     computed for this edge.  */
  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);

      if (is_gimple_ip_invariant (arg))
        ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
               && TREE_CODE (arg) == PARM_DECL)
        {
          int index = ipa_get_param_decl_index (info, arg);

          gcc_assert (index >=0);
          /* Aggregate passed by value, check for pass-through, otherwise we
             will attempt to fill in aggregate contents later in this
             for cycle.  */
          if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
            {
              ipa_set_jf_simple_pass_through (jfunc, index, false);
              continue;
            }
        }
      else if (TREE_CODE (arg) == SSA_NAME)
        {
          if (SSA_NAME_IS_DEFAULT_DEF (arg))
            {
              /* The argument is an unmodified formal parameter.  */
              int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
              if (index >= 0
                  && !detect_type_change_ssa (arg, call, jfunc))
                {
                  bool agg_p;
                  agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                        call, arg);
                  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
                }
            }
          else
            {
              /* The argument is computed; try to match the arithmetic or
                 ancestor (PHI) patterns.  */
              gimple stmt = SSA_NAME_DEF_STMT (arg);
              if (is_gimple_assign (stmt))
                compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
                                                  call, stmt, arg);
              else if (gimple_code (stmt) == GIMPLE_PHI)
                compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
                                                    call, stmt);
            }
        }
      else
        compute_known_type_jump_func (arg, jfunc, call);

      /* Unless the jump function already guarantees the aggregate contents
         are preserved, try to discover known constant parts of aggregates
         passed by value or by reference.  */
      if ((jfunc->type != IPA_JF_PASS_THROUGH
              || !ipa_get_jf_pass_through_agg_preserved (jfunc))
          && (jfunc->type != IPA_JF_ANCESTOR
              || !ipa_get_jf_ancestor_agg_preserved (jfunc))
          && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
              || (POINTER_TYPE_P (TREE_TYPE (arg)))))
        determine_known_aggregate_parts (call, arg, jfunc);
    }
}
1509 /* Compute jump functions for all edges - both direct and indirect - outgoing
1510 from NODE. Also count the actual arguments in the process. */
1512 static void
1513 ipa_compute_jump_functions (struct cgraph_node *node,
1514 struct param_analysis_info *parms_ainfo)
1516 struct cgraph_edge *cs;
1518 for (cs = node->callees; cs; cs = cs->next_callee)
1520 struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
1521 NULL);
1522 /* We do not need to bother analyzing calls to unknown
1523 functions unless they may become known during lto/whopr. */
1524 if (!callee->analyzed && !flag_lto)
1525 continue;
1526 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
1529 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
1530 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
                                    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  /* The load may be either a COMPONENT_REF of a MEM_REF (field access) or
     a bare MEM_REF with the field selected via its offset.  */
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  /* The base must be the address of a PARM_DECL whose type has the shape
     of a C++ member pointer.  */
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      /* With an explicit field reference the MEM_REF offset must be zero
         and the referenced field the one we look for.  */
      if (integer_nonzerop (ref_offset))
        return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    /* Otherwise the MEM_REF offset itself must select the field.  */
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}
1585 /* Returns true iff T is an SSA_NAME defined by a statement. */
1587 static bool
1588 ipa_is_ssa_with_stmt_def (tree t)
1590 if (TREE_CODE (t) == SSA_NAME
1591 && !SSA_NAME_IS_DEFAULT_DEF (t))
1592 return true;
1593 else
1594 return false;
/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
{
  struct cgraph_edge *cs;

  cs = cgraph_edge (node, stmt);
  cs->indirect_info->param_index = param_index;
  /* Reset the remaining indirect-call info; callers that know more (offset,
     polymorphism, aggregate loads) fill it in afterwards.  */
  cs->indirect_info->offset = 0;
  cs->indirect_info->polymorphic = 0;
  cs->indirect_info->agg_contents = 0;
  return cs;
}
/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

     <bb 2>:
       f$__delta_5 = f.__delta;
       f$__pfn_24 = f.__pfn;

   or
     <bb 2>:
       f$__delta_5 = MEM[(struct  *)&f];
       f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and a few lines below:

     <bb 5>
       D.2496_3 = (int) f$__pfn_24;
       D.2497_4 = D.2496_3 & 1;
       if (D.2497_4 != 0)
         goto <bb 3>;
       else
         goto <bb 4>;

     <bb 6>:
       D.2500_7 = (unsigned int) f$__delta_5;
       D.2501_8 = &S + D.2500_7;
       D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
       D.2503_10 = *D.2502_9;
       D.2504_12 = f$__pfn_24 + -1;
       D.2505_13 = (unsigned int) D.2504_12;
       D.2506_14 = D.2503_10 + D.2505_13;
       D.2507_15 = *D.2506_14;
       iftmp.11_16 = (String:: *) D.2507_15;

     <bb 7>:
       # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
       D.2500_19 = (unsigned int) f$__delta_5;
       D.2508_20 = &S + D.2500_19;
       D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */

static void
ipa_analyze_indirect_call_uses (struct cgraph_node *node,
                                struct ipa_node_params *info,
                                struct param_analysis_info *parms_ainfo,
                                gimple call, tree target)
{
  gimple def;
  tree n1, n2;
  gimple d1, d2;
  tree rec, rec2, cond;
  gimple branch;
  int index;
  basic_block bb, virt_bb, join;
  HOST_WIDE_INT offset;
  bool by_ref;

  /* The easy case: the call target is an unmodified formal parameter.  */
  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
        ipa_note_param_call (node, index, call);
      return;
    }

  /* The target may also be loaded from an aggregate passed to us by value
     or reference.  */
  def = SSA_NAME_DEF_STMT (target);
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg_1 (info->descriptors, parms_ainfo, def,
                                   gimple_assign_rhs1 (def), &index, &offset,
                                   &by_ref))
    {
      struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer. */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function. */
  n1 = PHI_ARG_DEF (def, 0);
  n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  d1 = SSA_NAME_DEF_STMT (n1);
  d2 = SSA_NAME_DEF_STMT (n2);

  join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      /* Both arguments being pfn loads would be ambiguous.  */
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
        return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern. */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn. */

  branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  /* Look through an optional conversion of the tested value.  */
  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
        return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  /* Depending on the target ABI, the virtuality bit lives either in the
     pfn or in the delta field.  */
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
                                             (TARGET_PTRMEMFUNC_VBIT_LOCATION
                                              == ptrmemfunc_vbit_in_delta),
                                             NULL);
  if (rec != rec2)
    return;

  /* Only record the call if the member pointer parameter itself cannot be
     modified before the call.  */
  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
    }

  return;
}
/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
   object referenced in the expression is a formal parameter of the caller
   (described by INFO), create a call note for the statement.  */

static void
ipa_analyze_virtual_call_uses (struct cgraph_node *node,
                               struct ipa_node_params *info, gimple call,
                               tree target)
{
  struct cgraph_edge *cs;
  struct cgraph_indirect_call_info *ii;
  struct ipa_jump_func jfunc;
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      /* The object is directly an unmodified formal parameter.  */
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
        return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      if (detect_type_change_ssa (obj, call, &jfunc))
        return;
    }
  else
    {
      /* Otherwise it may be an ancestor (multiple-inheritance upcast) of a
         formal parameter.  */
      gimple stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
        return;
      index = ipa_get_param_decl_index (info,
                                        SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (obj, expr, call, &jfunc, anc_offset))
        return;
    }

  /* Record a polymorphic indirect call on the parameter.  */
  cs = ipa_note_param_call (node, index, call);
  ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
  ii->otr_type = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (target)));
  ii->polymorphic = 1;
}
1865 /* Analyze a call statement CALL whether and how it utilizes formal parameters
1866 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
1867 containing intermediate information about each formal parameter. */
1869 static void
1870 ipa_analyze_call_uses (struct cgraph_node *node,
1871 struct ipa_node_params *info,
1872 struct param_analysis_info *parms_ainfo, gimple call)
1874 tree target = gimple_call_fn (call);
1876 if (!target)
1877 return;
1878 if (TREE_CODE (target) == SSA_NAME)
1879 ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
1880 else if (TREE_CODE (target) == OBJ_TYPE_REF)
1881 ipa_analyze_virtual_call_uses (node, info, call, target);
1885 /* Analyze the call statement STMT with respect to formal parameters (described
1886 in INFO) of caller given by NODE. Currently it only checks whether formal
1887 parameters are called. PARMS_AINFO is a pointer to a vector containing
1888 intermediate information about each formal parameter. */
1890 static void
1891 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
1892 struct param_analysis_info *parms_ainfo, gimple stmt)
1894 if (is_gimple_call (stmt))
1895 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
1898 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
1899 If OP is a parameter declaration, mark it as used in the info structure
1900 passed in DATA. */
1902 static bool
1903 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
1904 tree op, void *data)
1906 struct ipa_node_params *info = (struct ipa_node_params *) data;
1908 op = get_base_address (op);
1909 if (op
1910 && TREE_CODE (op) == PARM_DECL)
1912 int index = ipa_get_param_decl_index (info, op);
1913 gcc_assert (index >= 0);
1914 ipa_set_param_used (info, index, true);
1917 return false;
/* Scan the function body of NODE and inspect the uses of formal parameters.
   Store the findings in various structures of the associated ipa_node_params
   structure, such as parameter flags, notes etc.  PARMS_AINFO is a pointer to a
   vector containing intermediate information about each formal parameter.  */

static void
ipa_analyze_params_uses (struct cgraph_node *node,
                         struct param_analysis_info *parms_ainfo)
{
  tree decl = node->symbol.decl;
  basic_block bb;
  struct function *func;
  gimple_stmt_iterator gsi;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int i;

  /* Nothing to do without parameters or when already analyzed.  */
  if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
    return;

  for (i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      int controlled_uses = 0;

      /* For SSA regs see if parameter is used.  For non-SSA we compute
         the flag during modification analysis.  */
      if (is_gimple_reg (parm))
        {
          tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->symbol.decl),
                                       parm);
          if (ddef && !has_zero_uses (ddef))
            {
              imm_use_iterator imm_iter;
              use_operand_p use_p;

              ipa_set_param_used (info, i, true);
              /* Count the uses; any use outside a call argument position
                 makes the uses undescribable.  */
              FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
                if (!is_gimple_call (USE_STMT (use_p)))
                  {
                    controlled_uses = IPA_UNDESCRIBED_USE;
                    break;
                  }
                else
                  controlled_uses++;
            }
          else
            controlled_uses = 0;
        }
      else
        controlled_uses = IPA_UNDESCRIBED_USE;
      ipa_set_controlled_uses (info, i, controlled_uses);
    }

  /* Walk all statements (skipping debug stmts) and all PHIs, recording
     indirect/virtual calls through parameters and marking parameters whose
     address or value is referenced.  */
  func = DECL_STRUCT_FUNCTION (decl);
  FOR_EACH_BB_FN (bb, func)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);

          if (is_gimple_debug (stmt))
            continue;

          ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
          walk_stmt_load_store_addr_ops (stmt, info,
                                         visit_ref_for_mod_analysis,
                                         visit_ref_for_mod_analysis,
                                         visit_ref_for_mod_analysis);
        }
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
                                       visit_ref_for_mod_analysis,
                                       visit_ref_for_mod_analysis,
                                       visit_ref_for_mod_analysis);
    }

  info->uses_analysis_done = 1;
}
1999 /* Free stuff in PARMS_AINFO, assume there are PARAM_COUNT parameters. */
2001 static void
2002 free_parms_ainfo (struct param_analysis_info *parms_ainfo, int param_count)
2004 int i;
2006 for (i = 0; i < param_count; i++)
2008 if (parms_ainfo[i].parm_visited_statements)
2009 BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
2010 if (parms_ainfo[i].pt_visited_statements)
2011 BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
/* Initialize the array describing properties of of formal parameters
   of NODE, analyze their uses and compute jump functions associated
   with actual arguments of calls from within NODE.  */

void
ipa_analyze_node (struct cgraph_node *node)
{
  struct ipa_node_params *info;
  struct param_analysis_info *parms_ainfo;
  int param_count;

  /* Make sure the per-node and per-edge info vectors exist before use.  */
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = IPA_NODE_REF (node);
  /* Analysis functions below expect NODE's function to be current.  */
  push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
  ipa_initialize_node_params (node);

  /* Allocate zero-initialized scratch info for each formal parameter; it
     only lives for the duration of this analysis.  */
  param_count = ipa_get_param_count (info);
  parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
  memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);

  ipa_analyze_params_uses (node, parms_ainfo);
  ipa_compute_jump_functions (node, parms_ainfo);

  free_parms_ainfo (parms_ainfo, param_count);
  pop_cfun ();
}
/* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF
   attempt a type-based devirtualization.  If successful, return the
   target function declaration, otherwise return NULL.  */

tree
ipa_intraprocedural_devirtualization (gimple call)
{
  tree binfo, token, fndecl;
  struct ipa_jump_func jfunc;
  tree otr = gimple_call_fn (call);

  /* Try to determine the static type of the object at this call.  */
  jfunc.type = IPA_JF_UNKNOWN;
  compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
                                call);
  if (jfunc.type != IPA_JF_KNOWN_TYPE)
    return NULL_TREE;
  binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
  if (!binfo)
    return NULL_TREE;
  /* Look the method up in the virtual table identified by the BINFO using
     the vtable slot token of the OBJ_TYPE_REF.  */
  token = OBJ_TYPE_REF_TOKEN (otr);
  fndecl = gimple_get_virt_method_for_binfo (tree_low_cst (token, 1),
                                             binfo);
  return fndecl;
}
/* Update the jump function DST when the call graph edge corresponding to SRC is
   is being inlined, knowing that DST is of type ancestor and src of known
   type.  */

static void
combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
                                     struct ipa_jump_func *dst)
{
  HOST_WIDE_INT combined_offset;
  tree combined_type;

  /* The resulting known-type offset is the known-type offset of the inlined
     argument plus the ancestor adjustment of DST; the component type becomes
     the ancestor's type.  */
  combined_offset = ipa_get_jf_known_type_offset (src)
    + ipa_get_jf_ancestor_offset (dst);
  combined_type = ipa_get_jf_ancestor_type (dst);

  /* DST is turned from an ancestor jump function into a known-type one.  */
  ipa_set_jf_known_type (dst, combined_offset,
                         ipa_get_jf_known_type_base_type (src),
                         combined_type);
}
/* Update the jump functions associated with call graph edge E when the call
   graph edge CS is being inlined, assuming that E->caller is already (possibly
   indirectly) inlined into CS->callee and that E has not been inlined.  */

static void
update_jump_functions_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_edge *e)
{
  struct ipa_edge_args *top = IPA_EDGE_REF (cs);
  struct ipa_edge_args *args = IPA_EDGE_REF (e);
  int count = ipa_get_cs_argument_count (args);
  int i;

  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);

      if (dst->type == IPA_JF_ANCESTOR)
	{
	  struct ipa_jump_func *src;
	  int dst_fid = dst->value.ancestor.formal_id;

	  /* Variable number of arguments can cause havoc if we try to access
	     one that does not exist in the inlined edge.  So make sure we
	     don't.  */
	  if (dst_fid >= ipa_get_cs_argument_count (top))
	    {
	      dst->type = IPA_JF_UNKNOWN;
	      continue;
	    }

	  src = ipa_get_ith_jump_func (top, dst_fid);

	  /* Propagate aggregate values from SRC if the ancestor walk does
	     not clobber them (or they were passed by value anyway).  */
	  if (src->agg.items
	      && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
	    {
	      struct ipa_agg_jf_item *item;
	      int j;

	      /* Currently we do not produce clobber aggregate jump functions,
		 replace with merging when we do.  */
	      gcc_assert (!dst->agg.items);

	      dst->agg.items = vec_safe_copy (src->agg.items);
	      dst->agg.by_ref = src->agg.by_ref;
	      /* Items were described relative to SRC's base; rebase them to
		 DST's ancestor offset.  */
	      FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
		item->offset -= dst->value.ancestor.offset;
	    }

	  if (src->type == IPA_JF_KNOWN_TYPE)
	    combine_known_type_and_ancestor_jfs (src, dst);
	  else if (src->type == IPA_JF_PASS_THROUGH
		   && src->value.pass_through.operation == NOP_EXPR)
	    {
	      /* A simple pass-through only redirects the formal id; the
		 aggregate-preservation flags must both hold.  */
	      dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
	      dst->value.ancestor.agg_preserved &=
		src->value.pass_through.agg_preserved;
	    }
	  else if (src->type == IPA_JF_ANCESTOR)
	    {
	      /* Two chained ancestor walks compose by adding offsets.  */
	      dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
	      dst->value.ancestor.offset += src->value.ancestor.offset;
	      dst->value.ancestor.agg_preserved &=
		src->value.ancestor.agg_preserved;
	    }
	  else
	    dst->type = IPA_JF_UNKNOWN;
	}
      else if (dst->type == IPA_JF_PASS_THROUGH)
	{
	  struct ipa_jump_func *src;
	  /* We must check range due to calls with variable number of arguments
	     and we cannot combine jump functions with operations.  */
	  if (dst->value.pass_through.operation == NOP_EXPR
	      && (dst->value.pass_through.formal_id
		  < ipa_get_cs_argument_count (top)))
	    {
	      bool agg_p;
	      int dst_fid = dst->value.pass_through.formal_id;
	      src = ipa_get_ith_jump_func (top, dst_fid);
	      /* Remember DST's flag before overwriting DST with SRC.  */
	      agg_p = dst->value.pass_through.agg_preserved;

	      dst->type = src->type;
	      dst->value = src->value;

	      if (src->agg.items
		  && (agg_p || !src->agg.by_ref))
		{
		  /* Currently we do not produce clobber aggregate jump
		     functions, replace with merging when we do.  */
		  gcc_assert (!dst->agg.items);

		  dst->agg.by_ref = src->agg.by_ref;
		  dst->agg.items = vec_safe_copy (src->agg.items);
		}

	      /* If DST's pass-through did not preserve aggregates, the
		 combined function must not claim to either.  */
	      if (!agg_p)
		{
		  if (dst->type == IPA_JF_PASS_THROUGH)
		    dst->value.pass_through.agg_preserved = false;
		  else if (dst->type == IPA_JF_ANCESTOR)
		    dst->value.ancestor.agg_preserved = false;
		}
	    }
	  else
	    dst->type = IPA_JF_UNKNOWN;
	}
    }
}
/* If TARGET is an addr_expr of a function declaration, make it the destination
   of an indirect edge IE and return the edge.  Otherwise, return NULL.  */

struct cgraph_edge *
ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
{
  struct cgraph_node *callee;
  struct inline_edge_summary *es = inline_edge_summary (ie);
  bool unreachable = false;

  if (TREE_CODE (target) == ADDR_EXPR)
    target = TREE_OPERAND (target, 0);
  if (TREE_CODE (target) != FUNCTION_DECL)
    {
      target = canonicalize_constructor_val (target, NULL);
      if (!target || TREE_CODE (target) != FUNCTION_DECL)
	{
	  /* The target is provably not a function; redirect the call to
	     __builtin_unreachable so the optimizers can exploit that the
	     path is dead.  */
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered direct call to non-function"
		     " in %s/%i, making it unreachable.\n",
		     cgraph_node_name (ie->caller), ie->caller->symbol.order);
	  target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
	  callee = cgraph_get_create_node (target);
	  unreachable = true;
	}
      else
	callee = cgraph_get_node (target);
    }
  else
    callee = cgraph_get_node (target);

  /* Because may-edges are not explicitly represented and vtable may be
     external, we may create the first reference to the object in the unit.  */
  if (!callee || callee->global.inlined_to)
    {
      /* We are better to ensure we can refer to it.
	 In the case of static functions we are out of luck, since we already
	 removed its body.  In the case of public functions we may or may
	 not introduce the reference.  */
      if (!canonicalize_constructor_val (target, NULL)
	  || !TREE_PUBLIC (target))
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a known target "
		     "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
		     xstrdup (cgraph_node_name (ie->caller)),
		     ie->caller->symbol.order,
		     xstrdup (cgraph_node_name (ie->callee)),
		     ie->callee->symbol.order);
	  return NULL;
	}
      callee = cgraph_get_create_real_symbol_node (target);
    }
  ipa_check_create_node_params ();

  /* We can not make edges to inline clones.  It is a bug that someone removed
     the cgraph node too early.  */
  gcc_assert (!callee->global.inlined_to);

  cgraph_make_edge_direct (ie, callee);
  /* Re-fetch the summary: cgraph_make_edge_direct may have reallocated it.  */
  es = inline_edge_summary (ie);
  /* A direct call is cheaper than the indirect one it replaces; adjust the
     size/time estimates accordingly.  */
  es->call_stmt_size -= (eni_size_weights.indirect_call_cost
			 - eni_size_weights.call_cost);
  es->call_stmt_time -= (eni_time_weights.indirect_call_cost
			 - eni_time_weights.call_cost);
  if (dump_file && !unreachable)
    {
      fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
	       "(%s/%i -> %s/%i), for stmt ",
	       ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
	       xstrdup (cgraph_node_name (ie->caller)),
	       ie->caller->symbol.order,
	       xstrdup (cgraph_node_name (ie->callee)),
	       ie->callee->symbol.order);
      if (ie->call_stmt)
	print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
      else
	fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
    }
  callee = cgraph_function_or_thunk_node (callee, NULL);

  return ie;
}
/* Retrieve value from aggregate jump function AGG for the given OFFSET or
   return NULL if there is not any.  BY_REF specifies whether the value has to
   be passed by reference or by value.  */

tree
ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
			    HOST_WIDE_INT offset, bool by_ref)
{
  struct ipa_agg_jf_item *item;
  int i;

  /* A by-value item cannot satisfy a by-reference lookup or vice versa.  */
  if (by_ref != agg->by_ref)
    return NULL;

  /* Linear scan; item lists are expected to be short.  */
  FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
    if (item->offset == offset)
      {
	/* Currently we do not have clobber values, return NULL for them once
	   we do.  */
	gcc_checking_assert (is_gimple_ip_invariant (item->value));
	return item->value;
      }
  return NULL;
}
/* Remove a reference to SYMBOL from the list of references of a node given by
   reference description RDESC.  The reference must exist; we assert on it.  */

static void
remove_described_reference (symtab_node symbol, struct ipa_cst_ref_desc *rdesc)
{
  struct ipa_ref *to_del;
  struct cgraph_edge *origin;

  origin = rdesc->cs;
  /* The reference was recorded on the caller of the edge that carries the
     constant jump function described by RDESC.  */
  to_del = ipa_find_reference ((symtab_node) origin->caller, symbol,
			       origin->call_stmt);
  gcc_assert (to_del);
  ipa_remove_reference (to_del);
  if (dump_file)
    fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
	     xstrdup (cgraph_node_name (origin->caller)),
	     origin->caller->symbol.order, xstrdup (symtab_node_name (symbol)));
}
2328 /* If JFUNC has a reference description with refcount different from
2329 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2330 NULL. JFUNC must be a constant jump function. */
2332 static struct ipa_cst_ref_desc *
2333 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2335 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2336 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2337 return rdesc;
2338 else
2339 return NULL;
/* Try to find a destination for indirect edge IE that corresponds to a simple
   call or a call of a member function pointer and where the destination is a
   pointer formal parameter described by jump function JFUNC.  If it can be
   determined, return the newly direct edge, otherwise return NULL.
   NEW_ROOT_INFO is the node info that JFUNC lattices are relative to.  */

static struct cgraph_edge *
try_make_edge_direct_simple_call (struct cgraph_edge *ie,
				  struct ipa_jump_func *jfunc,
				  struct ipa_node_params *new_root_info)
{
  struct ipa_cst_ref_desc *rdesc;
  struct cgraph_edge *cs;
  tree target;

  /* The target either lives at a known offset within an aggregate that is
     passed to the caller, or is the value of the parameter itself.  */
  if (ie->indirect_info->agg_contents)
    target = ipa_find_agg_cst_for_param (&jfunc->agg,
					 ie->indirect_info->offset,
					 ie->indirect_info->by_ref);
  else
    target = ipa_value_from_jfunc (new_root_info, jfunc);
  if (!target)
    return NULL;
  cs = ipa_make_edge_direct_to_target (ie, target);

  /* Turning the edge direct consumes one controlled use of the constant;
     drop the recorded reference when the count reaches zero.  */
  if (cs && !ie->indirect_info->agg_contents
      && jfunc->type == IPA_JF_CONST
      && (rdesc = jfunc_rdesc_usable (jfunc))
      && --rdesc->refcount == 0)
    remove_described_reference ((symtab_node) cs->callee, rdesc);

  return cs;
}
/* Try to find a destination for indirect edge IE that corresponds to a virtual
   call based on a formal parameter which is described by jump function JFUNC
   and if it can be determined, make it direct and return the direct edge.
   Otherwise, return NULL.  NEW_ROOT_INFO is the node info that JFUNC lattices
   are relative to.  */

static struct cgraph_edge *
try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
				   struct ipa_jump_func *jfunc,
				   struct ipa_node_params *new_root_info)
{
  tree binfo, target;

  binfo = ipa_value_from_jfunc (new_root_info, jfunc);

  if (!binfo)
    return NULL;

  /* The jump function may yield a constant (e.g. the address of a vtable
     slot) rather than a BINFO; try to recover a BINFO from it.  */
  if (TREE_CODE (binfo) != TREE_BINFO)
    {
      binfo = gimple_extract_devirt_binfo_from_cst (binfo);
      if (!binfo)
	return NULL;
    }

  /* Walk down to the sub-object at the call's offset that has the type
     through which the virtual call is made.  */
  binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
			       ie->indirect_info->otr_type);
  if (binfo)
    target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
					       binfo);
  else
    return NULL;

  if (target)
    return ipa_make_edge_direct_to_target (ie, target);
  else
    return NULL;
}
/* Update the param called notes associated with NODE when CS is being inlined,
   assuming NODE is (potentially indirectly) inlined into CS->callee.
   Moreover, if the callee is discovered to be constant, create a new cgraph
   edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
   unless NEW_EDGES is NULL.  Return true iff a new edge(s) were created.  */

static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_node *node,
				      vec<cgraph_edge_p> *new_edges)
{
  struct ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  struct ipa_node_params *new_root_info;
  bool res = false;

  ipa_check_create_edge_args ();
  top = IPA_EDGE_REF (cs);
  /* Lattices of NODE's parameters are now expressed in terms of the root of
     the tree CS is being inlined into.  */
  new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
				? cs->caller->global.inlined_to
				: cs->caller);

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      struct cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;
      int param_index;

      /* Fetch the successor now; IE may be redirected below.  */
      next_ie = ie->next_callee;

      if (ici->param_index == -1)
	continue;

      /* We must check range due to calls with variable number of arguments:  */
      if (ici->param_index >= ipa_get_cs_argument_count (top))
	{
	  ici->param_index = -1;
	  continue;
	}

      param_index = ici->param_index;
      jfunc = ipa_get_ith_jump_func (top, param_index);

      if (!flag_indirect_inlining)
	new_direct_edge = NULL;
      else if (ici->polymorphic)
	new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
							     new_root_info);
      else
	new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
							    new_root_info);
      if (new_direct_edge)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  if (new_direct_edge->call_stmt)
	    new_direct_edge->call_stmt_cannot_inline_p
	      = !gimple_check_call_matching_types (new_direct_edge->call_stmt,
						   new_direct_edge->callee->symbol.decl);
	  if (new_edges)
	    {
	      new_edges->safe_push (new_direct_edge);
	      /* Redirecting the edge may have grown the edge-args vector;
		 re-fetch TOP in case it was reallocated.  */
	      top = IPA_EDGE_REF (cs);
	      res = true;
	    }
	}
      else if (jfunc->type == IPA_JF_PASS_THROUGH
	       && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
	{
	  /* Retarget the note at the caller's corresponding parameter,
	     unless the needed aggregate contents may have been clobbered.  */
	  if (ici->agg_contents
	      && !ipa_get_jf_pass_through_agg_preserved (jfunc))
	    ici->param_index = -1;
	  else
	    ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
	}
      else if (jfunc->type == IPA_JF_ANCESTOR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_ancestor_agg_preserved (jfunc))
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
	      ici->offset += ipa_get_jf_ancestor_offset (jfunc);
	    }
	}
      else
	/* Either we can find a destination for this edge now or never.  */
	ici->param_index = -1;
    }

  return res;
}
/* Recursively traverse subtree of NODE (including node) made of inlined
   cgraph_edges when CS has been inlined and invoke
   update_indirect_edges_after_inlining on all nodes and
   update_jump_functions_after_inlining on all non-inlined edges that lead out
   of this subtree.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

static bool
propagate_info_to_inlined_callees (struct cgraph_edge *cs,
				   struct cgraph_node *node,
				   vec<cgraph_edge_p> *new_edges)
{
  struct cgraph_edge *e;
  bool res;

  res = update_indirect_edges_after_inlining (cs, node, new_edges);

  /* Recurse into edges already inlined into NODE; update jump functions on
     the edges that remain real calls out of the inlined subtree.  */
  for (e = node->callees; e; e = e->next_callee)
    if (!e->inline_failed)
      res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
    else
      update_jump_functions_after_inlining (cs, e);
  for (e = node->indirect_calls; e; e = e->next_callee)
    update_jump_functions_after_inlining (cs, e);

  return res;
}
2537 /* Combine two controlled uses counts as done during inlining. */
2539 static int
2540 combine_controlled_uses_counters (int c, int d)
2542 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
2543 return IPA_UNDESCRIBED_USE;
2544 else
2545 return c + d - 1;
/* Propagate number of controlled users from CS->callee to the new root of the
   tree of inlined nodes.  */

static void
propagate_controlled_uses (struct cgraph_edge *cs)
{
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  struct cgraph_node *new_root = cs->caller->global.inlined_to
    ? cs->caller->global.inlined_to : cs->caller;
  struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
  struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
  int count, i;

  /* Only arguments that map to formal parameters of the callee carry
     controlled-use information; the surplus is handled below.  */
  count = MIN (ipa_get_cs_argument_count (args),
	       ipa_get_param_count (old_root_info));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
      struct ipa_cst_ref_desc *rdesc;

      if (jf->type == IPA_JF_PASS_THROUGH)
	{
	  int src_idx, c, d;
	  src_idx = ipa_get_jf_pass_through_formal_id (jf);
	  c = ipa_get_controlled_uses (new_root_info, src_idx);
	  d = ipa_get_controlled_uses (old_root_info, i);

	  /* Only a plain (NOP_EXPR) pass-through keeps uses controlled.  */
	  gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
			       == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
	  c = combine_controlled_uses_counters (c, d);
	  ipa_set_controlled_uses (new_root_info, src_idx, c);
	  if (c == 0 && new_root_info->ipcp_orig_node)
	    {
	      /* All controlled uses are gone; if the IPA-CP clone recorded a
		 known function-address value for this parameter, the
		 cloning-created reference to it can be dropped.  */
	      struct cgraph_node *n;
	      struct ipa_ref *ref;
	      tree t = new_root_info->known_vals[src_idx];

	      if (t && TREE_CODE (t) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
		  && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
		  && (ref = ipa_find_reference ((symtab_node) new_root,
						(symtab_node) n, NULL)))
		{
		  if (dump_file)
		    fprintf (dump_file, "ipa-prop: Removing cloning-created "
			     "reference from %s/%i to %s/%i.\n",
			     xstrdup (cgraph_node_name (new_root)),
			     new_root->symbol.order,
			     xstrdup (cgraph_node_name (n)), n->symbol.order);
		  ipa_remove_reference (ref);
		}
	    }
	}
      else if (jf->type == IPA_JF_CONST
	       && (rdesc = jfunc_rdesc_usable (jf)))
	{
	  int d = ipa_get_controlled_uses (old_root_info, i);
	  int c = rdesc->refcount;
	  rdesc->refcount = combine_controlled_uses_counters (c, d);
	  if (rdesc->refcount == 0)
	    {
	      tree cst = ipa_get_jf_constant (jf);
	      struct cgraph_node *n;
	      gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
				   && TREE_CODE (TREE_OPERAND (cst, 0))
				   == FUNCTION_DECL);
	      n = cgraph_get_node (TREE_OPERAND (cst, 0));
	      if (n)
		{
		  struct cgraph_node *clone;
		  remove_described_reference ((symtab_node) n, rdesc);

		  /* Also drop the references duplicated onto the chain of
		     IPA-CP clones between CS->caller and the node that
		     originally carried the reference.  */
		  clone = cs->caller;
		  while (clone->global.inlined_to
			 && clone != rdesc->cs->caller
			 && IPA_NODE_REF (clone)->ipcp_orig_node)
		    {
		      struct ipa_ref *ref;
		      ref = ipa_find_reference ((symtab_node) clone,
						(symtab_node) n, NULL);
		      if (ref)
			{
			  if (dump_file)
			    fprintf (dump_file, "ipa-prop: Removing "
				     "cloning-created reference "
				     "from %s/%i to %s/%i.\n",
				     xstrdup (cgraph_node_name (clone)),
				     clone->symbol.order,
				     xstrdup (cgraph_node_name (n)),
				     n->symbol.order);
			  ipa_remove_reference (ref);
			}
		      clone = clone->callers->caller;
		    }
		}
	    }
	}
    }

  /* Arguments beyond the callee's formal parameters (varargs) cannot be
     tracked; mark whatever they touch as undescribed.  */
  for (i = ipa_get_param_count (old_root_info);
       i < ipa_get_cs_argument_count (args);
       i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);

      if (jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
	  if (rdesc)
	    rdesc->refcount = IPA_UNDESCRIBED_USE;
	}
      else if (jf->type == IPA_JF_PASS_THROUGH)
	ipa_set_controlled_uses (new_root_info,
				 jf->value.pass_through.formal_id,
				 IPA_UNDESCRIBED_USE);
    }
}
/* Update jump functions and call note functions on inlining the call site CS.
   CS is expected to lead to a node already cloned by
   cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

bool
ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
				   vec<cgraph_edge_p> *new_edges)
{
  bool changed;
  /* Do nothing if the preparation phase has not been carried out yet
     (i.e. during early inlining).  */
  if (!ipa_node_params_vector.exists ())
    return false;
  gcc_assert (ipa_edge_args_vector);

  propagate_controlled_uses (cs);
  changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);

  /* We do not keep jump functions of inlined edges up to date.  Better to free
     them so we do not access them accidentally.  */
  ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
  return changed;
}
/* Frees all dynamically allocated structures that the argument info points
   to.  ARGS itself is zeroed so stale pointers cannot be reused.  */

void
ipa_free_edge_args_substructures (struct ipa_edge_args *args)
{
  vec_free (args->jump_functions);
  memset (args, 0, sizeof (*args));
}
/* Free all ipa_edge structures.  Safe to call when the vector was never
   allocated.  */

void
ipa_free_all_edge_args (void)
{
  int i;
  struct ipa_edge_args *args;

  if (!ipa_edge_args_vector)
    return;

  FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
    ipa_free_edge_args_substructures (args);

  vec_free (ipa_edge_args_vector);
}
/* Frees all dynamically allocated structures that the param info points
   to.  INFO itself is zeroed so stale pointers cannot be reused.  */

void
ipa_free_node_params_substructures (struct ipa_node_params *info)
{
  info->descriptors.release ();
  free (info->lattices);
  /* Lattice values and their sources are deallocated with their allocation
     pool.  */
  info->known_vals.release ();
  memset (info, 0, sizeof (*info));
}
/* Free all ipa_node_params structures and release the vector that holds
   them.  */

void
ipa_free_all_node_params (void)
{
  int i;
  struct ipa_node_params *info;

  FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
    ipa_free_node_params_substructures (info);

  ipa_node_params_vector.release ();
}
/* Set the aggregate replacements of NODE to be AGGVALS.  The per-node vector
   is indexed by cgraph uid and grown lazily on first use.  */

void
ipa_set_node_agg_value_chain (struct cgraph_node *node,
			      struct ipa_agg_replacement_value *aggvals)
{
  if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
    vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);

  (*ipa_node_agg_replacements)[node->uid] = aggvals;
}
/* Hook that is called by cgraph.c when an edge is removed.  Frees the
   edge-args substructures associated with CS, if any.  */

static void
ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
{
  /* During IPA-CP updating we can be called on not-yet analyzed clones.  */
  if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
    return;
  ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
}
/* Hook that is called by cgraph.c when a node is removed.  Frees the node's
   parameter info and clears its aggregate-replacement slot.  */

static void
ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  /* During IPA-CP updating we can be called on not-yet analyzed clones.  */
  if (ipa_node_params_vector.length () > (unsigned)node->uid)
    ipa_free_node_params_substructures (IPA_NODE_REF (node));
  if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
    (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
}
/* Hook that is called by cgraph.c when an edge is duplicated.  Copies the
   jump functions (including aggregate items) from SRC to DST and fixes up
   constant reference descriptors for the new edge.  */

static void
ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
			   __attribute__((unused)) void *data)
{
  struct ipa_edge_args *old_args, *new_args;
  unsigned int i;

  ipa_check_create_edge_args ();

  old_args = IPA_EDGE_REF (src);
  new_args = IPA_EDGE_REF (dst);

  /* Shallow-copy the jump function vector first; per-element deep copying
     (aggregate items, rdescs) is done below.  */
  new_args->jump_functions = vec_safe_copy (old_args->jump_functions);

  for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
    {
      struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
      struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);

      /* DST's item pointer currently aliases SRC's vector; replace it with
	 a private copy.  */
      dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);

      if (src_jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);

	  if (!src_rdesc)
	    dst_jf->value.constant.rdesc = NULL;
	  else if (src_rdesc->cs == src)
	    {
	      /* SRC owns the description; give DST its own copy and link it
		 into SRC's chain of duplicates.  */
	      struct ipa_cst_ref_desc *dst_rdesc;
	      gcc_checking_assert (ipa_refdesc_pool);
	      dst_rdesc
		= (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
	      dst_rdesc->cs = dst;
	      dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
	      src_rdesc->next_duplicate = dst_rdesc;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else
	    {
	      struct ipa_cst_ref_desc *dst_rdesc;
	      /* This can happen during inlining, when a JFUNC can refer to a
		 reference taken in a function up in the tree of inline clones.
		 We need to find the duplicate that refers to our tree of
		 inline clones.  */

	      gcc_assert (dst->caller->global.inlined_to);
	      for (dst_rdesc = src_rdesc->next_duplicate;
		   dst_rdesc;
		   dst_rdesc = dst_rdesc->next_duplicate)
		{
		  gcc_assert (dst_rdesc->cs->caller->global.inlined_to);
		  if (dst_rdesc->cs->caller->global.inlined_to
		      == dst->caller->global.inlined_to)
		    break;
		}

	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	}
    }
}
/* Hook that is called by cgraph.c when a node is duplicated.  Copies SRC's
   parameter info and aggregate replacement chain to DST.  */

static void
ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
			   ATTRIBUTE_UNUSED void *data)
{
  struct ipa_node_params *old_info, *new_info;
  struct ipa_agg_replacement_value *old_av, *new_av;

  ipa_check_create_node_params ();
  old_info = IPA_NODE_REF (src);
  new_info = IPA_NODE_REF (dst);

  new_info->descriptors = old_info->descriptors.copy ();
  /* Lattices are transient analysis state and are not duplicated.  */
  new_info->lattices = NULL;
  new_info->ipcp_orig_node = old_info->ipcp_orig_node;

  new_info->uses_analysis_done = old_info->uses_analysis_done;
  new_info->node_enqueued = old_info->node_enqueued;

  old_av = ipa_get_agg_replacements_for_node (src);
  if (!old_av)
    return;

  /* Deep-copy the chain; note the copy ends up in reverse order, which is
     harmless since order carries no meaning here.  */
  new_av = NULL;
  while (old_av)
    {
      struct ipa_agg_replacement_value *v;

      v = ggc_alloc_ipa_agg_replacement_value ();
      memcpy (v, old_av, sizeof (*v));
      v->next = new_av;
      new_av = v;
      old_av = old_av->next;
    }
  ipa_set_node_agg_value_chain (dst, new_av);
}
/* Analyze newly added function into callgraph.  Registered as the cgraph
   function-insertion hook.  */

static void
ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  ipa_analyze_node (node);
}
/* Register our cgraph hooks if they are not already there.  Idempotent for
   all but the function-insertion hook, which is always (re-)registered.  */

void
ipa_register_cgraph_hooks (void)
{
  if (!edge_removal_hook_holder)
    edge_removal_hook_holder =
      cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
  if (!node_removal_hook_holder)
    node_removal_hook_holder =
      cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
  if (!edge_duplication_hook_holder)
    edge_duplication_hook_holder =
      cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
  if (!node_duplication_hook_holder)
    node_duplication_hook_holder =
      cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
  function_insertion_hook_holder =
      cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
}
/* Unregister our cgraph hooks and clear the holders so a later
   ipa_register_cgraph_hooks can re-register them.  */

static void
ipa_unregister_cgraph_hooks (void)
{
  cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
  edge_removal_hook_holder = NULL;
  cgraph_remove_node_removal_hook (node_removal_hook_holder);
  node_removal_hook_holder = NULL;
  cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
  edge_duplication_hook_holder = NULL;
  cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
  node_duplication_hook_holder = NULL;
  cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
  function_insertion_hook_holder = NULL;
}
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after ipa-cp.  When optimizing, the data is kept for the
   later indirect-inlining phase and freed by
   ipa_free_all_structures_after_iinln instead.  */

void
ipa_free_all_structures_after_ipa_cp (void)
{
  if (!optimize)
    {
      ipa_free_all_edge_args ();
      ipa_free_all_node_params ();
      free_alloc_pool (ipcp_sources_pool);
      free_alloc_pool (ipcp_values_pool);
      free_alloc_pool (ipcp_agg_lattice_pool);
      ipa_unregister_cgraph_hooks ();
      if (ipa_refdesc_pool)
	free_alloc_pool (ipa_refdesc_pool);
    }
}
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after indirect inlining.  The IPA-CP pools may not have been
   created (e.g. when IPA-CP did not run), hence the NULL checks.  */

void
ipa_free_all_structures_after_iinln (void)
{
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
  if (ipcp_sources_pool)
    free_alloc_pool (ipcp_sources_pool);
  if (ipcp_values_pool)
    free_alloc_pool (ipcp_values_pool);
  if (ipcp_agg_lattice_pool)
    free_alloc_pool (ipcp_agg_lattice_pool);
  if (ipa_refdesc_pool)
    free_alloc_pool (ipa_refdesc_pool);
}
/* Print ipa_tree_map data structures (parameter descriptors) of the single
   function NODE to F.  Does nothing for nodes that were not analyzed.  */

void
ipa_print_node_params (FILE *f, struct cgraph_node *node)
{
  int i, count;
  tree temp;
  struct ipa_node_params *info;

  if (!node->analyzed)
    return;
  info = IPA_NODE_REF (node);
  fprintf (f, "  function  %s/%i parameter descriptors:\n",
	   cgraph_node_name (node), node->symbol.order);
  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    {
      int c;

      temp = ipa_get_param (info, i);
      if (TREE_CODE (temp) == PARM_DECL)
	fprintf (f, "    param %d : %s", i,
		 (DECL_NAME (temp)
		  ? (*lang_hooks.decl_printable_name) (temp, 2)
		  : "(unnamed)"));
      if (ipa_is_param_used (info, i))
	fprintf (f, " used");
      c = ipa_get_controlled_uses (info, i);
      if (c == IPA_UNDESCRIBED_USE)
	fprintf (f, " undescribed_use");
      else
	fprintf (f, " controlled_uses=%i", c);
      fprintf (f, "\n");
    }
}
/* Print ipa_tree_map data structures of all functions in the
   callgraph to F.  */

void
ipa_print_all_params (FILE * f)
{
  struct cgraph_node *node;

  fprintf (f, "\nFunction parameters:\n");
  FOR_EACH_FUNCTION (node)
    ipa_print_node_params (f, node);
}
/* Return a heap allocated vector containing formal parameters of FNDECL.
   The caller is responsible for releasing the vector.  */

vec<tree>
ipa_get_vector_of_formal_parms (tree fndecl)
{
  vec<tree> args;
  int count;
  tree parm;

  /* Pre-size the vector so quick_push below cannot overflow.  */
  count = count_formal_params (fndecl);
  args.create (count);
  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    args.quick_push (parm);

  return args;
}
/* Return a heap allocated vector containing types of formal parameters of
   function type FNTYPE.  The caller is responsible for releasing the
   vector.  */

static inline vec<tree>
get_vector_of_formal_parm_types (tree fntype)
{
  vec<tree> types;
  int count = 0;
  tree t;

  /* First pass counts the arguments so the vector can be pre-sized.  */
  for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
    count++;

  types.create (count);
  for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
    types.quick_push (TREE_VALUE (t));

  return types;
}
/* Modify the function declaration FNDECL and its type according to the plan in
   ADJUSTMENTS.  It also sets base fields of individual adjustments structures
   to reflect the actual parameters being modified which are determined by the
   base_index field.  SYNTH_PARM_PREFIX (or "SYNTH" if NULL) names newly
   synthesized parameters.  */

void
ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
			      const char *synth_parm_prefix)
{
  vec<tree> oparms, otypes;
  tree orig_type, new_type = NULL;
  tree old_arg_types, t, new_arg_types = NULL;
  tree parm, *link = &DECL_ARGUMENTS (fndecl);
  int i, len = adjustments.length ();
  tree new_reversed = NULL;
  bool care_for_types, last_parm_void;

  if (!synth_parm_prefix)
    synth_parm_prefix = "SYNTH";

  oparms = ipa_get_vector_of_formal_parms (fndecl);
  orig_type = TREE_TYPE (fndecl);
  old_arg_types = TYPE_ARG_TYPES (orig_type);

  /* The following test is an ugly hack, some functions simply don't have any
     arguments in their type.  This is probably a bug but well... */
  care_for_types = (old_arg_types != NULL_TREE);
  if (care_for_types)
    {
      last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
			== void_type_node);
      otypes = get_vector_of_formal_parm_types (orig_type);
      if (last_parm_void)
	gcc_assert (oparms.length () + 1 == otypes.length ());
      else
	gcc_assert (oparms.length () == otypes.length ());
    }
  else
    {
      last_parm_void = false;
      otypes.create (0);
    }

  /* Build the new DECL_ARGUMENTS chain and (in reverse) the new argument
     type list, one adjustment at a time.  */
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      gcc_assert (link);

      adj = &adjustments[i];
      parm = oparms[adj->base_index];
      adj->base = parm;

      if (adj->copy_param)
	{
	  /* Keep the original parameter as-is.  */
	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
				       new_arg_types);
	  *link = parm;
	  link = &DECL_CHAIN (parm);
	}
      else if (!adj->remove_param)
	{
	  /* Synthesize a replacement parameter (possibly a pointer to the
	     reduced piece of the original).  */
	  tree new_parm;
	  tree ptype;

	  if (adj->by_ref)
	    ptype = build_pointer_type (adj->type);
	  else
	    ptype = adj->type;

	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);

	  new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
				 ptype);
	  DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);

	  DECL_ARTIFICIAL (new_parm) = 1;
	  DECL_ARG_TYPE (new_parm) = ptype;
	  DECL_CONTEXT (new_parm) = fndecl;
	  TREE_USED (new_parm) = 1;
	  DECL_IGNORED_P (new_parm) = 1;
	  layout_decl (new_parm, 0);

	  adj->base = parm;
	  adj->reduction = new_parm;

	  *link = new_parm;

	  link = &DECL_CHAIN (new_parm);
	}
      /* Removed parameters contribute nothing to either chain.  */
    }

  *link = NULL_TREE;

  if (care_for_types)
    {
      new_reversed = nreverse (new_arg_types);
      if (last_parm_void)
	{
	  /* Restore the trailing void marker of a non-variadic list.  */
	  if (new_reversed)
	    TREE_CHAIN (new_arg_types) = void_list_node;
	  else
	    new_reversed = void_list_node;
	}
    }

  /* Use copy_node to preserve as much as possible from original type
     (debug info, attribute lists etc.)
     Exception is METHOD_TYPEs must have THIS argument.
     When we are asked to remove it, we need to build new FUNCTION_TYPE
     instead.  */
  if (TREE_CODE (orig_type) != METHOD_TYPE
      || (adjustments[0].copy_param
	  && adjustments[0].base_index == 0))
    {
      new_type = build_distinct_type_copy (orig_type);
      TYPE_ARG_TYPES (new_type) = new_reversed;
    }
  else
    {
      new_type
	= build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
							 new_reversed));
      TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
      DECL_VINDEX (fndecl) = NULL_TREE;
    }

  /* When signature changes, we need to clear builtin info.  */
  if (DECL_BUILT_IN (fndecl))
    {
      DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
      DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
    }

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (orig_type);
  if (orig_type != t)
    {
      TYPE_MAIN_VARIANT (new_type) = t;
      TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_type;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_type) = new_type;
      TYPE_NEXT_VARIANT (new_type) = NULL;
    }

  TREE_TYPE (fndecl) = new_type;
  DECL_VIRTUAL_P (fndecl) = 0;
  otypes.release ();
  oparms.release ();
}
/* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
   If this is a directly recursive call, CS must be NULL.  Otherwise it must
   contain the corresponding call graph edge.  */

void
ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
			   ipa_parm_adjustment_vec adjustments)
{
  vec<tree> vargs;
  vec<tree, va_gc> **debug_args = NULL;
  gimple new_stmt;
  gimple_stmt_iterator gsi;
  tree callee_decl;
  int i, len;

  len = adjustments.length ();
  vargs.create (len);
  /* With no edge, this is a recursive call into the current function.  */
  callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->symbol.decl;

  gsi = gsi_for_stmt (stmt);
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;

      adj = &adjustments[i];

      if (adj->copy_param)
	{
	  /* Parameter kept as-is: reuse the original actual argument.  */
	  tree arg = gimple_call_arg (stmt, adj->base_index);

	  vargs.quick_push (arg);
	}
      else if (!adj->remove_param)
	{
	  tree expr, base, off;
	  location_t loc;
	  unsigned int deref_align;
	  bool deref_base = false;

	  /* We create a new parameter out of the value of the old one, we can
	     do the following kind of transformations:

	     - A scalar passed by reference is converted to a scalar passed by
	       value.  (adj->by_ref is false and the type of the original
	       actual argument is a pointer to a scalar).

	     - A part of an aggregate is passed instead of the whole aggregate.
	       The part can be passed either by value or by reference, this is
	       determined by value of adj->by_ref.  Moreover, the code below
	       handles both situations when the original aggregate is passed by
	       value (its type is not a pointer) and when it is passed by
	       reference (it is a pointer to an aggregate).

	     When the new argument is passed by reference (adj->by_ref is true)
	     it must be a part of an aggregate and therefore we form it by
	     simply taking the address of a reference inside the original
	     aggregate.  */

	  gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
	  base = gimple_call_arg (stmt, adj->base_index);
	  loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
			      : EXPR_LOCATION (base);

	  if (TREE_CODE (base) != ADDR_EXPR
	      && POINTER_TYPE_P (TREE_TYPE (base)))
	    /* Original argument is already a pointer; just offset it.  */
	    off = build_int_cst (adj->alias_ptr_type,
				 adj->offset / BITS_PER_UNIT);
	  else
	    {
	      HOST_WIDE_INT base_offset;
	      tree prev_base;
	      bool addrof;

	      if (TREE_CODE (base) == ADDR_EXPR)
		{
		  base = TREE_OPERAND (base, 0);
		  addrof = true;
		}
	      else
		addrof = false;
	      prev_base = base;
	      /* Split the reference into an invariant base plus a byte
		 offset, if possible.  */
	      base = get_addr_base_and_unit_offset (base, &base_offset);
	      /* Aggregate arguments can have non-invariant addresses.  */
	      if (!base)
		{
		  base = build_fold_addr_expr (prev_base);
		  off = build_int_cst (adj->alias_ptr_type,
				       adj->offset / BITS_PER_UNIT);
		}
	      else if (TREE_CODE (base) == MEM_REF)
		{
		  if (!addrof)
		    {
		      /* Remember the alignment of the dereferenced base so
			 it can override the pointer alignment below.  */
		      deref_base = true;
		      deref_align = TYPE_ALIGN (TREE_TYPE (base));
		    }
		  /* Fold the MEM_REF's own offset into OFF.  */
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
					 off);
		  base = TREE_OPERAND (base, 0);
		}
	      else
		{
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  base = build_fold_addr_expr (base);
		}
	    }

	  if (!adj->by_ref)
	    {
	      tree type = adj->type;
	      unsigned int align;
	      unsigned HOST_WIDE_INT misalign;

	      if (deref_base)
		{
		  align = deref_align;
		  misalign = 0;
		}
	      else
		{
		  get_pointer_alignment_1 (base, &align, &misalign);
		  if (TYPE_ALIGN (type) > align)
		    align = TYPE_ALIGN (type);
		}
	      /* Account for the constant offset when computing the actual
		 misalignment of the access.  */
	      misalign += (tree_to_double_int (off)
			   .sext (TYPE_PRECISION (TREE_TYPE (off))).low
			   * BITS_PER_UNIT);
	      misalign = misalign & (align - 1);
	      if (misalign != 0)
		align = (misalign & -misalign);
	      /* If the access is less aligned than its type requires, build
		 an appropriately under-aligned variant of the type.  */
	      if (align < TYPE_ALIGN (type))
		type = build_aligned_type (type, align);
	      expr = fold_build2_loc (loc, MEM_REF, type, base, off);
	    }
	  else
	    {
	      /* By-reference new argument: pass the address of the piece.  */
	      expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
	      expr = build_fold_addr_expr (expr);
	    }

	  expr = force_gimple_operand_gsi (&gsi, expr,
					   adj->by_ref
					   || is_gimple_reg_type (adj->type),
					   NULL, true, GSI_SAME_STMT);
	  vargs.quick_push (expr);
	}
      /* For non-copied parameters, emit a debug bind so the debugger can
	 still show the original parameter's value.  */
      if (!adj->copy_param && MAY_HAVE_DEBUG_STMTS)
	{
	  unsigned int ix;
	  tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
	  gimple def_temp;

	  arg = gimple_call_arg (stmt, adj->base_index);
	  if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
	    {
	      if (!fold_convertible_p (TREE_TYPE (origin), arg))
		continue;
	      arg = fold_convert_loc (gimple_location (stmt),
				      TREE_TYPE (origin), arg);
	    }
	  if (debug_args == NULL)
	    debug_args = decl_debug_args_insert (callee_decl);
	  /* DEBUG_ARGS holds (origin, debug-decl) pairs; look for an
	     existing entry for ORIGIN.  */
	  for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
	    if (ddecl == origin)
	      {
		ddecl = (**debug_args)[ix + 1];
		break;
	      }
	  if (ddecl == NULL)
	    {
	      ddecl = make_node (DEBUG_EXPR_DECL);
	      DECL_ARTIFICIAL (ddecl) = 1;
	      TREE_TYPE (ddecl) = TREE_TYPE (origin);
	      DECL_MODE (ddecl) = DECL_MODE (origin);

	      vec_safe_push (*debug_args, origin);
	      vec_safe_push (*debug_args, ddecl);
	    }
	  def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "replacing stmt:");
      print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
    }

  /* Build the replacement call and carry over all ancillary state from the
     original statement.  */
  new_stmt = gimple_build_call_vec (callee_decl, vargs);
  vargs.release ();
  if (gimple_call_lhs (stmt))
    gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));

  gimple_set_block (new_stmt, gimple_block (stmt));
  if (gimple_has_location (stmt))
    gimple_set_location (new_stmt, gimple_location (stmt));
  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
  gimple_call_copy_flags (new_stmt, stmt);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "with stmt:");
      print_gimple_stmt (dump_file, new_stmt, 0, 0);
      fprintf (dump_file, "\n");
    }
  gsi_replace (&gsi, new_stmt, true);
  if (cs)
    cgraph_set_call_stmt (cs, new_stmt);
  update_ssa (TODO_update_ssa);
  free_dominance_info (CDI_DOMINATORS);
}
3432 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
3434 static bool
3435 index_in_adjustments_multiple_times_p (int base_index,
3436 ipa_parm_adjustment_vec adjustments)
3438 int i, len = adjustments.length ();
3439 bool one = false;
3441 for (i = 0; i < len; i++)
3443 struct ipa_parm_adjustment *adj;
3444 adj = &adjustments[i];
3446 if (adj->base_index == base_index)
3448 if (one)
3449 return true;
3450 else
3451 one = true;
3454 return false;
/* Return adjustments that should have the same effect on function parameters
   and call arguments as if they were first changed according to adjustments in
   INNER and then by adjustments in OUTER.  */

ipa_parm_adjustment_vec
ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
			 ipa_parm_adjustment_vec outer)
{
  int i, outlen = outer.length ();
  int inlen = inner.length ();
  int removals = 0;
  ipa_parm_adjustment_vec adjustments, tmp;

  /* First pass: TMP holds the surviving (non-removed) inner adjustments so
     that OUTER's base_index values can index into it directly.  */
  tmp.create (inlen);
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n;
      n = &inner[i];

      if (n->remove_param)
	removals++;
      else
	tmp.quick_push (*n);
    }

  /* Second pass: compose each outer adjustment with the inner one it refers
     to.  */
  adjustments.create (outlen + removals);
  for (i = 0; i < outlen; i++)
    {
      struct ipa_parm_adjustment r;
      struct ipa_parm_adjustment *out = &outer[i];
      struct ipa_parm_adjustment *in = &tmp[out->base_index];

      memset (&r, 0, sizeof (r));
      gcc_assert (!in->remove_param);
      if (out->remove_param)
	{
	  /* Only emit the removal once per original parameter; a parameter
	     referenced several times in TMP must not be removed twice.  */
	  if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
	    {
	      r.remove_param = true;
	      adjustments.quick_push (r);
	    }
	  continue;
	}

      r.base_index = in->base_index;
      r.type = out->type;

      /* FIXME:  Create nonlocal value too.  */

      /* Offsets compose additively; a copy on either side contributes
	 offset zero.  */
      if (in->copy_param && out->copy_param)
	r.copy_param = true;
      else if (in->copy_param)
	r.offset = out->offset;
      else if (out->copy_param)
	r.offset = in->offset;
      else
	r.offset = in->offset + out->offset;
      adjustments.quick_push (r);
    }

  /* Finally re-emit the inner removals themselves.  */
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n = &inner[i];

      if (n->remove_param)
	adjustments.quick_push (*n);
    }

  tmp.release ();
  return adjustments;
}
/* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
   friendly way, assuming they are meant to be applied to FNDECL.  */

void
ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
			    tree fndecl)
{
  int i, len = adjustments.length ();
  bool first = true;
  vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);

  fprintf (file, "IPA param adjustments: ");
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      adj = &adjustments[i];

      if (!first)
	fprintf (file, " ");
      else
	first = false;

      fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
      print_generic_expr (file, parms[adj->base_index], 0);
      if (adj->base)
	{
	  fprintf (file, ", base: ");
	  print_generic_expr (file, adj->base, 0);
	}
      if (adj->reduction)
	{
	  fprintf (file, ", reduction: ");
	  print_generic_expr (file, adj->reduction, 0);
	}
      if (adj->new_ssa_base)
	{
	  fprintf (file, ", new_ssa_base: ");
	  print_generic_expr (file, adj->new_ssa_base, 0);
	}

      if (adj->copy_param)
	fprintf (file, ", copy_param");
      else if (adj->remove_param)
	fprintf (file, ", remove_param");
      else
	fprintf (file, ", offset %li", (long) adj->offset);
      if (adj->by_ref)
	fprintf (file, ", by_ref");
      print_node_brief (file, ", type: ", adj->type, 0);
      fprintf (file, "\n");
    }
  parms.release ();
}
3584 /* Dump the AV linked list. */
3586 void
3587 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
3589 bool comma = false;
3590 fprintf (f, " Aggregate replacements:");
3591 for (; av; av = av->next)
3593 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
3594 av->index, av->offset);
3595 print_generic_expr (f, av->value, 0);
3596 comma = true;
3598 fprintf (f, "\n");
/* Stream out jump function JUMP_FUNC to OB.  The order and encoding of fields
   must be kept in sync with ipa_read_jump_function below.  */

static void
ipa_write_jump_function (struct output_block *ob,
			 struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;

  streamer_write_uhwi (ob, jump_func->type);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_KNOWN_TYPE:
      streamer_write_uhwi (ob, jump_func->value.known_type.offset);
      stream_write_tree (ob, jump_func->value.known_type.base_type, true);
      stream_write_tree (ob, jump_func->value.known_type.component_type, true);
      break;
    case IPA_JF_CONST:
      /* Constants with locations would drag location data into the LTO
	 stream; they must have been stripped earlier.  */
      gcc_assert (
	  EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
      stream_write_tree (ob, jump_func->value.constant.value, true);
      break;
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      if (jump_func->value.pass_through.operation == NOP_EXPR)
	{
	  /* Simple pass-through: formal id plus agg_preserved bit.  */
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
	  streamer_write_bitpack (&bp);
	}
      else
	{
	  /* Arithmetic pass-through: the second operand plus formal id.  */
	  stream_write_tree (ob, jump_func->value.pass_through.operand, true);
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	}
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      stream_write_tree (ob, jump_func->value.ancestor.type, true);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      streamer_write_bitpack (&bp);
      break;
    }

  /* Aggregate jump function part: item count, by_ref flag (only when there
     are items), then the (offset, value) items themselves.  */
  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);
  if (count)
    {
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->agg.by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
    {
      streamer_write_uhwi (ob, item->offset);
      stream_write_tree (ob, item->value, true);
    }
}
/* Read in jump function JUMP_FUNC from IB.  Must mirror the field order
   written by ipa_write_jump_function.  */

static void
ipa_read_jump_function (struct lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct cgraph_edge *cs,
			struct data_in *data_in)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;

  jftype = (enum jump_func_type) streamer_read_uhwi (ib);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      jump_func->type = IPA_JF_UNKNOWN;
      break;
    case IPA_JF_KNOWN_TYPE:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	tree base_type = stream_read_tree (ib, data_in);
	tree component_type = stream_read_tree (ib, data_in);

	ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
	break;
      }
    case IPA_JF_CONST:
      ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
      break;
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
	{
	  /* Simple pass-through: formal id plus agg_preserved bit.  */
	  int formal_id = streamer_read_uhwi (ib);
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool agg_preserved = bp_unpack_value (&bp, 1);
	  ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
	}
      else
	{
	  /* Arithmetic pass-through: operand tree then formal id.  */
	  tree operand = stream_read_tree (ib, data_in);
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
					 operation);
	}
      break;
    case IPA_JF_ANCESTOR:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	tree type = stream_read_tree (ib, data_in);
	int formal_id = streamer_read_uhwi (ib);
	struct bitpack_d bp = streamer_read_bitpack (ib);
	bool agg_preserved = bp_unpack_value (&bp, 1);

	ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved);
	break;
      }
    }

  /* Aggregate part: count, optional by_ref bit, then the items.  */
  count = streamer_read_uhwi (ib);
  vec_alloc (jump_func->agg.items, count);
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item item;
      item.offset = streamer_read_uhwi (ib);
      item.value = stream_read_tree (ib, data_in);
      jump_func->agg.items->quick_push (item);
    }
}
3743 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
3744 relevant to indirect inlining to OB. */
3746 static void
3747 ipa_write_indirect_edge_info (struct output_block *ob,
3748 struct cgraph_edge *cs)
3750 struct cgraph_indirect_call_info *ii = cs->indirect_info;
3751 struct bitpack_d bp;
3753 streamer_write_hwi (ob, ii->param_index);
3754 streamer_write_hwi (ob, ii->offset);
3755 bp = bitpack_create (ob->main_stream);
3756 bp_pack_value (&bp, ii->polymorphic, 1);
3757 bp_pack_value (&bp, ii->agg_contents, 1);
3758 bp_pack_value (&bp, ii->by_ref, 1);
3759 streamer_write_bitpack (&bp);
3761 if (ii->polymorphic)
3763 streamer_write_hwi (ob, ii->otr_token);
3764 stream_write_tree (ob, ii->otr_type, true);
/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining from IB.  Mirrors
   ipa_write_indirect_edge_info.  */

static void
ipa_read_indirect_edge_info (struct lto_input_block *ib,
			     struct data_in *data_in ATTRIBUTE_UNUSED,
			     struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  ii->param_index = (int) streamer_read_hwi (ib);
  ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  bp = streamer_read_bitpack (ib);
  ii->polymorphic = bp_unpack_value (&bp, 1);
  ii->agg_contents = bp_unpack_value (&bp, 1);
  ii->by_ref = bp_unpack_value (&bp, 1);
  /* Virtual-call details are only streamed for polymorphic calls.  */
  if (ii->polymorphic)
    {
      ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
      ii->otr_type = stream_read_tree (ib, data_in);
    }
}
/* Stream out NODE info to OB.  The layout must match ipa_read_node_info:
   node reference, param-used bitpack, controlled-uses counts, then per-edge
   jump functions (callees first, then indirect calls with their extra
   indirect edge info).  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
  streamer_write_uhwi (ob, node_ref);

  bp = bitpack_create (ob->main_stream);
  /* Only analyzed, non-clone nodes are expected here.  */
  gcc_assert (info->uses_analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
  streamer_write_bitpack (&bp);
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
      ipa_write_indirect_edge_info (ob, e);
    }
}
/* Stream in NODE info from IB.  Mirrors the layout produced by
   ipa_write_node_info (the node reference itself has already been consumed
   by the caller).  */

static void
ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
		    struct data_in *data_in)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  ipa_initialize_node_params (node);

  bp = streamer_read_bitpack (ib);
  if (ipa_get_param_count (info) != 0)
    info->uses_analysis_done = true;
  info->node_enqueued = false;
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);

      if (!count)
	continue;
      vec_safe_grow_cleared (args->jump_functions, count);

      for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				data_in);
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);

      if (count)
	{
	  vec_safe_grow_cleared (args->jump_functions, count);
	  for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	    ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				    data_in);
	}
      /* Indirect edge info is present even when there are no arguments.  */
      ipa_read_indirect_edge_info (ib, data_in, e);
    }
}
/* Write jump functions for nodes in SET.  */

void
ipa_prop_write_jump_functions (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  /* Nothing to stream when IPA analysis has not been performed.  */
  if (!ipa_node_params_vector.exists ())
    return;

  ob = create_output_block (LTO_section_jump_functions);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->cgraph_node = NULL;
  /* First pass: count the nodes that will be emitted, since the count is
     written before the node records.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (cgraph_function_with_gimple_body_p (node)
	  && IPA_NODE_REF (node) != NULL)
	count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (cgraph_function_with_gimple_body_p (node)
	  && IPA_NODE_REF (node) != NULL)
	ipa_write_node_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
/* Read section in file FILE_DATA of length LEN with data DATA.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
		       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  struct lto_input_block ib_main;
  unsigned int i;
  unsigned int count;

  LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
			header->main_size);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, vNULL);
  /* The stream starts with the number of node records that follow.  */
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = cgraph (lto_symtab_encoder_deref (encoder, index));
      gcc_assert (node->analyzed);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
3971 /* Read ipcp jump functions. */
3973 void
3974 ipa_prop_read_jump_functions (void)
3976 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
3977 struct lto_file_decl_data *file_data;
3978 unsigned int j = 0;
3980 ipa_check_create_node_params ();
3981 ipa_check_create_edge_args ();
3982 ipa_register_cgraph_hooks ();
3984 while ((file_data = file_data_vec[j++]))
3986 size_t len;
3987 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
3989 if (data)
3990 ipa_prop_read_section (file_data, data, len);
3994 /* After merging units, we can get mismatch in argument counts.
3995 Also decl merging might've rendered parameter lists obsolete.
3996 Also compute called_with_variable_arg info. */
3998 void
3999 ipa_update_after_lto_read (void)
4001 struct cgraph_node *node;
4003 ipa_check_create_node_params ();
4004 ipa_check_create_edge_args ();
4006 FOR_EACH_DEFINED_FUNCTION (node)
4007 if (node->analyzed)
4008 ipa_initialize_node_params (node);
/* Stream out the aggregate value replacement chain for NODE to OB.  Writes
   the node reference, the number of replacements, and then each replacement's
   offset, index, value tree and by_ref bit.  Mirrored by
   read_agg_replacement_chain.  */

void
write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  unsigned int count = 0;
  lto_symtab_encoder_t encoder;
  struct ipa_agg_replacement_value *aggvals, *av;

  aggvals = ipa_get_agg_replacements_for_node (node);
  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
  streamer_write_uhwi (ob, node_ref);

  for (av = aggvals; av; av = av->next)
    count++;
  streamer_write_uhwi (ob, count);

  for (av = aggvals; av; av = av->next)
    {
      struct bitpack_d bp;

      streamer_write_uhwi (ob, av->offset);
      streamer_write_uhwi (ob, av->index);
      stream_write_tree (ob, av->value, true);

      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, av->by_ref, 1);
      streamer_write_bitpack (&bp);
    }
}
/* Stream in the aggregate value replacement chain for NODE from IB.  */

static void
read_agg_replacement_chain (struct lto_input_block *ib,
			    struct cgraph_node *node,
			    struct data_in *data_in)
{
  struct ipa_agg_replacement_value *aggvals = NULL;
  unsigned int count, i;

  count = streamer_read_uhwi (ib);
  for (i = 0; i <count; i++)
    {
      struct ipa_agg_replacement_value *av;
      struct bitpack_d bp;

      av = ggc_alloc_ipa_agg_replacement_value ();
      av->offset = streamer_read_uhwi (ib);
      av->index = streamer_read_uhwi (ib);
      av->value = stream_read_tree (ib, data_in);
      bp = streamer_read_bitpack (ib);
      av->by_ref = bp_unpack_value (&bp, 1);
      /* Prepend; the resulting chain is in reverse stream order, which is
	 fine since consumers search the whole chain.  */
      av->next = aggvals;
      aggvals = av;
    }
  ipa_set_node_agg_value_chain (node, aggvals);
}
/* Write all aggregate replacement for nodes in set.  */

void
ipa_prop_write_all_agg_replacement (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  /* Nothing to do when IPA-CP produced no aggregate replacements.  */
  if (!ipa_node_agg_replacements)
    return;

  ob = create_output_block (LTO_section_ipcp_transform);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->cgraph_node = NULL;
  /* First pass: count nodes with replacement chains, since the count
     precedes the records in the stream.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (cgraph_function_with_gimple_body_p (node)
	  && ipa_get_agg_replacements_for_node (node) != NULL)
	count++;
    }

  streamer_write_uhwi (ob, count);

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (cgraph_function_with_gimple_body_p (node)
	  && ipa_get_agg_replacements_for_node (node) != NULL)
	write_agg_replacement_chain (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
4111 /* Read replacements section in file FILE_DATA of length LEN with data
4112 DATA. */
4114 static void
4115 read_replacements_section (struct lto_file_decl_data *file_data,
4116 const char *data,
4117 size_t len)
4119 const struct lto_function_header *header =
4120 (const struct lto_function_header *) data;
4121 const int cfg_offset = sizeof (struct lto_function_header);
4122 const int main_offset = cfg_offset + header->cfg_size;
4123 const int string_offset = main_offset + header->main_size;
4124 struct data_in *data_in;
4125 struct lto_input_block ib_main;
4126 unsigned int i;
4127 unsigned int count;
4129 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4130 header->main_size);
4132 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
4133 header->string_size, vNULL);
4134 count = streamer_read_uhwi (&ib_main);
4136 for (i = 0; i < count; i++)
4138 unsigned int index;
4139 struct cgraph_node *node;
4140 lto_symtab_encoder_t encoder;
4142 index = streamer_read_uhwi (&ib_main);
4143 encoder = file_data->symtab_node_encoder;
4144 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4145 gcc_assert (node->analyzed);
4146 read_agg_replacement_chain (&ib_main, node, data_in);
4148 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4149 len);
4150 lto_data_in_delete (data_in);
4153 /* Read IPA-CP aggregate replacements. */
4155 void
4156 ipa_prop_read_all_agg_replacement (void)
4158 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4159 struct lto_file_decl_data *file_data;
4160 unsigned int j = 0;
4162 while ((file_data = file_data_vec[j++]))
4164 size_t len;
4165 const char *data = lto_get_section_data (file_data,
4166 LTO_section_ipcp_transform,
4167 NULL, &len);
4168 if (data)
4169 read_replacements_section (file_data, data, len);
4173 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4174 NODE. */
4176 static void
4177 adjust_agg_replacement_values (struct cgraph_node *node,
4178 struct ipa_agg_replacement_value *aggval)
4180 struct ipa_agg_replacement_value *v;
4181 int i, c = 0, d = 0, *adj;
4183 if (!node->clone.combined_args_to_skip)
4184 return;
4186 for (v = aggval; v; v = v->next)
4188 gcc_assert (v->index >= 0);
4189 if (c < v->index)
4190 c = v->index;
4192 c++;
4194 adj = XALLOCAVEC (int, c);
4195 for (i = 0; i < c; i++)
4196 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
4198 adj[i] = -1;
4199 d++;
4201 else
4202 adj[i] = i - d;
4204 for (v = aggval; v; v = v->next)
4205 v->index = adj[v->index];
4209 /* Function body transformation phase. */
4211 unsigned int
4212 ipcp_transform_function (struct cgraph_node *node)
4214 vec<ipa_param_descriptor_t> descriptors = vNULL;
4215 struct param_analysis_info *parms_ainfo;
4216 struct ipa_agg_replacement_value *aggval;
4217 gimple_stmt_iterator gsi;
4218 basic_block bb;
4219 int param_count;
4220 bool cfg_changed = false, something_changed = false;
4222 gcc_checking_assert (cfun);
4223 gcc_checking_assert (current_function_decl);
4225 if (dump_file)
4226 fprintf (dump_file, "Modification phase of node %s/%i\n",
4227 cgraph_node_name (node), node->symbol.order);
4229 aggval = ipa_get_agg_replacements_for_node (node);
4230 if (!aggval)
4231 return 0;
4232 param_count = count_formal_params (node->symbol.decl);
4233 if (param_count == 0)
4234 return 0;
4235 adjust_agg_replacement_values (node, aggval);
4236 if (dump_file)
4237 ipa_dump_agg_replacement_values (dump_file, aggval);
4238 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
4239 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
4240 descriptors.safe_grow_cleared (param_count);
4241 ipa_populate_param_decls (node, descriptors);
4243 FOR_EACH_BB (bb)
4244 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4246 struct ipa_agg_replacement_value *v;
4247 gimple stmt = gsi_stmt (gsi);
4248 tree rhs, val, t;
4249 HOST_WIDE_INT offset;
4250 int index;
4251 bool by_ref, vce;
4253 if (!gimple_assign_load_p (stmt))
4254 continue;
4255 rhs = gimple_assign_rhs1 (stmt);
4256 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
4257 continue;
4259 vce = false;
4260 t = rhs;
4261 while (handled_component_p (t))
4263 /* V_C_E can do things like convert an array of integers to one
4264 bigger integer and similar things we do not handle below. */
4265 if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
4267 vce = true;
4268 break;
4270 t = TREE_OPERAND (t, 0);
4272 if (vce)
4273 continue;
4275 if (!ipa_load_from_parm_agg_1 (descriptors, parms_ainfo, stmt,
4276 rhs, &index, &offset, &by_ref))
4277 continue;
4278 for (v = aggval; v; v = v->next)
4279 if (v->index == index
4280 && v->offset == offset)
4281 break;
4282 if (!v || v->by_ref != by_ref)
4283 continue;
4285 gcc_checking_assert (is_gimple_ip_invariant (v->value));
4286 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
4288 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
4289 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
4290 else if (TYPE_SIZE (TREE_TYPE (rhs))
4291 == TYPE_SIZE (TREE_TYPE (v->value)))
4292 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
4293 else
4295 if (dump_file)
4297 fprintf (dump_file, " const ");
4298 print_generic_expr (dump_file, v->value, 0);
4299 fprintf (dump_file, " can't be converted to type of ");
4300 print_generic_expr (dump_file, rhs, 0);
4301 fprintf (dump_file, "\n");
4303 continue;
4306 else
4307 val = v->value;
4309 if (dump_file && (dump_flags & TDF_DETAILS))
4311 fprintf (dump_file, "Modifying stmt:\n ");
4312 print_gimple_stmt (dump_file, stmt, 0, 0);
4314 gimple_assign_set_rhs_from_tree (&gsi, val);
4315 update_stmt (stmt);
4317 if (dump_file && (dump_flags & TDF_DETAILS))
4319 fprintf (dump_file, "into:\n ");
4320 print_gimple_stmt (dump_file, stmt, 0, 0);
4321 fprintf (dump_file, "\n");
4324 something_changed = true;
4325 if (maybe_clean_eh_stmt (stmt)
4326 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4327 cfg_changed = true;
4330 (*ipa_node_agg_replacements)[node->uid] = NULL;
4331 free_parms_ainfo (parms_ainfo, param_count);
4332 descriptors.release ();
4334 if (!something_changed)
4335 return 0;
4336 else if (cfg_changed)
4337 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
4338 else
4339 return TODO_update_ssa_only_virtuals;