New vectorizer messages; message format change.
[official-gcc.git] / gcc / ipa-prop.c
blobca133134d509e7e00469b5aa05b58fbb0811fa9e
1 /* Interprocedural analyses.
2 Copyright (C) 2005-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tree.h"
24 #include "langhooks.h"
25 #include "ggc.h"
26 #include "target.h"
27 #include "cgraph.h"
28 #include "ipa-prop.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-inline.h"
32 #include "ipa-inline.h"
33 #include "gimple.h"
34 #include "flags.h"
35 #include "diagnostic.h"
36 #include "gimple-pretty-print.h"
37 #include "lto-streamer.h"
38 #include "data-streamer.h"
39 #include "tree-streamer.h"
40 #include "params.h"
42 /* Intermediate information about a parameter that is only useful during the
43 run of ipa_analyze_node and is not kept afterwards. */
45 struct param_analysis_info
47 bool parm_modified, ref_modified, pt_modified;
48 bitmap parm_visited_statements, pt_visited_statements;
51 /* Vector where the parameter infos are actually stored. */
52 vec<ipa_node_params_t> ipa_node_params_vector;
53 /* Vector of known aggregate values in cloned nodes. */
54 vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
55 /* Vector where the parameter infos are actually stored. */
56 vec<ipa_edge_args_t, va_gc> *ipa_edge_args_vector;
58 /* Holders of ipa cgraph hooks: */
59 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
60 static struct cgraph_node_hook_list *node_removal_hook_holder;
61 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
62 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
63 static struct cgraph_node_hook_list *function_insertion_hook_holder;
65 /* Description of a reference to an IPA constant. */
66 struct ipa_cst_ref_desc
68 /* Edge that corresponds to the statement which took the reference. */
69 struct cgraph_edge *cs;
70 /* Linked list of duplicates created when call graph edges are cloned. */
71 struct ipa_cst_ref_desc *next_duplicate;
72 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
73 if out of control. */
74 int refcount;
77 /* Allocation pool for reference descriptions. */
79 static alloc_pool ipa_refdesc_pool;
81 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
82 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
84 static bool
85 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
87 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->symbol.decl);
88 struct cl_optimization *os;
90 if (!fs_opts)
91 return false;
92 os = TREE_OPTIMIZATION (fs_opts);
93 return !os->x_optimize || !os->x_flag_ipa_cp;
96 /* Return index of the formal whose tree is PTREE in function which corresponds
97 to INFO. */
99 static int
100 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor_t> descriptors, tree ptree)
102 int i, count;
104 count = descriptors.length ();
105 for (i = 0; i < count; i++)
106 if (descriptors[i].decl == ptree)
107 return i;
109 return -1;
112 /* Return index of the formal whose tree is PTREE in function which corresponds
113 to INFO. */
116 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
118 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
121 /* Populate the param_decl field in parameter DESCRIPTORS that correspond to
122 NODE. */
124 static void
125 ipa_populate_param_decls (struct cgraph_node *node,
126 vec<ipa_param_descriptor_t> &descriptors)
128 tree fndecl;
129 tree fnargs;
130 tree parm;
131 int param_num;
133 fndecl = node->symbol.decl;
134 gcc_assert (gimple_has_body_p (fndecl));
135 fnargs = DECL_ARGUMENTS (fndecl);
136 param_num = 0;
137 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
139 descriptors[param_num].decl = parm;
140 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm));
141 param_num++;
145 /* Return how many formal parameters FNDECL has. */
147 static inline int
148 count_formal_params (tree fndecl)
150 tree parm;
151 int count = 0;
152 gcc_assert (gimple_has_body_p (fndecl));
154 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
155 count++;
157 return count;
160 /* Return the declaration of Ith formal parameter of the function corresponding
161 to INFO. Note there is no setter function as this array is built just once
162 using ipa_initialize_node_params. */
164 void
165 ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
167 fprintf (file, "param #%i", i);
168 if (info->descriptors[i].decl)
170 fprintf (file, " ");
171 print_generic_expr (file, info->descriptors[i].decl, 0);
175 /* Initialize the ipa_node_params structure associated with NODE
176 to hold PARAM_COUNT parameters. */
178 void
179 ipa_alloc_node_params (struct cgraph_node *node, int param_count)
181 struct ipa_node_params *info = IPA_NODE_REF (node);
183 if (!info->descriptors.exists () && param_count)
184 info->descriptors.safe_grow_cleared (param_count);
187 /* Initialize the ipa_node_params structure associated with NODE by counting
188 the function parameters, creating the descriptors and populating their
189 param_decls. */
191 void
192 ipa_initialize_node_params (struct cgraph_node *node)
194 struct ipa_node_params *info = IPA_NODE_REF (node);
196 if (!info->descriptors.exists ())
198 ipa_alloc_node_params (node, count_formal_params (node->symbol.decl));
199 ipa_populate_param_decls (node, info->descriptors);
203 /* Print the jump functions associated with call graph edge CS to file F. */
205 static void
206 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
208 int i, count;
210 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
211 for (i = 0; i < count; i++)
213 struct ipa_jump_func *jump_func;
214 enum jump_func_type type;
216 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
217 type = jump_func->type;
219 fprintf (f, " param %d: ", i);
220 if (type == IPA_JF_UNKNOWN)
221 fprintf (f, "UNKNOWN\n");
222 else if (type == IPA_JF_KNOWN_TYPE)
224 fprintf (f, "KNOWN TYPE: base ");
225 print_generic_expr (f, jump_func->value.known_type.base_type, 0);
226 fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
227 jump_func->value.known_type.offset);
228 print_generic_expr (f, jump_func->value.known_type.component_type, 0);
229 fprintf (f, "\n");
231 else if (type == IPA_JF_CONST)
233 tree val = jump_func->value.constant.value;
234 fprintf (f, "CONST: ");
235 print_generic_expr (f, val, 0);
236 if (TREE_CODE (val) == ADDR_EXPR
237 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
239 fprintf (f, " -> ");
240 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
243 fprintf (f, "\n");
245 else if (type == IPA_JF_PASS_THROUGH)
247 fprintf (f, "PASS THROUGH: ");
248 fprintf (f, "%d, op %s",
249 jump_func->value.pass_through.formal_id,
250 tree_code_name[(int)
251 jump_func->value.pass_through.operation]);
252 if (jump_func->value.pass_through.operation != NOP_EXPR)
254 fprintf (f, " ");
255 print_generic_expr (f,
256 jump_func->value.pass_through.operand, 0);
258 if (jump_func->value.pass_through.agg_preserved)
259 fprintf (f, ", agg_preserved");
260 if (jump_func->value.pass_through.type_preserved)
261 fprintf (f, ", type_preserved");
262 fprintf (f, "\n");
264 else if (type == IPA_JF_ANCESTOR)
266 fprintf (f, "ANCESTOR: ");
267 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
268 jump_func->value.ancestor.formal_id,
269 jump_func->value.ancestor.offset);
270 print_generic_expr (f, jump_func->value.ancestor.type, 0);
271 if (jump_func->value.ancestor.agg_preserved)
272 fprintf (f, ", agg_preserved");
273 if (jump_func->value.ancestor.type_preserved)
274 fprintf (f, ", type_preserved");
275 fprintf (f, "\n");
278 if (jump_func->agg.items)
280 struct ipa_agg_jf_item *item;
281 int j;
283 fprintf (f, " Aggregate passed by %s:\n",
284 jump_func->agg.by_ref ? "reference" : "value");
285 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
287 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
288 item->offset);
289 if (TYPE_P (item->value))
290 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
291 tree_low_cst (TYPE_SIZE (item->value), 1));
292 else
294 fprintf (f, "cst: ");
295 print_generic_expr (f, item->value, 0);
297 fprintf (f, "\n");
304 /* Print the jump functions of all arguments on all call graph edges going from
305 NODE to file F. */
307 void
308 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
310 struct cgraph_edge *cs;
312 fprintf (f, " Jump functions of caller %s/%i:\n", cgraph_node_name (node),
313 node->symbol.order);
314 for (cs = node->callees; cs; cs = cs->next_callee)
316 if (!ipa_edge_args_info_available_for_edge_p (cs))
317 continue;
319 fprintf (f, " callsite %s/%i -> %s/%i : \n",
320 xstrdup (cgraph_node_name (node)), node->symbol.order,
321 xstrdup (cgraph_node_name (cs->callee)),
322 cs->callee->symbol.order);
323 ipa_print_node_jump_functions_for_edge (f, cs);
326 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
328 struct cgraph_indirect_call_info *ii;
329 if (!ipa_edge_args_info_available_for_edge_p (cs))
330 continue;
332 ii = cs->indirect_info;
333 if (ii->agg_contents)
334 fprintf (f, " indirect %s callsite, calling param %i, "
335 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
336 ii->member_ptr ? "member ptr" : "aggregate",
337 ii->param_index, ii->offset,
338 ii->by_ref ? "by reference" : "by_value");
339 else
340 fprintf (f, " indirect %s callsite, calling param %i",
341 ii->polymorphic ? "polymorphic" : "simple", ii->param_index);
343 if (cs->call_stmt)
345 fprintf (f, ", for stmt ");
346 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
348 else
349 fprintf (f, "\n");
350 ipa_print_node_jump_functions_for_edge (f, cs);
354 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
356 void
357 ipa_print_all_jump_functions (FILE *f)
359 struct cgraph_node *node;
361 fprintf (f, "\nJump functions:\n");
362 FOR_EACH_FUNCTION (node)
364 ipa_print_node_jump_functions (f, node);
368 /* Set JFUNC to be a known type jump function. */
370 static void
371 ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
372 tree base_type, tree component_type)
374 jfunc->type = IPA_JF_KNOWN_TYPE;
375 jfunc->value.known_type.offset = offset,
376 jfunc->value.known_type.base_type = base_type;
377 jfunc->value.known_type.component_type = component_type;
380 /* Set JFUNC to be a copy of another jmp (to be used by jump function
381 combination code). The two functions will share their rdesc. */
383 static void
384 ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
385 struct ipa_jump_func *src)
388 gcc_checking_assert (src->type == IPA_JF_CONST);
389 dst->type = IPA_JF_CONST;
390 dst->value.constant = src->value.constant;
393 /* Set JFUNC to be a constant jmp function. */
395 static void
396 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
397 struct cgraph_edge *cs)
399 constant = unshare_expr (constant);
400 if (constant && EXPR_P (constant))
401 SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
402 jfunc->type = IPA_JF_CONST;
403 jfunc->value.constant.value = unshare_expr_without_location (constant);
405 if (TREE_CODE (constant) == ADDR_EXPR
406 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
408 struct ipa_cst_ref_desc *rdesc;
409 if (!ipa_refdesc_pool)
410 ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
411 sizeof (struct ipa_cst_ref_desc), 32);
413 rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
414 rdesc->cs = cs;
415 rdesc->next_duplicate = NULL;
416 rdesc->refcount = 1;
417 jfunc->value.constant.rdesc = rdesc;
419 else
420 jfunc->value.constant.rdesc = NULL;
423 /* Set JFUNC to be a simple pass-through jump function. */
424 static void
425 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
426 bool agg_preserved, bool type_preserved)
428 jfunc->type = IPA_JF_PASS_THROUGH;
429 jfunc->value.pass_through.operand = NULL_TREE;
430 jfunc->value.pass_through.formal_id = formal_id;
431 jfunc->value.pass_through.operation = NOP_EXPR;
432 jfunc->value.pass_through.agg_preserved = agg_preserved;
433 jfunc->value.pass_through.type_preserved = type_preserved;
436 /* Set JFUNC to be an arithmetic pass through jump function. */
438 static void
439 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
440 tree operand, enum tree_code operation)
442 jfunc->type = IPA_JF_PASS_THROUGH;
443 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
444 jfunc->value.pass_through.formal_id = formal_id;
445 jfunc->value.pass_through.operation = operation;
446 jfunc->value.pass_through.agg_preserved = false;
447 jfunc->value.pass_through.type_preserved = false;
450 /* Set JFUNC to be an ancestor jump function. */
452 static void
453 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
454 tree type, int formal_id, bool agg_preserved,
455 bool type_preserved)
457 jfunc->type = IPA_JF_ANCESTOR;
458 jfunc->value.ancestor.formal_id = formal_id;
459 jfunc->value.ancestor.offset = offset;
460 jfunc->value.ancestor.type = type;
461 jfunc->value.ancestor.agg_preserved = agg_preserved;
462 jfunc->value.ancestor.type_preserved = type_preserved;
465 /* Extract the acual BINFO being described by JFUNC which must be a known type
466 jump function. */
468 tree
469 ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
471 tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
472 if (!base_binfo)
473 return NULL_TREE;
474 return get_binfo_at_offset (base_binfo,
475 jfunc->value.known_type.offset,
476 jfunc->value.known_type.component_type);
479 /* Structure to be passed in between detect_type_change and
480 check_stmt_for_type_change. */
482 struct type_change_info
484 /* Offset into the object where there is the virtual method pointer we are
485 looking for. */
486 HOST_WIDE_INT offset;
487 /* The declaration or SSA_NAME pointer of the base that we are checking for
488 type change. */
489 tree object;
490 /* If we actually can tell the type that the object has changed to, it is
491 stored in this field. Otherwise it remains NULL_TREE. */
492 tree known_current_type;
493 /* Set to true if dynamic type change has been detected. */
494 bool type_maybe_changed;
495 /* Set to true if multiple types have been encountered. known_current_type
496 must be disregarded in that case. */
497 bool multiple_types_encountered;
500 /* Return true if STMT can modify a virtual method table pointer.
502 This function makes special assumptions about both constructors and
503 destructors which are all the functions that are allowed to alter the VMT
504 pointers. It assumes that destructors begin with assignment into all VMT
505 pointers and that constructors essentially look in the following way:
507 1) The very first thing they do is that they call constructors of ancestor
508 sub-objects that have them.
510 2) Then VMT pointers of this and all its ancestors is set to new values
511 corresponding to the type corresponding to the constructor.
513 3) Only afterwards, other stuff such as constructor of member sub-objects
514 and the code written by the user is run. Only this may include calling
515 virtual functions, directly or indirectly.
517 There is no way to call a constructor of an ancestor sub-object in any
518 other way.
520 This means that we do not have to care whether constructors get the correct
521 type information because they will always change it (in fact, if we define
522 the type to be given by the VMT pointer, it is undefined).
524 The most important fact to derive from the above is that if, for some
525 statement in the section 3, we try to detect whether the dynamic type has
526 changed, we can safely ignore all calls as we examine the function body
527 backwards until we reach statements in section 2 because these calls cannot
528 be ancestor constructors or destructors (if the input is not bogus) and so
529 do not change the dynamic type (this holds true only for automatically
530 allocated objects but at the moment we devirtualize only these). We then
531 must detect that statements in section 2 change the dynamic type and can try
532 to derive the new type. That is enough and we can stop, we will never see
533 the calls into constructors of sub-objects in this code. Therefore we can
534 safely ignore all call statements that we traverse.
537 static bool
538 stmt_may_be_vtbl_ptr_store (gimple stmt)
540 if (is_gimple_call (stmt))
541 return false;
542 else if (is_gimple_assign (stmt))
544 tree lhs = gimple_assign_lhs (stmt);
546 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
548 if (flag_strict_aliasing
549 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
550 return false;
552 if (TREE_CODE (lhs) == COMPONENT_REF
553 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
554 return false;
555 /* In the future we might want to use get_base_ref_and_offset to find
556 if there is a field corresponding to the offset and if so, proceed
557 almost like if it was a component ref. */
560 return true;
563 /* If STMT can be proved to be an assignment to the virtual method table
564 pointer of ANALYZED_OBJ and the type associated with the new table
565 identified, return the type. Otherwise return NULL_TREE. */
567 static tree
568 extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
570 HOST_WIDE_INT offset, size, max_size;
571 tree lhs, rhs, base;
573 if (!gimple_assign_single_p (stmt))
574 return NULL_TREE;
576 lhs = gimple_assign_lhs (stmt);
577 rhs = gimple_assign_rhs1 (stmt);
578 if (TREE_CODE (lhs) != COMPONENT_REF
579 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
580 || TREE_CODE (rhs) != ADDR_EXPR)
581 return NULL_TREE;
582 rhs = get_base_address (TREE_OPERAND (rhs, 0));
583 if (!rhs
584 || TREE_CODE (rhs) != VAR_DECL
585 || !DECL_VIRTUAL_P (rhs))
586 return NULL_TREE;
588 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
589 if (offset != tci->offset
590 || size != POINTER_SIZE
591 || max_size != POINTER_SIZE)
592 return NULL_TREE;
593 if (TREE_CODE (base) == MEM_REF)
595 if (TREE_CODE (tci->object) != MEM_REF
596 || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
597 || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
598 TREE_OPERAND (base, 1)))
599 return NULL_TREE;
601 else if (tci->object != base)
602 return NULL_TREE;
604 return DECL_CONTEXT (rhs);
607 /* Callback of walk_aliased_vdefs and a helper function for
608 detect_type_change to check whether a particular statement may modify
609 the virtual table pointer, and if possible also determine the new type of
610 the (sub-)object. It stores its result into DATA, which points to a
611 type_change_info structure. */
613 static bool
614 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
616 gimple stmt = SSA_NAME_DEF_STMT (vdef);
617 struct type_change_info *tci = (struct type_change_info *) data;
619 if (stmt_may_be_vtbl_ptr_store (stmt))
621 tree type;
622 type = extr_type_from_vtbl_ptr_store (stmt, tci);
623 if (tci->type_maybe_changed
624 && type != tci->known_current_type)
625 tci->multiple_types_encountered = true;
626 tci->known_current_type = type;
627 tci->type_maybe_changed = true;
628 return true;
630 else
631 return false;
636 /* Like detect_type_change but with extra argument COMP_TYPE which will become
637 the component type part of new JFUNC of dynamic type change is detected and
638 the new base type is identified. */
640 static bool
641 detect_type_change_1 (tree arg, tree base, tree comp_type, gimple call,
642 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
644 struct type_change_info tci;
645 ao_ref ao;
647 gcc_checking_assert (DECL_P (arg)
648 || TREE_CODE (arg) == MEM_REF
649 || handled_component_p (arg));
650 /* Const calls cannot call virtual methods through VMT and so type changes do
651 not matter. */
652 if (!flag_devirtualize || !gimple_vuse (call))
653 return false;
655 ao_ref_init (&ao, arg);
656 ao.base = base;
657 ao.offset = offset;
658 ao.size = POINTER_SIZE;
659 ao.max_size = ao.size;
661 tci.offset = offset;
662 tci.object = get_base_address (arg);
663 tci.known_current_type = NULL_TREE;
664 tci.type_maybe_changed = false;
665 tci.multiple_types_encountered = false;
667 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
668 &tci, NULL);
669 if (!tci.type_maybe_changed)
670 return false;
672 if (!tci.known_current_type
673 || tci.multiple_types_encountered
674 || offset != 0)
675 jfunc->type = IPA_JF_UNKNOWN;
676 else
677 ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);
679 return true;
682 /* Detect whether the dynamic type of ARG has changed (before callsite CALL) by
683 looking for assignments to its virtual table pointer. If it is, return true
684 and fill in the jump function JFUNC with relevant type information or set it
685 to unknown. ARG is the object itself (not a pointer to it, unless
686 dereferenced). BASE is the base of the memory access as returned by
687 get_ref_base_and_extent, as is the offset. */
689 static bool
690 detect_type_change (tree arg, tree base, gimple call,
691 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
693 return detect_type_change_1 (arg, base, TREE_TYPE (arg), call, jfunc, offset);
696 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
697 SSA name (its dereference will become the base and the offset is assumed to
698 be zero). */
700 static bool
701 detect_type_change_ssa (tree arg, gimple call, struct ipa_jump_func *jfunc)
703 tree comp_type;
705 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
706 if (!flag_devirtualize
707 || !POINTER_TYPE_P (TREE_TYPE (arg))
708 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != RECORD_TYPE)
709 return false;
711 comp_type = TREE_TYPE (TREE_TYPE (arg));
712 arg = build2 (MEM_REF, ptr_type_node, arg,
713 build_int_cst (ptr_type_node, 0));
715 return detect_type_change_1 (arg, arg, comp_type, call, jfunc, 0);
718 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
719 boolean variable pointed to by DATA. */
721 static bool
722 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
723 void *data)
725 bool *b = (bool *) data;
726 *b = true;
727 return true;
730 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
731 a value known not to be modified in this function before reaching the
732 statement STMT. PARM_AINFO is a pointer to a structure containing temporary
733 information about the parameter. */
735 static bool
736 parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
737 gimple stmt, tree parm_load)
739 bool modified = false;
740 bitmap *visited_stmts;
741 ao_ref refd;
743 if (parm_ainfo && parm_ainfo->parm_modified)
744 return false;
746 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
747 ao_ref_init (&refd, parm_load);
748 /* We can cache visited statements only when parm_ainfo is available and when
749 we are looking at a naked load of the whole parameter. */
750 if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
751 visited_stmts = NULL;
752 else
753 visited_stmts = &parm_ainfo->parm_visited_statements;
754 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
755 visited_stmts);
756 if (parm_ainfo && modified)
757 parm_ainfo->parm_modified = true;
758 return !modified;
761 /* If STMT is an assignment that loads a value from an parameter declaration,
762 return the index of the parameter in ipa_node_params which has not been
763 modified. Otherwise return -1. */
765 static int
766 load_from_unmodified_param (vec<ipa_param_descriptor_t> descriptors,
767 struct param_analysis_info *parms_ainfo,
768 gimple stmt)
770 int index;
771 tree op1;
773 if (!gimple_assign_single_p (stmt))
774 return -1;
776 op1 = gimple_assign_rhs1 (stmt);
777 if (TREE_CODE (op1) != PARM_DECL)
778 return -1;
780 index = ipa_get_param_decl_index_1 (descriptors, op1);
781 if (index < 0
782 || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
783 : NULL, stmt, op1))
784 return -1;
786 return index;
789 /* Return true if memory reference REF loads data that are known to be
790 unmodified in this function before reaching statement STMT. PARM_AINFO, if
791 non-NULL, is a pointer to a structure containing temporary information about
792 PARM. */
794 static bool
795 parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
796 gimple stmt, tree ref)
798 bool modified = false;
799 ao_ref refd;
801 gcc_checking_assert (gimple_vuse (stmt));
802 if (parm_ainfo && parm_ainfo->ref_modified)
803 return false;
805 ao_ref_init (&refd, ref);
806 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
807 NULL);
808 if (parm_ainfo && modified)
809 parm_ainfo->ref_modified = true;
810 return !modified;
813 /* Return true if the data pointed to by PARM is known to be unmodified in this
814 function before reaching call statement CALL into which it is passed.
815 PARM_AINFO is a pointer to a structure containing temporary information
816 about PARM. */
818 static bool
819 parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
820 gimple call, tree parm)
822 bool modified = false;
823 ao_ref refd;
825 /* It's unnecessary to calculate anything about memory contnets for a const
826 function because it is not goin to use it. But do not cache the result
827 either. Also, no such calculations for non-pointers. */
828 if (!gimple_vuse (call)
829 || !POINTER_TYPE_P (TREE_TYPE (parm)))
830 return false;
832 if (parm_ainfo->pt_modified)
833 return false;
835 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
836 walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
837 parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
838 if (modified)
839 parm_ainfo->pt_modified = true;
840 return !modified;
843 /* Return true if we can prove that OP is a memory reference loading unmodified
844 data from an aggregate passed as a parameter and if the aggregate is passed
845 by reference, that the alias type of the load corresponds to the type of the
846 formal parameter (so that we can rely on this type for TBAA in callers).
847 INFO and PARMS_AINFO describe parameters of the current function (but the
848 latter can be NULL), STMT is the load statement. If function returns true,
849 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
850 within the aggregate and whether it is a load from a value passed by
851 reference respectively. */
853 static bool
854 ipa_load_from_parm_agg_1 (vec<ipa_param_descriptor_t> descriptors,
855 struct param_analysis_info *parms_ainfo, gimple stmt,
856 tree op, int *index_p, HOST_WIDE_INT *offset_p,
857 bool *by_ref_p)
859 int index;
860 HOST_WIDE_INT size, max_size;
861 tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);
863 if (max_size == -1 || max_size != size || *offset_p < 0)
864 return false;
866 if (DECL_P (base))
868 int index = ipa_get_param_decl_index_1 (descriptors, base);
869 if (index >= 0
870 && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
871 : NULL, stmt, op))
873 *index_p = index;
874 *by_ref_p = false;
875 return true;
877 return false;
880 if (TREE_CODE (base) != MEM_REF
881 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
882 || !integer_zerop (TREE_OPERAND (base, 1)))
883 return false;
885 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
887 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
888 index = ipa_get_param_decl_index_1 (descriptors, parm);
890 else
892 /* This branch catches situations where a pointer parameter is not a
893 gimple register, for example:
895 void hip7(S*) (struct S * p)
897 void (*<T2e4>) (struct S *) D.1867;
898 struct S * p.1;
900 <bb 2>:
901 p.1_1 = p;
902 D.1867_2 = p.1_1->f;
903 D.1867_2 ();
904 gdp = &p;
907 gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
908 index = load_from_unmodified_param (descriptors, parms_ainfo, def);
911 if (index >= 0
912 && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
913 stmt, op))
915 *index_p = index;
916 *by_ref_p = true;
917 return true;
919 return false;
922 /* Just like the previous function, just without the param_analysis_info
923 pointer, for users outside of this file. */
925 bool
926 ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
927 tree op, int *index_p, HOST_WIDE_INT *offset_p,
928 bool *by_ref_p)
930 return ipa_load_from_parm_agg_1 (info->descriptors, NULL, stmt, op, index_p,
931 offset_p, by_ref_p);
934 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
935 of an assignment statement STMT, try to determine whether we are actually
936 handling any of the following cases and construct an appropriate jump
937 function into JFUNC if so:
939 1) The passed value is loaded from a formal parameter which is not a gimple
940 register (most probably because it is addressable, the value has to be
941 scalar) and we can guarantee the value has not changed. This case can
942 therefore be described by a simple pass-through jump function. For example:
944 foo (int a)
946 int a.0;
948 a.0_2 = a;
949 bar (a.0_2);
951 2) The passed value can be described by a simple arithmetic pass-through
952 jump function. E.g.
954 foo (int a)
956 int D.2064;
958 D.2064_4 = a.1(D) + 4;
959 bar (D.2064_4);
961 This case can also occur in combination of the previous one, e.g.:
963 foo (int a, int z)
965 int a.0;
966 int D.2064;
968 a.0_3 = a;
969 D.2064_4 = a.0_3 + 4;
970 foo (D.2064_4);
972 3) The passed value is an address of an object within another one (which
973 also passed by reference). Such situations are described by an ancestor
974 jump function and describe situations such as:
976 B::foo() (struct B * const this)
978 struct A * D.1845;
980 D.1845_2 = &this_1(D)->D.1748;
981 A::bar (D.1845_2);
983 INFO is the structure describing individual parameters access different
984 stages of IPA optimizations. PARMS_AINFO contains the information that is
985 only needed for intraprocedural analysis. */
 987 static void
 988 compute_complex_assign_jump_func (struct ipa_node_params *info,
 989 struct param_analysis_info *parms_ainfo,
 990 struct ipa_jump_func *jfunc,
 991 gimple call, gimple stmt, tree name)
 993 HOST_WIDE_INT offset, size, max_size;
 994 tree op1, tc_ssa, base, ssa;
 995 int index;
 997 op1 = gimple_assign_rhs1 (stmt);
   /* Cases 1 and 2 from the overview comment: the RHS is an SSA name (or a
      simple operation on one) that can be traced back to an unmodified
      formal parameter; INDEX becomes that parameter's index or -1.  */
 999 if (TREE_CODE (op1) == SSA_NAME)
1001 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1002 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1003 else
1004 index = load_from_unmodified_param (info->descriptors, parms_ainfo,
1005 SSA_NAME_DEF_STMT (op1));
1006 tc_ssa = op1;
1008 else
1010 index = load_from_unmodified_param (info->descriptors, parms_ainfo, stmt);
1011 tc_ssa = gimple_assign_lhs (stmt);
1014 if (index >= 0)
1016 tree op2 = gimple_assign_rhs2 (stmt);
   /* A second operand means an arithmetic pass-through; it must be an
      interprocedural invariant, and (except for comparisons, whose result
      type differs from the operand type by design) the result type must
      match the operand type.  */
1018 if (op2)
1020 if (!is_gimple_ip_invariant (op2)
1021 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
1022 && !useless_type_conversion_p (TREE_TYPE (name),
1023 TREE_TYPE (op1))))
1024 return;
1026 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1027 gimple_assign_rhs_code (stmt));
1029 else if (gimple_assign_single_p (stmt))
1031 bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
1032 call, tc_ssa);
1033 bool type_p = !detect_type_change_ssa (tc_ssa, call, jfunc);
1034 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1035 ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
1037 return;
   /* Case 3: the RHS is the address of a field inside a record passed by
      reference -- describe it with an ancestor jump function.  */
1040 if (TREE_CODE (op1) != ADDR_EXPR)
1041 return;
1042 op1 = TREE_OPERAND (op1, 0);
1043 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
1044 return;
1045 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
1046 if (TREE_CODE (base) != MEM_REF
1047 /* If this is a varying address, punt. */
1048 || max_size == -1
1049 || max_size != size)
1050 return;
1051 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
1052 ssa = TREE_OPERAND (base, 0);
1053 if (TREE_CODE (ssa) != SSA_NAME
1054 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
1055 || offset < 0)
1056 return;
1058 /* Dynamic types are changed in constructors and destructors. */
1059 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
1060 if (index >= 0)
1062 bool type_p = !detect_type_change (op1, base, call, jfunc, offset);
1063 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1064 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index,
1065 parm_ref_data_pass_through_p (&parms_ainfo[index],
1066 call, ssa), type_p);
1070 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1071 it looks like:
1073 iftmp.1_3 = &obj_2(D)->D.1762;
1075 The base of the MEM_REF must be a default definition SSA NAME of a
1076 parameter. Return NULL_TREE if it looks otherwise. If case of success, the
1077 whole MEM_REF expression is returned and the offset calculated from any
1078 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1079 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1081 static tree
1082 get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
1084 HOST_WIDE_INT size, max_size;
1085 tree expr, parm, obj;
1087 if (!gimple_assign_single_p (assign))
1088 return NULL_TREE;
1089 expr = gimple_assign_rhs1 (assign);
1091 if (TREE_CODE (expr) != ADDR_EXPR)
1092 return NULL_TREE;
1093 expr = TREE_OPERAND (expr, 0);
1094 obj = expr;
1095 expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
1097 if (TREE_CODE (expr) != MEM_REF
1098 /* If this is a varying address, punt. */
1099 || max_size == -1
1100 || max_size != size
1101 || *offset < 0)
1102 return NULL_TREE;
1103 parm = TREE_OPERAND (expr, 0);
1104 if (TREE_CODE (parm) != SSA_NAME
1105 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1106 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1107 return NULL_TREE;
1109 *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
1110 *obj_p = obj;
1111 return expr;
1115 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1116 statement PHI, try to find out whether NAME is in fact a
1117 multiple-inheritance typecast from a descendant into an ancestor of a formal
1118 parameter and thus can be described by an ancestor jump function and if so,
1119 write the appropriate function into JFUNC.
1121 Essentially we want to match the following pattern:
1123 if (obj_2(D) != 0B)
1124 goto <bb 3>;
1125 else
1126 goto <bb 4>;
1128 <bb 3>:
1129 iftmp.1_3 = &obj_2(D)->D.1762;
1131 <bb 4>:
1132 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1133 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1134 return D.1879_6; */
1136 static void
1137 compute_complex_ancestor_jump_func (struct ipa_node_params *info,
1138 struct param_analysis_info *parms_ainfo,
1139 struct ipa_jump_func *jfunc,
1140 gimple call, gimple phi)
1142 HOST_WIDE_INT offset;
1143 gimple assign, cond;
1144 basic_block phi_bb, assign_bb, cond_bb;
1145 tree tmp, parm, expr, obj;
1146 int index, i;
   /* The PHI must merge exactly two values, one of which is a null pointer;
      TMP is the other, the field-address temporary.  */
1148 if (gimple_phi_num_args (phi) != 2)
1149 return;
1151 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1152 tmp = PHI_ARG_DEF (phi, 0);
1153 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1154 tmp = PHI_ARG_DEF (phi, 1);
1155 else
1156 return;
1157 if (TREE_CODE (tmp) != SSA_NAME
1158 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1159 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1160 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1161 return;
   /* TMP must be set by an ancestor-address assignment of a formal
      parameter; see get_ancestor_addr_info.  */
1163 assign = SSA_NAME_DEF_STMT (tmp);
1164 assign_bb = gimple_bb (assign);
1165 if (!single_pred_p (assign_bb))
1166 return;
1167 expr = get_ancestor_addr_info (assign, &obj, &offset);
1168 if (!expr)
1169 return;
1170 parm = TREE_OPERAND (expr, 0);
1171 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1172 gcc_assert (index >= 0);
   /* The guarding condition must be a "parm != 0" test.  */
1174 cond_bb = single_pred (assign_bb);
1175 cond = last_stmt (cond_bb);
1176 if (!cond
1177 || gimple_code (cond) != GIMPLE_COND
1178 || gimple_cond_code (cond) != NE_EXPR
1179 || gimple_cond_lhs (cond) != parm
1180 || !integer_zerop (gimple_cond_rhs (cond)))
1181 return;
   /* Both PHI predecessors must come from the matched diamond.  */
1183 phi_bb = gimple_bb (phi);
1184 for (i = 0; i < 2; i++)
1186 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1187 if (pred != assign_bb && pred != cond_bb)
1188 return;
1191 bool type_p = !detect_type_change (obj, expr, call, jfunc, offset);
1192 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1193 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index,
1194 parm_ref_data_pass_through_p (&parms_ainfo[index],
1195 call, parm), type_p);
1198 /* Given OP which is passed as an actual argument to a called function,
1199 determine if it is possible to construct a KNOWN_TYPE jump function for it
1200 and if so, create one and store it to JFUNC. */
1202 static void
1203 compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
1204 gimple call)
1206 HOST_WIDE_INT offset, size, max_size;
1207 tree base;
1209 if (!flag_devirtualize
1210 || TREE_CODE (op) != ADDR_EXPR
1211 || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE)
1212 return;
1214 op = TREE_OPERAND (op, 0);
1215 base = get_ref_base_and_extent (op, &offset, &size, &max_size);
1216 if (!DECL_P (base)
1217 || max_size == -1
1218 || max_size != size
1219 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
1220 || is_global_var (base))
1221 return;
1223 if (!TYPE_BINFO (TREE_TYPE (base))
1224 || detect_type_change (op, base, call, jfunc, offset))
1225 return;
1227 ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base), TREE_TYPE (op));
1230 /* Inspect the given TYPE and return true iff it has the same structure (the
1231 same number of fields of the same types) as a C++ member pointer. If
1232 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1233 corresponding fields there. */
1235 static bool
1236 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1238 tree fld;
1240 if (TREE_CODE (type) != RECORD_TYPE)
1241 return false;
1243 fld = TYPE_FIELDS (type);
1244 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1245 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1246 || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
1247 return false;
1249 if (method_ptr)
1250 *method_ptr = fld;
1252 fld = DECL_CHAIN (fld);
1253 if (!fld || INTEGRAL_TYPE_P (fld)
1254 || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
1255 return false;
1256 if (delta)
1257 *delta = fld;
1259 if (DECL_CHAIN (fld))
1260 return false;
1262 return true;
1265 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1266 return the rhs of its defining statement. Otherwise return RHS as it
1267 is. */
1269 static inline tree
1270 get_ssa_def_if_simple_copy (tree rhs)
1272 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1274 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1276 if (gimple_assign_single_p (def_stmt))
1277 rhs = gimple_assign_rhs1 (def_stmt);
1278 else
1279 break;
1281 return rhs;
1284 /* Simple linked list, describing known contents of an aggregate before a
1285 call.  Nodes are kept sorted by increasing offset. */
1287 struct ipa_known_agg_contents_list
1289 /* Offset and size of the described part of the aggregate. */
1290 HOST_WIDE_INT offset, size;
1291 /* Known constant value or NULL if the contents is known to be unknown. */
1292 tree constant;
1293 /* Pointer to the next structure in the list. */
1294 struct ipa_known_agg_contents_list *next;
1297 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1298 in ARG is filled in with constant values. ARG can either be an aggregate
1299 expression or a pointer to an aggregate. JFUNC is the jump function into
1300 which the constants are subsequently stored. */
1302 static void
1303 determine_known_aggregate_parts (gimple call, tree arg,
1304 struct ipa_jump_func *jfunc)
1306 struct ipa_known_agg_contents_list *list = NULL;
1307 int item_count = 0, const_count = 0;
1308 HOST_WIDE_INT arg_offset, arg_size;
1309 gimple_stmt_iterator gsi;
1310 tree arg_base;
1311 bool check_ref, by_ref;
1312 ao_ref r;
1314 /* The function operates in three stages. First, we prepare check_ref, r,
1315 arg_base and arg_offset based on what is actually passed as an actual
1316 argument. */
1318 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1320 by_ref = true;
1321 if (TREE_CODE (arg) == SSA_NAME)
1323 tree type_size;
1324 if (!host_integerp (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg))), 1))
1325 return;
1326 check_ref = true;
1327 arg_base = arg;
1328 arg_offset = 0;
1329 type_size = TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)));
1330 arg_size = tree_low_cst (type_size, 1);
1331 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1333 else if (TREE_CODE (arg) == ADDR_EXPR)
1335 HOST_WIDE_INT arg_max_size;
1337 arg = TREE_OPERAND (arg, 0);
1338 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1339 &arg_max_size);
1340 if (arg_max_size == -1
1341 || arg_max_size != arg_size
1342 || arg_offset < 0)
1343 return;
1344 if (DECL_P (arg_base))
1346 tree size;
1347 check_ref = false;
1348 size = build_int_cst (integer_type_node, arg_size);
1349 ao_ref_init_from_ptr_and_size (&r, arg_base, size);
1351 else
1352 return;
1354 else
1355 return;
1357 else
1359 HOST_WIDE_INT arg_max_size;
1361 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1363 by_ref = false;
1364 check_ref = false;
1365 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1366 &arg_max_size);
1367 if (arg_max_size == -1
1368 || arg_max_size != arg_size
1369 || arg_offset < 0)
1370 return;
1372 ao_ref_init (&r, arg);
1375 /* Second stage walks back the BB, looks at individual statements and as long
1376 as it is confident of how the statements affect contents of the
1377 aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
1378 describing it. */
1379 gsi = gsi_for_stmt (call);
1380 gsi_prev (&gsi);
1381 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1383 struct ipa_known_agg_contents_list *n, **p;
1384 gimple stmt = gsi_stmt (gsi);
1385 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1386 tree lhs, rhs, lhs_base;
1387 bool partial_overlap;
   /* Statements that cannot clobber the tracked memory are irrelevant;
      anything else that is not a simple store stops the walk.  */
1389 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1390 continue;
1391 if (!gimple_assign_single_p (stmt))
1392 break;
1394 lhs = gimple_assign_lhs (stmt);
1395 rhs = gimple_assign_rhs1 (stmt);
1396 if (!is_gimple_reg_type (rhs)
1397 || TREE_CODE (lhs) == BIT_FIELD_REF
1398 || contains_bitfld_component_ref_p (lhs))
1399 break;
   /* Stores straddling the boundary of the tracked region cannot be
      represented; give up on them.  */
1401 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1402 &lhs_max_size);
1403 if (lhs_max_size == -1
1404 || lhs_max_size != lhs_size
1405 || (lhs_offset < arg_offset
1406 && lhs_offset + lhs_size > arg_offset)
1407 || (lhs_offset < arg_offset + arg_size
1408 && lhs_offset + lhs_size > arg_offset + arg_size))
1409 break;
1411 if (check_ref)
1413 if (TREE_CODE (lhs_base) != MEM_REF
1414 || TREE_OPERAND (lhs_base, 0) != arg_base
1415 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1416 break;
1418 else if (lhs_base != arg_base)
1420 if (DECL_P (lhs_base))
1421 continue;
1422 else
1423 break;
   /* Stores entirely outside the tracked region are irrelevant.  */
1426 if (lhs_offset + lhs_size < arg_offset
1427 || lhs_offset >= (arg_offset + arg_size))
1428 continue;
   /* Find the insertion point in the offset-sorted list; bail out on any
      partial overlap with an already recorded entry.  */
1430 partial_overlap = false;
1431 p = &list;
1432 while (*p && (*p)->offset < lhs_offset)
1434 if ((*p)->offset + (*p)->size > lhs_offset)
1436 partial_overlap = true;
1437 break;
1439 p = &(*p)->next;
1441 if (partial_overlap)
1442 break;
1443 if (*p && (*p)->offset < lhs_offset + lhs_size)
1445 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1446 /* We already know this value is subsequently overwritten with
1447 something else. */
1448 continue;
1449 else
1450 /* Otherwise this is a partial overlap which we cannot
1451 represent. */
1452 break;
1455 rhs = get_ssa_def_if_simple_copy (rhs);
1456 n = XALLOCA (struct ipa_known_agg_contents_list);
1457 n->size = lhs_size;
1458 n->offset = lhs_offset;
1459 if (is_gimple_ip_invariant (rhs))
1461 n->constant = rhs;
1462 const_count++;
1464 else
1465 n->constant = NULL_TREE;
1466 n->next = *p;
1467 *p = n;
1469 item_count++;
1470 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1471 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1472 break;
1475 /* Third stage just goes over the list and creates an appropriate vector of
1476 ipa_agg_jf_item structures out of it, of course only if there are
1477 any known constants to begin with. */
1479 if (const_count)
1481 jfunc->agg.by_ref = by_ref;
1482 vec_alloc (jfunc->agg.items, const_count);
1483 while (list)
1485 if (list->constant)
1487 struct ipa_agg_jf_item item;
1488 item.offset = list->offset - arg_offset;
1489 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1490 item.value = unshare_expr_without_location (list->constant);
1491 jfunc->agg.items->quick_push (item);
1493 list = list->next;
1498 /* Compute jump function for all arguments of callsite CS and insert the
1499 information in the jump_functions array in the ipa_edge_args corresponding
1500 to this callsite. */
1502 static void
1503 ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
1504 struct cgraph_edge *cs)
1506 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1507 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1508 gimple call = cs->call_stmt;
1509 int n, arg_num = gimple_call_num_args (call);
   /* Nothing to do for argument-less calls or if the jump functions have
      already been computed for this edge.  */
1511 if (arg_num == 0 || args->jump_functions)
1512 return;
1513 vec_safe_grow_cleared (args->jump_functions, arg_num);
1515 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1516 return;
1518 for (n = 0; n < arg_num; n++)
1520 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1521 tree arg = gimple_call_arg (call, n);
1523 if (is_gimple_ip_invariant (arg))
1524 ipa_set_jf_constant (jfunc, arg, cs);
1525 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1526 && TREE_CODE (arg) == PARM_DECL)
1528 int index = ipa_get_param_decl_index (info, arg);
1530 gcc_assert (index >=0);
1531 /* Aggregate passed by value, check for pass-through, otherwise we
1532 will attempt to fill in aggregate contents later in this
1533 for cycle. */
1534 if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
1536 ipa_set_jf_simple_pass_through (jfunc, index, false, false);
1537 continue;
1540 else if (TREE_CODE (arg) == SSA_NAME)
1542 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1544 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1545 if (index >= 0)
1547 bool agg_p, type_p;
1548 agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
1549 call, arg);
1550 type_p = !detect_type_change_ssa (arg, call, jfunc);
1551 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1552 ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
1555 else
1557 gimple stmt = SSA_NAME_DEF_STMT (arg);
1558 if (is_gimple_assign (stmt))
1559 compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
1560 call, stmt, arg);
1561 else if (gimple_code (stmt) == GIMPLE_PHI)
1562 compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
1563 call, stmt);
1566 else
1567 compute_known_type_jump_func (arg, jfunc, call);
   /* If the above did not establish that the aggregate pointed to (or
      contained in) ARG is preserved, try to record its known constant
      parts.  */
1569 if ((jfunc->type != IPA_JF_PASS_THROUGH
1570 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1571 && (jfunc->type != IPA_JF_ANCESTOR
1572 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1573 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1574 || (POINTER_TYPE_P (TREE_TYPE (arg)))))
1575 determine_known_aggregate_parts (call, arg, jfunc);
1579 /* Compute jump functions for all edges - both direct and indirect - outgoing
1580 from NODE. Also count the actual arguments in the process. */
1582 static void
1583 ipa_compute_jump_functions (struct cgraph_node *node,
1584 struct param_analysis_info *parms_ainfo)
1586 struct cgraph_edge *cs;
1588 for (cs = node->callees; cs; cs = cs->next_callee)
1590 struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
1591 NULL);
1592 /* We do not need to bother analyzing calls to unknown
1593 functions unless they may become known during lto/whopr. */
1594 if (!callee->symbol.definition && !flag_lto)
1595 continue;
1596 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
1599 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
1600 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
1603 /* If STMT looks like a statement loading a value from a member pointer formal
1604 parameter, return that parameter and store the offset of the field to
1605 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1606 might be clobbered). If USE_DELTA, then we look for a use of the delta
1607 field rather than the pfn. */
1609 static tree
1610 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
1611 HOST_WIDE_INT *offset_p)
1613 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1615 if (!gimple_assign_single_p (stmt))
1616 return NULL_TREE;
   /* The load may be either a COMPONENT_REF of a field, or a MEM_REF with
      an explicit byte offset; remember which form we saw.  */
1618 rhs = gimple_assign_rhs1 (stmt);
1619 if (TREE_CODE (rhs) == COMPONENT_REF)
1621 ref_field = TREE_OPERAND (rhs, 1);
1622 rhs = TREE_OPERAND (rhs, 0);
1624 else
1625 ref_field = NULL_TREE;
1626 if (TREE_CODE (rhs) != MEM_REF)
1627 return NULL_TREE;
1628 rec = TREE_OPERAND (rhs, 0);
1629 if (TREE_CODE (rec) != ADDR_EXPR)
1630 return NULL_TREE;
1631 rec = TREE_OPERAND (rec, 0);
   /* The base must be a PARM_DECL whose type is shaped like a C++ member
      pointer; this also yields its pfn and delta fields.  */
1632 if (TREE_CODE (rec) != PARM_DECL
1633 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1634 return NULL_TREE;
1635 ref_offset = TREE_OPERAND (rhs, 1);
1637 if (use_delta)
1638 fld = delta_field;
1639 else
1640 fld = ptr_field;
1641 if (offset_p)
1642 *offset_p = int_bit_position (fld);
   /* Finally verify that the loaded field is the requested one; for the
      COMPONENT_REF form compare field decls, for the plain MEM_REF form
      compare byte positions.  */
1644 if (ref_field)
1646 if (integer_nonzerop (ref_offset))
1647 return NULL_TREE;
1648 return ref_field == fld ? rec : NULL_TREE;
1650 else
1651 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1652 : NULL_TREE;
1655 /* Returns true iff T is an SSA_NAME defined by a statement. */
1657 static bool
1658 ipa_is_ssa_with_stmt_def (tree t)
1660 if (TREE_CODE (t) == SSA_NAME
1661 && !SSA_NAME_IS_DEFAULT_DEF (t))
1662 return true;
1663 else
1664 return false;
1667 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1668 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1669 indirect call graph edge. */
1671 static struct cgraph_edge *
1672 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
1674 struct cgraph_edge *cs;
1676 cs = cgraph_edge (node, stmt);
1677 cs->indirect_info->param_index = param_index;
1678 cs->indirect_info->offset = 0;
1679 cs->indirect_info->polymorphic = 0;
1680 cs->indirect_info->agg_contents = 0;
1681 cs->indirect_info->member_ptr = 0;
1682 return cs;
1685 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1686 (described by INFO). PARMS_AINFO is a pointer to a vector containing
1687 intermediate information about each formal parameter. Currently it checks
1688 whether the call calls a pointer that is a formal parameter and if so, the
1689 parameter is marked with the called flag and an indirect call graph edge
1690 describing the call is created. This is very simple for ordinary pointers
1691 represented in SSA but not-so-nice when it comes to member pointers. The
1692 ugly part of this function does nothing more than trying to match the
1693 pattern of such a call. An example of such a pattern is the gimple dump
1694 below, the call is on the last line:
1696 <bb 2>:
1697 f$__delta_5 = f.__delta;
1698 f$__pfn_24 = f.__pfn;
1701 <bb 2>:
1702 f$__delta_5 = MEM[(struct *)&f];
1703 f$__pfn_24 = MEM[(struct *)&f + 4B];
1705 and a few lines below:
1707 <bb 5>
1708 D.2496_3 = (int) f$__pfn_24;
1709 D.2497_4 = D.2496_3 & 1;
1710 if (D.2497_4 != 0)
1711 goto <bb 3>;
1712 else
1713 goto <bb 4>;
1715 <bb 6>:
1716 D.2500_7 = (unsigned int) f$__delta_5;
1717 D.2501_8 = &S + D.2500_7;
1718 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1719 D.2503_10 = *D.2502_9;
1720 D.2504_12 = f$__pfn_24 + -1;
1721 D.2505_13 = (unsigned int) D.2504_12;
1722 D.2506_14 = D.2503_10 + D.2505_13;
1723 D.2507_15 = *D.2506_14;
1724 iftmp.11_16 = (String:: *) D.2507_15;
1726 <bb 7>:
1727 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1728 D.2500_19 = (unsigned int) f$__delta_5;
1729 D.2508_20 = &S + D.2500_19;
1730 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1732 Such patterns are results of simple calls to a member pointer:
1734 int doprinting (int (MyString::* f)(int) const)
1736 MyString S ("somestring");
1738 return (S.*f)(4);
1741 Moreover, the function also looks for called pointers loaded from aggregates
1742 passed by value or reference. */
1744 static void
1745 ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1746 struct ipa_node_params *info,
1747 struct param_analysis_info *parms_ainfo,
1748 gimple call, tree target)
1750 gimple def;
1751 tree n1, n2;
1752 gimple d1, d2;
1753 tree rec, rec2, cond;
1754 gimple branch;
1755 int index;
1756 basic_block bb, virt_bb, join;
1757 HOST_WIDE_INT offset;
1758 bool by_ref;
   /* Simple case: the called pointer is an unmodified formal parameter.  */
1760 if (SSA_NAME_IS_DEFAULT_DEF (target))
1762 tree var = SSA_NAME_VAR (target);
1763 index = ipa_get_param_decl_index (info, var);
1764 if (index >= 0)
1765 ipa_note_param_call (node, index, call);
1766 return;
   /* Second case: the called pointer is loaded from an aggregate passed by
      value or reference.  */
1769 def = SSA_NAME_DEF_STMT (target);
1770 if (gimple_assign_single_p (def)
1771 && ipa_load_from_parm_agg_1 (info->descriptors, parms_ainfo, def,
1772 gimple_assign_rhs1 (def), &index, &offset,
1773 &by_ref))
1775 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1776 cs->indirect_info->offset = offset;
1777 cs->indirect_info->agg_contents = 1;
1778 cs->indirect_info->by_ref = by_ref;
1779 return;
1782 /* Now we need to try to match the complex pattern of calling a member
1783 pointer. */
1784 if (gimple_code (def) != GIMPLE_PHI
1785 || gimple_phi_num_args (def) != 2
1786 || !POINTER_TYPE_P (TREE_TYPE (target))
1787 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1788 return;
1790 /* First, we need to check whether one of these is a load from a member
1791 pointer that is a parameter to this function. */
1792 n1 = PHI_ARG_DEF (def, 0);
1793 n2 = PHI_ARG_DEF (def, 1);
1794 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1795 return;
1796 d1 = SSA_NAME_DEF_STMT (n1);
1797 d2 = SSA_NAME_DEF_STMT (n2);
1799 join = gimple_bb (def);
1800 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
1802 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
1803 return;
1805 bb = EDGE_PRED (join, 0)->src;
1806 virt_bb = gimple_bb (d2);
1808 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
1810 bb = EDGE_PRED (join, 1)->src;
1811 virt_bb = gimple_bb (d1);
1813 else
1814 return;
1816 /* Second, we need to check that the basic blocks are laid out in the way
1817 corresponding to the pattern. */
1819 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1820 || single_pred (virt_bb) != bb
1821 || single_succ (virt_bb) != join)
1822 return;
1824 /* Third, let's see that the branching is done depending on the least
1825 significant bit of the pfn. */
1827 branch = last_stmt (bb);
1828 if (!branch || gimple_code (branch) != GIMPLE_COND)
1829 return;
1831 if ((gimple_cond_code (branch) != NE_EXPR
1832 && gimple_cond_code (branch) != EQ_EXPR)
1833 || !integer_zerop (gimple_cond_rhs (branch)))
1834 return;
1836 cond = gimple_cond_lhs (branch);
1837 if (!ipa_is_ssa_with_stmt_def (cond))
1838 return;
1840 def = SSA_NAME_DEF_STMT (cond);
1841 if (!is_gimple_assign (def)
1842 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1843 || !integer_onep (gimple_assign_rhs2 (def)))
1844 return;
1846 cond = gimple_assign_rhs1 (def);
1847 if (!ipa_is_ssa_with_stmt_def (cond))
1848 return;
1850 def = SSA_NAME_DEF_STMT (cond);
   /* Skip an optional intermediate conversion of the pfn value.  */
1852 if (is_gimple_assign (def)
1853 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
1855 cond = gimple_assign_rhs1 (def);
1856 if (!ipa_is_ssa_with_stmt_def (cond))
1857 return;
1858 def = SSA_NAME_DEF_STMT (cond);
   /* The value branched on must be loaded from the same member pointer
      parameter as the called value.  Which field depends on the target's
      virtualness-bit location.  */
1861 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1862 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1863 == ptrmemfunc_vbit_in_delta),
1864 NULL);
1865 if (rec != rec2)
1866 return;
1868 index = ipa_get_param_decl_index (info, rec);
1869 if (index >= 0
1870 && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
1872 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1873 cs->indirect_info->offset = offset;
1874 cs->indirect_info->agg_contents = 1;
1875 cs->indirect_info->member_ptr = 1;
1878 return;
1881 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1882 object referenced in the expression is a formal parameter of the caller
1883 (described by INFO), create a call note for the statement. */
1885 static void
1886 ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1887 struct ipa_node_params *info, gimple call,
1888 tree target)
1890 struct cgraph_edge *cs;
1891 struct cgraph_indirect_call_info *ii;
1892 struct ipa_jump_func jfunc;
1893 tree obj = OBJ_TYPE_REF_OBJECT (target);
1894 int index;
1895 HOST_WIDE_INT anc_offset;
1897 if (!flag_devirtualize)
1898 return;
1900 if (TREE_CODE (obj) != SSA_NAME)
1901 return;
   /* Either the object is the unmodified parameter itself...  */
1903 if (SSA_NAME_IS_DEFAULT_DEF (obj))
1905 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
1906 return;
1908 anc_offset = 0;
1909 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
1910 gcc_assert (index >= 0);
1911 if (detect_type_change_ssa (obj, call, &jfunc))
1912 return;
   /* ...or it is the address of an ancestor sub-object of a parameter.  */
1914 else
1916 gimple stmt = SSA_NAME_DEF_STMT (obj);
1917 tree expr;
1919 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
1920 if (!expr)
1921 return;
1922 index = ipa_get_param_decl_index (info,
1923 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
1924 gcc_assert (index >= 0);
1925 if (detect_type_change (obj, expr, call, &jfunc, anc_offset))
1926 return;
   /* Record the polymorphic indirect call on the edge.  */
1929 cs = ipa_note_param_call (node, index, call);
1930 ii = cs->indirect_info;
1931 ii->offset = anc_offset;
1932 ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
1933 ii->otr_type = obj_type_ref_class (target);
1934 ii->polymorphic = 1;
1937 /* Analyze a call statement CALL whether and how it utilizes formal parameters
1938 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
1939 containing intermediate information about each formal parameter. */
1941 static void
1942 ipa_analyze_call_uses (struct cgraph_node *node,
1943 struct ipa_node_params *info,
1944 struct param_analysis_info *parms_ainfo, gimple call)
1946 tree target = gimple_call_fn (call);
1948 if (!target)
1949 return;
1950 if (TREE_CODE (target) == SSA_NAME)
1951 ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
1952 else if (virtual_method_call_p (target))
1953 ipa_analyze_virtual_call_uses (node, info, call, target);
1957 /* Analyze the call statement STMT with respect to formal parameters (described
1958 in INFO) of caller given by NODE. Currently it only checks whether formal
1959 parameters are called. PARMS_AINFO is a pointer to a vector containing
1960 intermediate information about each formal parameter. */
1962 static void
1963 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
1964 struct param_analysis_info *parms_ainfo, gimple stmt)
1966 if (is_gimple_call (stmt))
1967 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
1970 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
1971 If OP is a parameter declaration, mark it as used in the info structure
1972 passed in DATA. */
1974 static bool
1975 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
1976 tree op, void *data)
1978 struct ipa_node_params *info = (struct ipa_node_params *) data;
1980 op = get_base_address (op);
1981 if (op
1982 && TREE_CODE (op) == PARM_DECL)
1984 int index = ipa_get_param_decl_index (info, op);
1985 gcc_assert (index >= 0);
1986 ipa_set_param_used (info, index, true);
1989 return false;
1992 /* Scan the function body of NODE and inspect the uses of formal parameters.
1993 Store the findings in various structures of the associated ipa_node_params
1994 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
1995 vector containing intermediate information about each formal parameter. */
1997 static void
1998 ipa_analyze_params_uses (struct cgraph_node *node,
1999 struct param_analysis_info *parms_ainfo)
2001 tree decl = node->symbol.decl;
2002 basic_block bb;
2003 struct function *func;
2004 gimple_stmt_iterator gsi;
2005 struct ipa_node_params *info = IPA_NODE_REF (node);
2006 int i;
2008 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
2009 return;
2011 info->uses_analysis_done = 1;
   /* If specialization options forbid analysis, conservatively mark every
      parameter as used with undescribed uses.  */
2012 if (ipa_func_spec_opts_forbid_analysis_p (node))
2014 for (i = 0; i < ipa_get_param_count (info); i++)
2016 ipa_set_param_used (info, i, true);
2017 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2019 return;
2022 for (i = 0; i < ipa_get_param_count (info); i++)
2024 tree parm = ipa_get_param (info, i);
2025 int controlled_uses = 0;
2027 /* For SSA regs see if parameter is used. For non-SSA we compute
2028 the flag during modification analysis. */
2029 if (is_gimple_reg (parm))
2031 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->symbol.decl),
2032 parm);
2033 if (ddef && !has_zero_uses (ddef))
2035 imm_use_iterator imm_iter;
2036 use_operand_p use_p;
2038 ipa_set_param_used (info, i, true);
   /* A use is "controlled" only when it appears directly in a call;
      any other use makes the parameter's uses undescribable.  */
2039 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2040 if (!is_gimple_call (USE_STMT (use_p)))
2042 controlled_uses = IPA_UNDESCRIBED_USE;
2043 break;
2045 else
2046 controlled_uses++;
2048 else
2049 controlled_uses = 0;
2051 else
2052 controlled_uses = IPA_UNDESCRIBED_USE;
2053 ipa_set_controlled_uses (info, i, controlled_uses);
   /* Walk all statements and PHIs, marking parameters referenced by loads,
      stores or address-takings as used.  */
2056 func = DECL_STRUCT_FUNCTION (decl);
2057 FOR_EACH_BB_FN (bb, func)
2059 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2061 gimple stmt = gsi_stmt (gsi);
2063 if (is_gimple_debug (stmt))
2064 continue;
2066 ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
2067 walk_stmt_load_store_addr_ops (stmt, info,
2068 visit_ref_for_mod_analysis,
2069 visit_ref_for_mod_analysis,
2070 visit_ref_for_mod_analysis);
2072 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2073 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
2074 visit_ref_for_mod_analysis,
2075 visit_ref_for_mod_analysis,
2076 visit_ref_for_mod_analysis);
2080 /* Free stuff in PARMS_AINFO, assume there are PARAM_COUNT parameters. */
2082 static void
2083 free_parms_ainfo (struct param_analysis_info *parms_ainfo, int param_count)
2085 int i;
2087 for (i = 0; i < param_count; i++)
2089 if (parms_ainfo[i].parm_visited_statements)
2090 BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
2091 if (parms_ainfo[i].pt_visited_statements)
2092 BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
2096 /* Initialize the array describing properties of formal parameters
2097 of NODE, analyze their uses and compute jump functions associated
2098 with actual arguments of calls from within NODE. */
2100 void
2101 ipa_analyze_node (struct cgraph_node *node)
2103 struct ipa_node_params *info;
2104 struct param_analysis_info *parms_ainfo;
2105 int param_count;
2107 ipa_check_create_node_params ();
2108 ipa_check_create_edge_args ();
2109 info = IPA_NODE_REF (node);
   /* Make NODE's function the current one while its body is inspected.  */
2110 push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
2111 ipa_initialize_node_params (node);
   /* Intermediate per-parameter scratch data lives on the stack for the
      duration of the analysis only.  */
2113 param_count = ipa_get_param_count (info);
2114 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
2115 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
2117 ipa_analyze_params_uses (node, parms_ainfo);
2118 ipa_compute_jump_functions (node, parms_ainfo);
2120 free_parms_ainfo (parms_ainfo, param_count);
2121 pop_cfun ();
2124 /* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF
2125 attempt a type-based devirtualization. If successful, return the
2126 target function declaration, otherwise return NULL. */
2128 tree
2129 ipa_intraprocedural_devirtualization (gimple call)
2131 tree binfo, token, fndecl;
2132 struct ipa_jump_func jfunc;
2133 tree otr = gimple_call_fn (call);
 /* Try to determine the static type of the object the virtual call is
    performed on; give up unless it is provably known.  */
2135 jfunc.type = IPA_JF_UNKNOWN;
2136 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
2137 call);
2138 if (jfunc.type != IPA_JF_KNOWN_TYPE)
2139 return NULL_TREE;
2140 binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
2141 if (!binfo)
2142 return NULL_TREE;
2143 token = OBJ_TYPE_REF_TOKEN (otr);
 /* Look the method up in the virtual table slot given by TOKEN; this may
    still return NULL_TREE when the slot cannot be resolved.  */
2144 fndecl = gimple_get_virt_method_for_binfo (tree_low_cst (token, 1),
2145 binfo);
2146 return fndecl;
2149 /* Update the jump function DST when the call graph edge corresponding to SRC
2150 is being inlined, knowing that DST is of type ancestor and src of known
2151 type. */
2153 static void
2154 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2155 struct ipa_jump_func *dst)
2157 HOST_WIDE_INT combined_offset;
2158 tree combined_type;
 /* If walking to the ancestor does not provably preserve the dynamic type,
    nothing useful can be said about the combined value.  */
2160 if (!ipa_get_jf_ancestor_type_preserved (dst))
2162 dst->type = IPA_JF_UNKNOWN;
2163 return;
 /* The resulting known-type function describes the SRC object shifted by
    the ancestor offset, viewed as the ancestor's type.  */
2166 combined_offset = ipa_get_jf_known_type_offset (src)
2167 + ipa_get_jf_ancestor_offset (dst);
2168 combined_type = ipa_get_jf_ancestor_type (dst);
2170 ipa_set_jf_known_type (dst, combined_offset,
2171 ipa_get_jf_known_type_base_type (src),
2172 combined_type);
2175 /* Update the jump functions associated with call graph edge E when the call
2176 graph edge CS is being inlined, assuming that E->caller is already (possibly
2177 indirectly) inlined into CS->callee and that E has not been inlined. */
2179 static void
2180 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2181 struct cgraph_edge *e)
2183 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2184 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2185 int count = ipa_get_cs_argument_count (args);
2186 int i;
2188 for (i = 0; i < count; i++)
2190 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
 /* Case 1: E's argument was described relative to an ancestor of a formal
    parameter of the function CS calls; compose with what CS passes.  */
2192 if (dst->type == IPA_JF_ANCESTOR)
2194 struct ipa_jump_func *src;
2195 int dst_fid = dst->value.ancestor.formal_id;
2197 /* Variable number of arguments can cause havoc if we try to access
2198 one that does not exist in the inlined edge. So make sure we
2199 don't. */
2200 if (dst_fid >= ipa_get_cs_argument_count (top))
2202 dst->type = IPA_JF_UNKNOWN;
2203 continue;
2206 src = ipa_get_ith_jump_func (top, dst_fid);
 /* Propagate aggregate contents from SRC, re-basing their offsets to the
    ancestor, provided the aggregate survives the ancestor walk.  */
2208 if (src->agg.items
2209 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2211 struct ipa_agg_jf_item *item;
2212 int j;
2214 /* Currently we do not produce clobber aggregate jump functions,
2215 replace with merging when we do. */
2216 gcc_assert (!dst->agg.items);
2218 dst->agg.items = vec_safe_copy (src->agg.items);
2219 dst->agg.by_ref = src->agg.by_ref;
2220 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2221 item->offset -= dst->value.ancestor.offset;
 /* Combine the scalar part of the jump function with SRC.  Preserved
    flags are and-ed: the composition only preserves a property if both
    steps do.  */
2224 if (src->type == IPA_JF_KNOWN_TYPE)
2225 combine_known_type_and_ancestor_jfs (src, dst);
2226 else if (src->type == IPA_JF_PASS_THROUGH
2227 && src->value.pass_through.operation == NOP_EXPR)
2229 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2230 dst->value.ancestor.agg_preserved &=
2231 src->value.pass_through.agg_preserved;
2232 dst->value.ancestor.type_preserved &=
2233 src->value.pass_through.type_preserved;
2235 else if (src->type == IPA_JF_ANCESTOR)
2237 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2238 dst->value.ancestor.offset += src->value.ancestor.offset;
2239 dst->value.ancestor.agg_preserved &=
2240 src->value.ancestor.agg_preserved;
2241 dst->value.ancestor.type_preserved &=
2242 src->value.ancestor.type_preserved;
2244 else
2245 dst->type = IPA_JF_UNKNOWN;
 /* Case 2: E's argument was a plain copy of a formal parameter of the
    function CS calls; substitute what CS actually passes.  */
2247 else if (dst->type == IPA_JF_PASS_THROUGH)
2249 struct ipa_jump_func *src;
2250 /* We must check range due to calls with variable number of arguments
2251 and we cannot combine jump functions with operations. */
2252 if (dst->value.pass_through.operation == NOP_EXPR
2253 && (dst->value.pass_through.formal_id
2254 < ipa_get_cs_argument_count (top)))
2256 int dst_fid = dst->value.pass_through.formal_id;
2257 src = ipa_get_ith_jump_func (top, dst_fid);
2258 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2260 switch (src->type)
2262 case IPA_JF_UNKNOWN:
2263 dst->type = IPA_JF_UNKNOWN;
2264 break;
2265 case IPA_JF_KNOWN_TYPE:
2266 ipa_set_jf_known_type (dst,
2267 ipa_get_jf_known_type_offset (src),
2268 ipa_get_jf_known_type_base_type (src),
2269 ipa_get_jf_known_type_base_type (src));
2270 break;
2271 case IPA_JF_CONST:
2272 ipa_set_jf_cst_copy (dst, src);
2273 break;
2275 case IPA_JF_PASS_THROUGH:
2277 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2278 enum tree_code operation;
2279 operation = ipa_get_jf_pass_through_operation (src);
 /* A chain of two simple pass-throughs collapses into one; an
    arithmetic one is copied as-is since DST has no operation.  */
2281 if (operation == NOP_EXPR)
2283 bool agg_p, type_p;
2284 agg_p = dst_agg_p
2285 && ipa_get_jf_pass_through_agg_preserved (src);
2286 type_p = ipa_get_jf_pass_through_type_preserved (src)
2287 && ipa_get_jf_pass_through_type_preserved (dst);
2288 ipa_set_jf_simple_pass_through (dst, formal_id,
2289 agg_p, type_p);
2291 else
2293 tree operand = ipa_get_jf_pass_through_operand (src);
2294 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2295 operation);
2297 break;
2299 case IPA_JF_ANCESTOR:
2301 bool agg_p, type_p;
2302 agg_p = dst_agg_p
2303 && ipa_get_jf_ancestor_agg_preserved (src);
2304 type_p = ipa_get_jf_ancestor_type_preserved (src)
2305 && ipa_get_jf_pass_through_type_preserved (dst);
2306 ipa_set_ancestor_jf (dst,
2307 ipa_get_jf_ancestor_offset (src),
2308 ipa_get_jf_ancestor_type (src),
2309 ipa_get_jf_ancestor_formal_id (src),
2310 agg_p, type_p);
2311 break;
2313 default:
2314 gcc_unreachable ();
 /* Aggregate contents pass through unmodified (no re-basing needed for
    a plain pass-through), when they provably survive the call.  */
2317 if (src->agg.items
2318 && (dst_agg_p || !src->agg.by_ref))
2320 /* Currently we do not produce clobber aggregate jump
2321 functions, replace with merging when we do. */
2322 gcc_assert (!dst->agg.items);
2324 dst->agg.by_ref = src->agg.by_ref;
2325 dst->agg.items = vec_safe_copy (src->agg.items);
2328 else
2329 dst->type = IPA_JF_UNKNOWN;
2334 /* If TARGET is an addr_expr of a function declaration, make it the destination
2335 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2337 struct cgraph_edge *
2338 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
2340 struct cgraph_node *callee;
2341 struct inline_edge_summary *es = inline_edge_summary (ie);
2342 bool unreachable = false;
2344 if (TREE_CODE (target) == ADDR_EXPR)
2345 target = TREE_OPERAND (target, 0);
2346 if (TREE_CODE (target) != FUNCTION_DECL)
2348 target = canonicalize_constructor_val (target, NULL);
2349 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2351 if (ie->indirect_info->member_ptr)
2352 /* Member pointer call that goes through a VMT lookup. */
2353 return NULL;
 /* Propagation says this call has a non-function target, so it can never
    be executed; redirect it to __builtin_unreachable.  */
2355 if (dump_file)
2356 fprintf (dump_file, "ipa-prop: Discovered direct call to non-function"
2357 " in %s/%i, making it unreachable.\n",
2358 cgraph_node_name (ie->caller), ie->caller->symbol.order);
2359 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2360 callee = cgraph_get_create_node (target);
2361 unreachable = true;
2363 else
2364 callee = cgraph_get_node (target);
2366 else
2367 callee = cgraph_get_node (target);
2369 /* Because may-edges are not explicitly represented and vtable may be external,
2370 we may create the first reference to the object in the unit. */
2371 if (!callee || callee->global.inlined_to)
2374 /* We are better to ensure we can refer to it.
2375 In the case of static functions we are out of luck, since we already
2376 removed its body. In the case of public functions we may or may
2377 not introduce the reference. */
2378 if (!canonicalize_constructor_val (target, NULL)
2379 || !TREE_PUBLIC (target))
2381 if (dump_file)
2382 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2383 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2384 xstrdup (cgraph_node_name (ie->caller)),
2385 ie->caller->symbol.order,
2386 xstrdup (cgraph_node_name (ie->callee)),
2387 ie->callee->symbol.order);
2388 return NULL;
2390 callee = cgraph_get_create_real_symbol_node (target);
2392 ipa_check_create_node_params ();
2394 /* We can not make edges to inline clones. It is bug that someone removed
2395 the cgraph node too early. */
2396 gcc_assert (!callee->global.inlined_to);
2398 if (dump_file && !unreachable)
2400 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
2401 "(%s/%i -> %s/%i), for stmt ",
2402 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2403 xstrdup (cgraph_node_name (ie->caller)),
2404 ie->caller->symbol.order,
2405 xstrdup (cgraph_node_name (callee)),
2406 callee->symbol.order);
2407 if (ie->call_stmt)
2408 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2409 else
2410 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2412 ie = cgraph_make_edge_direct (ie, callee);
 /* Re-fetch the summary: cgraph_make_edge_direct may have replaced the
    edge.  Adjust the size/time estimates for the now-direct call.  */
2413 es = inline_edge_summary (ie);
2414 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2415 - eni_size_weights.call_cost);
2416 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2417 - eni_time_weights.call_cost);
2419 return ie;
2422 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2423 return NULL if there is not any. BY_REF specifies whether the value has to
2424 be passed by reference or by value. */
2426 tree
2427 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2428 HOST_WIDE_INT offset, bool by_ref)
2430 struct ipa_agg_jf_item *item;
2431 int i;
 /* A by-value description cannot answer a by-reference query and
    vice versa.  */
2433 if (by_ref != agg->by_ref)
2434 return NULL;
 /* Linear scan; the item vectors are expected to be short.  */
2436 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2437 if (item->offset == offset)
2439 /* Currently we do not have clobber values, return NULL for them once
2440 we do. */
2441 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2442 return item->value;
2444 return NULL;
2447 /* Remove a reference to SYMBOL from the list of references of a node given by
2448 reference description RDESC. */
2450 static void
2451 remove_described_reference (symtab_node symbol, struct ipa_cst_ref_desc *rdesc)
2453 struct ipa_ref *to_del;
2454 struct cgraph_edge *origin;
2456 origin = rdesc->cs;
2457 to_del = ipa_find_reference ((symtab_node) origin->caller, symbol,
2458 origin->call_stmt, origin->lto_stmt_uid);
 /* The reference is guaranteed to exist as long as the descriptor's
    refcount bookkeeping is consistent.  */
2459 gcc_assert (to_del);
2460 ipa_remove_reference (to_del);
2461 if (dump_file)
2462 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2463 xstrdup (cgraph_node_name (origin->caller)),
2464 origin->caller->symbol.order, xstrdup (symtab_node_name (symbol)));
2467 /* If JFUNC has a reference description with refcount different from
2468 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2469 NULL. JFUNC must be a constant jump function. */
2471 static struct ipa_cst_ref_desc *
2472 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2474 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2475 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2476 return rdesc;
2477 else
2478 return NULL;
2481 /* Try to find a destination for indirect edge IE that corresponds to a simple
2482 call or a call of a member function pointer and where the destination is a
2483 pointer formal parameter described by jump function JFUNC. If it can be
2484 determined, return the newly direct edge, otherwise return NULL.
2485 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2487 static struct cgraph_edge *
2488 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2489 struct ipa_jump_func *jfunc,
2490 struct ipa_node_params *new_root_info)
2492 struct cgraph_edge *cs;
2493 tree target;
2494 bool agg_contents = ie->indirect_info->agg_contents;
2495 bool speculative = ie->speculative;
2496 struct ipa_cst_ref_desc *rdesc;
 /* The target either sits inside an aggregate passed to the function or is
    the scalar value of the jump function itself.  */
2498 if (ie->indirect_info->agg_contents)
2499 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2500 ie->indirect_info->offset,
2501 ie->indirect_info->by_ref);
2502 else
2503 target = ipa_value_from_jfunc (new_root_info, jfunc);
2504 if (!target)
2505 return NULL;
2506 cs = ipa_make_edge_direct_to_target (ie, target);
2508 /* FIXME: speculative edges can be handled. */
 /* Turning the edge direct consumed one controlled use of the constant;
    drop the associated reference once the count reaches zero.  */
2509 if (cs && !agg_contents && !speculative
2510 && jfunc->type == IPA_JF_CONST
2511 && (rdesc = jfunc_rdesc_usable (jfunc))
2512 && --rdesc->refcount == 0)
2513 remove_described_reference ((symtab_node) cs->callee, rdesc);
2515 return cs;
2518 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2519 call based on a formal parameter which is described by jump function JFUNC
2520 and if it can be determined, make it direct and return the direct edge.
2521 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
2522 are relative to. */
2524 static struct cgraph_edge *
2525 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2526 struct ipa_jump_func *jfunc,
2527 struct ipa_node_params *new_root_info)
2529 tree binfo, target;
2531 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
2533 if (!binfo)
2534 return NULL;
 /* The jump function may yield a constant rather than a BINFO; try to
    extract a BINFO of the expected type from it.  */
2536 if (TREE_CODE (binfo) != TREE_BINFO)
2538 binfo = gimple_extract_devirt_binfo_from_cst
2539 (binfo, ie->indirect_info->otr_type);
2540 if (!binfo)
2541 return NULL;
 /* Walk to the sub-object at the call's offset and look the method up in
    its virtual table.  */
2544 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
2545 ie->indirect_info->otr_type);
2546 if (binfo)
2547 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2548 binfo);
2549 else
2550 return NULL;
2552 if (target)
2553 return ipa_make_edge_direct_to_target (ie, target);
2554 else
2555 return NULL;
2558 /* Update the param called notes associated with NODE when CS is being inlined,
2559 assuming NODE is (potentially indirectly) inlined into CS->callee.
2560 Moreover, if the callee is discovered to be constant, create a new cgraph
2561 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2562 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
2564 static bool
2565 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2566 struct cgraph_node *node,
2567 vec<cgraph_edge_p> *new_edges)
2569 struct ipa_edge_args *top;
2570 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
2571 struct ipa_node_params *new_root_info;
2572 bool res = false;
2574 ipa_check_create_edge_args ();
2575 top = IPA_EDGE_REF (cs);
 /* Jump-function lattices must be interpreted relative to the root of the
    inline tree CS's caller belongs to.  */
2576 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
2577 ? cs->caller->global.inlined_to
2578 : cs->caller);
 /* NEXT_IE is cached up front because a successful devirtualization may
    remove IE from the indirect-call list.  */
2580 for (ie = node->indirect_calls; ie; ie = next_ie)
2582 struct cgraph_indirect_call_info *ici = ie->indirect_info;
2583 struct ipa_jump_func *jfunc;
2584 int param_index;
2586 next_ie = ie->next_callee;
2588 if (ici->param_index == -1)
2589 continue;
2591 /* We must check range due to calls with variable number of arguments: */
2592 if (ici->param_index >= ipa_get_cs_argument_count (top))
2594 ici->param_index = -1;
2595 continue;
2598 param_index = ici->param_index;
2599 jfunc = ipa_get_ith_jump_func (top, param_index);
2601 if (!flag_indirect_inlining)
2602 new_direct_edge = NULL;
2603 else if (ici->polymorphic)
2604 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
2605 new_root_info);
2606 else
2607 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
2608 new_root_info);
2609 /* If speculation was removed, then we need to do nothing. */
2610 if (new_direct_edge && new_direct_edge != ie)
2612 new_direct_edge->indirect_inlining_edge = 1;
 /* TOP is re-fetched because making an edge direct may reallocate the
    edge-args vector, invalidating the old pointer.  */
2613 top = IPA_EDGE_REF (cs);
2614 res = true;
2616 else if (new_direct_edge)
2618 new_direct_edge->indirect_inlining_edge = 1;
2619 if (new_direct_edge->call_stmt)
2620 new_direct_edge->call_stmt_cannot_inline_p
2621 = !gimple_check_call_matching_types (
2622 new_direct_edge->call_stmt,
2623 new_direct_edge->callee->symbol.decl, false);
2624 if (new_edges)
2626 new_edges->safe_push (new_direct_edge);
2627 res = true;
2629 top = IPA_EDGE_REF (cs);
 /* No direct target found; instead rewrite the parameter index so the
    indirect call is described relative to the new root's parameters.  */
2631 else if (jfunc->type == IPA_JF_PASS_THROUGH
2632 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2634 if (ici->agg_contents
2635 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
2636 ici->param_index = -1;
2637 else
2638 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
2640 else if (jfunc->type == IPA_JF_ANCESTOR)
2642 if (ici->agg_contents
2643 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
2644 ici->param_index = -1;
2645 else
2647 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
2648 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
2651 else
2652 /* Either we can find a destination for this edge now or never. */
2653 ici->param_index = -1;
2656 return res;
2659 /* Recursively traverse subtree of NODE (including node) made of inlined
2660 cgraph_edges when CS has been inlined and invoke
2661 update_indirect_edges_after_inlining on all nodes and
2662 update_jump_functions_after_inlining on all non-inlined edges that lead out
2663 of this subtree. Newly discovered indirect edges will be added to
2664 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
2665 created. */
2667 static bool
2668 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
2669 struct cgraph_node *node,
2670 vec<cgraph_edge_p> *new_edges)
2672 struct cgraph_edge *e;
2673 bool res;
2675 res = update_indirect_edges_after_inlining (cs, node, new_edges);
 /* Recurse into already-inlined callees; update jump functions on edges
    that leave the inline subtree.  */
2677 for (e = node->callees; e; e = e->next_callee)
2678 if (!e->inline_failed)
2679 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
2680 else
2681 update_jump_functions_after_inlining (cs, e);
2682 for (e = node->indirect_calls; e; e = e->next_callee)
2683 update_jump_functions_after_inlining (cs, e);
2685 return res;
2688 /* Combine two controlled uses counts as done during inlining. */
2690 static int
2691 combine_controlled_uses_counters (int c, int d)
2693 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
2694 return IPA_UNDESCRIBED_USE;
2695 else
2696 return c + d - 1;
2699 /* Propagate number of controlled users from CS->callee to the new root of the
2700 tree of inlined nodes. */
2702 static void
2703 propagate_controlled_uses (struct cgraph_edge *cs)
2705 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
2706 struct cgraph_node *new_root = cs->caller->global.inlined_to
2707 ? cs->caller->global.inlined_to : cs->caller;
2708 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
2709 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
2710 int count, i;
 /* Argument and parameter counts can differ for varargs and type
    mismatches; only the common prefix is described.  */
2712 count = MIN (ipa_get_cs_argument_count (args),
2713 ipa_get_param_count (old_root_info));
2714 for (i = 0; i < count; i++)
2716 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2717 struct ipa_cst_ref_desc *rdesc;
2719 if (jf->type == IPA_JF_PASS_THROUGH)
2721 int src_idx, c, d;
2722 src_idx = ipa_get_jf_pass_through_formal_id (jf);
2723 c = ipa_get_controlled_uses (new_root_info, src_idx);
2724 d = ipa_get_controlled_uses (old_root_info, i);
2726 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
2727 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
2728 c = combine_controlled_uses_counters (c, d);
2729 ipa_set_controlled_uses (new_root_info, src_idx, c);
 /* If the combined count dropped to zero in an IPA-CP clone, the
    cloning-created reference to the known function value is no longer
    needed and can be removed.  */
2730 if (c == 0 && new_root_info->ipcp_orig_node)
2732 struct cgraph_node *n;
2733 struct ipa_ref *ref;
2734 tree t = new_root_info->known_vals[src_idx];
2736 if (t && TREE_CODE (t) == ADDR_EXPR
2737 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
2738 && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
2739 && (ref = ipa_find_reference ((symtab_node) new_root,
2740 (symtab_node) n, NULL, 0)))
2742 if (dump_file)
2743 fprintf (dump_file, "ipa-prop: Removing cloning-created "
2744 "reference from %s/%i to %s/%i.\n",
2745 xstrdup (cgraph_node_name (new_root)),
2746 new_root->symbol.order,
2747 xstrdup (cgraph_node_name (n)), n->symbol.order);
2748 ipa_remove_reference (ref);
2752 else if (jf->type == IPA_JF_CONST
2753 && (rdesc = jfunc_rdesc_usable (jf)))
2755 int d = ipa_get_controlled_uses (old_root_info, i);
2756 int c = rdesc->refcount;
2757 rdesc->refcount = combine_controlled_uses_counters (c, d);
 /* All uses are accounted for; remove the described reference and
    any duplicates created by intermediate inline clones.  */
2758 if (rdesc->refcount == 0)
2760 tree cst = ipa_get_jf_constant (jf);
2761 struct cgraph_node *n;
2762 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
2763 && TREE_CODE (TREE_OPERAND (cst, 0))
2764 == FUNCTION_DECL);
2765 n = cgraph_get_node (TREE_OPERAND (cst, 0));
2766 if (n)
2768 struct cgraph_node *clone;
2769 remove_described_reference ((symtab_node) n, rdesc);
 /* Walk up the chain of inline-clone callers and drop their
    cloning-created references too.  */
2771 clone = cs->caller;
2772 while (clone->global.inlined_to
2773 && clone != rdesc->cs->caller
2774 && IPA_NODE_REF (clone)->ipcp_orig_node)
2776 struct ipa_ref *ref;
2777 ref = ipa_find_reference ((symtab_node) clone,
2778 (symtab_node) n, NULL, 0);
2779 if (ref)
2781 if (dump_file)
2782 fprintf (dump_file, "ipa-prop: Removing "
2783 "cloning-created reference "
2784 "from %s/%i to %s/%i.\n",
2785 xstrdup (cgraph_node_name (clone)),
2786 clone->symbol.order,
2787 xstrdup (cgraph_node_name (n)),
2788 n->symbol.order);
2789 ipa_remove_reference (ref);
2791 clone = clone->callers->caller;
 /* Arguments past the described parameters can be used in ways we cannot
    track; mark the corresponding counts as undescribed.  */
2798 for (i = ipa_get_param_count (old_root_info);
2799 i < ipa_get_cs_argument_count (args);
2800 i++)
2802 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2804 if (jf->type == IPA_JF_CONST)
2806 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
2807 if (rdesc)
2808 rdesc->refcount = IPA_UNDESCRIBED_USE;
2810 else if (jf->type == IPA_JF_PASS_THROUGH)
2811 ipa_set_controlled_uses (new_root_info,
2812 jf->value.pass_through.formal_id,
2813 IPA_UNDESCRIBED_USE);
2817 /* Update jump functions and call note functions on inlining the call site CS.
2818 CS is expected to lead to a node already cloned by
2819 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
2820 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
2821 created. */
2823 bool
2824 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
2825 vec<cgraph_edge_p> *new_edges)
2827 bool changed;
2828 /* Do nothing if the preparation phase has not been carried out yet
2829 (i.e. during early inlining). */
2830 if (!ipa_node_params_vector.exists ())
2831 return false;
2832 gcc_assert (ipa_edge_args_vector);
 /* Controlled-uses counts must be propagated before the jump functions of
    CS are consumed and rewritten below.  */
2834 propagate_controlled_uses (cs);
2835 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
2837 return changed;
2840 /* Frees all dynamically allocated structures that the argument info points
2841 to.  The struct itself is owned by the containing vector and is only
2842 cleared here, not freed. */
2843 void
2844 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
2846 vec_free (args->jump_functions);
2847 memset (args, 0, sizeof (*args));
2850 /* Free all ipa_edge_args structures together with the vector that holds
2851 them.  Safe to call when the vector was never created. */
2852 void
2853 ipa_free_all_edge_args (void)
2855 int i;
2856 struct ipa_edge_args *args;
2858 if (!ipa_edge_args_vector)
2859 return;
2861 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
2862 ipa_free_edge_args_substructures (args);
2864 vec_free (ipa_edge_args_vector);
2867 /* Frees all dynamically allocated structures that the param info points
2868 to.  The struct itself is owned by the containing vector and is only
2869 cleared here, not freed. */
2870 void
2871 ipa_free_node_params_substructures (struct ipa_node_params *info)
2873 info->descriptors.release ();
2874 free (info->lattices);
2875 /* Lattice values and their sources are deallocated with their allocation
2876 pool. */
2877 info->known_vals.release ();
2878 memset (info, 0, sizeof (*info));
2881 /* Free all ipa_node_params structures together with the vector that holds
2882 them. */
2883 void
2884 ipa_free_all_node_params (void)
2886 int i;
2887 struct ipa_node_params *info;
2889 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
2890 ipa_free_node_params_substructures (info);
2892 ipa_node_params_vector.release ();
2895 /* Set the aggregate replacements of NODE to be AGGVALS. */
2897 void
2898 ipa_set_node_agg_value_chain (struct cgraph_node *node,
2899 struct ipa_agg_replacement_value *aggvals)
 /* Grow to cgraph_max_uid so the vector covers every possible node uid,
    including NODE's (node->uid < cgraph_max_uid by construction).  */
2901 if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
2902 vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);
2904 (*ipa_node_agg_replacements)[node->uid] = aggvals;
2907 /* Hook that is called by cgraph.c when an edge is removed. */
2909 static void
2910 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
2912 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
2913 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
2914 return;
2915 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
2918 /* Hook that is called by cgraph.c when a node is removed. */
2920 static void
2921 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2923 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
2924 if (ipa_node_params_vector.length () > (unsigned)node->uid)
2925 ipa_free_node_params_substructures (IPA_NODE_REF (node));
 /* Aggregate replacement chains are GC-allocated; clearing the slot is
    enough to let the collector reclaim them.  */
2926 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
2927 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
2930 /* Hook that is called by cgraph.c when an edge is duplicated.  Copies the
2931 jump functions of SRC to DST, including aggregate items, and duplicates or
2932 re-links constant reference descriptors. */
2932 static void
2933 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
2934 __attribute__((unused)) void *data)
2936 struct ipa_edge_args *old_args, *new_args;
2937 unsigned int i;
2939 ipa_check_create_edge_args ();
2941 old_args = IPA_EDGE_REF (src);
2942 new_args = IPA_EDGE_REF (dst);
2944 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
2946 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
2948 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
2949 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
 /* After the shallow vec_safe_copy above, dst_jf->agg.items still aliases
    the source vector; copying it here gives DST its own deep copy.  */
2951 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
2953 if (src_jf->type == IPA_JF_CONST)
2955 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
2957 if (!src_rdesc)
2958 dst_jf->value.constant.rdesc = NULL;
2959 else if (src_rdesc->cs == src)
 /* SRC owns the descriptor: allocate a fresh one for DST.  Only inline
    clones are chained on next_duplicate so later duplications can find
    the descriptor belonging to their inline tree.  */
2961 struct ipa_cst_ref_desc *dst_rdesc;
2962 gcc_checking_assert (ipa_refdesc_pool);
2963 dst_rdesc
2964 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
2965 dst_rdesc->cs = dst;
2966 dst_rdesc->refcount = src_rdesc->refcount;
2967 if (dst->caller->global.inlined_to)
2969 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
2970 src_rdesc->next_duplicate = dst_rdesc;
2972 dst_jf->value.constant.rdesc = dst_rdesc;
2974 else
2976 struct ipa_cst_ref_desc *dst_rdesc;
2977 /* This can happen during inlining, when a JFUNC can refer to a
2978 reference taken in a function up in the tree of inline clones.
2979 We need to find the duplicate that refers to our tree of
2980 inline clones. */
2982 gcc_assert (dst->caller->global.inlined_to);
2983 for (dst_rdesc = src_rdesc->next_duplicate;
2984 dst_rdesc;
2985 dst_rdesc = dst_rdesc->next_duplicate)
2986 if (dst_rdesc->cs->caller->global.inlined_to
2987 == dst->caller->global.inlined_to)
2988 break;
2989 gcc_assert (dst_rdesc);
2990 dst_jf->value.constant.rdesc = dst_rdesc;
2996 /* Hook that is called by cgraph.c when a node is duplicated.  Copies the
2997 parameter info of SRC to DST and deep-copies the chain of aggregate
2998 replacement values, if any. */
2998 static void
2999 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
3000 ATTRIBUTE_UNUSED void *data)
3002 struct ipa_node_params *old_info, *new_info;
3003 struct ipa_agg_replacement_value *old_av, *new_av;
3005 ipa_check_create_node_params ();
3006 old_info = IPA_NODE_REF (src);
3007 new_info = IPA_NODE_REF (dst);
3009 new_info->descriptors = old_info->descriptors.copy ();
 /* Lattices are per-propagation scratch data and are not carried over.  */
3010 new_info->lattices = NULL;
3011 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3013 new_info->uses_analysis_done = old_info->uses_analysis_done;
3014 new_info->node_enqueued = old_info->node_enqueued;
3016 old_av = ipa_get_agg_replacements_for_node (src);
3017 if (!old_av)
3018 return;
 /* Deep-copy the chain; note the copy ends up in reverse order, which is
    harmless since the order of replacement values does not matter.  */
3020 new_av = NULL;
3021 while (old_av)
3023 struct ipa_agg_replacement_value *v;
3025 v = ggc_alloc_ipa_agg_replacement_value ();
3026 memcpy (v, old_av, sizeof (*v));
3027 v->next = new_av;
3028 new_av = v;
3029 old_av = old_av->next;
3031 ipa_set_node_agg_value_chain (dst, new_av);
3035 /* Analyze newly added function into callgraph.  Hook registered via
3036 cgraph_add_function_insertion_hook. */
3037 static void
3038 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3040 ipa_analyze_node (node);
3043 /* Register our cgraph hooks if they are not already there. */
3045 void
3046 ipa_register_cgraph_hooks (void)
3048 if (!edge_removal_hook_holder)
3049 edge_removal_hook_holder =
3050 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3051 if (!node_removal_hook_holder)
3052 node_removal_hook_holder =
3053 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
3054 if (!edge_duplication_hook_holder)
3055 edge_duplication_hook_holder =
3056 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3057 if (!node_duplication_hook_holder)
3058 node_duplication_hook_holder =
3059 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
 /* NOTE(review): unlike the hooks above, the insertion hook is registered
    unconditionally — presumably callers guarantee it is unset here;
    confirm against ipa_unregister_cgraph_hooks.  */
3060 function_insertion_hook_holder =
3061 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
3064 /* Unregister all our cgraph hooks and clear the holders so the hooks can
3065 later be registered again. */
3066 static void
3067 ipa_unregister_cgraph_hooks (void)
3069 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
3070 edge_removal_hook_holder = NULL;
3071 cgraph_remove_node_removal_hook (node_removal_hook_holder);
3072 node_removal_hook_holder = NULL;
3073 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
3074 edge_duplication_hook_holder = NULL;
3075 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
3076 node_duplication_hook_holder = NULL;
3077 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
3078 function_insertion_hook_holder = NULL;
3081 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3082 longer needed after ipa-cp. */
3084 void
3085 ipa_free_all_structures_after_ipa_cp (void)
 /* When optimizing, the structures are still needed by indirect inlining
    and are freed later in ipa_free_all_structures_after_iinln; presumably
    everything below is conditional on !optimize — TODO confirm against the
    brace structure in the original file.  */
3087 if (!optimize)
3089 ipa_free_all_edge_args ();
3090 ipa_free_all_node_params ();
3091 free_alloc_pool (ipcp_sources_pool);
3092 free_alloc_pool (ipcp_values_pool);
3093 free_alloc_pool (ipcp_agg_lattice_pool);
3094 ipa_unregister_cgraph_hooks ();
3095 if (ipa_refdesc_pool)
3096 free_alloc_pool (ipa_refdesc_pool);
3100 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3101 longer needed after indirect inlining. */
3103 void
3104 ipa_free_all_structures_after_iinln (void)
3106 ipa_free_all_edge_args ();
3107 ipa_free_all_node_params ();
3108 ipa_unregister_cgraph_hooks ();
 /* Unlike the post-ipa-cp variant, the pools may not exist here (e.g. when
    IPA-CP did not run), so each is checked before freeing.  */
3109 if (ipcp_sources_pool)
3110 free_alloc_pool (ipcp_sources_pool);
3111 if (ipcp_values_pool)
3112 free_alloc_pool (ipcp_values_pool);
3113 if (ipcp_agg_lattice_pool)
3114 free_alloc_pool (ipcp_agg_lattice_pool);
3115 if (ipa_refdesc_pool)
3116 free_alloc_pool (ipa_refdesc_pool);
3119 /* Print ipa_tree_map data structure of the single function NODE to F. */
3122 void
3123 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3125 int i, count;
3126 struct ipa_node_params *info;
 /* Nothing to print for declarations without a body.  */
3128 if (!node->symbol.definition)
3129 return;
3130 info = IPA_NODE_REF (node);
3131 fprintf (f, " function %s/%i parameter descriptors:\n",
3132 cgraph_node_name (node), node->symbol.order);
3133 count = ipa_get_param_count (info);
3134 for (i = 0; i < count; i++)
3136 int c;
3138 ipa_dump_param (f, info, i);
3139 if (ipa_is_param_used (info, i))
3140 fprintf (f, " used");
3141 c = ipa_get_controlled_uses (info, i);
3142 if (c == IPA_UNDESCRIBED_USE)
3143 fprintf (f, " undescribed_use");
3144 else
3145 fprintf (f, " controlled_uses=%i", c);
3146 fprintf (f, "\n");
3150 /* Print ipa_tree_map data structures of all functions in the
3151 callgraph to F. */
3153 void
3154 ipa_print_all_params (FILE * f)
3156 struct cgraph_node *node;
3158 fprintf (f, "\nFunction parameters:\n");
3159 FOR_EACH_FUNCTION (node)
3160 ipa_print_node_params (f, node);
3163 /* Return a heap allocated vector containing formal parameters of FNDECL. */
3165 vec<tree>
3166 ipa_get_vector_of_formal_parms (tree fndecl)
3168 vec<tree> args;
3169 int count;
3170 tree parm;
3172 gcc_assert (!flag_wpa);
3173 count = count_formal_params (fndecl);
3174 args.create (count);
3175 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3176 args.quick_push (parm);
3178 return args;
3181 /* Return a heap allocated vector containing types of formal parameters of
3182 function type FNTYPE. */
3184 static inline vec<tree>
3185 get_vector_of_formal_parm_types (tree fntype)
3187 vec<tree> types;
3188 int count = 0;
3189 tree t;
3191 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3192 count++;
3194 types.create (count);
3195 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3196 types.quick_push (TREE_VALUE (t));
3198 return types;
3201 /* Modify the function declaration FNDECL and its type according to the plan in
3202 ADJUSTMENTS. It also sets base fields of individual adjustments structures
3203 to reflect the actual parameters being modified which are determined by the
3204 base_index field. */
3206 void
3207 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
3208 const char *synth_parm_prefix)
3210 vec<tree> oparms, otypes;
3211 tree orig_type, new_type = NULL;
3212 tree old_arg_types, t, new_arg_types = NULL;
3213 tree parm, *link = &DECL_ARGUMENTS (fndecl);
3214 int i, len = adjustments.length ();
3215 tree new_reversed = NULL;
3216 bool care_for_types, last_parm_void;
3218 if (!synth_parm_prefix)
3219 synth_parm_prefix = "SYNTH";
3221 oparms = ipa_get_vector_of_formal_parms (fndecl);
3222 orig_type = TREE_TYPE (fndecl);
3223 old_arg_types = TYPE_ARG_TYPES (orig_type);
3225 /* The following test is an ugly hack, some functions simply don't have any
3226 arguments in their type. This is probably a bug but well... */
3227 care_for_types = (old_arg_types != NULL_TREE);
3228 if (care_for_types)
3230 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3231 == void_type_node);
3232 otypes = get_vector_of_formal_parm_types (orig_type);
3233 if (last_parm_void)
3234 gcc_assert (oparms.length () + 1 == otypes.length ());
3235 else
3236 gcc_assert (oparms.length () == otypes.length ());
3238 else
3240 last_parm_void = false;
3241 otypes.create (0);
3244 for (i = 0; i < len; i++)
3246 struct ipa_parm_adjustment *adj;
3247 gcc_assert (link);
3249 adj = &adjustments[i];
3250 parm = oparms[adj->base_index];
3251 adj->base = parm;
3253 if (adj->copy_param)
3255 if (care_for_types)
3256 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3257 new_arg_types);
3258 *link = parm;
3259 link = &DECL_CHAIN (parm);
3261 else if (!adj->remove_param)
3263 tree new_parm;
3264 tree ptype;
3266 if (adj->by_ref)
3267 ptype = build_pointer_type (adj->type);
3268 else
3269 ptype = adj->type;
3271 if (care_for_types)
3272 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3274 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3275 ptype);
3276 DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);
3278 DECL_ARTIFICIAL (new_parm) = 1;
3279 DECL_ARG_TYPE (new_parm) = ptype;
3280 DECL_CONTEXT (new_parm) = fndecl;
3281 TREE_USED (new_parm) = 1;
3282 DECL_IGNORED_P (new_parm) = 1;
3283 layout_decl (new_parm, 0);
3285 adj->base = parm;
3286 adj->reduction = new_parm;
3288 *link = new_parm;
3290 link = &DECL_CHAIN (new_parm);
3294 *link = NULL_TREE;
3296 if (care_for_types)
3298 new_reversed = nreverse (new_arg_types);
3299 if (last_parm_void)
3301 if (new_reversed)
3302 TREE_CHAIN (new_arg_types) = void_list_node;
3303 else
3304 new_reversed = void_list_node;
3308 /* Use copy_node to preserve as much as possible from original type
3309 (debug info, attribute lists etc.)
3310 Exception is METHOD_TYPEs must have THIS argument.
3311 When we are asked to remove it, we need to build new FUNCTION_TYPE
3312 instead. */
3313 if (TREE_CODE (orig_type) != METHOD_TYPE
3314 || (adjustments[0].copy_param
3315 && adjustments[0].base_index == 0))
3317 new_type = build_distinct_type_copy (orig_type);
3318 TYPE_ARG_TYPES (new_type) = new_reversed;
3320 else
3322 new_type
3323 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3324 new_reversed));
3325 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3326 DECL_VINDEX (fndecl) = NULL_TREE;
3329 /* When signature changes, we need to clear builtin info. */
3330 if (DECL_BUILT_IN (fndecl))
3332 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3333 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3336 /* This is a new type, not a copy of an old type. Need to reassociate
3337 variants. We can handle everything except the main variant lazily. */
3338 t = TYPE_MAIN_VARIANT (orig_type);
3339 if (orig_type != t)
3341 TYPE_MAIN_VARIANT (new_type) = t;
3342 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
3343 TYPE_NEXT_VARIANT (t) = new_type;
3345 else
3347 TYPE_MAIN_VARIANT (new_type) = new_type;
3348 TYPE_NEXT_VARIANT (new_type) = NULL;
3351 TREE_TYPE (fndecl) = new_type;
3352 DECL_VIRTUAL_P (fndecl) = 0;
3353 otypes.release ();
3354 oparms.release ();
3357 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
3358 If this is a directly recursive call, CS must be NULL. Otherwise it must
3359 contain the corresponding call graph edge. */
3361 void
3362 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
3363 ipa_parm_adjustment_vec adjustments)
3365 struct cgraph_node *current_node = cgraph_get_node (current_function_decl);
3366 vec<tree> vargs;
3367 vec<tree, va_gc> **debug_args = NULL;
3368 gimple new_stmt;
3369 gimple_stmt_iterator gsi, prev_gsi;
3370 tree callee_decl;
3371 int i, len;
3373 len = adjustments.length ();
3374 vargs.create (len);
3375 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->symbol.decl;
3376 ipa_remove_stmt_references ((symtab_node) current_node, stmt);
3378 gsi = gsi_for_stmt (stmt);
3379 prev_gsi = gsi;
3380 gsi_prev (&prev_gsi);
3381 for (i = 0; i < len; i++)
3383 struct ipa_parm_adjustment *adj;
3385 adj = &adjustments[i];
3387 if (adj->copy_param)
3389 tree arg = gimple_call_arg (stmt, adj->base_index);
3391 vargs.quick_push (arg);
3393 else if (!adj->remove_param)
3395 tree expr, base, off;
3396 location_t loc;
3397 unsigned int deref_align;
3398 bool deref_base = false;
3400 /* We create a new parameter out of the value of the old one, we can
3401 do the following kind of transformations:
3403 - A scalar passed by reference is converted to a scalar passed by
3404 value. (adj->by_ref is false and the type of the original
3405 actual argument is a pointer to a scalar).
3407 - A part of an aggregate is passed instead of the whole aggregate.
3408 The part can be passed either by value or by reference, this is
3409 determined by value of adj->by_ref. Moreover, the code below
3410 handles both situations when the original aggregate is passed by
3411 value (its type is not a pointer) and when it is passed by
3412 reference (it is a pointer to an aggregate).
3414 When the new argument is passed by reference (adj->by_ref is true)
3415 it must be a part of an aggregate and therefore we form it by
3416 simply taking the address of a reference inside the original
3417 aggregate. */
3419 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3420 base = gimple_call_arg (stmt, adj->base_index);
3421 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3422 : EXPR_LOCATION (base);
3424 if (TREE_CODE (base) != ADDR_EXPR
3425 && POINTER_TYPE_P (TREE_TYPE (base)))
3426 off = build_int_cst (adj->alias_ptr_type,
3427 adj->offset / BITS_PER_UNIT);
3428 else
3430 HOST_WIDE_INT base_offset;
3431 tree prev_base;
3432 bool addrof;
3434 if (TREE_CODE (base) == ADDR_EXPR)
3436 base = TREE_OPERAND (base, 0);
3437 addrof = true;
3439 else
3440 addrof = false;
3441 prev_base = base;
3442 base = get_addr_base_and_unit_offset (base, &base_offset);
3443 /* Aggregate arguments can have non-invariant addresses. */
3444 if (!base)
3446 base = build_fold_addr_expr (prev_base);
3447 off = build_int_cst (adj->alias_ptr_type,
3448 adj->offset / BITS_PER_UNIT);
3450 else if (TREE_CODE (base) == MEM_REF)
3452 if (!addrof)
3454 deref_base = true;
3455 deref_align = TYPE_ALIGN (TREE_TYPE (base));
3457 off = build_int_cst (adj->alias_ptr_type,
3458 base_offset
3459 + adj->offset / BITS_PER_UNIT);
3460 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
3461 off);
3462 base = TREE_OPERAND (base, 0);
3464 else
3466 off = build_int_cst (adj->alias_ptr_type,
3467 base_offset
3468 + adj->offset / BITS_PER_UNIT);
3469 base = build_fold_addr_expr (base);
3473 if (!adj->by_ref)
3475 tree type = adj->type;
3476 unsigned int align;
3477 unsigned HOST_WIDE_INT misalign;
3479 if (deref_base)
3481 align = deref_align;
3482 misalign = 0;
3484 else
3486 get_pointer_alignment_1 (base, &align, &misalign);
3487 if (TYPE_ALIGN (type) > align)
3488 align = TYPE_ALIGN (type);
3490 misalign += (tree_to_double_int (off)
3491 .sext (TYPE_PRECISION (TREE_TYPE (off))).low
3492 * BITS_PER_UNIT);
3493 misalign = misalign & (align - 1);
3494 if (misalign != 0)
3495 align = (misalign & -misalign);
3496 if (align < TYPE_ALIGN (type))
3497 type = build_aligned_type (type, align);
3498 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
3500 else
3502 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
3503 expr = build_fold_addr_expr (expr);
3506 expr = force_gimple_operand_gsi (&gsi, expr,
3507 adj->by_ref
3508 || is_gimple_reg_type (adj->type),
3509 NULL, true, GSI_SAME_STMT);
3510 vargs.quick_push (expr);
3512 if (!adj->copy_param && MAY_HAVE_DEBUG_STMTS)
3514 unsigned int ix;
3515 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
3516 gimple def_temp;
3518 arg = gimple_call_arg (stmt, adj->base_index);
3519 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
3521 if (!fold_convertible_p (TREE_TYPE (origin), arg))
3522 continue;
3523 arg = fold_convert_loc (gimple_location (stmt),
3524 TREE_TYPE (origin), arg);
3526 if (debug_args == NULL)
3527 debug_args = decl_debug_args_insert (callee_decl);
3528 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
3529 if (ddecl == origin)
3531 ddecl = (**debug_args)[ix + 1];
3532 break;
3534 if (ddecl == NULL)
3536 ddecl = make_node (DEBUG_EXPR_DECL);
3537 DECL_ARTIFICIAL (ddecl) = 1;
3538 TREE_TYPE (ddecl) = TREE_TYPE (origin);
3539 DECL_MODE (ddecl) = DECL_MODE (origin);
3541 vec_safe_push (*debug_args, origin);
3542 vec_safe_push (*debug_args, ddecl);
3544 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
3545 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
3549 if (dump_file && (dump_flags & TDF_DETAILS))
3551 fprintf (dump_file, "replacing stmt:");
3552 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
3555 new_stmt = gimple_build_call_vec (callee_decl, vargs);
3556 vargs.release ();
3557 if (gimple_call_lhs (stmt))
3558 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3560 gimple_set_block (new_stmt, gimple_block (stmt));
3561 if (gimple_has_location (stmt))
3562 gimple_set_location (new_stmt, gimple_location (stmt));
3563 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3564 gimple_call_copy_flags (new_stmt, stmt);
3566 if (dump_file && (dump_flags & TDF_DETAILS))
3568 fprintf (dump_file, "with stmt:");
3569 print_gimple_stmt (dump_file, new_stmt, 0, 0);
3570 fprintf (dump_file, "\n");
3572 gsi_replace (&gsi, new_stmt, true);
3573 if (cs)
3574 cgraph_set_call_stmt (cs, new_stmt);
3577 ipa_record_stmt_references (current_node, gsi_stmt (gsi));
3578 gsi_prev (&gsi);
3580 while ((gsi_end_p (prev_gsi) && !gsi_end_p (gsi))
3581 || (!gsi_end_p (prev_gsi) && gsi_stmt (gsi) == gsi_stmt (prev_gsi)));
3583 update_ssa (TODO_update_ssa);
3584 free_dominance_info (CDI_DOMINATORS);
3587 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
3589 static bool
3590 index_in_adjustments_multiple_times_p (int base_index,
3591 ipa_parm_adjustment_vec adjustments)
3593 int i, len = adjustments.length ();
3594 bool one = false;
3596 for (i = 0; i < len; i++)
3598 struct ipa_parm_adjustment *adj;
3599 adj = &adjustments[i];
3601 if (adj->base_index == base_index)
3603 if (one)
3604 return true;
3605 else
3606 one = true;
3609 return false;
3613 /* Return adjustments that should have the same effect on function parameters
3614 and call arguments as if they were first changed according to adjustments in
3615 INNER and then by adjustments in OUTER. */
3617 ipa_parm_adjustment_vec
3618 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
3619 ipa_parm_adjustment_vec outer)
3621 int i, outlen = outer.length ();
3622 int inlen = inner.length ();
3623 int removals = 0;
3624 ipa_parm_adjustment_vec adjustments, tmp;
3626 tmp.create (inlen);
3627 for (i = 0; i < inlen; i++)
3629 struct ipa_parm_adjustment *n;
3630 n = &inner[i];
3632 if (n->remove_param)
3633 removals++;
3634 else
3635 tmp.quick_push (*n);
3638 adjustments.create (outlen + removals);
3639 for (i = 0; i < outlen; i++)
3641 struct ipa_parm_adjustment r;
3642 struct ipa_parm_adjustment *out = &outer[i];
3643 struct ipa_parm_adjustment *in = &tmp[out->base_index];
3645 memset (&r, 0, sizeof (r));
3646 gcc_assert (!in->remove_param);
3647 if (out->remove_param)
3649 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
3651 r.remove_param = true;
3652 adjustments.quick_push (r);
3654 continue;
3657 r.base_index = in->base_index;
3658 r.type = out->type;
3660 /* FIXME: Create nonlocal value too. */
3662 if (in->copy_param && out->copy_param)
3663 r.copy_param = true;
3664 else if (in->copy_param)
3665 r.offset = out->offset;
3666 else if (out->copy_param)
3667 r.offset = in->offset;
3668 else
3669 r.offset = in->offset + out->offset;
3670 adjustments.quick_push (r);
3673 for (i = 0; i < inlen; i++)
3675 struct ipa_parm_adjustment *n = &inner[i];
3677 if (n->remove_param)
3678 adjustments.quick_push (*n);
3681 tmp.release ();
3682 return adjustments;
3685 /* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
3686 friendly way, assuming they are meant to be applied to FNDECL. */
3688 void
3689 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
3690 tree fndecl)
3692 int i, len = adjustments.length ();
3693 bool first = true;
3694 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
3696 fprintf (file, "IPA param adjustments: ");
3697 for (i = 0; i < len; i++)
3699 struct ipa_parm_adjustment *adj;
3700 adj = &adjustments[i];
3702 if (!first)
3703 fprintf (file, " ");
3704 else
3705 first = false;
3707 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
3708 print_generic_expr (file, parms[adj->base_index], 0);
3709 if (adj->base)
3711 fprintf (file, ", base: ");
3712 print_generic_expr (file, adj->base, 0);
3714 if (adj->reduction)
3716 fprintf (file, ", reduction: ");
3717 print_generic_expr (file, adj->reduction, 0);
3719 if (adj->new_ssa_base)
3721 fprintf (file, ", new_ssa_base: ");
3722 print_generic_expr (file, adj->new_ssa_base, 0);
3725 if (adj->copy_param)
3726 fprintf (file, ", copy_param");
3727 else if (adj->remove_param)
3728 fprintf (file, ", remove_param");
3729 else
3730 fprintf (file, ", offset %li", (long) adj->offset);
3731 if (adj->by_ref)
3732 fprintf (file, ", by_ref");
3733 print_node_brief (file, ", type: ", adj->type, 0);
3734 fprintf (file, "\n");
3736 parms.release ();
3739 /* Dump the AV linked list. */
3741 void
3742 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
3744 bool comma = false;
3745 fprintf (f, " Aggregate replacements:");
3746 for (; av; av = av->next)
3748 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
3749 av->index, av->offset);
3750 print_generic_expr (f, av->value, 0);
3751 comma = true;
3753 fprintf (f, "\n");
3756 /* Stream out jump function JUMP_FUNC to OB. */
3758 static void
3759 ipa_write_jump_function (struct output_block *ob,
3760 struct ipa_jump_func *jump_func)
3762 struct ipa_agg_jf_item *item;
3763 struct bitpack_d bp;
3764 int i, count;
3766 streamer_write_uhwi (ob, jump_func->type);
3767 switch (jump_func->type)
3769 case IPA_JF_UNKNOWN:
3770 break;
3771 case IPA_JF_KNOWN_TYPE:
3772 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
3773 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
3774 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
3775 break;
3776 case IPA_JF_CONST:
3777 gcc_assert (
3778 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
3779 stream_write_tree (ob, jump_func->value.constant.value, true);
3780 break;
3781 case IPA_JF_PASS_THROUGH:
3782 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
3783 if (jump_func->value.pass_through.operation == NOP_EXPR)
3785 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
3786 bp = bitpack_create (ob->main_stream);
3787 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
3788 bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
3789 streamer_write_bitpack (&bp);
3791 else
3793 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
3794 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
3796 break;
3797 case IPA_JF_ANCESTOR:
3798 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
3799 stream_write_tree (ob, jump_func->value.ancestor.type, true);
3800 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
3801 bp = bitpack_create (ob->main_stream);
3802 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
3803 bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
3804 streamer_write_bitpack (&bp);
3805 break;
3808 count = vec_safe_length (jump_func->agg.items);
3809 streamer_write_uhwi (ob, count);
3810 if (count)
3812 bp = bitpack_create (ob->main_stream);
3813 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
3814 streamer_write_bitpack (&bp);
3817 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
3819 streamer_write_uhwi (ob, item->offset);
3820 stream_write_tree (ob, item->value, true);
3824 /* Read in jump function JUMP_FUNC from IB. */
3826 static void
3827 ipa_read_jump_function (struct lto_input_block *ib,
3828 struct ipa_jump_func *jump_func,
3829 struct cgraph_edge *cs,
3830 struct data_in *data_in)
3832 enum jump_func_type jftype;
3833 enum tree_code operation;
3834 int i, count;
3836 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
3837 switch (jftype)
3839 case IPA_JF_UNKNOWN:
3840 jump_func->type = IPA_JF_UNKNOWN;
3841 break;
3842 case IPA_JF_KNOWN_TYPE:
3844 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
3845 tree base_type = stream_read_tree (ib, data_in);
3846 tree component_type = stream_read_tree (ib, data_in);
3848 ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
3849 break;
3851 case IPA_JF_CONST:
3852 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
3853 break;
3854 case IPA_JF_PASS_THROUGH:
3855 operation = (enum tree_code) streamer_read_uhwi (ib);
3856 if (operation == NOP_EXPR)
3858 int formal_id = streamer_read_uhwi (ib);
3859 struct bitpack_d bp = streamer_read_bitpack (ib);
3860 bool agg_preserved = bp_unpack_value (&bp, 1);
3861 bool type_preserved = bp_unpack_value (&bp, 1);
3862 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
3863 type_preserved);
3865 else
3867 tree operand = stream_read_tree (ib, data_in);
3868 int formal_id = streamer_read_uhwi (ib);
3869 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
3870 operation);
3872 break;
3873 case IPA_JF_ANCESTOR:
3875 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
3876 tree type = stream_read_tree (ib, data_in);
3877 int formal_id = streamer_read_uhwi (ib);
3878 struct bitpack_d bp = streamer_read_bitpack (ib);
3879 bool agg_preserved = bp_unpack_value (&bp, 1);
3880 bool type_preserved = bp_unpack_value (&bp, 1);
3882 ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
3883 type_preserved);
3884 break;
3888 count = streamer_read_uhwi (ib);
3889 vec_alloc (jump_func->agg.items, count);
3890 if (count)
3892 struct bitpack_d bp = streamer_read_bitpack (ib);
3893 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
3895 for (i = 0; i < count; i++)
3897 struct ipa_agg_jf_item item;
3898 item.offset = streamer_read_uhwi (ib);
3899 item.value = stream_read_tree (ib, data_in);
3900 jump_func->agg.items->quick_push (item);
3904 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
3905 relevant to indirect inlining to OB. */
3907 static void
3908 ipa_write_indirect_edge_info (struct output_block *ob,
3909 struct cgraph_edge *cs)
3911 struct cgraph_indirect_call_info *ii = cs->indirect_info;
3912 struct bitpack_d bp;
3914 streamer_write_hwi (ob, ii->param_index);
3915 streamer_write_hwi (ob, ii->offset);
3916 bp = bitpack_create (ob->main_stream);
3917 bp_pack_value (&bp, ii->polymorphic, 1);
3918 bp_pack_value (&bp, ii->agg_contents, 1);
3919 bp_pack_value (&bp, ii->member_ptr, 1);
3920 bp_pack_value (&bp, ii->by_ref, 1);
3921 streamer_write_bitpack (&bp);
3923 if (ii->polymorphic)
3925 streamer_write_hwi (ob, ii->otr_token);
3926 stream_write_tree (ob, ii->otr_type, true);
3930 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
3931 relevant to indirect inlining from IB. */
3933 static void
3934 ipa_read_indirect_edge_info (struct lto_input_block *ib,
3935 struct data_in *data_in ATTRIBUTE_UNUSED,
3936 struct cgraph_edge *cs)
3938 struct cgraph_indirect_call_info *ii = cs->indirect_info;
3939 struct bitpack_d bp;
3941 ii->param_index = (int) streamer_read_hwi (ib);
3942 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
3943 bp = streamer_read_bitpack (ib);
3944 ii->polymorphic = bp_unpack_value (&bp, 1);
3945 ii->agg_contents = bp_unpack_value (&bp, 1);
3946 ii->member_ptr = bp_unpack_value (&bp, 1);
3947 ii->by_ref = bp_unpack_value (&bp, 1);
3948 if (ii->polymorphic)
3950 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
3951 ii->otr_type = stream_read_tree (ib, data_in);
3955 /* Stream out NODE info to OB. */
3957 static void
3958 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
3960 int node_ref;
3961 lto_symtab_encoder_t encoder;
3962 struct ipa_node_params *info = IPA_NODE_REF (node);
3963 int j;
3964 struct cgraph_edge *e;
3965 struct bitpack_d bp;
3967 encoder = ob->decl_state->symtab_node_encoder;
3968 node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
3969 streamer_write_uhwi (ob, node_ref);
3971 streamer_write_uhwi (ob, ipa_get_param_count (info));
3972 for (j = 0; j < ipa_get_param_count (info); j++)
3973 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
3974 bp = bitpack_create (ob->main_stream);
3975 gcc_assert (info->uses_analysis_done
3976 || ipa_get_param_count (info) == 0);
3977 gcc_assert (!info->node_enqueued);
3978 gcc_assert (!info->ipcp_orig_node);
3979 for (j = 0; j < ipa_get_param_count (info); j++)
3980 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
3981 streamer_write_bitpack (&bp);
3982 for (j = 0; j < ipa_get_param_count (info); j++)
3983 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
3984 for (e = node->callees; e; e = e->next_callee)
3986 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3988 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
3989 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
3990 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
3992 for (e = node->indirect_calls; e; e = e->next_callee)
3994 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3996 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
3997 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
3998 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
3999 ipa_write_indirect_edge_info (ob, e);
4003 /* Stream in NODE info from IB. */
4005 static void
4006 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4007 struct data_in *data_in)
4009 struct ipa_node_params *info = IPA_NODE_REF (node);
4010 int k;
4011 struct cgraph_edge *e;
4012 struct bitpack_d bp;
4014 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4016 for (k = 0; k < ipa_get_param_count (info); k++)
4017 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4019 bp = streamer_read_bitpack (ib);
4020 if (ipa_get_param_count (info) != 0)
4021 info->uses_analysis_done = true;
4022 info->node_enqueued = false;
4023 for (k = 0; k < ipa_get_param_count (info); k++)
4024 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4025 for (k = 0; k < ipa_get_param_count (info); k++)
4026 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4027 for (e = node->callees; e; e = e->next_callee)
4029 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4030 int count = streamer_read_uhwi (ib);
4032 if (!count)
4033 continue;
4034 vec_safe_grow_cleared (args->jump_functions, count);
4036 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4037 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4038 data_in);
4040 for (e = node->indirect_calls; e; e = e->next_callee)
4042 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4043 int count = streamer_read_uhwi (ib);
4045 if (count)
4047 vec_safe_grow_cleared (args->jump_functions, count);
4048 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4049 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4050 data_in);
4052 ipa_read_indirect_edge_info (ib, data_in, e);
4056 /* Write jump functions for nodes in SET. */
4058 void
4059 ipa_prop_write_jump_functions (void)
4061 struct cgraph_node *node;
4062 struct output_block *ob;
4063 unsigned int count = 0;
4064 lto_symtab_encoder_iterator lsei;
4065 lto_symtab_encoder_t encoder;
4068 if (!ipa_node_params_vector.exists ())
4069 return;
4071 ob = create_output_block (LTO_section_jump_functions);
4072 encoder = ob->decl_state->symtab_node_encoder;
4073 ob->cgraph_node = NULL;
4074 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4075 lsei_next_function_in_partition (&lsei))
4077 node = lsei_cgraph_node (lsei);
4078 if (cgraph_function_with_gimple_body_p (node)
4079 && IPA_NODE_REF (node) != NULL)
4080 count++;
4083 streamer_write_uhwi (ob, count);
4085 /* Process all of the functions. */
4086 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4087 lsei_next_function_in_partition (&lsei))
4089 node = lsei_cgraph_node (lsei);
4090 if (cgraph_function_with_gimple_body_p (node)
4091 && IPA_NODE_REF (node) != NULL)
4092 ipa_write_node_info (ob, node);
4094 streamer_write_char_stream (ob->main_stream, 0);
4095 produce_asm (ob, NULL);
4096 destroy_output_block (ob);
4099 /* Read section in file FILE_DATA of length LEN with data DATA. */
4101 static void
4102 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4103 size_t len)
4105 const struct lto_function_header *header =
4106 (const struct lto_function_header *) data;
4107 const int cfg_offset = sizeof (struct lto_function_header);
4108 const int main_offset = cfg_offset + header->cfg_size;
4109 const int string_offset = main_offset + header->main_size;
4110 struct data_in *data_in;
4111 struct lto_input_block ib_main;
4112 unsigned int i;
4113 unsigned int count;
4115 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4116 header->main_size);
4118 data_in =
4119 lto_data_in_create (file_data, (const char *) data + string_offset,
4120 header->string_size, vNULL);
4121 count = streamer_read_uhwi (&ib_main);
4123 for (i = 0; i < count; i++)
4125 unsigned int index;
4126 struct cgraph_node *node;
4127 lto_symtab_encoder_t encoder;
4129 index = streamer_read_uhwi (&ib_main);
4130 encoder = file_data->symtab_node_encoder;
4131 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4132 gcc_assert (node->symbol.definition);
4133 ipa_read_node_info (&ib_main, node, data_in);
4135 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4136 len);
4137 lto_data_in_delete (data_in);
4140 /* Read ipcp jump functions. */
4142 void
4143 ipa_prop_read_jump_functions (void)
4145 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4146 struct lto_file_decl_data *file_data;
4147 unsigned int j = 0;
4149 ipa_check_create_node_params ();
4150 ipa_check_create_edge_args ();
4151 ipa_register_cgraph_hooks ();
4153 while ((file_data = file_data_vec[j++]))
4155 size_t len;
4156 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4158 if (data)
4159 ipa_prop_read_section (file_data, data, len);
/* After merging units, we can get mismatch in argument counts.
   Also decl merging might've rendered parameter lists obsolete.
   Also compute called_with_variable_arg info.  */

void
ipa_update_after_lto_read (void)
{
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
}
4174 void
4175 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
4177 int node_ref;
4178 unsigned int count = 0;
4179 lto_symtab_encoder_t encoder;
4180 struct ipa_agg_replacement_value *aggvals, *av;
4182 aggvals = ipa_get_agg_replacements_for_node (node);
4183 encoder = ob->decl_state->symtab_node_encoder;
4184 node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
4185 streamer_write_uhwi (ob, node_ref);
4187 for (av = aggvals; av; av = av->next)
4188 count++;
4189 streamer_write_uhwi (ob, count);
4191 for (av = aggvals; av; av = av->next)
4193 struct bitpack_d bp;
4195 streamer_write_uhwi (ob, av->offset);
4196 streamer_write_uhwi (ob, av->index);
4197 stream_write_tree (ob, av->value, true);
4199 bp = bitpack_create (ob->main_stream);
4200 bp_pack_value (&bp, av->by_ref, 1);
4201 streamer_write_bitpack (&bp);
4205 /* Stream in the aggregate value replacement chain for NODE from IB. */
4207 static void
4208 read_agg_replacement_chain (struct lto_input_block *ib,
4209 struct cgraph_node *node,
4210 struct data_in *data_in)
4212 struct ipa_agg_replacement_value *aggvals = NULL;
4213 unsigned int count, i;
4215 count = streamer_read_uhwi (ib);
4216 for (i = 0; i <count; i++)
4218 struct ipa_agg_replacement_value *av;
4219 struct bitpack_d bp;
4221 av = ggc_alloc_ipa_agg_replacement_value ();
4222 av->offset = streamer_read_uhwi (ib);
4223 av->index = streamer_read_uhwi (ib);
4224 av->value = stream_read_tree (ib, data_in);
4225 bp = streamer_read_bitpack (ib);
4226 av->by_ref = bp_unpack_value (&bp, 1);
4227 av->next = aggvals;
4228 aggvals = av;
4230 ipa_set_node_agg_value_chain (node, aggvals);
4233 /* Write all aggregate replacement for nodes in set. */
4235 void
4236 ipa_prop_write_all_agg_replacement (void)
4238 struct cgraph_node *node;
4239 struct output_block *ob;
4240 unsigned int count = 0;
4241 lto_symtab_encoder_iterator lsei;
4242 lto_symtab_encoder_t encoder;
4244 if (!ipa_node_agg_replacements)
4245 return;
4247 ob = create_output_block (LTO_section_ipcp_transform);
4248 encoder = ob->decl_state->symtab_node_encoder;
4249 ob->cgraph_node = NULL;
4250 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4251 lsei_next_function_in_partition (&lsei))
4253 node = lsei_cgraph_node (lsei);
4254 if (cgraph_function_with_gimple_body_p (node)
4255 && ipa_get_agg_replacements_for_node (node) != NULL)
4256 count++;
4259 streamer_write_uhwi (ob, count);
4261 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4262 lsei_next_function_in_partition (&lsei))
4264 node = lsei_cgraph_node (lsei);
4265 if (cgraph_function_with_gimple_body_p (node)
4266 && ipa_get_agg_replacements_for_node (node) != NULL)
4267 write_agg_replacement_chain (ob, node);
4269 streamer_write_char_stream (ob->main_stream, 0);
4270 produce_asm (ob, NULL);
4271 destroy_output_block (ob);
4274 /* Read replacements section in file FILE_DATA of length LEN with data
4275 DATA. */
4277 static void
4278 read_replacements_section (struct lto_file_decl_data *file_data,
4279 const char *data,
4280 size_t len)
4282 const struct lto_function_header *header =
4283 (const struct lto_function_header *) data;
4284 const int cfg_offset = sizeof (struct lto_function_header);
4285 const int main_offset = cfg_offset + header->cfg_size;
4286 const int string_offset = main_offset + header->main_size;
4287 struct data_in *data_in;
4288 struct lto_input_block ib_main;
4289 unsigned int i;
4290 unsigned int count;
4292 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4293 header->main_size);
4295 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
4296 header->string_size, vNULL);
4297 count = streamer_read_uhwi (&ib_main);
4299 for (i = 0; i < count; i++)
4301 unsigned int index;
4302 struct cgraph_node *node;
4303 lto_symtab_encoder_t encoder;
4305 index = streamer_read_uhwi (&ib_main);
4306 encoder = file_data->symtab_node_encoder;
4307 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4308 gcc_assert (node->symbol.definition);
4309 read_agg_replacement_chain (&ib_main, node, data_in);
4311 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4312 len);
4313 lto_data_in_delete (data_in);
4316 /* Read IPA-CP aggregate replacements. */
4318 void
4319 ipa_prop_read_all_agg_replacement (void)
4321 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4322 struct lto_file_decl_data *file_data;
4323 unsigned int j = 0;
4325 while ((file_data = file_data_vec[j++]))
4327 size_t len;
4328 const char *data = lto_get_section_data (file_data,
4329 LTO_section_ipcp_transform,
4330 NULL, &len);
4331 if (data)
4332 read_replacements_section (file_data, data, len);
4336 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4337 NODE. */
4339 static void
4340 adjust_agg_replacement_values (struct cgraph_node *node,
4341 struct ipa_agg_replacement_value *aggval)
4343 struct ipa_agg_replacement_value *v;
4344 int i, c = 0, d = 0, *adj;
4346 if (!node->clone.combined_args_to_skip)
4347 return;
4349 for (v = aggval; v; v = v->next)
4351 gcc_assert (v->index >= 0);
4352 if (c < v->index)
4353 c = v->index;
4355 c++;
4357 adj = XALLOCAVEC (int, c);
4358 for (i = 0; i < c; i++)
4359 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
4361 adj[i] = -1;
4362 d++;
4364 else
4365 adj[i] = i - d;
4367 for (v = aggval; v; v = v->next)
4368 v->index = adj[v->index];
4372 /* Function body transformation phase. */
4374 unsigned int
4375 ipcp_transform_function (struct cgraph_node *node)
4377 vec<ipa_param_descriptor_t> descriptors = vNULL;
4378 struct param_analysis_info *parms_ainfo;
4379 struct ipa_agg_replacement_value *aggval;
4380 gimple_stmt_iterator gsi;
4381 basic_block bb;
4382 int param_count;
4383 bool cfg_changed = false, something_changed = false;
4385 gcc_checking_assert (cfun);
4386 gcc_checking_assert (current_function_decl);
4388 if (dump_file)
4389 fprintf (dump_file, "Modification phase of node %s/%i\n",
4390 cgraph_node_name (node), node->symbol.order);
4392 aggval = ipa_get_agg_replacements_for_node (node);
4393 if (!aggval)
4394 return 0;
4395 param_count = count_formal_params (node->symbol.decl);
4396 if (param_count == 0)
4397 return 0;
4398 adjust_agg_replacement_values (node, aggval);
4399 if (dump_file)
4400 ipa_dump_agg_replacement_values (dump_file, aggval);
4401 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
4402 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
4403 descriptors.safe_grow_cleared (param_count);
4404 ipa_populate_param_decls (node, descriptors);
4406 FOR_EACH_BB (bb)
4407 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4409 struct ipa_agg_replacement_value *v;
4410 gimple stmt = gsi_stmt (gsi);
4411 tree rhs, val, t;
4412 HOST_WIDE_INT offset;
4413 int index;
4414 bool by_ref, vce;
4416 if (!gimple_assign_load_p (stmt))
4417 continue;
4418 rhs = gimple_assign_rhs1 (stmt);
4419 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
4420 continue;
4422 vce = false;
4423 t = rhs;
4424 while (handled_component_p (t))
4426 /* V_C_E can do things like convert an array of integers to one
4427 bigger integer and similar things we do not handle below. */
4428 if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
4430 vce = true;
4431 break;
4433 t = TREE_OPERAND (t, 0);
4435 if (vce)
4436 continue;
4438 if (!ipa_load_from_parm_agg_1 (descriptors, parms_ainfo, stmt,
4439 rhs, &index, &offset, &by_ref))
4440 continue;
4441 for (v = aggval; v; v = v->next)
4442 if (v->index == index
4443 && v->offset == offset)
4444 break;
4445 if (!v || v->by_ref != by_ref)
4446 continue;
4448 gcc_checking_assert (is_gimple_ip_invariant (v->value));
4449 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
4451 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
4452 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
4453 else if (TYPE_SIZE (TREE_TYPE (rhs))
4454 == TYPE_SIZE (TREE_TYPE (v->value)))
4455 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
4456 else
4458 if (dump_file)
4460 fprintf (dump_file, " const ");
4461 print_generic_expr (dump_file, v->value, 0);
4462 fprintf (dump_file, " can't be converted to type of ");
4463 print_generic_expr (dump_file, rhs, 0);
4464 fprintf (dump_file, "\n");
4466 continue;
4469 else
4470 val = v->value;
4472 if (dump_file && (dump_flags & TDF_DETAILS))
4474 fprintf (dump_file, "Modifying stmt:\n ");
4475 print_gimple_stmt (dump_file, stmt, 0, 0);
4477 gimple_assign_set_rhs_from_tree (&gsi, val);
4478 update_stmt (stmt);
4480 if (dump_file && (dump_flags & TDF_DETAILS))
4482 fprintf (dump_file, "into:\n ");
4483 print_gimple_stmt (dump_file, stmt, 0, 0);
4484 fprintf (dump_file, "\n");
4487 something_changed = true;
4488 if (maybe_clean_eh_stmt (stmt)
4489 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4490 cfg_changed = true;
4493 (*ipa_node_agg_replacements)[node->uid] = NULL;
4494 free_parms_ainfo (parms_ainfo, param_count);
4495 descriptors.release ();
4497 if (!something_changed)
4498 return 0;
4499 else if (cfg_changed)
4500 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
4501 else
4502 return TODO_update_ssa_only_virtuals;