/* Interprocedural analyses.
   Copyright (C) 2005-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "expr.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "langhooks.h"
#include "target.h"
#include "ipa-prop.h"
#include "bitmap.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "params.h"
#include "ipa-utils.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"
#include "calls.h"
/* Intermediate information that we get from alias analysis about a particular
   parameter in a particular basic_block.  When a parameter or the memory it
   references is marked modified, we use that information in all dominated
   blocks without consulting the alias analysis oracle.  */

struct param_aa_status
{
  /* Set when this structure contains meaningful information.  If not, the
     structure describing a dominating BB should be used instead.  */
  bool valid;

  /* Whether we have seen something which might have modified the data in
     question.  PARM is for the parameter itself, REF is for data it points to
     but using the alias type of individual accesses and PT is the same thing
     but for computing aggregate pass-through functions using a very inclusive
     ao_ref.  */
  bool parm_modified, ref_modified, pt_modified;
};
/* Information related to a given BB that is used only when looking at the
   function body.  */

struct ipa_bb_info
{
  /* Call graph edges going out of this BB.  */
  vec<cgraph_edge_p> cg_edges;
  /* Alias analysis statuses of each formal parameter at this bb.  */
  vec<param_aa_status> param_aa_statuses;
};
/* Structure with global information that is only used when looking at the
   function body.  */

struct func_body_info
{
  /* The node that is being analyzed.  */
  cgraph_node *node;

  /* Its info.  */
  struct ipa_node_params *info;

  /* Information about individual BBs.  */
  vec<ipa_bb_info> bb_infos;

  /* Number of parameters.  */
  int param_count;

  /* Number of statements walked so far when analyzing this function.  */
  unsigned int aa_walked;
};
/* Vector where the parameter infos are actually stored.  */
vec<ipa_node_params> ipa_node_params_vector;
/* Vector of known aggregate values in cloned nodes.  */
vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
/* Vector where the edge argument infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;
/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static alloc_pool ipa_refdesc_pool;
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
  struct cl_optimization *os;

  if (!fs_opts)
    return false;
  os = TREE_OPTIMIZATION (fs_opts);
  return !os->x_optimize || !os->x_flag_ipa_cp;
}
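
/* For instance (an illustrative sketch, not part of the original file): a
   function carrying per-decl optimization options with optimization disabled
   has x_optimize == 0 in its DECL_FUNCTION_SPECIFIC_OPTIMIZATION, so the
   predicate above returns true and IPA-CP leaves it alone:

     __attribute__ ((optimize ("O0")))
     int keep_as_is (int x) { return x + 1; }   // never analyzed by IPA-CP
*/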
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
/* Populate the param_decl fields in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm));
      param_num++;
    }
}
/* Return how many formal parameters FNDECL has.  */

static inline int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;

  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}
/* Dump the Ith formal parameter of the function corresponding to INFO to
   FILE.  Note there is no setter function as the descriptor array is built
   just once using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}
/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}
/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_KNOWN_TYPE)
	{
	  fprintf (f, "KNOWN TYPE: base ");
	  print_generic_expr (f, jump_func->value.known_type.base_type, 0);
	  fprintf (f, ", offset " HOST_WIDE_INT_PRINT_DEC ", component ",
		   jump_func->value.known_type.offset);
	  print_generic_expr (f, jump_func->value.known_type.component_type, 0);
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val, 0);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
				  0);
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f,
				  jump_func->value.pass_through.operand, 0);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  if (jump_func->value.pass_through.type_preserved)
	    fprintf (f, ", type_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC ", ",
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  print_generic_expr (f, jump_func->value.ancestor.type, 0);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  if (jump_func->value.ancestor.type_preserved)
	    fprintf (f, ", type_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value, 0);
		}
	      fprintf (f, "\n");
	    }
	}
    }
}
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s/%i:\n", node->name (),
	   node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
	       xstrdup (node->name ()), node->order,
	       xstrdup (cs->callee->name ()),
	       cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}
/* Set JFUNC to be a known type jump function.  */

static void
ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		       tree base_type, tree component_type)
{
  /* Recording and propagating main variants increases the chance that types
     will match.  */
  base_type = TYPE_MAIN_VARIANT (base_type);
  component_type = TYPE_MAIN_VARIANT (component_type);

  gcc_assert (contains_polymorphic_type_p (base_type)
	      && contains_polymorphic_type_p (component_type));
  if (!flag_devirtualize)
    return;
  jfunc->type = IPA_JF_KNOWN_TYPE;
  jfunc->value.known_type.offset = offset;
  jfunc->value.known_type.base_type = base_type;
  jfunc->value.known_type.component_type = component_type;
  gcc_assert (component_type);
}
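
/* An illustrative sketch (not part of the original file) of what a KNOWN_TYPE
   jump function records: given

     struct A { virtual void f (); };
     struct B : public A { virtual void f (); };

   and a caller that passes the address of a local `B b;` to a callee expecting
   an A*, the jump function would record base_type B, offset 0 (the position of
   the A sub-object within B) and component_type A.  */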
/* Set JFUNC to be a copy of another jump function (to be used by jump function
   combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}
/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;
      if (!ipa_refdesc_pool)
	ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
					sizeof (struct ipa_cst_ref_desc), 32);

      rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
/* Set JFUNC to be a simple pass-through jump function.  */
static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved, bool type_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
  jfunc->value.pass_through.type_preserved = type_preserved;
}
/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
  jfunc->value.pass_through.type_preserved = false;
}
/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     tree type, int formal_id, bool agg_preserved,
		     bool type_preserved)
{
  if (!flag_devirtualize)
    type_preserved = false;
  if (!type_preserved)
    type = NULL_TREE;
  if (type)
    type = TYPE_MAIN_VARIANT (type);
  gcc_assert (!type_preserved || contains_polymorphic_type_p (type));
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.type = type_preserved ? type : NULL;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
  jfunc->value.ancestor.type_preserved = type_preserved;
}
/* Extract the actual BINFO being described by JFUNC which must be a known type
   jump function.  */

tree
ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
{
  if (!RECORD_OR_UNION_TYPE_P (jfunc->value.known_type.base_type))
    return NULL_TREE;

  tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);

  if (!base_binfo)
    return NULL_TREE;
  /* FIXME: At LTO we can't propagate to non-polymorphic type, because
     we have no ODR equivalency on those.  This should be fixed by
     propagating on types rather than binfos that would make type
     matching here unnecessary.  */
  if (in_lto_p
      && (TREE_CODE (jfunc->value.known_type.component_type) != RECORD_TYPE
	  || !TYPE_BINFO (jfunc->value.known_type.component_type)
	  || !BINFO_VTABLE (TYPE_BINFO (jfunc->value.known_type.component_type))))
    {
      if (!jfunc->value.known_type.offset)
	return base_binfo;
      return NULL;
    }
  return get_binfo_at_offset (base_binfo,
			      jfunc->value.known_type.offset,
			      jfunc->value.known_type.component_type);
}
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */
static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
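
/* To make the assumptions above concrete, a sketch (illustrative only) of how
   a constructor of `struct B : A { int i; B (); };` roughly expands into the
   three sections the preceding comment describes:

     B::B (struct B *this)
     {
       A::A (this);                        // 1) ancestor constructors
       this->_vptr.A = &_ZTV1B + offset;   // 2) VMT pointer stores
       this->i = 0;                        // 3) member inits and user code
     }
*/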
/* If STMT can be proved to be an assignment to the virtual method table
   pointer of the object described by TCI and the type associated with the new
   table identified, return the type.  Otherwise return NULL_TREE.  */

static tree
extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
{
  HOST_WIDE_INT offset, size, max_size;
  tree lhs, rhs, base, binfo;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
    return NULL_TREE;

  base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
  if (offset != tci->offset
      || size != POINTER_SIZE
      || max_size != POINTER_SIZE)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (TREE_CODE (tci->object) != MEM_REF
	  || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
	  || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
				  TREE_OPERAND (base, 1)))
	return NULL_TREE;
    }
  else if (tci->object != base)
    return NULL_TREE;

  binfo = vtable_pointer_value_to_binfo (rhs);

  /* FIXME: vtable_pointer_value_to_binfo may return BINFO of a
     base of outer type.  In this case we would need to either
     work on binfos or translate it back to outer type and offset.
     KNOWN_TYPE jump functions are not ready for that, yet.  */
  if (!binfo || TYPE_BINFO (BINFO_TYPE (binfo)) != binfo)
    return NULL;

  return BINFO_TYPE (binfo);
}
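
/* A sketch of the kind of statement recognized above, as GIMPLE dumps would
   show it (illustrative only):

     this_2(D)->_vptr.A = &MEM[(void *)&_ZTV1B + 16B];

   The LHS is a COMPONENT_REF of a DECL_VIRTUAL_P field at the offset recorded
   in TCI, and the RHS points into the vtable of B, so the function would
   return the type B.  */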
/* Callback of walk_aliased_vdefs and a helper function for
   detect_type_change to check whether a particular statement may modify
   the virtual table pointer, and if possible also determine the new type of
   the (sub-)object.  It stores its result into DATA, which points to a
   type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct type_change_info *tci = (struct type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tree type;

      type = extr_type_from_vtbl_ptr_store (stmt, tci);
      gcc_assert (!type || TYPE_MAIN_VARIANT (type) == type);
      if (tci->type_maybe_changed
	  && type != tci->known_current_type)
	tci->multiple_types_encountered = true;
      tci->known_current_type = type;
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type may change
   between the beginning of the function and the point where CALL is invoked.

   Generally functions are not allowed to change the type of such instances,
   but they may call destructors.  We assume that methods can not destroy the
   THIS pointer.  Also as a special case, constructors and destructors may
   change the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple call)
{
  /* Pure functions can not do any changes on the dynamic type;
     that would require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inline cdtor is actually working on ARG, but we have no easy
     tie on this, so punt on all non-pure cdtors).
     We could also record the types of cdtors and, once we know the
     type of the instance, match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (BLOCK_ABSTRACT_ORIGIN (block)
		&& TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
	      {
		tree fn = BLOCK_ABSTRACT_ORIGIN (block);

		if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
		  continue;
		if (TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
		    && (DECL_CXX_CONSTRUCTOR_P (fn)
			|| DECL_CXX_DESTRUCTOR_P (fn)))
		  return true;
	      }
	  return false;
	}
    }
  return true;
}
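
/* For example (an illustrative sketch): after inlining something like

     void f (struct A *a)
     {
       a->~A ();
       new (a) B ();   // placement new changes the dynamic type of *a
       g (a);
     }

   the call to g sits inside BLOCKs whose abstract origins are the inlined
   ctor/dtor decls, so the walk over BLOCK_SUPERCONTEXTs above returns true
   and callers must assume the dynamic type may have changed.  */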
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gimple call, struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.known_current_type = NULL_TREE;
  tci.type_maybe_changed = false;
  tci.multiple_types_encountered = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  if (!tci.known_current_type
      || tci.multiple_types_encountered
      || offset != 0
      /* When the walk reached function entry, it means that type
	 is set along some paths but not along others.  */
      || entry_reached)
    jfunc->type = IPA_JF_UNKNOWN;
  else
    ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);

  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gimple call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
						call, jfunc, offset);
}
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gimple call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
						call, jfunc, 0);
}
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}
/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}
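
/* The threshold above is the ipa-max-aa-steps param; if the alias-analysis
   walks bail out too early on large functions, it can be raised on the
   command line, e.g. with --param ipa-max-aa-steps=<N>.  */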
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct param_aa_status *
find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct param_aa_status *
parm_bb_aa_status_for_bb (struct func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have gathered
   so far but which does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
			      gimple stmt, tree parm_load)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct func_body_info *fbi,
			    vec<ipa_param_descriptor> descriptors,
			    gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
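
/* A sketch of the pattern matched above (illustrative only): for a parameter
   `a` that is not a gimple register,

     a.0_2 = a;

   is a single load directly from the PARM_DECL `a`; if the alias walk shows
   `a` was not written to before this statement, its index is returned.  */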
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct func_body_info *fbi,
			   int index, gimple stmt, tree ref)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
			      gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (call),
							  index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

static bool
ipa_load_from_parm_agg_1 (struct func_body_info *fbi,
			  vec<ipa_param_descriptor> descriptors,
			  gimple stmt, tree op, int *index_p,
			  HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
			  bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	   void (*<T2e4>) (struct S *) D.1867;
	   struct S * p.1;

	   <bb 2>:
	   p.1_1 = p;
	   D.1867_2 = p.1_1->f;
	   D.1867_2 ();
	   gdp = &p;
	 }  */

      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (fbi, index, stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      return true;
    }
  return false;
}
/* Just like the previous function, just without the func_body_info pointer,
   for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
			tree op, int *index_p, HOST_WIDE_INT *offset_p,
			bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
				   offset_p, NULL, by_ref_p);
}
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
        int a.0;

        a.0_2 = a;
        bar (a.0_2);
      }

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
        int D.2064;

        D.2064_4 = a.1(D) + 4;
        bar (D.2064_4);
      }

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
        int a.0;
        int D.2064;

        a.0_3 = a;
        D.2064_4 = a.0_3 + 4;
        foo (D.2064_4);
      }

   3) The passed value is an address of an object within another one (which is
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);
     }

   INFO is the structure describing individual parameters across the different
   stages of IPA optimizations.  FBI carries the information that is only
   needed for intraprocedural analysis.  */
static void
compute_complex_assign_jump_func (struct func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gimple call, gimple stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
	{
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  ipa_set_jf_arith_pass_through (jfunc, index, op2,
					 gimple_assign_rhs_code (stmt));
	}
      else if (gimple_assign_single_p (stmt))
	{
	  bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
	  bool type_p = false;

	  if (param_type && POINTER_TYPE_P (param_type))
	    type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
					      call, jfunc);
	  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    {
      bool type_p = (contains_polymorphic_type_p (TREE_TYPE (param_type))
		     && !detect_type_change (op1, base, TREE_TYPE (param_type),
					     call, jfunc, offset));
      if (type_p || jfunc->type == IPA_JF_UNKNOWN)
	ipa_set_ancestor_jf (jfunc, offset,
			     type_p ? TREE_TYPE (param_type) : NULL, index,
			     parm_ref_data_pass_through_p (fbi, index,
							   call, ssa), type_p);
    }
}
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gimple call, gimple phi, tree param_type)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  bool type_p = false;
  if (param_type && POINTER_TYPE_P (param_type)
      && contains_polymorphic_type_p (TREE_TYPE (param_type)))
    type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
				  call, jfunc, offset);
  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
    ipa_set_ancestor_jf (jfunc, offset, type_p ? TREE_TYPE (param_type) : NULL,
			 index,
			 parm_ref_data_pass_through_p (fbi, index, call, parm),
			 type_p);
}
/* Given OP which is passed as an actual argument to a called function,
   determine if it is possible to construct a KNOWN_TYPE jump function for it
   and if so, create one and store it to JFUNC.
   EXPECTED_TYPE represents a type the argument should be in.  */

static void
compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
			      gimple call, tree expected_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  if (!flag_devirtualize
      || TREE_CODE (op) != ADDR_EXPR
      || !contains_polymorphic_type_p (TREE_TYPE (TREE_TYPE (op)))
      /* Be sure expected_type is polymorphic.  */
      || !expected_type
      || !contains_polymorphic_type_p (expected_type))
    return;

  op = TREE_OPERAND (op, 0);
  base = get_ref_base_and_extent (op, &offset, &size, &max_size);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || !contains_polymorphic_type_p (TREE_TYPE (base)))
    return;

  if (decl_maybe_in_construction_p (base, TREE_TYPE (base),
				    call, current_function_decl)
      /* Even if the var seems to be in construction by inline call stack,
	 we may work out the actual type by walking memory writes.  */
      && (!is_global_var (base)
	  && detect_type_change (op, base, expected_type, call, jfunc, offset)))
    return;

  ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
			 expected_type);
}
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
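
/* Under the Itanium C++ ABI, a pointer to member function is laid out like
   the following sketch (illustrative only), which is the shape the check
   above matches - a method pointer followed by an integral this-adjustment:

     struct {
       void (T::*__pfn) ();   // function pointer (or adjusted vtable index)
       ptrdiff_t __delta;     // offset to adjust `this`
     };
*/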
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}
/* Simple linked list, describing known contents of an aggregate before a
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
				HOST_WIDE_INT lhs_offset,
				HOST_WIDE_INT lhs_size,
				bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
	return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	/* We already know this value is subsequently overwritten with
	   something else.  */
	*already_there = true;
      else
	/* Otherwise this is a partial overlap which we cannot
	   represent.  */
	return NULL;
    }
  return p;
}
/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET, and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int const_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
	{
	  struct ipa_agg_jf_item item;
	  item.offset = list->offset - arg_offset;
	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	  item.value = unshare_expr_without_location (list->constant);
	  jfunc->agg.items->quick_push (item);
	}
      list = list->next;
    }
}
1671 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1672 in ARG is filled in with constant values. ARG can either be an aggregate
1673 expression or a pointer to an aggregate. ARG_TYPE is the type of the
1674 aggregate. JFUNC is the jump function into which the constants are
1675 subsequently stored. */
1677 static void
1678 determine_locally_known_aggregate_parts (gimple call, tree arg, tree arg_type,
1679 struct ipa_jump_func *jfunc)
1681 struct ipa_known_agg_contents_list *list = NULL;
1682 int item_count = 0, const_count = 0;
1683 HOST_WIDE_INT arg_offset, arg_size;
1684 gimple_stmt_iterator gsi;
1685 tree arg_base;
1686 bool check_ref, by_ref;
1687 ao_ref r;
1689 /* The function operates in three stages. First, we prepare check_ref, r,
1690 arg_base and arg_offset based on what is actually passed as an actual
1691 argument. */
1693 if (POINTER_TYPE_P (arg_type))
1695 by_ref = true;
1696 if (TREE_CODE (arg) == SSA_NAME)
1698 tree type_size;
1699 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
1700 return;
1701 check_ref = true;
1702 arg_base = arg;
1703 arg_offset = 0;
1704 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1705 arg_size = tree_to_uhwi (type_size);
1706 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1708 else if (TREE_CODE (arg) == ADDR_EXPR)
1710 HOST_WIDE_INT arg_max_size;
1712 arg = TREE_OPERAND (arg, 0);
1713 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1714 &arg_max_size);
1715 if (arg_max_size == -1
1716 || arg_max_size != arg_size
1717 || arg_offset < 0)
1718 return;
1719 if (DECL_P (arg_base))
1721 check_ref = false;
1722 ao_ref_init (&r, arg_base);
1724 else
1725 return;
1727 else
1728 return;
1730 else
1732 HOST_WIDE_INT arg_max_size;
1734 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1736 by_ref = false;
1737 check_ref = false;
1738 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1739 &arg_max_size);
1740 if (arg_max_size == -1
1741 || arg_max_size != arg_size
1742 || arg_offset < 0)
1743 return;
1745 ao_ref_init (&r, arg);
1748 /* Second stage walks back the BB, looks at individual statements and as long
1749 as it is confident of how the statements affect contents of the
1750 aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
1751 describing it. */
1752 gsi = gsi_for_stmt (call);
1753 gsi_prev (&gsi);
1754 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1756 struct ipa_known_agg_contents_list *n, **p;
1757 gimple stmt = gsi_stmt (gsi);
1758 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1759 tree lhs, rhs, lhs_base;
1761 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1762 continue;
1763 if (!gimple_assign_single_p (stmt))
1764 break;
1766 lhs = gimple_assign_lhs (stmt);
1767 rhs = gimple_assign_rhs1 (stmt);
1768 if (!is_gimple_reg_type (TREE_TYPE (rhs))
1769 || TREE_CODE (lhs) == BIT_FIELD_REF
1770 || contains_bitfld_component_ref_p (lhs))
1771 break;
1773 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1774 &lhs_max_size);
1775 if (lhs_max_size == -1
1776 || lhs_max_size != lhs_size)
1777 break;
1779 if (check_ref)
1781 if (TREE_CODE (lhs_base) != MEM_REF
1782 || TREE_OPERAND (lhs_base, 0) != arg_base
1783 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1784 break;
1786 else if (lhs_base != arg_base)
1788 if (DECL_P (lhs_base))
1789 continue;
1790 else
1791 break;
1794 bool already_there = false;
1795 p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
1796 &already_there);
1797 if (!p)
1798 break;
1799 if (already_there)
1800 continue;
1802 rhs = get_ssa_def_if_simple_copy (rhs);
1803 n = XALLOCA (struct ipa_known_agg_contents_list);
1804 n->size = lhs_size;
1805 n->offset = lhs_offset;
1806 if (is_gimple_ip_invariant (rhs))
1808 n->constant = rhs;
1809 const_count++;
1811 else
1812 n->constant = NULL_TREE;
1813 n->next = *p;
1814 *p = n;
1816 item_count++;
1817 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1818 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1819 break;
1822 /* Third stage just goes over the list and creates an appropriate vector of
1823 ipa_agg_jf_item structures out of it, of sourse only if there are
1824 any known constants to begin with. */
1826 if (const_count)
1828 jfunc->agg.by_ref = by_ref;
1829 build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
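/* As an illustration (a minimal sketch, hypothetical code not from this
   file): for a caller like

     struct S { int a; int b; };

     extern void callee (struct S *);

     void caller (void)
     {
       struct S s;
       s.a = 1;
       s.b = 2;
       callee (&s);
     }

   the second stage walks back from the call and records the stores to s.a
   and s.b, and the third stage, assuming 32-bit int, emits two constant
   aggregate items: (offset 0, value 1) and (offset 32, value 2), offsets
   being in bits.  */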
/* Return the type of the Ith formal parameter of the callee of call graph
   edge E, or NULL if it cannot be determined.  */
1833 static tree
1834 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1836 int n;
1837 tree type = (e->callee
1838 ? TREE_TYPE (e->callee->decl)
1839 : gimple_call_fntype (e->call_stmt));
1840 tree t = TYPE_ARG_TYPES (type);
1842 for (n = 0; n < i; n++)
1844 if (!t)
1845 break;
1846 t = TREE_CHAIN (t);
1848 if (t)
1849 return TREE_VALUE (t);
1850 if (!e->callee)
1851 return NULL;
1852 t = DECL_ARGUMENTS (e->callee->decl);
1853 for (n = 0; n < i; n++)
1855 if (!t)
1856 return NULL;
1857 t = TREE_CHAIN (t);
1859 if (t)
1860 return TREE_TYPE (t);
1861 return NULL;
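/* For example, assuming a callee declared as

     int callee (int a, char *b);

   ipa_get_callee_param_type (e, 1) on an edge E to that callee yields the
   type char *, taken first from TYPE_ARG_TYPES and, failing that, from
   DECL_ARGUMENTS of the callee declaration.  */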
1864 /* Compute jump function for all arguments of callsite CS and insert the
1865 information in the jump_functions array in the ipa_edge_args corresponding
1866 to this callsite. */
1868 static void
1869 ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
1870 struct cgraph_edge *cs)
1872 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1873 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1874 gimple call = cs->call_stmt;
1875 int n, arg_num = gimple_call_num_args (call);
1877 if (arg_num == 0 || args->jump_functions)
1878 return;
1879 vec_safe_grow_cleared (args->jump_functions, arg_num);
1881 if (gimple_call_internal_p (call))
1882 return;
1883 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1884 return;
1886 for (n = 0; n < arg_num; n++)
1888 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1889 tree arg = gimple_call_arg (call, n);
1890 tree param_type = ipa_get_callee_param_type (cs, n);
1892 if (is_gimple_ip_invariant (arg))
1893 ipa_set_jf_constant (jfunc, arg, cs);
1894 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1895 && TREE_CODE (arg) == PARM_DECL)
1897 int index = ipa_get_param_decl_index (info, arg);
1899 gcc_assert (index >= 0);
1900 /* Aggregate passed by value, check for pass-through, otherwise we
1901 will attempt to fill in aggregate contents later in this
1902 loop. */
1903 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1905 ipa_set_jf_simple_pass_through (jfunc, index, false, false);
1906 continue;
1909 else if (TREE_CODE (arg) == SSA_NAME)
1911 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1913 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1914 if (index >= 0)
1916 bool agg_p, type_p;
1917 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1918 if (param_type && POINTER_TYPE_P (param_type))
1919 type_p = !detect_type_change_ssa (arg, TREE_TYPE (param_type),
1920 call, jfunc);
1921 else
1922 type_p = false;
1923 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1924 ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
1925 type_p);
1928 else
1930 gimple stmt = SSA_NAME_DEF_STMT (arg);
1931 if (is_gimple_assign (stmt))
1932 compute_complex_assign_jump_func (fbi, info, jfunc,
1933 call, stmt, arg, param_type);
1934 else if (gimple_code (stmt) == GIMPLE_PHI)
1935 compute_complex_ancestor_jump_func (fbi, info, jfunc,
1936 call, stmt, param_type);
1939 else
1940 compute_known_type_jump_func (arg, jfunc, call,
1941 param_type
1942 && POINTER_TYPE_P (param_type)
1943 ? TREE_TYPE (param_type)
1944 : NULL);
1946 /* If ARG is a pointer, we cannot use its type to determine the type of the
1947 aggregate passed (because type conversions are ignored in gimple). Usually
1948 we can safely get the type from the function declaration, but for K&R
1949 prototypes or variadic functions we can try our luck with the type of the
1950 pointer passed. TODO: Since we look for actual initialization of the
1951 memory object, we might do better deriving the type from the stores we find. */
1952 if (!param_type)
1953 param_type = TREE_TYPE (arg);
1955 if ((jfunc->type != IPA_JF_PASS_THROUGH
1956 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1957 && (jfunc->type != IPA_JF_ANCESTOR
1958 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1959 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1960 || POINTER_TYPE_P (param_type)))
1961 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
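/* As a sketch of what the loop above computes (hypothetical caller, not
   from this file):

     void caller (int x, struct S *p)
     {
       callee (x, 7, p);
     }

   the first actual argument typically yields a simple pass-through jump
   function (formal_id 0), the second a constant jump function with value 7,
   and the third a pass-through whose agg_preserved flag is set only if the
   pointed-to memory is not modified before the call.  */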
1965 /* Compute jump functions for all edges - both direct and indirect - outgoing
1966 from BB. */
1968 static void
1969 ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
1971 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
1972 int i;
1973 struct cgraph_edge *cs;
1975 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
1977 struct cgraph_node *callee = cs->callee;
1979 if (callee)
1981 cgraph_function_or_thunk_node (callee, NULL);
1982 /* We do not need to bother analyzing calls to unknown functions
1983 unless they may become known during lto/whopr. */
1984 if (!callee->definition && !flag_lto)
1985 continue;
1987 ipa_compute_jump_functions_for_edge (fbi, cs);
1991 /* If STMT looks like a statement loading a value from a member pointer formal
1992 parameter, return that parameter and store the offset of the field to
1993 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1994 might be clobbered). If USE_DELTA, then we look for a use of the delta
1995 field rather than the pfn. */
1997 static tree
1998 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
1999 HOST_WIDE_INT *offset_p)
2001 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2003 if (!gimple_assign_single_p (stmt))
2004 return NULL_TREE;
2006 rhs = gimple_assign_rhs1 (stmt);
2007 if (TREE_CODE (rhs) == COMPONENT_REF)
2009 ref_field = TREE_OPERAND (rhs, 1);
2010 rhs = TREE_OPERAND (rhs, 0);
2012 else
2013 ref_field = NULL_TREE;
2014 if (TREE_CODE (rhs) != MEM_REF)
2015 return NULL_TREE;
2016 rec = TREE_OPERAND (rhs, 0);
2017 if (TREE_CODE (rec) != ADDR_EXPR)
2018 return NULL_TREE;
2019 rec = TREE_OPERAND (rec, 0);
2020 if (TREE_CODE (rec) != PARM_DECL
2021 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2022 return NULL_TREE;
2023 ref_offset = TREE_OPERAND (rhs, 1);
2025 if (use_delta)
2026 fld = delta_field;
2027 else
2028 fld = ptr_field;
2029 if (offset_p)
2030 *offset_p = int_bit_position (fld);
2032 if (ref_field)
2034 if (integer_nonzerop (ref_offset))
2035 return NULL_TREE;
2036 return ref_field == fld ? rec : NULL_TREE;
2038 else
2039 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2040 : NULL_TREE;
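/* For example, with a parameter F of member pointer type (fields __pfn and
   __delta), this matches loads such as

     f$__pfn_24 = f.__pfn;

   or the equivalent MEM_REF form, returning F and storing the bit position
   of __pfn (or of __delta when USE_DELTA) to *OFFSET_P.  */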
2043 /* Returns true iff T is an SSA_NAME defined by a statement. */
2045 static bool
2046 ipa_is_ssa_with_stmt_def (tree t)
2048 if (TREE_CODE (t) == SSA_NAME
2049 && !SSA_NAME_IS_DEFAULT_DEF (t))
2050 return true;
2051 else
2052 return false;
2055 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2056 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2057 indirect call graph edge. */
2059 static struct cgraph_edge *
2060 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
2062 struct cgraph_edge *cs;
2064 cs = cgraph_edge (node, stmt);
2065 cs->indirect_info->param_index = param_index;
2066 cs->indirect_info->agg_contents = 0;
2067 cs->indirect_info->member_ptr = 0;
2068 return cs;
2071 /* Analyze the CALL and examine uses of formal parameters of the caller
2072 FBI->node (described by FBI->info). Currently it checks
2074 whether the call calls a pointer that is a formal parameter and if so, the
2075 parameter is marked with the called flag and an indirect call graph edge
2076 describing the call is created. This is very simple for ordinary pointers
2077 represented in SSA but not-so-nice when it comes to member pointers. The
2078 ugly part of this function does nothing more than trying to match the
2079 pattern of such a call. An example of such a pattern is the gimple dump
2080 below; the call is on the last line:
2082 <bb 2>:
2083 f$__delta_5 = f.__delta;
2084 f$__pfn_24 = f.__pfn;
or

2087 <bb 2>:
2088 f$__delta_5 = MEM[(struct *)&f];
2089 f$__pfn_24 = MEM[(struct *)&f + 4B];
2091 and a few lines below:
2093 <bb 5>
2094 D.2496_3 = (int) f$__pfn_24;
2095 D.2497_4 = D.2496_3 & 1;
2096 if (D.2497_4 != 0)
2097 goto <bb 3>;
2098 else
2099 goto <bb 4>;
2101 <bb 6>:
2102 D.2500_7 = (unsigned int) f$__delta_5;
2103 D.2501_8 = &S + D.2500_7;
2104 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2105 D.2503_10 = *D.2502_9;
2106 D.2504_12 = f$__pfn_24 + -1;
2107 D.2505_13 = (unsigned int) D.2504_12;
2108 D.2506_14 = D.2503_10 + D.2505_13;
2109 D.2507_15 = *D.2506_14;
2110 iftmp.11_16 = (String:: *) D.2507_15;
2112 <bb 7>:
2113 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2114 D.2500_19 = (unsigned int) f$__delta_5;
2115 D.2508_20 = &S + D.2500_19;
2116 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2118 Such patterns are results of simple calls to a member pointer:
2120 int doprinting (int (MyString::* f)(int) const)
2122 MyString S ("somestring");
2124 return (S.*f)(4);
2127 Moreover, the function also looks for called pointers loaded from aggregates
2128 passed by value or reference. */
2130 static void
2131 ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gimple call,
2132 tree target)
2134 struct ipa_node_params *info = fbi->info;
2135 HOST_WIDE_INT offset;
2136 bool by_ref;
2138 if (SSA_NAME_IS_DEFAULT_DEF (target))
2140 tree var = SSA_NAME_VAR (target);
2141 int index = ipa_get_param_decl_index (info, var);
2142 if (index >= 0)
2143 ipa_note_param_call (fbi->node, index, call);
2144 return;
2147 int index;
2148 gimple def = SSA_NAME_DEF_STMT (target);
2149 if (gimple_assign_single_p (def)
2150 && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
2151 gimple_assign_rhs1 (def), &index, &offset,
2152 NULL, &by_ref))
2154 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2155 if (cs->indirect_info->offset != offset)
2156 cs->indirect_info->outer_type = NULL;
2157 cs->indirect_info->offset = offset;
2158 cs->indirect_info->agg_contents = 1;
2159 cs->indirect_info->by_ref = by_ref;
2160 return;
2163 /* Now we need to try to match the complex pattern of calling a member
2164 pointer. */
2165 if (gimple_code (def) != GIMPLE_PHI
2166 || gimple_phi_num_args (def) != 2
2167 || !POINTER_TYPE_P (TREE_TYPE (target))
2168 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2169 return;
2171 /* First, we need to check whether one of these is a load from a member
2172 pointer that is a parameter to this function. */
2173 tree n1 = PHI_ARG_DEF (def, 0);
2174 tree n2 = PHI_ARG_DEF (def, 1);
2175 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2176 return;
2177 gimple d1 = SSA_NAME_DEF_STMT (n1);
2178 gimple d2 = SSA_NAME_DEF_STMT (n2);
2180 tree rec;
2181 basic_block bb, virt_bb;
2182 basic_block join = gimple_bb (def);
2183 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2185 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2186 return;
2188 bb = EDGE_PRED (join, 0)->src;
2189 virt_bb = gimple_bb (d2);
2191 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2193 bb = EDGE_PRED (join, 1)->src;
2194 virt_bb = gimple_bb (d1);
2196 else
2197 return;
2199 /* Second, we need to check that the basic blocks are laid out in the way
2200 corresponding to the pattern. */
2202 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2203 || single_pred (virt_bb) != bb
2204 || single_succ (virt_bb) != join)
2205 return;
2207 /* Third, let's see that the branching is done depending on the least
2208 significant bit of the pfn. */
2210 gimple branch = last_stmt (bb);
2211 if (!branch || gimple_code (branch) != GIMPLE_COND)
2212 return;
2214 if ((gimple_cond_code (branch) != NE_EXPR
2215 && gimple_cond_code (branch) != EQ_EXPR)
2216 || !integer_zerop (gimple_cond_rhs (branch)))
2217 return;
2219 tree cond = gimple_cond_lhs (branch);
2220 if (!ipa_is_ssa_with_stmt_def (cond))
2221 return;
2223 def = SSA_NAME_DEF_STMT (cond);
2224 if (!is_gimple_assign (def)
2225 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2226 || !integer_onep (gimple_assign_rhs2 (def)))
2227 return;
2229 cond = gimple_assign_rhs1 (def);
2230 if (!ipa_is_ssa_with_stmt_def (cond))
2231 return;
2233 def = SSA_NAME_DEF_STMT (cond);
2235 if (is_gimple_assign (def)
2236 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2238 cond = gimple_assign_rhs1 (def);
2239 if (!ipa_is_ssa_with_stmt_def (cond))
2240 return;
2241 def = SSA_NAME_DEF_STMT (cond);
2244 tree rec2;
2245 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2246 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2247 == ptrmemfunc_vbit_in_delta),
2248 NULL);
2249 if (rec != rec2)
2250 return;
2252 index = ipa_get_param_decl_index (info, rec);
2253 if (index >= 0
2254 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2256 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2257 if (cs->indirect_info->offset != offset)
2258 cs->indirect_info->outer_type = NULL;
2259 cs->indirect_info->offset = offset;
2260 cs->indirect_info->agg_contents = 1;
2261 cs->indirect_info->member_ptr = 1;
2264 return;
2267 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2268 object referenced in the expression is a formal parameter of the caller
2269 FBI->node (described by FBI->info), create a call note for the
2270 statement. */
2272 static void
2273 ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
2274 gimple call, tree target)
2276 tree obj = OBJ_TYPE_REF_OBJECT (target);
2277 int index;
2278 HOST_WIDE_INT anc_offset;
2280 if (!flag_devirtualize)
2281 return;
2283 if (TREE_CODE (obj) != SSA_NAME)
2284 return;
2286 struct ipa_node_params *info = fbi->info;
2287 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2289 struct ipa_jump_func jfunc;
2290 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2291 return;
2293 anc_offset = 0;
2294 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2295 gcc_assert (index >= 0);
2296 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2297 call, &jfunc))
2298 return;
2300 else
2302 struct ipa_jump_func jfunc;
2303 gimple stmt = SSA_NAME_DEF_STMT (obj);
2304 tree expr;
2306 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2307 if (!expr)
2308 return;
2309 index = ipa_get_param_decl_index (info,
2310 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2311 gcc_assert (index >= 0);
2312 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2313 call, &jfunc, anc_offset))
2314 return;
2317 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2318 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2319 ii->offset = anc_offset;
2320 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2321 ii->otr_type = obj_type_ref_class (target);
2322 ii->polymorphic = 1;
2325 /* Analyze a call statement CALL whether and how it utilizes formal parameters
2326 of the caller FBI->node (described by FBI->info). */
2329 static void
2330 ipa_analyze_call_uses (struct func_body_info *fbi, gimple call)
2332 tree target = gimple_call_fn (call);
2334 if (!target
2335 || (TREE_CODE (target) != SSA_NAME
2336 && !virtual_method_call_p (target)))
2337 return;
2339 /* If we previously turned the call into a direct call, there is
2340 no need to analyze. */
2341 struct cgraph_edge *cs = cgraph_edge (fbi->node, call);
2342 if (cs && !cs->indirect_unknown_callee)
2343 return;
2344 if (TREE_CODE (target) == SSA_NAME)
2345 ipa_analyze_indirect_call_uses (fbi, call, target);
2346 else if (virtual_method_call_p (target))
2347 ipa_analyze_virtual_call_uses (fbi, call, target);
2351 /* Analyze the call statement STMT with respect to formal parameters
2352 (described in FBI->info) of the caller given by FBI->node. Currently it
2353 only checks whether formal parameters are called. */
2355 static void
2356 ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
2358 if (is_gimple_call (stmt))
2359 ipa_analyze_call_uses (fbi, stmt);
2362 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2363 If OP is a parameter declaration, mark it as used in the info structure
2364 passed in DATA. */
2366 static bool
2367 visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
2369 struct ipa_node_params *info = (struct ipa_node_params *) data;
2371 op = get_base_address (op);
2372 if (op
2373 && TREE_CODE (op) == PARM_DECL)
2375 int index = ipa_get_param_decl_index (info, op);
2376 gcc_assert (index >= 0);
2377 ipa_set_param_used (info, index, true);
2380 return false;
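/* For instance, when walking a statement like

     tmp_1 = param.field;

   the load callback receives param.field, get_base_address reduces it to
   the PARM_DECL param, and the parameter is marked as used.  */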
2383 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2384 the findings in various structures of the associated ipa_node_params
2385 structure, such as parameter flags, notes etc. FBI holds various data about
2386 the function being analyzed. */
2388 static void
2389 ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
2391 gimple_stmt_iterator gsi;
2392 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2394 gimple stmt = gsi_stmt (gsi);
2396 if (is_gimple_debug (stmt))
2397 continue;
2399 ipa_analyze_stmt_uses (fbi, stmt);
2400 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2401 visit_ref_for_mod_analysis,
2402 visit_ref_for_mod_analysis,
2403 visit_ref_for_mod_analysis);
2405 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2406 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2407 visit_ref_for_mod_analysis,
2408 visit_ref_for_mod_analysis,
2409 visit_ref_for_mod_analysis);
2412 /* Calculate controlled uses of parameters of NODE. */
2414 static void
2415 ipa_analyze_controlled_uses (struct cgraph_node *node)
2417 struct ipa_node_params *info = IPA_NODE_REF (node);
2419 for (int i = 0; i < ipa_get_param_count (info); i++)
2421 tree parm = ipa_get_param (info, i);
2422 int controlled_uses = 0;
2424 /* For SSA regs see if parameter is used. For non-SSA we compute
2425 the flag during modification analysis. */
2426 if (is_gimple_reg (parm))
2428 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2429 parm);
2430 if (ddef && !has_zero_uses (ddef))
2432 imm_use_iterator imm_iter;
2433 use_operand_p use_p;
2435 ipa_set_param_used (info, i, true);
2436 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2437 if (!is_gimple_call (USE_STMT (use_p)))
2439 if (!is_gimple_debug (USE_STMT (use_p)))
2441 controlled_uses = IPA_UNDESCRIBED_USE;
2442 break;
2445 else
2446 controlled_uses++;
2448 else
2449 controlled_uses = 0;
2451 else
2452 controlled_uses = IPA_UNDESCRIBED_USE;
2453 ipa_set_controlled_uses (info, i, controlled_uses);
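/* As an example of the counting above (hypothetical code, not from this
   file):

     static int helper (int *);

     int caller (int *p)
     {
       return helper (p);
     }

   all uses of the default definition of p are call arguments, so p gets
   controlled_uses == 1.  A non-call use such as a copy

     q_1 = p_2(D);

   would instead force IPA_UNDESCRIBED_USE.  */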
2457 /* Free stuff in BI. */
2459 static void
2460 free_ipa_bb_info (struct ipa_bb_info *bi)
2462 bi->cg_edges.release ();
2463 bi->param_aa_statuses.release ();
2466 /* Dominator walker driving the analysis. */
2468 class analysis_dom_walker : public dom_walker
2470 public:
2471 analysis_dom_walker (struct func_body_info *fbi)
2472 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2474 virtual void before_dom_children (basic_block);
2476 private:
2477 struct func_body_info *m_fbi;
2480 void
2481 analysis_dom_walker::before_dom_children (basic_block bb)
2483 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2484 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2487 /* Initialize the array describing properties of formal parameters
2488 of NODE, analyze their uses and compute jump functions associated
2489 with actual arguments of calls from within NODE. */
2491 void
2492 ipa_analyze_node (struct cgraph_node *node)
2494 struct func_body_info fbi;
2495 struct ipa_node_params *info;
2497 ipa_check_create_node_params ();
2498 ipa_check_create_edge_args ();
2499 info = IPA_NODE_REF (node);
2501 if (info->analysis_done)
2502 return;
2503 info->analysis_done = 1;
2505 if (ipa_func_spec_opts_forbid_analysis_p (node))
2507 for (int i = 0; i < ipa_get_param_count (info); i++)
2509 ipa_set_param_used (info, i, true);
2510 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2512 return;
2515 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2516 push_cfun (func);
2517 calculate_dominance_info (CDI_DOMINATORS);
2518 ipa_initialize_node_params (node);
2519 ipa_analyze_controlled_uses (node);
2521 fbi.node = node;
2522 fbi.info = IPA_NODE_REF (node);
2523 fbi.bb_infos = vNULL;
2524 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2525 fbi.param_count = ipa_get_param_count (info);
2526 fbi.aa_walked = 0;
2528 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2530 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2531 bi->cg_edges.safe_push (cs);
2534 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2536 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2537 bi->cg_edges.safe_push (cs);
2540 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2542 int i;
2543 struct ipa_bb_info *bi;
2544 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
2545 free_ipa_bb_info (bi);
2546 fbi.bb_infos.release ();
2547 free_dominance_info (CDI_DOMINATORS);
2548 pop_cfun ();
2551 /* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF
2552 attempt a type-based devirtualization. If successful, return the
2553 target function declaration, otherwise return NULL. */
2555 tree
2556 ipa_intraprocedural_devirtualization (gimple call)
2558 tree binfo, token, fndecl;
2559 struct ipa_jump_func jfunc;
2560 tree otr = gimple_call_fn (call);
2562 jfunc.type = IPA_JF_UNKNOWN;
2563 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
2564 call, obj_type_ref_class (otr));
2565 if (jfunc.type != IPA_JF_KNOWN_TYPE)
2566 return NULL_TREE;
2567 binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
2568 if (!binfo)
2569 return NULL_TREE;
2570 token = OBJ_TYPE_REF_TOKEN (otr);
2571 fndecl = gimple_get_virt_method_for_binfo (tree_to_uhwi (token),
2572 binfo);
2573 #ifdef ENABLE_CHECKING
2574 if (fndecl)
2575 gcc_assert (possible_polymorphic_call_target_p
2576 (otr, cgraph_get_node (fndecl)));
2577 #endif
2578 return fndecl;
2581 /* Update the jump function DST when the call graph edge corresponding to SRC
2582 is being inlined, knowing that DST is of type ancestor and SRC of known
2583 type. */
2585 static void
2586 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2587 struct ipa_jump_func *dst)
2589 HOST_WIDE_INT combined_offset;
2590 tree combined_type;
2592 if (!ipa_get_jf_ancestor_type_preserved (dst))
2594 dst->type = IPA_JF_UNKNOWN;
2595 return;
2598 combined_offset = ipa_get_jf_known_type_offset (src)
2599 + ipa_get_jf_ancestor_offset (dst);
2600 combined_type = ipa_get_jf_ancestor_type (dst);
2602 ipa_set_jf_known_type (dst, combined_offset,
2603 ipa_get_jf_known_type_base_type (src),
2604 combined_type);
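/* A small worked example: if SRC says the object is known to be of some base
   type at offset 32 and DST is an ancestor jump function with offset 64,
   the combined known-type jump function describes the object at offset
   32 + 64 == 96 (offsets are in bits), with the ancestor's type.  */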
2607 /* Update the jump functions associated with call graph edge E when the call
2608 graph edge CS is being inlined, assuming that E->caller is already (possibly
2609 indirectly) inlined into CS->callee and that E has not been inlined. */
2611 static void
2612 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2613 struct cgraph_edge *e)
2615 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2616 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2617 int count = ipa_get_cs_argument_count (args);
2618 int i;
2620 for (i = 0; i < count; i++)
2622 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2624 if (dst->type == IPA_JF_ANCESTOR)
2626 struct ipa_jump_func *src;
2627 int dst_fid = dst->value.ancestor.formal_id;
2629 /* Variable number of arguments can cause havoc if we try to access
2630 one that does not exist in the inlined edge. So make sure we
2631 don't. */
2632 if (dst_fid >= ipa_get_cs_argument_count (top))
2634 dst->type = IPA_JF_UNKNOWN;
2635 continue;
2638 src = ipa_get_ith_jump_func (top, dst_fid);
2640 if (src->agg.items
2641 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2643 struct ipa_agg_jf_item *item;
2644 int j;
2646 /* Currently we do not produce clobber aggregate jump functions,
2647 replace with merging when we do. */
2648 gcc_assert (!dst->agg.items);
2650 dst->agg.items = vec_safe_copy (src->agg.items);
2651 dst->agg.by_ref = src->agg.by_ref;
2652 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2653 item->offset -= dst->value.ancestor.offset;
2656 if (src->type == IPA_JF_KNOWN_TYPE)
2657 combine_known_type_and_ancestor_jfs (src, dst);
2658 else if (src->type == IPA_JF_PASS_THROUGH
2659 && src->value.pass_through.operation == NOP_EXPR)
2661 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2662 dst->value.ancestor.agg_preserved &=
2663 src->value.pass_through.agg_preserved;
2664 dst->value.ancestor.type_preserved &=
2665 src->value.pass_through.type_preserved;
2667 else if (src->type == IPA_JF_ANCESTOR)
2669 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2670 dst->value.ancestor.offset += src->value.ancestor.offset;
2671 dst->value.ancestor.agg_preserved &=
2672 src->value.ancestor.agg_preserved;
2673 dst->value.ancestor.type_preserved &=
2674 src->value.ancestor.type_preserved;
2676 else
2677 dst->type = IPA_JF_UNKNOWN;
2679 else if (dst->type == IPA_JF_PASS_THROUGH)
2681 struct ipa_jump_func *src;
2682 /* We must check range due to calls with variable number of arguments
2683 and we cannot combine jump functions with operations. */
2684 if (dst->value.pass_through.operation == NOP_EXPR
2685 && (dst->value.pass_through.formal_id
2686 < ipa_get_cs_argument_count (top)))
2688 int dst_fid = dst->value.pass_through.formal_id;
2689 src = ipa_get_ith_jump_func (top, dst_fid);
2690 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2692 switch (src->type)
2694 case IPA_JF_UNKNOWN:
2695 dst->type = IPA_JF_UNKNOWN;
2696 break;
2697 case IPA_JF_KNOWN_TYPE:
2698 if (ipa_get_jf_pass_through_type_preserved (dst))
2699 ipa_set_jf_known_type (dst,
2700 ipa_get_jf_known_type_offset (src),
2701 ipa_get_jf_known_type_base_type (src),
2702 ipa_get_jf_known_type_component_type (src));
2703 else
2704 dst->type = IPA_JF_UNKNOWN;
2705 break;
2706 case IPA_JF_CONST:
2707 ipa_set_jf_cst_copy (dst, src);
2708 break;
2710 case IPA_JF_PASS_THROUGH:
2712 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2713 enum tree_code operation;
2714 operation = ipa_get_jf_pass_through_operation (src);
2716 if (operation == NOP_EXPR)
2718 bool agg_p, type_p;
2719 agg_p = dst_agg_p
2720 && ipa_get_jf_pass_through_agg_preserved (src);
2721 type_p = ipa_get_jf_pass_through_type_preserved (src)
2722 && ipa_get_jf_pass_through_type_preserved (dst);
2723 ipa_set_jf_simple_pass_through (dst, formal_id,
2724 agg_p, type_p);
2726 else
2728 tree operand = ipa_get_jf_pass_through_operand (src);
2729 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2730 operation);
2732 break;
2734 case IPA_JF_ANCESTOR:
2736 bool agg_p, type_p;
2737 agg_p = dst_agg_p
2738 && ipa_get_jf_ancestor_agg_preserved (src);
2739 type_p = ipa_get_jf_ancestor_type_preserved (src)
2740 && ipa_get_jf_pass_through_type_preserved (dst);
2741 ipa_set_ancestor_jf (dst,
2742 ipa_get_jf_ancestor_offset (src),
2743 ipa_get_jf_ancestor_type (src),
2744 ipa_get_jf_ancestor_formal_id (src),
2745 agg_p, type_p);
2746 break;
2748 default:
2749 gcc_unreachable ();
2752 if (src->agg.items
2753 && (dst_agg_p || !src->agg.by_ref))
2755 /* Currently we do not produce clobber aggregate jump
2756 functions, replace with merging when we do. */
2757 gcc_assert (!dst->agg.items);
2759 dst->agg.by_ref = src->agg.by_ref;
2760 dst->agg.items = vec_safe_copy (src->agg.items);
2763 else
2764 dst->type = IPA_JF_UNKNOWN;
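/* A sketch of the composition above: if the non-inlined edge E passes its
   caller's formal 2 straight through (a NOP_EXPR pass-through with
   formal_id 2), and the jump function of the inlined edge CS for argument 2
   is the constant 5, then after inlining the jump function of E becomes the
   constant 5; had it been another simple pass-through, the formal_ids would
   be chained instead.  */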
2769 /* If TARGET is an addr_expr of a function declaration, make it the destination
2770 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2772 struct cgraph_edge *
2773 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
2775 struct cgraph_node *callee;
2776 struct inline_edge_summary *es = inline_edge_summary (ie);
2777 bool unreachable = false;
2779 if (TREE_CODE (target) == ADDR_EXPR)
2780 target = TREE_OPERAND (target, 0);
2781 if (TREE_CODE (target) != FUNCTION_DECL)
2783 target = canonicalize_constructor_val (target, NULL);
2784 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2786 if (ie->indirect_info->member_ptr)
2787 /* Member pointer call that goes through a VMT lookup. */
2788 return NULL;
2790 if (dump_enabled_p ())
2792 location_t loc = gimple_location_safe (ie->call_stmt);
2793 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2794 "discovered direct call to non-function in %s/%i, "
2795 "making it __builtin_unreachable\n",
2796 ie->caller->name (), ie->caller->order);
2799 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2800 callee = cgraph_get_create_node (target);
2801 unreachable = true;
2803 else
2804 callee = cgraph_get_node (target);
2806 else
2807 callee = cgraph_get_node (target);
2809 /* Because may-edges are not explicitly represented and vtable may be external,
2810 we may create the first reference to the object in the unit. */
2811 if (!callee || callee->global.inlined_to)
2814 /* We had better ensure we can refer to it.
2815 In the case of static functions we are out of luck, since we already
2816 removed its body. In the case of public functions we may or may
2817 not introduce the reference. */
2818 if (!canonicalize_constructor_val (target, NULL)
2819 || !TREE_PUBLIC (target))
2821 if (dump_file)
2822 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2823 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2824 xstrdup (ie->caller->name ()),
2825 ie->caller->order,
2826 xstrdup (ie->callee->name ()),
2827 ie->callee->order);
2828 return NULL;
2830 callee = cgraph_get_create_node (target);
2833 if (!dbg_cnt (devirt))
2834 return NULL;
2836 ipa_check_create_node_params ();
2838 /* We cannot make edges to inline clones. It is a bug that someone removed
2839 the cgraph node too early. */
2840 gcc_assert (!callee->global.inlined_to);
2842 if (dump_file && !unreachable)
2844 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
2845 "(%s/%i -> %s/%i), for stmt ",
2846 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2847 xstrdup (ie->caller->name ()),
2848 ie->caller->order,
2849 xstrdup (callee->name ()),
2850 callee->order);
2851 if (ie->call_stmt)
2852 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2853 else
2854 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2856 if (dump_enabled_p ())
2858 location_t loc = gimple_location_safe (ie->call_stmt);
2860 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2861 "converting indirect call in %s to direct call to %s\n",
2862 ie->caller->name (), callee->name ());
2864 ie = cgraph_make_edge_direct (ie, callee);
2865 es = inline_edge_summary (ie);
2866 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2867 - eni_size_weights.call_cost);
2868 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2869 - eni_time_weights.call_cost);
2871 return ie;
2874 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2875 return NULL if there is none. BY_REF specifies whether the value has to
2876 be passed by reference or by value. */
2878 tree
2879 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2880 HOST_WIDE_INT offset, bool by_ref)
2882 struct ipa_agg_jf_item *item;
2883 int i;
2885 if (by_ref != agg->by_ref)
2886 return NULL;
2888 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2889 if (item->offset == offset)
2891 /* Currently we do not have clobber values; return NULL for them once
2892 we do. */
2893 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2894 return item->value;
2896 return NULL;
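/* For example, if the caller stored a function address into a field at bit
   offset 64 of a structure passed by reference, the aggregate jump function
   contains an item with offset 64 and that address as its value, and
   ipa_find_agg_cst_for_param (agg, 64, true) retrieves it; with a
   mismatched BY_REF it returns NULL.  */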
2899 /* Remove a reference to SYMBOL from the list of references of a node given by
2900 reference description RDESC. Return true if the reference has been
2901 successfully found and removed. */
2903 static bool
2904 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2906 struct ipa_ref *to_del;
2907 struct cgraph_edge *origin;
2909 origin = rdesc->cs;
2910 if (!origin)
2911 return false;
2912 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
2913 origin->lto_stmt_uid);
2914 if (!to_del)
2915 return false;
2917 to_del->remove_reference ();
2918 if (dump_file)
2919 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2920 xstrdup (origin->caller->name ()),
2921 origin->caller->order, xstrdup (symbol->name ()));
2922 return true;
2925 /* If JFUNC has a reference description with refcount different from
2926 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2927 NULL. JFUNC must be a constant jump function. */
2929 static struct ipa_cst_ref_desc *
2930 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2932 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2933 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2934 return rdesc;
2935 else
2936 return NULL;
2939 /* If the value of constant jump function JFUNC is an address of a function
2940 declaration, return the associated call graph node. Otherwise return
2941 NULL. */
2943 static cgraph_node *
2944 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2946 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2947 tree cst = ipa_get_jf_constant (jfunc);
2948 if (TREE_CODE (cst) != ADDR_EXPR
2949 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2950 return NULL;
2952 return cgraph_get_node (TREE_OPERAND (cst, 0));
2956 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2957 refcount and if it hits zero, remove the reference to the symbol described
2958 by JFUNC from the caller of the edge specified in the rdesc. Return false
2959 if either the symbol or the reference could not be found, else true. */
2961 static bool
2962 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2964 struct ipa_cst_ref_desc *rdesc;
2965 if (jfunc->type == IPA_JF_CONST
2966 && (rdesc = jfunc_rdesc_usable (jfunc))
2967 && --rdesc->refcount == 0)
2969 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
2970 if (!symbol)
2971 return false;
2973 return remove_described_reference (symbol, rdesc);
2975 return true;
2978 /* Try to find a destination for indirect edge IE that corresponds to a simple
2979 call or a call of a member function pointer and where the destination is a
2980 pointer formal parameter described by jump function JFUNC. If it can be
2981 determined, return the newly direct edge, otherwise return NULL.
2982 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2984 static struct cgraph_edge *
2985 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2986 struct ipa_jump_func *jfunc,
2987 struct ipa_node_params *new_root_info)
2989 struct cgraph_edge *cs;
2990 tree target;
2991 bool agg_contents = ie->indirect_info->agg_contents;
2993 if (ie->indirect_info->agg_contents)
2994 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2995 ie->indirect_info->offset,
2996 ie->indirect_info->by_ref);
2997 else
2998 target = ipa_value_from_jfunc (new_root_info, jfunc);
2999 if (!target)
3000 return NULL;
3001 cs = ipa_make_edge_direct_to_target (ie, target);
3003 if (cs && !agg_contents)
3005 bool ok;
3006 gcc_checking_assert (cs->callee
3007 && (cs != ie
3008 || jfunc->type != IPA_JF_CONST
3009 || !cgraph_node_for_jfunc (jfunc)
3010 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3011 ok = try_decrement_rdesc_refcount (jfunc);
3012 gcc_checking_assert (ok);
3015 return cs;
3018 /* Return the target to be used in cases of impossible devirtualization. IE
3019 and target (the latter can be NULL) are dumped when dumping is enabled. */
3021 tree
3022 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3024 if (dump_file)
3026 if (target)
3027 fprintf (dump_file,
3028 "Type inconsistent devirtualization: %s/%i->%s\n",
3029 ie->caller->name (), ie->caller->order,
3030 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3031 else
3032 fprintf (dump_file,
3033 "No devirtualization target in %s/%i\n",
3034 ie->caller->name (), ie->caller->order);
3036 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3037 cgraph_get_create_node (new_target);
3038 return new_target;
3041 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3042 call based on a formal parameter which is described by jump function JFUNC
3043 and if it can be determined, make it direct and return the direct edge.
3044 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
3045 are relative to. */
3047 static struct cgraph_edge *
3048 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3049 struct ipa_jump_func *jfunc,
3050 struct ipa_node_params *new_root_info)
3052 tree binfo, target;
3054 if (!flag_devirtualize)
3055 return NULL;
3057 /* First try to do lookup via known virtual table pointer value. */
3058 if (!ie->indirect_info->by_ref)
3060 tree vtable;
3061 unsigned HOST_WIDE_INT offset;
3062 tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
3063 ie->indirect_info->offset,
3064 true);
3065 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3067 target = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3068 vtable, offset);
3069 if (target)
3071 if ((TREE_CODE (TREE_TYPE (target)) == FUNCTION_TYPE
3072 && DECL_FUNCTION_CODE (target) == BUILT_IN_UNREACHABLE)
3073 || !possible_polymorphic_call_target_p
3074 (ie, cgraph_get_node (target)))
3075 target = ipa_impossible_devirt_target (ie, target);
3076 return ipa_make_edge_direct_to_target (ie, target);
3081 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
3083 if (!binfo)
3084 return NULL;
3086 if (TREE_CODE (binfo) != TREE_BINFO)
3088 ipa_polymorphic_call_context context;
3089 vec <cgraph_node *>targets;
3090 bool final;
3092 if (!get_polymorphic_call_info_from_invariant
3093 (&context, binfo, ie->indirect_info->otr_type,
3094 ie->indirect_info->offset))
3095 return NULL;
3096 targets = possible_polymorphic_call_targets
3097 (ie->indirect_info->otr_type,
3098 ie->indirect_info->otr_token,
3099 context, &final);
3100 if (!final || targets.length () > 1)
3101 return NULL;
3102 if (targets.length () == 1)
3103 target = targets[0]->decl;
3104 else
3105 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3107 else
3109 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
3110 ie->indirect_info->otr_type);
3111 if (binfo)
3112 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
3113 binfo);
3114 else
3115 return NULL;
3118 if (target)
3120 if (!possible_polymorphic_call_target_p (ie, cgraph_get_node (target)))
3121 target = ipa_impossible_devirt_target (ie, target);
3122 return ipa_make_edge_direct_to_target (ie, target);
3124 else
3125 return NULL;
3128 /* Update the param called notes associated with NODE when CS is being inlined,
3129 assuming NODE is (potentially indirectly) inlined into CS->callee.
3130 Moreover, if the callee is discovered to be constant, create a new cgraph
3131 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3132 unless NEW_EDGES is NULL. Return true iff new edges were created. */
3134 static bool
3135 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3136 struct cgraph_node *node,
3137 vec<cgraph_edge_p> *new_edges)
3139 struct ipa_edge_args *top;
3140 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3141 struct ipa_node_params *new_root_info;
3142 bool res = false;
3144 ipa_check_create_edge_args ();
3145 top = IPA_EDGE_REF (cs);
3146 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3147 ? cs->caller->global.inlined_to
3148 : cs->caller);
3150 for (ie = node->indirect_calls; ie; ie = next_ie)
3152 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3153 struct ipa_jump_func *jfunc;
3154 int param_index;
3156 next_ie = ie->next_callee;
3158 if (ici->param_index == -1)
3159 continue;
3161 /* We must check range due to calls with variable number of arguments: */
3162 if (ici->param_index >= ipa_get_cs_argument_count (top))
3164 ici->param_index = -1;
3165 continue;
3168 param_index = ici->param_index;
3169 jfunc = ipa_get_ith_jump_func (top, param_index);
3171 if (!flag_indirect_inlining)
3172 new_direct_edge = NULL;
3173 else if (ici->polymorphic)
3174 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
3175 new_root_info);
3176 else
3177 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3178 new_root_info);
3179 /* If speculation was removed, then we need to do nothing. */
3180 if (new_direct_edge && new_direct_edge != ie)
3182 new_direct_edge->indirect_inlining_edge = 1;
3183 top = IPA_EDGE_REF (cs);
3184 res = true;
3186 else if (new_direct_edge)
3188 new_direct_edge->indirect_inlining_edge = 1;
3189 if (new_direct_edge->call_stmt)
3190 new_direct_edge->call_stmt_cannot_inline_p
3191 = !gimple_check_call_matching_types (
3192 new_direct_edge->call_stmt,
3193 new_direct_edge->callee->decl, false);
3194 if (new_edges)
3196 new_edges->safe_push (new_direct_edge);
3197 res = true;
3199 top = IPA_EDGE_REF (cs);
3201 else if (jfunc->type == IPA_JF_PASS_THROUGH
3202 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3204 if ((ici->agg_contents
3205 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
3206 || (ici->polymorphic
3207 && !ipa_get_jf_pass_through_type_preserved (jfunc)))
3208 ici->param_index = -1;
3209 else
3210 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3212 else if (jfunc->type == IPA_JF_ANCESTOR)
3214 if ((ici->agg_contents
3215 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
3216 || (ici->polymorphic
3217 && !ipa_get_jf_ancestor_type_preserved (jfunc)))
3218 ici->param_index = -1;
3219 else
3221 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3222 if (ipa_get_jf_ancestor_offset (jfunc))
3223 ici->outer_type = NULL;
3224 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3227 else
3228 /* Either we can find a destination for this edge now or never. */
3229 ici->param_index = -1;
3232 return res;
3235 /* Recursively traverse subtree of NODE (including node) made of inlined
3236 cgraph_edges when CS has been inlined and invoke
3237 update_indirect_edges_after_inlining on all nodes and
3238 update_jump_functions_after_inlining on all non-inlined edges that lead out
3239 of this subtree. Newly discovered indirect edges will be added to
3240 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3241 created. */
3243 static bool
3244 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3245 struct cgraph_node *node,
3246 vec<cgraph_edge_p> *new_edges)
3248 struct cgraph_edge *e;
3249 bool res;
3251 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3253 for (e = node->callees; e; e = e->next_callee)
3254 if (!e->inline_failed)
3255 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3256 else
3257 update_jump_functions_after_inlining (cs, e);
3258 for (e = node->indirect_calls; e; e = e->next_callee)
3259 update_jump_functions_after_inlining (cs, e);
3261 return res;
3264 /* Combine two controlled uses counts as done during inlining. */
3266 static int
3267 combine_controlled_uses_counters (int c, int d)
3269 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3270 return IPA_UNDESCRIBED_USE;
3271 else
3272 return c + d - 1;
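/* For example, combining c == 2 controlled uses in the new root with d == 3
   in the inlined callee yields 2 + 3 - 1 == 4; the subtraction accounts for
   the use that was the argument of the now-inlined call itself.  */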
3275 /* Propagate the number of controlled users from CS->callee to the new root
3276 of the tree of inlined nodes. */
3278 static void
3279 propagate_controlled_uses (struct cgraph_edge *cs)
3281 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3282 struct cgraph_node *new_root = cs->caller->global.inlined_to
3283 ? cs->caller->global.inlined_to : cs->caller;
3284 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3285 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3286 int count, i;
3288 count = MIN (ipa_get_cs_argument_count (args),
3289 ipa_get_param_count (old_root_info));
3290 for (i = 0; i < count; i++)
3292 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3293 struct ipa_cst_ref_desc *rdesc;
3295 if (jf->type == IPA_JF_PASS_THROUGH)
3297 int src_idx, c, d;
3298 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3299 c = ipa_get_controlled_uses (new_root_info, src_idx);
3300 d = ipa_get_controlled_uses (old_root_info, i);
3302 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3303 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3304 c = combine_controlled_uses_counters (c, d);
3305 ipa_set_controlled_uses (new_root_info, src_idx, c);
3306 if (c == 0 && new_root_info->ipcp_orig_node)
3308 struct cgraph_node *n;
3309 struct ipa_ref *ref;
3310 tree t = new_root_info->known_vals[src_idx];
3312 if (t && TREE_CODE (t) == ADDR_EXPR
3313 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3314 && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
3315 && (ref = new_root->find_reference (n, NULL, 0)))
3317 if (dump_file)
3318 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3319 "reference from %s/%i to %s/%i.\n",
3320 xstrdup (new_root->name ()),
3321 new_root->order,
3322 xstrdup (n->name ()), n->order);
3323 ref->remove_reference ();
3327 else if (jf->type == IPA_JF_CONST
3328 && (rdesc = jfunc_rdesc_usable (jf)))
3330 int d = ipa_get_controlled_uses (old_root_info, i);
3331 int c = rdesc->refcount;
3332 rdesc->refcount = combine_controlled_uses_counters (c, d);
3333 if (rdesc->refcount == 0)
3335 tree cst = ipa_get_jf_constant (jf);
3336 struct cgraph_node *n;
3337 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3338 && TREE_CODE (TREE_OPERAND (cst, 0))
3339 == FUNCTION_DECL);
3340 n = cgraph_get_node (TREE_OPERAND (cst, 0));
3341 if (n)
3343 struct cgraph_node *clone;
3344 bool ok;
3345 ok = remove_described_reference (n, rdesc);
3346 gcc_checking_assert (ok);
3348 clone = cs->caller;
3349 while (clone->global.inlined_to
3350 && clone != rdesc->cs->caller
3351 && IPA_NODE_REF (clone)->ipcp_orig_node)
3353 struct ipa_ref *ref;
3354 ref = clone->find_reference (n, NULL, 0);
3355 if (ref)
3357 if (dump_file)
3358 fprintf (dump_file, "ipa-prop: Removing "
3359 "cloning-created reference "
3360 "from %s/%i to %s/%i.\n",
3361 xstrdup (clone->name ()),
3362 clone->order,
3363 xstrdup (n->name ()),
3364 n->order);
3365 ref->remove_reference ();
3367 clone = clone->callers->caller;
3374 for (i = ipa_get_param_count (old_root_info);
3375 i < ipa_get_cs_argument_count (args);
3376 i++)
3378 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3380 if (jf->type == IPA_JF_CONST)
3382 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3383 if (rdesc)
3384 rdesc->refcount = IPA_UNDESCRIBED_USE;
3386 else if (jf->type == IPA_JF_PASS_THROUGH)
3387 ipa_set_controlled_uses (new_root_info,
3388 jf->value.pass_through.formal_id,
3389 IPA_UNDESCRIBED_USE);
3393 /* Update jump functions and call note functions on inlining the call site CS.
3394 CS is expected to lead to a node already cloned by
3395 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3396 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3397 created. */
3399 bool
3400 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3401 vec<cgraph_edge_p> *new_edges)
3403 bool changed;
3404 /* Do nothing if the preparation phase has not been carried out yet
3405 (i.e. during early inlining). */
3406 if (!ipa_node_params_vector.exists ())
3407 return false;
3408 gcc_assert (ipa_edge_args_vector);
3410 propagate_controlled_uses (cs);
3411 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3413 return changed;
3416 /* Frees all dynamically allocated structures that the argument info points
3417 to. */
3419 void
3420 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3422 vec_free (args->jump_functions);
3423 memset (args, 0, sizeof (*args));
3426 /* Free all ipa_edge structures. */
3428 void
3429 ipa_free_all_edge_args (void)
3431 int i;
3432 struct ipa_edge_args *args;
3434 if (!ipa_edge_args_vector)
3435 return;
3437 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3438 ipa_free_edge_args_substructures (args);
3440 vec_free (ipa_edge_args_vector);
3443 /* Frees all dynamically allocated structures that the param info points
3444 to. */
3446 void
3447 ipa_free_node_params_substructures (struct ipa_node_params *info)
3449 info->descriptors.release ();
3450 free (info->lattices);
3451 /* Lattice values and their sources are deallocated with their allocation
3452 pool. */
3453 info->known_vals.release ();
3454 memset (info, 0, sizeof (*info));
3457 /* Free all ipa_node_params structures. */
3459 void
3460 ipa_free_all_node_params (void)
3462 int i;
3463 struct ipa_node_params *info;
3465 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
3466 ipa_free_node_params_substructures (info);
3468 ipa_node_params_vector.release ();
3471 /* Set the aggregate replacements of NODE to be AGGVALS. */
3473 void
3474 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3475 struct ipa_agg_replacement_value *aggvals)
3477 if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
3478 vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);
3480 (*ipa_node_agg_replacements)[node->uid] = aggvals;
3483 /* Hook that is called by cgraph.c when an edge is removed. */
3485 static void
3486 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3488 struct ipa_edge_args *args;
3490 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3491 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3492 return;
3494 args = IPA_EDGE_REF (cs);
3495 if (args->jump_functions)
3497 struct ipa_jump_func *jf;
3498 int i;
3499 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3501 struct ipa_cst_ref_desc *rdesc;
3502 try_decrement_rdesc_refcount (jf);
3503 if (jf->type == IPA_JF_CONST
3504 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3505 && rdesc->cs == cs)
3506 rdesc->cs = NULL;
3510 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3513 /* Hook that is called by cgraph.c when a node is removed. */
3515 static void
3516 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3518 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3519 if (ipa_node_params_vector.length () > (unsigned)node->uid)
3520 ipa_free_node_params_substructures (IPA_NODE_REF (node));
3521 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3522 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
3525 /* Hook that is called by cgraph.c when an edge is duplicated. */
3527 static void
3528 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3529 __attribute__((unused)) void *data)
3531 struct ipa_edge_args *old_args, *new_args;
3532 unsigned int i;
3534 ipa_check_create_edge_args ();
3536 old_args = IPA_EDGE_REF (src);
3537 new_args = IPA_EDGE_REF (dst);
3539 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3541 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3543 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3544 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3546 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3548 if (src_jf->type == IPA_JF_CONST)
3550 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3552 if (!src_rdesc)
3553 dst_jf->value.constant.rdesc = NULL;
3554 else if (src->caller == dst->caller)
3556 struct ipa_ref *ref;
3557 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3558 gcc_checking_assert (n);
3559 ref = src->caller->find_reference (n, src->call_stmt,
3560 src->lto_stmt_uid);
3561 gcc_checking_assert (ref);
3562 dst->caller->clone_reference (ref, ref->stmt);
3564 gcc_checking_assert (ipa_refdesc_pool);
3565 struct ipa_cst_ref_desc *dst_rdesc
3566 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3567 dst_rdesc->cs = dst;
3568 dst_rdesc->refcount = src_rdesc->refcount;
3569 dst_rdesc->next_duplicate = NULL;
3570 dst_jf->value.constant.rdesc = dst_rdesc;
3572 else if (src_rdesc->cs == src)
3574 struct ipa_cst_ref_desc *dst_rdesc;
3575 gcc_checking_assert (ipa_refdesc_pool);
3576 dst_rdesc
3577 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3578 dst_rdesc->cs = dst;
3579 dst_rdesc->refcount = src_rdesc->refcount;
3580 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3581 src_rdesc->next_duplicate = dst_rdesc;
3582 dst_jf->value.constant.rdesc = dst_rdesc;
3584 else
3586 struct ipa_cst_ref_desc *dst_rdesc;
3587 /* This can happen during inlining, when a JFUNC can refer to a
3588 reference taken in a function up in the tree of inline clones.
3589 We need to find the duplicate that refers to our tree of
3590 inline clones. */
3592 gcc_assert (dst->caller->global.inlined_to);
3593 for (dst_rdesc = src_rdesc->next_duplicate;
3594 dst_rdesc;
3595 dst_rdesc = dst_rdesc->next_duplicate)
3597 struct cgraph_node *top;
3598 top = dst_rdesc->cs->caller->global.inlined_to
3599 ? dst_rdesc->cs->caller->global.inlined_to
3600 : dst_rdesc->cs->caller;
3601 if (dst->caller->global.inlined_to == top)
3602 break;
3604 gcc_assert (dst_rdesc);
3605 dst_jf->value.constant.rdesc = dst_rdesc;
3611 /* Hook that is called by cgraph.c when a node is duplicated. */
3613 static void
3614 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
3615 ATTRIBUTE_UNUSED void *data)
3617 struct ipa_node_params *old_info, *new_info;
3618 struct ipa_agg_replacement_value *old_av, *new_av;
3620 ipa_check_create_node_params ();
3621 old_info = IPA_NODE_REF (src);
3622 new_info = IPA_NODE_REF (dst);
3624 new_info->descriptors = old_info->descriptors.copy ();
3625 new_info->lattices = NULL;
3626 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3628 new_info->analysis_done = old_info->analysis_done;
3629 new_info->node_enqueued = old_info->node_enqueued;
3631 old_av = ipa_get_agg_replacements_for_node (src);
3632 if (!old_av)
3633 return;
3635 new_av = NULL;
3636 while (old_av)
3638 struct ipa_agg_replacement_value *v;
3640 v = ggc_alloc<ipa_agg_replacement_value> ();
3641 memcpy (v, old_av, sizeof (*v));
3642 v->next = new_av;
3643 new_av = v;
3644 old_av = old_av->next;
3646 ipa_set_node_agg_value_chain (dst, new_av);
3650 /* Analyze newly added function into callgraph. */
3652 static void
3653 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3655 if (cgraph_function_with_gimple_body_p (node))
3656 ipa_analyze_node (node);
3659 /* Register our cgraph hooks if they are not already there. */
3661 void
3662 ipa_register_cgraph_hooks (void)
3664 if (!edge_removal_hook_holder)
3665 edge_removal_hook_holder =
3666 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3667 if (!node_removal_hook_holder)
3668 node_removal_hook_holder =
3669 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
3670 if (!edge_duplication_hook_holder)
3671 edge_duplication_hook_holder =
3672 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3673 if (!node_duplication_hook_holder)
3674 node_duplication_hook_holder =
3675 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
3676 function_insertion_hook_holder =
3677 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
3680 /* Unregister our cgraph hooks. */
3682 static void
3683 ipa_unregister_cgraph_hooks (void)
3685 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
3686 edge_removal_hook_holder = NULL;
3687 cgraph_remove_node_removal_hook (node_removal_hook_holder);
3688 node_removal_hook_holder = NULL;
3689 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
3690 edge_duplication_hook_holder = NULL;
3691 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
3692 node_duplication_hook_holder = NULL;
3693 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
3694 function_insertion_hook_holder = NULL;
3697 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3698 longer needed after ipa-cp. */
3700 void
3701 ipa_free_all_structures_after_ipa_cp (void)
3703 if (!optimize)
3705 ipa_free_all_edge_args ();
3706 ipa_free_all_node_params ();
3707 free_alloc_pool (ipcp_sources_pool);
3708 free_alloc_pool (ipcp_values_pool);
3709 free_alloc_pool (ipcp_agg_lattice_pool);
3710 ipa_unregister_cgraph_hooks ();
3711 if (ipa_refdesc_pool)
3712 free_alloc_pool (ipa_refdesc_pool);
3716 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3717 longer needed after indirect inlining. */
3719 void
3720 ipa_free_all_structures_after_iinln (void)
3722 ipa_free_all_edge_args ();
3723 ipa_free_all_node_params ();
3724 ipa_unregister_cgraph_hooks ();
3725 if (ipcp_sources_pool)
3726 free_alloc_pool (ipcp_sources_pool);
3727 if (ipcp_values_pool)
3728 free_alloc_pool (ipcp_values_pool);
3729 if (ipcp_agg_lattice_pool)
3730 free_alloc_pool (ipcp_agg_lattice_pool);
3731 if (ipa_refdesc_pool)
3732 free_alloc_pool (ipa_refdesc_pool);
3735 /* Print the parameter descriptors of function NODE to F. */
3738 void
3739 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3741 int i, count;
3742 struct ipa_node_params *info;
3744 if (!node->definition)
3745 return;
3746 info = IPA_NODE_REF (node);
3747 fprintf (f, " function %s/%i parameter descriptors:\n",
3748 node->name (), node->order);
3749 count = ipa_get_param_count (info);
3750 for (i = 0; i < count; i++)
3752 int c;
3754 fprintf (f, " ");
3755 ipa_dump_param (f, info, i);
3756 if (ipa_is_param_used (info, i))
3757 fprintf (f, " used");
3758 c = ipa_get_controlled_uses (info, i);
3759 if (c == IPA_UNDESCRIBED_USE)
3760 fprintf (f, " undescribed_use");
3761 else
3762 fprintf (f, " controlled_uses=%i", c);
3763 fprintf (f, "\n");
3767 /* Print ipa_tree_map data structures of all functions in the
3768 callgraph to F. */
3770 void
3771 ipa_print_all_params (FILE * f)
3773 struct cgraph_node *node;
3775 fprintf (f, "\nFunction parameters:\n");
3776 FOR_EACH_FUNCTION (node)
3777 ipa_print_node_params (f, node);
3780 /* Return a heap-allocated vector containing the formal parameters of FNDECL. */
3782 vec<tree>
3783 ipa_get_vector_of_formal_parms (tree fndecl)
3785 vec<tree> args;
3786 int count;
3787 tree parm;
3789 gcc_assert (!flag_wpa);
3790 count = count_formal_params (fndecl);
3791 args.create (count);
3792 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3793 args.quick_push (parm);
3795 return args;
3798 /* Return a heap-allocated vector containing the types of the formal
3799 parameters of function type FNTYPE. */
3801 vec<tree>
3802 ipa_get_vector_of_formal_parm_types (tree fntype)
3804 vec<tree> types;
3805 int count = 0;
3806 tree t;
3808 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3809 count++;
3811 types.create (count);
3812 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3813 types.quick_push (TREE_VALUE (t));
3815 return types;
3818 /* Modify the function declaration FNDECL and its type according to the plan in
3819 ADJUSTMENTS. This also sets the base fields of the individual adjustment
3820 structures to reflect the actual parameters being modified, which are
3821 determined by the base_index field. */
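/* As an illustrative sketch of such a plan (the declarations below are
   hypothetical, not code from this file): given

     void foo (int a, struct S *s);

   an ADJUSTMENTS vector consisting of a single IPA_PARM_OP_COPY entry
   with base_index 1 rewrites the declaration to

     void foo (struct S *s);

   dropping A entirely, while an IPA_PARM_OP_NEW entry would instead
   synthesize a fresh PARM_DECL at that position, named with the "SYNTH"
   prefix unless arg_prefix overrides it.  */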
3823 void
3824 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3826 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3827 tree orig_type = TREE_TYPE (fndecl);
3828 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3830 /* The following test is an ugly hack; some functions simply don't have any
3831 arguments in their type. This is probably a bug but well... */
3832 bool care_for_types = (old_arg_types != NULL_TREE);
3833 bool last_parm_void;
3834 vec<tree> otypes;
3835 if (care_for_types)
3837 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3838 == void_type_node);
3839 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3840 if (last_parm_void)
3841 gcc_assert (oparms.length () + 1 == otypes.length ());
3842 else
3843 gcc_assert (oparms.length () == otypes.length ());
3845 else
3847 last_parm_void = false;
3848 otypes.create (0);
3851 int len = adjustments.length ();
3852 tree *link = &DECL_ARGUMENTS (fndecl);
3853 tree new_arg_types = NULL;
3854 for (int i = 0; i < len; i++)
3856 struct ipa_parm_adjustment *adj;
3857 gcc_assert (link);
3859 adj = &adjustments[i];
3860 tree parm;
3861 if (adj->op == IPA_PARM_OP_NEW)
3862 parm = NULL;
3863 else
3864 parm = oparms[adj->base_index];
3865 adj->base = parm;
3867 if (adj->op == IPA_PARM_OP_COPY)
3869 if (care_for_types)
3870 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3871 new_arg_types);
3872 *link = parm;
3873 link = &DECL_CHAIN (parm);
3875 else if (adj->op != IPA_PARM_OP_REMOVE)
3877 tree new_parm;
3878 tree ptype;
3880 if (adj->by_ref)
3881 ptype = build_pointer_type (adj->type);
3882 else
3884 ptype = adj->type;
3885 if (is_gimple_reg_type (ptype))
3887 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3888 if (TYPE_ALIGN (ptype) < malign)
3889 ptype = build_aligned_type (ptype, malign);
3893 if (care_for_types)
3894 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3896 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3897 ptype);
3898 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
3899 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3900 DECL_ARTIFICIAL (new_parm) = 1;
3901 DECL_ARG_TYPE (new_parm) = ptype;
3902 DECL_CONTEXT (new_parm) = fndecl;
3903 TREE_USED (new_parm) = 1;
3904 DECL_IGNORED_P (new_parm) = 1;
3905 layout_decl (new_parm, 0);
3907 if (adj->op == IPA_PARM_OP_NEW)
3908 adj->base = NULL;
3909 else
3910 adj->base = parm;
3911 adj->new_decl = new_parm;
3913 *link = new_parm;
3914 link = &DECL_CHAIN (new_parm);
3918 *link = NULL_TREE;
3920 tree new_reversed = NULL;
3921 if (care_for_types)
3923 new_reversed = nreverse (new_arg_types);
3924 if (last_parm_void)
3926 if (new_reversed)
3927 TREE_CHAIN (new_arg_types) = void_list_node;
3928 else
3929 new_reversed = void_list_node;
3933 /* Use copy_node to preserve as much as possible from the original type
3934 (debug info, attribute lists etc.).
3935 The exception is METHOD_TYPEs, which must have a THIS argument.
3936 When we are asked to remove it, we need to build a new FUNCTION_TYPE
3937 instead. */
3938 tree new_type = NULL;
3939 if (TREE_CODE (orig_type) != METHOD_TYPE
3940 || (adjustments[0].op == IPA_PARM_OP_COPY
3941 && adjustments[0].base_index == 0))
3943 new_type = build_distinct_type_copy (orig_type);
3944 TYPE_ARG_TYPES (new_type) = new_reversed;
3946 else
3948 new_type
3949 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3950 new_reversed));
3951 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3952 DECL_VINDEX (fndecl) = NULL_TREE;
3955 /* When the signature changes, we need to clear builtin info. */
3956 if (DECL_BUILT_IN (fndecl))
3958 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3959 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3962 /* This is a new type, not a copy of an old type. Need to reassociate
3963 variants. We can handle everything except the main variant lazily. */
3964 tree t = TYPE_MAIN_VARIANT (orig_type);
3965 if (orig_type != t)
3967 TYPE_MAIN_VARIANT (new_type) = t;
3968 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
3969 TYPE_NEXT_VARIANT (t) = new_type;
3971 else
3973 TYPE_MAIN_VARIANT (new_type) = new_type;
3974 TYPE_NEXT_VARIANT (new_type) = NULL;
3977 TREE_TYPE (fndecl) = new_type;
3978 DECL_VIRTUAL_P (fndecl) = 0;
3979 DECL_LANG_SPECIFIC (fndecl) = NULL;
3980 otypes.release ();
3981 oparms.release ();
3984 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
3985 If this is a directly recursive call, CS must be NULL. Otherwise it must
3986 contain the corresponding call graph edge. */
3988 void
3989 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
3990 ipa_parm_adjustment_vec adjustments)
3992 struct cgraph_node *current_node = cgraph_get_node (current_function_decl);
3993 vec<tree> vargs;
3994 vec<tree, va_gc> **debug_args = NULL;
3995 gimple new_stmt;
3996 gimple_stmt_iterator gsi, prev_gsi;
3997 tree callee_decl;
3998 int i, len;
4000 len = adjustments.length ();
4001 vargs.create (len);
4002 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
4003 current_node->remove_stmt_references (stmt);
4005 gsi = gsi_for_stmt (stmt);
4006 prev_gsi = gsi;
4007 gsi_prev (&prev_gsi);
4008 for (i = 0; i < len; i++)
4010 struct ipa_parm_adjustment *adj;
4012 adj = &adjustments[i];
4014 if (adj->op == IPA_PARM_OP_COPY)
4016 tree arg = gimple_call_arg (stmt, adj->base_index);
4018 vargs.quick_push (arg);
4020 else if (adj->op != IPA_PARM_OP_REMOVE)
4022 tree expr, base, off;
4023 location_t loc;
4024 unsigned int deref_align = 0;
4025 bool deref_base = false;
4027 /* When we create a new parameter out of the value of the old one, we
4028 can do the following kinds of transformations:
4030 - A scalar passed by reference is converted to a scalar passed by
4031 value. (adj->by_ref is false and the type of the original
4032 actual argument is a pointer to a scalar).
4034 - A part of an aggregate is passed instead of the whole aggregate.
4035 The part can be passed either by value or by reference, this is
4036 determined by value of adj->by_ref. Moreover, the code below
4037 handles both situations when the original aggregate is passed by
4038 value (its type is not a pointer) and when it is passed by
4039 reference (it is a pointer to an aggregate).
4041 When the new argument is passed by reference (adj->by_ref is true)
4042 it must be a part of an aggregate and therefore we form it by
4043 simply taking the address of a reference inside the original
4044 aggregate. */
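/* For instance (a hypothetical caller, only for illustration): when the
   callee used to read nothing but *P from "foo (int *p)", the call

     foo (&x);

   becomes

     foo (x);

   with the new FOO taking an int by value; and when the callee only
   accessed field F of an aggregate parameter, the call

     bar (s);

   becomes

     bar (s.f);

   passing just the accessed part, by value or by reference as directed
   by adj->by_ref.  */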
4046 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
4047 base = gimple_call_arg (stmt, adj->base_index);
4048 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
4049 : EXPR_LOCATION (base);
4051 if (TREE_CODE (base) != ADDR_EXPR
4052 && POINTER_TYPE_P (TREE_TYPE (base)))
4053 off = build_int_cst (adj->alias_ptr_type,
4054 adj->offset / BITS_PER_UNIT);
4055 else
4057 HOST_WIDE_INT base_offset;
4058 tree prev_base;
4059 bool addrof;
4061 if (TREE_CODE (base) == ADDR_EXPR)
4063 base = TREE_OPERAND (base, 0);
4064 addrof = true;
4066 else
4067 addrof = false;
4068 prev_base = base;
4069 base = get_addr_base_and_unit_offset (base, &base_offset);
4070 /* Aggregate arguments can have non-invariant addresses. */
4071 if (!base)
4073 base = build_fold_addr_expr (prev_base);
4074 off = build_int_cst (adj->alias_ptr_type,
4075 adj->offset / BITS_PER_UNIT);
4077 else if (TREE_CODE (base) == MEM_REF)
4079 if (!addrof)
4081 deref_base = true;
4082 deref_align = TYPE_ALIGN (TREE_TYPE (base));
4084 off = build_int_cst (adj->alias_ptr_type,
4085 base_offset
4086 + adj->offset / BITS_PER_UNIT);
4087 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
4088 off);
4089 base = TREE_OPERAND (base, 0);
4091 else
4093 off = build_int_cst (adj->alias_ptr_type,
4094 base_offset
4095 + adj->offset / BITS_PER_UNIT);
4096 base = build_fold_addr_expr (base);
4100 if (!adj->by_ref)
4102 tree type = adj->type;
4103 unsigned int align;
4104 unsigned HOST_WIDE_INT misalign;
4106 if (deref_base)
4108 align = deref_align;
4109 misalign = 0;
4111 else
4113 get_pointer_alignment_1 (base, &align, &misalign);
4114 if (TYPE_ALIGN (type) > align)
4115 align = TYPE_ALIGN (type);
4117 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
4118 * BITS_PER_UNIT);
4119 misalign = misalign & (align - 1);
4120 if (misalign != 0)
4121 align = (misalign & -misalign);
4122 if (align < TYPE_ALIGN (type))
4123 type = build_aligned_type (type, align);
4124 base = force_gimple_operand_gsi (&gsi, base,
4125 true, NULL, true, GSI_SAME_STMT);
4126 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
4127 /* If expr is not a valid gimple call argument, emit
4128 a load into a temporary. */
4129 if (is_gimple_reg_type (TREE_TYPE (expr)))
4131 gimple tem = gimple_build_assign (NULL_TREE, expr);
4132 if (gimple_in_ssa_p (cfun))
4134 gimple_set_vuse (tem, gimple_vuse (stmt));
4135 expr = make_ssa_name (TREE_TYPE (expr), tem);
4137 else
4138 expr = create_tmp_reg (TREE_TYPE (expr), NULL);
4139 gimple_assign_set_lhs (tem, expr);
4140 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4143 else
4145 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4146 expr = build_fold_addr_expr (expr);
4147 expr = force_gimple_operand_gsi (&gsi, expr,
4148 true, NULL, true, GSI_SAME_STMT);
4150 vargs.quick_push (expr);
4152 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4154 unsigned int ix;
4155 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4156 gimple def_temp;
4158 arg = gimple_call_arg (stmt, adj->base_index);
4159 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4161 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4162 continue;
4163 arg = fold_convert_loc (gimple_location (stmt),
4164 TREE_TYPE (origin), arg);
4166 if (debug_args == NULL)
4167 debug_args = decl_debug_args_insert (callee_decl);
4168 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4169 if (ddecl == origin)
4171 ddecl = (**debug_args)[ix + 1];
4172 break;
4174 if (ddecl == NULL)
4176 ddecl = make_node (DEBUG_EXPR_DECL);
4177 DECL_ARTIFICIAL (ddecl) = 1;
4178 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4179 DECL_MODE (ddecl) = DECL_MODE (origin);
4181 vec_safe_push (*debug_args, origin);
4182 vec_safe_push (*debug_args, ddecl);
4184 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4185 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4189 if (dump_file && (dump_flags & TDF_DETAILS))
4191 fprintf (dump_file, "replacing stmt:");
4192 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4195 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4196 vargs.release ();
4197 if (gimple_call_lhs (stmt))
4198 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4200 gimple_set_block (new_stmt, gimple_block (stmt));
4201 if (gimple_has_location (stmt))
4202 gimple_set_location (new_stmt, gimple_location (stmt));
4203 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4204 gimple_call_copy_flags (new_stmt, stmt);
4205 if (gimple_in_ssa_p (cfun))
4207 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4208 if (gimple_vdef (stmt))
4210 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4211 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4215 if (dump_file && (dump_flags & TDF_DETAILS))
4217 fprintf (dump_file, "with stmt:");
4218 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4219 fprintf (dump_file, "\n");
4221 gsi_replace (&gsi, new_stmt, true);
4222 if (cs)
4223 cgraph_set_call_stmt (cs, new_stmt);
4226 ipa_record_stmt_references (current_node, gsi_stmt (gsi));
4227 gsi_prev (&gsi);
4229 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4232 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4233 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4234 specifies whether the function should care about type incompatibility between
4235 the current and new expressions. If it is false, the function will leave
4236 incompatibility issues to the caller. Return true iff the expression
4237 was modified. */
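/* For example (an illustrative scenario): if a parameter "struct S *p"
   was reduced to a by-value replacement of p->f, then an occurrence of
   p->f in the body matches the corresponding adjustment entry and *EXPR
   is rewritten to the new PARM_DECL; had by_ref been set, the
   replacement would be a MEM_REF of new_decl instead.  */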
4239 bool
4240 ipa_modify_expr (tree *expr, bool convert,
4241 ipa_parm_adjustment_vec adjustments)
4243 struct ipa_parm_adjustment *cand
4244 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4245 if (!cand)
4246 return false;
4248 tree src;
4249 if (cand->by_ref)
4250 src = build_simple_mem_ref (cand->new_decl);
4251 else
4252 src = cand->new_decl;
4254 if (dump_file && (dump_flags & TDF_DETAILS))
4256 fprintf (dump_file, "About to replace expr ");
4257 print_generic_expr (dump_file, *expr, 0);
4258 fprintf (dump_file, " with ");
4259 print_generic_expr (dump_file, src, 0);
4260 fprintf (dump_file, "\n");
4263 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4265 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4266 *expr = vce;
4268 else
4269 *expr = src;
4270 return true;
4273 /* If T is an SSA_NAME, return NULL if it is not a default def, or
4274 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4275 the base variable is always returned, regardless of whether it is a
4276 default def. Return T if it is not an SSA_NAME. */
4278 static tree
4279 get_ssa_base_param (tree t, bool ignore_default_def)
4281 if (TREE_CODE (t) == SSA_NAME)
4283 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4284 return SSA_NAME_VAR (t);
4285 else
4286 return NULL_TREE;
4288 return t;
4291 /* Given an expression, return an adjustment entry specifying the
4292 transformation to be done on EXPR. If no suitable adjustment entry
4293 is found, return NULL.
4295 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4296 default def, otherwise bail on them.
4298 If CONVERT is non-NULL, this function will set *CONVERT if the
4299 expression provided is a component reference. ADJUSTMENTS is the
4300 adjustments vector. */
4302 ipa_parm_adjustment *
4303 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4304 ipa_parm_adjustment_vec adjustments,
4305 bool ignore_default_def)
4307 if (TREE_CODE (**expr) == BIT_FIELD_REF
4308 || TREE_CODE (**expr) == IMAGPART_EXPR
4309 || TREE_CODE (**expr) == REALPART_EXPR)
4311 *expr = &TREE_OPERAND (**expr, 0);
4312 if (convert)
4313 *convert = true;
4316 HOST_WIDE_INT offset, size, max_size;
4317 tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
4318 if (!base || size == -1 || max_size == -1)
4319 return NULL;
4321 if (TREE_CODE (base) == MEM_REF)
4323 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4324 base = TREE_OPERAND (base, 0);
4327 base = get_ssa_base_param (base, ignore_default_def);
4328 if (!base || TREE_CODE (base) != PARM_DECL)
4329 return NULL;
4331 struct ipa_parm_adjustment *cand = NULL;
4332 unsigned int len = adjustments.length ();
4333 for (unsigned i = 0; i < len; i++)
4335 struct ipa_parm_adjustment *adj = &adjustments[i];
4337 if (adj->base == base
4338 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4340 cand = adj;
4341 break;
4345 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4346 return NULL;
4347 return cand;
4350 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4352 static bool
4353 index_in_adjustments_multiple_times_p (int base_index,
4354 ipa_parm_adjustment_vec adjustments)
4356 int i, len = adjustments.length ();
4357 bool one = false;
4359 for (i = 0; i < len; i++)
4361 struct ipa_parm_adjustment *adj;
4362 adj = &adjustments[i];
4364 if (adj->base_index == base_index)
4366 if (one)
4367 return true;
4368 else
4369 one = true;
4372 return false;
4376 /* Return adjustments that should have the same effect on function parameters
4377 and call arguments as if they were first changed according to adjustments in
4378 INNER and then by adjustments in OUTER. */
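/* For instance (an illustrative composition): if INNER turns original
   parameters (a, b, c) into (a, c) by removing b, and OUTER then removes
   the first parameter of that intermediate signature, the combined
   vector describes the direct transition from (a, b, c) to (c): removals
   for both a and b plus a copy of c.  */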
4380 ipa_parm_adjustment_vec
4381 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4382 ipa_parm_adjustment_vec outer)
4384 int i, outlen = outer.length ();
4385 int inlen = inner.length ();
4386 int removals = 0;
4387 ipa_parm_adjustment_vec adjustments, tmp;
4389 tmp.create (inlen);
4390 for (i = 0; i < inlen; i++)
4392 struct ipa_parm_adjustment *n;
4393 n = &inner[i];
4395 if (n->op == IPA_PARM_OP_REMOVE)
4396 removals++;
4397 else
4399 /* FIXME: Handling of new arguments is not implemented yet. */
4400 gcc_assert (n->op != IPA_PARM_OP_NEW);
4401 tmp.quick_push (*n);
4405 adjustments.create (outlen + removals);
4406 for (i = 0; i < outlen; i++)
4408 struct ipa_parm_adjustment r;
4409 struct ipa_parm_adjustment *out = &outer[i];
4410 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4412 memset (&r, 0, sizeof (r));
4413 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4414 if (out->op == IPA_PARM_OP_REMOVE)
4416 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4418 r.op = IPA_PARM_OP_REMOVE;
4419 adjustments.quick_push (r);
4421 continue;
4423 else
4425 /* FIXME: Handling of new arguments is not implemented yet. */
4426 gcc_assert (out->op != IPA_PARM_OP_NEW);
4429 r.base_index = in->base_index;
4430 r.type = out->type;
4432 /* FIXME: Create nonlocal value too. */
4434 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4435 r.op = IPA_PARM_OP_COPY;
4436 else if (in->op == IPA_PARM_OP_COPY)
4437 r.offset = out->offset;
4438 else if (out->op == IPA_PARM_OP_COPY)
4439 r.offset = in->offset;
4440 else
4441 r.offset = in->offset + out->offset;
4442 adjustments.quick_push (r);
4445 for (i = 0; i < inlen; i++)
4447 struct ipa_parm_adjustment *n = &inner[i];
4449 if (n->op == IPA_PARM_OP_REMOVE)
4450 adjustments.quick_push (*n);
4453 tmp.release ();
4454 return adjustments;
4457 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a
4458 human-friendly way, assuming they are meant to be applied to FNDECL. */
4460 void
4461 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4462 tree fndecl)
4464 int i, len = adjustments.length ();
4465 bool first = true;
4466 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4468 fprintf (file, "IPA param adjustments: ");
4469 for (i = 0; i < len; i++)
4471 struct ipa_parm_adjustment *adj;
4472 adj = &adjustments[i];
4474 if (!first)
4475 fprintf (file, " ");
4476 else
4477 first = false;
4479 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4480 print_generic_expr (file, parms[adj->base_index], 0);
4481 if (adj->base)
4483 fprintf (file, ", base: ");
4484 print_generic_expr (file, adj->base, 0);
4486 if (adj->new_decl)
4488 fprintf (file, ", new_decl: ");
4489 print_generic_expr (file, adj->new_decl, 0);
4491 if (adj->new_ssa_base)
4493 fprintf (file, ", new_ssa_base: ");
4494 print_generic_expr (file, adj->new_ssa_base, 0);
4497 if (adj->op == IPA_PARM_OP_COPY)
4498 fprintf (file, ", copy_param");
4499 else if (adj->op == IPA_PARM_OP_REMOVE)
4500 fprintf (file, ", remove_param");
4501 else
4502 fprintf (file, ", offset %li", (long) adj->offset);
4503 if (adj->by_ref)
4504 fprintf (file, ", by_ref");
4505 print_node_brief (file, ", type: ", adj->type, 0);
4506 fprintf (file, "\n");
4508 parms.release ();
4511 /* Dump the linked list of aggregate replacement values AV to F. */
4513 void
4514 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4516 bool comma = false;
4517 fprintf (f, " Aggregate replacements:");
4518 for (; av; av = av->next)
4520 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4521 av->index, av->offset);
4522 print_generic_expr (f, av->value, 0);
4523 comma = true;
4525 fprintf (f, "\n");
4528 /* Stream out jump function JUMP_FUNC to OB. */
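/* Schematically, the record emitted below is (a summary of the code
   that follows, given only as an aid):

     uhwi     jump_func->type
     payload  type-specific fields, e.g. for IPA_JF_PASS_THROUGH with
              NOP_EXPR a formal_id uhwi plus a bitpack of the
              agg_preserved and type_preserved flags
     uhwi     number of aggregate items
     bitpack  agg.by_ref flag, present only when there are items
     items    an offset uhwi and a value tree for each item

   ipa_read_jump_function below must unpack the fields in exactly this
   order.  */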
4530 static void
4531 ipa_write_jump_function (struct output_block *ob,
4532 struct ipa_jump_func *jump_func)
4534 struct ipa_agg_jf_item *item;
4535 struct bitpack_d bp;
4536 int i, count;
4538 streamer_write_uhwi (ob, jump_func->type);
4539 switch (jump_func->type)
4541 case IPA_JF_UNKNOWN:
4542 break;
4543 case IPA_JF_KNOWN_TYPE:
4544 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
4545 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
4546 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
4547 break;
4548 case IPA_JF_CONST:
4549 gcc_assert (
4550 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4551 stream_write_tree (ob, jump_func->value.constant.value, true);
4552 break;
4553 case IPA_JF_PASS_THROUGH:
4554 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4555 if (jump_func->value.pass_through.operation == NOP_EXPR)
4557 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4558 bp = bitpack_create (ob->main_stream);
4559 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4560 bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
4561 streamer_write_bitpack (&bp);
4563 else
4565 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4566 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4568 break;
4569 case IPA_JF_ANCESTOR:
4570 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4571 stream_write_tree (ob, jump_func->value.ancestor.type, true);
4572 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4573 bp = bitpack_create (ob->main_stream);
4574 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4575 bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
4576 streamer_write_bitpack (&bp);
4577 break;
4580 count = vec_safe_length (jump_func->agg.items);
4581 streamer_write_uhwi (ob, count);
4582 if (count)
4584 bp = bitpack_create (ob->main_stream);
4585 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4586 streamer_write_bitpack (&bp);
4589 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4591 streamer_write_uhwi (ob, item->offset);
4592 stream_write_tree (ob, item->value, true);
4596 /* Read in jump function JUMP_FUNC from IB. */
4598 static void
4599 ipa_read_jump_function (struct lto_input_block *ib,
4600 struct ipa_jump_func *jump_func,
4601 struct cgraph_edge *cs,
4602 struct data_in *data_in)
4604 enum jump_func_type jftype;
4605 enum tree_code operation;
4606 int i, count;
4608 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4609 switch (jftype)
4611 case IPA_JF_UNKNOWN:
4612 jump_func->type = IPA_JF_UNKNOWN;
4613 break;
4614 case IPA_JF_KNOWN_TYPE:
4616 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4617 tree base_type = stream_read_tree (ib, data_in);
4618 tree component_type = stream_read_tree (ib, data_in);
4620 ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
4621 break;
4623 case IPA_JF_CONST:
4624 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4625 break;
4626 case IPA_JF_PASS_THROUGH:
4627 operation = (enum tree_code) streamer_read_uhwi (ib);
4628 if (operation == NOP_EXPR)
4630 int formal_id = streamer_read_uhwi (ib);
4631 struct bitpack_d bp = streamer_read_bitpack (ib);
4632 bool agg_preserved = bp_unpack_value (&bp, 1);
4633 bool type_preserved = bp_unpack_value (&bp, 1);
4634 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
4635 type_preserved);
4637 else
4639 tree operand = stream_read_tree (ib, data_in);
4640 int formal_id = streamer_read_uhwi (ib);
4641 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4642 operation);
4644 break;
4645 case IPA_JF_ANCESTOR:
4647 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4648 tree type = stream_read_tree (ib, data_in);
4649 int formal_id = streamer_read_uhwi (ib);
4650 struct bitpack_d bp = streamer_read_bitpack (ib);
4651 bool agg_preserved = bp_unpack_value (&bp, 1);
4652 bool type_preserved = bp_unpack_value (&bp, 1);
4654 ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
4655 type_preserved);
4656 break;
4660 count = streamer_read_uhwi (ib);
4661 vec_alloc (jump_func->agg.items, count);
4662 if (count)
4664 struct bitpack_d bp = streamer_read_bitpack (ib);
4665 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4667 for (i = 0; i < count; i++)
4669 struct ipa_agg_jf_item item;
4670 item.offset = streamer_read_uhwi (ib);
4671 item.value = stream_read_tree (ib, data_in);
4672 jump_func->agg.items->quick_push (item);
4676 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4677 relevant to indirect inlining to OB. */
4679 static void
4680 ipa_write_indirect_edge_info (struct output_block *ob,
4681 struct cgraph_edge *cs)
4683 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4684 struct bitpack_d bp;
4686 streamer_write_hwi (ob, ii->param_index);
4687 streamer_write_hwi (ob, ii->offset);
4688 bp = bitpack_create (ob->main_stream);
4689 bp_pack_value (&bp, ii->polymorphic, 1);
4690 bp_pack_value (&bp, ii->agg_contents, 1);
4691 bp_pack_value (&bp, ii->member_ptr, 1);
4692 bp_pack_value (&bp, ii->by_ref, 1);
4693 bp_pack_value (&bp, ii->maybe_in_construction, 1);
4694 bp_pack_value (&bp, ii->maybe_derived_type, 1);
4695 streamer_write_bitpack (&bp);
4697 if (ii->polymorphic)
4699 streamer_write_hwi (ob, ii->otr_token);
4700 stream_write_tree (ob, ii->otr_type, true);
4701 stream_write_tree (ob, ii->outer_type, true);
4705 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4706 relevant to indirect inlining from IB. */
4708 static void
4709 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4710 struct data_in *data_in ATTRIBUTE_UNUSED,
4711 struct cgraph_edge *cs)
4713 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4714 struct bitpack_d bp;
4716 ii->param_index = (int) streamer_read_hwi (ib);
4717 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4718 bp = streamer_read_bitpack (ib);
4719 ii->polymorphic = bp_unpack_value (&bp, 1);
4720 ii->agg_contents = bp_unpack_value (&bp, 1);
4721 ii->member_ptr = bp_unpack_value (&bp, 1);
4722 ii->by_ref = bp_unpack_value (&bp, 1);
4723 ii->maybe_in_construction = bp_unpack_value (&bp, 1);
4724 ii->maybe_derived_type = bp_unpack_value (&bp, 1);
4725 if (ii->polymorphic)
4727 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4728 ii->otr_type = stream_read_tree (ib, data_in);
4729 ii->outer_type = stream_read_tree (ib, data_in);
4733 /* Stream out NODE info to OB. */
4735 static void
4736 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4738 int node_ref;
4739 lto_symtab_encoder_t encoder;
4740 struct ipa_node_params *info = IPA_NODE_REF (node);
4741 int j;
4742 struct cgraph_edge *e;
4743 struct bitpack_d bp;
4745 encoder = ob->decl_state->symtab_node_encoder;
4746 node_ref = lto_symtab_encoder_encode (encoder, node);
4747 streamer_write_uhwi (ob, node_ref);
4749 streamer_write_uhwi (ob, ipa_get_param_count (info));
4750 for (j = 0; j < ipa_get_param_count (info); j++)
4751 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4752 bp = bitpack_create (ob->main_stream);
4753 gcc_assert (info->analysis_done
4754 || ipa_get_param_count (info) == 0);
4755 gcc_assert (!info->node_enqueued);
4756 gcc_assert (!info->ipcp_orig_node);
4757 for (j = 0; j < ipa_get_param_count (info); j++)
4758 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4759 streamer_write_bitpack (&bp);
4760 for (j = 0; j < ipa_get_param_count (info); j++)
4761 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4762 for (e = node->callees; e; e = e->next_callee)
4764 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4766 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4767 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4768 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4770 for (e = node->indirect_calls; e; e = e->next_callee)
4772 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4774 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4775 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4776 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4777 ipa_write_indirect_edge_info (ob, e);
4781 /* Stream in NODE info from IB. */
4783 static void
4784 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4785 struct data_in *data_in)
4787 struct ipa_node_params *info = IPA_NODE_REF (node);
4788 int k;
4789 struct cgraph_edge *e;
4790 struct bitpack_d bp;
4792 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4794 for (k = 0; k < ipa_get_param_count (info); k++)
4795 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4797 bp = streamer_read_bitpack (ib);
4798 if (ipa_get_param_count (info) != 0)
4799 info->analysis_done = true;
4800 info->node_enqueued = false;
4801 for (k = 0; k < ipa_get_param_count (info); k++)
4802 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4803 for (k = 0; k < ipa_get_param_count (info); k++)
4804 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4805 for (e = node->callees; e; e = e->next_callee)
4807 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4808 int count = streamer_read_uhwi (ib);
4810 if (!count)
4811 continue;
4812 vec_safe_grow_cleared (args->jump_functions, count);
4814 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4815 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4816 data_in);
4818 for (e = node->indirect_calls; e; e = e->next_callee)
4820 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4821 int count = streamer_read_uhwi (ib);
4823 if (count)
4825 vec_safe_grow_cleared (args->jump_functions, count);
4826 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4827 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4828 data_in);
4830 ipa_read_indirect_edge_info (ib, data_in, e);
4834 /* Write jump functions for all nodes in the current LTO partition. */
4836 void
4837 ipa_prop_write_jump_functions (void)
4839 struct cgraph_node *node;
4840 struct output_block *ob;
4841 unsigned int count = 0;
4842 lto_symtab_encoder_iterator lsei;
4843 lto_symtab_encoder_t encoder;
4846 if (!ipa_node_params_vector.exists ())
4847 return;
4849 ob = create_output_block (LTO_section_jump_functions);
4850 encoder = ob->decl_state->symtab_node_encoder;
4851 ob->symbol = NULL;
4852 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4853 lsei_next_function_in_partition (&lsei))
4855 node = lsei_cgraph_node (lsei);
4856 if (cgraph_function_with_gimple_body_p (node)
4857 && IPA_NODE_REF (node) != NULL)
4858 count++;
4861 streamer_write_uhwi (ob, count);
4863 /* Process all of the functions. */
4864 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4865 lsei_next_function_in_partition (&lsei))
4867 node = lsei_cgraph_node (lsei);
4868 if (cgraph_function_with_gimple_body_p (node)
4869 && IPA_NODE_REF (node) != NULL)
4870 ipa_write_node_info (ob, node);
4872 streamer_write_char_stream (ob->main_stream, 0);
4873 produce_asm (ob, NULL);
4874 destroy_output_block (ob);
4877 /* Read the jump-functions section in file FILE_DATA of length LEN with data DATA. */
4879 static void
4880 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4881 size_t len)
4883 const struct lto_function_header *header =
4884 (const struct lto_function_header *) data;
4885 const int cfg_offset = sizeof (struct lto_function_header);
4886 const int main_offset = cfg_offset + header->cfg_size;
4887 const int string_offset = main_offset + header->main_size;
4888 struct data_in *data_in;
4889 struct lto_input_block ib_main;
4890 unsigned int i;
4891 unsigned int count;
4893 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4894 header->main_size);
4896 data_in =
4897 lto_data_in_create (file_data, (const char *) data + string_offset,
4898 header->string_size, vNULL);
4899 count = streamer_read_uhwi (&ib_main);
4901 for (i = 0; i < count; i++)
4903 unsigned int index;
4904 struct cgraph_node *node;
4905 lto_symtab_encoder_t encoder;
4907 index = streamer_read_uhwi (&ib_main);
4908 encoder = file_data->symtab_node_encoder;
4909 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4910 gcc_assert (node->definition);
4911 ipa_read_node_info (&ib_main, node, data_in);
4913 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4914 len);
4915 lto_data_in_delete (data_in);
4918 /* Read ipcp jump functions. */
4920 void
4921 ipa_prop_read_jump_functions (void)
4923 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4924 struct lto_file_decl_data *file_data;
4925 unsigned int j = 0;
4927 ipa_check_create_node_params ();
4928 ipa_check_create_edge_args ();
4929 ipa_register_cgraph_hooks ();
4931 while ((file_data = file_data_vec[j++]))
4933 size_t len;
4934 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4936 if (data)
4937 ipa_prop_read_section (file_data, data, len);
4941 /* After merging units, we can get a mismatch in argument counts.
4942 Decl merging might also have rendered parameter lists obsolete. */
4945 void
4946 ipa_update_after_lto_read (void)
4948 ipa_check_create_node_params ();
4949 ipa_check_create_edge_args ();
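/* Write the aggregate value replacement chain for NODE to OB.  */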
4952 void
4953 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
4955 int node_ref;
4956 unsigned int count = 0;
4957 lto_symtab_encoder_t encoder;
4958 struct ipa_agg_replacement_value *aggvals, *av;
4960 aggvals = ipa_get_agg_replacements_for_node (node);
4961 encoder = ob->decl_state->symtab_node_encoder;
4962 node_ref = lto_symtab_encoder_encode (encoder, node);
4963 streamer_write_uhwi (ob, node_ref);
4965 for (av = aggvals; av; av = av->next)
4966 count++;
4967 streamer_write_uhwi (ob, count);
4969 for (av = aggvals; av; av = av->next)
4971 struct bitpack_d bp;
4973 streamer_write_uhwi (ob, av->offset);
4974 streamer_write_uhwi (ob, av->index);
4975 stream_write_tree (ob, av->value, true);
4977 bp = bitpack_create (ob->main_stream);
4978 bp_pack_value (&bp, av->by_ref, 1);
4979 streamer_write_bitpack (&bp);
4983 /* Stream in the aggregate value replacement chain for NODE from IB. */
4985 static void
4986 read_agg_replacement_chain (struct lto_input_block *ib,
4987 struct cgraph_node *node,
4988 struct data_in *data_in)
4990 struct ipa_agg_replacement_value *aggvals = NULL;
4991 unsigned int count, i;
4993 count = streamer_read_uhwi (ib);
4994 for (i = 0; i < count; i++)
4996 struct ipa_agg_replacement_value *av;
4997 struct bitpack_d bp;
4999 av = ggc_alloc<ipa_agg_replacement_value> ();
5000 av->offset = streamer_read_uhwi (ib);
5001 av->index = streamer_read_uhwi (ib);
5002 av->value = stream_read_tree (ib, data_in);
5003 bp = streamer_read_bitpack (ib);
5004 av->by_ref = bp_unpack_value (&bp, 1);
5005 av->next = aggvals;
5006 aggvals = av;
5008 ipa_set_node_agg_value_chain (node, aggvals);
5011 /* Write all aggregate replacements for nodes in the current LTO partition. */
5013 void
5014 ipa_prop_write_all_agg_replacement (void)
5016 struct cgraph_node *node;
5017 struct output_block *ob;
5018 unsigned int count = 0;
5019 lto_symtab_encoder_iterator lsei;
5020 lto_symtab_encoder_t encoder;
5022 if (!ipa_node_agg_replacements)
5023 return;
5025 ob = create_output_block (LTO_section_ipcp_transform);
5026 encoder = ob->decl_state->symtab_node_encoder;
5027 ob->symbol = NULL;
5028 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5029 lsei_next_function_in_partition (&lsei))
5031 node = lsei_cgraph_node (lsei);
5032 if (cgraph_function_with_gimple_body_p (node)
5033 && ipa_get_agg_replacements_for_node (node) != NULL)
5034 count++;
5037 streamer_write_uhwi (ob, count);
5039 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5040 lsei_next_function_in_partition (&lsei))
5042 node = lsei_cgraph_node (lsei);
5043 if (cgraph_function_with_gimple_body_p (node)
5044 && ipa_get_agg_replacements_for_node (node) != NULL)
5045 write_agg_replacement_chain (ob, node);
5047 streamer_write_char_stream (ob->main_stream, 0);
5048 produce_asm (ob, NULL);
5049 destroy_output_block (ob);
5052 /* Read the replacements section in file FILE_DATA of length LEN with data
5053 DATA. */
5055 static void
5056 read_replacements_section (struct lto_file_decl_data *file_data,
5057 const char *data,
5058 size_t len)
5060 const struct lto_function_header *header =
5061 (const struct lto_function_header *) data;
5062 const int cfg_offset = sizeof (struct lto_function_header);
5063 const int main_offset = cfg_offset + header->cfg_size;
5064 const int string_offset = main_offset + header->main_size;
5065 struct data_in *data_in;
5066 struct lto_input_block ib_main;
5067 unsigned int i;
5068 unsigned int count;
5070 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
5071 header->main_size);
5073 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5074 header->string_size, vNULL);
5075 count = streamer_read_uhwi (&ib_main);
5077 for (i = 0; i < count; i++)
5079 unsigned int index;
5080 struct cgraph_node *node;
5081 lto_symtab_encoder_t encoder;
5083 index = streamer_read_uhwi (&ib_main);
5084 encoder = file_data->symtab_node_encoder;
5085 node = cgraph (lto_symtab_encoder_deref (encoder, index));
5086 gcc_assert (node->definition);
5087 read_agg_replacement_chain (&ib_main, node, data_in);
5089 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
5090 len);
5091 lto_data_in_delete (data_in);
5094 /* Read IPA-CP aggregate replacements. */
5096 void
5097 ipa_prop_read_all_agg_replacement (void)
5099 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5100 struct lto_file_decl_data *file_data;
5101 unsigned int j = 0;
5103 while ((file_data = file_data_vec[j++]))
5105 size_t len;
5106 const char *data = lto_get_section_data (file_data,
5107 LTO_section_ipcp_transform,
5108 NULL, &len);
5109 if (data)
5110 read_replacements_section (file_data, data, len);
5114 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5115 NODE. */
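/* For example (illustrative): if the clone skips parameter 1 of an
   original three-parameter function, the index map computed below is
   {0 -> 0, 1 -> -1, 2 -> 1}, so a replacement recorded for index 2 is
   retargeted to index 1 of the clone.  */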
5117 static void
5118 adjust_agg_replacement_values (struct cgraph_node *node,
5119 struct ipa_agg_replacement_value *aggval)
5121 struct ipa_agg_replacement_value *v;
5122 int i, c = 0, d = 0, *adj;
5124 if (!node->clone.combined_args_to_skip)
5125 return;
5127 for (v = aggval; v; v = v->next)
5129 gcc_assert (v->index >= 0);
5130 if (c < v->index)
5131 c = v->index;
5133 c++;
5135 adj = XALLOCAVEC (int, c);
5136 for (i = 0; i < c; i++)
5137 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5139 adj[i] = -1;
5140 d++;
5142 else
5143 adj[i] = i - d;
5145 for (v = aggval; v; v = v->next)
5146 v->index = adj[v->index];
5149 /* Dominator walker driving the ipcp modification phase. */
5151 class ipcp_modif_dom_walker : public dom_walker
5153 public:
5154 ipcp_modif_dom_walker (struct func_body_info *fbi,
5155 vec<ipa_param_descriptor> descs,
5156 struct ipa_agg_replacement_value *av,
5157 bool *sc, bool *cc)
5158 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5159 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5161 virtual void before_dom_children (basic_block);
5163 private:
5164 struct func_body_info *m_fbi;
5165 vec<ipa_param_descriptor> m_descriptors;
5166 struct ipa_agg_replacement_value *m_aggval;
5167 bool *m_something_changed, *m_cfg_changed;
5170 void
5171 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5173 gimple_stmt_iterator gsi;
5174 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5176 struct ipa_agg_replacement_value *v;
5177 gimple stmt = gsi_stmt (gsi);
5178 tree rhs, val, t;
5179 HOST_WIDE_INT offset, size;
5180 int index;
5181 bool by_ref, vce;
5183 if (!gimple_assign_load_p (stmt))
5184 continue;
5185 rhs = gimple_assign_rhs1 (stmt);
5186 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5187 continue;
5189 vce = false;
5190 t = rhs;
5191 while (handled_component_p (t))
5193 /* V_C_E can do things like convert an array of integers to one
5194 bigger integer and similar things we do not handle below. */
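/* E.g. (an illustrative case): VIEW_CONVERT_EXPR <long long> (arr) with
   "int arr[2]" reinterprets the whole array as a single scalar load,
   which the offset/size matching below cannot express.  */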
5195 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5197 vce = true;
5198 break;
5200 t = TREE_OPERAND (t, 0);
5202 if (vce)
5203 continue;
5205 if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index,
5206 &offset, &size, &by_ref))
5207 continue;
5208 for (v = m_aggval; v; v = v->next)
5209 if (v->index == index
5210 && v->offset == offset)
5211 break;
5212 if (!v
5213 || v->by_ref != by_ref
5214 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5215 continue;
5217 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5218 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5220 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5221 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5222 else if (TYPE_SIZE (TREE_TYPE (rhs))
5223 == TYPE_SIZE (TREE_TYPE (v->value)))
5224 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5225 else
5227 if (dump_file)
5229 fprintf (dump_file, " const ");
5230 print_generic_expr (dump_file, v->value, 0);
5231 fprintf (dump_file, " can't be converted to type of ");
5232 print_generic_expr (dump_file, rhs, 0);
5233 fprintf (dump_file, "\n");
5235 continue;
5238 else
5239 val = v->value;
5241 if (dump_file && (dump_flags & TDF_DETAILS))
5243 fprintf (dump_file, "Modifying stmt:\n ");
5244 print_gimple_stmt (dump_file, stmt, 0, 0);
5246 gimple_assign_set_rhs_from_tree (&gsi, val);
5247 update_stmt (stmt);
5249 if (dump_file && (dump_flags & TDF_DETAILS))
5251 fprintf (dump_file, "into:\n ");
5252 print_gimple_stmt (dump_file, stmt, 0, 0);
5253 fprintf (dump_file, "\n");
5256 *m_something_changed = true;
5257 if (maybe_clean_eh_stmt (stmt)
5258 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5259 *m_cfg_changed = true;
5264 /* IPCP transformation phase doing propagation of aggregate values. */
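/* In effect (an illustrative sketch): given a replacement recorded for
   parameter P saying that the value at offset 0 of *P is 4, the walker
   above rewrites a load such as

     tmp_1 = p_2(D)->f;

   into

     tmp_1 = 4;

   after which the virtual SSA web may need updating, hence the
   TODO_update_ssa_only_virtuals returned below.  */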
5266 unsigned int
5267 ipcp_transform_function (struct cgraph_node *node)
5269 vec<ipa_param_descriptor> descriptors = vNULL;
5270 struct func_body_info fbi;
5271 struct ipa_agg_replacement_value *aggval;
5272 int param_count;
5273 bool cfg_changed = false, something_changed = false;
5275 gcc_checking_assert (cfun);
5276 gcc_checking_assert (current_function_decl);
5278 if (dump_file)
5279 fprintf (dump_file, "Modification phase of node %s/%i\n",
5280 node->name (), node->order);
5282 aggval = ipa_get_agg_replacements_for_node (node);
5283 if (!aggval)
5284 return 0;
5285 param_count = count_formal_params (node->decl);
5286 if (param_count == 0)
5287 return 0;
5288 adjust_agg_replacement_values (node, aggval);
5289 if (dump_file)
5290 ipa_dump_agg_replacement_values (dump_file, aggval);
5292 fbi.node = node;
5293 fbi.info = NULL;
5294 fbi.bb_infos = vNULL;
5295 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5296 fbi.param_count = param_count;
5297 fbi.aa_walked = 0;
5299 descriptors.safe_grow_cleared (param_count);
5300 ipa_populate_param_decls (node, descriptors);
5301 calculate_dominance_info (CDI_DOMINATORS);
5302 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5303 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5305 int i;
5306 struct ipa_bb_info *bi;
5307 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5308 free_ipa_bb_info (bi);
5309 fbi.bb_infos.release ();
5310 free_dominance_info (CDI_DOMINATORS);
5311 (*ipa_node_agg_replacements)[node->uid] = NULL;
5312 descriptors.release ();
5314 if (!something_changed)
5315 return 0;
5316 else if (cfg_changed)
5317 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5318 else
5319 return TODO_update_ssa_only_virtuals;