/* Interprocedural analyses.
   Copyright (C) 2005-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "calls.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-inline.h"
#include "ipa-fnsummary.h"
#include "gimple-pretty-print.h"
#include "params.h"
#include "ipa-utils.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"
/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;

function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;

/* Edge summary for IPA-CP edge information.  */
ipa_edge_args_sum_t *ipa_edge_args_sum;
/* Traits for a hash table for reusing already existing ipa_bits.  */

struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
{
  typedef ipa_bits *value_type;
  typedef ipa_bits *compare_type;
  static hashval_t
  hash (const ipa_bits *p)
  {
    hashval_t t = (hashval_t) p->value.to_shwi ();
    return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
  }
  static bool
  equal (const ipa_bits *a, const ipa_bits *b)
  {
    return a->value == b->value && a->mask == b->mask;
  }
  static void
  mark_empty (ipa_bits *&p)
  {
    p = NULL;
  }
  static bool
  is_empty (const ipa_bits *p)
  {
    return p == NULL;
  }
  static bool
  is_deleted (const ipa_bits *p)
  {
    return p == reinterpret_cast<const ipa_bits *> (1);
  }
  static void
  mark_deleted (ipa_bits *&p)
  {
    p = reinterpret_cast<ipa_bits *> (1);
  }
};

/* Hash table for avoiding repeated allocations of equal ipa_bits.  */
static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
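
/* A minimal illustration of how the table above is used (a sketch, not part
   of the API contract): lookups are keyed by a stack-allocated ipa_bits
   whose value/mask pair is filled in, mirroring what
   ipa_get_ipa_bits_for_value does further below:

     ipa_bits key;
     key.value = value;
     key.mask = mask;
     ipa_bits **slot = ipa_bits_hash_table->find_slot (&key, INSERT);

   Empty slots are represented by NULL and deleted slots by the address 1,
   so neither of those two pointer values may ever be stored as a real
   element.  */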
/* Traits for a hash table for reusing value_ranges used for IPA.  Note that
   the equiv bitmap is not hashed and is expected to be NULL.  */

struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range *>
{
  typedef value_range *value_type;
  typedef value_range *compare_type;
  static hashval_t
  hash (const value_range *p)
  {
    gcc_checking_assert (!p->equiv ());
    inchash::hash hstate (p->kind ());
    inchash::add_expr (p->min (), hstate);
    inchash::add_expr (p->max (), hstate);
    return hstate.end ();
  }
  static bool
  equal (const value_range *a, const value_range *b)
  {
    return a->ignore_equivs_equal_p (*b);
  }
  static void
  mark_empty (value_range *&p)
  {
    p = NULL;
  }
  static bool
  is_empty (const value_range *p)
  {
    return p == NULL;
  }
  static bool
  is_deleted (const value_range *p)
  {
    return p == reinterpret_cast<const value_range *> (1);
  }
  static void
  mark_deleted (value_range *&p)
  {
    p = reinterpret_cast<value_range *> (1);
  }
};

/* Hash table for avoiding repeated allocations of equal value_ranges.  */
static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
/* Holders of ipa cgraph hooks: */
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize)
	 || !opt_for_fn (node->decl, flag_ipa_cp);
}
/* Return index of the formal whose tree is PTREE among the parameter
   descriptors in DESCRIPTORS.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
			    tree ptree)
{
  int i, count;

  count = vec_safe_length (descriptors);
  for (i = 0; i < count; i++)
    if ((*descriptors)[i].decl_or_type == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor, va_gc> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl_or_type = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
							     true);
      param_num++;
    }
}
/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}
/* Dump the IPA representation of the Ith formal parameter of the function
   corresponding to INFO to FILE.  Note there is no setter function as the
   descriptor array is built just once using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if ((*info->descriptors)[i].decl_or_type)
    {
      fprintf (file, " ");
      print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
    }
}
/* If necessary, allocate vector of parameter descriptors in info of NODE.
   Return true if they were allocated, false if not.  */

static bool
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors && param_count)
    {
      vec_safe_grow_cleared (info->descriptors, param_count);
      return true;
    }
  else
    return false;
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors
      && ipa_alloc_node_params (node, count_formal_params (node->decl)))
    ipa_populate_param_decls (node, *info->descriptors);
}
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f, jump_func->value.pass_through.operand);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value);
		}
	      fprintf (f, "\n");
	    }
	}

      struct ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, "         Context: ");
	  ctx->dump (f);
	}

      if (jump_func->bits)
	{
	  fprintf (f, "         value: ");
	  print_hex (jump_func->bits->value, f);
	  fprintf (f, ", mask: ");
	  print_hex (jump_func->bits->mask, f);
	  fprintf (f, "\n");
	}
      else
	fprintf (f, "         Unknown bits\n");

      if (jump_func->m_vr)
	{
	  fprintf (f, "         VR  ");
	  fprintf (f, "%s[",
		   (jump_func->m_vr->kind () == VR_ANTI_RANGE) ? "~" : "");
	  print_decs (wi::to_wide (jump_func->m_vr->min ()), f);
	  fprintf (f, ", ");
	  print_decs (wi::to_wide (jump_func->m_vr->max ()), f);
	  fprintf (f, "]\n");
	}
      else
	fprintf (f, "         Unknown VR\n");
    }
}
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller %s:\n", node->dump_name ());
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, "    callsite %s -> %s : \n",
	       node->dump_name (),
	       cs->callee->dump_name ());
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}
/* Set JFUNC to be a jump function representing no knowledge about the actual
   argument, i.e. IPA_JF_UNKNOWN.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
  jfunc->bits = NULL;
  jfunc->m_vr = NULL;
}
/* Set DST to be a copy of the constant jump function SRC (to be used by the
   jump function combination code).  The two functions will share their
   rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}
/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}
/* Set JFUNC to be a unary pass-through jump function.  */

static void
ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}
/* Set JFUNC to be an arithmetic pass-through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */
static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_ref_base_and_extent to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
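
/* To illustrate the assumptions spelled out above with a purely illustrative
   C++ fragment (not part of GCC):

     struct A { virtual void f (); };
     struct B : A { B (); virtual void f (); };
     void g ();
     B::B () { g (); }

   The compiler-generated body of B::B first calls A::A (section 1 above),
   then stores the VMT pointer for B (section 2), and only then runs the
   user-written call to g () (section 3).  Walking backwards from a statement
   in section 3, any call encountered before the VMT store can therefore be
   safely ignored.  */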
/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return true if the dynamic type may change
   between the beginning of the function and the point where CALL is invoked.

   Generally functions are not allowed to change the dynamic type of such
   instances, but they may call destructors.  We assume that methods cannot
   destroy the THIS pointer.  Also as a special case, constructors and
   destructors may change the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     those require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inline cdtor is actually working on ARG, but we don't have
     an easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know the type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call);
	       block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gcall *call, struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
						call, jfunc, offset);
}
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
						call, jfunc, 0);
}
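
/* For illustration (a sketch, not actual compiler output): for a call such as

     obj_1(D)->foo ();       // obj_1(D) is a default-def pointer parameter

   the function above builds the equivalent of MEM[(void *)obj_1(D) + 0] and
   asks detect_type_change_from_memory_writes whether the VMT pointer stored
   at that location may have been overwritten before the call.  */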
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct ipa_func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have gathered
   so far that does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  if (TREE_READONLY (base))
    return true;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
/* If STMT is an assignment that loads a value from a parameter declaration
   that is known not to have been modified, return the index of the parameter
   in ipa_node_params.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
			    vec<ipa_param_descriptor, va_gc> *descriptors,
			    gimple *stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
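
/* For example (illustrative GIMPLE, assuming parameter A is addressable and
   therefore not a gimple register):

     a.0_2 = a;         // load from the unmodified PARM_DECL a
     bar (a.0_2);

   load_from_unmodified_param returns the index of A for the load statement
   as long as nothing could have clobbered A before it.  */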
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
/* Return true if we can prove that OP is a memory reference loading
   data from an aggregate passed as a parameter.

   The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it
   returns false if it cannot prove that the value has not been modified
   before the load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will
   return true even if it cannot prove the value has not been modified; in
   that case it stores false to *GUARANTEED_UNMODIFIED, otherwise it stores
   true there.

   FBI and DESCRIPTORS describe parameters of the current function (but FBI
   can be NULL), STMT is the load statement.  If the function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index,
   offset within the aggregate and whether it is a load from a value passed
   by reference respectively.  */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor, va_gc> *descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
			bool *by_ref_p, bool *guaranteed_unmodified)
{
  int index;
  HOST_WIDE_INT size;
  bool reverse;
  tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);

  if (!base)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  if (guaranteed_unmodified)
	    *guaranteed_unmodified = true;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	   void (*<T2e4>) (struct S *) D.1867;
	   struct S * p.1;

	   <bb 2>:
	   p.1_1 = p;
	   D.1867_2 = p.1_1->f;
	   D.1867_2 ();
	   gdp = &p;
      */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0)
    {
      bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
      if (!data_preserved && !guaranteed_unmodified)
	return false;

      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      if (guaranteed_unmodified)
	*guaranteed_unmodified = data_preserved;
      return true;
    }
  return false;
}
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
        int a.0;

        a.0_2 = a;
        bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
        int D.2064;

        D.2064_4 = a.1(D) + 4;
        bar (D.2064_4);

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
        int a.0;
        int D.2064;

        a.0_3 = a;
        D.2064_4 = a.0_3 + 4;
        foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters at different
   stages of IPA optimizations.  PARMS_AINFO contains the information that is
   only needed for intraprocedural analysis.  */
static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      switch (gimple_assign_rhs_class (stmt))
	{
	case GIMPLE_BINARY_RHS:
	  {
	    tree op2 = gimple_assign_rhs2 (stmt);
	    if (!is_gimple_ip_invariant (op2)
		|| ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
		     != tcc_comparison)
		    && !useless_type_conversion_p (TREE_TYPE (name),
						   TREE_TYPE (op1))))
	      return;

	    ipa_set_jf_arith_pass_through (jfunc, index, op2,
					   gimple_assign_rhs_code (stmt));
	    break;
	  }
	case GIMPLE_SINGLE_RHS:
	  {
	    bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
						       tc_ssa);
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	    break;
	  }
	case GIMPLE_UNARY_RHS:
	  if (is_gimple_assign (stmt)
	      && gimple_assign_rhs_class (stmt) == GIMPLE_UNARY_RHS
	      && ! CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
	    ipa_set_jf_unary_pass_through (jfunc, index,
					   gimple_assign_rhs_code (stmt));
	default:;
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
  offset_int mem_offset;
  if (!base
      || TREE_CODE (base) != MEM_REF
      || !mem_ref_offset (base).is_constant (&mem_offset))
    return;
  offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);

  offset_int mem_offset;
  if (!expr
      || TREE_CODE (expr) != MEM_REF
      || !mem_ref_offset (expr).is_constant (&mem_offset))
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

   if (obj_2(D) != 0B)
     goto <bb 3>;
   else
     goto <bb 4>;

   <bb 3>:
   iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
   # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
   D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
   return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
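
/* The layout being matched corresponds to the common C++ ABI representation
   of pointers to member functions; illustratively (a sketch, not a GCC
   internal type):

     struct member_ptr
     {
       void (S::*pfn) ();   // method pointer (or adjustment-encoded value)
       ptrdiff_t delta;     // this-pointer adjustment
     };

   Only records with exactly these two fields are accepted.  */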
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}
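
/* E.g. given the chain (illustrative GIMPLE)

     b_2 = a_1;
     c_3 = b_2;

   get_ssa_def_if_simple_copy (c_3) walks the single-rhs copies backwards and
   returns a_1 (or, in general, the first definition that is not a simple
   copy).  */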
/* Simple linked list, describing known contents of an aggregate before a
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents are known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
				HOST_WIDE_INT lhs_offset,
				HOST_WIDE_INT lhs_size,
				bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
	return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	/* We already know this value is subsequently overwritten with
	   something else.  */
	*already_there = true;
      else
	/* Otherwise this is a partial overlap which we cannot
	   represent.  */
	return NULL;
    }
  return p;
}
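
/* A worked example of the invariants above (hypothetical offsets and sizes,
   in bits): with a list describing [0, 64) and [128, 160), asking for a
   place for LHS_OFFSET 64, LHS_SIZE 64 returns the link between the two
   entries; asking for [0, 64) again sets *ALREADY_THERE; and asking for
   offset 96, size 64, which partially overlaps the second entry, returns
   NULL.  */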
/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET, and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int const_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
	{
	  struct ipa_agg_jf_item item;
	  item.offset = list->offset - arg_offset;
	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	  item.value = unshare_expr_without_location (list->constant);
	  jfunc->agg.items->quick_push (item);
	}
      list = list->next;
    }
}
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */

static void
determine_locally_known_aggregate_parts (gcall *call, tree arg,
					 tree arg_type,
					 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  if (PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS) == 0)
    return;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  bool reverse;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
						  &arg_size, &reverse);
	  if (!arg_base)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      bool reverse;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
					      &arg_size, &reverse);
      if (!arg_base)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
     describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple *stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size;
      tree lhs, rhs, lhs_base;
      bool reverse;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
	continue;
      if (!gimple_assign_single_p (stmt))
	break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
	  || TREE_CODE (lhs) == BIT_FIELD_REF
	  || contains_bitfld_component_ref_p (lhs))
	break;

      lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset,
					      &lhs_size, &reverse);
      if (!lhs_base)
	break;

      if (check_ref)
	{
	  if (TREE_CODE (lhs_base) != MEM_REF
	      || TREE_OPERAND (lhs_base, 0) != arg_base
	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	    break;
	}
      else if (lhs_base != arg_base)
	{
	  if (DECL_P (lhs_base))
	    continue;
	  else
	    break;
	}

      bool already_there = false;
      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
					  &already_there);
      if (!p)
	break;
      if (already_there)
	continue;

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
	{
	  n->constant = rhs;
	  const_count++;
	}
      else
	n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}
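
/* As an illustration (a sketch, not compiler output), for a call sequence
   such as

     s.a = 1;
     s.b = 2;
     foo (&s);

   the backwards walk above records the two constant stores, and the
   resulting jump function describes an aggregate passed by reference whose
   parts at the offsets of fields a and b are known to be 1 and 2.  */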
/* Return the Ith param type of callee associated with call graph
   edge E.  */

tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
/* Return ipa_bits with VALUE and MASK values, which can be either a newly
   allocated structure or a previously existing one shared with other jump
   functions and/or transformation summaries.  */

ipa_bits *
ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
{
  ipa_bits tmp;
  tmp.value = value;
  tmp.mask = mask;

  ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
  if (*slot)
    return *slot;

  ipa_bits *res = ggc_alloc<ipa_bits> ();
  res->value = value;
  res->mask = mask;
  *slot = res;

  return res;
}
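
/* Consequently, equal value/mask pairs requested for different jump
   functions share one GC-allocated structure; illustratively (a sketch, not
   part of any test):

     ipa_bits *a = ipa_get_ipa_bits_for_value (4, -8);
     ipa_bits *b = ipa_get_ipa_bits_for_value (4, -8);
     gcc_checking_assert (a == b);  */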
/* Assign to JF a pointer to an ipa_bits structure with VALUE and MASK.  Use
   the hash table in order to avoid creating multiple identical ipa_bits
   structures.  */

static void
ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
		    const widest_int &mask)
{
  jf->bits = ipa_get_ipa_bits_for_value (value, mask);
}
/* Return a pointer to a value_range just like *TMP, but either find it in
   ipa_vr_hash_table or allocate it in GC memory.  TMP->equiv must be NULL.  */

static value_range *
ipa_get_value_range (value_range *tmp)
{
  value_range **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
  if (*slot)
    return *slot;

  value_range *vr = ggc_alloc<value_range> ();
  *vr = *tmp;
  *slot = vr;

  return vr;
}

/* Return a pointer to a value range consisting of TYPE, MIN, MAX and an empty
   equiv set.  Use the hash table in order to avoid creating multiple
   identical copies of value_ranges.  */

static value_range *
ipa_get_value_range (enum value_range_kind type, tree min, tree max)
{
  value_range tmp (type, min, max);
  return ipa_get_value_range (&tmp);
}

/* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
   a NULL equiv bitmap.  Use the hash table in order to avoid creating multiple
   identical value_range structures.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_kind type,
		  tree min, tree max)
{
  jf->m_vr = ipa_get_value_range (type, min, max);
}
/* Assign to JF a pointer to a value_range just like TMP, but either fetch a
   copy from ipa_vr_hash_table or allocate a new one in GC memory.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, value_range *tmp)
{
  jf->m_vr = ipa_get_value_range (tmp);
}
1818 /* Compute jump function for all arguments of callsite CS and insert the
1819 information in the jump_functions array in the ipa_edge_args corresponding
1820 to this callsite. */
1822 static void
1823 ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
1824 struct cgraph_edge *cs)
1826 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1827 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1828 gcall *call = cs->call_stmt;
1829 int n, arg_num = gimple_call_num_args (call);
1830 bool useful_context = false;
1832 if (arg_num == 0 || args->jump_functions)
1833 return;
1834 vec_safe_grow_cleared (args->jump_functions, arg_num);
1835 if (flag_devirtualize)
1836 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
1838 if (gimple_call_internal_p (call))
1839 return;
1840 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1841 return;
1843 for (n = 0; n < arg_num; n++)
1845 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1846 tree arg = gimple_call_arg (call, n);
1847 tree param_type = ipa_get_callee_param_type (cs, n);
1848 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
1850 tree instance;
1851 struct ipa_polymorphic_call_context context (cs->caller->decl,
1852 arg, cs->call_stmt,
1853 &instance);
1854 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
1855 *ipa_get_ith_polymorhic_call_context (args, n) = context;
1856 if (!context.useless_p ())
1857 useful_context = true;
1860 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1862 bool addr_nonzero = false;
1863 bool strict_overflow = false;
1865 if (TREE_CODE (arg) == SSA_NAME
1866 && param_type
1867 && get_ptr_nonnull (arg))
1868 addr_nonzero = true;
1869 else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
1870 addr_nonzero = true;
1872 if (addr_nonzero)
1874 tree z = build_int_cst (TREE_TYPE (arg), 0);
1875 ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
1877 else
1878 gcc_assert (!jfunc->m_vr);
1880 else
1882 wide_int min, max;
1883 value_range_kind type;
1884 if (TREE_CODE (arg) == SSA_NAME
1885 && param_type
1886 && (type = get_range_info (arg, &min, &max))
1887 && (type == VR_RANGE || type == VR_ANTI_RANGE))
1889 value_range resvr;
1890 value_range tmpvr (type,
1891 wide_int_to_tree (TREE_TYPE (arg), min),
1892 wide_int_to_tree (TREE_TYPE (arg), max));
1893 extract_range_from_unary_expr (&resvr, NOP_EXPR, param_type,
1894 &tmpvr, TREE_TYPE (arg));
1895 if (!resvr.undefined_p () && !resvr.varying_p ())
1896 ipa_set_jfunc_vr (jfunc, &resvr);
1897 else
1898 gcc_assert (!jfunc->m_vr);
1900 else
1901 gcc_assert (!jfunc->m_vr);
1904 if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
1905 && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
1907 if (TREE_CODE (arg) == SSA_NAME)
1908 ipa_set_jfunc_bits (jfunc, 0,
1909 widest_int::from (get_nonzero_bits (arg),
1910 TYPE_SIGN (TREE_TYPE (arg))));
1911 else
1912 ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
1914 else if (POINTER_TYPE_P (TREE_TYPE (arg)))
1916 unsigned HOST_WIDE_INT bitpos;
1917 unsigned align;
1919 get_pointer_alignment_1 (arg, &align, &bitpos);
1920 widest_int mask = wi::bit_and_not
1921 (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
1922 align / BITS_PER_UNIT - 1);
1923 widest_int value = bitpos / BITS_PER_UNIT;
1924 ipa_set_jfunc_bits (jfunc, value, mask);
1926 else
1927 gcc_assert (!jfunc->bits);
1929 if (is_gimple_ip_invariant (arg)
1930 || (VAR_P (arg)
1931 && is_global_var (arg)
1932 && TREE_READONLY (arg)))
1933 ipa_set_jf_constant (jfunc, arg, cs);
1934 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1935 && TREE_CODE (arg) == PARM_DECL)
1937 int index = ipa_get_param_decl_index (info, arg);
1939 gcc_assert (index >= 0);
1940 /* Aggregate passed by value, check for pass-through, otherwise we
1941 will attempt to fill in aggregate contents later in this
1942 for loop. */
1943 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1945 ipa_set_jf_simple_pass_through (jfunc, index, false);
1946 continue;
1949 else if (TREE_CODE (arg) == SSA_NAME)
1951 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1953 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1954 if (index >= 0)
1956 bool agg_p;
1957 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1958 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1961 else
1963 gimple *stmt = SSA_NAME_DEF_STMT (arg);
1964 if (is_gimple_assign (stmt))
1965 compute_complex_assign_jump_func (fbi, info, jfunc,
1966 call, stmt, arg, param_type);
1967 else if (gimple_code (stmt) == GIMPLE_PHI)
1968 compute_complex_ancestor_jump_func (fbi, info, jfunc,
1969 call,
1970 as_a <gphi *> (stmt));
1974 /* If ARG is a pointer, we cannot use its type to determine the type of the
1975 aggregate passed (because type conversions are ignored in gimple). Usually
1976 we can safely get the type from the function declaration, but in the case of
1977 K&R prototypes or variadic functions we can try our luck with the type of
1978 the pointer passed. TODO: Since we look for actual initialization of the
1979 memory object, we might do better working out the type from the memory stores we find. */
1980 if (!param_type)
1981 param_type = TREE_TYPE (arg);
1983 if ((jfunc->type != IPA_JF_PASS_THROUGH
1984 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1985 && (jfunc->type != IPA_JF_ANCESTOR
1986 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1987 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1988 || POINTER_TYPE_P (param_type)))
1989 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
1991 if (!useful_context)
1992 vec_free (args->polymorphic_call_contexts);
1995 /* Compute jump functions for all edges - both direct and indirect - outgoing
1996 from BB. */
1998 static void
1999 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
2001 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2002 int i;
2003 struct cgraph_edge *cs;
2005 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
2007 struct cgraph_node *callee = cs->callee;
2009 if (callee)
2011 callee->ultimate_alias_target ();
2012 /* We do not need to bother analyzing calls to unknown functions
2013 unless they may become known during lto/whopr. */
2014 if (!callee->definition && !flag_lto)
2015 continue;
2017 ipa_compute_jump_functions_for_edge (fbi, cs);
2021 /* If STMT looks like a statement loading a value from a member pointer formal
2022 parameter, return that parameter and store the offset of the field to
2023 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2024 might be clobbered). If USE_DELTA, then we look for a use of the delta
2025 field rather than the pfn. */
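/* For example, with USE_DELTA false this function would match a load of
   the __pfn field of a member pointer parameter F in the forms appearing
   in the dump quoted further below, e.g.

     f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and return the PARM_DECL of F, storing the bit position of the field
   into *OFFSET_P.  */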
2027 static tree
2028 ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
2029 HOST_WIDE_INT *offset_p)
2031 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2033 if (!gimple_assign_single_p (stmt))
2034 return NULL_TREE;
2036 rhs = gimple_assign_rhs1 (stmt);
2037 if (TREE_CODE (rhs) == COMPONENT_REF)
2039 ref_field = TREE_OPERAND (rhs, 1);
2040 rhs = TREE_OPERAND (rhs, 0);
2042 else
2043 ref_field = NULL_TREE;
2044 if (TREE_CODE (rhs) != MEM_REF)
2045 return NULL_TREE;
2046 rec = TREE_OPERAND (rhs, 0);
2047 if (TREE_CODE (rec) != ADDR_EXPR)
2048 return NULL_TREE;
2049 rec = TREE_OPERAND (rec, 0);
2050 if (TREE_CODE (rec) != PARM_DECL
2051 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2052 return NULL_TREE;
2053 ref_offset = TREE_OPERAND (rhs, 1);
2055 if (use_delta)
2056 fld = delta_field;
2057 else
2058 fld = ptr_field;
2059 if (offset_p)
2060 *offset_p = int_bit_position (fld);
2062 if (ref_field)
2064 if (integer_nonzerop (ref_offset))
2065 return NULL_TREE;
2066 return ref_field == fld ? rec : NULL_TREE;
2068 else
2069 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2070 : NULL_TREE;
2073 /* Returns true iff T is an SSA_NAME defined by a statement. */
2075 static bool
2076 ipa_is_ssa_with_stmt_def (tree t)
2078 if (TREE_CODE (t) == SSA_NAME
2079 && !SSA_NAME_IS_DEFAULT_DEF (t))
2080 return true;
2081 else
2082 return false;
2085 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2086 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2087 indirect call graph edge. */
2089 static struct cgraph_edge *
2090 ipa_note_param_call (struct cgraph_node *node, int param_index,
2091 gcall *stmt)
2093 struct cgraph_edge *cs;
2095 cs = node->get_edge (stmt);
2096 cs->indirect_info->param_index = param_index;
2097 cs->indirect_info->agg_contents = 0;
2098 cs->indirect_info->member_ptr = 0;
2099 cs->indirect_info->guaranteed_unmodified = 0;
2100 return cs;
2103 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
2104 (described by INFO). PARMS_AINFO is a pointer to a vector containing
2105 intermediate information about each formal parameter. Currently it checks
2106 whether the call calls a pointer that is a formal parameter and if so, the
2107 parameter is marked with the called flag and an indirect call graph edge
2108 describing the call is created. This is very simple for ordinary pointers
2109 represented in SSA but not-so-nice when it comes to member pointers. The
2110 ugly part of this function does nothing more than trying to match the
2111 pattern of such a call. An example of such a pattern is the gimple dump
2112 below, the call is on the last line:
2114 <bb 2>:
2115 f$__delta_5 = f.__delta;
2116 f$__pfn_24 = f.__pfn;
2118 or
2119 <bb 2>:
2120 f$__delta_5 = MEM[(struct *)&f];
2121 f$__pfn_24 = MEM[(struct *)&f + 4B];
2123 and a few lines below:
2125 <bb 5>
2126 D.2496_3 = (int) f$__pfn_24;
2127 D.2497_4 = D.2496_3 & 1;
2128 if (D.2497_4 != 0)
2129 goto <bb 3>;
2130 else
2131 goto <bb 4>;
2133 <bb 6>:
2134 D.2500_7 = (unsigned int) f$__delta_5;
2135 D.2501_8 = &S + D.2500_7;
2136 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2137 D.2503_10 = *D.2502_9;
2138 D.2504_12 = f$__pfn_24 + -1;
2139 D.2505_13 = (unsigned int) D.2504_12;
2140 D.2506_14 = D.2503_10 + D.2505_13;
2141 D.2507_15 = *D.2506_14;
2142 iftmp.11_16 = (String:: *) D.2507_15;
2144 <bb 7>:
2145 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2146 D.2500_19 = (unsigned int) f$__delta_5;
2147 D.2508_20 = &S + D.2500_19;
2148 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2150 Such patterns are results of simple calls to a member pointer:
2152 int doprinting (int (MyString::* f)(int) const)
2154 MyString S ("somestring");
2156 return (S.*f)(4);
2159 Moreover, the function also looks for called pointers loaded from aggregates
2160 passed by value or reference. */
2162 static void
2163 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
2164 tree target)
2166 struct ipa_node_params *info = fbi->info;
2167 HOST_WIDE_INT offset;
2168 bool by_ref;
2170 if (SSA_NAME_IS_DEFAULT_DEF (target))
2172 tree var = SSA_NAME_VAR (target);
2173 int index = ipa_get_param_decl_index (info, var);
2174 if (index >= 0)
2175 ipa_note_param_call (fbi->node, index, call);
2176 return;
2179 int index;
2180 gimple *def = SSA_NAME_DEF_STMT (target);
2181 bool guaranteed_unmodified;
2182 if (gimple_assign_single_p (def)
2183 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2184 gimple_assign_rhs1 (def), &index, &offset,
2185 NULL, &by_ref, &guaranteed_unmodified))
2187 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2188 cs->indirect_info->offset = offset;
2189 cs->indirect_info->agg_contents = 1;
2190 cs->indirect_info->by_ref = by_ref;
2191 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
2192 return;
2195 /* Now we need to try to match the complex pattern of calling a member
2196 pointer. */
2197 if (gimple_code (def) != GIMPLE_PHI
2198 || gimple_phi_num_args (def) != 2
2199 || !POINTER_TYPE_P (TREE_TYPE (target))
2200 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2201 return;
2203 /* First, we need to check whether one of these is a load from a member
2204 pointer that is a parameter to this function. */
2205 tree n1 = PHI_ARG_DEF (def, 0);
2206 tree n2 = PHI_ARG_DEF (def, 1);
2207 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2208 return;
2209 gimple *d1 = SSA_NAME_DEF_STMT (n1);
2210 gimple *d2 = SSA_NAME_DEF_STMT (n2);
2212 tree rec;
2213 basic_block bb, virt_bb;
2214 basic_block join = gimple_bb (def);
2215 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2217 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2218 return;
2220 bb = EDGE_PRED (join, 0)->src;
2221 virt_bb = gimple_bb (d2);
2223 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2225 bb = EDGE_PRED (join, 1)->src;
2226 virt_bb = gimple_bb (d1);
2228 else
2229 return;
2231 /* Second, we need to check that the basic blocks are laid out in the way
2232 corresponding to the pattern. */
2234 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2235 || single_pred (virt_bb) != bb
2236 || single_succ (virt_bb) != join)
2237 return;
2239 /* Third, let's see that the branching is done depending on the least
2240 significant bit of the pfn. */
2242 gimple *branch = last_stmt (bb);
2243 if (!branch || gimple_code (branch) != GIMPLE_COND)
2244 return;
2246 if ((gimple_cond_code (branch) != NE_EXPR
2247 && gimple_cond_code (branch) != EQ_EXPR)
2248 || !integer_zerop (gimple_cond_rhs (branch)))
2249 return;
2251 tree cond = gimple_cond_lhs (branch);
2252 if (!ipa_is_ssa_with_stmt_def (cond))
2253 return;
2255 def = SSA_NAME_DEF_STMT (cond);
2256 if (!is_gimple_assign (def)
2257 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2258 || !integer_onep (gimple_assign_rhs2 (def)))
2259 return;
2261 cond = gimple_assign_rhs1 (def);
2262 if (!ipa_is_ssa_with_stmt_def (cond))
2263 return;
2265 def = SSA_NAME_DEF_STMT (cond);
2267 if (is_gimple_assign (def)
2268 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2270 cond = gimple_assign_rhs1 (def);
2271 if (!ipa_is_ssa_with_stmt_def (cond))
2272 return;
2273 def = SSA_NAME_DEF_STMT (cond);
2276 tree rec2;
2277 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2278 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2279 == ptrmemfunc_vbit_in_delta),
2280 NULL);
2281 if (rec != rec2)
2282 return;
2284 index = ipa_get_param_decl_index (info, rec);
2285 if (index >= 0
2286 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2288 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2289 cs->indirect_info->offset = offset;
2290 cs->indirect_info->agg_contents = 1;
2291 cs->indirect_info->member_ptr = 1;
2292 cs->indirect_info->guaranteed_unmodified = 1;
2295 return;
2298 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2299 object referenced in the expression is a formal parameter of the caller
2300 FBI->node (described by FBI->info), create a call note for the
2301 statement. */
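/* A minimal sketch of the situation handled here (hypothetical C++):

     struct A { virtual int foo (); };
     int bar (A *a) { return a->foo (); }

   The gimple call to a->foo () carries an OBJ_TYPE_REF whose object is
   the default definition of parameter A, so a polymorphic indirect call
   note is created for parameter 0.  */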
2303 static void
2304 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2305 gcall *call, tree target)
2307 tree obj = OBJ_TYPE_REF_OBJECT (target);
2308 int index;
2309 HOST_WIDE_INT anc_offset;
2311 if (!flag_devirtualize)
2312 return;
2314 if (TREE_CODE (obj) != SSA_NAME)
2315 return;
2317 struct ipa_node_params *info = fbi->info;
2318 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2320 struct ipa_jump_func jfunc;
2321 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2322 return;
2324 anc_offset = 0;
2325 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2326 gcc_assert (index >= 0);
2327 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2328 call, &jfunc))
2329 return;
2331 else
2333 struct ipa_jump_func jfunc;
2334 gimple *stmt = SSA_NAME_DEF_STMT (obj);
2335 tree expr;
2337 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2338 if (!expr)
2339 return;
2340 index = ipa_get_param_decl_index (info,
2341 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2342 gcc_assert (index >= 0);
2343 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2344 call, &jfunc, anc_offset))
2345 return;
2348 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2349 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2350 ii->offset = anc_offset;
2351 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2352 ii->otr_type = obj_type_ref_class (target);
2353 ii->polymorphic = 1;
2356 /* Analyze the call statement CALL to determine whether and how it utilizes
2357 formal parameters of the caller (described by INFO). PARMS_AINFO is a pointer
2358 to a vector containing intermediate information about each formal parameter. */
2360 static void
2361 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2363 tree target = gimple_call_fn (call);
2365 if (!target
2366 || (TREE_CODE (target) != SSA_NAME
2367 && !virtual_method_call_p (target)))
2368 return;
2370 struct cgraph_edge *cs = fbi->node->get_edge (call);
2371 /* If we previously turned the call into a direct call, there is
2372 no need to analyze. */
2373 if (cs && !cs->indirect_unknown_callee)
2374 return;
2376 if (cs->indirect_info->polymorphic && flag_devirtualize)
2378 tree instance;
2379 tree target = gimple_call_fn (call);
2380 ipa_polymorphic_call_context context (current_function_decl,
2381 target, call, &instance);
2383 gcc_checking_assert (cs->indirect_info->otr_type
2384 == obj_type_ref_class (target));
2385 gcc_checking_assert (cs->indirect_info->otr_token
2386 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2388 cs->indirect_info->vptr_changed
2389 = !context.get_dynamic_type (instance,
2390 OBJ_TYPE_REF_OBJECT (target),
2391 obj_type_ref_class (target), call);
2392 cs->indirect_info->context = context;
2395 if (TREE_CODE (target) == SSA_NAME)
2396 ipa_analyze_indirect_call_uses (fbi, call, target);
2397 else if (virtual_method_call_p (target))
2398 ipa_analyze_virtual_call_uses (fbi, call, target);
2402 /* Analyze the call statement STMT with respect to formal parameters (described
2403 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2404 formal parameters are called. */
2406 static void
2407 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2409 if (is_gimple_call (stmt))
2410 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2413 /* Callback of walk_stmt_load_store_addr_ops, used below for the load, store
2414 and address-taken visits alike. If OP is a parameter declaration, mark it
2415 as used in the info structure passed in DATA. */
2417 static bool
2418 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2420 struct ipa_node_params *info = (struct ipa_node_params *) data;
2422 op = get_base_address (op);
2423 if (op
2424 && TREE_CODE (op) == PARM_DECL)
2426 int index = ipa_get_param_decl_index (info, op);
2427 gcc_assert (index >= 0);
2428 ipa_set_param_used (info, index, true);
2431 return false;
2434 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2435 the findings in various structures of the associated ipa_node_params
2436 structure, such as parameter flags, notes etc. FBI holds various data about
2437 the function being analyzed. */
2439 static void
2440 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2442 gimple_stmt_iterator gsi;
2443 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2445 gimple *stmt = gsi_stmt (gsi);
2447 if (is_gimple_debug (stmt))
2448 continue;
2450 ipa_analyze_stmt_uses (fbi, stmt);
2451 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2452 visit_ref_for_mod_analysis,
2453 visit_ref_for_mod_analysis,
2454 visit_ref_for_mod_analysis);
2456 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2457 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2458 visit_ref_for_mod_analysis,
2459 visit_ref_for_mod_analysis,
2460 visit_ref_for_mod_analysis);
2463 /* Calculate controlled uses of parameters of NODE. */
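/* For instance (an illustrative example only):

     static int use (int *p);
     int foo (int *p) { return use (p) + use (p); }

   Both uses of P in FOO are call arguments, so its controlled-uses count
   is two; were P to appear in any other kind of (non-debug) statement,
   the count would become IPA_UNDESCRIBED_USE.  */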
2465 static void
2466 ipa_analyze_controlled_uses (struct cgraph_node *node)
2468 struct ipa_node_params *info = IPA_NODE_REF (node);
2470 for (int i = 0; i < ipa_get_param_count (info); i++)
2472 tree parm = ipa_get_param (info, i);
2473 int controlled_uses = 0;
2475 /* For SSA regs see if parameter is used. For non-SSA we compute
2476 the flag during modification analysis. */
2477 if (is_gimple_reg (parm))
2479 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2480 parm);
2481 if (ddef && !has_zero_uses (ddef))
2483 imm_use_iterator imm_iter;
2484 use_operand_p use_p;
2486 ipa_set_param_used (info, i, true);
2487 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2488 if (!is_gimple_call (USE_STMT (use_p)))
2490 if (!is_gimple_debug (USE_STMT (use_p)))
2492 controlled_uses = IPA_UNDESCRIBED_USE;
2493 break;
2496 else
2497 controlled_uses++;
2499 else
2500 controlled_uses = 0;
2502 else
2503 controlled_uses = IPA_UNDESCRIBED_USE;
2504 ipa_set_controlled_uses (info, i, controlled_uses);
2508 /* Free stuff in BI. */
2510 static void
2511 free_ipa_bb_info (struct ipa_bb_info *bi)
2513 bi->cg_edges.release ();
2514 bi->param_aa_statuses.release ();
2517 /* Dominator walker driving the analysis. */
2519 class analysis_dom_walker : public dom_walker
2521 public:
2522 analysis_dom_walker (struct ipa_func_body_info *fbi)
2523 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2525 virtual edge before_dom_children (basic_block);
2527 private:
2528 struct ipa_func_body_info *m_fbi;
2531 edge
2532 analysis_dom_walker::before_dom_children (basic_block bb)
2534 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2535 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2536 return NULL;
2539 /* Release body info FBI. */
2541 void
2542 ipa_release_body_info (struct ipa_func_body_info *fbi)
2544 int i;
2545 struct ipa_bb_info *bi;
2547 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2548 free_ipa_bb_info (bi);
2549 fbi->bb_infos.release ();
2552 /* Initialize the array describing properties of formal parameters
2553 of NODE, analyze their uses and compute jump functions associated
2554 with actual arguments of calls from within NODE. */
2556 void
2557 ipa_analyze_node (struct cgraph_node *node)
2559 struct ipa_func_body_info fbi;
2560 struct ipa_node_params *info;
2562 ipa_check_create_node_params ();
2563 ipa_check_create_edge_args ();
2564 info = IPA_NODE_REF (node);
2566 if (info->analysis_done)
2567 return;
2568 info->analysis_done = 1;
2570 if (ipa_func_spec_opts_forbid_analysis_p (node))
2572 for (int i = 0; i < ipa_get_param_count (info); i++)
2574 ipa_set_param_used (info, i, true);
2575 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2577 return;
2580 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2581 push_cfun (func);
2582 calculate_dominance_info (CDI_DOMINATORS);
2583 ipa_initialize_node_params (node);
2584 ipa_analyze_controlled_uses (node);
2586 fbi.node = node;
2587 fbi.info = IPA_NODE_REF (node);
2588 fbi.bb_infos = vNULL;
2589 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2590 fbi.param_count = ipa_get_param_count (info);
2591 fbi.aa_walked = 0;
2593 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2595 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2596 bi->cg_edges.safe_push (cs);
2599 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2601 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2602 bi->cg_edges.safe_push (cs);
2605 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2607 ipa_release_body_info (&fbi);
2608 free_dominance_info (CDI_DOMINATORS);
2609 pop_cfun ();
2612 /* Update the jump functions associated with call graph edge E when the call
2613 graph edge CS is being inlined, assuming that E->caller is already (possibly
2614 indirectly) inlined into CS->callee and that E has not been inlined. */
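/* For example (hypothetical functions): if A calls B passing along its
   own first parameter and B passes the same value on to C, then after
   the A->B edge is inlined, the pass-through jump function on the B->C
   edge, which referred to B's parameter 0, is combined below with the
   A->B jump function for argument 0, so that the resulting jump function
   is expressed in terms of A's parameters.  */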
2616 static void
2617 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2618 struct cgraph_edge *e)
2620 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2621 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2622 int count = ipa_get_cs_argument_count (args);
2623 int i;
2625 for (i = 0; i < count; i++)
2627 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2628 struct ipa_polymorphic_call_context *dst_ctx
2629 = ipa_get_ith_polymorhic_call_context (args, i);
2631 if (dst->type == IPA_JF_ANCESTOR)
2633 struct ipa_jump_func *src;
2634 int dst_fid = dst->value.ancestor.formal_id;
2635 struct ipa_polymorphic_call_context *src_ctx
2636 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2638 /* Variable number of arguments can cause havoc if we try to access
2639 one that does not exist in the inlined edge. So make sure we
2640 don't. */
2641 if (dst_fid >= ipa_get_cs_argument_count (top))
2643 ipa_set_jf_unknown (dst);
2644 continue;
2647 src = ipa_get_ith_jump_func (top, dst_fid);
2649 if (src_ctx && !src_ctx->useless_p ())
2651 struct ipa_polymorphic_call_context ctx = *src_ctx;
2653 /* TODO: Make type preserved safe WRT contexts. */
2654 if (!ipa_get_jf_ancestor_type_preserved (dst))
2655 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2656 ctx.offset_by (dst->value.ancestor.offset);
2657 if (!ctx.useless_p ())
2659 if (!dst_ctx)
2661 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2662 count);
2663 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2666 dst_ctx->combine_with (ctx);
2670 if (src->agg.items
2671 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2673 struct ipa_agg_jf_item *item;
2674 int j;
2676 /* Currently we do not produce clobber aggregate jump functions,
2677 replace with merging when we do. */
2678 gcc_assert (!dst->agg.items);
2680 dst->agg.items = vec_safe_copy (src->agg.items);
2681 dst->agg.by_ref = src->agg.by_ref;
2682 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2683 item->offset -= dst->value.ancestor.offset;
2686 if (src->type == IPA_JF_PASS_THROUGH
2687 && src->value.pass_through.operation == NOP_EXPR)
2689 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2690 dst->value.ancestor.agg_preserved &=
2691 src->value.pass_through.agg_preserved;
2693 else if (src->type == IPA_JF_PASS_THROUGH
2694 && TREE_CODE_CLASS (src->value.pass_through.operation) == tcc_unary)
2696 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2697 dst->value.ancestor.agg_preserved = false;
2699 else if (src->type == IPA_JF_ANCESTOR)
2701 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2702 dst->value.ancestor.offset += src->value.ancestor.offset;
2703 dst->value.ancestor.agg_preserved &=
2704 src->value.ancestor.agg_preserved;
2706 else
2707 ipa_set_jf_unknown (dst);
2709 else if (dst->type == IPA_JF_PASS_THROUGH)
2711 struct ipa_jump_func *src;
2712 /* We must check the range due to calls with a variable number of arguments,
2713 and we cannot combine jump functions with operations. */
2714 if (dst->value.pass_through.operation == NOP_EXPR
2715 && (dst->value.pass_through.formal_id
2716 < ipa_get_cs_argument_count (top)))
2718 int dst_fid = dst->value.pass_through.formal_id;
2719 src = ipa_get_ith_jump_func (top, dst_fid);
2720 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2721 struct ipa_polymorphic_call_context *src_ctx
2722 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2724 if (src_ctx && !src_ctx->useless_p ())
2726 struct ipa_polymorphic_call_context ctx = *src_ctx;
2728 /* TODO: Make type preserved safe WRT contexts. */
2729 if (!ipa_get_jf_pass_through_type_preserved (dst))
2730 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2731 if (!ctx.useless_p ())
2733 if (!dst_ctx)
2735 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2736 count);
2737 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2739 dst_ctx->combine_with (ctx);
2742 switch (src->type)
2744 case IPA_JF_UNKNOWN:
2745 ipa_set_jf_unknown (dst);
2746 break;
2747 case IPA_JF_CONST:
2748 ipa_set_jf_cst_copy (dst, src);
2749 break;
2751 case IPA_JF_PASS_THROUGH:
2753 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2754 enum tree_code operation;
2755 operation = ipa_get_jf_pass_through_operation (src);
2757 if (operation == NOP_EXPR)
2759 bool agg_p;
2760 agg_p = dst_agg_p
2761 && ipa_get_jf_pass_through_agg_preserved (src);
2762 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2764 else if (TREE_CODE_CLASS (operation) == tcc_unary)
2765 ipa_set_jf_unary_pass_through (dst, formal_id, operation);
2766 else
2768 tree operand = ipa_get_jf_pass_through_operand (src);
2769 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2770 operation);
2772 break;
2774 case IPA_JF_ANCESTOR:
2776 bool agg_p;
2777 agg_p = dst_agg_p
2778 && ipa_get_jf_ancestor_agg_preserved (src);
2779 ipa_set_ancestor_jf (dst,
2780 ipa_get_jf_ancestor_offset (src),
2781 ipa_get_jf_ancestor_formal_id (src),
2782 agg_p);
2783 break;
2785 default:
2786 gcc_unreachable ();
2789 if (src->agg.items
2790 && (dst_agg_p || !src->agg.by_ref))
2792 /* Currently we do not produce clobber aggregate jump
2793 functions, replace with merging when we do. */
2794 gcc_assert (!dst->agg.items);
2796 dst->agg.by_ref = src->agg.by_ref;
2797 dst->agg.items = vec_safe_copy (src->agg.items);
2800 else
2801 ipa_set_jf_unknown (dst);
2806 /* If TARGET is an addr_expr of a function declaration, make it the
2807 (SPECULATIVE) destination of an indirect edge IE and return the edge.
2808 Otherwise, return NULL. */
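/* For instance (illustrative): when TARGET is the invariant &foo of a
   defined function foo, IE is redirected to the node of foo; when TARGET
   is an IP invariant that does not resolve to any function declaration,
   the call is redirected to __builtin_unreachable instead.  */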
2810 struct cgraph_edge *
2811 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2812 bool speculative)
2814 struct cgraph_node *callee;
2815 bool unreachable = false;
2817 if (TREE_CODE (target) == ADDR_EXPR)
2818 target = TREE_OPERAND (target, 0);
2819 if (TREE_CODE (target) != FUNCTION_DECL)
2821 target = canonicalize_constructor_val (target, NULL);
2822 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2824 /* Member pointer call that goes through a VMT lookup. */
2825 if (ie->indirect_info->member_ptr
2826 /* Or if target is not an invariant expression and we do not
2827 know whether it will evaluate to a function at runtime.
2828 This can happen when folding through &VAR, where &VAR
2829 is IP invariant, but VAR itself is not.
2831 TODO: Revisit this when GCC 5 is branched. It seems that
2832 the member_ptr check is not needed and that we may try to fold
2833 the expression and see if VAR is readonly. */
2834 || !is_gimple_ip_invariant (target))
2836 if (dump_enabled_p ())
2838 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
2839 "discovered direct call non-invariant %s\n",
2840 ie->caller->dump_name ());
2842 return NULL;
2846 if (dump_enabled_p ())
2848 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
2849 "discovered direct call to non-function in %s, "
2850 "making it __builtin_unreachable\n",
2851 ie->caller->dump_name ());
2854 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2855 callee = cgraph_node::get_create (target);
2856 unreachable = true;
2858 else
2859 callee = cgraph_node::get (target);
2861 else
2862 callee = cgraph_node::get (target);
2864 /* Because may-edges are not explicitly represented and the vtable may be external,
2865 we may create the first reference to the object in the unit. */
2866 if (!callee || callee->global.inlined_to)
2869 /* We had better ensure we can refer to it.
2870 In the case of static functions we are out of luck, since their bodies
2871 have already been removed. In the case of public functions we may or
2872 may not introduce the reference. */
2873 if (!canonicalize_constructor_val (target, NULL)
2874 || !TREE_PUBLIC (target))
2876 if (dump_file)
2877 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2878 "(%s -> %s) but can not refer to it. Giving up.\n",
2879 ie->caller->dump_name (),
2880 ie->callee->dump_name ());
2881 return NULL;
2883 callee = cgraph_node::get_create (target);
2886 /* If the edge is already speculated. */
2887 if (speculative && ie->speculative)
2889 struct cgraph_edge *e2;
2890 struct ipa_ref *ref;
2891 ie->speculative_call_info (e2, ie, ref);
2892 if (e2->callee->ultimate_alias_target ()
2893 != callee->ultimate_alias_target ())
2895 if (dump_file)
2896 fprintf (dump_file, "ipa-prop: Discovered call to a speculative "
2897 "target (%s -> %s) but the call is already "
2898 "speculated to %s. Giving up.\n",
2899 ie->caller->dump_name (), callee->dump_name (),
2900 e2->callee->dump_name ());
2902 else
2904 if (dump_file)
2905 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2906 "(%s -> %s) this agree with previous speculation.\n",
2907 ie->caller->dump_name (), callee->dump_name ());
2909 return NULL;
2912 if (!dbg_cnt (devirt))
2913 return NULL;
2915 ipa_check_create_node_params ();
2917 /* We cannot make edges to inline clones. It is a bug if someone removed
2918 the cgraph node too early. */
2919 gcc_assert (!callee->global.inlined_to);
2921 if (dump_file && !unreachable)
2923 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2924 "(%s -> %s), for stmt ",
2925 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2926 speculative ? "speculative" : "known",
2927 ie->caller->dump_name (),
2928 callee->dump_name ());
2929 if (ie->call_stmt)
2930 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2931 else
2932 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2934 if (dump_enabled_p ())
2936 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
2937 "converting indirect call in %s to direct call to %s\n",
2938 ie->caller->name (), callee->name ());
2940 if (!speculative)
2942 struct cgraph_edge *orig = ie;
2943 ie = ie->make_direct (callee);
2944 /* If we resolved a speculative edge, the cost is already up to date
2945 for the direct call (adjusted by inline_edge_duplication_hook). */
2946 if (ie == orig)
2948 ipa_call_summary *es = ipa_call_summaries->get (ie);
2949 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2950 - eni_size_weights.call_cost);
2951 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2952 - eni_time_weights.call_cost);
2955 else
2957 if (!callee->can_be_discarded_p ())
2959 cgraph_node *alias;
2960 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2961 if (alias)
2962 callee = alias;
2964 /* make_speculative will update ie's cost to direct call cost. */
2965 ie = ie->make_speculative
2966 (callee, ie->count.apply_scale (8, 10));
2969 return ie;
2972 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
2973 CONSTRUCTOR and return it. Return NULL if the search fails for some
2974 reason. */
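/* For example (made-up initializer, assuming 32-bit int):

     static const int a[3] = {1, 2, 3};

   searching DECL_INITIAL (a) with a REQ_OFFSET of 64 bits would find and
   return the constant 3.  */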
2976 static tree
2977 find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
2979 tree type = TREE_TYPE (constructor);
2980 if (TREE_CODE (type) != ARRAY_TYPE
2981 && TREE_CODE (type) != RECORD_TYPE)
2982 return NULL;
2984 unsigned ix;
2985 tree index, val;
2986 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
2988 HOST_WIDE_INT elt_offset;
2989 if (TREE_CODE (type) == ARRAY_TYPE)
2991 offset_int off;
2992 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
2993 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
2995 if (index)
2997 if (TREE_CODE (index) == RANGE_EXPR)
2998 off = wi::to_offset (TREE_OPERAND (index, 0));
2999 else
3000 off = wi::to_offset (index);
3001 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
3003 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
3004 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3005 off = wi::sext (off - wi::to_offset (low_bound),
3006 TYPE_PRECISION (TREE_TYPE (index)));
3008 off *= wi::to_offset (unit_size);
3009 /* ??? Handle more than just the first index of a
3010 RANGE_EXPR. */
3012 else
3013 off = wi::to_offset (unit_size) * ix;
3015 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
3016 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
3017 continue;
3018 elt_offset = off.to_shwi ();
3020 else if (TREE_CODE (type) == RECORD_TYPE)
3022 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
3023 if (DECL_BIT_FIELD (index))
3024 continue;
3025 elt_offset = int_bit_position (index);
3027 else
3028 gcc_unreachable ();
3030 if (elt_offset > req_offset)
3031 return NULL;
3033 if (TREE_CODE (val) == CONSTRUCTOR)
3034 return find_constructor_constant_at_offset (val,
3035 req_offset - elt_offset);
3037 if (elt_offset == req_offset
3038 && is_gimple_reg_type (TREE_TYPE (val))
3039 && is_gimple_ip_invariant (val))
3040 return val;
3042 return NULL;
3045 /* Check whether SCALAR could be used to look up an aggregate interprocedural
3046 invariant from a static constructor and if so, return it. Otherwise return
3047 NULL. */
3049 static tree
3050 ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
3052 if (by_ref)
3054 if (TREE_CODE (scalar) != ADDR_EXPR)
3055 return NULL;
3056 scalar = TREE_OPERAND (scalar, 0);
3059 if (!VAR_P (scalar)
3060 || !is_global_var (scalar)
3061 || !TREE_READONLY (scalar)
3062 || !DECL_INITIAL (scalar)
3063 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
3064 return NULL;
3066 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
3069 /* Retrieve value from aggregate jump function AGG or static initializer of
3070 SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
3071 none. BY_REF specifies whether the value has to be passed by reference or
3072 by value. If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
3073 to is set to true if the value comes from an initializer of a constant. */
3075 tree
3076 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar,
3077 HOST_WIDE_INT offset, bool by_ref,
3078 bool *from_global_constant)
3080 struct ipa_agg_jf_item *item;
3081 int i;
3083 if (scalar)
3085 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
3086 if (res)
3088 if (from_global_constant)
3089 *from_global_constant = true;
3090 return res;
3094 if (!agg
3095 || by_ref != agg->by_ref)
3096 return NULL;
3098 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
3099 if (item->offset == offset)
3101 /* Currently we do not have clobber values, return NULL for them once
3102 we do. */
3103 gcc_checking_assert (is_gimple_ip_invariant (item->value));
3104 if (from_global_constant)
3105 *from_global_constant = false;
3106 return item->value;
3108 return NULL;
3111 /* Remove a reference to SYMBOL from the list of references of a node given by
3112 reference description RDESC. Return true if the reference has been
3113 successfully found and removed. */
3115 static bool
3116 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
3118 struct ipa_ref *to_del;
3119 struct cgraph_edge *origin;
3121 origin = rdesc->cs;
3122 if (!origin)
3123 return false;
3124 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3125 origin->lto_stmt_uid);
3126 if (!to_del)
3127 return false;
3129 to_del->remove_reference ();
3130 if (dump_file)
3131 fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
3132 origin->caller->dump_name (), xstrdup_for_dump (symbol->name ()));
3133 return true;
3136 /* If JFUNC has a reference description with refcount different from
3137 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3138 NULL. JFUNC must be a constant jump function. */
3140 static struct ipa_cst_ref_desc *
3141 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3143 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3144 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3145 return rdesc;
3146 else
3147 return NULL;
3150 /* If the value of constant jump function JFUNC is an address of a function
3151 declaration, return the associated call graph node. Otherwise return
3152 NULL. */
3154 static cgraph_node *
3155 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3157 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3158 tree cst = ipa_get_jf_constant (jfunc);
3159 if (TREE_CODE (cst) != ADDR_EXPR
3160 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3161 return NULL;
3163 return cgraph_node::get (TREE_OPERAND (cst, 0));
3167 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3168 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3169 the edge specified in the rdesc. Return false if either the symbol or the
3170 reference could not be found, otherwise return true. */
3172 static bool
3173 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3175 struct ipa_cst_ref_desc *rdesc;
3176 if (jfunc->type == IPA_JF_CONST
3177 && (rdesc = jfunc_rdesc_usable (jfunc))
3178 && --rdesc->refcount == 0)
3180 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3181 if (!symbol)
3182 return false;
3184 return remove_described_reference (symbol, rdesc);
3186 return true;
3189 /* Try to find a destination for indirect edge IE that corresponds to a simple
3190 call or a call of a member function pointer and where the destination is a
3191 pointer formal parameter described by jump function JFUNC. TARGET_TYPE is
3192 the type of the parameter to which the result of JFUNC is passed. If it can
3193 be determined, return the newly direct edge, otherwise return NULL.
3194 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
3196 static struct cgraph_edge *
3197 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3198 struct ipa_jump_func *jfunc, tree target_type,
3199 struct ipa_node_params *new_root_info)
3201 struct cgraph_edge *cs;
3202 tree target;
3203 bool agg_contents = ie->indirect_info->agg_contents;
3204 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
3205 if (agg_contents)
3207 bool from_global_constant;
3208 target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3209 ie->indirect_info->offset,
3210 ie->indirect_info->by_ref,
3211 &from_global_constant);
3212 if (target
3213 && !from_global_constant
3214 && !ie->indirect_info->guaranteed_unmodified)
3215 return NULL;
3217 else
3218 target = scalar;
3219 if (!target)
3220 return NULL;
3221 cs = ipa_make_edge_direct_to_target (ie, target);
3223 if (cs && !agg_contents)
3225 bool ok;
3226 gcc_checking_assert (cs->callee
3227 && (cs != ie
3228 || jfunc->type != IPA_JF_CONST
3229 || !cgraph_node_for_jfunc (jfunc)
3230 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3231 ok = try_decrement_rdesc_refcount (jfunc);
3232 gcc_checking_assert (ok);
3235 return cs;
3238 /* Return the target to be used in cases of impossible devirtualization. IE
3239 and target (the latter can be NULL) are dumped when dumping is enabled. */
3241 tree
3242 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3244 if (dump_file)
3246 if (target)
3247 fprintf (dump_file,
3248 "Type inconsistent devirtualization: %s->%s\n",
3249 ie->caller->dump_name (),
3250 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3251 else
3252 fprintf (dump_file,
3253 "No devirtualization target in %s\n",
3254 ie->caller->dump_name ());
3256 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3257 cgraph_node::get_create (new_target);
3258 return new_target;
3261 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3262 call based on a formal parameter which is described by jump function JFUNC
3263 and if it can be determined, make it direct and return the direct edge.
3264 Otherwise, return NULL. CTX describes the polymorphic context that the
3265 parameter the call is based on brings along with it. */
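/* Sketch of the first strategy below (a hypothetical scenario): if the
   aggregate jump function shows that the memory the call is based on
   contains &_ZTV1A + 16, vtable_pointer_value_to_vtable recovers the
   vtable of A, and gimple_get_virt_method_for_vtable looks up the method
   for OTR_TOKEN in it, yielding a concrete callee.  */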
3267 static struct cgraph_edge *
3268 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3269 struct ipa_jump_func *jfunc,
3270 struct ipa_polymorphic_call_context ctx)
3272 tree target = NULL;
3273 bool speculative = false;
3275 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
3276 return NULL;
3278 gcc_assert (!ie->indirect_info->by_ref);
3280 /* Try to do lookup via known virtual table pointer value. */
3281 if (!ie->indirect_info->vptr_changed
3282 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
3284 tree vtable;
3285 unsigned HOST_WIDE_INT offset;
3286 tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3287 : NULL;
3288 tree t = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3289 ie->indirect_info->offset,
3290 true);
3291 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3293 bool can_refer;
3294 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3295 vtable, offset, &can_refer);
3296 if (can_refer)
3298 if (!t
3299 || (TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
3300 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
3301 || !possible_polymorphic_call_target_p
3302 (ie, cgraph_node::get (t)))
3304 /* Do not speculate builtin_unreachable, it is stupid! */
3305 if (!ie->indirect_info->vptr_changed)
3306 target = ipa_impossible_devirt_target (ie, target);
3307 else
3308 target = NULL;
3310 else
3312 target = t;
3313 speculative = ie->indirect_info->vptr_changed;
3319 ipa_polymorphic_call_context ie_context (ie);
3320 vec <cgraph_node *>targets;
3321 bool final;
3323 ctx.offset_by (ie->indirect_info->offset);
3324 if (ie->indirect_info->vptr_changed)
3325 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3326 ie->indirect_info->otr_type);
3327 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3328 targets = possible_polymorphic_call_targets
3329 (ie->indirect_info->otr_type,
3330 ie->indirect_info->otr_token,
3331 ctx, &final);
3332 if (final && targets.length () <= 1)
3334 speculative = false;
3335 if (targets.length () == 1)
3336 target = targets[0]->decl;
3337 else
3338 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3340 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3341 && !ie->speculative && ie->maybe_hot_p ())
3343 cgraph_node *n;
3344 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3345 ie->indirect_info->otr_token,
3346 ie->indirect_info->context);
3347 if (n)
3349 target = n->decl;
3350 speculative = true;
3354 if (target)
3356 if (!possible_polymorphic_call_target_p
3357 (ie, cgraph_node::get_create (target)))
3359 if (speculative)
3360 return NULL;
3361 target = ipa_impossible_devirt_target (ie, target);
3363 return ipa_make_edge_direct_to_target (ie, target, speculative);
3365 else
3366 return NULL;
3369 /* Update the param called notes associated with NODE when CS is being inlined,
3370 assuming NODE is (potentially indirectly) inlined into CS->callee.
3371 Moreover, if the callee is discovered to be constant, create a new cgraph
3372 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3373 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
3375 static bool
3376 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3377 struct cgraph_node *node,
3378 vec<cgraph_edge *> *new_edges)
3380 struct ipa_edge_args *top;
3381 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3382 struct ipa_node_params *new_root_info, *inlined_node_info;
3383 bool res = false;
3385 ipa_check_create_edge_args ();
3386 top = IPA_EDGE_REF (cs);
3387 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3388 ? cs->caller->global.inlined_to
3389 : cs->caller);
3390 inlined_node_info = IPA_NODE_REF (cs->callee->function_symbol ());
3392 for (ie = node->indirect_calls; ie; ie = next_ie)
3394 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3395 struct ipa_jump_func *jfunc;
3396 int param_index;
3397 cgraph_node *spec_target = NULL;
3399 next_ie = ie->next_callee;
3401 if (ici->param_index == -1)
3402 continue;
3404 /* We must check the range due to calls with a variable number of arguments: */
3405 if (ici->param_index >= ipa_get_cs_argument_count (top))
3407 ici->param_index = -1;
3408 continue;
3411 param_index = ici->param_index;
3412 jfunc = ipa_get_ith_jump_func (top, param_index);
3414 if (ie->speculative)
3416 struct cgraph_edge *de;
3417 struct ipa_ref *ref;
3418 ie->speculative_call_info (de, ie, ref);
3419 spec_target = de->callee;
3422 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3423 new_direct_edge = NULL;
3424 else if (ici->polymorphic)
3426 ipa_polymorphic_call_context ctx;
3427 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3428 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3430 else
3432 tree target_type = ipa_get_type (inlined_node_info, param_index);
3433 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3434 target_type,
3435 new_root_info);
3438 /* If speculation was removed, then we need to do nothing. */
3439 if (new_direct_edge && new_direct_edge != ie
3440 && new_direct_edge->callee == spec_target)
3442 new_direct_edge->indirect_inlining_edge = 1;
3443 top = IPA_EDGE_REF (cs);
3444 res = true;
3445 if (!new_direct_edge->speculative)
3446 continue;
3448 else if (new_direct_edge)
3450 new_direct_edge->indirect_inlining_edge = 1;
3451 if (new_direct_edge->call_stmt)
3452 new_direct_edge->call_stmt_cannot_inline_p
3453 = !gimple_check_call_matching_types (
3454 new_direct_edge->call_stmt,
3455 new_direct_edge->callee->decl, false);
3456 if (new_edges)
3458 new_edges->safe_push (new_direct_edge);
3459 res = true;
3461 top = IPA_EDGE_REF (cs);
3462 /* If speculative edge was introduced we still need to update
3463 call info of the indirect edge. */
3464 if (!new_direct_edge->speculative)
3465 continue;
3467 if (jfunc->type == IPA_JF_PASS_THROUGH
3468 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3470 if (ici->agg_contents
3471 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3472 && !ici->polymorphic)
3473 ici->param_index = -1;
3474 else
3476 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3477 if (ici->polymorphic
3478 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3479 ici->vptr_changed = true;
3482 else if (jfunc->type == IPA_JF_ANCESTOR)
3484 if (ici->agg_contents
3485 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3486 && !ici->polymorphic)
3487 ici->param_index = -1;
3488 else
3490 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3491 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3492 if (ici->polymorphic
3493 && !ipa_get_jf_ancestor_type_preserved (jfunc))
3494 ici->vptr_changed = true;
3497 else
3498 /* Either we can find a destination for this edge now or never. */
3499 ici->param_index = -1;
3502 return res;
3505 /* Recursively traverse subtree of NODE (including node) made of inlined
3506 cgraph_edges when CS has been inlined and invoke
3507 update_indirect_edges_after_inlining on all nodes and
3508 update_jump_functions_after_inlining on all non-inlined edges that lead out
3509 of this subtree. Newly discovered indirect edges will be added to
3510 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
3511 created. */
3513 static bool
3514 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3515 struct cgraph_node *node,
3516 vec<cgraph_edge *> *new_edges)
3518 struct cgraph_edge *e;
3519 bool res;
3521 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3523 for (e = node->callees; e; e = e->next_callee)
3524 if (!e->inline_failed)
3525 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3526 else
3527 update_jump_functions_after_inlining (cs, e);
3528 for (e = node->indirect_calls; e; e = e->next_callee)
3529 update_jump_functions_after_inlining (cs, e);
3531 return res;
3534 /* Combine two controlled uses counts as done during inlining. */
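/* The sum below is decremented by one because the call site being
   inlined was itself one of the controlled uses counted in the caller;
   e.g. combining counts of 2 and 3 yields 4.  */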
3536 static int
3537 combine_controlled_uses_counters (int c, int d)
3539 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3540 return IPA_UNDESCRIBED_USE;
3541 else
3542 return c + d - 1;
3545 /* Propagate the number of controlled uses from CS->callee to the new root of
3546 the tree of inlined nodes. */
3548 static void
3549 propagate_controlled_uses (struct cgraph_edge *cs)
3551 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3552 struct cgraph_node *new_root = cs->caller->global.inlined_to
3553 ? cs->caller->global.inlined_to : cs->caller;
3554 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3555 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3556 int count, i;
3558 count = MIN (ipa_get_cs_argument_count (args),
3559 ipa_get_param_count (old_root_info));
3560 for (i = 0; i < count; i++)
3562 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3563 struct ipa_cst_ref_desc *rdesc;
3565 if (jf->type == IPA_JF_PASS_THROUGH)
3567 int src_idx, c, d;
3568 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3569 c = ipa_get_controlled_uses (new_root_info, src_idx);
3570 d = ipa_get_controlled_uses (old_root_info, i);
3572 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3573 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3574 c = combine_controlled_uses_counters (c, d);
3575 ipa_set_controlled_uses (new_root_info, src_idx, c);
3576 if (c == 0 && new_root_info->ipcp_orig_node)
3578 struct cgraph_node *n;
3579 struct ipa_ref *ref;
3580 tree t = new_root_info->known_csts[src_idx];
3582 if (t && TREE_CODE (t) == ADDR_EXPR
3583 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3584 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3585 && (ref = new_root->find_reference (n, NULL, 0)))
3587 if (dump_file)
3588 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3589 "reference from %s to %s.\n",
3590 new_root->dump_name (),
3591 n->dump_name ());
3592 ref->remove_reference ();
3596 else if (jf->type == IPA_JF_CONST
3597 && (rdesc = jfunc_rdesc_usable (jf)))
3599 int d = ipa_get_controlled_uses (old_root_info, i);
3600 int c = rdesc->refcount;
3601 rdesc->refcount = combine_controlled_uses_counters (c, d);
3602 if (rdesc->refcount == 0)
3604 tree cst = ipa_get_jf_constant (jf);
3605 struct cgraph_node *n;
3606 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3607 && TREE_CODE (TREE_OPERAND (cst, 0))
3608 == FUNCTION_DECL);
3609 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3610 if (n)
3612 struct cgraph_node *clone;
3613 bool ok;
3614 ok = remove_described_reference (n, rdesc);
3615 gcc_checking_assert (ok);
3617 clone = cs->caller;
3618 while (clone->global.inlined_to
3619 && clone != rdesc->cs->caller
3620 && IPA_NODE_REF (clone)->ipcp_orig_node)
3622 struct ipa_ref *ref;
3623 ref = clone->find_reference (n, NULL, 0);
3624 if (ref)
3626 if (dump_file)
3627 fprintf (dump_file, "ipa-prop: Removing "
3628 "cloning-created reference "
3629 "from %s to %s.\n",
3630 clone->dump_name (),
3631 n->dump_name ());
3632 ref->remove_reference ();
3634 clone = clone->callers->caller;
3641 for (i = ipa_get_param_count (old_root_info);
3642 i < ipa_get_cs_argument_count (args);
3643 i++)
3645 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3647 if (jf->type == IPA_JF_CONST)
3649 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3650 if (rdesc)
3651 rdesc->refcount = IPA_UNDESCRIBED_USE;
3653 else if (jf->type == IPA_JF_PASS_THROUGH)
3654 ipa_set_controlled_uses (new_root_info,
3655 jf->value.pass_through.formal_id,
3656 IPA_UNDESCRIBED_USE);
3660 /* Update jump functions and call note functions on inlining the call site CS.
3661 CS is expected to lead to a node already cloned by
3662 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3663 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
3664 created. */
3666 bool
3667 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3668 vec<cgraph_edge *> *new_edges)
3670 bool changed;
3671 /* Do nothing if the preparation phase has not been carried out yet
3672 (i.e. during early inlining). */
3673 if (!ipa_node_params_sum)
3674 return false;
3675 gcc_assert (ipa_edge_args_sum);
3677 propagate_controlled_uses (cs);
3678 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3680 return changed;
3683 /* Ensure that the array of edge argument infos is big enough to accommodate a
3684 structure for all edges, and reallocate it if not. Also allocate the
3685 associated hash tables if they do not already exist. */
3687 void
3688 ipa_check_create_edge_args (void)
3690 if (!ipa_edge_args_sum)
3691 ipa_edge_args_sum
3692 = (new (ggc_cleared_alloc <ipa_edge_args_sum_t> ())
3693 ipa_edge_args_sum_t (symtab, true));
3694 if (!ipa_bits_hash_table)
3695 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3696 if (!ipa_vr_hash_table)
3697 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
3700 /* Free all ipa_edge structures. */
3702 void
3703 ipa_free_all_edge_args (void)
3705 if (!ipa_edge_args_sum)
3706 return;
3708 ipa_edge_args_sum->release ();
3709 ipa_edge_args_sum = NULL;
3712 /* Free all ipa_node_params structures. */
3714 void
3715 ipa_free_all_node_params (void)
3717 ipa_node_params_sum->release ();
3718 ipa_node_params_sum = NULL;
3721 /* Initialize IPA CP transformation summary and also allocate any necessary hash
3722 tables if they do not already exist. */
3724 void
3725 ipcp_transformation_initialize (void)
3727 if (!ipa_bits_hash_table)
3728 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3729 if (!ipa_vr_hash_table)
3730 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
3731 if (ipcp_transformation_sum == NULL)
3732 ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
3735 /* Set the aggregate replacements of NODE to be AGGVALS. */
3737 void
3738 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3739 struct ipa_agg_replacement_value *aggvals)
3741 ipcp_transformation_initialize ();
3742 ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
3743 s->agg_values = aggvals;
3746 /* Hook that is called by cgraph.c when an edge is removed. Adjust reference
3747 count data structures accordingly. */
3749 void
3750 ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
3752 if (args->jump_functions)
3754 struct ipa_jump_func *jf;
3755 int i;
3756 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3758 struct ipa_cst_ref_desc *rdesc;
3759 try_decrement_rdesc_refcount (jf);
3760 if (jf->type == IPA_JF_CONST
3761 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3762 && rdesc->cs == cs)
3763 rdesc->cs = NULL;
3768 /* Method invoked when an edge is duplicated. Copy ipa_edge_args and adjust
3769 reference count data structures accordingly. */
3771 void
3772 ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
3773 ipa_edge_args *old_args, ipa_edge_args *new_args)
3775 unsigned int i;
3777 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3778 if (old_args->polymorphic_call_contexts)
3779 new_args->polymorphic_call_contexts
3780 = vec_safe_copy (old_args->polymorphic_call_contexts);
3782 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3784 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3785 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3787 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3789 if (src_jf->type == IPA_JF_CONST)
3791 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3793 if (!src_rdesc)
3794 dst_jf->value.constant.rdesc = NULL;
3795 else if (src->caller == dst->caller)
3797 struct ipa_ref *ref;
3798 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3799 gcc_checking_assert (n);
3800 ref = src->caller->find_reference (n, src->call_stmt,
3801 src->lto_stmt_uid);
3802 gcc_checking_assert (ref);
3803 dst->caller->clone_reference (ref, ref->stmt);
3805 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3806 dst_rdesc->cs = dst;
3807 dst_rdesc->refcount = src_rdesc->refcount;
3808 dst_rdesc->next_duplicate = NULL;
3809 dst_jf->value.constant.rdesc = dst_rdesc;
3811 else if (src_rdesc->cs == src)
3813 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3814 dst_rdesc->cs = dst;
3815 dst_rdesc->refcount = src_rdesc->refcount;
3816 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3817 src_rdesc->next_duplicate = dst_rdesc;
3818 dst_jf->value.constant.rdesc = dst_rdesc;
3820 else
3822 struct ipa_cst_ref_desc *dst_rdesc;
3823 /* This can happen during inlining, when a JFUNC can refer to a
3824 reference taken in a function up in the tree of inline clones.
3825 We need to find the duplicate that refers to our tree of
3826 inline clones. */
3828 gcc_assert (dst->caller->global.inlined_to);
3829 for (dst_rdesc = src_rdesc->next_duplicate;
3830 dst_rdesc;
3831 dst_rdesc = dst_rdesc->next_duplicate)
3833 struct cgraph_node *top;
3834 top = dst_rdesc->cs->caller->global.inlined_to
3835 ? dst_rdesc->cs->caller->global.inlined_to
3836 : dst_rdesc->cs->caller;
3837 if (dst->caller->global.inlined_to == top)
3838 break;
3840 gcc_assert (dst_rdesc);
3841 dst_jf->value.constant.rdesc = dst_rdesc;
3844 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3845 && src->caller == dst->caller)
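/* A pass-through jump function duplicated within the same caller
   describes one more use of the corresponding formal parameter of the
   root of the inline-clone tree, so the controlled-uses count, when it
   is tracked at all, must be incremented.  */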
3847 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3848 ? dst->caller->global.inlined_to : dst->caller;
3849 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3850 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3852 int c = ipa_get_controlled_uses (root_info, idx);
3853 if (c != IPA_UNDESCRIBED_USE)
3855 c++;
3856 ipa_set_controlled_uses (root_info, idx, c);
3862 /* Analyze a function newly added to the callgraph. */
3864 static void
3865 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3867 if (node->has_gimple_body_p ())
3868 ipa_analyze_node (node);
3871 /* Hook that is called by summary when a node is duplicated. */
3873 void
3874 ipa_node_params_t::duplicate (cgraph_node *src, cgraph_node *dst,
3875 ipa_node_params *old_info,
3876 ipa_node_params *new_info)
3878 ipa_agg_replacement_value *old_av, *new_av;
3880 new_info->descriptors = vec_safe_copy (old_info->descriptors);
3881 new_info->lattices = NULL;
3882 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3883 new_info->known_csts = old_info->known_csts.copy ();
3884 new_info->known_contexts = old_info->known_contexts.copy ();
3886 new_info->analysis_done = old_info->analysis_done;
3887 new_info->node_enqueued = old_info->node_enqueued;
3888 new_info->versionable = old_info->versionable;
3890 old_av = ipa_get_agg_replacements_for_node (src);
3891 if (old_av)
3893 new_av = NULL;
3894 while (old_av)
3896 struct ipa_agg_replacement_value *v;
3898 v = ggc_alloc<ipa_agg_replacement_value> ();
3899 memcpy (v, old_av, sizeof (*v));
3900 v->next = new_av;
3901 new_av = v;
3902 old_av = old_av->next;
3904 ipa_set_node_agg_value_chain (dst, new_av);
3907 ipcp_transformation *src_trans = ipcp_get_transformation_summary (src);
3909 if (src_trans)
3911 ipcp_transformation_initialize ();
3912 src_trans = ipcp_transformation_sum->get_create (src);
3913 ipcp_transformation *dst_trans
3914 = ipcp_transformation_sum->get_create (dst);
3916 dst_trans->bits = vec_safe_copy (src_trans->bits);
3918 const vec<ipa_vr, va_gc> *src_vr = src_trans->m_vr;
3919 vec<ipa_vr, va_gc> *&dst_vr
3920 = ipcp_get_transformation_summary (dst)->m_vr;
3921 if (vec_safe_length (src_trans->m_vr) > 0)
3923 vec_safe_reserve_exact (dst_vr, src_vr->length ());
3924 for (unsigned i = 0; i < src_vr->length (); ++i)
3925 dst_vr->quick_push ((*src_vr)[i]);
3930 /* Register our cgraph hooks if they are not already there. */
3932 void
3933 ipa_register_cgraph_hooks (void)
3935 ipa_check_create_node_params ();
3936 ipa_check_create_edge_args ();
3938 function_insertion_hook_holder =
3939 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3942 /* Unregister our cgraph hooks if they are registered. */
3944 static void
3945 ipa_unregister_cgraph_hooks (void)
3947 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3948 function_insertion_hook_holder = NULL;
3951 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3952 longer needed after ipa-cp. */
3954 void
3955 ipa_free_all_structures_after_ipa_cp (void)
3957 if (!optimize && !in_lto_p)
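/* When optimizing (or in LTO), all of this is presumably still needed
   for indirect inlining and is released only afterwards, in
   ipa_free_all_structures_after_iinln below.  */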
3959 ipa_free_all_edge_args ();
3960 ipa_free_all_node_params ();
3961 ipcp_sources_pool.release ();
3962 ipcp_cst_values_pool.release ();
3963 ipcp_poly_ctx_values_pool.release ();
3964 ipcp_agg_lattice_pool.release ();
3965 ipa_unregister_cgraph_hooks ();
3966 ipa_refdesc_pool.release ();
3970 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3971 longer needed after indirect inlining. */
3973 void
3974 ipa_free_all_structures_after_iinln (void)
3976 ipa_free_all_edge_args ();
3977 ipa_free_all_node_params ();
3978 ipa_unregister_cgraph_hooks ();
3979 ipcp_sources_pool.release ();
3980 ipcp_cst_values_pool.release ();
3981 ipcp_poly_ctx_values_pool.release ();
3982 ipcp_agg_lattice_pool.release ();
3983 ipa_refdesc_pool.release ();
3986 /* Print the ipa_tree_map data structures of function NODE to F. */
3989 void
3990 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3992 int i, count;
3993 struct ipa_node_params *info;
3995 if (!node->definition)
3996 return;
3997 info = IPA_NODE_REF (node);
3998 fprintf (f, " function %s parameter descriptors:\n", node->dump_name ());
3999 count = ipa_get_param_count (info);
4000 for (i = 0; i < count; i++)
4002 int c;
4004 fprintf (f, " ");
4005 ipa_dump_param (f, info, i);
4006 if (ipa_is_param_used (info, i))
4007 fprintf (f, " used");
4008 c = ipa_get_controlled_uses (info, i);
4009 if (c == IPA_UNDESCRIBED_USE)
4010 fprintf (f, " undescribed_use");
4011 else
4012 fprintf (f, " controlled_uses=%i", c);
4013 fprintf (f, "\n");
4017 /* Print ipa_tree_map data structures of all functions in the
4018 callgraph to F. */
4020 void
4021 ipa_print_all_params (FILE * f)
4023 struct cgraph_node *node;
4025 fprintf (f, "\nFunction parameters:\n");
4026 FOR_EACH_FUNCTION (node)
4027 ipa_print_node_params (f, node);
4030 /* Dump the linked list AV of aggregate replacement values to F. */
4032 void
4033 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4035 bool comma = false;
4036 fprintf (f, " Aggregate replacements:");
4037 for (; av; av = av->next)
4039 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4040 av->index, av->offset);
4041 print_generic_expr (f, av->value);
4042 comma = true;
4044 fprintf (f, "\n");
4047 /* Stream out jump function JUMP_FUNC to OB. */
4049 static void
4050 ipa_write_jump_function (struct output_block *ob,
4051 struct ipa_jump_func *jump_func)
4053 struct ipa_agg_jf_item *item;
4054 struct bitpack_d bp;
4055 int i, count;
4057 streamer_write_uhwi (ob, jump_func->type);
4058 switch (jump_func->type)
4060 case IPA_JF_UNKNOWN:
4061 break;
4062 case IPA_JF_CONST:
4063 gcc_assert (
4064 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4065 stream_write_tree (ob, jump_func->value.constant.value, true);
4066 break;
4067 case IPA_JF_PASS_THROUGH:
4068 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
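/* The streamed layout depends on the operation: a NOP_EXPR pass-through
   carries the formal id plus an agg_preserved bit, other unary
   operations carry just the formal id, and binary operations
   additionally stream the second operand.  */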
4069 if (jump_func->value.pass_through.operation == NOP_EXPR)
4071 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4072 bp = bitpack_create (ob->main_stream);
4073 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4074 streamer_write_bitpack (&bp);
4076 else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
4077 == tcc_unary)
4078 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4079 else
4081 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4082 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4084 break;
4085 case IPA_JF_ANCESTOR:
4086 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4087 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4088 bp = bitpack_create (ob->main_stream);
4089 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4090 streamer_write_bitpack (&bp);
4091 break;
4094 count = vec_safe_length (jump_func->agg.items);
4095 streamer_write_uhwi (ob, count);
4096 if (count)
4098 bp = bitpack_create (ob->main_stream);
4099 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4100 streamer_write_bitpack (&bp);
4103 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4105 streamer_write_uhwi (ob, item->offset);
4106 stream_write_tree (ob, item->value, true);
4109 bp = bitpack_create (ob->main_stream);
4110 bp_pack_value (&bp, !!jump_func->bits, 1);
4111 streamer_write_bitpack (&bp);
4112 if (jump_func->bits)
4114 streamer_write_widest_int (ob, jump_func->bits->value);
4115 streamer_write_widest_int (ob, jump_func->bits->mask);
4117 bp_pack_value (&bp, !!jump_func->m_vr, 1);
4118 streamer_write_bitpack (&bp);
4119 if (jump_func->m_vr)
4121 streamer_write_enum (ob->main_stream, value_range_kind,
4122 VR_LAST, jump_func->m_vr->kind ());
4123 stream_write_tree (ob, jump_func->m_vr->min (), true);
4124 stream_write_tree (ob, jump_func->m_vr->max (), true);
4128 /* Read in jump function JUMP_FUNC from IB. */
4130 static void
4131 ipa_read_jump_function (struct lto_input_block *ib,
4132 struct ipa_jump_func *jump_func,
4133 struct cgraph_edge *cs,
4134 struct data_in *data_in)
4136 enum jump_func_type jftype;
4137 enum tree_code operation;
4138 int i, count;
4140 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4141 switch (jftype)
4143 case IPA_JF_UNKNOWN:
4144 ipa_set_jf_unknown (jump_func);
4145 break;
4146 case IPA_JF_CONST:
4147 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4148 break;
4149 case IPA_JF_PASS_THROUGH:
4150 operation = (enum tree_code) streamer_read_uhwi (ib);
4151 if (operation == NOP_EXPR)
4153 int formal_id = streamer_read_uhwi (ib);
4154 struct bitpack_d bp = streamer_read_bitpack (ib);
4155 bool agg_preserved = bp_unpack_value (&bp, 1);
4156 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4158 else if (TREE_CODE_CLASS (operation) == tcc_unary)
4160 int formal_id = streamer_read_uhwi (ib);
4161 ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4163 else
4165 tree operand = stream_read_tree (ib, data_in);
4166 int formal_id = streamer_read_uhwi (ib);
4167 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4168 operation);
4170 break;
4171 case IPA_JF_ANCESTOR:
4173 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4174 int formal_id = streamer_read_uhwi (ib);
4175 struct bitpack_d bp = streamer_read_bitpack (ib);
4176 bool agg_preserved = bp_unpack_value (&bp, 1);
4177 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4178 break;
4182 count = streamer_read_uhwi (ib);
4183 vec_alloc (jump_func->agg.items, count);
4184 if (count)
4186 struct bitpack_d bp = streamer_read_bitpack (ib);
4187 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4189 for (i = 0; i < count; i++)
4191 struct ipa_agg_jf_item item;
4192 item.offset = streamer_read_uhwi (ib);
4193 item.value = stream_read_tree (ib, data_in);
4194 jump_func->agg.items->quick_push (item);
4197 struct bitpack_d bp = streamer_read_bitpack (ib);
4198 bool bits_known = bp_unpack_value (&bp, 1);
4199 if (bits_known)
4201 widest_int value = streamer_read_widest_int (ib);
4202 widest_int mask = streamer_read_widest_int (ib);
4203 ipa_set_jfunc_bits (jump_func, value, mask);
4205 else
4206 jump_func->bits = NULL;
4208 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4209 bool vr_known = bp_unpack_value (&vr_bp, 1);
4210 if (vr_known)
4212 enum value_range_kind type = streamer_read_enum (ib, value_range_kind,
4213 VR_LAST);
4214 tree min = stream_read_tree (ib, data_in);
4215 tree max = stream_read_tree (ib, data_in);
4216 ipa_set_jfunc_vr (jump_func, type, min, max);
4218 else
4219 jump_func->m_vr = NULL;
4222 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4223 relevant to indirect inlining to OB. */
4225 static void
4226 ipa_write_indirect_edge_info (struct output_block *ob,
4227 struct cgraph_edge *cs)
4229 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4230 struct bitpack_d bp;
4232 streamer_write_hwi (ob, ii->param_index);
4233 bp = bitpack_create (ob->main_stream);
4234 bp_pack_value (&bp, ii->polymorphic, 1);
4235 bp_pack_value (&bp, ii->agg_contents, 1);
4236 bp_pack_value (&bp, ii->member_ptr, 1);
4237 bp_pack_value (&bp, ii->by_ref, 1);
4238 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
4239 bp_pack_value (&bp, ii->vptr_changed, 1);
4240 streamer_write_bitpack (&bp);
4241 if (ii->agg_contents || ii->polymorphic)
4242 streamer_write_hwi (ob, ii->offset);
4243 else
4244 gcc_assert (ii->offset == 0);
4246 if (ii->polymorphic)
4248 streamer_write_hwi (ob, ii->otr_token);
4249 stream_write_tree (ob, ii->otr_type, true);
4250 ii->context.stream_out (ob);
4254 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4255 relevant to indirect inlining from IB. */
4257 static void
4258 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4259 struct data_in *data_in,
4260 struct cgraph_edge *cs)
4262 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4263 struct bitpack_d bp;
4265 ii->param_index = (int) streamer_read_hwi (ib);
4266 bp = streamer_read_bitpack (ib);
4267 ii->polymorphic = bp_unpack_value (&bp, 1);
4268 ii->agg_contents = bp_unpack_value (&bp, 1);
4269 ii->member_ptr = bp_unpack_value (&bp, 1);
4270 ii->by_ref = bp_unpack_value (&bp, 1);
4271 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
4272 ii->vptr_changed = bp_unpack_value (&bp, 1);
4273 if (ii->agg_contents || ii->polymorphic)
4274 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4275 else
4276 ii->offset = 0;
4277 if (ii->polymorphic)
4279 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4280 ii->otr_type = stream_read_tree (ib, data_in);
4281 ii->context.stream_in (ib, data_in);
4285 /* Stream out NODE info to OB. */
4287 static void
4288 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4290 int node_ref;
4291 lto_symtab_encoder_t encoder;
4292 struct ipa_node_params *info = IPA_NODE_REF (node);
4293 int j;
4294 struct cgraph_edge *e;
4295 struct bitpack_d bp;
4297 encoder = ob->decl_state->symtab_node_encoder;
4298 node_ref = lto_symtab_encoder_encode (encoder, node);
4299 streamer_write_uhwi (ob, node_ref);
4301 streamer_write_uhwi (ob, ipa_get_param_count (info));
4302 for (j = 0; j < ipa_get_param_count (info); j++)
4303 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4304 bp = bitpack_create (ob->main_stream);
4305 gcc_assert (info->analysis_done
4306 || ipa_get_param_count (info) == 0);
4307 gcc_assert (!info->node_enqueued);
4308 gcc_assert (!info->ipcp_orig_node);
4309 for (j = 0; j < ipa_get_param_count (info); j++)
4310 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4311 streamer_write_bitpack (&bp);
4312 for (j = 0; j < ipa_get_param_count (info); j++)
4314 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4315 stream_write_tree (ob, ipa_get_type (info, j), true);
4317 for (e = node->callees; e; e = e->next_callee)
4319 struct ipa_edge_args *args = IPA_EDGE_REF (e);
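/* The argument count is doubled and its lowest bit used to flag whether
   polymorphic call contexts follow each jump function; e.g. two
   arguments stream as 5 with contexts and as 4 without.  The reader
   recovers both with COUNT & 1 and COUNT / 2.  */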
4321 streamer_write_uhwi (ob,
4322 ipa_get_cs_argument_count (args) * 2
4323 + (args->polymorphic_call_contexts != NULL));
4324 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4326 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4327 if (args->polymorphic_call_contexts != NULL)
4328 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4331 for (e = node->indirect_calls; e; e = e->next_callee)
4333 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4335 streamer_write_uhwi (ob,
4336 ipa_get_cs_argument_count (args) * 2
4337 + (args->polymorphic_call_contexts != NULL));
4338 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4340 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4341 if (args->polymorphic_call_contexts != NULL)
4342 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4344 ipa_write_indirect_edge_info (ob, e);
4348 /* Stream in NODE info from IB. */
4350 static void
4351 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4352 struct data_in *data_in)
4354 struct ipa_node_params *info = IPA_NODE_REF (node);
4355 int k;
4356 struct cgraph_edge *e;
4357 struct bitpack_d bp;
4359 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4361 for (k = 0; k < ipa_get_param_count (info); k++)
4362 (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
4364 bp = streamer_read_bitpack (ib);
4365 if (ipa_get_param_count (info) != 0)
4366 info->analysis_done = true;
4367 info->node_enqueued = false;
4368 for (k = 0; k < ipa_get_param_count (info); k++)
4369 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4370 for (k = 0; k < ipa_get_param_count (info); k++)
4372 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4373 (*info->descriptors)[k].decl_or_type = stream_read_tree (ib, data_in);
4375 for (e = node->callees; e; e = e->next_callee)
4377 struct ipa_edge_args *args = IPA_EDGE_REF (e);
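/* The low bit of the streamed value says whether polymorphic call
   contexts were written alongside the jump functions; the rest is the
   argument count.  */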
4378 int count = streamer_read_uhwi (ib);
4379 bool contexts_computed = count & 1;
4380 count /= 2;
4382 if (!count)
4383 continue;
4384 vec_safe_grow_cleared (args->jump_functions, count);
4385 if (contexts_computed)
4386 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4388 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4390 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4391 data_in);
4392 if (contexts_computed)
4393 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4396 for (e = node->indirect_calls; e; e = e->next_callee)
4398 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4399 int count = streamer_read_uhwi (ib);
4400 bool contexts_computed = count & 1;
4401 count /= 2;
4403 if (count)
4405 vec_safe_grow_cleared (args->jump_functions, count);
4406 if (contexts_computed)
4407 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4408 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4410 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4411 data_in);
4412 if (contexts_computed)
4413 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4416 ipa_read_indirect_edge_info (ib, data_in, e);
4420 /* Write jump functions for nodes in the current LTO partition. */
4422 void
4423 ipa_prop_write_jump_functions (void)
4425 struct cgraph_node *node;
4426 struct output_block *ob;
4427 unsigned int count = 0;
4428 lto_symtab_encoder_iterator lsei;
4429 lto_symtab_encoder_t encoder;
4431 if (!ipa_node_params_sum || !ipa_edge_args_sum)
4432 return;
4434 ob = create_output_block (LTO_section_jump_functions);
4435 encoder = ob->decl_state->symtab_node_encoder;
4436 ob->symbol = NULL;
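/* The number of nodes with jump-function summaries must be streamed up
   front, so iterate over the partition once just to count them.  */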
4437 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4438 lsei_next_function_in_partition (&lsei))
4440 node = lsei_cgraph_node (lsei);
4441 if (node->has_gimple_body_p ()
4442 && IPA_NODE_REF (node) != NULL)
4443 count++;
4446 streamer_write_uhwi (ob, count);
4448 /* Process all of the functions. */
4449 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4450 lsei_next_function_in_partition (&lsei))
4452 node = lsei_cgraph_node (lsei);
4453 if (node->has_gimple_body_p ()
4454 && IPA_NODE_REF (node) != NULL)
4455 ipa_write_node_info (ob, node);
4457 streamer_write_char_stream (ob->main_stream, 0);
4458 produce_asm (ob, NULL);
4459 destroy_output_block (ob);
4462 /* Read section in file FILE_DATA of length LEN with data DATA. */
4464 static void
4465 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4466 size_t len)
4468 const struct lto_function_header *header =
4469 (const struct lto_function_header *) data;
4470 const int cfg_offset = sizeof (struct lto_function_header);
4471 const int main_offset = cfg_offset + header->cfg_size;
4472 const int string_offset = main_offset + header->main_size;
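/* The section starts with an lto_function_header; the (here unused) CFG
   part follows it, then the main stream read below, and finally the
   string table that accompanies DATA_IN.  */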
4473 struct data_in *data_in;
4474 unsigned int i;
4475 unsigned int count;
4477 lto_input_block ib_main ((const char *) data + main_offset,
4478 header->main_size, file_data->mode_table);
4480 data_in =
4481 lto_data_in_create (file_data, (const char *) data + string_offset,
4482 header->string_size, vNULL);
4483 count = streamer_read_uhwi (&ib_main);
4485 for (i = 0; i < count; i++)
4487 unsigned int index;
4488 struct cgraph_node *node;
4489 lto_symtab_encoder_t encoder;
4491 index = streamer_read_uhwi (&ib_main);
4492 encoder = file_data->symtab_node_encoder;
4493 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4494 index));
4495 gcc_assert (node->definition);
4496 ipa_read_node_info (&ib_main, node, data_in);
4498 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4499 len);
4500 lto_data_in_delete (data_in);
4503 /* Read ipcp jump functions. */
4505 void
4506 ipa_prop_read_jump_functions (void)
4508 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4509 struct lto_file_decl_data *file_data;
4510 unsigned int j = 0;
4512 ipa_check_create_node_params ();
4513 ipa_check_create_edge_args ();
4514 ipa_register_cgraph_hooks ();
4516 while ((file_data = file_data_vec[j++]))
4518 size_t len;
4519 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4521 if (data)
4522 ipa_prop_read_section (file_data, data, len);
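/* Stream out the IPA-CP transformation summary of NODE to OB: its chain
   of aggregate replacement values followed by the known value ranges
   and known bits of its parameters.  */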
4526 void
4527 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
4529 int node_ref;
4530 unsigned int count = 0;
4531 lto_symtab_encoder_t encoder;
4532 struct ipa_agg_replacement_value *aggvals, *av;
4534 aggvals = ipa_get_agg_replacements_for_node (node);
4535 encoder = ob->decl_state->symtab_node_encoder;
4536 node_ref = lto_symtab_encoder_encode (encoder, node);
4537 streamer_write_uhwi (ob, node_ref);
4539 for (av = aggvals; av; av = av->next)
4540 count++;
4541 streamer_write_uhwi (ob, count);
4543 for (av = aggvals; av; av = av->next)
4545 struct bitpack_d bp;
4547 streamer_write_uhwi (ob, av->offset);
4548 streamer_write_uhwi (ob, av->index);
4549 stream_write_tree (ob, av->value, true);
4551 bp = bitpack_create (ob->main_stream);
4552 bp_pack_value (&bp, av->by_ref, 1);
4553 streamer_write_bitpack (&bp);
4556 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
4557 if (ts && vec_safe_length (ts->m_vr) > 0)
4559 count = ts->m_vr->length ();
4560 streamer_write_uhwi (ob, count);
4561 for (unsigned i = 0; i < count; ++i)
4563 struct bitpack_d bp;
4564 ipa_vr *parm_vr = &(*ts->m_vr)[i];
4565 bp = bitpack_create (ob->main_stream);
4566 bp_pack_value (&bp, parm_vr->known, 1);
4567 streamer_write_bitpack (&bp);
4568 if (parm_vr->known)
4570 streamer_write_enum (ob->main_stream, value_range_kind,
4571 VR_LAST, parm_vr->type);
4572 streamer_write_wide_int (ob, parm_vr->min);
4573 streamer_write_wide_int (ob, parm_vr->max);
4577 else
4578 streamer_write_uhwi (ob, 0);
4580 if (ts && vec_safe_length (ts->bits) > 0)
4582 count = ts->bits->length ();
4583 streamer_write_uhwi (ob, count);
4585 for (unsigned i = 0; i < count; ++i)
4587 const ipa_bits *bits_jfunc = (*ts->bits)[i];
4588 struct bitpack_d bp = bitpack_create (ob->main_stream);
4589 bp_pack_value (&bp, !!bits_jfunc, 1);
4590 streamer_write_bitpack (&bp);
4591 if (bits_jfunc)
4593 streamer_write_widest_int (ob, bits_jfunc->value);
4594 streamer_write_widest_int (ob, bits_jfunc->mask);
4598 else
4599 streamer_write_uhwi (ob, 0);
4602 /* Stream in the IPA-CP transformation summary of NODE from IB: its aggregate
4603 value replacement chain, value ranges and known bits. */
4604 static void
4605 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
4606 data_in *data_in)
4608 struct ipa_agg_replacement_value *aggvals = NULL;
4609 unsigned int count, i;
4611 count = streamer_read_uhwi (ib);
4612 for (i = 0; i < count; i++)
4614 struct ipa_agg_replacement_value *av;
4615 struct bitpack_d bp;
4617 av = ggc_alloc<ipa_agg_replacement_value> ();
4618 av->offset = streamer_read_uhwi (ib);
4619 av->index = streamer_read_uhwi (ib);
4620 av->value = stream_read_tree (ib, data_in);
4621 bp = streamer_read_bitpack (ib);
4622 av->by_ref = bp_unpack_value (&bp, 1);
4623 av->next = aggvals;
4624 aggvals = av;
4626 ipa_set_node_agg_value_chain (node, aggvals);
4628 count = streamer_read_uhwi (ib);
4629 if (count > 0)
4631 ipcp_transformation_initialize ();
4632 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
4633 vec_safe_grow_cleared (ts->m_vr, count);
4634 for (i = 0; i < count; i++)
4636 ipa_vr *parm_vr;
4637 parm_vr = &(*ts->m_vr)[i];
4638 struct bitpack_d bp;
4639 bp = streamer_read_bitpack (ib);
4640 parm_vr->known = bp_unpack_value (&bp, 1);
4641 if (parm_vr->known)
4643 parm_vr->type = streamer_read_enum (ib, value_range_kind,
4644 VR_LAST);
4645 parm_vr->min = streamer_read_wide_int (ib);
4646 parm_vr->max = streamer_read_wide_int (ib);
4650 count = streamer_read_uhwi (ib);
4651 if (count > 0)
4653 ipcp_transformation_initialize ();
4654 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
4655 vec_safe_grow_cleared (ts->bits, count);
4657 for (i = 0; i < count; i++)
4659 struct bitpack_d bp = streamer_read_bitpack (ib);
4660 bool known = bp_unpack_value (&bp, 1);
4661 if (known)
4663 ipa_bits *bits
4664 = ipa_get_ipa_bits_for_value (streamer_read_widest_int (ib),
4665 streamer_read_widest_int (ib));
4666 (*ts->bits)[i] = bits;
4672 /* Write all aggregate replacements and other transformation summaries for
4673 nodes in the current partition. */
4674 void
4675 ipcp_write_transformation_summaries (void)
4677 struct cgraph_node *node;
4678 struct output_block *ob;
4679 unsigned int count = 0;
4680 lto_symtab_encoder_iterator lsei;
4681 lto_symtab_encoder_t encoder;
4683 ob = create_output_block (LTO_section_ipcp_transform);
4684 encoder = ob->decl_state->symtab_node_encoder;
4685 ob->symbol = NULL;
4686 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4687 lsei_next_function_in_partition (&lsei))
4689 node = lsei_cgraph_node (lsei);
4690 if (node->has_gimple_body_p ())
4691 count++;
4694 streamer_write_uhwi (ob, count);
4696 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4697 lsei_next_function_in_partition (&lsei))
4699 node = lsei_cgraph_node (lsei);
4700 if (node->has_gimple_body_p ())
4701 write_ipcp_transformation_info (ob, node);
4703 streamer_write_char_stream (ob->main_stream, 0);
4704 produce_asm (ob, NULL);
4705 destroy_output_block (ob);
4708 /* Read replacements section in file FILE_DATA of length LEN with data
4709 DATA. */
4711 static void
4712 read_replacements_section (struct lto_file_decl_data *file_data,
4713 const char *data,
4714 size_t len)
4716 const struct lto_function_header *header =
4717 (const struct lto_function_header *) data;
4718 const int cfg_offset = sizeof (struct lto_function_header);
4719 const int main_offset = cfg_offset + header->cfg_size;
4720 const int string_offset = main_offset + header->main_size;
4721 struct data_in *data_in;
4722 unsigned int i;
4723 unsigned int count;
4725 lto_input_block ib_main ((const char *) data + main_offset,
4726 header->main_size, file_data->mode_table);
4728 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
4729 header->string_size, vNULL);
4730 count = streamer_read_uhwi (&ib_main);
4732 for (i = 0; i < count; i++)
4734 unsigned int index;
4735 struct cgraph_node *node;
4736 lto_symtab_encoder_t encoder;
4738 index = streamer_read_uhwi (&ib_main);
4739 encoder = file_data->symtab_node_encoder;
4740 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4741 index));
4742 gcc_assert (node->definition);
4743 read_ipcp_transformation_info (&ib_main, node, data_in);
4745 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
4746 len);
4747 lto_data_in_delete (data_in);
4750 /* Read IPA-CP aggregate replacements. */
4752 void
4753 ipcp_read_transformation_summaries (void)
4755 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4756 struct lto_file_decl_data *file_data;
4757 unsigned int j = 0;
4759 while ((file_data = file_data_vec[j++]))
4761 size_t len;
4762 const char *data = lto_get_section_data (file_data,
4763 LTO_section_ipcp_transform,
4764 NULL, &len);
4765 if (data)
4766 read_replacements_section (file_data, data, len);
4770 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4771 NODE. */
4773 static void
4774 adjust_agg_replacement_values (struct cgraph_node *node,
4775 struct ipa_agg_replacement_value *aggval)
4777 struct ipa_agg_replacement_value *v;
4778 int i, c = 0, d = 0, *adj;
4780 if (!node->clone.combined_args_to_skip)
4781 return;
4783 for (v = aggval; v; v = v->next)
4785 gcc_assert (v->index >= 0);
4786 if (c < v->index)
4787 c = v->index;
4789 c++;
4791 adj = XALLOCAVEC (int, c);
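/* Build a map from pre-clone parameter indices to post-clone ones:
   a skipped parameter maps to -1, all others shift down by the number
   of skipped parameters preceding them.  */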
4792 for (i = 0; i < c; i++)
4793 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
4795 adj[i] = -1;
4796 d++;
4798 else
4799 adj[i] = i - d;
4801 for (v = aggval; v; v = v->next)
4802 v->index = adj[v->index];
4805 /* Dominator walker driving the ipcp modification phase. */
4807 class ipcp_modif_dom_walker : public dom_walker
4809 public:
4810 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
4811 vec<ipa_param_descriptor, va_gc> *descs,
4812 struct ipa_agg_replacement_value *av,
4813 bool *sc, bool *cc)
4814 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
4815 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
4817 virtual edge before_dom_children (basic_block);
4819 private:
4820 struct ipa_func_body_info *m_fbi;
4821 vec<ipa_param_descriptor, va_gc> *m_descriptors;
4822 struct ipa_agg_replacement_value *m_aggval;
4823 bool *m_something_changed, *m_cfg_changed;
4826 edge
4827 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
4829 gimple_stmt_iterator gsi;
4830 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4832 struct ipa_agg_replacement_value *v;
4833 gimple *stmt = gsi_stmt (gsi);
4834 tree rhs, val, t;
4835 HOST_WIDE_INT offset, size;
4836 int index;
4837 bool by_ref, vce;
4839 if (!gimple_assign_load_p (stmt))
4840 continue;
4841 rhs = gimple_assign_rhs1 (stmt);
4842 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
4843 continue;
4845 vce = false;
4846 t = rhs;
4847 while (handled_component_p (t))
4849 /* V_C_E can do things like convert an array of integers to one
4850 bigger integer and similar things we do not handle below. */
4851 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
4853 vce = true;
4854 break;
4856 t = TREE_OPERAND (t, 0);
4858 if (vce)
4859 continue;
4861 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
4862 &offset, &size, &by_ref))
4863 continue;
4864 for (v = m_aggval; v; v = v->next)
4865 if (v->index == index
4866 && v->offset == offset)
4867 break;
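/* Only replace the load when a replacement for exactly this parameter
   and offset exists and agrees on by-ref-ness and size; anything else
   would change the semantics of the load.  */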
4868 if (!v
4869 || v->by_ref != by_ref
4870 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
4871 continue;
4873 gcc_checking_assert (is_gimple_ip_invariant (v->value));
4874 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
4876 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
4877 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
4878 else if (TYPE_SIZE (TREE_TYPE (rhs))
4879 == TYPE_SIZE (TREE_TYPE (v->value)))
4880 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
4881 else
4883 if (dump_file)
4885 fprintf (dump_file, " const ");
4886 print_generic_expr (dump_file, v->value);
4887 fprintf (dump_file, " can't be converted to type of ");
4888 print_generic_expr (dump_file, rhs);
4889 fprintf (dump_file, "\n");
4891 continue;
4894 else
4895 val = v->value;
4897 if (dump_file && (dump_flags & TDF_DETAILS))
4899 fprintf (dump_file, "Modifying stmt:\n ");
4900 print_gimple_stmt (dump_file, stmt, 0);
4902 gimple_assign_set_rhs_from_tree (&gsi, val);
4903 update_stmt (stmt);
4905 if (dump_file && (dump_flags & TDF_DETAILS))
4907 fprintf (dump_file, "into:\n ");
4908 print_gimple_stmt (dump_file, stmt, 0);
4909 fprintf (dump_file, "\n");
4912 *m_something_changed = true;
4913 if (maybe_clean_eh_stmt (stmt)
4914 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4915 *m_cfg_changed = true;
4917 return NULL;
4920 /* Update bits info of formal parameters as described in
4921 ipcp_transformation. */
4923 static void
4924 ipcp_update_bits (struct cgraph_node *node)
4926 tree parm = DECL_ARGUMENTS (node->decl);
4927 tree next_parm = parm;
4928 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
4930 if (!ts || vec_safe_length (ts->bits) == 0)
4931 return;
4933 vec<ipa_bits *, va_gc> &bits = *ts->bits;
4934 unsigned count = bits.length ();
4936 for (unsigned i = 0; i < count; ++i, parm = next_parm)
4938 if (node->clone.combined_args_to_skip
4939 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
4940 continue;
4942 gcc_checking_assert (parm);
4943 next_parm = DECL_CHAIN (parm);
4945 if (!bits[i]
4946 || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
4947 || POINTER_TYPE_P (TREE_TYPE (parm)))
4948 || !is_gimple_reg (parm))
4949 continue;
4951 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
4952 if (!ddef)
4953 continue;
4955 if (dump_file)
4957 fprintf (dump_file, "Adjusting mask for param %u to ", i);
4958 print_hex (bits[i]->mask, dump_file);
4959 fprintf (dump_file, "\n");
4962 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
4964 unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
4965 signop sgn = TYPE_SIGN (TREE_TYPE (ddef));
4967 wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
4968 | wide_int::from (bits[i]->value, prec, sgn);
4969 set_nonzero_bits (ddef, nonzero_bits);
4971 else
4973 unsigned tem = bits[i]->mask.to_uhwi ();
4974 unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
4975 unsigned align = tem & -tem;
4976 unsigned misalign = bitpos & (align - 1);
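/* TEM & -TEM isolates the lowest set (i.e. unknown) bit of the mask,
   which is the alignment the known low bits guarantee; the known value
   bits below it form the misalignment.  For instance, a mask of 0xf0
   with value 0x4 yields align 16 and misalign 4.  */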
4978 if (align > 1)
4980 if (dump_file)
4981 fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);
4983 unsigned old_align, old_misalign;
4984 struct ptr_info_def *pi = get_ptr_info (ddef);
4985 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
4987 if (old_known
4988 && old_align > align)
4990 if (dump_file)
4992 fprintf (dump_file, "But alignment was already %u.\n", old_align);
4993 if ((old_misalign & (align - 1)) != misalign)
4994 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
4995 old_misalign, misalign);
4997 continue;
5000 if (old_known
5001 && ((misalign & (old_align - 1)) != old_misalign)
5002 && dump_file)
5003 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5004 old_misalign, misalign);
5006 set_ptr_info_alignment (pi, align, misalign);
5012 /* Update value range of formal parameters as described in
5013 ipcp_transformation. */
5015 static void
5016 ipcp_update_vr (struct cgraph_node *node)
5018 tree fndecl = node->decl;
5019 tree parm = DECL_ARGUMENTS (fndecl);
5020 tree next_parm = parm;
5021 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5022 if (!ts || vec_safe_length (ts->m_vr) == 0)
5023 return;
5024 const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
5025 unsigned count = vr.length ();
5027 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5029 if (node->clone.combined_args_to_skip
5030 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5031 continue;
5032 gcc_checking_assert (parm);
5033 next_parm = DECL_CHAIN (parm);
5034 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5036 if (!ddef || !is_gimple_reg (parm))
5037 continue;
5039 if (vr[i].known
5040 && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
5042 tree type = TREE_TYPE (ddef);
5043 unsigned prec = TYPE_PRECISION (type);
5044 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5046 if (dump_file)
5048 fprintf (dump_file, "Setting value range of param %u ", i);
5049 fprintf (dump_file, "%s[",
5050 (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
5051 print_decs (vr[i].min, dump_file);
5052 fprintf (dump_file, ", ");
5053 print_decs (vr[i].max, dump_file);
5054 fprintf (dump_file, "]\n");
5056 set_range_info (ddef, vr[i].type,
5057 wide_int_storage::from (vr[i].min, prec,
5058 TYPE_SIGN (type)),
5059 wide_int_storage::from (vr[i].max, prec,
5060 TYPE_SIGN (type)));
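/* For pointers, only the anti-range ~[0, 0], i.e. "never null", can be
   encoded in the SSA name.  */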
5062 else if (POINTER_TYPE_P (TREE_TYPE (ddef))
5063 && vr[i].type == VR_ANTI_RANGE
5064 && wi::eq_p (vr[i].min, 0)
5065 && wi::eq_p (vr[i].max, 0))
5067 if (dump_file)
5068 fprintf (dump_file, "Setting nonnull for %u\n", i);
5069 set_ptr_nonnull (ddef);
5075 /* IPCP transformation phase doing propagation of aggregate values. */
5077 unsigned int
5078 ipcp_transform_function (struct cgraph_node *node)
5080 vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
5081 struct ipa_func_body_info fbi;
5082 struct ipa_agg_replacement_value *aggval;
5083 int param_count;
5084 bool cfg_changed = false, something_changed = false;
5086 gcc_checking_assert (cfun);
5087 gcc_checking_assert (current_function_decl);
5089 if (dump_file)
5090 fprintf (dump_file, "Modification phase of node %s\n",
5091 node->dump_name ());
5093 ipcp_update_bits (node);
5094 ipcp_update_vr (node);
5095 aggval = ipa_get_agg_replacements_for_node (node);
5096 if (!aggval)
5097 return 0;
5098 param_count = count_formal_params (node->decl);
5099 if (param_count == 0)
5100 return 0;
5101 adjust_agg_replacement_values (node, aggval);
5102 if (dump_file)
5103 ipa_dump_agg_replacement_values (dump_file, aggval);
5105 fbi.node = node;
5106 fbi.info = NULL;
5107 fbi.bb_infos = vNULL;
5108 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5109 fbi.param_count = param_count;
5110 fbi.aa_walked = 0;
5112 vec_safe_grow_cleared (descriptors, param_count);
5113 ipa_populate_param_decls (node, *descriptors);
5114 calculate_dominance_info (CDI_DOMINATORS);
5115 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5116 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5118 int i;
5119 struct ipa_bb_info *bi;
5120 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5121 free_ipa_bb_info (bi);
5122 fbi.bb_infos.release ();
5123 free_dominance_info (CDI_DOMINATORS);
5125 ipcp_transformation *s = ipcp_transformation_sum->get (node);
5126 s->agg_values = NULL;
5127 s->bits = NULL;
5128 s->m_vr = NULL;
5130 vec_free (descriptors);
5132 if (!something_changed)
5133 return 0;
5134 else if (cfg_changed)
5135 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5136 else
5137 return TODO_update_ssa_only_virtuals;
5140 #include "gt-ipa-prop.h"