/* Interprocedural analyses.
   Copyright (C) 2005-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "calls.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-inline.h"
#include "ipa-fnsummary.h"
#include "gimple-pretty-print.h"
#include "params.h"
#include "ipa-utils.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"
#include "tree-cfgcleanup.h"
/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;

function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;

/* Edge summary for IPA-CP edge information.  */
ipa_edge_args_sum_t *ipa_edge_args_sum;
/* Traits for a hash table for reusing already existing ipa_bits.  */

struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
{
  typedef ipa_bits *value_type;
  typedef ipa_bits *compare_type;
  static hashval_t
  hash (const ipa_bits *p)
    {
      hashval_t t = (hashval_t) p->value.to_shwi ();
      return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
    }
  static bool
  equal (const ipa_bits *a, const ipa_bits *b)
    {
      return a->value == b->value && a->mask == b->mask;
    }
  static void
  mark_empty (ipa_bits *&p)
    {
      p = NULL;
    }
  static bool
  is_empty (const ipa_bits *p)
    {
      return p == NULL;
    }
  static bool
  is_deleted (const ipa_bits *p)
    {
      return p == reinterpret_cast<const ipa_bits *> (1);
    }
  static void
  mark_deleted (ipa_bits *&p)
    {
      p = reinterpret_cast<ipa_bits *> (1);
    }
};

/* Hash table to avoid repeated allocations of equal ipa_bits.  */
static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
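
/* For illustration: with the traits above, an empty slot is represented by a
   NULL pointer and a deleted slot by the otherwise invalid address 1, so no
   extra storage is needed to track slot state.  The value_range hash table
   below relies on the same convention.  */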
/* Traits for a hash table for reusing value_ranges used for IPA.  Note that
   the equiv bitmap is not hashed and is expected to be NULL.  */

struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range_base *>
{
  typedef value_range_base *value_type;
  typedef value_range_base *compare_type;
  static hashval_t
  hash (const value_range_base *p)
    {
      inchash::hash hstate (p->kind ());
      inchash::add_expr (p->min (), hstate);
      inchash::add_expr (p->max (), hstate);
      return hstate.end ();
    }
  static bool
  equal (const value_range_base *a, const value_range_base *b)
    {
      return a->equal_p (*b);
    }
  static void
  mark_empty (value_range_base *&p)
    {
      p = NULL;
    }
  static bool
  is_empty (const value_range_base *p)
    {
      return p == NULL;
    }
  static bool
  is_deleted (const value_range_base *p)
    {
      return p == reinterpret_cast<const value_range_base *> (1);
    }
  static void
  mark_deleted (value_range_base *&p)
    {
      p = reinterpret_cast<value_range_base *> (1);
    }
};

/* Hash table to avoid repeated allocations of equal value_ranges.  */
static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
/* Holders of ipa cgraph hooks:  */
static struct cgraph_node_hook_list *function_insertion_hook_holder;
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize)
	 || !opt_for_fn (node->decl, flag_ipa_cp);
}
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
			    tree ptree)
{
  int i, count;

  count = vec_safe_length (descriptors);
  for (i = 0; i < count; i++)
    if ((*descriptors)[i].decl_or_type == ptree)
      return i;

  return -1;
}
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (class ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor, va_gc> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl_or_type = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
							     true);
      param_num++;
    }
}
/* Return how many formal parameters FNDECL has.  */

static int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}
/* Dump a human-readable representation of the Ith formal parameter of the
   function corresponding to INFO into FILE.  */

void
ipa_dump_param (FILE *file, class ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if ((*info->descriptors)[i].decl_or_type)
    {
      fprintf (file, " ");
      print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
    }
}
/* If necessary, allocate vector of parameter descriptors in info of NODE.
   Return true if they were allocated, false if not.  */

static bool
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  class ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors && param_count)
    {
      vec_safe_grow_cleared (info->descriptors, param_count);
      return true;
    }
  else
    return false;
}
/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  class ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors
      && ipa_alloc_node_params (node, count_formal_params (node->decl)))
    ipa_populate_param_decls (node, *info->descriptors);
}
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f, jump_func->value.pass_through.operand);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value);
		}
	      fprintf (f, "\n");
	    }
	}

      class ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, "         Context: ");
	  ctx->dump (f);
	}

      if (jump_func->bits)
	{
	  fprintf (f, "         value: ");
	  print_hex (jump_func->bits->value, f);
	  fprintf (f, ", mask: ");
	  print_hex (jump_func->bits->mask, f);
	  fprintf (f, "\n");
	}
      else
	fprintf (f, "         Unknown bits\n");

      if (jump_func->m_vr)
	{
	  fprintf (f, "         VR  ");
	  fprintf (f, "%s[",
		   (jump_func->m_vr->kind () == VR_ANTI_RANGE) ? "~" : "");
	  print_decs (wi::to_wide (jump_func->m_vr->min ()), f);
	  fprintf (f, ", ");
	  print_decs (wi::to_wide (jump_func->m_vr->max ()), f);
	  fprintf (f, "]\n");
	}
      else
	fprintf (f, "         Unknown VR\n");
    }
}
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller %s:\n", node->dump_name ());
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, "    callsite %s -> %s : \n",
	       node->dump_name (),
	       cs->callee->dump_name ());
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      class cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by_value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}
/* Set JFUNC to be a jump function that carries no knowledge about the value
   at all.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
  jfunc->bits = NULL;
  jfunc->m_vr = NULL;
}
/* Set DST to be a copy of the jump function SRC (to be used by jump function
   combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}
/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
/* Set JFUNC to be a simple pass-through jump function.  */
static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}
/* Set JFUNC to be a unary pass-through jump function.  */

static void
ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}
/* Set JFUNC to be an arithmetic pass-through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}
/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}
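
/* For illustration, a hypothetical GIMPLE fragment (not taken from any
   particular dump) and the jump functions the setters above would encode,
   assuming `a` is the formal parameter with formal_id 0 and D.1900 is a
   made-up field:

     bar (a_1);              simple pass-through (operation NOP_EXPR)
     t_2 = a_1 + 4;
     bar (t_2);              arithmetic pass-through (operand 4, PLUS_EXPR)
     t_3 = &a_1->D.1900;
     bar (t_3);              ancestor (offset of the field, in bits)

   The agg_preserved flag additionally records that the aggregate reachable
   through the argument is known not to have been modified before the call.  */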
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructor of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_ref_base_and_extent to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
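
/* For illustration, a typical virtual table pointer store in a constructor
   looks roughly like this in a GIMPLE dump (a sketch, not from this file):

     this_1(D)->_vptr.A = &MEM[(void *)&_ZTV1A + 16B];

   The LHS is a COMPONENT_REF whose FIELD_DECL has DECL_VIRTUAL_P set, which
   is exactly the kind of store the function above refuses to rule out.  */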
/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return true if the dynamic type may change
   between the beginning of the function and the point where CALL is invoked.

   Generally functions are not allowed to change type of such instances,
   but they call destructors.  We assume that methods cannot destroy the THIS
   pointer.  Also as a special case, constructors and destructors may change
   type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that requires writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inline cdtor is actually working on ARG, but we don't have
     an easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know the type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call);
	       block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it has, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (ipa_func_body_info *fbi, tree arg,
				       tree base, tree comp_type, gcall *call,
				       struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  int walked
    = walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
			  &tci, NULL, NULL, fbi->aa_walk_budget + 1);

  if (walked >= 0 && !tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it has, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (ipa_func_body_info *fbi, tree arg, tree base,
		    tree comp_type, gcall *call, struct ipa_jump_func *jfunc,
		    HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (fbi, arg, base, comp_type,
						call, jfunc, offset);
}
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (ipa_func_body_info *fbi, tree arg, tree comp_type,
			gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (fbi, arg, arg, comp_type,
						call, jfunc, 0);
}
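
/* Note: the MEM_REF built above represents *ARG at offset zero, i.e. the
   beginning of the pointed-to object, which is where the virtual table
   pointer typically lives, so detect_type_change_from_memory_writes can scan
   for stores into that location.  */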
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered which does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  if (TREE_READONLY (base))
    return true;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  if (paa->parm_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget + 1);
  if (walked < 0)
    {
      modified = true;
      if (fbi)
	fbi->aa_walk_budget = 0;
    }
  else if (fbi)
    fbi->aa_walk_budget -= walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
			    vec<ipa_param_descriptor, va_gc> *descriptors,
			    gimple *stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  if (paa->ref_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget + 1);
  if (walked < 0)
    {
      modified = true;
      fbi->aa_walk_budget = 0;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (modified)
    paa->ref_modified = true;
  return !modified;
}
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm)))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget + 1);
  if (walked < 0)
    {
      fbi->aa_walk_budget = 0;
      modified = true;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
/* Return true if we can prove that OP is a memory reference loading
   data from an aggregate passed as a parameter.

   The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it
   returns false if it cannot prove that the value has not been modified before
   the load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will return true
   even if it cannot prove the value has not been modified, in that case it
   will store false to *GUARANTEED_UNMODIFIED, otherwise it will store true
   there.

   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF_P is filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor, va_gc> *descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, poly_int64 *size_p,
			bool *by_ref_p, bool *guaranteed_unmodified)
{
  int index;
  HOST_WIDE_INT size;
  bool reverse;
  tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);

  if (!base)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  if (guaranteed_unmodified)
	    *guaranteed_unmodified = true;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	   void (*<T2e4>) (struct S *) D.1867;
	   struct S * p.1;

	   <bb 2>:
	   p.1_1 = p;
	   D.1867_2 = p.1_1->f;
	   D.1867_2 ();
	   gdp = &p;
	 */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0)
    {
      bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
      if (!data_preserved && !guaranteed_unmodified)
	return false;

      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      if (guaranteed_unmodified)
	*guaranteed_unmodified = data_preserved;
      return true;
    }
  return false;
}
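
/* For illustration, given a hypothetical function with parameter `p`:

     int f (struct S *p) { return p->f; }

   the load `p->f` would make this function return true with *INDEX_P set to
   the index of `p`, *OFFSET_P to the bit offset of field `f` and *BY_REF_P to
   true; a by-value aggregate parameter would instead yield *BY_REF_P ==
   false through the DECL_P branch above.  */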
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
	int a.0;

	a.0_2 = a;
	bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
	int D.2064;

	D.2064_4 = a.1(D) + 4;
	bar (D.2064_4);

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
	int a.0;
	int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters across different
   stages of IPA optimizations.  PARMS_AINFO contains the information that is
   only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  class ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      switch (gimple_assign_rhs_class (stmt))
	{
	case GIMPLE_BINARY_RHS:
	  {
	    tree op2 = gimple_assign_rhs2 (stmt);
	    if (!is_gimple_ip_invariant (op2)
		|| ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
		     != tcc_comparison)
		    && !useless_type_conversion_p (TREE_TYPE (name),
						   TREE_TYPE (op1))))
	      return;

	    ipa_set_jf_arith_pass_through (jfunc, index, op2,
					   gimple_assign_rhs_code (stmt));
	    break;
	  }
	case GIMPLE_SINGLE_RHS:
	  {
	    bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
						       tc_ssa);
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	    break;
	  }
	case GIMPLE_UNARY_RHS:
	  if (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
	    ipa_set_jf_unary_pass_through (jfunc, index,
					   gimple_assign_rhs_code (stmt));
	default:;
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
  offset_int mem_offset;
  if (!base
      || TREE_CODE (base) != MEM_REF
      || !mem_ref_offset (base).is_constant (&mem_offset))
    return;
  offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);

  offset_int mem_offset;
  if (!expr
      || TREE_CODE (expr) != MEM_REF
      || !mem_ref_offset (expr).is_constant (&mem_offset))
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    class ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
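
/* For illustration, a record that type_like_member_ptr_p accepts has the
   shape C++ front ends commonly use for pointers to member functions (a
   sketch under that assumption, not a definition from this file):

     struct fake_member_ptr
     {
       void (S::*method) ();   -- POINTER_TYPE to a METHOD_TYPE
       ptrdiff_t delta;        -- adjustment applied to the object pointer
     };  */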
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}
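
/* For illustration, in a hypothetical copy chain

     a_2 = a_1(D);
     b_3 = a_2;

   get_ssa_def_if_simple_copy (b_3) walks back through the single-rhs copies
   and returns a_1(D), stopping at the default definition.  */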
/* Simple linked list, describing known contents of an aggregate before
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
/* Add a known content item into a linked list of ipa_known_agg_contents_list
   structure, in which all elements are sorted ascendingly by offset.  */

static inline void
add_to_agg_contents_list (struct ipa_known_agg_contents_list **plist,
			  struct ipa_known_agg_contents_list *item)
{
  struct ipa_known_agg_contents_list *list = *plist;

  for (; list; list = list->next)
    {
      if (list->offset >= item->offset)
	break;

      plist = &list->next;
    }

  item->next = list;
  *plist = item;
}
/* Check whether a given known content is clobbered by a certain element in
   a linked list of ipa_known_agg_contents_list.  */

static inline bool
clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list *list,
				struct ipa_known_agg_contents_list *item)
{
  for (; list; list = list->next)
    {
      if (list->offset >= item->offset)
	return list->offset < item->offset + item->size;

      if (list->offset + list->size > item->offset)
	return true;
    }

  return false;
}
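
/* For illustration, with a sorted list holding one entry at offset 0 of size
   32, a hypothetical new item at offset 32 of size 16 is not clobbered (the
   existing store ends exactly where the item begins), while an item at
   offset 16 of size 16 would be, because the ranges [0, 32) and [16, 32)
   overlap.  */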
/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that offset of the passed argument
   is ARG_OFFSET and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int const_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
	{
	  struct ipa_agg_jf_item item;
	  item.offset = list->offset - arg_offset;
	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	  item.value = unshare_expr_without_location (list->constant);
	  jfunc->agg.items->quick_push (item);
	}
      list = list->next;
    }
}
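
/* Note that only list entries with a non-NULL constant are pushed above, and
   their offsets are rebased to the start of the passed argument: e.g. with
   ARG_OFFSET of 64 bits, a known constant stored at offset 96 becomes an
   ipa_agg_jf_item with offset 32.  */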
/* If STMT is a memory store to the object whose address is BASE, extract
   information (offset, size, and value) into CONTENT, and return true,
   otherwise we conservatively assume the whole object is modified with
   unknown content, and return false.  CHECK_REF means that access to object
   is expected to be in form of MEM_REF expression.  */

static bool
extract_mem_content (gimple *stmt, tree base, bool check_ref,
		     struct ipa_known_agg_contents_list *content)
{
  HOST_WIDE_INT lhs_offset, lhs_size;
  tree lhs, rhs, lhs_base;
  bool reverse;

  if (!gimple_assign_single_p (stmt))
    return false;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);

  if (!is_gimple_reg_type (TREE_TYPE (rhs))
      || TREE_CODE (lhs) == BIT_FIELD_REF
      || contains_bitfld_component_ref_p (lhs))
    return false;

  lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset,
					  &lhs_size, &reverse);
  if (!lhs_base)
    return false;

  if (check_ref)
    {
      if (TREE_CODE (lhs_base) != MEM_REF
	  || TREE_OPERAND (lhs_base, 0) != base
	  || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	return false;
    }
  else if (lhs_base != base)
    return false;

  rhs = get_ssa_def_if_simple_copy (rhs);

  content->size = lhs_size;
  content->offset = lhs_offset;
  content->constant = is_gimple_ip_invariant (rhs) ? rhs : NULL_TREE;
  content->next = NULL;

  return true;
}
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  AA_WALK_BUDGET_P points to the limit on the number of
   statements we allow get_continuation_for_phi to examine.  */

static void
determine_known_aggregate_parts (gcall *call, tree arg,
				 tree arg_type,
				 struct ipa_jump_func *jfunc,
				 unsigned *aa_walk_budget_p)
{
  struct ipa_known_agg_contents_list *list = NULL, *all_list = NULL;
  bitmap visited = NULL;
  int item_count = 0, const_count = 0;
  int ipa_max_agg_items = PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS);
  HOST_WIDE_INT arg_offset, arg_size;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  if (ipa_max_agg_items == 0)
    return;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type)))
	      || !POINTER_TYPE_P (TREE_TYPE (arg)))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  bool reverse;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
						  &arg_size, &reverse);
	  if (!arg_base)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      bool reverse;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
					      &arg_size, &reverse);
      if (!arg_base)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage traverses virtual SSA web backwards starting from the call
     statement, only looks at individual dominating virtual operands (their
     definitions dominate the call), and as long as it is confident that the
     content of the aggregate is affected by the definition of the virtual
     operand, it builds a sorted linked list of ipa_agg_jf_list describing
     that.  */

  for (tree dom_vuse = gimple_vuse (call); dom_vuse;)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (dom_vuse);

      if (gimple_code (stmt) == GIMPLE_PHI)
	{
	  dom_vuse = get_continuation_for_phi (stmt, &r, true,
					       *aa_walk_budget_p,
					       &visited, false, NULL, NULL);
	  continue;
	}

      if (stmt_may_clobber_ref_p_1 (stmt, &r))
	{
	  struct ipa_known_agg_contents_list *content
	    = XALLOCA (struct ipa_known_agg_contents_list);

	  if (!extract_mem_content (stmt, arg_base, check_ref, content))
	    break;

	  /* Now we get a dominating virtual operand, and need to check
	     whether its value is clobbered by any other dominating one.  */
	  if (content->constant
	      && !clobber_by_agg_contents_list_p (all_list, content))
	    {
	      struct ipa_known_agg_contents_list *copy
		= XALLOCA (struct ipa_known_agg_contents_list);

	      /* Add to the list consisting of only dominating virtual
		 operands, whose definitions can finally reach the call.  */
	      add_to_agg_contents_list (&list, (*copy = *content, copy));

	      if (++const_count == ipa_max_agg_items)
		break;
	    }

	  /* Add to the list consisting of all dominating virtual operands.  */
	  add_to_agg_contents_list (&all_list, content);

	  if (++item_count == 2 * ipa_max_agg_items)
	    break;
	}
      dom_vuse = gimple_vuse (stmt);
    }

  if (visited)
    BITMAP_FREE (visited);

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}
/* Return the Ith param type of callee associated with call graph
   edge E.  */

tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
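
/* For illustration, for a callee declared as `int f (int, long)`, calling
   this function with I == 1 returns the `long` type node taken from
   TYPE_ARG_TYPES; for unprototyped (K&R-style) callees the second loop falls
   back to the types of DECL_ARGUMENTS instead.  */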
/* Return ipa_bits with VALUE and MASK values, which can be either a newly
   allocated structure or a previously existing one shared with other jump
   functions and/or transformation summaries.  */

ipa_bits *
ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
{
  ipa_bits tmp;
  tmp.value = value;
  tmp.mask = mask;

  ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
  if (*slot)
    return *slot;

  ipa_bits *res = ggc_alloc<ipa_bits> ();
  res->value = value;
  res->mask = mask;
  *slot = res;

  return res;
}
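
/* For illustration: two calls to ipa_get_ipa_bits_for_value with equal VALUE
   and MASK return the same GC-allocated ipa_bits pointer, so jump functions
   and transformation summaries share one structure per distinct value/mask
   pair instead of allocating duplicates.  */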
/* Assign to JF a pointer to ipa_bits structure with VALUE and MASK.  Use hash
   table in order to avoid creating multiple same ipa_bits structures.  */

static void
ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
		    const widest_int &mask)
{
  jf->bits = ipa_get_ipa_bits_for_value (value, mask);
}
/* Return a pointer to a value_range just like *TMP, but either find it in
   ipa_vr_hash_table or allocate it in GC memory.  TMP->equiv must be NULL.  */

static value_range_base *
ipa_get_value_range (value_range_base *tmp)
{
  value_range_base **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
  if (*slot)
    return *slot;

  value_range_base *vr = ggc_alloc<value_range_base> ();
  *vr = *tmp;
  *slot = vr;

  return vr;
}
/* Return a pointer to a value range consisting of TYPE, MIN, MAX and an empty
   equiv set.  Use hash table in order to avoid creating multiple same copies of
   value_ranges.  */

static value_range_base *
ipa_get_value_range (enum value_range_kind type, tree min, tree max)
{
  value_range_base tmp (type, min, max);
  return ipa_get_value_range (&tmp);
}
/* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
   a NULL equiv bitmap.  Use hash table in order to avoid creating multiple
   same value_range structures.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_kind type,
		  tree min, tree max)
{
  jf->m_vr = ipa_get_value_range (type, min, max);
}
/* Assign to JF a pointer to a value_range just like TMP but either fetch a
   copy from ipa_vr_hash_table or allocate a new one in GC memory.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, value_range_base *tmp)
{
  jf->m_vr = ipa_get_value_range (tmp);
}
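
/* For illustration: ipa_compute_jump_functions_for_edge below uses the first
   overload with VR_ANTI_RANGE and zero bounds, i.e. ~[0, 0], to record that
   a pointer argument is known to be non-NULL.  */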
1848 /* Compute jump function for all arguments of callsite CS and insert the
1849 information in the jump_functions array in the ipa_edge_args corresponding
1850 to this callsite. */
1852 static void
1853 ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
1854 struct cgraph_edge *cs)
1856 class ipa_node_params *info = IPA_NODE_REF (cs->caller);
1857 class ipa_edge_args *args = IPA_EDGE_REF (cs);
1858 gcall *call = cs->call_stmt;
1859 int n, arg_num = gimple_call_num_args (call);
1860 bool useful_context = false;
1862 if (arg_num == 0 || args->jump_functions)
1863 return;
1864 vec_safe_grow_cleared (args->jump_functions, arg_num);
1865 if (flag_devirtualize)
1866 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
1868 if (gimple_call_internal_p (call))
1869 return;
1870 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1871 return;
1873 for (n = 0; n < arg_num; n++)
1875 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1876 tree arg = gimple_call_arg (call, n);
1877 tree param_type = ipa_get_callee_param_type (cs, n);
1878 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
1880 tree instance;
1881 class ipa_polymorphic_call_context context (cs->caller->decl,
1882 arg, cs->call_stmt,
1883 &instance);
1884 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt,
1885 &fbi->aa_walk_budget);
1886 *ipa_get_ith_polymorhic_call_context (args, n) = context;
1887 if (!context.useless_p ())
1888 useful_context = true;
1891 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1893 bool addr_nonzero = false;
1894 bool strict_overflow = false;
1896 if (TREE_CODE (arg) == SSA_NAME
1897 && param_type
1898 && get_ptr_nonnull (arg))
1899 addr_nonzero = true;
1900 else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
1901 addr_nonzero = true;
1903 if (addr_nonzero)
1905 tree z = build_int_cst (TREE_TYPE (arg), 0);
1906 ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
1908 else
1909 gcc_assert (!jfunc->m_vr);
1911 else
1913 wide_int min, max;
1914 value_range_kind type;
1915 if (TREE_CODE (arg) == SSA_NAME
1916 && param_type
1917 && (type = get_range_info (arg, &min, &max))
1918 && (type == VR_RANGE || type == VR_ANTI_RANGE))
1920 value_range_base resvr;
1921 value_range_base tmpvr (type,
1922 wide_int_to_tree (TREE_TYPE (arg), min),
1923 wide_int_to_tree (TREE_TYPE (arg), max));
1924 range_fold_unary_expr (&resvr, NOP_EXPR, param_type,
1925 &tmpvr, TREE_TYPE (arg));
1926 if (!resvr.undefined_p () && !resvr.varying_p ())
1927 ipa_set_jfunc_vr (jfunc, &resvr);
1928 else
1929 gcc_assert (!jfunc->m_vr);
1931 else
1932 gcc_assert (!jfunc->m_vr);
1935 if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
1936 && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
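	  /* In an ipa_bits pair, bits cleared in the mask are known and equal
	     to the corresponding bits of the value.  A constant therefore
	     gets a zero mask (all bits known), whereas for an SSA name the
	     bits that get_nonzero_bits reports as possibly non-zero are
	     recorded as unknown.  */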
1938 if (TREE_CODE (arg) == SSA_NAME)
1939 ipa_set_jfunc_bits (jfunc, 0,
1940 widest_int::from (get_nonzero_bits (arg),
1941 TYPE_SIGN (TREE_TYPE (arg))));
1942 else
1943 ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
1945 else if (POINTER_TYPE_P (TREE_TYPE (arg)))
1947 unsigned HOST_WIDE_INT bitpos;
1948 unsigned align;
1950 get_pointer_alignment_1 (arg, &align, &bitpos);
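	  /* ALIGN and BITPOS are in bits.  For instance, an 8-byte aligned
	     pointer with BITPOS 8 yields value 1 (its byte offset within the
	     alignment) and a mask whose low three bits are clear, i.e.
	     known.  */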
1951 widest_int mask = wi::bit_and_not
1952 (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
1953 align / BITS_PER_UNIT - 1);
1954 widest_int value = bitpos / BITS_PER_UNIT;
1955 ipa_set_jfunc_bits (jfunc, value, mask);
1957 else
1958 gcc_assert (!jfunc->bits);
1960 if (is_gimple_ip_invariant (arg)
1961 || (VAR_P (arg)
1962 && is_global_var (arg)
1963 && TREE_READONLY (arg)))
1964 ipa_set_jf_constant (jfunc, arg, cs);
1965 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1966 && TREE_CODE (arg) == PARM_DECL)
1968 int index = ipa_get_param_decl_index (info, arg);
1970          gcc_assert (index >= 0);
1971 /* Aggregate passed by value, check for pass-through, otherwise we
1972 will attempt to fill in aggregate contents later in this
1973             loop.  */
1974 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1976 ipa_set_jf_simple_pass_through (jfunc, index, false);
1977 continue;
1980 else if (TREE_CODE (arg) == SSA_NAME)
1982 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1984 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1985 if (index >= 0)
1987 bool agg_p;
1988 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1989 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1992 else
1994 gimple *stmt = SSA_NAME_DEF_STMT (arg);
1995 if (is_gimple_assign (stmt))
1996 compute_complex_assign_jump_func (fbi, info, jfunc,
1997 call, stmt, arg, param_type);
1998 else if (gimple_code (stmt) == GIMPLE_PHI)
1999 compute_complex_ancestor_jump_func (fbi, info, jfunc,
2000 call,
2001 as_a <gphi *> (stmt));
2005       /* If ARG is a pointer, we cannot use its type to determine the type of aggregate
2006	 passed (because type conversions are ignored in gimple).  Usually we can
2007	 safely get the type from the function declaration, but in case of K&R prototypes or
2008	 variadic functions we can try our luck with the type of the pointer passed.
2009	 TODO: Since we look for actual initialization of the memory object, we may do
2010	 better by working out the type based on the memory stores we find.  */
2011 if (!param_type)
2012 param_type = TREE_TYPE (arg);
2014 if ((jfunc->type != IPA_JF_PASS_THROUGH
2015 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
2016 && (jfunc->type != IPA_JF_ANCESTOR
2017 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
2018 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
2019 || POINTER_TYPE_P (param_type)))
2020 determine_known_aggregate_parts (call, arg, param_type, jfunc,
2021 &fbi->aa_walk_budget);
2023 if (!useful_context)
2024 vec_free (args->polymorphic_call_contexts);
2027 /* Compute jump functions for all edges - both direct and indirect - outgoing
2028 from BB. */
2030 static void
2031 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
2033 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2034 int i;
2035 struct cgraph_edge *cs;
2037 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
2039 struct cgraph_node *callee = cs->callee;
2041 if (callee)
2043	  callee = callee->ultimate_alias_target ();
2044 /* We do not need to bother analyzing calls to unknown functions
2045 unless they may become known during lto/whopr. */
2046 if (!callee->definition && !flag_lto)
2047 continue;
2049 ipa_compute_jump_functions_for_edge (fbi, cs);
2053 /* If STMT looks like a statement loading a value from a member pointer formal
2054 parameter, return that parameter and store the offset of the field to
2055 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2056 might be clobbered). If USE_DELTA, then we look for a use of the delta
2057 field rather than the pfn. */
2059 static tree
2060 ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
2061 HOST_WIDE_INT *offset_p)
2063 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2065 if (!gimple_assign_single_p (stmt))
2066 return NULL_TREE;
2068 rhs = gimple_assign_rhs1 (stmt);
2069 if (TREE_CODE (rhs) == COMPONENT_REF)
2071 ref_field = TREE_OPERAND (rhs, 1);
2072 rhs = TREE_OPERAND (rhs, 0);
2074 else
2075 ref_field = NULL_TREE;
2076 if (TREE_CODE (rhs) != MEM_REF)
2077 return NULL_TREE;
2078 rec = TREE_OPERAND (rhs, 0);
2079 if (TREE_CODE (rec) != ADDR_EXPR)
2080 return NULL_TREE;
2081 rec = TREE_OPERAND (rec, 0);
2082 if (TREE_CODE (rec) != PARM_DECL
2083 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2084 return NULL_TREE;
2085 ref_offset = TREE_OPERAND (rhs, 1);
2087 if (use_delta)
2088 fld = delta_field;
2089 else
2090 fld = ptr_field;
2091 if (offset_p)
2092 *offset_p = int_bit_position (fld);
2094 if (ref_field)
2096 if (integer_nonzerop (ref_offset))
2097 return NULL_TREE;
2098 return ref_field == fld ? rec : NULL_TREE;
2100 else
2101 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2102 : NULL_TREE;
2105 /* Returns true iff T is an SSA_NAME defined by a statement. */
2107 static bool
2108 ipa_is_ssa_with_stmt_def (tree t)
2110 if (TREE_CODE (t) == SSA_NAME
2111 && !SSA_NAME_IS_DEFAULT_DEF (t))
2112 return true;
2113 else
2114 return false;
2117 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2118 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2119 indirect call graph edge. */
2121 static struct cgraph_edge *
2122 ipa_note_param_call (struct cgraph_node *node, int param_index,
2123 gcall *stmt)
2125 struct cgraph_edge *cs;
2127 cs = node->get_edge (stmt);
2128 cs->indirect_info->param_index = param_index;
2129 cs->indirect_info->agg_contents = 0;
2130 cs->indirect_info->member_ptr = 0;
2131 cs->indirect_info->guaranteed_unmodified = 0;
2132 return cs;
2135 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
2136 (described by INFO). PARMS_AINFO is a pointer to a vector containing
2137 intermediate information about each formal parameter. Currently it checks
2138    whether the call is made through a pointer that is a formal parameter and if so, the
2139 parameter is marked with the called flag and an indirect call graph edge
2140 describing the call is created. This is very simple for ordinary pointers
2141 represented in SSA but not-so-nice when it comes to member pointers. The
2142 ugly part of this function does nothing more than trying to match the
2143 pattern of such a call. An example of such a pattern is the gimple dump
2144 below, the call is on the last line:
2146 <bb 2>:
2147 f$__delta_5 = f.__delta;
2148 f$__pfn_24 = f.__pfn;
2150    or
2151    <bb 2>:
2152 f$__delta_5 = MEM[(struct *)&f];
2153 f$__pfn_24 = MEM[(struct *)&f + 4B];
2155 and a few lines below:
2157 <bb 5>
2158 D.2496_3 = (int) f$__pfn_24;
2159 D.2497_4 = D.2496_3 & 1;
2160 if (D.2497_4 != 0)
2161 goto <bb 3>;
2162 else
2163 goto <bb 4>;
2165 <bb 6>:
2166 D.2500_7 = (unsigned int) f$__delta_5;
2167 D.2501_8 = &S + D.2500_7;
2168 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2169 D.2503_10 = *D.2502_9;
2170 D.2504_12 = f$__pfn_24 + -1;
2171 D.2505_13 = (unsigned int) D.2504_12;
2172 D.2506_14 = D.2503_10 + D.2505_13;
2173 D.2507_15 = *D.2506_14;
2174 iftmp.11_16 = (String:: *) D.2507_15;
2176 <bb 7>:
2177 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2178 D.2500_19 = (unsigned int) f$__delta_5;
2179 D.2508_20 = &S + D.2500_19;
2180 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2182 Such patterns are results of simple calls to a member pointer:
2184 int doprinting (int (MyString::* f)(int) const)
2186 MyString S ("somestring");
2188 return (S.*f)(4);
2191 Moreover, the function also looks for called pointers loaded from aggregates
2192 passed by value or reference. */
2194 static void
2195 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
2196 tree target)
2198 class ipa_node_params *info = fbi->info;
2199 HOST_WIDE_INT offset;
2200 bool by_ref;
2202 if (SSA_NAME_IS_DEFAULT_DEF (target))
2204 tree var = SSA_NAME_VAR (target);
2205 int index = ipa_get_param_decl_index (info, var);
2206 if (index >= 0)
2207 ipa_note_param_call (fbi->node, index, call);
2208 return;
2211 int index;
2212 gimple *def = SSA_NAME_DEF_STMT (target);
2213 bool guaranteed_unmodified;
2214 if (gimple_assign_single_p (def)
2215 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2216 gimple_assign_rhs1 (def), &index, &offset,
2217 NULL, &by_ref, &guaranteed_unmodified))
2219 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2220 cs->indirect_info->offset = offset;
2221 cs->indirect_info->agg_contents = 1;
2222 cs->indirect_info->by_ref = by_ref;
2223 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
2224 return;
2227 /* Now we need to try to match the complex pattern of calling a member
2228 pointer. */
2229 if (gimple_code (def) != GIMPLE_PHI
2230 || gimple_phi_num_args (def) != 2
2231 || !POINTER_TYPE_P (TREE_TYPE (target))
2232 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2233 return;
2235 /* First, we need to check whether one of these is a load from a member
2236 pointer that is a parameter to this function. */
2237 tree n1 = PHI_ARG_DEF (def, 0);
2238 tree n2 = PHI_ARG_DEF (def, 1);
2239 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2240 return;
2241 gimple *d1 = SSA_NAME_DEF_STMT (n1);
2242 gimple *d2 = SSA_NAME_DEF_STMT (n2);
2244 tree rec;
2245 basic_block bb, virt_bb;
2246 basic_block join = gimple_bb (def);
2247 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2249 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2250 return;
2252 bb = EDGE_PRED (join, 0)->src;
2253 virt_bb = gimple_bb (d2);
2255 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2257 bb = EDGE_PRED (join, 1)->src;
2258 virt_bb = gimple_bb (d1);
2260 else
2261 return;
2263 /* Second, we need to check that the basic blocks are laid out in the way
2264 corresponding to the pattern. */
2266 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2267 || single_pred (virt_bb) != bb
2268 || single_succ (virt_bb) != join)
2269 return;
2271 /* Third, let's see that the branching is done depending on the least
2272 significant bit of the pfn. */
2274 gimple *branch = last_stmt (bb);
2275 if (!branch || gimple_code (branch) != GIMPLE_COND)
2276 return;
2278 if ((gimple_cond_code (branch) != NE_EXPR
2279 && gimple_cond_code (branch) != EQ_EXPR)
2280 || !integer_zerop (gimple_cond_rhs (branch)))
2281 return;
2283 tree cond = gimple_cond_lhs (branch);
2284 if (!ipa_is_ssa_with_stmt_def (cond))
2285 return;
2287 def = SSA_NAME_DEF_STMT (cond);
2288 if (!is_gimple_assign (def)
2289 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2290 || !integer_onep (gimple_assign_rhs2 (def)))
2291 return;
2293 cond = gimple_assign_rhs1 (def);
2294 if (!ipa_is_ssa_with_stmt_def (cond))
2295 return;
2297 def = SSA_NAME_DEF_STMT (cond);
2299 if (is_gimple_assign (def)
2300 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2302 cond = gimple_assign_rhs1 (def);
2303 if (!ipa_is_ssa_with_stmt_def (cond))
2304 return;
2305 def = SSA_NAME_DEF_STMT (cond);
2308 tree rec2;
2309 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2310 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2311 == ptrmemfunc_vbit_in_delta),
2312 NULL);
2313 if (rec != rec2)
2314 return;
2316 index = ipa_get_param_decl_index (info, rec);
2317 if (index >= 0
2318 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2320 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2321 cs->indirect_info->offset = offset;
2322 cs->indirect_info->agg_contents = 1;
2323 cs->indirect_info->member_ptr = 1;
2324 cs->indirect_info->guaranteed_unmodified = 1;
2327 return;
2330 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2331 object referenced in the expression is a formal parameter of the caller
2332 FBI->node (described by FBI->info), create a call note for the
2333 statement. */
2335 static void
2336 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2337 gcall *call, tree target)
2339 tree obj = OBJ_TYPE_REF_OBJECT (target);
2340 int index;
2341 HOST_WIDE_INT anc_offset;
2343 if (!flag_devirtualize)
2344 return;
2346 if (TREE_CODE (obj) != SSA_NAME)
2347 return;
2349 class ipa_node_params *info = fbi->info;
2350 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2352 struct ipa_jump_func jfunc;
2353 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2354 return;
2356 anc_offset = 0;
2357 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2358 gcc_assert (index >= 0);
2359 if (detect_type_change_ssa (fbi, obj, obj_type_ref_class (target),
2360 call, &jfunc))
2361 return;
2363 else
2365 struct ipa_jump_func jfunc;
2366 gimple *stmt = SSA_NAME_DEF_STMT (obj);
2367 tree expr;
2369 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2370 if (!expr)
2371 return;
2372 index = ipa_get_param_decl_index (info,
2373 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2374 gcc_assert (index >= 0);
2375 if (detect_type_change (fbi, obj, expr, obj_type_ref_class (target),
2376 call, &jfunc, anc_offset))
2377 return;
2380 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2381 class cgraph_indirect_call_info *ii = cs->indirect_info;
2382 ii->offset = anc_offset;
2383 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2384 ii->otr_type = obj_type_ref_class (target);
2385 ii->polymorphic = 1;
2388 /* Analyze call statement CALL to see whether and how it utilizes formal parameters
2389 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
2390 containing intermediate information about each formal parameter. */
2392 static void
2393 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2395 tree target = gimple_call_fn (call);
2397 if (!target
2398 || (TREE_CODE (target) != SSA_NAME
2399 && !virtual_method_call_p (target)))
2400 return;
2402 struct cgraph_edge *cs = fbi->node->get_edge (call);
2403 /* If we previously turned the call into a direct call, there is
2404 no need to analyze. */
2405 if (cs && !cs->indirect_unknown_callee)
2406 return;
2408 if (cs->indirect_info->polymorphic && flag_devirtualize)
2410 tree instance;
2411 tree target = gimple_call_fn (call);
2412 ipa_polymorphic_call_context context (current_function_decl,
2413 target, call, &instance);
2415 gcc_checking_assert (cs->indirect_info->otr_type
2416 == obj_type_ref_class (target));
2417 gcc_checking_assert (cs->indirect_info->otr_token
2418 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2420 cs->indirect_info->vptr_changed
2421 = !context.get_dynamic_type (instance,
2422 OBJ_TYPE_REF_OBJECT (target),
2423 obj_type_ref_class (target), call,
2424 &fbi->aa_walk_budget);
2425 cs->indirect_info->context = context;
2428 if (TREE_CODE (target) == SSA_NAME)
2429 ipa_analyze_indirect_call_uses (fbi, call, target);
2430 else if (virtual_method_call_p (target))
2431 ipa_analyze_virtual_call_uses (fbi, call, target);
2435 /* Analyze the call statement STMT with respect to formal parameters (described
2436 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2437 formal parameters are called. */
2439 static void
2440 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2442 if (is_gimple_call (stmt))
2443 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2446 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2447 If OP is a parameter declaration, mark it as used in the info structure
2448 passed in DATA. */
2450 static bool
2451 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2453 class ipa_node_params *info = (class ipa_node_params *) data;
2455 op = get_base_address (op);
2456 if (op
2457 && TREE_CODE (op) == PARM_DECL)
2459 int index = ipa_get_param_decl_index (info, op);
2460 gcc_assert (index >= 0);
2461 ipa_set_param_used (info, index, true);
2464 return false;
2467 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2468 the findings in various structures of the associated ipa_node_params
2469 structure, such as parameter flags, notes etc. FBI holds various data about
2470 the function being analyzed. */
2472 static void
2473 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2475 gimple_stmt_iterator gsi;
2476 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2478 gimple *stmt = gsi_stmt (gsi);
2480 if (is_gimple_debug (stmt))
2481 continue;
2483 ipa_analyze_stmt_uses (fbi, stmt);
2484 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2485 visit_ref_for_mod_analysis,
2486 visit_ref_for_mod_analysis,
2487 visit_ref_for_mod_analysis);
2489 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2490 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2491 visit_ref_for_mod_analysis,
2492 visit_ref_for_mod_analysis,
2493 visit_ref_for_mod_analysis);
2496 /* Calculate controlled uses of parameters of NODE. */
2498 static void
2499 ipa_analyze_controlled_uses (struct cgraph_node *node)
2501 class ipa_node_params *info = IPA_NODE_REF (node);
2503 for (int i = 0; i < ipa_get_param_count (info); i++)
2505 tree parm = ipa_get_param (info, i);
2506 int controlled_uses = 0;
2508 /* For SSA regs see if parameter is used. For non-SSA we compute
2509 the flag during modification analysis. */
2510 if (is_gimple_reg (parm))
2512 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2513 parm);
2514 if (ddef && !has_zero_uses (ddef))
2516 imm_use_iterator imm_iter;
2517 use_operand_p use_p;
2519 ipa_set_param_used (info, i, true);
2520 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2521 if (!is_gimple_call (USE_STMT (use_p)))
2523 if (!is_gimple_debug (USE_STMT (use_p)))
2525 controlled_uses = IPA_UNDESCRIBED_USE;
2526 break;
2529 else
2530 controlled_uses++;
2532 else
2533 controlled_uses = 0;
2535 else
2536 controlled_uses = IPA_UNDESCRIBED_USE;
2537 ipa_set_controlled_uses (info, i, controlled_uses);
2541 /* Free stuff in BI. */
2543 static void
2544 free_ipa_bb_info (struct ipa_bb_info *bi)
2546 bi->cg_edges.release ();
2547 bi->param_aa_statuses.release ();
2550 /* Dominator walker driving the analysis. */
2552 class analysis_dom_walker : public dom_walker
2554 public:
2555 analysis_dom_walker (struct ipa_func_body_info *fbi)
2556 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2558 virtual edge before_dom_children (basic_block);
2560 private:
2561 struct ipa_func_body_info *m_fbi;
2564 edge
2565 analysis_dom_walker::before_dom_children (basic_block bb)
2567 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2568 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2569 return NULL;
2572 /* Release body info FBI. */
2574 void
2575 ipa_release_body_info (struct ipa_func_body_info *fbi)
2577 int i;
2578 struct ipa_bb_info *bi;
2580 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2581 free_ipa_bb_info (bi);
2582 fbi->bb_infos.release ();
2585 /* Initialize the array describing properties of formal parameters
2586 of NODE, analyze their uses and compute jump functions associated
2587 with actual arguments of calls from within NODE. */
2589 void
2590 ipa_analyze_node (struct cgraph_node *node)
2592 struct ipa_func_body_info fbi;
2593 class ipa_node_params *info;
2595 ipa_check_create_node_params ();
2596 ipa_check_create_edge_args ();
2597 info = IPA_NODE_REF (node);
2599 if (info->analysis_done)
2600 return;
2601 info->analysis_done = 1;
2603 if (ipa_func_spec_opts_forbid_analysis_p (node))
2605 for (int i = 0; i < ipa_get_param_count (info); i++)
2607 ipa_set_param_used (info, i, true);
2608 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2610 return;
2613 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2614 push_cfun (func);
2615 calculate_dominance_info (CDI_DOMINATORS);
2616 ipa_initialize_node_params (node);
2617 ipa_analyze_controlled_uses (node);
2619 fbi.node = node;
2620 fbi.info = IPA_NODE_REF (node);
2621 fbi.bb_infos = vNULL;
2622 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2623 fbi.param_count = ipa_get_param_count (info);
2624 fbi.aa_walk_budget = PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
2626 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2628 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2629 bi->cg_edges.safe_push (cs);
2632 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2634 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2635 bi->cg_edges.safe_push (cs);
2638 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2640 ipa_release_body_info (&fbi);
2641 free_dominance_info (CDI_DOMINATORS);
2642 pop_cfun ();
2645 /* Update the jump functions associated with call graph edge E when the call
2646 graph edge CS is being inlined, assuming that E->caller is already (possibly
2647 indirectly) inlined into CS->callee and that E has not been inlined. */
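/* For instance, if the jump function of E was a simple pass-through of the
   K-th parameter of CS->callee, and argument K of the now-inlined call CS was
   itself a pass-through of parameter J of CS->caller, then the updated jump
   function of E becomes a pass-through of parameter J.  */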
2649 static void
2650 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2651 struct cgraph_edge *e)
2653 class ipa_edge_args *top = IPA_EDGE_REF (cs);
2654 class ipa_edge_args *args = IPA_EDGE_REF (e);
2655 int count = ipa_get_cs_argument_count (args);
2656 int i;
2658 for (i = 0; i < count; i++)
2660 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2661 class ipa_polymorphic_call_context *dst_ctx
2662 = ipa_get_ith_polymorhic_call_context (args, i);
2664 if (dst->type == IPA_JF_ANCESTOR)
2666 struct ipa_jump_func *src;
2667 int dst_fid = dst->value.ancestor.formal_id;
2668 class ipa_polymorphic_call_context *src_ctx
2669 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2671 /* Variable number of arguments can cause havoc if we try to access
2672 one that does not exist in the inlined edge. So make sure we
2673 don't. */
2674 if (dst_fid >= ipa_get_cs_argument_count (top))
2676 ipa_set_jf_unknown (dst);
2677 continue;
2680 src = ipa_get_ith_jump_func (top, dst_fid);
2682 if (src_ctx && !src_ctx->useless_p ())
2684 class ipa_polymorphic_call_context ctx = *src_ctx;
2686 /* TODO: Make type preserved safe WRT contexts. */
2687 if (!ipa_get_jf_ancestor_type_preserved (dst))
2688 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2689 ctx.offset_by (dst->value.ancestor.offset);
2690 if (!ctx.useless_p ())
2692 if (!dst_ctx)
2694 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2695 count);
2696 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2699 dst_ctx->combine_with (ctx);
2703 if (src->agg.items
2704 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2706 struct ipa_agg_jf_item *item;
2707 int j;
2709 /* Currently we do not produce clobber aggregate jump functions,
2710 replace with merging when we do. */
2711 gcc_assert (!dst->agg.items);
2713 dst->agg.items = vec_safe_copy (src->agg.items);
2714 dst->agg.by_ref = src->agg.by_ref;
2715 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2716 item->offset -= dst->value.ancestor.offset;
2719 if (src->type == IPA_JF_PASS_THROUGH
2720 && src->value.pass_through.operation == NOP_EXPR)
2722 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2723 dst->value.ancestor.agg_preserved &=
2724 src->value.pass_through.agg_preserved;
2726 else if (src->type == IPA_JF_ANCESTOR)
2728 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2729 dst->value.ancestor.offset += src->value.ancestor.offset;
2730 dst->value.ancestor.agg_preserved &=
2731 src->value.ancestor.agg_preserved;
2733 else
2734 ipa_set_jf_unknown (dst);
2736 else if (dst->type == IPA_JF_PASS_THROUGH)
2738 struct ipa_jump_func *src;
2739       /* We must check the range due to calls with a variable number of
2740	 arguments, and we cannot combine jump functions with operations.  */
2741 if (dst->value.pass_through.operation == NOP_EXPR
2742 && (dst->value.pass_through.formal_id
2743 < ipa_get_cs_argument_count (top)))
2745 int dst_fid = dst->value.pass_through.formal_id;
2746 src = ipa_get_ith_jump_func (top, dst_fid);
2747 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2748 class ipa_polymorphic_call_context *src_ctx
2749 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2751 if (src_ctx && !src_ctx->useless_p ())
2753 class ipa_polymorphic_call_context ctx = *src_ctx;
2755 /* TODO: Make type preserved safe WRT contexts. */
2756 if (!ipa_get_jf_pass_through_type_preserved (dst))
2757 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2758 if (!ctx.useless_p ())
2760 if (!dst_ctx)
2762 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2763 count);
2764 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2766 dst_ctx->combine_with (ctx);
2769 switch (src->type)
2771 case IPA_JF_UNKNOWN:
2772 ipa_set_jf_unknown (dst);
2773 break;
2774 case IPA_JF_CONST:
2775 ipa_set_jf_cst_copy (dst, src);
2776 break;
2778 case IPA_JF_PASS_THROUGH:
2780 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2781 enum tree_code operation;
2782 operation = ipa_get_jf_pass_through_operation (src);
2784 if (operation == NOP_EXPR)
2786 bool agg_p;
2787 agg_p = dst_agg_p
2788 && ipa_get_jf_pass_through_agg_preserved (src);
2789 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2791 else if (TREE_CODE_CLASS (operation) == tcc_unary)
2792 ipa_set_jf_unary_pass_through (dst, formal_id, operation);
2793 else
2795 tree operand = ipa_get_jf_pass_through_operand (src);
2796 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2797 operation);
2799 break;
2801 case IPA_JF_ANCESTOR:
2803 bool agg_p;
2804 agg_p = dst_agg_p
2805 && ipa_get_jf_ancestor_agg_preserved (src);
2806 ipa_set_ancestor_jf (dst,
2807 ipa_get_jf_ancestor_offset (src),
2808 ipa_get_jf_ancestor_formal_id (src),
2809 agg_p);
2810 break;
2812 default:
2813 gcc_unreachable ();
2816 if (src->agg.items
2817 && (dst_agg_p || !src->agg.by_ref))
2819 /* Currently we do not produce clobber aggregate jump
2820 functions, replace with merging when we do. */
2821 gcc_assert (!dst->agg.items);
2823 dst->agg.by_ref = src->agg.by_ref;
2824 dst->agg.items = vec_safe_copy (src->agg.items);
2827 else
2828 ipa_set_jf_unknown (dst);
2833 /* If TARGET is an addr_expr of a function declaration, make it the
2834    (SPECULATIVE) destination of an indirect edge IE and return the edge.
2835 Otherwise, return NULL. */
2837 struct cgraph_edge *
2838 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2839 bool speculative)
2841 struct cgraph_node *callee;
2842 bool unreachable = false;
2844 if (TREE_CODE (target) == ADDR_EXPR)
2845 target = TREE_OPERAND (target, 0);
2846 if (TREE_CODE (target) != FUNCTION_DECL)
2848 target = canonicalize_constructor_val (target, NULL);
2849 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2851 /* Member pointer call that goes through a VMT lookup. */
2852 if (ie->indirect_info->member_ptr
2853 /* Or if target is not an invariant expression and we do not
2854	     know if it will evaluate to a function at runtime.
2855 This can happen when folding through &VAR, where &VAR
2856 is IP invariant, but VAR itself is not.
2858	     TODO: Revisit this when GCC 5 is branched.  It seems that the
2859	     member_ptr check is not needed and that we could try to fold
2860	     the expression and see if VAR is readonly.  */
2861 || !is_gimple_ip_invariant (target))
2863 if (dump_enabled_p ())
2865 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
2866 "discovered direct call non-invariant %s\n",
2867 ie->caller->dump_name ());
2869 return NULL;
2873 if (dump_enabled_p ())
2875 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
2876 "discovered direct call to non-function in %s, "
2877 "making it __builtin_unreachable\n",
2878 ie->caller->dump_name ());
2881 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2882 callee = cgraph_node::get_create (target);
2883 unreachable = true;
2885 else
2886 callee = cgraph_node::get (target);
2888 else
2889 callee = cgraph_node::get (target);
2891   /* Because may-edges are not explicitly represented and the vtable may be external,
2892 we may create the first reference to the object in the unit. */
2893 if (!callee || callee->global.inlined_to)
2896       /* We had better ensure we can refer to it.
2897 In the case of static functions we are out of luck, since we already
2898 removed its body. In the case of public functions we may or may
2899 not introduce the reference. */
2900 if (!canonicalize_constructor_val (target, NULL)
2901 || !TREE_PUBLIC (target))
2903 if (dump_file)
2904 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2905 "(%s -> %s) but cannot refer to it. Giving up.\n",
2906 ie->caller->dump_name (),
2907 ie->callee->dump_name ());
2908 return NULL;
2910 callee = cgraph_node::get_create (target);
2913   /* If the edge is already speculated, check whether the new target agrees
     with the existing speculation.  */
2914 if (speculative && ie->speculative)
2916 struct cgraph_edge *e2;
2917 struct ipa_ref *ref;
2918 ie->speculative_call_info (e2, ie, ref);
2919 if (e2->callee->ultimate_alias_target ()
2920 != callee->ultimate_alias_target ())
2922 if (dump_file)
2923 fprintf (dump_file, "ipa-prop: Discovered call to a speculative "
2924 "target (%s -> %s) but the call is already "
2925 "speculated to %s. Giving up.\n",
2926 ie->caller->dump_name (), callee->dump_name (),
2927 e2->callee->dump_name ());
2929 else
2931 if (dump_file)
2932 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2933 "(%s -> %s) this agree with previous speculation.\n",
2934 ie->caller->dump_name (), callee->dump_name ());
2936 return NULL;
2939 if (!dbg_cnt (devirt))
2940 return NULL;
2942 ipa_check_create_node_params ();
2944   /* We cannot make edges to inline clones.  It is a bug when someone removes
2945 the cgraph node too early. */
2946 gcc_assert (!callee->global.inlined_to);
2948 if (dump_file && !unreachable)
2950 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2951 "(%s -> %s), for stmt ",
2952 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2953 speculative ? "speculative" : "known",
2954 ie->caller->dump_name (),
2955 callee->dump_name ());
2956 if (ie->call_stmt)
2957 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2958 else
2959 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2961 if (dump_enabled_p ())
2963 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
2964 "converting indirect call in %s to direct call to %s\n",
2965 ie->caller->name (), callee->name ());
2967 if (!speculative)
2969 struct cgraph_edge *orig = ie;
2970 ie = ie->make_direct (callee);
2971       /* If we resolved a speculative edge, the cost is already up to date
2972	 for the direct call (adjusted by inline_edge_duplication_hook).  */
2973 if (ie == orig)
2975 ipa_call_summary *es = ipa_call_summaries->get (ie);
2976 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2977 - eni_size_weights.call_cost);
2978 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2979 - eni_time_weights.call_cost);
2982 else
2984 if (!callee->can_be_discarded_p ())
2986 cgraph_node *alias;
2987 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2988 if (alias)
2989 callee = alias;
2991 /* make_speculative will update ie's cost to direct call cost. */
2992 ie = ie->make_speculative
2993 (callee, ie->count.apply_scale (8, 10));
2996 return ie;
2999 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
3000 CONSTRUCTOR and return it. Return NULL if the search fails for some
3001 reason. */
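/* For example, in a constructor of an array of 32-bit ints, the element with
   index 2 lies at elt_offset 2 * 4 * BITS_PER_UNIT == 64, so a REQ_OFFSET of
   64 yields that element's value provided it is an interprocedural
   invariant.  */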
3003 static tree
3004 find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
3006 tree type = TREE_TYPE (constructor);
3007 if (TREE_CODE (type) != ARRAY_TYPE
3008 && TREE_CODE (type) != RECORD_TYPE)
3009 return NULL;
3011 unsigned ix;
3012 tree index, val;
3013 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
3015 HOST_WIDE_INT elt_offset;
3016 if (TREE_CODE (type) == ARRAY_TYPE)
3018 offset_int off;
3019 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
3020 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3022 if (index)
3024 if (TREE_CODE (index) == RANGE_EXPR)
3025 off = wi::to_offset (TREE_OPERAND (index, 0));
3026 else
3027 off = wi::to_offset (index);
3028 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
3030 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
3031 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3032 off = wi::sext (off - wi::to_offset (low_bound),
3033 TYPE_PRECISION (TREE_TYPE (index)));
3035 off *= wi::to_offset (unit_size);
3036 /* ??? Handle more than just the first index of a
3037 RANGE_EXPR. */
3039 else
3040 off = wi::to_offset (unit_size) * ix;
3042 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
3043 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
3044 continue;
3045 elt_offset = off.to_shwi ();
3047 else if (TREE_CODE (type) == RECORD_TYPE)
3049 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
3050 if (DECL_BIT_FIELD (index))
3051 continue;
3052 elt_offset = int_bit_position (index);
3054 else
3055 gcc_unreachable ();
3057 if (elt_offset > req_offset)
3058 return NULL;
3060 if (TREE_CODE (val) == CONSTRUCTOR)
3061 return find_constructor_constant_at_offset (val,
3062 req_offset - elt_offset);
3064 if (elt_offset == req_offset
3065 && is_gimple_reg_type (TREE_TYPE (val))
3066 && is_gimple_ip_invariant (val))
3067 return val;
3069 return NULL;
3072 /* Check whether SCALAR could be used to look up an aggregate interprocedural
3073 invariant from a static constructor and if so, return it. Otherwise return
3074 NULL. */
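/* For instance, when a caller passes the address of

     static const struct { int a, b; } s = { 1, 2 };

   by reference, SCALAR is the ADDR_EXPR of S and a lookup at bit OFFSET 32
   (assuming 32-bit int) finds the invariant 2 in the initializer.  */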
3076 static tree
3077 ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
3079 if (by_ref)
3081 if (TREE_CODE (scalar) != ADDR_EXPR)
3082 return NULL;
3083 scalar = TREE_OPERAND (scalar, 0);
3086 if (!VAR_P (scalar)
3087 || !is_global_var (scalar)
3088 || !TREE_READONLY (scalar)
3089 || !DECL_INITIAL (scalar)
3090 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
3091 return NULL;
3093 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
3096 /* Retrieve value from aggregate jump function AGG or static initializer of
3097 SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
3098 none. BY_REF specifies whether the value has to be passed by reference or
3099 by value. If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
3100 to is set to true if the value comes from an initializer of a constant. */
3102 tree
3103 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar,
3104 HOST_WIDE_INT offset, bool by_ref,
3105 bool *from_global_constant)
3107 struct ipa_agg_jf_item *item;
3108 int i;
3110 if (scalar)
3112 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
3113 if (res)
3115 if (from_global_constant)
3116 *from_global_constant = true;
3117 return res;
3121 if (!agg
3122 || by_ref != agg->by_ref)
3123 return NULL;
3125 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
3126 if (item->offset == offset)
3128 /* Currently we do not have clobber values, return NULL for them once
3129 we do. */
3130 gcc_checking_assert (is_gimple_ip_invariant (item->value));
3131 if (from_global_constant)
3132 *from_global_constant = false;
3133 return item->value;
3135 return NULL;
3138 /* Remove a reference to SYMBOL from the list of references of a node given by
3139 reference description RDESC. Return true if the reference has been
3140 successfully found and removed. */
3142 static bool
3143 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
3145 struct ipa_ref *to_del;
3146 struct cgraph_edge *origin;
3148 origin = rdesc->cs;
3149 if (!origin)
3150 return false;
3151 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3152 origin->lto_stmt_uid);
3153 if (!to_del)
3154 return false;
3156 to_del->remove_reference ();
3157 if (dump_file)
3158 fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
3159 origin->caller->dump_name (), xstrdup_for_dump (symbol->name ()));
3160 return true;
3163 /* If JFUNC has a reference description with refcount different from
3164 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3165 NULL. JFUNC must be a constant jump function. */
3167 static struct ipa_cst_ref_desc *
3168 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3170 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3171 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3172 return rdesc;
3173 else
3174 return NULL;
3177 /* If the value of constant jump function JFUNC is an address of a function
3178 declaration, return the associated call graph node. Otherwise return
3179 NULL. */
3181 static cgraph_node *
3182 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3184 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3185 tree cst = ipa_get_jf_constant (jfunc);
3186 if (TREE_CODE (cst) != ADDR_EXPR
3187 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3188 return NULL;
3190 return cgraph_node::get (TREE_OPERAND (cst, 0));
3194 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3195 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3196 the edge specified in the rdesc. Return false if either the symbol or the
3197 reference could not be found, otherwise return true. */
3199 static bool
3200 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3202 struct ipa_cst_ref_desc *rdesc;
3203 if (jfunc->type == IPA_JF_CONST
3204 && (rdesc = jfunc_rdesc_usable (jfunc))
3205 && --rdesc->refcount == 0)
3207 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3208 if (!symbol)
3209 return false;
3211 return remove_described_reference (symbol, rdesc);
3213 return true;
3216 /* Try to find a destination for indirect edge IE that corresponds to a simple
3217 call or a call of a member function pointer and where the destination is a
3218 pointer formal parameter described by jump function JFUNC. TARGET_TYPE is
3219 the type of the parameter to which the result of JFUNC is passed. If it can
3220 be determined, return the newly direct edge, otherwise return NULL.
3221 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
3223 static struct cgraph_edge *
3224 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3225 struct ipa_jump_func *jfunc, tree target_type,
3226 class ipa_node_params *new_root_info)
3228 struct cgraph_edge *cs;
3229 tree target;
3230 bool agg_contents = ie->indirect_info->agg_contents;
3231 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
3232 if (agg_contents)
3234 bool from_global_constant;
3235 target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3236 ie->indirect_info->offset,
3237 ie->indirect_info->by_ref,
3238 &from_global_constant);
3239 if (target
3240 && !from_global_constant
3241 && !ie->indirect_info->guaranteed_unmodified)
3242 return NULL;
3244 else
3245 target = scalar;
3246 if (!target)
3247 return NULL;
3248 cs = ipa_make_edge_direct_to_target (ie, target);
3250 if (cs && !agg_contents)
3252 bool ok;
3253 gcc_checking_assert (cs->callee
3254 && (cs != ie
3255 || jfunc->type != IPA_JF_CONST
3256 || !cgraph_node_for_jfunc (jfunc)
3257 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3258 ok = try_decrement_rdesc_refcount (jfunc);
3259 gcc_checking_assert (ok);
3262 return cs;
3265 /* Return the target to be used in cases of impossible devirtualization. IE
3266 and target (the latter can be NULL) are dumped when dumping is enabled. */
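/* The call is redirected to __builtin_unreachable because reaching it would
   mean the program has undefined behavior, which allows later passes to
   remove the paths leading to it altogether.  */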
3268 tree
3269 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3271 if (dump_file)
3273 if (target)
3274 fprintf (dump_file,
3275 "Type inconsistent devirtualization: %s->%s\n",
3276 ie->caller->dump_name (),
3277 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3278 else
3279 fprintf (dump_file,
3280 "No devirtualization target in %s\n",
3281 ie->caller->dump_name ());
3283 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3284 cgraph_node::get_create (new_target);
3285 return new_target;
3288 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3289 call based on a formal parameter which is described by jump function JFUNC
3290 and if it can be determined, make it direct and return the direct edge.
3291    Otherwise, return NULL.  CTX describes the polymorphic context carried
3292    by the parameter on which the call is based.  */
3294 static struct cgraph_edge *
3295 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3296 struct ipa_jump_func *jfunc,
3297 class ipa_polymorphic_call_context ctx)
3299 tree target = NULL;
3300 bool speculative = false;
3302 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
3303 return NULL;
3305 gcc_assert (!ie->indirect_info->by_ref);
3307 /* Try to do lookup via known virtual table pointer value. */
3308 if (!ie->indirect_info->vptr_changed
3309 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
3311 tree vtable;
3312 unsigned HOST_WIDE_INT offset;
3313 tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3314 : NULL;
3315 tree t = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3316 ie->indirect_info->offset,
3317 true);
3318 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3320 bool can_refer;
3321 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3322 vtable, offset, &can_refer);
3323 if (can_refer)
3325 if (!t
3326 || fndecl_built_in_p (t, BUILT_IN_UNREACHABLE)
3327 || !possible_polymorphic_call_target_p
3328 (ie, cgraph_node::get (t)))
3330 /* Do not speculate builtin_unreachable, it is stupid! */
3331 if (!ie->indirect_info->vptr_changed)
3332 target = ipa_impossible_devirt_target (ie, target);
3333 else
3334 target = NULL;
3336 else
3338 target = t;
3339 speculative = ie->indirect_info->vptr_changed;
3345 ipa_polymorphic_call_context ie_context (ie);
3346   vec <cgraph_node *> targets;
3347 bool final;
3349 ctx.offset_by (ie->indirect_info->offset);
3350 if (ie->indirect_info->vptr_changed)
3351 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3352 ie->indirect_info->otr_type);
3353 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3354 targets = possible_polymorphic_call_targets
3355 (ie->indirect_info->otr_type,
3356 ie->indirect_info->otr_token,
3357 ctx, &final);
3358 if (final && targets.length () <= 1)
3360 speculative = false;
3361 if (targets.length () == 1)
3362 target = targets[0]->decl;
3363 else
3364 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3366 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3367 && !ie->speculative && ie->maybe_hot_p ())
3369 cgraph_node *n;
3370 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3371 ie->indirect_info->otr_token,
3372 ie->indirect_info->context);
3373 if (n)
3375 target = n->decl;
3376 speculative = true;
3380 if (target)
3382 if (!possible_polymorphic_call_target_p
3383 (ie, cgraph_node::get_create (target)))
3385 if (speculative)
3386 return NULL;
3387 target = ipa_impossible_devirt_target (ie, target);
3389 return ipa_make_edge_direct_to_target (ie, target, speculative);
3391 else
3392 return NULL;
3395 /* Update the param called notes associated with NODE when CS is being inlined,
3396 assuming NODE is (potentially indirectly) inlined into CS->callee.
3397 Moreover, if the callee is discovered to be constant, create a new cgraph
3398 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3399 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
3401 static bool
3402 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3403 struct cgraph_node *node,
3404 vec<cgraph_edge *> *new_edges)
3406 class ipa_edge_args *top;
3407 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3408 class ipa_node_params *new_root_info, *inlined_node_info;
3409 bool res = false;
3411 ipa_check_create_edge_args ();
3412 top = IPA_EDGE_REF (cs);
3413 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3414 ? cs->caller->global.inlined_to
3415 : cs->caller);
3416 inlined_node_info = IPA_NODE_REF (cs->callee->function_symbol ());
3418 for (ie = node->indirect_calls; ie; ie = next_ie)
3420 class cgraph_indirect_call_info *ici = ie->indirect_info;
3421 struct ipa_jump_func *jfunc;
3422 int param_index;
3423 cgraph_node *spec_target = NULL;
3425 next_ie = ie->next_callee;
3427 if (ici->param_index == -1)
3428 continue;
3430 /* We must check range due to calls with variable number of arguments: */
3431 if (ici->param_index >= ipa_get_cs_argument_count (top))
3433 ici->param_index = -1;
3434 continue;
3437 param_index = ici->param_index;
3438 jfunc = ipa_get_ith_jump_func (top, param_index);
3440 if (ie->speculative)
3442 struct cgraph_edge *de;
3443 struct ipa_ref *ref;
3444 ie->speculative_call_info (de, ie, ref);
3445 spec_target = de->callee;
3448 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3449 new_direct_edge = NULL;
3450 else if (ici->polymorphic)
3452 ipa_polymorphic_call_context ctx;
3453 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3454 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3456 else
3458 tree target_type = ipa_get_type (inlined_node_info, param_index);
3459 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3460 target_type,
3461 new_root_info);
3464 /* If speculation was removed, then we need to do nothing. */
3465 if (new_direct_edge && new_direct_edge != ie
3466 && new_direct_edge->callee == spec_target)
3468 new_direct_edge->indirect_inlining_edge = 1;
3469 top = IPA_EDGE_REF (cs);
3470 res = true;
3471 if (!new_direct_edge->speculative)
3472 continue;
3474 else if (new_direct_edge)
3476 new_direct_edge->indirect_inlining_edge = 1;
3477 if (new_direct_edge->call_stmt)
3478 new_direct_edge->call_stmt_cannot_inline_p
3479 = !gimple_check_call_matching_types (
3480 new_direct_edge->call_stmt,
3481 new_direct_edge->callee->decl, false);
3482 if (new_edges)
3484 new_edges->safe_push (new_direct_edge);
3485 res = true;
3487 top = IPA_EDGE_REF (cs);
3488	  /* If a speculative edge was introduced, we still need to update
3489	     the call info of the indirect edge.  */
3490 if (!new_direct_edge->speculative)
3491 continue;
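      /* The jump function tells us how the value the indirect call is based
	 on is derived from a parameter of the new root, so remap the recorded
	 parameter index accordingly, or mark it as undeterminable.  */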
3493 if (jfunc->type == IPA_JF_PASS_THROUGH
3494 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3496 if (ici->agg_contents
3497 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3498 && !ici->polymorphic)
3499 ici->param_index = -1;
3500 else
3502 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3503 if (ici->polymorphic
3504 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3505 ici->vptr_changed = true;
3508 else if (jfunc->type == IPA_JF_ANCESTOR)
3510 if (ici->agg_contents
3511 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3512 && !ici->polymorphic)
3513 ici->param_index = -1;
3514 else
3516 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3517 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3518 if (ici->polymorphic
3519 && !ipa_get_jf_ancestor_type_preserved (jfunc))
3520 ici->vptr_changed = true;
3523 else
3524 /* Either we can find a destination for this edge now or never. */
3525 ici->param_index = -1;
3528 return res;
3531 /* Recursively traverse subtree of NODE (including node) made of inlined
3532 cgraph_edges when CS has been inlined and invoke
3533 update_indirect_edges_after_inlining on all nodes and
3534 update_jump_functions_after_inlining on all non-inlined edges that lead out
3535 of this subtree. Newly discovered indirect edges will be added to
3536 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
3537 created. */
3539 static bool
3540 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3541 struct cgraph_node *node,
3542 vec<cgraph_edge *> *new_edges)
3544 struct cgraph_edge *e;
3545 bool res;
3547 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3549 for (e = node->callees; e; e = e->next_callee)
3550 if (!e->inline_failed)
3551 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3552 else
3553 update_jump_functions_after_inlining (cs, e);
3554 for (e = node->indirect_calls; e; e = e->next_callee)
3555 update_jump_functions_after_inlining (cs, e);
3557 return res;
3560 /* Combine two controlled uses counts as done during inlining. */
3562 static int
3563 combine_controlled_uses_counters (int c, int d)
3565 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3566 return IPA_UNDESCRIBED_USE;
3567 else
3568 return c + d - 1;
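/* For example, if a value had three controlled uses in the caller, one of
   them being an argument of the inlined call, and the corresponding callee
   parameter had two controlled uses, the combined body has 3 + 2 - 1 == 4:
   the call argument itself disappears and is replaced by the callee's two
   uses.  */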
3571 /* Propagate the number of controlled users from CS->callee to the new root of the
3572 tree of inlined nodes. */
3574 static void
3575 propagate_controlled_uses (struct cgraph_edge *cs)
3577 class ipa_edge_args *args = IPA_EDGE_REF (cs);
3578 struct cgraph_node *new_root = cs->caller->global.inlined_to
3579 ? cs->caller->global.inlined_to : cs->caller;
3580 class ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3581 class ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3582 int count, i;
3584 count = MIN (ipa_get_cs_argument_count (args),
3585 ipa_get_param_count (old_root_info));
3586 for (i = 0; i < count; i++)
3588 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3589 struct ipa_cst_ref_desc *rdesc;
3591 if (jf->type == IPA_JF_PASS_THROUGH)
3593 int src_idx, c, d;
3594 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3595 c = ipa_get_controlled_uses (new_root_info, src_idx);
3596 d = ipa_get_controlled_uses (old_root_info, i);
3598 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3599 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3600 c = combine_controlled_uses_counters (c, d);
3601 ipa_set_controlled_uses (new_root_info, src_idx, c);
3602 if (c == 0 && new_root_info->ipcp_orig_node)
3604 struct cgraph_node *n;
3605 struct ipa_ref *ref;
3606 tree t = new_root_info->known_csts[src_idx];
3608 if (t && TREE_CODE (t) == ADDR_EXPR
3609 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3610 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3611 && (ref = new_root->find_reference (n, NULL, 0)))
3613 if (dump_file)
3614 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3615 "reference from %s to %s.\n",
3616 new_root->dump_name (),
3617 n->dump_name ());
3618 ref->remove_reference ();
3622 else if (jf->type == IPA_JF_CONST
3623 && (rdesc = jfunc_rdesc_usable (jf)))
3625 int d = ipa_get_controlled_uses (old_root_info, i);
3626 int c = rdesc->refcount;
3627 rdesc->refcount = combine_controlled_uses_counters (c, d);
3628 if (rdesc->refcount == 0)
3630 tree cst = ipa_get_jf_constant (jf);
3631 struct cgraph_node *n;
3632 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3633 && TREE_CODE (TREE_OPERAND (cst, 0))
3634 == FUNCTION_DECL);
3635 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3636 if (n)
3638 struct cgraph_node *clone;
3639 bool ok;
3640 ok = remove_described_reference (n, rdesc);
3641 gcc_checking_assert (ok);
3643 clone = cs->caller;
3644 while (clone->global.inlined_to
3645 && clone != rdesc->cs->caller
3646 && IPA_NODE_REF (clone)->ipcp_orig_node)
3648 struct ipa_ref *ref;
3649 ref = clone->find_reference (n, NULL, 0);
3650 if (ref)
3652 if (dump_file)
3653 fprintf (dump_file, "ipa-prop: Removing "
3654 "cloning-created reference "
3655 "from %s to %s.\n",
3656 clone->dump_name (),
3657 n->dump_name ());
3658 ref->remove_reference ();
3660 clone = clone->callers->caller;
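  /* Arguments of the call that do not correspond to any formal parameter of
     the callee (a call with variable arguments) cannot have their uses
     described, so mark them as undescribed.  */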
3667 for (i = ipa_get_param_count (old_root_info);
3668 i < ipa_get_cs_argument_count (args);
3669 i++)
3671 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3673 if (jf->type == IPA_JF_CONST)
3675 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3676 if (rdesc)
3677 rdesc->refcount = IPA_UNDESCRIBED_USE;
3679 else if (jf->type == IPA_JF_PASS_THROUGH)
3680 ipa_set_controlled_uses (new_root_info,
3681 jf->value.pass_through.formal_id,
3682 IPA_UNDESCRIBED_USE);
3686 /* Update jump functions and call note functions on inlining the call site CS.
3687 CS is expected to lead to a node already cloned by
3688 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3689    *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
3690 created. */
3692 bool
3693 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3694 vec<cgraph_edge *> *new_edges)
3696 bool changed;
3697 /* Do nothing if the preparation phase has not been carried out yet
3698 (i.e. during early inlining). */
3699 if (!ipa_node_params_sum)
3700 return false;
3701 gcc_assert (ipa_edge_args_sum);
3703 propagate_controlled_uses (cs);
3704 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3706 return changed;
3709 /* Ensure that the array of edge argument infos is big enough to accommodate a
3710    structure for all edges, reallocating it if not.  Also allocate the
3711    associated hash tables if they do not already exist.  */
3713 void
3714 ipa_check_create_edge_args (void)
3716 if (!ipa_edge_args_sum)
3717 ipa_edge_args_sum
3718 = (new (ggc_cleared_alloc <ipa_edge_args_sum_t> ())
3719 ipa_edge_args_sum_t (symtab, true));
3720 if (!ipa_bits_hash_table)
3721 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3722 if (!ipa_vr_hash_table)
3723 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
3726 /* Free all ipa_edge_args structures.  */
3728 void
3729 ipa_free_all_edge_args (void)
3731 if (!ipa_edge_args_sum)
3732 return;
3734 ipa_edge_args_sum->release ();
3735 ipa_edge_args_sum = NULL;
3738 /* Free all ipa_node_params structures. */
3740 void
3741 ipa_free_all_node_params (void)
3743 ipa_node_params_sum->release ();
3744 ipa_node_params_sum = NULL;
3747 /* Initialize IPA CP transformation summary and also allocate any necessary hash
3748 tables if they do not already exist. */
3750 void
3751 ipcp_transformation_initialize (void)
3753 if (!ipa_bits_hash_table)
3754 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3755 if (!ipa_vr_hash_table)
3756 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
3757 if (ipcp_transformation_sum == NULL)
3758 ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
3761 /* Release the IPA CP transformation summary. */
3763 void
3764 ipcp_free_transformation_sum (void)
3766 if (!ipcp_transformation_sum)
3767 return;
3769 ipcp_transformation_sum->release ();
3770 ipcp_transformation_sum = NULL;
3773 /* Set the aggregate replacements of NODE to be AGGVALS. */
3775 void
3776 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3777 struct ipa_agg_replacement_value *aggvals)
3779 ipcp_transformation_initialize ();
3780 ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
3781 s->agg_values = aggvals;
3784 /* Hook that is called by cgraph.c when an edge is removed. Adjust reference
3785 count data structures accordingly. */
3787 void
3788 ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
3790 if (args->jump_functions)
3792 struct ipa_jump_func *jf;
3793 int i;
3794 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3796 struct ipa_cst_ref_desc *rdesc;
3797 try_decrement_rdesc_refcount (jf);
3798 if (jf->type == IPA_JF_CONST
3799 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3800 && rdesc->cs == cs)
3801 rdesc->cs = NULL;
3806 /* Method invoked when an edge is duplicated. Copy ipa_edge_args and adjust
3807    reference count data structures accordingly.  */
3809 void
3810 ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
3811 ipa_edge_args *old_args, ipa_edge_args *new_args)
3813 unsigned int i;
3815 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3816 if (old_args->polymorphic_call_contexts)
3817 new_args->polymorphic_call_contexts
3818 = vec_safe_copy (old_args->polymorphic_call_contexts);
3820 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3822 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3823 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3825 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3827 if (src_jf->type == IPA_JF_CONST)
3829 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3831 if (!src_rdesc)
3832 dst_jf->value.constant.rdesc = NULL;
3833 else if (src->caller == dst->caller)
3835 struct ipa_ref *ref;
3836 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3837 gcc_checking_assert (n);
3838 ref = src->caller->find_reference (n, src->call_stmt,
3839 src->lto_stmt_uid);
3840 gcc_checking_assert (ref);
3841 dst->caller->clone_reference (ref, ref->stmt);
3843 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3844 dst_rdesc->cs = dst;
3845 dst_rdesc->refcount = src_rdesc->refcount;
3846 dst_rdesc->next_duplicate = NULL;
3847 dst_jf->value.constant.rdesc = dst_rdesc;
3849 else if (src_rdesc->cs == src)
3851 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3852 dst_rdesc->cs = dst;
3853 dst_rdesc->refcount = src_rdesc->refcount;
3854 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3855 src_rdesc->next_duplicate = dst_rdesc;
3856 dst_jf->value.constant.rdesc = dst_rdesc;
3858 else
3860 struct ipa_cst_ref_desc *dst_rdesc;
3861 /* This can happen during inlining, when a JFUNC can refer to a
3862 reference taken in a function up in the tree of inline clones.
3863 We need to find the duplicate that refers to our tree of
3864 inline clones. */
3866 gcc_assert (dst->caller->global.inlined_to);
3867 for (dst_rdesc = src_rdesc->next_duplicate;
3868 dst_rdesc;
3869 dst_rdesc = dst_rdesc->next_duplicate)
3871 struct cgraph_node *top;
3872 top = dst_rdesc->cs->caller->global.inlined_to
3873 ? dst_rdesc->cs->caller->global.inlined_to
3874 : dst_rdesc->cs->caller;
3875 if (dst->caller->global.inlined_to == top)
3876 break;
3878 gcc_assert (dst_rdesc);
3879 dst_jf->value.constant.rdesc = dst_rdesc;
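/* Duplicating a simple pass-through within the same caller creates one
   more described use of the referenced formal parameter, so bump its
   controlled-uses count if it is tracked at all. */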
3882 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3883 && src->caller == dst->caller)
3885 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3886 ? dst->caller->global.inlined_to : dst->caller;
3887 class ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3888 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3890 int c = ipa_get_controlled_uses (root_info, idx);
3891 if (c != IPA_UNDESCRIBED_USE)
3893 c++;
3894 ipa_set_controlled_uses (root_info, idx, c);
3900 /* Analyze a function newly added to the callgraph. */
3902 static void
3903 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3905 if (node->has_gimple_body_p ())
3906 ipa_analyze_node (node);
3909 /* Hook that is called by summary when a node is duplicated. */
3911 void
3912 ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
3913 ipa_node_params *old_info,
3914 ipa_node_params *new_info)
3916 ipa_agg_replacement_value *old_av, *new_av;
3918 new_info->descriptors = vec_safe_copy (old_info->descriptors);
3919 new_info->lattices = NULL;
3920 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3921 new_info->known_csts = old_info->known_csts.copy ();
3922 new_info->known_contexts = old_info->known_contexts.copy ();
3924 new_info->analysis_done = old_info->analysis_done;
3925 new_info->node_enqueued = old_info->node_enqueued;
3926 new_info->versionable = old_info->versionable;
3928 old_av = ipa_get_agg_replacements_for_node (src);
3929 if (old_av)
3931 new_av = NULL;
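/* The copy is built by prepending, so the new chain ends up in reverse
   order; this appears to be harmless because consumers always walk the
   whole chain. */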
3932 while (old_av)
3934 struct ipa_agg_replacement_value *v;
3936 v = ggc_alloc<ipa_agg_replacement_value> ();
3937 memcpy (v, old_av, sizeof (*v));
3938 v->next = new_av;
3939 new_av = v;
3940 old_av = old_av->next;
3942 ipa_set_node_agg_value_chain (dst, new_av);
3945 ipcp_transformation *src_trans = ipcp_get_transformation_summary (src);
3947 if (src_trans)
3949 ipcp_transformation_initialize ();
3950 src_trans = ipcp_transformation_sum->get_create (src);
3951 ipcp_transformation *dst_trans
3952 = ipcp_transformation_sum->get_create (dst);
3954 dst_trans->bits = vec_safe_copy (src_trans->bits);
3956 const vec<ipa_vr, va_gc> *src_vr = src_trans->m_vr;
3957 vec<ipa_vr, va_gc> *&dst_vr
3958 = ipcp_get_transformation_summary (dst)->m_vr;
3959 if (vec_safe_length (src_trans->m_vr) > 0)
3961 vec_safe_reserve_exact (dst_vr, src_vr->length ());
3962 for (unsigned i = 0; i < src_vr->length (); ++i)
3963 dst_vr->quick_push ((*src_vr)[i]);
3968 /* Register our cgraph hooks if they are not already there. */
3970 void
3971 ipa_register_cgraph_hooks (void)
3973 ipa_check_create_node_params ();
3974 ipa_check_create_edge_args ();
3976 function_insertion_hook_holder =
3977 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3980 /* Unregister our cgraph hooks. */
3982 static void
3983 ipa_unregister_cgraph_hooks (void)
3985 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3986 function_insertion_hook_holder = NULL;
3989 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3990 longer needed after ipa-cp. */
3992 void
3993 ipa_free_all_structures_after_ipa_cp (void)
3995 if (!optimize && !in_lto_p)
3997 ipa_free_all_edge_args ();
3998 ipa_free_all_node_params ();
3999 ipcp_sources_pool.release ();
4000 ipcp_cst_values_pool.release ();
4001 ipcp_poly_ctx_values_pool.release ();
4002 ipcp_agg_lattice_pool.release ();
4003 ipa_unregister_cgraph_hooks ();
4004 ipa_refdesc_pool.release ();
4008 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4009 longer needed after indirect inlining. */
4011 void
4012 ipa_free_all_structures_after_iinln (void)
4014 ipa_free_all_edge_args ();
4015 ipa_free_all_node_params ();
4016 ipa_unregister_cgraph_hooks ();
4017 ipcp_sources_pool.release ();
4018 ipcp_cst_values_pool.release ();
4019 ipcp_poly_ctx_values_pool.release ();
4020 ipcp_agg_lattice_pool.release ();
4021 ipa_refdesc_pool.release ();
4024 /* Print the parameter descriptors of function NODE to F. */
4027 void
4028 ipa_print_node_params (FILE *f, struct cgraph_node *node)
4030 int i, count;
4031 class ipa_node_params *info;
4033 if (!node->definition)
4034 return;
4035 info = IPA_NODE_REF (node);
4036 fprintf (f, " function %s parameter descriptors:\n", node->dump_name ());
4037 count = ipa_get_param_count (info);
4038 for (i = 0; i < count; i++)
4040 int c;
4042 fprintf (f, " ");
4043 ipa_dump_param (f, info, i);
4044 if (ipa_is_param_used (info, i))
4045 fprintf (f, " used");
4046 c = ipa_get_controlled_uses (info, i);
4047 if (c == IPA_UNDESCRIBED_USE)
4048 fprintf (f, " undescribed_use");
4049 else
4050 fprintf (f, " controlled_uses=%i", c);
4051 fprintf (f, "\n");
4055 /* Print ipa_tree_map data structures of all functions in the
4056 callgraph to F. */
4058 void
4059 ipa_print_all_params (FILE * f)
4061 struct cgraph_node *node;
4063 fprintf (f, "\nFunction parameters:\n");
4064 FOR_EACH_FUNCTION (node)
4065 ipa_print_node_params (f, node);
4068 /* Dump the linked list of aggregate replacement values AV to F. */
4070 void
4071 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4073 bool comma = false;
4074 fprintf (f, " Aggregate replacements:");
4075 for (; av; av = av->next)
4077 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4078 av->index, av->offset);
4079 print_generic_expr (f, av->value);
4080 comma = true;
4082 fprintf (f, "\n");
4085 /* Stream out jump function JUMP_FUNC to OB. */
4087 static void
4088 ipa_write_jump_function (struct output_block *ob,
4089 struct ipa_jump_func *jump_func)
4091 struct ipa_agg_jf_item *item;
4092 struct bitpack_d bp;
4093 int i, count;
4094 int flag = 0;
4096 /* ADDR_EXPRs are very common IP invariants; save some streamer data
4097 as well as WPA memory by handling them specially. */
4098 if (jump_func->type == IPA_JF_CONST
4099 && TREE_CODE (jump_func->value.constant.value) == ADDR_EXPR)
4100 flag = 1;
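/* The type and the flag share a single streamed word as type * 2 + flag;
   ipa_read_jump_function recovers them as val & 1 and val / 2.  For
   example, a constant ADDR_EXPR streams as 2 * IPA_JF_CONST + 1. */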
4102 streamer_write_uhwi (ob, jump_func->type * 2 + flag);
4103 switch (jump_func->type)
4105 case IPA_JF_UNKNOWN:
4106 break;
4107 case IPA_JF_CONST:
4108 gcc_assert (
4109 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4110 stream_write_tree (ob,
4111 flag
4112 ? TREE_OPERAND (jump_func->value.constant.value, 0)
4113 : jump_func->value.constant.value, true);
4114 break;
4115 case IPA_JF_PASS_THROUGH:
4116 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4117 if (jump_func->value.pass_through.operation == NOP_EXPR)
4119 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4120 bp = bitpack_create (ob->main_stream);
4121 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4122 streamer_write_bitpack (&bp);
4124 else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
4125 == tcc_unary)
4126 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4127 else
4129 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4130 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4132 break;
4133 case IPA_JF_ANCESTOR:
4134 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4135 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4136 bp = bitpack_create (ob->main_stream);
4137 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4138 streamer_write_bitpack (&bp);
4139 break;
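/* Whatever the type, the aggregate part and then the known-bits and
   value-range information are streamed below; ipa_read_jump_function
   reads them back in the same order. */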
4142 count = vec_safe_length (jump_func->agg.items);
4143 streamer_write_uhwi (ob, count);
4144 if (count)
4146 bp = bitpack_create (ob->main_stream);
4147 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4148 streamer_write_bitpack (&bp);
4151 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4153 streamer_write_uhwi (ob, item->offset);
4154 stream_write_tree (ob, item->value, true);
4157 bp = bitpack_create (ob->main_stream);
4158 bp_pack_value (&bp, !!jump_func->bits, 1);
4159 streamer_write_bitpack (&bp);
4160 if (jump_func->bits)
4162 streamer_write_widest_int (ob, jump_func->bits->value);
4163 streamer_write_widest_int (ob, jump_func->bits->mask);
4165 bp_pack_value (&bp, !!jump_func->m_vr, 1);
4166 streamer_write_bitpack (&bp);
4167 if (jump_func->m_vr)
4169 streamer_write_enum (ob->main_stream, value_rang_type,
4170 VR_LAST, jump_func->m_vr->kind ());
4171 stream_write_tree (ob, jump_func->m_vr->min (), true);
4172 stream_write_tree (ob, jump_func->m_vr->max (), true);
4176 /* Read in jump function JUMP_FUNC from IB. */
4178 static void
4179 ipa_read_jump_function (class lto_input_block *ib,
4180 struct ipa_jump_func *jump_func,
4181 struct cgraph_edge *cs,
4182 class data_in *data_in,
4183 bool prevails)
4185 enum jump_func_type jftype;
4186 enum tree_code operation;
4187 int i, count;
4188 int val = streamer_read_uhwi (ib);
4189 bool flag = val & 1;
4191 jftype = (enum jump_func_type) (val / 2);
4192 switch (jftype)
4194 case IPA_JF_UNKNOWN:
4195 ipa_set_jf_unknown (jump_func);
4196 break;
4197 case IPA_JF_CONST:
4199 tree t = stream_read_tree (ib, data_in);
4200 if (flag && prevails)
4201 t = build_fold_addr_expr (t);
4202 ipa_set_jf_constant (jump_func, t, cs);
4204 break;
4205 case IPA_JF_PASS_THROUGH:
4206 operation = (enum tree_code) streamer_read_uhwi (ib);
4207 if (operation == NOP_EXPR)
4209 int formal_id = streamer_read_uhwi (ib);
4210 struct bitpack_d bp = streamer_read_bitpack (ib);
4211 bool agg_preserved = bp_unpack_value (&bp, 1);
4212 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4214 else if (TREE_CODE_CLASS (operation) == tcc_unary)
4216 int formal_id = streamer_read_uhwi (ib);
4217 ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4219 else
4221 tree operand = stream_read_tree (ib, data_in);
4222 int formal_id = streamer_read_uhwi (ib);
4223 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4224 operation);
4226 break;
4227 case IPA_JF_ANCESTOR:
4229 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4230 int formal_id = streamer_read_uhwi (ib);
4231 struct bitpack_d bp = streamer_read_bitpack (ib);
4232 bool agg_preserved = bp_unpack_value (&bp, 1);
4233 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4234 break;
4236 default:
4237 fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
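/* The rest mirrors ipa_write_jump_function: aggregate items, known bits,
   then the value range.  Everything is read even when !PREVAILS so that
   the input block stays in sync; the values are simply discarded. */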
4240 count = streamer_read_uhwi (ib);
4241 if (prevails)
4242 vec_alloc (jump_func->agg.items, count);
4243 if (count)
4245 struct bitpack_d bp = streamer_read_bitpack (ib);
4246 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4248 for (i = 0; i < count; i++)
4250 struct ipa_agg_jf_item item;
4251 item.offset = streamer_read_uhwi (ib);
4252 item.value = stream_read_tree (ib, data_in);
4253 if (prevails)
4254 jump_func->agg.items->quick_push (item);
4257 struct bitpack_d bp = streamer_read_bitpack (ib);
4258 bool bits_known = bp_unpack_value (&bp, 1);
4259 if (bits_known)
4261 widest_int value = streamer_read_widest_int (ib);
4262 widest_int mask = streamer_read_widest_int (ib);
4263 if (prevails)
4264 ipa_set_jfunc_bits (jump_func, value, mask);
4266 else
4267 jump_func->bits = NULL;
4269 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4270 bool vr_known = bp_unpack_value (&vr_bp, 1);
4271 if (vr_known)
4273 enum value_range_kind type = streamer_read_enum (ib, value_range_kind,
4274 VR_LAST);
4275 tree min = stream_read_tree (ib, data_in);
4276 tree max = stream_read_tree (ib, data_in);
4277 if (prevails)
4278 ipa_set_jfunc_vr (jump_func, type, min, max);
4280 else
4281 jump_func->m_vr = NULL;
4284 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4285 relevant to indirect inlining to OB. */
4287 static void
4288 ipa_write_indirect_edge_info (struct output_block *ob,
4289 struct cgraph_edge *cs)
4291 class cgraph_indirect_call_info *ii = cs->indirect_info;
4292 struct bitpack_d bp;
4294 streamer_write_hwi (ob, ii->param_index);
4295 bp = bitpack_create (ob->main_stream);
4296 bp_pack_value (&bp, ii->polymorphic, 1);
4297 bp_pack_value (&bp, ii->agg_contents, 1);
4298 bp_pack_value (&bp, ii->member_ptr, 1);
4299 bp_pack_value (&bp, ii->by_ref, 1);
4300 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
4301 bp_pack_value (&bp, ii->vptr_changed, 1);
4302 streamer_write_bitpack (&bp);
4303 if (ii->agg_contents || ii->polymorphic)
4304 streamer_write_hwi (ob, ii->offset);
4305 else
4306 gcc_assert (ii->offset == 0);
4308 if (ii->polymorphic)
4310 streamer_write_hwi (ob, ii->otr_token);
4311 stream_write_tree (ob, ii->otr_type, true);
4312 ii->context.stream_out (ob);
4316 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4317 relevant to indirect inlining from IB. */
4319 static void
4320 ipa_read_indirect_edge_info (class lto_input_block *ib,
4321 class data_in *data_in,
4322 struct cgraph_edge *cs)
4324 class cgraph_indirect_call_info *ii = cs->indirect_info;
4325 struct bitpack_d bp;
4327 ii->param_index = (int) streamer_read_hwi (ib);
4328 bp = streamer_read_bitpack (ib);
4329 ii->polymorphic = bp_unpack_value (&bp, 1);
4330 ii->agg_contents = bp_unpack_value (&bp, 1);
4331 ii->member_ptr = bp_unpack_value (&bp, 1);
4332 ii->by_ref = bp_unpack_value (&bp, 1);
4333 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
4334 ii->vptr_changed = bp_unpack_value (&bp, 1);
4335 if (ii->agg_contents || ii->polymorphic)
4336 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4337 else
4338 ii->offset = 0;
4339 if (ii->polymorphic)
4341 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4342 ii->otr_type = stream_read_tree (ib, data_in);
4343 ii->context.stream_in (ib, data_in);
4347 /* Stream out NODE info to OB. */
4349 static void
4350 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4352 int node_ref;
4353 lto_symtab_encoder_t encoder;
4354 class ipa_node_params *info = IPA_NODE_REF (node);
4355 int j;
4356 struct cgraph_edge *e;
4357 struct bitpack_d bp;
4359 encoder = ob->decl_state->symtab_node_encoder;
4360 node_ref = lto_symtab_encoder_encode (encoder, node);
4361 streamer_write_uhwi (ob, node_ref);
4363 streamer_write_uhwi (ob, ipa_get_param_count (info));
4364 for (j = 0; j < ipa_get_param_count (info); j++)
4365 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4366 bp = bitpack_create (ob->main_stream);
4367 gcc_assert (info->analysis_done
4368 || ipa_get_param_count (info) == 0);
4369 gcc_assert (!info->node_enqueued);
4370 gcc_assert (!info->ipcp_orig_node);
4371 for (j = 0; j < ipa_get_param_count (info); j++)
4372 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4373 streamer_write_bitpack (&bp);
4374 for (j = 0; j < ipa_get_param_count (info); j++)
4376 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4377 stream_write_tree (ob, ipa_get_type (info, j), true);
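/* For every call site the argument count is streamed doubled, with the
   low bit recording whether polymorphic call contexts are present;
   ipa_read_edge_info decodes this with count & 1 and count / 2. */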
4379 for (e = node->callees; e; e = e->next_callee)
4381 class ipa_edge_args *args = IPA_EDGE_REF (e);
4383 streamer_write_uhwi (ob,
4384 ipa_get_cs_argument_count (args) * 2
4385 + (args->polymorphic_call_contexts != NULL));
4386 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4388 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4389 if (args->polymorphic_call_contexts != NULL)
4390 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4393 for (e = node->indirect_calls; e; e = e->next_callee)
4395 class ipa_edge_args *args = IPA_EDGE_REF (e);
4397 streamer_write_uhwi (ob,
4398 ipa_get_cs_argument_count (args) * 2
4399 + (args->polymorphic_call_contexts != NULL));
4400 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4402 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4403 if (args->polymorphic_call_contexts != NULL)
4404 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4406 ipa_write_indirect_edge_info (ob, e);
4410 /* Stream in edge E from IB. */
4412 static void
4413 ipa_read_edge_info (class lto_input_block *ib,
4414 class data_in *data_in,
4415 struct cgraph_edge *e, bool prevails)
4417 int count = streamer_read_uhwi (ib);
4418 bool contexts_computed = count & 1;
4420 count /= 2;
4421 if (!count)
4422 return;
4423 if (prevails && e->possibly_call_in_translation_unit_p ())
4425 class ipa_edge_args *args = IPA_EDGE_REF (e);
4426 vec_safe_grow_cleared (args->jump_functions, count);
4427 if (contexts_computed)
4428 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4429 for (int k = 0; k < count; k++)
4431 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4432 data_in, prevails);
4433 if (contexts_computed)
4434 ipa_get_ith_polymorhic_call_context (args, k)->stream_in
4435 (ib, data_in);
4438 else
4440 for (int k = 0; k < count; k++)
4442 struct ipa_jump_func dummy;
4443 ipa_read_jump_function (ib, &dummy, e,
4444 data_in, prevails);
4445 if (contexts_computed)
4447 class ipa_polymorphic_call_context ctx;
4448 ctx.stream_in (ib, data_in);
4454 /* Stream in NODE info from IB. */
4456 static void
4457 ipa_read_node_info (class lto_input_block *ib, struct cgraph_node *node,
4458 class data_in *data_in)
4460 int k;
4461 struct cgraph_edge *e;
4462 struct bitpack_d bp;
4463 bool prevails = node->prevailing_p ();
4464 class ipa_node_params *info = prevails ? IPA_NODE_REF (node) : NULL;
4466 int param_count = streamer_read_uhwi (ib);
4467 if (prevails)
4469 ipa_alloc_node_params (node, param_count);
4470 for (k = 0; k < param_count; k++)
4471 (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
4472 if (ipa_get_param_count (info) != 0)
4473 info->analysis_done = true;
4474 info->node_enqueued = false;
4476 else
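/* Still consume the streamed move costs so that the reader stays in
   sync with the stream. */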
4477 for (k = 0; k < param_count; k++)
4478 streamer_read_uhwi (ib);
4480 bp = streamer_read_bitpack (ib);
4481 for (k = 0; k < param_count; k++)
4483 bool used = bp_unpack_value (&bp, 1);
4485 if (prevails)
4486 ipa_set_param_used (info, k, used);
4488 for (k = 0; k < param_count; k++)
4490 int nuses = streamer_read_hwi (ib);
4491 tree type = stream_read_tree (ib, data_in);
4493 if (prevails)
4495 ipa_set_controlled_uses (info, k, nuses);
4496 (*info->descriptors)[k].decl_or_type = type;
4499 for (e = node->callees; e; e = e->next_callee)
4500 ipa_read_edge_info (ib, data_in, e, prevails);
4501 for (e = node->indirect_calls; e; e = e->next_callee)
4503 ipa_read_edge_info (ib, data_in, e, prevails);
4504 ipa_read_indirect_edge_info (ib, data_in, e);
4508 /* Write jump functions for all functions in the current LTO partition. */
4510 void
4511 ipa_prop_write_jump_functions (void)
4513 struct cgraph_node *node;
4514 struct output_block *ob;
4515 unsigned int count = 0;
4516 lto_symtab_encoder_iterator lsei;
4517 lto_symtab_encoder_t encoder;
4519 if (!ipa_node_params_sum || !ipa_edge_args_sum)
4520 return;
4522 ob = create_output_block (LTO_section_jump_functions);
4523 encoder = ob->decl_state->symtab_node_encoder;
4524 ob->symbol = NULL;
4525 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4526 lsei_next_function_in_partition (&lsei))
4528 node = lsei_cgraph_node (lsei);
4529 if (node->has_gimple_body_p ()
4530 && IPA_NODE_REF (node) != NULL)
4531 count++;
4534 streamer_write_uhwi (ob, count);
4536 /* Process all of the functions. */
4537 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4538 lsei_next_function_in_partition (&lsei))
4540 node = lsei_cgraph_node (lsei);
4541 if (node->has_gimple_body_p ()
4542 && IPA_NODE_REF (node) != NULL)
4543 ipa_write_node_info (ob, node);
4545 streamer_write_char_stream (ob->main_stream, 0);
4546 produce_asm (ob, NULL);
4547 destroy_output_block (ob);
4550 /* Read a jump-functions section in file FILE_DATA of length LEN with
data DATA. */
4552 static void
4553 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4554 size_t len)
4556 const struct lto_function_header *header =
4557 (const struct lto_function_header *) data;
4558 const int cfg_offset = sizeof (struct lto_function_header);
4559 const int main_offset = cfg_offset + header->cfg_size;
4560 const int string_offset = main_offset + header->main_size;
4561 class data_in *data_in;
4562 unsigned int i;
4563 unsigned int count;
4565 lto_input_block ib_main ((const char *) data + main_offset,
4566 header->main_size, file_data->mode_table);
4568 data_in =
4569 lto_data_in_create (file_data, (const char *) data + string_offset,
4570 header->string_size, vNULL);
4571 count = streamer_read_uhwi (&ib_main);
4573 for (i = 0; i < count; i++)
4575 unsigned int index;
4576 struct cgraph_node *node;
4577 lto_symtab_encoder_t encoder;
4579 index = streamer_read_uhwi (&ib_main);
4580 encoder = file_data->symtab_node_encoder;
4581 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4582 index));
4583 gcc_assert (node->definition);
4584 ipa_read_node_info (&ib_main, node, data_in);
4586 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4587 len);
4588 lto_data_in_delete (data_in);
4591 /* Read IPA-CP jump functions from all LTO files. */
4593 void
4594 ipa_prop_read_jump_functions (void)
4596 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4597 struct lto_file_decl_data *file_data;
4598 unsigned int j = 0;
4600 ipa_check_create_node_params ();
4601 ipa_check_create_edge_args ();
4602 ipa_register_cgraph_hooks ();
4604 while ((file_data = file_data_vec[j++]))
4606 size_t len;
4607 const char *data = lto_get_section_data (file_data,
4608 LTO_section_jump_functions, NULL, &len);
4609 if (data)
4610 ipa_prop_read_section (file_data, data, len);
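/* Stream out the IPA-CP transformation summary of NODE -- its aggregate
   replacement values and, when present, the value ranges and known bits
   of its parameters -- to OB. */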
4614 void
4615 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
4617 int node_ref;
4618 unsigned int count = 0;
4619 lto_symtab_encoder_t encoder;
4620 struct ipa_agg_replacement_value *aggvals, *av;
4622 aggvals = ipa_get_agg_replacements_for_node (node);
4623 encoder = ob->decl_state->symtab_node_encoder;
4624 node_ref = lto_symtab_encoder_encode (encoder, node);
4625 streamer_write_uhwi (ob, node_ref);
4627 for (av = aggvals; av; av = av->next)
4628 count++;
4629 streamer_write_uhwi (ob, count);
4631 for (av = aggvals; av; av = av->next)
4633 struct bitpack_d bp;
4635 streamer_write_uhwi (ob, av->offset);
4636 streamer_write_uhwi (ob, av->index);
4637 stream_write_tree (ob, av->value, true);
4639 bp = bitpack_create (ob->main_stream);
4640 bp_pack_value (&bp, av->by_ref, 1);
4641 streamer_write_bitpack (&bp);
4644 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
4645 if (ts && vec_safe_length (ts->m_vr) > 0)
4647 count = ts->m_vr->length ();
4648 streamer_write_uhwi (ob, count);
4649 for (unsigned i = 0; i < count; ++i)
4651 struct bitpack_d bp;
4652 ipa_vr *parm_vr = &(*ts->m_vr)[i];
4653 bp = bitpack_create (ob->main_stream);
4654 bp_pack_value (&bp, parm_vr->known, 1);
4655 streamer_write_bitpack (&bp);
4656 if (parm_vr->known)
4658 streamer_write_enum (ob->main_stream, value_rang_type,
4659 VR_LAST, parm_vr->type);
4660 streamer_write_wide_int (ob, parm_vr->min);
4661 streamer_write_wide_int (ob, parm_vr->max);
4665 else
4666 streamer_write_uhwi (ob, 0);
4668 if (ts && vec_safe_length (ts->bits) > 0)
4670 count = ts->bits->length ();
4671 streamer_write_uhwi (ob, count);
4673 for (unsigned i = 0; i < count; ++i)
4675 const ipa_bits *bits_jfunc = (*ts->bits)[i];
4676 struct bitpack_d bp = bitpack_create (ob->main_stream);
4677 bp_pack_value (&bp, !!bits_jfunc, 1);
4678 streamer_write_bitpack (&bp);
4679 if (bits_jfunc)
4681 streamer_write_widest_int (ob, bits_jfunc->value);
4682 streamer_write_widest_int (ob, bits_jfunc->mask);
4686 else
4687 streamer_write_uhwi (ob, 0);
4690 /* Stream in the transformation summary for NODE from IB: its aggregate
value replacement chain plus any value range and known-bits info. */
4692 static void
4693 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
4694 data_in *data_in)
4696 struct ipa_agg_replacement_value *aggvals = NULL;
4697 unsigned int count, i;
4699 count = streamer_read_uhwi (ib);
4700 for (i = 0; i < count; i++)
4702 struct ipa_agg_replacement_value *av;
4703 struct bitpack_d bp;
4705 av = ggc_alloc<ipa_agg_replacement_value> ();
4706 av->offset = streamer_read_uhwi (ib);
4707 av->index = streamer_read_uhwi (ib);
4708 av->value = stream_read_tree (ib, data_in);
4709 bp = streamer_read_bitpack (ib);
4710 av->by_ref = bp_unpack_value (&bp, 1);
4711 av->next = aggvals;
4712 aggvals = av;
4714 ipa_set_node_agg_value_chain (node, aggvals);
4716 count = streamer_read_uhwi (ib);
4717 if (count > 0)
4719 ipcp_transformation_initialize ();
4720 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
4721 vec_safe_grow_cleared (ts->m_vr, count);
4722 for (i = 0; i < count; i++)
4724 ipa_vr *parm_vr;
4725 parm_vr = &(*ts->m_vr)[i];
4726 struct bitpack_d bp;
4727 bp = streamer_read_bitpack (ib);
4728 parm_vr->known = bp_unpack_value (&bp, 1);
4729 if (parm_vr->known)
4731 parm_vr->type = streamer_read_enum (ib, value_range_kind,
4732 VR_LAST);
4733 parm_vr->min = streamer_read_wide_int (ib);
4734 parm_vr->max = streamer_read_wide_int (ib);
4738 count = streamer_read_uhwi (ib);
4739 if (count > 0)
4741 ipcp_transformation_initialize ();
4742 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
4743 vec_safe_grow_cleared (ts->bits, count);
4745 for (i = 0; i < count; i++)
4747 struct bitpack_d bp = streamer_read_bitpack (ib);
4748 bool known = bp_unpack_value (&bp, 1);
4749 if (known)
4751 ipa_bits *bits
4752 = ipa_get_ipa_bits_for_value (streamer_read_widest_int (ib),
4753 streamer_read_widest_int (ib));
4754 (*ts->bits)[i] = bits;
4760 /* Write IPA-CP transformation summaries for all functions in the partition. */
4762 void
4763 ipcp_write_transformation_summaries (void)
4765 struct cgraph_node *node;
4766 struct output_block *ob;
4767 unsigned int count = 0;
4768 lto_symtab_encoder_iterator lsei;
4769 lto_symtab_encoder_t encoder;
4771 ob = create_output_block (LTO_section_ipcp_transform);
4772 encoder = ob->decl_state->symtab_node_encoder;
4773 ob->symbol = NULL;
4774 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4775 lsei_next_function_in_partition (&lsei))
4777 node = lsei_cgraph_node (lsei);
4778 if (node->has_gimple_body_p ())
4779 count++;
4782 streamer_write_uhwi (ob, count);
4784 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4785 lsei_next_function_in_partition (&lsei))
4787 node = lsei_cgraph_node (lsei);
4788 if (node->has_gimple_body_p ())
4789 write_ipcp_transformation_info (ob, node);
4791 streamer_write_char_stream (ob->main_stream, 0);
4792 produce_asm (ob, NULL);
4793 destroy_output_block (ob);
4796 /* Read replacements section in file FILE_DATA of length LEN with data
4797 DATA. */
4799 static void
4800 read_replacements_section (struct lto_file_decl_data *file_data,
4801 const char *data,
4802 size_t len)
4804 const struct lto_function_header *header =
4805 (const struct lto_function_header *) data;
4806 const int cfg_offset = sizeof (struct lto_function_header);
4807 const int main_offset = cfg_offset + header->cfg_size;
4808 const int string_offset = main_offset + header->main_size;
4809 class data_in *data_in;
4810 unsigned int i;
4811 unsigned int count;
4813 lto_input_block ib_main ((const char *) data + main_offset,
4814 header->main_size, file_data->mode_table);
4816 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
4817 header->string_size, vNULL);
4818 count = streamer_read_uhwi (&ib_main);
4820 for (i = 0; i < count; i++)
4822 unsigned int index;
4823 struct cgraph_node *node;
4824 lto_symtab_encoder_t encoder;
4826 index = streamer_read_uhwi (&ib_main);
4827 encoder = file_data->symtab_node_encoder;
4828 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4829 index));
4830 gcc_assert (node->definition);
4831 read_ipcp_transformation_info (&ib_main, node, data_in);
4833 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
4834 len);
4835 lto_data_in_delete (data_in);
4838 /* Read IPA-CP transformation summaries. */
4840 void
4841 ipcp_read_transformation_summaries (void)
4843 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4844 struct lto_file_decl_data *file_data;
4845 unsigned int j = 0;
4847 while ((file_data = file_data_vec[j++]))
4849 size_t len;
4850 const char *data = lto_get_section_data (file_data,
4851 LTO_section_ipcp_transform,
4852 NULL, &len);
4853 if (data)
4854 read_replacements_section (file_data, data, len);
4858 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4859 NODE. */
4861 static void
4862 adjust_agg_replacement_values (struct cgraph_node *node,
4863 struct ipa_agg_replacement_value *aggval)
4865 struct ipa_agg_replacement_value *v;
4867 if (!node->clone.param_adjustments)
4868 return;
4870 auto_vec<int, 16> new_indices;
4871 node->clone.param_adjustments->get_updated_indices (&new_indices);
4872 for (v = aggval; v; v = v->next)
4874 gcc_checking_assert (v->index >= 0);
4876 if ((unsigned) v->index < new_indices.length ())
4877 v->index = new_indices[v->index];
4878 else
4879 /* This can happen if we know about a constant passed by reference by
4880 an argument which is never actually used for anything, let alone
4881 loading that constant. */
4882 v->index = -1;
4886 /* Dominator walker driving the ipcp modification phase. */
4888 class ipcp_modif_dom_walker : public dom_walker
4890 public:
4891 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
4892 vec<ipa_param_descriptor, va_gc> *descs,
4893 struct ipa_agg_replacement_value *av,
4894 bool *sc, bool *cc)
4895 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
4896 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
4898 virtual edge before_dom_children (basic_block);
4900 private:
4901 struct ipa_func_body_info *m_fbi;
4902 vec<ipa_param_descriptor, va_gc> *m_descriptors;
4903 struct ipa_agg_replacement_value *m_aggval;
4904 bool *m_something_changed, *m_cfg_changed;
4907 edge
4908 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
4910 gimple_stmt_iterator gsi;
4911 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4913 struct ipa_agg_replacement_value *v;
4914 gimple *stmt = gsi_stmt (gsi);
4915 tree rhs, val, t;
4916 HOST_WIDE_INT offset;
4917 poly_int64 size;
4918 int index;
4919 bool by_ref, vce;
4921 if (!gimple_assign_load_p (stmt))
4922 continue;
4923 rhs = gimple_assign_rhs1 (stmt);
4924 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
4925 continue;
4927 vce = false;
4928 t = rhs;
4929 while (handled_component_p (t))
4931 /* A V_C_E can do things like reinterpret an array of integers as one
4932 bigger integer and similar things, which we do not handle below. */
4933 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
4935 vce = true;
4936 break;
4938 t = TREE_OPERAND (t, 0);
4940 if (vce)
4941 continue;
4943 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
4944 &offset, &size, &by_ref))
4945 continue;
4946 for (v = m_aggval; v; v = v->next)
4947 if (v->index == index
4948 && v->offset == offset)
4949 break;
4950 if (!v
4951 || v->by_ref != by_ref
4952 || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v->value))),
4953 size))
4954 continue;
4956 gcc_checking_assert (is_gimple_ip_invariant (v->value));
4957 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
4959 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
4960 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
4961 else if (TYPE_SIZE (TREE_TYPE (rhs))
4962 == TYPE_SIZE (TREE_TYPE (v->value)))
4963 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
4964 else
4966 if (dump_file)
4968 fprintf (dump_file, " const ");
4969 print_generic_expr (dump_file, v->value);
4970 fprintf (dump_file, " can't be converted to type of ");
4971 print_generic_expr (dump_file, rhs);
4972 fprintf (dump_file, "\n");
4974 continue;
4977 else
4978 val = v->value;
4980 if (dump_file && (dump_flags & TDF_DETAILS))
4982 fprintf (dump_file, "Modifying stmt:\n ");
4983 print_gimple_stmt (dump_file, stmt, 0);
4985 gimple_assign_set_rhs_from_tree (&gsi, val);
4986 update_stmt (stmt);
4988 if (dump_file && (dump_flags & TDF_DETAILS))
4990 fprintf (dump_file, "into:\n ");
4991 print_gimple_stmt (dump_file, stmt, 0);
4992 fprintf (dump_file, "\n");
4995 *m_something_changed = true;
4996 if (maybe_clean_eh_stmt (stmt)
4997 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4998 *m_cfg_changed = true;
5000 return NULL;
5003 /* Update bits info of formal parameters as described in
5004 ipcp_transformation. */
5006 static void
5007 ipcp_update_bits (struct cgraph_node *node)
5009 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5011 if (!ts || vec_safe_length (ts->bits) == 0)
5012 return;
5013 vec<ipa_bits *, va_gc> &bits = *ts->bits;
5014 unsigned count = bits.length ();
5015 if (!count)
5016 return;
5018 auto_vec<int, 16> new_indices;
5019 bool need_remapping = false;
5020 if (node->clone.param_adjustments)
5022 node->clone.param_adjustments->get_updated_indices (&new_indices);
5023 need_remapping = true;
5025 auto_vec <tree, 16> parm_decls;
5026 push_function_arg_decls (&parm_decls, node->decl);
5028 for (unsigned i = 0; i < count; ++i)
5030 tree parm;
5031 if (need_remapping)
5033 if (i >= new_indices.length ())
5034 continue;
5035 int idx = new_indices[i];
5036 if (idx < 0)
5037 continue;
5038 parm = parm_decls[idx];
5040 else
5041 parm = parm_decls[i];
5042 gcc_checking_assert (parm);
5045 if (!bits[i]
5046 || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
5047 || POINTER_TYPE_P (TREE_TYPE (parm)))
5048 || !is_gimple_reg (parm))
5049 continue;
5051 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5052 if (!ddef)
5053 continue;
5055 if (dump_file)
5057 fprintf (dump_file, "Adjusting mask for param %u to ", i);
5058 print_hex (bits[i]->mask, dump_file);
5059 fprintf (dump_file, "\n");
5062 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5064 unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
5065 signop sgn = TYPE_SIGN (TREE_TYPE (ddef));
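/* Bits set in the mask are unknown, so the union of mask and value
   over-approximates the bits that may be nonzero. */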
5067 wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
5068 | wide_int::from (bits[i]->value, prec, sgn);
5069 set_nonzero_bits (ddef, nonzero_bits);
5071 else
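/* For pointers, the least significant set bit of the mask (tem & -tem)
   is the first bit not known to be constant; all bits below it are
   known, so it gives the alignment, and the known value bits below it
   give the misalignment. */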
5073 unsigned tem = bits[i]->mask.to_uhwi ();
5074 unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
5075 unsigned align = tem & -tem;
5076 unsigned misalign = bitpos & (align - 1);
5078 if (align > 1)
5080 if (dump_file)
5081 fprintf (dump_file, "Adjusting align: %u, misalign: %u\n",
5082 align, misalign);
5083 unsigned old_align, old_misalign;
5084 struct ptr_info_def *pi = get_ptr_info (ddef);
5085 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5087 if (old_known
5088 && old_align > align)
5090 if (dump_file)
5092 fprintf (dump_file, "But alignment was already %u.\n", old_align);
5093 if ((old_misalign & (align - 1)) != misalign)
5094 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5095 old_misalign, misalign);
5097 continue;
5100 if (old_known
5101 && ((misalign & (old_align - 1)) != old_misalign)
5102 && dump_file)
5103 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5104 old_misalign, misalign);
5106 set_ptr_info_alignment (pi, align, misalign);
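/* Return true if the recorded range of a parameter of type EXPR_TYPE is
   known to exclude zero: either the anti-range ~[0, 0] or, for an
   unsigned type, the full range [1, max]. */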
5112 bool
5113 ipa_vr::nonzero_p (tree expr_type) const
5115 if (type == VR_ANTI_RANGE && wi::eq_p (min, 0) && wi::eq_p (max, 0))
5116 return true;
5118 unsigned prec = TYPE_PRECISION (expr_type);
5119 return (type == VR_RANGE
5120 && TYPE_UNSIGNED (expr_type)
5121 && wi::eq_p (min, wi::one (prec))
5122 && wi::eq_p (max, wi::max_value (prec, TYPE_SIGN (expr_type))));
5125 /* Update value range of formal parameters as described in
5126 ipcp_transformation. */
5128 static void
5129 ipcp_update_vr (struct cgraph_node *node)
5131 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5132 if (!ts || vec_safe_length (ts->m_vr) == 0)
5133 return;
5134 const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
5135 unsigned count = vr.length ();
5136 if (!count)
5137 return;
5139 auto_vec<int, 16> new_indices;
5140 bool need_remapping = false;
5141 if (node->clone.param_adjustments)
5143 node->clone.param_adjustments->get_updated_indices (&new_indices);
5144 need_remapping = true;
5146 auto_vec <tree, 16> parm_decls;
5147 push_function_arg_decls (&parm_decls, node->decl);
5149 for (unsigned i = 0; i < count; ++i)
5151 tree parm;
5152 int remapped_idx;
5153 if (need_remapping)
5155 if (i >= new_indices.length ())
5156 continue;
5157 remapped_idx = new_indices[i];
5158 if (remapped_idx < 0)
5159 continue;
5161 else
5162 remapped_idx = i;
5164 parm = parm_decls[remapped_idx];
5166 gcc_checking_assert (parm);
5167 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5169 if (!ddef || !is_gimple_reg (parm))
5170 continue;
5172 if (vr[i].known
5173 && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
5175 tree type = TREE_TYPE (ddef);
5176 unsigned prec = TYPE_PRECISION (type);
5177 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5179 if (dump_file)
5181 fprintf (dump_file, "Setting value range of param %u "
5182 "(now %i) ", i, remapped_idx);
5183 fprintf (dump_file, "%s[",
5184 (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
5185 print_decs (vr[i].min, dump_file);
5186 fprintf (dump_file, ", ");
5187 print_decs (vr[i].max, dump_file);
5188 fprintf (dump_file, "]\n");
5190 set_range_info (ddef, vr[i].type,
5191 wide_int_storage::from (vr[i].min, prec,
5192 TYPE_SIGN (type)),
5193 wide_int_storage::from (vr[i].max, prec,
5194 TYPE_SIGN (type)));
5196 else if (POINTER_TYPE_P (TREE_TYPE (ddef))
5197 && vr[i].nonzero_p (TREE_TYPE (ddef)))
5199 if (dump_file)
5200 fprintf (dump_file, "Setting nonnull for %u\n", i);
5201 set_ptr_nonnull (ddef);
5207 /* IPA-CP transformation phase: apply the recorded known bits, value
ranges and aggregate value replacements to the body of NODE. */
5209 unsigned int
5210 ipcp_transform_function (struct cgraph_node *node)
5212 vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
5213 struct ipa_func_body_info fbi;
5214 struct ipa_agg_replacement_value *aggval;
5215 int param_count;
5216 bool cfg_changed = false, something_changed = false;
5218 gcc_checking_assert (cfun);
5219 gcc_checking_assert (current_function_decl);
5221 if (dump_file)
5222 fprintf (dump_file, "Modification phase of node %s\n",
5223 node->dump_name ());
5225 ipcp_update_bits (node);
5226 ipcp_update_vr (node);
5227 aggval = ipa_get_agg_replacements_for_node (node);
5228 if (!aggval)
5229 return 0;
5230 param_count = count_formal_params (node->decl);
5231 if (param_count == 0)
5232 return 0;
5233 adjust_agg_replacement_values (node, aggval);
5234 if (dump_file)
5235 ipa_dump_agg_replacement_values (dump_file, aggval);
5237 fbi.node = node;
5238 fbi.info = NULL;
5239 fbi.bb_infos = vNULL;
5240 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5241 fbi.param_count = param_count;
5242 fbi.aa_walk_budget = PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
5244 vec_safe_grow_cleared (descriptors, param_count);
5245 ipa_populate_param_decls (node, *descriptors);
5246 calculate_dominance_info (CDI_DOMINATORS);
5247 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5248 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5250 int i;
5251 struct ipa_bb_info *bi;
5252 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5253 free_ipa_bb_info (bi);
5254 fbi.bb_infos.release ();
5255 free_dominance_info (CDI_DOMINATORS);
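/* The information has been applied; clear it from the summary so it is
   not applied again. */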
5257 ipcp_transformation *s = ipcp_transformation_sum->get (node);
5258 s->agg_values = NULL;
5259 s->bits = NULL;
5260 s->m_vr = NULL;
5262 vec_free (descriptors);
5264 if (!something_changed)
5265 return 0;
5267 if (cfg_changed)
5268 delete_unreachable_blocks_update_callgraph (node, false);
5270 return TODO_update_ssa_only_virtuals;
5273 #include "gt-ipa-prop.h"