/* Interprocedural analyses.
   Copyright (C) 2005-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "calls.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-inline.h"
#include "ipa-fnsummary.h"
#include "gimple-pretty-print.h"
#include "params.h"
#include "ipa-utils.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"
#include "tree-cfgcleanup.h"
/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;

function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;

/* Edge summary for IPA-CP edge information.  */
ipa_edge_args_sum_t *ipa_edge_args_sum;

/* Traits for a hash table for reusing already existing ipa_bits.  */

struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
{
  typedef ipa_bits *value_type;
  typedef ipa_bits *compare_type;
  static hashval_t
  hash (const ipa_bits *p)
  {
    hashval_t t = (hashval_t) p->value.to_shwi ();
    return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
  }
  static bool
  equal (const ipa_bits *a, const ipa_bits *b)
  {
    return a->value == b->value && a->mask == b->mask;
  }
  static void
  mark_empty (ipa_bits *&p)
  {
    p = NULL;
  }
  static bool
  is_empty (const ipa_bits *p)
  {
    return p == NULL;
  }
  static bool
  is_deleted (const ipa_bits *p)
  {
    return p == reinterpret_cast<const ipa_bits *> (1);
  }
  static void
  mark_deleted (ipa_bits *&p)
  {
    p = reinterpret_cast<ipa_bits *> (1);
  }
};
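
/* Note that the cache traits above must keep the "empty" and "deleted"
   markers distinguishable from every valid entry: NULL marks empty slots,
   while the (never dereferenced) pointer value 1 marks deleted ones.  The
   value_range cache traits below use the same scheme.  */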

/* Hash table to avoid repeated allocations of equal ipa_bits.  */
static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;

/* Traits for a hash table for reusing value_ranges used for IPA.  Note that
   the equiv bitmap is not hashed and is expected to be NULL.  */

struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range *>
{
  typedef value_range *value_type;
  typedef value_range *compare_type;
  static hashval_t
  hash (const value_range *p)
  {
    inchash::hash hstate (p->kind ());
    inchash::add_expr (p->min (), hstate);
    inchash::add_expr (p->max (), hstate);
    return hstate.end ();
  }
  static bool
  equal (const value_range *a, const value_range *b)
  {
    return a->equal_p (*b);
  }
  static void
  mark_empty (value_range *&p)
  {
    p = NULL;
  }
  static bool
  is_empty (const value_range *p)
  {
    return p == NULL;
  }
  static bool
  is_deleted (const value_range *p)
  {
    return p == reinterpret_cast<const value_range *> (1);
  }
  static void
  mark_deleted (value_range *&p)
  {
    p = reinterpret_cast<value_range *> (1);
  }
};

/* Hash table to avoid repeated allocations of equal value_ranges.  */
static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;

/* Holders of ipa cgraph hooks: */
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");

/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize)
	 || !opt_for_fn (node->decl, flag_ipa_cp);
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
			    tree ptree)
{
  int i, count;

  count = vec_safe_length (descriptors);
  for (i = 0; i < count; i++)
    if ((*descriptors)[i].decl_or_type == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (class ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}

/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor, va_gc> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl_or_type = parm;
      unsigned int cost = estimate_move_cost (TREE_TYPE (parm), true);
      descriptors[param_num].move_cost = cost;
      /* Watch overflow, move_cost is a bitfield.  */
      gcc_checking_assert (cost == descriptors[param_num].move_cost);
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Dump the Ith formal parameter of the function corresponding to INFO into
   FILE.  Note there is no setter function as the descriptor array is built
   just once using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, class ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if ((*info->descriptors)[i].decl_or_type)
    {
      fprintf (file, " ");
      print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
    }
}

/* If necessary, allocate vector of parameter descriptors in info of NODE.
   Return true if they were allocated, false if not.  */

static bool
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  class ipa_node_params *info = IPA_NODE_REF_GET_CREATE (node);

  if (!info->descriptors && param_count)
    {
      vec_safe_grow_cleared (info->descriptors, param_count);
      return true;
    }
  else
    return false;
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  class ipa_node_params *info = IPA_NODE_REF_GET_CREATE (node);

  if (!info->descriptors
      && ipa_alloc_node_params (node, count_formal_params (node->decl)))
    ipa_populate_param_decls (node, *info->descriptors);
}

/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, " param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f, jump_func->value.pass_through.operand);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, " Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value);
		}
	      fprintf (f, "\n");
	    }
	}

      class ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, " Context: ");
	  ctx->dump (dump_file);
	}

      if (jump_func->bits)
	{
	  fprintf (f, " value: ");
	  print_hex (jump_func->bits->value, f);
	  fprintf (f, ", mask: ");
	  print_hex (jump_func->bits->mask, f);
	  fprintf (f, "\n");
	}
      else
	fprintf (f, " Unknown bits\n");

      if (jump_func->m_vr)
	{
	  fprintf (f, " VR ");
	  fprintf (f, "%s[",
		   (jump_func->m_vr->kind () == VR_ANTI_RANGE) ? "~" : "");
	  print_decs (wi::to_wide (jump_func->m_vr->min ()), f);
	  fprintf (f, ", ");
	  print_decs (wi::to_wide (jump_func->m_vr->max ()), f);
	  fprintf (f, "]\n");
	}
      else
	fprintf (f, " Unknown VR\n");
    }
}

/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, " Jump functions of caller %s:\n", node->dump_name ());
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, " callsite %s -> %s : \n",
	       node->dump_name (),
	       cs->callee->dump_name ());
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      class cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, " indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by_value");
      else
	fprintf (f, " indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

/* Set JFUNC to be a jump function recording that we know nothing about the
   actual argument.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
  jfunc->bits = NULL;
  jfunc->m_vr = NULL;
}

/* Set DST to be a copy of another jump function SRC (to be used by the jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);
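
  /* Only addresses of functions get a reference description: its REFCOUNT
     and NEXT_DUPLICATE fields let later IPA passes track uses of the
     function's address across edge clones and drop the underlying cgraph
     reference once it is no longer needed.  */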
  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}

/* Set JFUNC to be a simple pass-through jump function.  */
static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}

/* Set JFUNC to be a unary pass through jump function.  */

static void
ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}

/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can
   try to derive the new type.  That is enough and we can stop, we will never
   see the calls into constructors of sub-objects in this code.  Therefore we
   can safely ignore all call statements that we traverse.  */
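
/* As an illustrative sketch (not lifted from any testcase), for

     struct A { virtual void f (); };
     struct B : A { B (); virtual void f (); };

   the constructor is assumed to be structured as:

     B::B (struct B *this)
     {
       A::A (this);                 // 1) ctors of ancestor sub-objects
       this->_vptr = &_ZTV1B + N;   // 2) VMT pointers set for this type
       ...user code...              // 3) may call virtual functions
     }
*/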

static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_ref_base_and_extent to
	     find if there is a field corresponding to the offset and if so,
	     proceed almost like if it was a component ref.  */
	}
    }
  return true;
}

/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}

/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return true if the dynamic type may change
   between the beginning of the function and the invocation of CALL.

   Generally functions are not allowed to change the dynamic type of such
   instances, but they may call destructors.  We assume that methods cannot
   destroy the THIS pointer.  Also, as a special case, constructors and
   destructors may change the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     such changes require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inlined cdtor is actually working on ARG, but we don't have
     an easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know the type
     of the instance, match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call);
	       block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (ipa_func_body_info *fbi, tree arg,
				       tree base, tree comp_type, gcall *call,
				       struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  int walked
    = walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
			  &tci, NULL, NULL, fbi->aa_walk_budget + 1);

  if (walked >= 0 && !tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (ipa_func_body_info *fbi, tree arg, tree base,
		    tree comp_type, gcall *call, struct ipa_jump_func *jfunc,
		    HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (fbi, arg, base, comp_type,
						call, jfunc, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (ipa_func_body_info *fbi, tree arg, tree comp_type,
			gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;
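
  /* Manufacture a zero-offset dereference of ARG so that the generic
     memory-write scan in detect_type_change_from_memory_writes can treat it
     like any other memory access.  */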
  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (fbi, arg, arg, comp_type,
						call, jfunc, 0);
}

/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}

/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}

/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but which does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  if (TREE_READONLY (base))
    return true;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  if (paa->parm_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget + 1);
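  /* A negative result means the walk ran out of budget; be conservative,
     treat the parameter as modified and stop spending budget on any further
     alias walks in this function.  */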
  if (walked < 0)
    {
      modified = true;
      if (fbi)
	fbi->aa_walk_budget = 0;
    }
  else if (fbi)
    fbi->aa_walk_budget -= walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}

/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
			    vec<ipa_param_descriptor, va_gc> *descriptors,
			    gimple *stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}

/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  if (paa->ref_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget + 1);
  if (walked < 0)
    {
      modified = true;
      fbi->aa_walk_budget = 0;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (modified)
    paa->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm)))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget + 1);
  if (walked < 0)
    {
      fbi->aa_walk_budget = 0;
      modified = true;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading
   data from an aggregate passed as a parameter.

   The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it
   returns false if it cannot prove that the value has not been modified
   before the load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will
   return true even if it cannot prove the value has not been modified, in
   that case it will store false to *GUARANTEED_UNMODIFIED, otherwise it will
   store true there.

   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If the function returns
   true, *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index,
   offset within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor, va_gc> *descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, poly_int64 *size_p,
			bool *by_ref_p, bool *guaranteed_unmodified)
{
  int index;
  HOST_WIDE_INT size;
  bool reverse;
  tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);

  if (!base)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  if (guaranteed_unmodified)
	    *guaranteed_unmodified = true;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	   void (*<T2e4>) (struct S *) D.1867;
	   struct S * p.1;

	   <bb 2>:
	   p.1_1 = p;
	   D.1867_2 = p.1_1->f;
	   D.1867_2 ();
	   gdp = &p;
	 */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0)
    {
      bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
      if (!data_preserved && !guaranteed_unmodified)
	return false;

      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      if (guaranteed_unmodified)
	*guaranteed_unmodified = data_preserved;
      return true;
    }
  return false;
}

/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
	int a.0;

	a.0_2 = a;
	bar (a.0_2);
      }

   2) The passed value can be described by a simple arithmetic pass-through
   jump function. E.g.

      foo (int a)
      {
	int D.2064;

	D.2064_4 = a.1(D) + 4;
	bar (D.2064_4);
      }

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
	int a.0;
	int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);
      }

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);
     }

   INFO is the structure describing individual parameters across different
   stages of IPA optimizations.  PARMS_AINFO contains the information that is
   only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  class ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      switch (gimple_assign_rhs_class (stmt))
	{
	case GIMPLE_BINARY_RHS:
	  {
	    tree op2 = gimple_assign_rhs2 (stmt);
	    if (!is_gimple_ip_invariant (op2)
		|| ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
		     != tcc_comparison)
		    && !useless_type_conversion_p (TREE_TYPE (name),
						   TREE_TYPE (op1))))
	      return;

	    ipa_set_jf_arith_pass_through (jfunc, index, op2,
					   gimple_assign_rhs_code (stmt));
	    break;
	  }
	case GIMPLE_SINGLE_RHS:
	  {
	    bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
						       tc_ssa);
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	    break;
	  }
	case GIMPLE_UNARY_RHS:
	  if (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
	    ipa_set_jf_unary_pass_through (jfunc, index,
					   gimple_assign_rhs_code (stmt));
	default:;
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
  offset_int mem_offset;
  if (!base
      || TREE_CODE (base) != MEM_REF
      || !mem_ref_offset (base).is_constant (&mem_offset))
    return;
  offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success,
   the whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The
   whole RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);

  offset_int mem_offset;
  if (!expr
      || TREE_CODE (expr) != MEM_REF
      || !mem_ref_offset (expr).is_constant (&mem_offset))
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}

/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

   if (obj_2(D) != 0B)
     goto <bb 3>;
   else
     goto <bb 4>;

   <bb 3>:
   iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
   # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
   D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
   return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    class ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}

/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */
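
/* For instance (an illustrative sketch of the common Itanium C++ ABI layout,
   not a definition from this file), a pointer to member function is passed
   as a record like

     struct
     {
       void (*__pfn) ();   // method pointer or vtable index
       long __delta;       // this-pointer adjustment
     };

   and the checks below look for exactly such a pair of fields.  */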

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}

/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}

/* Simple linked list, describing known contents of an aggregate before
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};

/* Add a known content item into a linked list of ipa_known_agg_contents_list
   structures, in which all elements are sorted ascendingly by offset.  */

static inline void
add_to_agg_contents_list (struct ipa_known_agg_contents_list **plist,
			  struct ipa_known_agg_contents_list *item)
{
  struct ipa_known_agg_contents_list *list = *plist;

  for (; list; list = list->next)
    {
      if (list->offset >= item->offset)
	break;

      plist = &list->next;
    }

  item->next = list;
  *plist = item;
}

/* Check whether a given known content is clobbered by certain element in
   a linked list of ipa_known_agg_contents_list.  */

static inline bool
clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list *list,
				struct ipa_known_agg_contents_list *item)
{
  for (; list; list = list->next)
    {
      if (list->offset >= item->offset)
	return list->offset < item->offset + item->size;

      if (list->offset + list->size > item->offset)
	return true;
    }

  return false;
}

/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that offset of the passed argument
   is ARG_OFFSET and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int const_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
	{
	  struct ipa_agg_jf_item item;
	  item.offset = list->offset - arg_offset;
	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	  item.value = unshare_expr_without_location (list->constant);
	  jfunc->agg.items->quick_push (item);
	}
      list = list->next;
    }
}

/* If STMT is a memory store to the object whose address is BASE, extract
   information (offset, size, and value) into CONTENT, and return true,
   otherwise we conservatively assume the whole object is modified with
   unknown content, and return false.  CHECK_REF means that access to object
   is expected to be in form of MEM_REF expression.  */

static bool
extract_mem_content (gimple *stmt, tree base, bool check_ref,
		     struct ipa_known_agg_contents_list *content)
{
  HOST_WIDE_INT lhs_offset, lhs_size;
  tree lhs, rhs, lhs_base;
  bool reverse;

  if (!gimple_assign_single_p (stmt))
    return false;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
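
  /* Punt on stores into bit-fields: their offsets and sizes need not be
     byte-aligned, which the contents-list representation assumes.  */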
  if (!is_gimple_reg_type (TREE_TYPE (rhs))
      || TREE_CODE (lhs) == BIT_FIELD_REF
      || contains_bitfld_component_ref_p (lhs))
    return false;

  lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset,
					  &lhs_size, &reverse);
  if (!lhs_base)
    return false;

  if (check_ref)
    {
      if (TREE_CODE (lhs_base) != MEM_REF
	  || TREE_OPERAND (lhs_base, 0) != base
	  || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	return false;
    }
  else if (lhs_base != base)
    return false;

  rhs = get_ssa_def_if_simple_copy (rhs);

  content->size = lhs_size;
  content->offset = lhs_offset;
  content->constant = is_gimple_ip_invariant (rhs) ? rhs : NULL_TREE;
  content->next = NULL;

  return true;
}

/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  AA_WALK_BUDGET_P points to the limit on the number of
   statements we allow get_continuation_for_phi to examine.  */

static void
determine_known_aggregate_parts (gcall *call, tree arg,
				 tree arg_type,
				 struct ipa_jump_func *jfunc,
				 unsigned *aa_walk_budget_p)
{
  struct ipa_known_agg_contents_list *list = NULL, *all_list = NULL;
  bitmap visited = NULL;
  int item_count = 0, const_count = 0;
  int ipa_max_agg_items = PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS);
  HOST_WIDE_INT arg_offset, arg_size;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  if (ipa_max_agg_items == 0)
    return;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type)))
	      || !POINTER_TYPE_P (TREE_TYPE (arg)))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  bool reverse;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
						  &arg_size, &reverse);
	  if (!arg_base)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      bool reverse;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
					      &arg_size, &reverse);
      if (!arg_base)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage traverses the virtual SSA web backwards starting from the
     call statement, only looks at individual dominating virtual operands
     (their definitions dominate the call), and as long as it is confident
     that the content of the aggregate is affected by the definition of the
     virtual operand, it builds a sorted linked list of ipa_agg_jf_list
     describing that.  */

  for (tree dom_vuse = gimple_vuse (call); dom_vuse;)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (dom_vuse);

      if (gimple_code (stmt) == GIMPLE_PHI)
	{
	  dom_vuse = get_continuation_for_phi (stmt, &r, true,
					       *aa_walk_budget_p,
					       &visited, false, NULL, NULL);
	  continue;
	}

      if (stmt_may_clobber_ref_p_1 (stmt, &r))
	{
	  struct ipa_known_agg_contents_list *content
	    = XALLOCA (struct ipa_known_agg_contents_list);

	  if (!extract_mem_content (stmt, arg_base, check_ref, content))
	    break;

	  /* Now we get a dominating virtual operand, and need to check
	     whether its value is clobbered by any other dominating one.  */
	  if (content->constant
	      && !clobber_by_agg_contents_list_p (all_list, content))
	    {
	      struct ipa_known_agg_contents_list *copy
		= XALLOCA (struct ipa_known_agg_contents_list);

	      /* Add to the list consisting of only dominating virtual
		 operands, whose definitions can finally reach the call.  */
	      add_to_agg_contents_list (&list, (*copy = *content, copy));

	      if (++const_count == ipa_max_agg_items)
		break;
	    }

	  /* Add to the list consisting of all dominating virtual operands.  */
	  add_to_agg_contents_list (&all_list, content);

	  if (++item_count == 2 * ipa_max_agg_items)
	    break;
	}
      dom_vuse = gimple_vuse (stmt);
    }

  if (visited)
    BITMAP_FREE (visited);

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}

/* Return the Ith param type of callee associated with call graph
   edge E.  */

tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
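  /* The function type carried no (or not enough) argument types, e.g. for an
     unprototyped K&R-style declaration; fall back to the callee's
     DECL_ARGUMENTS if we have a callee at all.  */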
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}

/* Return ipa_bits with VALUE and MASK values, which can be either a newly
   allocated structure or a previously existing one shared with other jump
   functions and/or transformation summaries.  */

ipa_bits *
ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
{
  ipa_bits tmp;
  tmp.value = value;
  tmp.mask = mask;

  ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
  if (*slot)
    return *slot;

  ipa_bits *res = ggc_alloc<ipa_bits> ();
  res->value = value;
  res->mask = mask;
  *slot = res;

  return res;
}

/* Assign to JF a pointer to an ipa_bits structure with VALUE and MASK.  Use
   the hash table in order to avoid creating multiple same ipa_bits
   structures.  */

static void
ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
		    const widest_int &mask)
{
  jf->bits = ipa_get_ipa_bits_for_value (value, mask);
}

/* Return a pointer to a value_range just like *TMP, but either find it in
   ipa_vr_hash_table or allocate it in GC memory.  TMP->equiv must be NULL.  */

static value_range *
ipa_get_value_range (value_range *tmp)
{
  value_range **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
  if (*slot)
    return *slot;

  value_range *vr = ggc_alloc<value_range> ();
  *vr = *tmp;
  *slot = vr;

  return vr;
}

/* Return a pointer to a value range consisting of TYPE, MIN, MAX and an empty
   equiv set.  Use the hash table in order to avoid creating multiple same
   copies of value_ranges.  */

static value_range *
ipa_get_value_range (enum value_range_kind type, tree min, tree max)
{
  value_range tmp (type, min, max);
  return ipa_get_value_range (&tmp);
}

/* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
   a NULL equiv bitmap.  Use the hash table in order to avoid creating multiple
   same value_range structures.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_kind type,
		  tree min, tree max)
{
  jf->m_vr = ipa_get_value_range (type, min, max);
}

/* Assign to JF a pointer to a value_range just like TMP but either fetch a
   copy from ipa_vr_hash_table or allocate a new one in GC memory.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, value_range *tmp)
{
  jf->m_vr = ipa_get_value_range (tmp);
}

/* Compute jump functions for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  class ipa_node_params *info = IPA_NODE_REF (cs->caller);
  class ipa_edge_args *args = IPA_EDGE_REF_GET_CREATE (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  class ipa_polymorphic_call_context context (cs->caller->decl,
						      arg, cs->call_stmt,
						      &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt,
				    &fbi->aa_walk_budget);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  bool addr_nonzero = false;
	  bool strict_overflow = false;

	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && get_ptr_nonnull (arg))
	    addr_nonzero = true;
	  else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
	    addr_nonzero = true;

	  if (addr_nonzero)
	    {
	      tree z = build_int_cst (TREE_TYPE (arg), 0);
	      ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
	    }
	  else
	    gcc_assert (!jfunc->m_vr);
	}
      else
	{
	  wide_int min, max;
	  value_range_kind type;
	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && (type = get_range_info (arg, &min, &max))
	      && (type == VR_RANGE || type == VR_ANTI_RANGE))
	    {
	      value_range resvr;
	      value_range tmpvr (type,
				 wide_int_to_tree (TREE_TYPE (arg), min),
				 wide_int_to_tree (TREE_TYPE (arg), max));
	      range_fold_unary_expr (&resvr, NOP_EXPR, param_type,
				     &tmpvr, TREE_TYPE (arg));
	      if (!resvr.undefined_p () && !resvr.varying_p ())
		ipa_set_jfunc_vr (jfunc, &resvr);
	      else
		gcc_assert (!jfunc->m_vr);
	    }
	  else
	    gcc_assert (!jfunc->m_vr);
	}

      if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
	  && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
	{
	  if (TREE_CODE (arg) == SSA_NAME)
	    ipa_set_jfunc_bits (jfunc, 0,
				widest_int::from (get_nonzero_bits (arg),
						  TYPE_SIGN (TREE_TYPE (arg))));
	  else
	    ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
	}
      else if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  unsigned HOST_WIDE_INT bitpos;
	  unsigned align;

	  get_pointer_alignment_1 (arg, &align, &bitpos);
1953 widest_int mask = wi::bit_and_not
1954 (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
1955 align / BITS_PER_UNIT - 1);
1956 widest_int value = bitpos / BITS_PER_UNIT;
1957 ipa_set_jfunc_bits (jfunc, value, mask);
1959 else
1960 gcc_assert (!jfunc->bits);
1962 if (is_gimple_ip_invariant (arg)
1963 || (VAR_P (arg)
1964 && is_global_var (arg)
1965 && TREE_READONLY (arg)))
1966 ipa_set_jf_constant (jfunc, arg, cs);
1967 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1968 && TREE_CODE (arg) == PARM_DECL)
1970 int index = ipa_get_param_decl_index (info, arg);
1972 gcc_assert (index >= 0);
1973 /* Aggregate passed by value, check for pass-through, otherwise we
1974 will attempt to fill in aggregate contents later in this
1975 loop. */
1976 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1978 ipa_set_jf_simple_pass_through (jfunc, index, false);
1979 continue;
1982 else if (TREE_CODE (arg) == SSA_NAME)
1984 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1986 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1987 if (index >= 0)
1989 bool agg_p;
1990 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1991 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1994 else
1996 gimple *stmt = SSA_NAME_DEF_STMT (arg);
1997 if (is_gimple_assign (stmt))
1998 compute_complex_assign_jump_func (fbi, info, jfunc,
1999 call, stmt, arg, param_type);
2000 else if (gimple_code (stmt) == GIMPLE_PHI)
2001 compute_complex_ancestor_jump_func (fbi, info, jfunc,
2002 call,
2003 as_a <gphi *> (stmt));
2007 /* If ARG is a pointer, we cannot use its type to determine the type of
2008 aggregate passed (because type conversions are ignored in gimple). Usually
2009 we can safely get the type from the function declaration, but with K&R
2010 prototypes or variadic functions we can try our luck with the type of the
2011 pointer passed. TODO: Since we look for actual initialization of the memory
2012 object, we may be better off working out the type based on the memory stores we find. */
2013 if (!param_type)
2014 param_type = TREE_TYPE (arg);
2016 if ((jfunc->type != IPA_JF_PASS_THROUGH
2017 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
2018 && (jfunc->type != IPA_JF_ANCESTOR
2019 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
2020 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
2021 || POINTER_TYPE_P (param_type)))
2022 determine_known_aggregate_parts (call, arg, param_type, jfunc,
2023 &fbi->aa_walk_budget);
2025 if (!useful_context)
2026 vec_free (args->polymorphic_call_contexts);
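/* To illustrate the bits jump function computed above for pointer
   arguments: if get_pointer_alignment_1 yields ALIGN == 64 and BITPOS == 16
   (the pointer is known to be 2 bytes past an 8-byte boundary), then
   VALUE == 16 / 8 == 2 and MASK has its low three bits cleared, i.e. the
   low three bits of the pointer are known to be equal to 2.  */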
2029 /* Compute jump functions for all edges - both direct and indirect - outgoing
2030 from BB. */
2032 static void
2033 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
2035 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2036 int i;
2037 struct cgraph_edge *cs;
2039 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
2041 struct cgraph_node *callee = cs->callee;
2043 if (callee)
2045 callee = callee->ultimate_alias_target ();
2046 /* We do not need to bother analyzing calls to unknown functions
2047 unless they may become known during lto/whopr. */
2048 if (!callee->definition && !flag_lto)
2049 continue;
2051 ipa_compute_jump_functions_for_edge (fbi, cs);
2055 /* If STMT looks like a statement loading a value from a member pointer formal
2056 parameter, return that parameter and store the offset of the field into
2057 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2058 might be clobbered). If USE_DELTA, then we look for a use of the delta
2059 field rather than the pfn. */
2061 static tree
2062 ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
2063 HOST_WIDE_INT *offset_p)
2065 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2067 if (!gimple_assign_single_p (stmt))
2068 return NULL_TREE;
2070 rhs = gimple_assign_rhs1 (stmt);
2071 if (TREE_CODE (rhs) == COMPONENT_REF)
2073 ref_field = TREE_OPERAND (rhs, 1);
2074 rhs = TREE_OPERAND (rhs, 0);
2076 else
2077 ref_field = NULL_TREE;
2078 if (TREE_CODE (rhs) != MEM_REF)
2079 return NULL_TREE;
2080 rec = TREE_OPERAND (rhs, 0);
2081 if (TREE_CODE (rec) != ADDR_EXPR)
2082 return NULL_TREE;
2083 rec = TREE_OPERAND (rec, 0);
2084 if (TREE_CODE (rec) != PARM_DECL
2085 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2086 return NULL_TREE;
2087 ref_offset = TREE_OPERAND (rhs, 1);
2089 if (use_delta)
2090 fld = delta_field;
2091 else
2092 fld = ptr_field;
2093 if (offset_p)
2094 *offset_p = int_bit_position (fld);
2096 if (ref_field)
2098 if (integer_nonzerop (ref_offset))
2099 return NULL_TREE;
2100 return ref_field == fld ? rec : NULL_TREE;
2102 else
2103 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2104 : NULL_TREE;
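/* For example, given a member-pointer PARM_DECL F with fields __pfn and
   __delta, a load such as

     f$__pfn_24 = MEM[(struct *)&f];

   is recognized when USE_DELTA is false, F is returned and *OFFSET_P is
   set to the bit position of the __pfn field.  */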
2107 /* Returns true iff T is an SSA_NAME defined by a statement. */
2109 static bool
2110 ipa_is_ssa_with_stmt_def (tree t)
2112 if (TREE_CODE (t) == SSA_NAME
2113 && !SSA_NAME_IS_DEFAULT_DEF (t))
2114 return true;
2115 else
2116 return false;
2119 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2120 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2121 indirect call graph edge.
2122 If POLYMORPHIC is true, record it as the destination of a polymorphic call. */
2124 static struct cgraph_edge *
2125 ipa_note_param_call (struct cgraph_node *node, int param_index,
2126 gcall *stmt, bool polymorphic)
2128 struct cgraph_edge *cs;
2130 cs = node->get_edge (stmt);
2131 cs->indirect_info->param_index = param_index;
2132 cs->indirect_info->agg_contents = 0;
2133 cs->indirect_info->member_ptr = 0;
2134 cs->indirect_info->guaranteed_unmodified = 0;
2135 ipa_set_param_used_by_indirect_call (IPA_NODE_REF (node),
2136 param_index, true);
2137 if (cs->indirect_info->polymorphic || polymorphic)
2138 ipa_set_param_used_by_polymorphic_call
2139 (IPA_NODE_REF (node), param_index, true);
2140 return cs;
2143 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
2144 (described by INFO). PARMS_AINFO is a pointer to a vector containing
2145 intermediate information about each formal parameter. Currently it checks
2146 whether the call calls a pointer that is a formal parameter and if so, the
2147 parameter is marked with the called flag and an indirect call graph edge
2148 describing the call is created. This is very simple for ordinary pointers
2149 represented in SSA but not-so-nice when it comes to member pointers. The
2150 ugly part of this function does nothing more than trying to match the
2151 pattern of such a call. An example of such a pattern is the gimple dump
2152 below, the call is on the last line:
2154 <bb 2>:
2155 f$__delta_5 = f.__delta;
2156 f$__pfn_24 = f.__pfn;
2159 <bb 2>:
2160 f$__delta_5 = MEM[(struct *)&f];
2161 f$__pfn_24 = MEM[(struct *)&f + 4B];
2163 and a few lines below:
2165 <bb 5>
2166 D.2496_3 = (int) f$__pfn_24;
2167 D.2497_4 = D.2496_3 & 1;
2168 if (D.2497_4 != 0)
2169 goto <bb 3>;
2170 else
2171 goto <bb 4>;
2173 <bb 6>:
2174 D.2500_7 = (unsigned int) f$__delta_5;
2175 D.2501_8 = &S + D.2500_7;
2176 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2177 D.2503_10 = *D.2502_9;
2178 D.2504_12 = f$__pfn_24 + -1;
2179 D.2505_13 = (unsigned int) D.2504_12;
2180 D.2506_14 = D.2503_10 + D.2505_13;
2181 D.2507_15 = *D.2506_14;
2182 iftmp.11_16 = (String:: *) D.2507_15;
2184 <bb 7>:
2185 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2186 D.2500_19 = (unsigned int) f$__delta_5;
2187 D.2508_20 = &S + D.2500_19;
2188 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2190 Such patterns are the results of simple calls to a member pointer:
2192 int doprinting (int (MyString::* f)(int) const)
2194 MyString S ("somestring");
2196 return (S.*f)(4);
2199 Moreover, the function also looks for called pointers loaded from aggregates
2200 passed by value or reference. */
2202 static void
2203 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
2204 tree target)
2206 class ipa_node_params *info = fbi->info;
2207 HOST_WIDE_INT offset;
2208 bool by_ref;
2210 if (SSA_NAME_IS_DEFAULT_DEF (target))
2212 tree var = SSA_NAME_VAR (target);
2213 int index = ipa_get_param_decl_index (info, var);
2214 if (index >= 0)
2215 ipa_note_param_call (fbi->node, index, call, false);
2216 return;
2219 int index;
2220 gimple *def = SSA_NAME_DEF_STMT (target);
2221 bool guaranteed_unmodified;
2222 if (gimple_assign_single_p (def)
2223 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2224 gimple_assign_rhs1 (def), &index, &offset,
2225 NULL, &by_ref, &guaranteed_unmodified))
2227 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2228 call, false);
2229 cs->indirect_info->offset = offset;
2230 cs->indirect_info->agg_contents = 1;
2231 cs->indirect_info->by_ref = by_ref;
2232 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
2233 return;
2236 /* Now we need to try to match the complex pattern of calling a member
2237 pointer. */
2238 if (gimple_code (def) != GIMPLE_PHI
2239 || gimple_phi_num_args (def) != 2
2240 || !POINTER_TYPE_P (TREE_TYPE (target))
2241 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2242 return;
2244 /* First, we need to check whether one of these is a load from a member
2245 pointer that is a parameter to this function. */
2246 tree n1 = PHI_ARG_DEF (def, 0);
2247 tree n2 = PHI_ARG_DEF (def, 1);
2248 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2249 return;
2250 gimple *d1 = SSA_NAME_DEF_STMT (n1);
2251 gimple *d2 = SSA_NAME_DEF_STMT (n2);
2253 tree rec;
2254 basic_block bb, virt_bb;
2255 basic_block join = gimple_bb (def);
2256 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2258 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2259 return;
2261 bb = EDGE_PRED (join, 0)->src;
2262 virt_bb = gimple_bb (d2);
2264 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2266 bb = EDGE_PRED (join, 1)->src;
2267 virt_bb = gimple_bb (d1);
2269 else
2270 return;
2272 /* Second, we need to check that the basic blocks are laid out in the way
2273 corresponding to the pattern. */
2275 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2276 || single_pred (virt_bb) != bb
2277 || single_succ (virt_bb) != join)
2278 return;
2280 /* Third, let's see that the branching is done depending on the least
2281 significant bit of the pfn. */
2283 gimple *branch = last_stmt (bb);
2284 if (!branch || gimple_code (branch) != GIMPLE_COND)
2285 return;
2287 if ((gimple_cond_code (branch) != NE_EXPR
2288 && gimple_cond_code (branch) != EQ_EXPR)
2289 || !integer_zerop (gimple_cond_rhs (branch)))
2290 return;
2292 tree cond = gimple_cond_lhs (branch);
2293 if (!ipa_is_ssa_with_stmt_def (cond))
2294 return;
2296 def = SSA_NAME_DEF_STMT (cond);
2297 if (!is_gimple_assign (def)
2298 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2299 || !integer_onep (gimple_assign_rhs2 (def)))
2300 return;
2302 cond = gimple_assign_rhs1 (def);
2303 if (!ipa_is_ssa_with_stmt_def (cond))
2304 return;
2306 def = SSA_NAME_DEF_STMT (cond);
2308 if (is_gimple_assign (def)
2309 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2311 cond = gimple_assign_rhs1 (def);
2312 if (!ipa_is_ssa_with_stmt_def (cond))
2313 return;
2314 def = SSA_NAME_DEF_STMT (cond);
2317 tree rec2;
2318 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2319 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2320 == ptrmemfunc_vbit_in_delta),
2321 NULL);
2322 if (rec != rec2)
2323 return;
2325 index = ipa_get_param_decl_index (info, rec);
2326 if (index >= 0
2327 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2329 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2330 call, false);
2331 cs->indirect_info->offset = offset;
2332 cs->indirect_info->agg_contents = 1;
2333 cs->indirect_info->member_ptr = 1;
2334 cs->indirect_info->guaranteed_unmodified = 1;
2337 return;
2340 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2341 object referenced in the expression is a formal parameter of the caller
2342 FBI->node (described by FBI->info), create a call note for the
2343 statement. */
2345 static void
2346 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2347 gcall *call, tree target)
2349 tree obj = OBJ_TYPE_REF_OBJECT (target);
2350 int index;
2351 HOST_WIDE_INT anc_offset;
2353 if (!flag_devirtualize)
2354 return;
2356 if (TREE_CODE (obj) != SSA_NAME)
2357 return;
2359 class ipa_node_params *info = fbi->info;
2360 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2362 struct ipa_jump_func jfunc;
2363 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2364 return;
2366 anc_offset = 0;
2367 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2368 gcc_assert (index >= 0);
2369 if (detect_type_change_ssa (fbi, obj, obj_type_ref_class (target),
2370 call, &jfunc))
2371 return;
2373 else
2375 struct ipa_jump_func jfunc;
2376 gimple *stmt = SSA_NAME_DEF_STMT (obj);
2377 tree expr;
2379 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2380 if (!expr)
2381 return;
2382 index = ipa_get_param_decl_index (info,
2383 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2384 gcc_assert (index >= 0);
2385 if (detect_type_change (fbi, obj, expr, obj_type_ref_class (target),
2386 call, &jfunc, anc_offset))
2387 return;
2390 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2391 call, true);
2392 class cgraph_indirect_call_info *ii = cs->indirect_info;
2393 ii->offset = anc_offset;
2394 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2395 ii->otr_type = obj_type_ref_class (target);
2396 ii->polymorphic = 1;
2399 /* Analyze call statement CALL to determine whether and how it utilizes formal parameters
2400 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
2401 containing intermediate information about each formal parameter. */
2403 static void
2404 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2406 tree target = gimple_call_fn (call);
2408 if (!target
2409 || (TREE_CODE (target) != SSA_NAME
2410 && !virtual_method_call_p (target)))
2411 return;
2413 struct cgraph_edge *cs = fbi->node->get_edge (call);
2414 /* If we previously turned the call into a direct call, there is
2415 no need to analyze. */
2416 if (cs && !cs->indirect_unknown_callee)
2417 return;
2419 if (cs->indirect_info->polymorphic && flag_devirtualize)
2421 tree instance;
2422 tree target = gimple_call_fn (call);
2423 ipa_polymorphic_call_context context (current_function_decl,
2424 target, call, &instance);
2426 gcc_checking_assert (cs->indirect_info->otr_type
2427 == obj_type_ref_class (target));
2428 gcc_checking_assert (cs->indirect_info->otr_token
2429 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2431 cs->indirect_info->vptr_changed
2432 = !context.get_dynamic_type (instance,
2433 OBJ_TYPE_REF_OBJECT (target),
2434 obj_type_ref_class (target), call,
2435 &fbi->aa_walk_budget);
2436 cs->indirect_info->context = context;
2439 if (TREE_CODE (target) == SSA_NAME)
2440 ipa_analyze_indirect_call_uses (fbi, call, target);
2441 else if (virtual_method_call_p (target))
2442 ipa_analyze_virtual_call_uses (fbi, call, target);
2446 /* Analyze the call statement STMT with respect to formal parameters (described
2447 in INFO) of the caller given by FBI->NODE. Currently it only checks whether
2448 formal parameters are called. */
2450 static void
2451 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2453 if (is_gimple_call (stmt))
2454 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2457 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2458 If OP is a parameter declaration, mark it as used in the info structure
2459 passed in DATA. */
2461 static bool
2462 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2464 class ipa_node_params *info = (class ipa_node_params *) data;
2466 op = get_base_address (op);
2467 if (op
2468 && TREE_CODE (op) == PARM_DECL)
2470 int index = ipa_get_param_decl_index (info, op);
2471 gcc_assert (index >= 0);
2472 ipa_set_param_used (info, index, true);
2475 return false;
2478 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2479 the findings in various structures of the associated ipa_node_params
2480 structure, such as parameter flags, notes etc. FBI holds various data about
2481 the function being analyzed. */
2483 static void
2484 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2486 gimple_stmt_iterator gsi;
2487 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2489 gimple *stmt = gsi_stmt (gsi);
2491 if (is_gimple_debug (stmt))
2492 continue;
2494 ipa_analyze_stmt_uses (fbi, stmt);
2495 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2496 visit_ref_for_mod_analysis,
2497 visit_ref_for_mod_analysis,
2498 visit_ref_for_mod_analysis);
2500 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2501 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2502 visit_ref_for_mod_analysis,
2503 visit_ref_for_mod_analysis,
2504 visit_ref_for_mod_analysis);
2507 /* Calculate controlled uses of parameters of NODE. */
2509 static void
2510 ipa_analyze_controlled_uses (struct cgraph_node *node)
2512 class ipa_node_params *info = IPA_NODE_REF (node);
2514 for (int i = 0; i < ipa_get_param_count (info); i++)
2516 tree parm = ipa_get_param (info, i);
2517 int controlled_uses = 0;
2519 /* For SSA regs see if parameter is used. For non-SSA we compute
2520 the flag during modification analysis. */
2521 if (is_gimple_reg (parm))
2523 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2524 parm);
2525 if (ddef && !has_zero_uses (ddef))
2527 imm_use_iterator imm_iter;
2528 use_operand_p use_p;
2530 ipa_set_param_used (info, i, true);
2531 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2532 if (!is_gimple_call (USE_STMT (use_p)))
2534 if (!is_gimple_debug (USE_STMT (use_p)))
2536 controlled_uses = IPA_UNDESCRIBED_USE;
2537 break;
2540 else
2541 controlled_uses++;
2543 else
2544 controlled_uses = 0;
2546 else
2547 controlled_uses = IPA_UNDESCRIBED_USE;
2548 ipa_set_controlled_uses (info, i, controlled_uses);
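/* For example, in a hypothetical caller

     int foo (int (*cb) (int)) { return cb (1) + bar (cb); }

   (BAR being some extern function), both uses of CB's default definition
   are gimple calls, so the controlled-uses count of CB is 2.  If CB were
   additionally stored to memory or compared, the count would become
   IPA_UNDESCRIBED_USE.  */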
2552 /* Free stuff in BI. */
2554 static void
2555 free_ipa_bb_info (struct ipa_bb_info *bi)
2557 bi->cg_edges.release ();
2558 bi->param_aa_statuses.release ();
2561 /* Dominator walker driving the analysis. */
2563 class analysis_dom_walker : public dom_walker
2565 public:
2566 analysis_dom_walker (struct ipa_func_body_info *fbi)
2567 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2569 virtual edge before_dom_children (basic_block);
2571 private:
2572 struct ipa_func_body_info *m_fbi;
2575 edge
2576 analysis_dom_walker::before_dom_children (basic_block bb)
2578 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2579 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2580 return NULL;
2583 /* Release body info FBI. */
2585 void
2586 ipa_release_body_info (struct ipa_func_body_info *fbi)
2588 int i;
2589 struct ipa_bb_info *bi;
2591 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2592 free_ipa_bb_info (bi);
2593 fbi->bb_infos.release ();
2596 /* Initialize the array describing properties of formal parameters
2597 of NODE, analyze their uses and compute jump functions associated
2598 with actual arguments of calls from within NODE. */
2600 void
2601 ipa_analyze_node (struct cgraph_node *node)
2603 struct ipa_func_body_info fbi;
2604 class ipa_node_params *info;
2606 ipa_check_create_node_params ();
2607 ipa_check_create_edge_args ();
2608 info = IPA_NODE_REF_GET_CREATE (node);
2610 if (info->analysis_done)
2611 return;
2612 info->analysis_done = 1;
2614 if (ipa_func_spec_opts_forbid_analysis_p (node))
2616 for (int i = 0; i < ipa_get_param_count (info); i++)
2618 ipa_set_param_used (info, i, true);
2619 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2621 return;
2624 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2625 push_cfun (func);
2626 calculate_dominance_info (CDI_DOMINATORS);
2627 ipa_initialize_node_params (node);
2628 ipa_analyze_controlled_uses (node);
2630 fbi.node = node;
2631 fbi.info = IPA_NODE_REF (node);
2632 fbi.bb_infos = vNULL;
2633 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2634 fbi.param_count = ipa_get_param_count (info);
2635 fbi.aa_walk_budget = PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
2637 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2639 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2640 bi->cg_edges.safe_push (cs);
2643 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2645 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2646 bi->cg_edges.safe_push (cs);
2649 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2651 ipa_release_body_info (&fbi);
2652 free_dominance_info (CDI_DOMINATORS);
2653 pop_cfun ();
2656 /* Update the jump functions associated with call graph edge E when the call
2657 graph edge CS is being inlined, assuming that E->caller is already (possibly
2658 indirectly) inlined into CS->callee and that E has not been inlined. */
2660 static void
2661 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2662 struct cgraph_edge *e)
2664 class ipa_edge_args *top = IPA_EDGE_REF (cs);
2665 class ipa_edge_args *args = IPA_EDGE_REF (e);
2666 if (!args)
2667 return;
2668 int count = ipa_get_cs_argument_count (args);
2669 int i;
2671 for (i = 0; i < count; i++)
2673 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2674 if (!top)
2676 ipa_set_jf_unknown (dst);
2677 continue;
2679 class ipa_polymorphic_call_context *dst_ctx
2680 = ipa_get_ith_polymorhic_call_context (args, i);
2682 if (dst->type == IPA_JF_ANCESTOR)
2684 struct ipa_jump_func *src;
2685 int dst_fid = dst->value.ancestor.formal_id;
2686 class ipa_polymorphic_call_context *src_ctx
2687 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2689 /* A variable number of arguments can cause havoc if we try to access
2690 an argument that does not exist on the inlined edge. So make sure we
2691 don't. */
2692 if (dst_fid >= ipa_get_cs_argument_count (top))
2694 ipa_set_jf_unknown (dst);
2695 continue;
2698 src = ipa_get_ith_jump_func (top, dst_fid);
2700 if (src_ctx && !src_ctx->useless_p ())
2702 class ipa_polymorphic_call_context ctx = *src_ctx;
2704 /* TODO: Make type preserved safe WRT contexts. */
2705 if (!ipa_get_jf_ancestor_type_preserved (dst))
2706 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2707 ctx.offset_by (dst->value.ancestor.offset);
2708 if (!ctx.useless_p ())
2710 if (!dst_ctx)
2712 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2713 count);
2714 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2717 dst_ctx->combine_with (ctx);
2721 if (src->agg.items
2722 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2724 struct ipa_agg_jf_item *item;
2725 int j;
2727 /* Currently we do not produce clobber aggregate jump functions,
2728 replace with merging when we do. */
2729 gcc_assert (!dst->agg.items);
2731 dst->agg.items = vec_safe_copy (src->agg.items);
2732 dst->agg.by_ref = src->agg.by_ref;
2733 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2734 item->offset -= dst->value.ancestor.offset;
2737 if (src->type == IPA_JF_PASS_THROUGH
2738 && src->value.pass_through.operation == NOP_EXPR)
2740 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2741 dst->value.ancestor.agg_preserved &=
2742 src->value.pass_through.agg_preserved;
2744 else if (src->type == IPA_JF_ANCESTOR)
2746 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2747 dst->value.ancestor.offset += src->value.ancestor.offset;
2748 dst->value.ancestor.agg_preserved &=
2749 src->value.ancestor.agg_preserved;
2751 else
2752 ipa_set_jf_unknown (dst);
2754 else if (dst->type == IPA_JF_PASS_THROUGH)
2756 struct ipa_jump_func *src;
2757 /* We must check the range due to calls with a variable number of arguments,
2758 and we cannot combine jump functions with operations. */
2759 if (dst->value.pass_through.operation == NOP_EXPR
2760 && (top && dst->value.pass_through.formal_id
2761 < ipa_get_cs_argument_count (top)))
2763 int dst_fid = dst->value.pass_through.formal_id;
2764 src = ipa_get_ith_jump_func (top, dst_fid);
2765 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2766 class ipa_polymorphic_call_context *src_ctx
2767 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2769 if (src_ctx && !src_ctx->useless_p ())
2771 class ipa_polymorphic_call_context ctx = *src_ctx;
2773 /* TODO: Make type preserved safe WRT contexts. */
2774 if (!ipa_get_jf_pass_through_type_preserved (dst))
2775 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2776 if (!ctx.useless_p ())
2778 if (!dst_ctx)
2780 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2781 count);
2782 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2784 dst_ctx->combine_with (ctx);
2787 switch (src->type)
2789 case IPA_JF_UNKNOWN:
2790 ipa_set_jf_unknown (dst);
2791 break;
2792 case IPA_JF_CONST:
2793 ipa_set_jf_cst_copy (dst, src);
2794 break;
2796 case IPA_JF_PASS_THROUGH:
2798 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2799 enum tree_code operation;
2800 operation = ipa_get_jf_pass_through_operation (src);
2802 if (operation == NOP_EXPR)
2804 bool agg_p;
2805 agg_p = dst_agg_p
2806 && ipa_get_jf_pass_through_agg_preserved (src);
2807 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2809 else if (TREE_CODE_CLASS (operation) == tcc_unary)
2810 ipa_set_jf_unary_pass_through (dst, formal_id, operation);
2811 else
2813 tree operand = ipa_get_jf_pass_through_operand (src);
2814 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2815 operation);
2817 break;
2819 case IPA_JF_ANCESTOR:
2821 bool agg_p;
2822 agg_p = dst_agg_p
2823 && ipa_get_jf_ancestor_agg_preserved (src);
2824 ipa_set_ancestor_jf (dst,
2825 ipa_get_jf_ancestor_offset (src),
2826 ipa_get_jf_ancestor_formal_id (src),
2827 agg_p);
2828 break;
2830 default:
2831 gcc_unreachable ();
2834 if (src->agg.items
2835 && (dst_agg_p || !src->agg.by_ref))
2837 /* Currently we do not produce clobber aggregate jump
2838 functions, replace with merging when we do. */
2839 gcc_assert (!dst->agg.items);
2841 dst->agg.by_ref = src->agg.by_ref;
2842 dst->agg.items = vec_safe_copy (src->agg.items);
2845 else
2846 ipa_set_jf_unknown (dst);
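/* For example, if the jump function of E for some argument was a simple
   pass-through of E->caller's formal 2, and TOP describes argument 2 of the
   inlined call CS as a simple pass-through of CS->caller's formal 0, the
   combined jump function above becomes a simple pass-through of formal 0,
   with agg_preserved the conjunction of the two original flags.  */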
2851 /* If TARGET is an addr_expr of a function declaration, make it the
2852 (speculative) destination of an indirect edge IE and return the edge.
2853 Otherwise, return NULL. */
2855 struct cgraph_edge *
2856 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2857 bool speculative)
2859 struct cgraph_node *callee;
2860 bool unreachable = false;
2862 if (TREE_CODE (target) == ADDR_EXPR)
2863 target = TREE_OPERAND (target, 0);
2864 if (TREE_CODE (target) != FUNCTION_DECL)
2866 target = canonicalize_constructor_val (target, NULL);
2867 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2869 /* Member pointer call that goes through a VMT lookup. */
2870 if (ie->indirect_info->member_ptr
2871 /* Or if target is not an invariant expression and we do not
2872 know if it will evaluate to a function at runtime.
2873 This can happen when folding through &VAR, where &VAR
2874 is IP invariant, but VAR itself is not.
2876 TODO: Revisit this when GCC 5 is branched. It seems that
2877 member_ptr check is not needed and that we may try to fold
2878 the expression and see if VAR is readonly. */
2879 || !is_gimple_ip_invariant (target))
2881 if (dump_enabled_p ())
2883 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
2884 "discovered direct call non-invariant %s\n",
2885 ie->caller->dump_name ());
2887 return NULL;
2891 if (dump_enabled_p ())
2893 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
2894 "discovered direct call to non-function in %s, "
2895 "making it __builtin_unreachable\n",
2896 ie->caller->dump_name ());
2899 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2900 callee = cgraph_node::get_create (target);
2901 unreachable = true;
2903 else
2904 callee = cgraph_node::get (target);
2906 else
2907 callee = cgraph_node::get (target);
2909 /* Because may-edges are not explicitly represented and the vtable may be external,
2910 we may create the first reference to the object in the unit. */
2911 if (!callee || callee->inlined_to)
2914 /* We had better ensure we can refer to it.
2915 In the case of static functions we are out of luck, since we have already
2916 removed their bodies. In the case of public functions we may or may
2917 not introduce the reference. */
2918 if (!canonicalize_constructor_val (target, NULL)
2919 || !TREE_PUBLIC (target))
2921 if (dump_file)
2922 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2923 "(%s -> %s) but cannot refer to it. Giving up.\n",
2924 ie->caller->dump_name (),
2925 ie->callee->dump_name ());
2926 return NULL;
2928 callee = cgraph_node::get_create (target);
2931 /* If the edge is already speculated. */
2932 if (speculative && ie->speculative)
2934 struct cgraph_edge *e2;
2935 struct ipa_ref *ref;
2936 ie->speculative_call_info (e2, ie, ref);
2937 if (e2->callee->ultimate_alias_target ()
2938 != callee->ultimate_alias_target ())
2940 if (dump_file)
2941 fprintf (dump_file, "ipa-prop: Discovered call to a speculative "
2942 "target (%s -> %s) but the call is already "
2943 "speculated to %s. Giving up.\n",
2944 ie->caller->dump_name (), callee->dump_name (),
2945 e2->callee->dump_name ());
2947 else
2949 if (dump_file)
2950 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2951 "(%s -> %s) this agree with previous speculation.\n",
2952 ie->caller->dump_name (), callee->dump_name ());
2954 return NULL;
2957 if (!dbg_cnt (devirt))
2958 return NULL;
2960 ipa_check_create_node_params ();
2962 /* We cannot make edges to inline clones. It is a bug if someone removed
2963 the cgraph node too early. */
2964 gcc_assert (!callee->inlined_to);
2966 if (dump_file && !unreachable)
2968 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2969 "(%s -> %s), for stmt ",
2970 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2971 speculative ? "speculative" : "known",
2972 ie->caller->dump_name (),
2973 callee->dump_name ());
2974 if (ie->call_stmt)
2975 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2976 else
2977 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2979 if (dump_enabled_p ())
2981 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
2982 "converting indirect call in %s to direct call to %s\n",
2983 ie->caller->name (), callee->name ());
2985 if (!speculative)
2987 struct cgraph_edge *orig = ie;
2988 ie = ie->make_direct (callee);
2989 /* If we resolved a speculative edge, the cost is already up to date
2990 for the direct call (adjusted by inline_edge_duplication_hook). */
2991 if (ie == orig)
2993 ipa_call_summary *es = ipa_call_summaries->get (ie);
2994 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2995 - eni_size_weights.call_cost);
2996 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2997 - eni_time_weights.call_cost);
3000 else
3002 if (!callee->can_be_discarded_p ())
3004 cgraph_node *alias;
3005 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
3006 if (alias)
3007 callee = alias;
3009 /* make_speculative will update ie's cost to direct call cost. */
3010 ie = ie->make_speculative
3011 (callee, ie->count.apply_scale (8, 10));
3014 return ie;
3017 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
3018 CONSTRUCTOR and return it. Return NULL if the search fails for some
3019 reason. */
3021 static tree
3022 find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
3024 tree type = TREE_TYPE (constructor);
3025 if (TREE_CODE (type) != ARRAY_TYPE
3026 && TREE_CODE (type) != RECORD_TYPE)
3027 return NULL;
3029 unsigned ix;
3030 tree index, val;
3031 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
3033 HOST_WIDE_INT elt_offset;
3034 if (TREE_CODE (type) == ARRAY_TYPE)
3036 offset_int off;
3037 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
3038 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3040 if (index)
3042 if (TREE_CODE (index) == RANGE_EXPR)
3043 off = wi::to_offset (TREE_OPERAND (index, 0));
3044 else
3045 off = wi::to_offset (index);
3046 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
3048 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
3049 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3050 off = wi::sext (off - wi::to_offset (low_bound),
3051 TYPE_PRECISION (TREE_TYPE (index)));
3053 off *= wi::to_offset (unit_size);
3054 /* ??? Handle more than just the first index of a
3055 RANGE_EXPR. */
3057 else
3058 off = wi::to_offset (unit_size) * ix;
3060 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
3061 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
3062 continue;
3063 elt_offset = off.to_shwi ();
3065 else if (TREE_CODE (type) == RECORD_TYPE)
3067 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
3068 if (DECL_BIT_FIELD (index))
3069 continue;
3070 elt_offset = int_bit_position (index);
3072 else
3073 gcc_unreachable ();
3075 if (elt_offset > req_offset)
3076 return NULL;
3078 if (TREE_CODE (val) == CONSTRUCTOR)
3079 return find_constructor_constant_at_offset (val,
3080 req_offset - elt_offset);
3082 if (elt_offset == req_offset
3083 && is_gimple_reg_type (TREE_TYPE (val))
3084 && is_gimple_ip_invariant (val))
3085 return val;
3087 return NULL;
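/* For example, assuming a 32-bit int, searching the constructor of

     static const int arr[3] = {10, 20, 30};

   with REQ_OFFSET == 64 matches the element at index 2 and returns the
   INTEGER_CST 30, whereas an offset that does not coincide with the start
   of a scalar element yields NULL.  */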
3090 /* Check whether SCALAR could be used to look up an aggregate interprocedural
3091 invariant from a static constructor and if so, return it. Otherwise return
3092 NULL. */
3094 static tree
3095 ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
3097 if (by_ref)
3099 if (TREE_CODE (scalar) != ADDR_EXPR)
3100 return NULL;
3101 scalar = TREE_OPERAND (scalar, 0);
3104 if (!VAR_P (scalar)
3105 || !is_global_var (scalar)
3106 || !TREE_READONLY (scalar)
3107 || !DECL_INITIAL (scalar)
3108 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
3109 return NULL;
3111 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
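/* For example, for a hypothetical

     static const struct { int a; int b; } s = { 1, 2 };

   a BY_REF lookup of the ADDR_EXPR &s with OFFSET == 32 (again assuming a
   32-bit int) strips the ADDR_EXPR, verifies that S is a read-only global
   with a CONSTRUCTOR initializer and returns the INTEGER_CST 2.  */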
3114 /* Retrieve a value from aggregate jump function AGG or static initializer of
3115 SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
3116 none. BY_REF specifies whether the value has to be passed by reference or
3117 by value. If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
3118 to is set to true if the value comes from an initializer of a constant. */
3120 tree
3121 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar,
3122 HOST_WIDE_INT offset, bool by_ref,
3123 bool *from_global_constant)
3125 struct ipa_agg_jf_item *item;
3126 int i;
3128 if (scalar)
3130 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
3131 if (res)
3133 if (from_global_constant)
3134 *from_global_constant = true;
3135 return res;
3139 if (!agg
3140 || by_ref != agg->by_ref)
3141 return NULL;
3143 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
3144 if (item->offset == offset)
3146 /* Currently we do not have clobber values, return NULL for them once
3147 we do. */
3148 gcc_checking_assert (is_gimple_ip_invariant (item->value));
3149 if (from_global_constant)
3150 *from_global_constant = false;
3151 return item->value;
3153 return NULL;
3156 /* Remove a reference to SYMBOL from the list of references of a node given by
3157 reference description RDESC. Return true if the reference has been
3158 successfully found and removed. */
3160 static bool
3161 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
3163 struct ipa_ref *to_del;
3164 struct cgraph_edge *origin;
3166 origin = rdesc->cs;
3167 if (!origin)
3168 return false;
3169 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3170 origin->lto_stmt_uid);
3171 if (!to_del)
3172 return false;
3174 to_del->remove_reference ();
3175 if (dump_file)
3176 fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
3177 origin->caller->dump_name (), xstrdup_for_dump (symbol->name ()));
3178 return true;
3181 /* If JFUNC has a reference description with refcount different from
3182 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3183 NULL. JFUNC must be a constant jump function. */
3185 static struct ipa_cst_ref_desc *
3186 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3188 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3189 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3190 return rdesc;
3191 else
3192 return NULL;
3195 /* If the value of constant jump function JFUNC is an address of a function
3196 declaration, return the associated call graph node. Otherwise return
3197 NULL. */
3199 static cgraph_node *
3200 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3202 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3203 tree cst = ipa_get_jf_constant (jfunc);
3204 if (TREE_CODE (cst) != ADDR_EXPR
3205 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3206 return NULL;
3208 return cgraph_node::get (TREE_OPERAND (cst, 0));
3212 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3213 refcount and if it hits zero, remove the reference to the symbol it refers to from the caller of
3214 the edge specified in the rdesc. Return false if either the symbol or the
3215 reference could not be found, otherwise return true. */
3217 static bool
3218 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3220 struct ipa_cst_ref_desc *rdesc;
3221 if (jfunc->type == IPA_JF_CONST
3222 && (rdesc = jfunc_rdesc_usable (jfunc))
3223 && --rdesc->refcount == 0)
3225 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3226 if (!symbol)
3227 return false;
3229 return remove_described_reference (symbol, rdesc);
3231 return true;
3234 /* Try to find a destination for indirect edge IE that corresponds to a simple
3235 call or a call of a member function pointer and where the destination is a
3236 pointer formal parameter described by jump function JFUNC. TARGET_TYPE is
3237 the type of the parameter to which the result of JFUNC is passed. If it can
3238 be determined, return the newly direct edge, otherwise return NULL.
3239 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
3241 static struct cgraph_edge *
3242 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3243 struct ipa_jump_func *jfunc, tree target_type,
3244 class ipa_node_params *new_root_info)
3246 struct cgraph_edge *cs;
3247 tree target;
3248 bool agg_contents = ie->indirect_info->agg_contents;
3249 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
3250 if (agg_contents)
3252 bool from_global_constant;
3253 target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3254 ie->indirect_info->offset,
3255 ie->indirect_info->by_ref,
3256 &from_global_constant);
3257 if (target
3258 && !from_global_constant
3259 && !ie->indirect_info->guaranteed_unmodified)
3260 return NULL;
3262 else
3263 target = scalar;
3264 if (!target)
3265 return NULL;
3266 cs = ipa_make_edge_direct_to_target (ie, target);
3268 if (cs && !agg_contents)
3270 bool ok;
3271 gcc_checking_assert (cs->callee
3272 && (cs != ie
3273 || jfunc->type != IPA_JF_CONST
3274 || !cgraph_node_for_jfunc (jfunc)
3275 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3276 ok = try_decrement_rdesc_refcount (jfunc);
3277 gcc_checking_assert (ok);
3280 return cs;
3283 /* Return the target to be used in cases of impossible devirtualization. IE
3284 and target (the latter can be NULL) are dumped when dumping is enabled. */
3286 tree
3287 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3289 if (dump_file)
3291 if (target)
3292 fprintf (dump_file,
3293 "Type inconsistent devirtualization: %s->%s\n",
3294 ie->caller->dump_name (),
3295 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3296 else
3297 fprintf (dump_file,
3298 "No devirtualization target in %s\n",
3299 ie->caller->dump_name ());
3301 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3302 cgraph_node::get_create (new_target);
3303 return new_target;
3306 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3307 call based on a formal parameter which is described by jump function JFUNC
3308 and if it can be determined, make it direct and return the direct edge.
3309 Otherwise, return NULL. CTX describes the polymorphic context that the
3310 parameter the call is based on brings along with it. */
3312 static struct cgraph_edge *
3313 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3314 struct ipa_jump_func *jfunc,
3315 class ipa_polymorphic_call_context ctx)
3317 tree target = NULL;
3318 bool speculative = false;
3320 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
3321 return NULL;
3323 gcc_assert (!ie->indirect_info->by_ref);
3325 /* Try to do lookup via known virtual table pointer value. */
3326 if (!ie->indirect_info->vptr_changed
3327 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
3329 tree vtable;
3330 unsigned HOST_WIDE_INT offset;
3331 tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3332 : NULL;
3333 tree t = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3334 ie->indirect_info->offset,
3335 true);
3336 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3338 bool can_refer;
3339 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3340 vtable, offset, &can_refer);
3341 if (can_refer)
3343 if (!t
3344 || fndecl_built_in_p (t, BUILT_IN_UNREACHABLE)
3345 || !possible_polymorphic_call_target_p
3346 (ie, cgraph_node::get (t)))
3348 /* Do not speculate builtin_unreachable, it is stupid! */
3349 if (!ie->indirect_info->vptr_changed)
3350 target = ipa_impossible_devirt_target (ie, target);
3351 else
3352 target = NULL;
3354 else
3356 target = t;
3357 speculative = ie->indirect_info->vptr_changed;
3363 ipa_polymorphic_call_context ie_context (ie);
3364 vec <cgraph_node *>targets;
3365 bool final;
3367 ctx.offset_by (ie->indirect_info->offset);
3368 if (ie->indirect_info->vptr_changed)
3369 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3370 ie->indirect_info->otr_type);
3371 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3372 targets = possible_polymorphic_call_targets
3373 (ie->indirect_info->otr_type,
3374 ie->indirect_info->otr_token,
3375 ctx, &final);
3376 if (final && targets.length () <= 1)
3378 speculative = false;
3379 if (targets.length () == 1)
3380 target = targets[0]->decl;
3381 else
3382 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3384 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3385 && !ie->speculative && ie->maybe_hot_p ())
3387 cgraph_node *n;
3388 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3389 ie->indirect_info->otr_token,
3390 ie->indirect_info->context);
3391 if (n)
3393 target = n->decl;
3394 speculative = true;
3398 if (target)
3400 if (!possible_polymorphic_call_target_p
3401 (ie, cgraph_node::get_create (target)))
3403 if (speculative)
3404 return NULL;
3405 target = ipa_impossible_devirt_target (ie, target);
3407 return ipa_make_edge_direct_to_target (ie, target, speculative);
3409 else
3410 return NULL;
3413 /* Update the param called notes associated with NODE when CS is being inlined,
3414 assuming NODE is (potentially indirectly) inlined into CS->callee.
3415 Moreover, if the callee is discovered to be constant, create a new cgraph
3416 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3417 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
3419 static bool
3420 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3421 struct cgraph_node *node,
3422 vec<cgraph_edge *> *new_edges)
3424 class ipa_edge_args *top;
3425 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3426 class ipa_node_params *new_root_info, *inlined_node_info;
3427 bool res = false;
3429 ipa_check_create_edge_args ();
3430 top = IPA_EDGE_REF (cs);
3431 new_root_info = IPA_NODE_REF (cs->caller->inlined_to
3432 ? cs->caller->inlined_to
3433 : cs->caller);
3434 inlined_node_info = IPA_NODE_REF (cs->callee->function_symbol ());
3436 for (ie = node->indirect_calls; ie; ie = next_ie)
3438 class cgraph_indirect_call_info *ici = ie->indirect_info;
3439 struct ipa_jump_func *jfunc;
3440 int param_index;
3441 cgraph_node *spec_target = NULL;
3443 next_ie = ie->next_callee;
3445 if (ici->param_index == -1)
3446 continue;
3448 /* We must check the range due to calls with a variable number of arguments. */
3449 if (!top || ici->param_index >= ipa_get_cs_argument_count (top))
3451 ici->param_index = -1;
3452 continue;
3455 param_index = ici->param_index;
3456 jfunc = ipa_get_ith_jump_func (top, param_index);
3458 if (ie->speculative)
3460 struct cgraph_edge *de;
3461 struct ipa_ref *ref;
3462 ie->speculative_call_info (de, ie, ref);
3463 spec_target = de->callee;
3466 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3467 new_direct_edge = NULL;
3468 else if (ici->polymorphic)
3470 ipa_polymorphic_call_context ctx;
3471 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3472 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3474 else
3476 tree target_type = ipa_get_type (inlined_node_info, param_index);
3477 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3478 target_type,
3479 new_root_info);
3482 /* If speculation was removed, then we need to do nothing. */
3483 if (new_direct_edge && new_direct_edge != ie
3484 && new_direct_edge->callee == spec_target)
3486 new_direct_edge->indirect_inlining_edge = 1;
3487 top = IPA_EDGE_REF (cs);
3488 res = true;
3489 if (!new_direct_edge->speculative)
3490 continue;
3492 else if (new_direct_edge)
3494 new_direct_edge->indirect_inlining_edge = 1;
3495 if (new_edges)
3497 new_edges->safe_push (new_direct_edge);
3498 res = true;
3500 top = IPA_EDGE_REF (cs);
3501 /* If a speculative edge was introduced, we still need to update the
3502 call info of the indirect edge. */
3503 if (!new_direct_edge->speculative)
3504 continue;
3506 if (jfunc->type == IPA_JF_PASS_THROUGH
3507 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3509 if (ici->agg_contents
3510 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3511 && !ici->polymorphic)
3512 ici->param_index = -1;
3513 else
3515 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3516 if (ici->polymorphic
3517 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3518 ici->vptr_changed = true;
3519 ipa_set_param_used_by_indirect_call (new_root_info,
3520 ici->param_index, true);
3521 if (ici->polymorphic)
3522 ipa_set_param_used_by_polymorphic_call (new_root_info,
3523 ici->param_index, true);
3526 else if (jfunc->type == IPA_JF_ANCESTOR)
3528 if (ici->agg_contents
3529 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3530 && !ici->polymorphic)
3531 ici->param_index = -1;
3532 else
3534 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3535 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3536 if (ici->polymorphic
3537 && !ipa_get_jf_ancestor_type_preserved (jfunc))
3538 ici->vptr_changed = true;
3541 else
3542 /* Either we can find a destination for this edge now or never. */
3543 ici->param_index = -1;
3546 return res;
3549 /* Recursively traverse subtree of NODE (including node) made of inlined
3550 cgraph_edges when CS has been inlined and invoke
3551 update_indirect_edges_after_inlining on all nodes and
3552 update_jump_functions_after_inlining on all non-inlined edges that lead out
3553 of this subtree. Newly discovered indirect edges will be added to
3554 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
3555 created. */
3557 static bool
3558 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3559 struct cgraph_node *node,
3560 vec<cgraph_edge *> *new_edges)
3562 struct cgraph_edge *e;
3563 bool res;
3565 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3567 for (e = node->callees; e; e = e->next_callee)
3568 if (!e->inline_failed)
3569 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3570 else
3571 update_jump_functions_after_inlining (cs, e);
3572 for (e = node->indirect_calls; e; e = e->next_callee)
3573 update_jump_functions_after_inlining (cs, e);
3575 return res;
3578 /* Combine two controlled uses counts as done during inlining. */
3580 static int
3581 combine_controlled_uses_counters (int c, int d)
3583 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3584 return IPA_UNDESCRIBED_USE;
3585 else
3586 return c + d - 1;
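/* For example, combining C == 2 described uses in the caller (one of them
   being the call now inlined) with D == 3 described uses in the callee body
   yields 2 + 3 - 1 == 4: the argument-passing use itself disappears with
   the inlined call.  An IPA_UNDESCRIBED_USE in either counter makes the
   result IPA_UNDESCRIBED_USE.  */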
3589 /* Propagate the number of controlled uses from CS->callee to the new root of the
3590 tree of inlined nodes. */
3592 static void
3593 propagate_controlled_uses (struct cgraph_edge *cs)
3595 class ipa_edge_args *args = IPA_EDGE_REF (cs);
3596 if (!args)
3597 return;
3598 struct cgraph_node *new_root = cs->caller->inlined_to
3599 ? cs->caller->inlined_to : cs->caller;
3600 class ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3601 class ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3602 int count, i;
3604 if (!old_root_info)
3605 return;
3607 count = MIN (ipa_get_cs_argument_count (args),
3608 ipa_get_param_count (old_root_info));
3609 for (i = 0; i < count; i++)
3611 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3612 struct ipa_cst_ref_desc *rdesc;
3614 if (jf->type == IPA_JF_PASS_THROUGH)
3616 int src_idx, c, d;
3617 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3618 c = ipa_get_controlled_uses (new_root_info, src_idx);
3619 d = ipa_get_controlled_uses (old_root_info, i);
3621 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3622 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3623 c = combine_controlled_uses_counters (c, d);
3624 ipa_set_controlled_uses (new_root_info, src_idx, c);
3625 if (c == 0 && new_root_info->ipcp_orig_node)
3627 struct cgraph_node *n;
3628 struct ipa_ref *ref;
3629 tree t = new_root_info->known_csts[src_idx];
3631 if (t && TREE_CODE (t) == ADDR_EXPR
3632 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3633 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3634 && (ref = new_root->find_reference (n, NULL, 0)))
3636 if (dump_file)
3637 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3638 "reference from %s to %s.\n",
3639 new_root->dump_name (),
3640 n->dump_name ());
3641 ref->remove_reference ();
3645 else if (jf->type == IPA_JF_CONST
3646 && (rdesc = jfunc_rdesc_usable (jf)))
3648 int d = ipa_get_controlled_uses (old_root_info, i);
3649 int c = rdesc->refcount;
3650 rdesc->refcount = combine_controlled_uses_counters (c, d);
3651 if (rdesc->refcount == 0)
3653 tree cst = ipa_get_jf_constant (jf);
3654 struct cgraph_node *n;
3655 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3656 && TREE_CODE (TREE_OPERAND (cst, 0))
3657 == FUNCTION_DECL);
3658 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3659 if (n)
3661 struct cgraph_node *clone;
3662 bool ok;
3663 ok = remove_described_reference (n, rdesc);
3664 gcc_checking_assert (ok);
3666 clone = cs->caller;
3667 while (clone->inlined_to
3668 && clone->ipcp_clone
3669 && clone != rdesc->cs->caller)
3671 struct ipa_ref *ref;
3672 ref = clone->find_reference (n, NULL, 0);
3673 if (ref)
3675 if (dump_file)
3676 fprintf (dump_file, "ipa-prop: Removing "
3677 "cloning-created reference "
3678 "from %s to %s.\n",
3679 clone->dump_name (),
3680 n->dump_name ());
3681 ref->remove_reference ();
3683 clone = clone->callers->caller;
3690 for (i = ipa_get_param_count (old_root_info);
3691 i < ipa_get_cs_argument_count (args);
3692 i++)
3694 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3696 if (jf->type == IPA_JF_CONST)
3698 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3699 if (rdesc)
3700 rdesc->refcount = IPA_UNDESCRIBED_USE;
3702 else if (jf->type == IPA_JF_PASS_THROUGH)
3703 ipa_set_controlled_uses (new_root_info,
3704 jf->value.pass_through.formal_id,
3705 IPA_UNDESCRIBED_USE);
3709 /* Update jump functions and call note functions on inlining the call site CS.
3710 CS is expected to lead to a node already cloned by
3711 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3712 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3713 created. */
3715 bool
3716 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3717 vec<cgraph_edge *> *new_edges)
3719 bool changed;
3720 /* Do nothing if the preparation phase has not been carried out yet
3721 (i.e. during early inlining). */
3722 if (!ipa_node_params_sum)
3723 return false;
3724 gcc_assert (ipa_edge_args_sum);
3726 propagate_controlled_uses (cs);
3727 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3728 ipa_node_params_sum->remove (cs->callee);
3730 class ipa_edge_args *args = IPA_EDGE_REF (cs);
3731 if (args)
3733 bool ok = true;
3734 if (args->jump_functions)
3736 struct ipa_jump_func *jf;
3737 int i;
3738 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3739 if (jf->type == IPA_JF_CONST
3740 && ipa_get_jf_constant_rdesc (jf))
3742 ok = false;
3743 break;
3746 if (ok)
3747 ipa_edge_args_sum->remove (cs);
3749 if (ipcp_transformation_sum)
3750 ipcp_transformation_sum->remove (cs->callee);
3752 return changed;
3755 /* Ensure that the array of edge argument infos is big enough to accommodate a
3756 structure for all edges, and reallocate it if not. Also allocate the
3757 associated hash tables if they do not already exist. */
3759 void
3760 ipa_check_create_edge_args (void)
3762 if (!ipa_edge_args_sum)
3763 ipa_edge_args_sum
3764 = (new (ggc_alloc_no_dtor<ipa_edge_args_sum_t> ())
3765 ipa_edge_args_sum_t (symtab, true));
3766 if (!ipa_bits_hash_table)
3767 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3768 if (!ipa_vr_hash_table)
3769 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
3772 /* Free all ipa_edge structures. */
3774 void
3775 ipa_free_all_edge_args (void)
3777 if (!ipa_edge_args_sum)
3778 return;
3780 ggc_delete (ipa_edge_args_sum);
3781 ipa_edge_args_sum = NULL;
3784 /* Free all ipa_node_params structures. */
3786 void
3787 ipa_free_all_node_params (void)
3789 ggc_delete (ipa_node_params_sum);
3790 ipa_node_params_sum = NULL;
3793 /* Initialize IPA CP transformation summary and also allocate any necessary hash
3794 tables if they do not already exist. */
3796 void
3797 ipcp_transformation_initialize (void)
3799 if (!ipa_bits_hash_table)
3800 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3801 if (!ipa_vr_hash_table)
3802 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
3803 if (ipcp_transformation_sum == NULL)
3804 ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
3807 /* Release the IPA CP transformation summary. */
3809 void
3810 ipcp_free_transformation_sum (void)
3812 if (!ipcp_transformation_sum)
3813 return;
3815 ipcp_transformation_sum->~function_summary<ipcp_transformation *> ();
3816 ggc_free (ipcp_transformation_sum);
3817 ipcp_transformation_sum = NULL;
3820 /* Set the aggregate replacements of NODE to be AGGVALS. */
3822 void
3823 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3824 struct ipa_agg_replacement_value *aggvals)
3826 ipcp_transformation_initialize ();
3827 ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
3828 s->agg_values = aggvals;
3831 /* Hook that is called by cgraph.c when an edge is removed. Adjust reference
3832 count data structures accordingly. */
3834 void
3835 ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
3837 if (args->jump_functions)
3839 struct ipa_jump_func *jf;
3840 int i;
3841 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3843 struct ipa_cst_ref_desc *rdesc;
3844 try_decrement_rdesc_refcount (jf);
3845 if (jf->type == IPA_JF_CONST
3846 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3847 && rdesc->cs == cs)
3848 rdesc->cs = NULL;
3853 /* Method invoked when an edge is duplicated. Copy ipa_edge_args and adjust
3854 reference count data structures accordingly. */
3856 void
3857 ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
3858 ipa_edge_args *old_args, ipa_edge_args *new_args)
3860 unsigned int i;
3862 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3863 if (old_args->polymorphic_call_contexts)
3864 new_args->polymorphic_call_contexts
3865 = vec_safe_copy (old_args->polymorphic_call_contexts);
3867 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3869 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3870 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
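/* vec_safe_copy above copied the jump function structs shallowly, so
   dst_jf->agg.items still aliases the source vector; replace it with the
   destination's own copy.  */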
3872 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3874 if (src_jf->type == IPA_JF_CONST)
3876 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3878 if (!src_rdesc)
3879 dst_jf->value.constant.rdesc = NULL;
3880 else if (src->caller == dst->caller)
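/* The call statement has been duplicated within the same function (the
   callers of SRC and DST are one node), so clone the underlying IPA
   reference and give the new edge a reference descriptor of its own.  */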
3882 struct ipa_ref *ref;
3883 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3884 gcc_checking_assert (n);
3885 ref = src->caller->find_reference (n, src->call_stmt,
3886 src->lto_stmt_uid);
3887 gcc_checking_assert (ref);
3888 dst->caller->clone_reference (ref, ref->stmt);
3890 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3891 dst_rdesc->cs = dst;
3892 dst_rdesc->refcount = src_rdesc->refcount;
3893 dst_rdesc->next_duplicate = NULL;
3894 dst_jf->value.constant.rdesc = dst_rdesc;
3896 else if (src_rdesc->cs == src)
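/* SRC_RDESC is owned by the edge being duplicated, so allocate a fresh
   descriptor for DST and chain it into SRC_RDESC's list of duplicates.  */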
3898 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3899 dst_rdesc->cs = dst;
3900 dst_rdesc->refcount = src_rdesc->refcount;
3901 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3902 src_rdesc->next_duplicate = dst_rdesc;
3903 dst_jf->value.constant.rdesc = dst_rdesc;
3905 else
3907 struct ipa_cst_ref_desc *dst_rdesc;
3908 /* This can happen during inlining, when a JFUNC can refer to a
3909 reference taken in a function up in the tree of inline clones.
3910 We need to find the duplicate that refers to our tree of
3911 inline clones. */
3913 gcc_assert (dst->caller->inlined_to);
3914 for (dst_rdesc = src_rdesc->next_duplicate;
3915 dst_rdesc;
3916 dst_rdesc = dst_rdesc->next_duplicate)
3918 struct cgraph_node *top;
3919 top = dst_rdesc->cs->caller->inlined_to
3920 ? dst_rdesc->cs->caller->inlined_to
3921 : dst_rdesc->cs->caller;
3922 if (dst->caller->inlined_to == top)
3923 break;
3925 gcc_assert (dst_rdesc);
3926 dst_jf->value.constant.rdesc = dst_rdesc;
3929 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3930 && src->caller == dst->caller)
3932 struct cgraph_node *inline_root = dst->caller->inlined_to
3933 ? dst->caller->inlined_to : dst->caller;
3934 class ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3935 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3937 int c = ipa_get_controlled_uses (root_info, idx);
3938 if (c != IPA_UNDESCRIBED_USE)
3940 c++;
3941 ipa_set_controlled_uses (root_info, idx, c);
3947 /* Analyze a function newly added to the callgraph. */
3949 static void
3950 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3952 if (node->has_gimple_body_p ())
3953 ipa_analyze_node (node);
3956 /* Hook that is called by summary when a node is duplicated. */
3958 void
3959 ipa_node_params_t::duplicate (cgraph_node *src, cgraph_node *dst,
3960 ipa_node_params *old_info,
3961 ipa_node_params *new_info)
3963 ipa_agg_replacement_value *old_av, *new_av;
3965 new_info->descriptors = vec_safe_copy (old_info->descriptors);
3966 new_info->lattices = NULL;
3967 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3968 new_info->known_csts = old_info->known_csts.copy ();
3969 new_info->known_contexts = old_info->known_contexts.copy ();
3971 new_info->analysis_done = old_info->analysis_done;
3972 new_info->node_enqueued = old_info->node_enqueued;
3973 new_info->versionable = old_info->versionable;
3975 old_av = ipa_get_agg_replacements_for_node (src);
3976 if (old_av)
3978 new_av = NULL;
3979 while (old_av)
3981 struct ipa_agg_replacement_value *v;
3983 v = ggc_alloc<ipa_agg_replacement_value> ();
3984 memcpy (v, old_av, sizeof (*v));
3985 v->next = new_av;
3986 new_av = v;
3987 old_av = old_av->next;
3989 ipa_set_node_agg_value_chain (dst, new_av);
3993 /* Duplication of ipcp transformation summaries. */
3995 void
3996 ipcp_transformation_t::duplicate (cgraph_node *, cgraph_node *dst,
3997 ipcp_transformation *src_trans,
3998 ipcp_transformation *dst_trans)
4000 /* Avoid redundant work of duplicating vectors we will never use. */
4001 if (dst->inlined_to)
4002 return;
4003 dst_trans->bits = vec_safe_copy (src_trans->bits);
4004 dst_trans->m_vr = vec_safe_copy (src_trans->m_vr);
4005 ipa_agg_replacement_value *agg = src_trans->agg_values,
4006 **aggptr = &dst_trans->agg_values;
4007 while (agg)
4009 *aggptr = ggc_alloc<ipa_agg_replacement_value> ();
4010 **aggptr = *agg;
4011 agg = agg->next;
4012 aggptr = &(*aggptr)->next;
4016 /* Register our cgraph hooks if they are not already there. */
4018 void
4019 ipa_register_cgraph_hooks (void)
4021 ipa_check_create_node_params ();
4022 ipa_check_create_edge_args ();
4024 function_insertion_hook_holder =
4025 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
4028 /* Unregister our cgraph hooks. */
4030 static void
4031 ipa_unregister_cgraph_hooks (void)
4033 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
4034 function_insertion_hook_holder = NULL;
4037 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4038 longer needed after ipa-cp. */
4040 void
4041 ipa_free_all_structures_after_ipa_cp (void)
4043 if (!optimize && !in_lto_p)
4045 ipa_free_all_edge_args ();
4046 ipa_free_all_node_params ();
4047 ipcp_sources_pool.release ();
4048 ipcp_cst_values_pool.release ();
4049 ipcp_poly_ctx_values_pool.release ();
4050 ipcp_agg_lattice_pool.release ();
4051 ipa_unregister_cgraph_hooks ();
4052 ipa_refdesc_pool.release ();
4056 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4057 longer needed after indirect inlining. */
4059 void
4060 ipa_free_all_structures_after_iinln (void)
4062 ipa_free_all_edge_args ();
4063 ipa_free_all_node_params ();
4064 ipa_unregister_cgraph_hooks ();
4065 ipcp_sources_pool.release ();
4066 ipcp_cst_values_pool.release ();
4067 ipcp_poly_ctx_values_pool.release ();
4068 ipcp_agg_lattice_pool.release ();
4069 ipa_refdesc_pool.release ();
4072 /* Print ipa_tree_map data structures of function NODE to F. */
4075 void
4076 ipa_print_node_params (FILE *f, struct cgraph_node *node)
4078 int i, count;
4079 class ipa_node_params *info;
4081 if (!node->definition)
4082 return;
4083 info = IPA_NODE_REF (node);
4084 fprintf (f, " function %s parameter descriptors:\n", node->dump_name ());
4085 count = ipa_get_param_count (info);
4086 for (i = 0; i < count; i++)
4088 int c;
4090 fprintf (f, " ");
4091 ipa_dump_param (f, info, i);
4092 if (ipa_is_param_used (info, i))
4093 fprintf (f, " used");
4094 if (ipa_is_param_used_by_ipa_predicates (info, i))
4095 fprintf (f, " used_by_ipa_predicates");
4096 if (ipa_is_param_used_by_indirect_call (info, i))
4097 fprintf (f, " used_by_indirect_call");
4098 if (ipa_is_param_used_by_polymorphic_call (info, i))
4099 fprintf (f, " used_by_polymorphic_call");
4100 c = ipa_get_controlled_uses (info, i);
4101 if (c == IPA_UNDESCRIBED_USE)
4102 fprintf (f, " undescribed_use");
4103 else
4104 fprintf (f, " controlled_uses=%i", c);
4105 fprintf (f, "\n");
4109 /* Print ipa_tree_map data structures of all functions in the
4110 callgraph to F. */
4112 void
4113 ipa_print_all_params (FILE * f)
4115 struct cgraph_node *node;
4117 fprintf (f, "\nFunction parameters:\n");
4118 FOR_EACH_FUNCTION (node)
4119 ipa_print_node_params (f, node);
4122 /* Dump the AV linked list. */
4124 void
4125 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4127 bool comma = false;
4128 fprintf (f, " Aggregate replacements:");
4129 for (; av; av = av->next)
4131 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4132 av->index, av->offset);
4133 print_generic_expr (f, av->value);
4134 comma = true;
4136 fprintf (f, "\n");
4139 /* Stream out jump function JUMP_FUNC to OB. */
4141 static void
4142 ipa_write_jump_function (struct output_block *ob,
4143 struct ipa_jump_func *jump_func)
4145 struct ipa_agg_jf_item *item;
4146 struct bitpack_d bp;
4147 int i, count;
4148 int flag = 0;
4150 /* ADDR_EXPRs are very common IP invariants; save some streamer data
4151 as well as WPA memory by handling them specially. */
4152 if (jump_func->type == IPA_JF_CONST
4153 && TREE_CODE (jump_func->value.constant.value) == ADDR_EXPR)
4154 flag = 1;
4156 streamer_write_uhwi (ob, jump_func->type * 2 + flag);
4157 switch (jump_func->type)
4159 case IPA_JF_UNKNOWN:
4160 break;
4161 case IPA_JF_CONST:
4162 gcc_assert (
4163 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4164 stream_write_tree (ob,
4165 flag
4166 ? TREE_OPERAND (jump_func->value.constant.value, 0)
4167 : jump_func->value.constant.value, true);
4168 break;
4169 case IPA_JF_PASS_THROUGH:
4170 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4171 if (jump_func->value.pass_through.operation == NOP_EXPR)
4173 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4174 bp = bitpack_create (ob->main_stream);
4175 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4176 streamer_write_bitpack (&bp);
4178 else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
4179 == tcc_unary)
4180 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4181 else
4183 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4184 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4186 break;
4187 case IPA_JF_ANCESTOR:
4188 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4189 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4190 bp = bitpack_create (ob->main_stream);
4191 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4192 streamer_write_bitpack (&bp);
4193 break;
4196 count = vec_safe_length (jump_func->agg.items);
4197 streamer_write_uhwi (ob, count);
4198 if (count)
4200 bp = bitpack_create (ob->main_stream);
4201 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4202 streamer_write_bitpack (&bp);
4205 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4207 streamer_write_uhwi (ob, item->offset);
4208 stream_write_tree (ob, item->value, true);
4211 bp = bitpack_create (ob->main_stream);
4212 bp_pack_value (&bp, !!jump_func->bits, 1);
4213 streamer_write_bitpack (&bp);
4214 if (jump_func->bits)
4216 streamer_write_widest_int (ob, jump_func->bits->value);
4217 streamer_write_widest_int (ob, jump_func->bits->mask);
4219 bp_pack_value (&bp, !!jump_func->m_vr, 1);
4220 streamer_write_bitpack (&bp);
4221 if (jump_func->m_vr)
4223 streamer_write_enum (ob->main_stream, value_range_kind,
4224 VR_LAST, jump_func->m_vr->kind ());
4225 stream_write_tree (ob, jump_func->m_vr->min (), true);
4226 stream_write_tree (ob, jump_func->m_vr->max (), true);
4230 /* Read in jump function JUMP_FUNC from IB. */
4232 static void
4233 ipa_read_jump_function (class lto_input_block *ib,
4234 struct ipa_jump_func *jump_func,
4235 struct cgraph_edge *cs,
4236 class data_in *data_in,
4237 bool prevails)
4239 enum jump_func_type jftype;
4240 enum tree_code operation;
4241 int i, count;
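/* The low bit of the streamed value flags a constant that was streamed as
   the operand of an ADDR_EXPR (see ipa_write_jump_function above); the
   remaining bits encode the jump function type.  */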
4242 int val = streamer_read_uhwi (ib);
4243 bool flag = val & 1;
4245 jftype = (enum jump_func_type) (val / 2);
4246 switch (jftype)
4248 case IPA_JF_UNKNOWN:
4249 ipa_set_jf_unknown (jump_func);
4250 break;
4251 case IPA_JF_CONST:
4253 tree t = stream_read_tree (ib, data_in);
4254 if (flag && prevails)
4255 t = build_fold_addr_expr (t);
4256 ipa_set_jf_constant (jump_func, t, cs);
4258 break;
4259 case IPA_JF_PASS_THROUGH:
4260 operation = (enum tree_code) streamer_read_uhwi (ib);
4261 if (operation == NOP_EXPR)
4263 int formal_id = streamer_read_uhwi (ib);
4264 struct bitpack_d bp = streamer_read_bitpack (ib);
4265 bool agg_preserved = bp_unpack_value (&bp, 1);
4266 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4268 else if (TREE_CODE_CLASS (operation) == tcc_unary)
4270 int formal_id = streamer_read_uhwi (ib);
4271 ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4273 else
4275 tree operand = stream_read_tree (ib, data_in);
4276 int formal_id = streamer_read_uhwi (ib);
4277 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4278 operation);
4280 break;
4281 case IPA_JF_ANCESTOR:
4283 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4284 int formal_id = streamer_read_uhwi (ib);
4285 struct bitpack_d bp = streamer_read_bitpack (ib);
4286 bool agg_preserved = bp_unpack_value (&bp, 1);
4287 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4288 break;
4290 default:
4291 fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
4294 count = streamer_read_uhwi (ib);
4295 if (prevails)
4296 vec_alloc (jump_func->agg.items, count);
4297 if (count)
4299 struct bitpack_d bp = streamer_read_bitpack (ib);
4300 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4302 for (i = 0; i < count; i++)
4304 struct ipa_agg_jf_item item;
4305 item.offset = streamer_read_uhwi (ib);
4306 item.value = stream_read_tree (ib, data_in);
4307 if (prevails)
4308 jump_func->agg.items->quick_push (item);
4311 struct bitpack_d bp = streamer_read_bitpack (ib);
4312 bool bits_known = bp_unpack_value (&bp, 1);
4313 if (bits_known)
4315 widest_int value = streamer_read_widest_int (ib);
4316 widest_int mask = streamer_read_widest_int (ib);
4317 if (prevails)
4318 ipa_set_jfunc_bits (jump_func, value, mask);
4320 else
4321 jump_func->bits = NULL;
4323 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4324 bool vr_known = bp_unpack_value (&vr_bp, 1);
4325 if (vr_known)
4327 enum value_range_kind type = streamer_read_enum (ib, value_range_kind,
4328 VR_LAST);
4329 tree min = stream_read_tree (ib, data_in);
4330 tree max = stream_read_tree (ib, data_in);
4331 if (prevails)
4332 ipa_set_jfunc_vr (jump_func, type, min, max);
4334 else
4335 jump_func->m_vr = NULL;
4338 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4339 relevant to indirect inlining to OB. */
4341 static void
4342 ipa_write_indirect_edge_info (struct output_block *ob,
4343 struct cgraph_edge *cs)
4345 class cgraph_indirect_call_info *ii = cs->indirect_info;
4346 struct bitpack_d bp;
4348 streamer_write_hwi (ob, ii->param_index);
4349 bp = bitpack_create (ob->main_stream);
4350 bp_pack_value (&bp, ii->polymorphic, 1);
4351 bp_pack_value (&bp, ii->agg_contents, 1);
4352 bp_pack_value (&bp, ii->member_ptr, 1);
4353 bp_pack_value (&bp, ii->by_ref, 1);
4354 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
4355 bp_pack_value (&bp, ii->vptr_changed, 1);
4356 streamer_write_bitpack (&bp);
4357 if (ii->agg_contents || ii->polymorphic)
4358 streamer_write_hwi (ob, ii->offset);
4359 else
4360 gcc_assert (ii->offset == 0);
4362 if (ii->polymorphic)
4364 streamer_write_hwi (ob, ii->otr_token);
4365 stream_write_tree (ob, ii->otr_type, true);
4366 ii->context.stream_out (ob);
4370 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4371 relevant to indirect inlining from IB. */
4373 static void
4374 ipa_read_indirect_edge_info (class lto_input_block *ib,
4375 class data_in *data_in,
4376 struct cgraph_edge *cs,
4377 class ipa_node_params *info)
4379 class cgraph_indirect_call_info *ii = cs->indirect_info;
4380 struct bitpack_d bp;
4382 ii->param_index = (int) streamer_read_hwi (ib);
4383 bp = streamer_read_bitpack (ib);
4384 ii->polymorphic = bp_unpack_value (&bp, 1);
4385 ii->agg_contents = bp_unpack_value (&bp, 1);
4386 ii->member_ptr = bp_unpack_value (&bp, 1);
4387 ii->by_ref = bp_unpack_value (&bp, 1);
4388 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
4389 ii->vptr_changed = bp_unpack_value (&bp, 1);
4390 if (ii->agg_contents || ii->polymorphic)
4391 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4392 else
4393 ii->offset = 0;
4394 if (ii->polymorphic)
4396 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4397 ii->otr_type = stream_read_tree (ib, data_in);
4398 ii->context.stream_in (ib, data_in);
4400 if (info && ii->param_index >= 0)
4402 if (ii->polymorphic)
4403 ipa_set_param_used_by_polymorphic_call (info,
4404 ii->param_index, true);
4405 ipa_set_param_used_by_indirect_call (info,
4406 ii->param_index, true);
4410 /* Stream out NODE info to OB. */
4412 static void
4413 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4415 int node_ref;
4416 lto_symtab_encoder_t encoder;
4417 class ipa_node_params *info = IPA_NODE_REF (node);
4418 int j;
4419 struct cgraph_edge *e;
4420 struct bitpack_d bp;
4422 encoder = ob->decl_state->symtab_node_encoder;
4423 node_ref = lto_symtab_encoder_encode (encoder, node);
4424 streamer_write_uhwi (ob, node_ref);
4426 streamer_write_uhwi (ob, ipa_get_param_count (info));
4427 for (j = 0; j < ipa_get_param_count (info); j++)
4428 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4429 bp = bitpack_create (ob->main_stream);
4430 gcc_assert (info->analysis_done
4431 || ipa_get_param_count (info) == 0);
4432 gcc_assert (!info->node_enqueued);
4433 gcc_assert (!info->ipcp_orig_node);
4434 for (j = 0; j < ipa_get_param_count (info); j++)
4435 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4436 streamer_write_bitpack (&bp);
4437 for (j = 0; j < ipa_get_param_count (info); j++)
4439 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4440 stream_write_tree (ob, ipa_get_type (info, j), true);
4442 for (e = node->callees; e; e = e->next_callee)
4444 class ipa_edge_args *args = IPA_EDGE_REF (e);
4446 if (!args)
4448 streamer_write_uhwi (ob, 0);
4449 continue;
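/* The low bit of the streamed argument count records whether polymorphic
   call contexts follow the jump functions.  */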
4452 streamer_write_uhwi (ob,
4453 ipa_get_cs_argument_count (args) * 2
4454 + (args->polymorphic_call_contexts != NULL));
4455 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4457 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4458 if (args->polymorphic_call_contexts != NULL)
4459 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4462 for (e = node->indirect_calls; e; e = e->next_callee)
4464 class ipa_edge_args *args = IPA_EDGE_REF (e);
4465 if (!args)
4466 streamer_write_uhwi (ob, 0);
4467 else
4469 streamer_write_uhwi (ob,
4470 ipa_get_cs_argument_count (args) * 2
4471 + (args->polymorphic_call_contexts != NULL));
4472 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4474 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4475 if (args->polymorphic_call_contexts != NULL)
4476 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4479 ipa_write_indirect_edge_info (ob, e);
4483 /* Stream in edge E from IB. */
4485 static void
4486 ipa_read_edge_info (class lto_input_block *ib,
4487 class data_in *data_in,
4488 struct cgraph_edge *e, bool prevails)
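/* The count streamed by ipa_write_node_info is twice the number of
   arguments, with the low bit set when polymorphic call contexts are
   streamed after each jump function.  */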
4490 int count = streamer_read_uhwi (ib);
4491 bool contexts_computed = count & 1;
4493 count /= 2;
4494 if (!count)
4495 return;
4496 if (prevails && e->possibly_call_in_translation_unit_p ())
4498 class ipa_edge_args *args = IPA_EDGE_REF_GET_CREATE (e);
4499 vec_safe_grow_cleared (args->jump_functions, count);
4500 if (contexts_computed)
4501 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4502 for (int k = 0; k < count; k++)
4504 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4505 data_in, prevails);
4506 if (contexts_computed)
4507 ipa_get_ith_polymorhic_call_context (args, k)->stream_in
4508 (ib, data_in);
4511 else
4513 for (int k = 0; k < count; k++)
4515 struct ipa_jump_func dummy;
4516 ipa_read_jump_function (ib, &dummy, e,
4517 data_in, prevails);
4518 if (contexts_computed)
4520 class ipa_polymorphic_call_context ctx;
4521 ctx.stream_in (ib, data_in);
4527 /* Stream in NODE info from IB. */
4529 static void
4530 ipa_read_node_info (class lto_input_block *ib, struct cgraph_node *node,
4531 class data_in *data_in)
4533 int k;
4534 struct cgraph_edge *e;
4535 struct bitpack_d bp;
4536 bool prevails = node->prevailing_p ();
4537 class ipa_node_params *info = prevails
4538 ? IPA_NODE_REF_GET_CREATE (node) : NULL;
4540 int param_count = streamer_read_uhwi (ib);
4541 if (prevails)
4543 ipa_alloc_node_params (node, param_count);
4544 for (k = 0; k < param_count; k++)
4545 (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
4546 if (ipa_get_param_count (info) != 0)
4547 info->analysis_done = true;
4548 info->node_enqueued = false;
4550 else
4551 for (k = 0; k < param_count; k++)
4552 streamer_read_uhwi (ib);
4554 bp = streamer_read_bitpack (ib);
4555 for (k = 0; k < param_count; k++)
4557 bool used = bp_unpack_value (&bp, 1);
4559 if (prevails)
4560 ipa_set_param_used (info, k, used);
4562 for (k = 0; k < param_count; k++)
4564 int nuses = streamer_read_hwi (ib);
4565 tree type = stream_read_tree (ib, data_in);
4567 if (prevails)
4569 ipa_set_controlled_uses (info, k, nuses);
4570 (*info->descriptors)[k].decl_or_type = type;
4573 for (e = node->callees; e; e = e->next_callee)
4574 ipa_read_edge_info (ib, data_in, e, prevails);
4575 for (e = node->indirect_calls; e; e = e->next_callee)
4577 ipa_read_edge_info (ib, data_in, e, prevails);
4578 ipa_read_indirect_edge_info (ib, data_in, e, info);
4582 /* Write jump functions for the functions in the current LTO partition. */
4584 void
4585 ipa_prop_write_jump_functions (void)
4587 struct cgraph_node *node;
4588 struct output_block *ob;
4589 unsigned int count = 0;
4590 lto_symtab_encoder_iterator lsei;
4591 lto_symtab_encoder_t encoder;
4593 if (!ipa_node_params_sum || !ipa_edge_args_sum)
4594 return;
4596 ob = create_output_block (LTO_section_jump_functions);
4597 encoder = ob->decl_state->symtab_node_encoder;
4598 ob->symbol = NULL;
4599 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4600 lsei_next_function_in_partition (&lsei))
4602 node = lsei_cgraph_node (lsei);
4603 if (node->has_gimple_body_p ()
4604 && IPA_NODE_REF (node) != NULL)
4605 count++;
4608 streamer_write_uhwi (ob, count);
4610 /* Process all of the functions. */
4611 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4612 lsei_next_function_in_partition (&lsei))
4614 node = lsei_cgraph_node (lsei);
4615 if (node->has_gimple_body_p ()
4616 && IPA_NODE_REF (node) != NULL)
4617 ipa_write_node_info (ob, node);
4619 streamer_write_char_stream (ob->main_stream, 0);
4620 produce_asm (ob, NULL);
4621 destroy_output_block (ob);
4624 /* Read the jump functions section in FILE_DATA of length LEN with data DATA. */
4626 static void
4627 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4628 size_t len)
4630 const struct lto_function_header *header =
4631 (const struct lto_function_header *) data;
4632 const int cfg_offset = sizeof (struct lto_function_header);
4633 const int main_offset = cfg_offset + header->cfg_size;
4634 const int string_offset = main_offset + header->main_size;
4635 class data_in *data_in;
4636 unsigned int i;
4637 unsigned int count;
4639 lto_input_block ib_main ((const char *) data + main_offset,
4640 header->main_size, file_data->mode_table);
4642 data_in =
4643 lto_data_in_create (file_data, (const char *) data + string_offset,
4644 header->string_size, vNULL);
4645 count = streamer_read_uhwi (&ib_main);
4647 for (i = 0; i < count; i++)
4649 unsigned int index;
4650 struct cgraph_node *node;
4651 lto_symtab_encoder_t encoder;
4653 index = streamer_read_uhwi (&ib_main);
4654 encoder = file_data->symtab_node_encoder;
4655 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4656 index));
4657 gcc_assert (node->definition);
4658 ipa_read_node_info (&ib_main, node, data_in);
4660 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4661 len);
4662 lto_data_in_delete (data_in);
4665 /* Read ipcp jump functions. */
4667 void
4668 ipa_prop_read_jump_functions (void)
4670 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4671 struct lto_file_decl_data *file_data;
4672 unsigned int j = 0;
4674 ipa_check_create_node_params ();
4675 ipa_check_create_edge_args ();
4676 ipa_register_cgraph_hooks ();
4678 while ((file_data = file_data_vec[j++]))
4680 size_t len;
4681 const char *data
4682 = lto_get_summary_section_data (file_data, LTO_section_jump_functions,
4683 &len);
4684 if (data)
4685 ipa_prop_read_section (file_data, data, len);
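/* Stream out the IPA-CP transformation summary of NODE to OB: its aggregate
   replacement values and the known value ranges and bits of its
   parameters.  */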
4689 void
4690 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
4692 int node_ref;
4693 unsigned int count = 0;
4694 lto_symtab_encoder_t encoder;
4695 struct ipa_agg_replacement_value *aggvals, *av;
4697 aggvals = ipa_get_agg_replacements_for_node (node);
4698 encoder = ob->decl_state->symtab_node_encoder;
4699 node_ref = lto_symtab_encoder_encode (encoder, node);
4700 streamer_write_uhwi (ob, node_ref);
4702 for (av = aggvals; av; av = av->next)
4703 count++;
4704 streamer_write_uhwi (ob, count);
4706 for (av = aggvals; av; av = av->next)
4708 struct bitpack_d bp;
4710 streamer_write_uhwi (ob, av->offset);
4711 streamer_write_uhwi (ob, av->index);
4712 stream_write_tree (ob, av->value, true);
4714 bp = bitpack_create (ob->main_stream);
4715 bp_pack_value (&bp, av->by_ref, 1);
4716 streamer_write_bitpack (&bp);
4719 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
4720 if (ts && vec_safe_length (ts->m_vr) > 0)
4722 count = ts->m_vr->length ();
4723 streamer_write_uhwi (ob, count);
4724 for (unsigned i = 0; i < count; ++i)
4726 struct bitpack_d bp;
4727 ipa_vr *parm_vr = &(*ts->m_vr)[i];
4728 bp = bitpack_create (ob->main_stream);
4729 bp_pack_value (&bp, parm_vr->known, 1);
4730 streamer_write_bitpack (&bp);
4731 if (parm_vr->known)
4733 streamer_write_enum (ob->main_stream, value_range_kind,
4734 VR_LAST, parm_vr->type);
4735 streamer_write_wide_int (ob, parm_vr->min);
4736 streamer_write_wide_int (ob, parm_vr->max);
4740 else
4741 streamer_write_uhwi (ob, 0);
4743 if (ts && vec_safe_length (ts->bits) > 0)
4745 count = ts->bits->length ();
4746 streamer_write_uhwi (ob, count);
4748 for (unsigned i = 0; i < count; ++i)
4750 const ipa_bits *bits_jfunc = (*ts->bits)[i];
4751 struct bitpack_d bp = bitpack_create (ob->main_stream);
4752 bp_pack_value (&bp, !!bits_jfunc, 1);
4753 streamer_write_bitpack (&bp);
4754 if (bits_jfunc)
4756 streamer_write_widest_int (ob, bits_jfunc->value);
4757 streamer_write_widest_int (ob, bits_jfunc->mask);
4761 else
4762 streamer_write_uhwi (ob, 0);
4765 /* Stream in the IPA-CP transformation info for NODE from IB: its aggregate value replacement chain and the known value ranges and bits of its parameters. */
4767 static void
4768 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
4769 data_in *data_in)
4771 struct ipa_agg_replacement_value *aggvals = NULL;
4772 unsigned int count, i;
4774 count = streamer_read_uhwi (ib);
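/* Each value read is pushed onto the front of the chain, so the list ends
   up in reverse of the streamed order; the chain is always searched in
   full, so the order does not matter.  */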
4775 for (i = 0; i < count; i++)
4777 struct ipa_agg_replacement_value *av;
4778 struct bitpack_d bp;
4780 av = ggc_alloc<ipa_agg_replacement_value> ();
4781 av->offset = streamer_read_uhwi (ib);
4782 av->index = streamer_read_uhwi (ib);
4783 av->value = stream_read_tree (ib, data_in);
4784 bp = streamer_read_bitpack (ib);
4785 av->by_ref = bp_unpack_value (&bp, 1);
4786 av->next = aggvals;
4787 aggvals = av;
4789 ipa_set_node_agg_value_chain (node, aggvals);
4791 count = streamer_read_uhwi (ib);
4792 if (count > 0)
4794 ipcp_transformation_initialize ();
4795 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
4796 vec_safe_grow_cleared (ts->m_vr, count);
4797 for (i = 0; i < count; i++)
4799 ipa_vr *parm_vr;
4800 parm_vr = &(*ts->m_vr)[i];
4801 struct bitpack_d bp;
4802 bp = streamer_read_bitpack (ib);
4803 parm_vr->known = bp_unpack_value (&bp, 1);
4804 if (parm_vr->known)
4806 parm_vr->type = streamer_read_enum (ib, value_range_kind,
4807 VR_LAST);
4808 parm_vr->min = streamer_read_wide_int (ib);
4809 parm_vr->max = streamer_read_wide_int (ib);
4813 count = streamer_read_uhwi (ib);
4814 if (count > 0)
4816 ipcp_transformation_initialize ();
4817 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
4818 vec_safe_grow_cleared (ts->bits, count);
4820 for (i = 0; i < count; i++)
4822 struct bitpack_d bp = streamer_read_bitpack (ib);
4823 bool known = bp_unpack_value (&bp, 1);
4824 if (known)
4826 ipa_bits *bits
4827 = ipa_get_ipa_bits_for_value (streamer_read_widest_int (ib),
4828 streamer_read_widest_int (ib));
4829 (*ts->bits)[i] = bits;
4835 /* Write the IPA-CP transformation summaries for all functions in the current partition. */
4837 void
4838 ipcp_write_transformation_summaries (void)
4840 struct cgraph_node *node;
4841 struct output_block *ob;
4842 unsigned int count = 0;
4843 lto_symtab_encoder_iterator lsei;
4844 lto_symtab_encoder_t encoder;
4846 ob = create_output_block (LTO_section_ipcp_transform);
4847 encoder = ob->decl_state->symtab_node_encoder;
4848 ob->symbol = NULL;
4849 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4850 lsei_next_function_in_partition (&lsei))
4852 node = lsei_cgraph_node (lsei);
4853 if (node->has_gimple_body_p ())
4854 count++;
4857 streamer_write_uhwi (ob, count);
4859 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4860 lsei_next_function_in_partition (&lsei))
4862 node = lsei_cgraph_node (lsei);
4863 if (node->has_gimple_body_p ())
4864 write_ipcp_transformation_info (ob, node);
4866 streamer_write_char_stream (ob->main_stream, 0);
4867 produce_asm (ob, NULL);
4868 destroy_output_block (ob);
4871 /* Read replacements section in file FILE_DATA of length LEN with data
4872 DATA. */
4874 static void
4875 read_replacements_section (struct lto_file_decl_data *file_data,
4876 const char *data,
4877 size_t len)
4879 const struct lto_function_header *header =
4880 (const struct lto_function_header *) data;
4881 const int cfg_offset = sizeof (struct lto_function_header);
4882 const int main_offset = cfg_offset + header->cfg_size;
4883 const int string_offset = main_offset + header->main_size;
4884 class data_in *data_in;
4885 unsigned int i;
4886 unsigned int count;
4888 lto_input_block ib_main ((const char *) data + main_offset,
4889 header->main_size, file_data->mode_table);
4891 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
4892 header->string_size, vNULL);
4893 count = streamer_read_uhwi (&ib_main);
4895 for (i = 0; i < count; i++)
4897 unsigned int index;
4898 struct cgraph_node *node;
4899 lto_symtab_encoder_t encoder;
4901 index = streamer_read_uhwi (&ib_main);
4902 encoder = file_data->symtab_node_encoder;
4903 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4904 index));
4905 gcc_assert (node->definition);
4906 read_ipcp_transformation_info (&ib_main, node, data_in);
4908 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
4909 len);
4910 lto_data_in_delete (data_in);
4913 /* Read the IPA-CP transformation summaries. */
4915 void
4916 ipcp_read_transformation_summaries (void)
4918 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4919 struct lto_file_decl_data *file_data;
4920 unsigned int j = 0;
4922 while ((file_data = file_data_vec[j++]))
4924 size_t len;
4925 const char *data
4926 = lto_get_summary_section_data (file_data, LTO_section_ipcp_transform,
4927 &len);
4928 if (data)
4929 read_replacements_section (file_data, data, len);
4933 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4934 NODE. */
4936 static void
4937 adjust_agg_replacement_values (struct cgraph_node *node,
4938 struct ipa_agg_replacement_value *aggval)
4940 struct ipa_agg_replacement_value *v;
4942 if (!node->clone.param_adjustments)
4943 return;
4945 auto_vec<int, 16> new_indices;
4946 node->clone.param_adjustments->get_updated_indices (&new_indices);
4947 for (v = aggval; v; v = v->next)
4949 gcc_checking_assert (v->index >= 0);
4951 if ((unsigned) v->index < new_indices.length ())
4952 v->index = new_indices[v->index];
4953 else
4954 /* This can happen if we know about a constant passed by reference by
4955 an argument which is never actually used for anything, let alone
4956 loading that constant. */
4957 v->index = -1;
4961 /* Dominator walker driving the ipcp modification phase. */
4963 class ipcp_modif_dom_walker : public dom_walker
4965 public:
4966 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
4967 vec<ipa_param_descriptor, va_gc> *descs,
4968 struct ipa_agg_replacement_value *av,
4969 bool *sc, bool *cc)
4970 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
4971 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
4973 virtual edge before_dom_children (basic_block);
4975 private:
4976 struct ipa_func_body_info *m_fbi;
4977 vec<ipa_param_descriptor, va_gc> *m_descriptors;
4978 struct ipa_agg_replacement_value *m_aggval;
4979 bool *m_something_changed, *m_cfg_changed;
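/* Scan basic block BB for loads from parameter aggregates for which a
   replacement value is known and substitute the constants into the
   statements, recording whether anything changed.  */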
4982 edge
4983 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
4985 gimple_stmt_iterator gsi;
4986 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4988 struct ipa_agg_replacement_value *v;
4989 gimple *stmt = gsi_stmt (gsi);
4990 tree rhs, val, t;
4991 HOST_WIDE_INT offset;
4992 poly_int64 size;
4993 int index;
4994 bool by_ref, vce;
4996 if (!gimple_assign_load_p (stmt))
4997 continue;
4998 rhs = gimple_assign_rhs1 (stmt);
4999 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5000 continue;
5002 vce = false;
5003 t = rhs;
5004 while (handled_component_p (t))
5006 /* V_C_E can do things like convert an array of integers to one
5007 bigger integer and similar things we do not handle below. */
5008 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5010 vce = true;
5011 break;
5013 t = TREE_OPERAND (t, 0);
5015 if (vce)
5016 continue;
5018 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
5019 &offset, &size, &by_ref))
5020 continue;
5021 for (v = m_aggval; v; v = v->next)
5022 if (v->index == index
5023 && v->offset == offset)
5024 break;
5025 if (!v
5026 || v->by_ref != by_ref
5027 || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v->value))),
5028 size))
5029 continue;
5031 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5032 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5034 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5035 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5036 else if (TYPE_SIZE (TREE_TYPE (rhs))
5037 == TYPE_SIZE (TREE_TYPE (v->value)))
5038 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5039 else
5041 if (dump_file)
5043 fprintf (dump_file, " const ");
5044 print_generic_expr (dump_file, v->value);
5045 fprintf (dump_file, " can't be converted to type of ");
5046 print_generic_expr (dump_file, rhs);
5047 fprintf (dump_file, "\n");
5049 continue;
5052 else
5053 val = v->value;
5055 if (dump_file && (dump_flags & TDF_DETAILS))
5057 fprintf (dump_file, "Modifying stmt:\n ");
5058 print_gimple_stmt (dump_file, stmt, 0);
5060 gimple_assign_set_rhs_from_tree (&gsi, val);
5061 update_stmt (stmt);
5063 if (dump_file && (dump_flags & TDF_DETAILS))
5065 fprintf (dump_file, "into:\n ");
5066 print_gimple_stmt (dump_file, stmt, 0);
5067 fprintf (dump_file, "\n");
5070 *m_something_changed = true;
5071 if (maybe_clean_eh_stmt (stmt)
5072 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5073 *m_cfg_changed = true;
5075 return NULL;
5078 /* Update bits info of formal parameters as described in
5079 ipcp_transformation. */
5081 static void
5082 ipcp_update_bits (struct cgraph_node *node)
5084 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5086 if (!ts || vec_safe_length (ts->bits) == 0)
5087 return;
5088 vec<ipa_bits *, va_gc> &bits = *ts->bits;
5089 unsigned count = bits.length ();
5090 if (!count)
5091 return;
5093 auto_vec<int, 16> new_indices;
5094 bool need_remapping = false;
5095 if (node->clone.param_adjustments)
5097 node->clone.param_adjustments->get_updated_indices (&new_indices);
5098 need_remapping = true;
5100 auto_vec <tree, 16> parm_decls;
5101 push_function_arg_decls (&parm_decls, node->decl);
5103 for (unsigned i = 0; i < count; ++i)
5105 tree parm;
5106 if (need_remapping)
5108 if (i >= new_indices.length ())
5109 continue;
5110 int idx = new_indices[i];
5111 if (idx < 0)
5112 continue;
5113 parm = parm_decls[idx];
5115 else
5116 parm = parm_decls[i];
5117 gcc_checking_assert (parm);
5120 if (!bits[i]
5121 || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
5122 || POINTER_TYPE_P (TREE_TYPE (parm)))
5123 || !is_gimple_reg (parm))
5124 continue;
5126 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5127 if (!ddef)
5128 continue;
5130 if (dump_file)
5132 fprintf (dump_file, "Adjusting mask for param %u to ", i);
5133 print_hex (bits[i]->mask, dump_file);
5134 fprintf (dump_file, "\n");
5137 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5139 unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
5140 signop sgn = TYPE_SIGN (TREE_TYPE (ddef));
5142 wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
5143 | wide_int::from (bits[i]->value, prec, sgn);
5144 set_nonzero_bits (ddef, nonzero_bits);
5146 else
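/* For pointers, the lowest set bit of the mask is the first bit that can
   vary, so that power of two is a provable alignment, and the known value
   bits below it give the misalignment within it.  */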
5148 unsigned tem = bits[i]->mask.to_uhwi ();
5149 unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
5150 unsigned align = tem & -tem;
5151 unsigned misalign = bitpos & (align - 1);
5153 if (align > 1)
5155 if (dump_file)
5156 fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);
5158 unsigned old_align, old_misalign;
5159 struct ptr_info_def *pi = get_ptr_info (ddef);
5160 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5162 if (old_known
5163 && old_align > align)
5165 if (dump_file)
5167 fprintf (dump_file, "But alignment was already %u.\n", old_align);
5168 if ((old_misalign & (align - 1)) != misalign)
5169 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5170 old_misalign, misalign);
5172 continue;
5175 if (old_known
5176 && ((misalign & (old_align - 1)) != old_misalign)
5177 && dump_file)
5178 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5179 old_misalign, misalign);
5181 set_ptr_info_alignment (pi, align, misalign);
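/* Return true if this recorded range excludes zero for a value of
   EXPR_TYPE: either the anti-range ~[0, 0], or, for an unsigned type, the
   range [1, maximum of the type].  */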
5187 bool
5188 ipa_vr::nonzero_p (tree expr_type) const
5190 if (type == VR_ANTI_RANGE && wi::eq_p (min, 0) && wi::eq_p (max, 0))
5191 return true;
5193 unsigned prec = TYPE_PRECISION (expr_type);
5194 return (type == VR_RANGE
5195 && TYPE_UNSIGNED (expr_type)
5196 && wi::eq_p (min, wi::one (prec))
5197 && wi::eq_p (max, wi::max_value (prec, TYPE_SIGN (expr_type))));
5200 /* Update value range of formal parameters as described in
5201 ipcp_transformation. */
5203 static void
5204 ipcp_update_vr (struct cgraph_node *node)
5206 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5207 if (!ts || vec_safe_length (ts->m_vr) == 0)
5208 return;
5209 const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
5210 unsigned count = vr.length ();
5211 if (!count)
5212 return;
5214 auto_vec<int, 16> new_indices;
5215 bool need_remapping = false;
5216 if (node->clone.param_adjustments)
5218 node->clone.param_adjustments->get_updated_indices (&new_indices);
5219 need_remapping = true;
5221 auto_vec <tree, 16> parm_decls;
5222 push_function_arg_decls (&parm_decls, node->decl);
5224 for (unsigned i = 0; i < count; ++i)
5226 tree parm;
5227 int remapped_idx;
5228 if (need_remapping)
5230 if (i >= new_indices.length ())
5231 continue;
5232 remapped_idx = new_indices[i];
5233 if (remapped_idx < 0)
5234 continue;
5236 else
5237 remapped_idx = i;
5239 parm = parm_decls[remapped_idx];
5241 gcc_checking_assert (parm);
5242 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5244 if (!ddef || !is_gimple_reg (parm))
5245 continue;
5247 if (vr[i].known
5248 && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
5250 tree type = TREE_TYPE (ddef);
5251 unsigned prec = TYPE_PRECISION (type);
5252 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5254 if (dump_file)
5256 fprintf (dump_file, "Setting value range of param %u "
5257 "(now %i) ", i, remapped_idx);
5258 fprintf (dump_file, "%s[",
5259 (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
5260 print_decs (vr[i].min, dump_file);
5261 fprintf (dump_file, ", ");
5262 print_decs (vr[i].max, dump_file);
5263 fprintf (dump_file, "]\n");
5265 set_range_info (ddef, vr[i].type,
5266 wide_int_storage::from (vr[i].min, prec,
5267 TYPE_SIGN (type)),
5268 wide_int_storage::from (vr[i].max, prec,
5269 TYPE_SIGN (type)));
5271 else if (POINTER_TYPE_P (TREE_TYPE (ddef))
5272 && vr[i].nonzero_p (TREE_TYPE (ddef)))
5274 if (dump_file)
5275 fprintf (dump_file, "Setting nonnull for %u\n", i);
5276 set_ptr_nonnull (ddef);
5282 /* IPCP transformation phase doing propagation of aggregate values. */
5284 unsigned int
5285 ipcp_transform_function (struct cgraph_node *node)
5287 vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
5288 struct ipa_func_body_info fbi;
5289 struct ipa_agg_replacement_value *aggval;
5290 int param_count;
5291 bool cfg_changed = false, something_changed = false;
5293 gcc_checking_assert (cfun);
5294 gcc_checking_assert (current_function_decl);
5296 if (dump_file)
5297 fprintf (dump_file, "Modification phase of node %s\n",
5298 node->dump_name ());
5300 ipcp_update_bits (node);
5301 ipcp_update_vr (node);
5302 aggval = ipa_get_agg_replacements_for_node (node);
5303 if (!aggval)
5304 return 0;
5305 param_count = count_formal_params (node->decl);
5306 if (param_count == 0)
5307 return 0;
5308 adjust_agg_replacement_values (node, aggval);
5309 if (dump_file)
5310 ipa_dump_agg_replacement_values (dump_file, aggval);
5312 fbi.node = node;
5313 fbi.info = NULL;
5314 fbi.bb_infos = vNULL;
5315 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5316 fbi.param_count = param_count;
5317 fbi.aa_walk_budget = PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
5319 vec_safe_grow_cleared (descriptors, param_count);
5320 ipa_populate_param_decls (node, *descriptors);
5321 calculate_dominance_info (CDI_DOMINATORS);
5322 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5323 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5325 int i;
5326 struct ipa_bb_info *bi;
5327 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5328 free_ipa_bb_info (bi);
5329 fbi.bb_infos.release ();
5330 free_dominance_info (CDI_DOMINATORS);
5332 ipcp_transformation *s = ipcp_transformation_sum->get (node);
5333 s->agg_values = NULL;
5334 s->bits = NULL;
5335 s->m_vr = NULL;
5337 vec_free (descriptors);
5339 if (!something_changed)
5340 return 0;
5342 if (cfg_changed)
5343 delete_unreachable_blocks_update_callgraph (node, false);
5345 return TODO_update_ssa_only_virtuals;
5349 /* Return true if OTHER describes the same aggregate item. */
5350 bool
5351 ipa_agg_jf_item::equal_to (const ipa_agg_jf_item &other)
5353 return offset == other.offset
5354 && operand_equal_p (value, other.value, 0);
5356 #include "gt-ipa-prop.h"