/* Interprocedural analyses.
   Copyright (C) 2005-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "calls.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-inline.h"
#include "ipa-fnsummary.h"
#include "gimple-pretty-print.h"
#include "params.h"
#include "ipa-utils.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"
/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;
/* Vector of IPA-CP transformation data for each clone.  */
vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
/* Edge summary for IPA-CP edge information.  */
ipa_edge_args_sum_t *ipa_edge_args_sum;
/* Traits for a hash table for reusing already existing ipa_bits.  */

struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
{
  typedef ipa_bits *value_type;
  typedef ipa_bits *compare_type;
  static hashval_t
  hash (const ipa_bits *p)
    {
      hashval_t t = (hashval_t) p->value.to_shwi ();
      return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
    }
  static bool
  equal (const ipa_bits *a, const ipa_bits *b)
    {
      return a->value == b->value && a->mask == b->mask;
    }
  static void
  mark_empty (ipa_bits *&p)
    {
      p = NULL;
    }
  static bool
  is_empty (const ipa_bits *p)
    {
      return p == NULL;
    }
  static bool
  is_deleted (const ipa_bits *p)
    {
      return p == reinterpret_cast<const ipa_bits *> (1);
    }
  static void
  mark_deleted (ipa_bits *&p)
    {
      p = reinterpret_cast<ipa_bits *> (1);
    }
};

/* Hash table to avoid repeated allocations of equal ipa_bits.  */
static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
/* Traits for a hash table for reusing value_ranges used for IPA.  Note that
   the equiv bitmap is not hashed and is expected to be NULL.  */

struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range *>
{
  typedef value_range *value_type;
  typedef value_range *compare_type;
  static hashval_t
  hash (const value_range *p)
    {
      gcc_checking_assert (!p->equiv);
      hashval_t t = (hashval_t) p->type;
      t = iterative_hash_expr (p->min, t);
      return iterative_hash_expr (p->max, t);
    }
  static bool
  equal (const value_range *a, const value_range *b)
    {
      return a->type == b->type && a->min == b->min && a->max == b->max;
    }
  static void
  mark_empty (value_range *&p)
    {
      p = NULL;
    }
  static bool
  is_empty (const value_range *p)
    {
      return p == NULL;
    }
  static bool
  is_deleted (const value_range *p)
    {
      return p == reinterpret_cast<const value_range *> (1);
    }
  static void
  mark_deleted (value_range *&p)
    {
      p = reinterpret_cast<value_range *> (1);
    }
};

/* Hash table to avoid repeated allocations of equal value_ranges.  */
static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
/* Holders of ipa cgraph hooks: */
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
}
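
/* For instance (an illustrative, hand-written example rather than one from
   the GCC testsuite), analysis is forbidden for a function such as

     __attribute__ ((optimize ("O0"))) int f (int x) { return x; }

   because opt_for_fn (f, optimize) is 0 for it.  */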
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
			    tree ptree)
{
  int i, count;

  count = vec_safe_length (descriptors);
  for (i = 0; i < count; i++)
    if ((*descriptors)[i].decl_or_type == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor, va_gc> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl_or_type = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
							     true);
      param_num++;
    }
}
/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;

  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}
/* Dump the Ith formal parameter of the function corresponding to INFO to
   FILE.  Note there is no setter function as the descriptor array is built
   just once using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if ((*info->descriptors)[i].decl_or_type)
    {
      fprintf (file, " ");
      print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
    }
}
/* If necessary, allocate vector of parameter descriptors in info of NODE.
   Return true if they were allocated, false if not.  */

static bool
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors && param_count)
    {
      vec_safe_grow_cleared (info->descriptors, param_count);
      return true;
    }
  else
    return false;
}
/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors
      && ipa_alloc_node_params (node, count_formal_params (node->decl)))
    ipa_populate_param_decls (node, *info->descriptors);
}
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f, jump_func->value.pass_through.operand);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value);
		}
	      fprintf (f, "\n");
	    }
	}

      struct ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, "         Context: ");
	  ctx->dump (dump_file);
	}

      if (jump_func->bits)
	{
	  fprintf (f, "         value: ");
	  print_hex (jump_func->bits->value, f);
	  fprintf (f, ", mask: ");
	  print_hex (jump_func->bits->mask, f);
	  fprintf (f, "\n");
	}
      else
	fprintf (f, "         Unknown bits\n");

      if (jump_func->m_vr)
	{
	  fprintf (f, "         VR  ");
	  fprintf (f, "%s[",
		   (jump_func->m_vr->type == VR_ANTI_RANGE) ? "~" : "");
	  print_decs (wi::to_wide (jump_func->m_vr->min), f);
	  fprintf (f, ", ");
	  print_decs (wi::to_wide (jump_func->m_vr->max), f);
	  fprintf (f, "]\n");
	}
      else
	fprintf (f, "         Unknown VR\n");
    }
}
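
/* The dump produced above looks roughly as follows (an illustrative,
   hand-written sample rather than captured compiler output):

       param 0: CONST: 4
         Unknown bits
         Unknown VR
       param 1: PASS THROUGH: 0, op nop_expr, agg_preserved
         value: 0x0, mask: 0xfffffffc
         VR  [1, 100]  */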
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s:\n", node->dump_name ());
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, "    callsite  %s -> %s : \n",
	       node->dump_name (),
	       cs->callee->dump_name ());
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by_value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}
/* Set JFUNC to be a jump function known to convey no useful information.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
  jfunc->bits = NULL;
  jfunc->m_vr = NULL;
}
/* Set DST to be a copy of another jump function SRC (to be used by jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}
/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}
/* Set JFUNC to be a unary pass through jump function.  */

static void
ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}
/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}
/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is call the constructors of ancestor
   sub-objects that have them.

   2) Then the VMT pointers of this object and all its ancestors are set to
   new values corresponding to the type of the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_ref_base_and_extent to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
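
/* An illustrative, hand-written C++ sketch (not from the GCC testsuite) of
   the constructor shape assumed above:

     struct A { virtual void f (); };
     struct B : A { B (); virtual void f (); };

     B::B ()   // is compiled roughly as:
     {
       // 1) call the ancestor constructor A::A on this;
       // 2) store &vtable-for-B into this->vptr (the section 2 stores);
       f ();   // 3) only now does user code run and call virtual functions.
     }  */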
/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type may change
   between the beginning of the function and the point where CALL is invoked.

   Generally functions are not allowed to change the type of such instances,
   but they may call destructors.  We assume that methods cannot destroy the
   THIS pointer.  Also, as a special case, constructors and destructors may
   change the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that would require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inlined cdtor is actually working on ARG, but we don't have
     an easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know the type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}
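
/* For illustration (a hand-written sketch): given

     void f (A *a) { a->foo (); }

   the dynamic type of *a may not legally change between the beginning of f
   and the call, so param_type_may_change_p returns false for it, whereas in
   a destructor such as

     B::~B () { this->foo (); }

   the type of *this does change as the base-class destructors run, so the
   function returns true for the THIS pointer.  */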
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gcall *call, struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
						call, jfunc, offset);
}
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
						call, jfunc, 0);
}
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}
/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct ipa_func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but that does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  if (TREE_READONLY (base))
    return true;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
			    vec<ipa_param_descriptor, va_gc> *descriptors,
			    gimple *stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
/* Return true if we can prove that OP is a memory reference loading
   data from an aggregate passed as a parameter.

   The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it
   returns false if it cannot prove that the value has not been modified
   before the load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will
   return true even if it cannot prove the value has not been modified, in
   that case it will store false to *GUARANTEED_UNMODIFIED, otherwise it will
   store true there.

   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor, va_gc> *descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
			bool *by_ref_p, bool *guaranteed_unmodified)
{
  int index;
  HOST_WIDE_INT size;
  bool reverse;
  tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);

  if (!base)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  if (guaranteed_unmodified)
	    *guaranteed_unmodified = true;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	   void (*<T2e4>) (struct S *) D.1867;
	   struct S * p.1;

	   <bb 2>:
	   p.1_1 = p;
	   D.1867_2 = p.1_1->f;
	   D.1867_2 ();
	   gdp = &p;
	 */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0)
    {
      bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
      if (!data_preserved && !guaranteed_unmodified)
	return false;

      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      if (guaranteed_unmodified)
	*guaranteed_unmodified = data_preserved;
      return true;
    }
  return false;
}
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
	int a.0;

	a.0_2 = a;
	bar (a.0_2);
      }

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
	int D.2064;

	D.2064_4 = a.1(D) + 4;
	bar (D.2064_4);
      }

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
	int a.0;
	int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);
      }

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);
     }

   INFO is the structure describing individual parameters, accessed at
   different stages of IPA optimizations.  PARMS_AINFO contains the
   information that is only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      switch (gimple_assign_rhs_class (stmt))
	{
	case GIMPLE_BINARY_RHS:
	  {
	    tree op2 = gimple_assign_rhs2 (stmt);
	    if (!is_gimple_ip_invariant (op2)
		|| ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
		     != tcc_comparison)
		    && !useless_type_conversion_p (TREE_TYPE (name),
						   TREE_TYPE (op1))))
	      return;

	    ipa_set_jf_arith_pass_through (jfunc, index, op2,
					   gimple_assign_rhs_code (stmt));
	    break;
	  }
	case GIMPLE_SINGLE_RHS:
	  {
	    bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
						       tc_ssa);
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	    break;
	  }
	case GIMPLE_UNARY_RHS:
	  if (is_gimple_assign (stmt)
	      && gimple_assign_rhs_class (stmt) == GIMPLE_UNARY_RHS
	      && ! CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
	    ipa_set_jf_unary_pass_through (jfunc, index,
					   gimple_assign_rhs_code (stmt));
	default:;
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
  offset_int mem_offset;
  if (!base
      || TREE_CODE (base) != MEM_REF
      || !mem_ref_offset (base).is_constant (&mem_offset))
    return;
  offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);

  offset_int mem_offset;
  if (!expr
      || TREE_CODE (expr) != MEM_REF
      || !mem_ref_offset (expr).is_constant (&mem_offset))
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
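
/* For reference, under the common Itanium C++ ABI a pointer to member
   function is lowered to a record of exactly this shape (an illustrative
   sketch; field names are ABI-specific):

     struct pmf
     {
       void (*__pfn) (struct S *);  // pointer to METHOD_TYPE in GIMPLE
       long __delta;                // integral this-pointer adjustment
     };
*/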
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}
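
/* E.g. given the GIMPLE fragment (an illustrative example)

     b_2 = 5;
     c_3 = b_2;

   get_ssa_def_if_simple_copy (c_3) follows the single-copy chain and
   returns the constant 5.  */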
/* Simple linked list, describing known contents of an aggregate before a
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
				HOST_WIDE_INT lhs_offset,
				HOST_WIDE_INT lhs_size,
				bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
	return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	/* We already know this value is subsequently overwritten with
	   something else.  */
	*already_there = true;
      else
	/* Otherwise this is a partial overlap which we cannot
	   represent.  */
	return NULL;
    }
  return p;
}
/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET, and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int const_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
	{
	  struct ipa_agg_jf_item item;
	  item.offset = list->offset - arg_offset;
	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	  item.value = unshare_expr_without_location (list->constant);
	  jfunc->agg.items->quick_push (item);
	}
      list = list->next;
    }
}
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */

static void
determine_locally_known_aggregate_parts (gcall *call, tree arg,
					 tree arg_type,
					 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  if (PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS) == 0)
    return;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  bool reverse;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
						  &arg_size, &reverse);
	  if (!arg_base)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      bool reverse;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
					      &arg_size, &reverse);
      if (!arg_base)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
     structures describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple *stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size;
      tree lhs, rhs, lhs_base;
      bool reverse;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
	continue;
      if (!gimple_assign_single_p (stmt))
	break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
	  || TREE_CODE (lhs) == BIT_FIELD_REF
	  || contains_bitfld_component_ref_p (lhs))
	break;

      lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset,
					      &lhs_size, &reverse);
      if (!lhs_base)
	break;

      if (check_ref)
	{
	  if (TREE_CODE (lhs_base) != MEM_REF
	      || TREE_OPERAND (lhs_base, 0) != arg_base
	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	    break;
	}
      else if (lhs_base != arg_base)
	{
	  if (DECL_P (lhs_base))
	    continue;
	  else
	    break;
	}

      bool already_there = false;
      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
					  &already_there);
      if (!p)
	break;
      if (already_there)
	continue;

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
	{
	  n->constant = rhs;
	  const_count++;
	}
      else
	n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}
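
/* As an illustration (a hand-written fragment, not from the testsuite), for
   a call preceded by

     s.i = 1;
     s.f = 2.0;
     bar (&s);

   the backward walk records both stores and the resulting jump function
   describes an aggregate passed by reference whose parts at the offsets of
   s.i and s.f are the constants 1 and 2.0.  */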
/* Return the Ith param type of callee associated with call graph
   edge E.  */

tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
/* Return ipa_bits with VALUE and MASK values, which can be either a newly
   allocated structure or a previously existing one shared with other jump
   functions and/or transformation summaries.  */

ipa_bits *
ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
{
  ipa_bits tmp;
  tmp.value = value;
  tmp.mask = mask;

  ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
  if (*slot)
    return *slot;

  ipa_bits *res = ggc_alloc<ipa_bits> ();
  res->value = value;
  res->mask = mask;
  *slot = res;

  return res;
}
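
/* For example (an illustrative sketch, not part of the pass itself): two
   arguments with the same known low bits end up sharing one GC-allocated
   record:

     ipa_bits *a = ipa_get_ipa_bits_for_value (0, ~(widest_int) 7);
     ipa_bits *b = ipa_get_ipa_bits_for_value (0, ~(widest_int) 7);
     gcc_assert (a == b);  // same hash table slot, no second allocation
*/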
/* Assign to JF a pointer to an ipa_bits structure with VALUE and MASK.  Use
   the hash table in order to avoid creating multiple identical ipa_bits
   structures.  */

static void
ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
		    const widest_int &mask)
{
  jf->bits = ipa_get_ipa_bits_for_value (value, mask);
}
/* Return a pointer to a value_range just like *TMP, but either find it in
   ipa_vr_hash_table or allocate it in GC memory.  TMP->equiv must be NULL.  */

static value_range *
ipa_get_value_range (value_range *tmp)
{
  value_range **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
  if (*slot)
    return *slot;

  value_range *vr = ggc_alloc<value_range> ();
  *vr = *tmp;
  *slot = vr;

  return vr;
}
/* Return a pointer to a value range consisting of TYPE, MIN, MAX and an empty
   equiv set.  Use the hash table in order to avoid creating multiple identical
   copies of value_ranges.  */

static value_range *
ipa_get_value_range (enum value_range_type type, tree min, tree max)
{
  value_range tmp;
  tmp.type = type;
  tmp.min = min;
  tmp.max = max;
  tmp.equiv = NULL;
  return ipa_get_value_range (&tmp);
}
/* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
   a NULL equiv bitmap.  Use the hash table in order to avoid creating multiple
   identical value_range structures.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_type type,
		  tree min, tree max)
{
  jf->m_vr = ipa_get_value_range (type, min, max);
}
/* Assign to JF a pointer to a value_range just like TMP but either fetch a
   copy from ipa_vr_hash_table or allocate a new one in GC memory.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, value_range *tmp)
{
  jf->m_vr = ipa_get_value_range (tmp);
}
1820 /* Compute jump function for all arguments of callsite CS and insert the
1821 information in the jump_functions array in the ipa_edge_args corresponding
1822 to this callsite. */
1824 static void
1825 ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
1826 struct cgraph_edge *cs)
1828 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1829 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1830 gcall *call = cs->call_stmt;
1831 int n, arg_num = gimple_call_num_args (call);
1832 bool useful_context = false;
1834 if (arg_num == 0 || args->jump_functions)
1835 return;
1836 vec_safe_grow_cleared (args->jump_functions, arg_num);
1837 if (flag_devirtualize)
1838 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
1840 if (gimple_call_internal_p (call))
1841 return;
1842 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1843 return;
1845 for (n = 0; n < arg_num; n++)
1847 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1848 tree arg = gimple_call_arg (call, n);
1849 tree param_type = ipa_get_callee_param_type (cs, n);
1850 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
1852 tree instance;
1853 struct ipa_polymorphic_call_context context (cs->caller->decl,
1854 arg, cs->call_stmt,
1855 &instance);
1856 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
1857 *ipa_get_ith_polymorhic_call_context (args, n) = context;
1858 if (!context.useless_p ())
1859 useful_context = true;
1862 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1864 bool addr_nonzero = false;
1865 bool strict_overflow = false;
1867 if (TREE_CODE (arg) == SSA_NAME
1868 && param_type
1869 && get_ptr_nonnull (arg))
1870 addr_nonzero = true;
1871 else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
1872 addr_nonzero = true;
1874 if (addr_nonzero)
1876 tree z = build_int_cst (TREE_TYPE (arg), 0);
1877 ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
1879 else
1880 gcc_assert (!jfunc->m_vr);
1882 else
1884 wide_int min, max;
1885 value_range_type type;
1886 if (TREE_CODE (arg) == SSA_NAME
1887 && param_type
1888 && (type = get_range_info (arg, &min, &max))
1889 && (type == VR_RANGE || type == VR_ANTI_RANGE))
1891 value_range tmpvr,resvr;
1893 tmpvr.type = type;
1894 tmpvr.min = wide_int_to_tree (TREE_TYPE (arg), min);
1895 tmpvr.max = wide_int_to_tree (TREE_TYPE (arg), max);
1896 tmpvr.equiv = NULL;
1897 memset (&resvr, 0, sizeof (resvr));
1898 extract_range_from_unary_expr (&resvr, NOP_EXPR, param_type,
1899 &tmpvr, TREE_TYPE (arg));
1900 if (resvr.type == VR_RANGE || resvr.type == VR_ANTI_RANGE)
1901 ipa_set_jfunc_vr (jfunc, &resvr);
1902 else
1903 gcc_assert (!jfunc->m_vr);
1905 else
1906 gcc_assert (!jfunc->m_vr);
1909 if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
1910 && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
1912 if (TREE_CODE (arg) == SSA_NAME)
1913 ipa_set_jfunc_bits (jfunc, 0,
1914 widest_int::from (get_nonzero_bits (arg),
1915 TYPE_SIGN (TREE_TYPE (arg))));
1916 else
1917 ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
1919 else if (POINTER_TYPE_P (TREE_TYPE (arg)))
1921 unsigned HOST_WIDE_INT bitpos;
1922 unsigned align;
1924 get_pointer_alignment_1 (arg, &align, &bitpos);
1925 widest_int mask = wi::bit_and_not
1926 (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
1927 align / BITS_PER_UNIT - 1);
1928 widest_int value = bitpos / BITS_PER_UNIT;
1929 ipa_set_jfunc_bits (jfunc, value, mask);
1931 else
1932 gcc_assert (!jfunc->bits);
1934 if (is_gimple_ip_invariant (arg)
1935 || (VAR_P (arg)
1936 && is_global_var (arg)
1937 && TREE_READONLY (arg)))
1938 ipa_set_jf_constant (jfunc, arg, cs);
1939 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1940 && TREE_CODE (arg) == PARM_DECL)
1942 int index = ipa_get_param_decl_index (info, arg);
1944 gcc_assert (index >= 0);
1945 /* Aggregate passed by value, check for pass-through, otherwise we
1946 will attempt to fill in aggregate contents later in this
1947 loop. */
1948 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1950 ipa_set_jf_simple_pass_through (jfunc, index, false);
1951 continue;
1954 else if (TREE_CODE (arg) == SSA_NAME)
1956 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1958 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1959 if (index >= 0)
1961 bool agg_p;
1962 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1963 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1966 else
1968 gimple *stmt = SSA_NAME_DEF_STMT (arg);
1969 if (is_gimple_assign (stmt))
1970 compute_complex_assign_jump_func (fbi, info, jfunc,
1971 call, stmt, arg, param_type);
1972 else if (gimple_code (stmt) == GIMPLE_PHI)
1973 compute_complex_ancestor_jump_func (fbi, info, jfunc,
1974 call,
1975 as_a <gphi *> (stmt));
1979 /* If ARG is a pointer, we cannot use its type to determine the type of the
1980 aggregate passed (because type conversions are ignored in gimple). Usually
1981 we can safely get the type from the function declaration, but in case of K&R
1982 prototypes or variadic functions we can try our luck with the type of the
1983 pointer passed. TODO: Since we look for actual initialization of the memory
1984 object, we might do better to work out the type based on the memory stores we find. */
1985 if (!param_type)
1986 param_type = TREE_TYPE (arg);
1988 if ((jfunc->type != IPA_JF_PASS_THROUGH
1989 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1990 && (jfunc->type != IPA_JF_ANCESTOR
1991 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1992 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1993 || POINTER_TYPE_P (param_type)))
1994 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
1996 if (!useful_context)
1997 vec_free (args->polymorphic_call_contexts);
2000 /* Compute jump functions for all edges - both direct and indirect - outgoing
2001 from BB. */
2003 static void
2004 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
2006 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2007 int i;
2008 struct cgraph_edge *cs;
2010 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
2012 struct cgraph_node *callee = cs->callee;
2014 if (callee)
2016 callee = callee->ultimate_alias_target ();
2017 /* We do not need to bother analyzing calls to unknown functions
2018 unless they may become known during lto/whopr. */
2019 if (!callee->definition && !flag_lto)
2020 continue;
2022 ipa_compute_jump_functions_for_edge (fbi, cs);
2026 /* If STMT looks like a statement loading a value from a member pointer formal
2027 parameter, return that parameter and store the offset of the field to
2028 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2029 might be clobbered). If USE_DELTA, then we look for a use of the delta
2030 field rather than the pfn. */
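/* For instance, both of the following statements load the pfn field of a
member-pointer PARM_DECL F and would be matched (cf. the example dump
quoted further below):
f$__pfn_24 = f.__pfn;
f$__pfn_24 = MEM[(struct *)&f + 4B];  */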
2032 static tree
2033 ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
2034 HOST_WIDE_INT *offset_p)
2036 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2038 if (!gimple_assign_single_p (stmt))
2039 return NULL_TREE;
2041 rhs = gimple_assign_rhs1 (stmt);
2042 if (TREE_CODE (rhs) == COMPONENT_REF)
2044 ref_field = TREE_OPERAND (rhs, 1);
2045 rhs = TREE_OPERAND (rhs, 0);
2047 else
2048 ref_field = NULL_TREE;
2049 if (TREE_CODE (rhs) != MEM_REF)
2050 return NULL_TREE;
2051 rec = TREE_OPERAND (rhs, 0);
2052 if (TREE_CODE (rec) != ADDR_EXPR)
2053 return NULL_TREE;
2054 rec = TREE_OPERAND (rec, 0);
2055 if (TREE_CODE (rec) != PARM_DECL
2056 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2057 return NULL_TREE;
2058 ref_offset = TREE_OPERAND (rhs, 1);
2060 if (use_delta)
2061 fld = delta_field;
2062 else
2063 fld = ptr_field;
2064 if (offset_p)
2065 *offset_p = int_bit_position (fld);
2067 if (ref_field)
2069 if (integer_nonzerop (ref_offset))
2070 return NULL_TREE;
2071 return ref_field == fld ? rec : NULL_TREE;
2073 else
2074 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2075 : NULL_TREE;
2078 /* Returns true iff T is an SSA_NAME defined by a statement. */
2080 static bool
2081 ipa_is_ssa_with_stmt_def (tree t)
2083 if (TREE_CODE (t) == SSA_NAME
2084 && !SSA_NAME_IS_DEFAULT_DEF (t))
2085 return true;
2086 else
2087 return false;
2090 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2091 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2092 indirect call graph edge. */
2094 static struct cgraph_edge *
2095 ipa_note_param_call (struct cgraph_node *node, int param_index,
2096 gcall *stmt)
2098 struct cgraph_edge *cs;
2100 cs = node->get_edge (stmt);
2101 cs->indirect_info->param_index = param_index;
2102 cs->indirect_info->agg_contents = 0;
2103 cs->indirect_info->member_ptr = 0;
2104 cs->indirect_info->guaranteed_unmodified = 0;
2105 return cs;
2108 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
2109 (described by INFO). PARMS_AINFO is a pointer to a vector containing
2110 intermediate information about each formal parameter. Currently it checks
2111 whether the call calls a pointer that is a formal parameter and if so, the
2112 parameter is marked with the called flag and an indirect call graph edge
2113 describing the call is created. This is very simple for ordinary pointers
2114 represented in SSA but not-so-nice when it comes to member pointers. The
2115 ugly part of this function does nothing more than trying to match the
2116 pattern of such a call. An example of such a pattern is the gimple dump
2117 below, the call is on the last line:
2119 <bb 2>:
2120 f$__delta_5 = f.__delta;
2121 f$__pfn_24 = f.__pfn;
2123 or
2124 <bb 2>:
2125 f$__delta_5 = MEM[(struct *)&f];
2126 f$__pfn_24 = MEM[(struct *)&f + 4B];
2128 and a few lines below:
2130 <bb 5>
2131 D.2496_3 = (int) f$__pfn_24;
2132 D.2497_4 = D.2496_3 & 1;
2133 if (D.2497_4 != 0)
2134 goto <bb 3>;
2135 else
2136 goto <bb 4>;
2138 <bb 6>:
2139 D.2500_7 = (unsigned int) f$__delta_5;
2140 D.2501_8 = &S + D.2500_7;
2141 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2142 D.2503_10 = *D.2502_9;
2143 D.2504_12 = f$__pfn_24 + -1;
2144 D.2505_13 = (unsigned int) D.2504_12;
2145 D.2506_14 = D.2503_10 + D.2505_13;
2146 D.2507_15 = *D.2506_14;
2147 iftmp.11_16 = (String:: *) D.2507_15;
2149 <bb 7>:
2150 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2151 D.2500_19 = (unsigned int) f$__delta_5;
2152 D.2508_20 = &S + D.2500_19;
2153 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2155 Such patterns are results of simple calls to a member pointer:
2157 int doprinting (int (MyString::* f)(int) const)
2159 MyString S ("somestring");
2161 return (S.*f)(4);
2164 Moreover, the function also looks for called pointers loaded from aggregates
2165 passed by value or reference. */
2167 static void
2168 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
2169 tree target)
2171 struct ipa_node_params *info = fbi->info;
2172 HOST_WIDE_INT offset;
2173 bool by_ref;
2175 if (SSA_NAME_IS_DEFAULT_DEF (target))
2177 tree var = SSA_NAME_VAR (target);
2178 int index = ipa_get_param_decl_index (info, var);
2179 if (index >= 0)
2180 ipa_note_param_call (fbi->node, index, call);
2181 return;
2184 int index;
2185 gimple *def = SSA_NAME_DEF_STMT (target);
2186 bool guaranteed_unmodified;
2187 if (gimple_assign_single_p (def)
2188 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2189 gimple_assign_rhs1 (def), &index, &offset,
2190 NULL, &by_ref, &guaranteed_unmodified))
2192 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2193 cs->indirect_info->offset = offset;
2194 cs->indirect_info->agg_contents = 1;
2195 cs->indirect_info->by_ref = by_ref;
2196 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
2197 return;
2200 /* Now we need to try to match the complex pattern of calling a member
2201 pointer. */
2202 if (gimple_code (def) != GIMPLE_PHI
2203 || gimple_phi_num_args (def) != 2
2204 || !POINTER_TYPE_P (TREE_TYPE (target))
2205 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2206 return;
2208 /* First, we need to check whether one of these is a load from a member
2209 pointer that is a parameter to this function. */
2210 tree n1 = PHI_ARG_DEF (def, 0);
2211 tree n2 = PHI_ARG_DEF (def, 1);
2212 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2213 return;
2214 gimple *d1 = SSA_NAME_DEF_STMT (n1);
2215 gimple *d2 = SSA_NAME_DEF_STMT (n2);
2217 tree rec;
2218 basic_block bb, virt_bb;
2219 basic_block join = gimple_bb (def);
2220 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2222 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2223 return;
2225 bb = EDGE_PRED (join, 0)->src;
2226 virt_bb = gimple_bb (d2);
2228 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2230 bb = EDGE_PRED (join, 1)->src;
2231 virt_bb = gimple_bb (d1);
2233 else
2234 return;
2236 /* Second, we need to check that the basic blocks are laid out in the way
2237 corresponding to the pattern. */
2239 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2240 || single_pred (virt_bb) != bb
2241 || single_succ (virt_bb) != join)
2242 return;
2244 /* Third, let's see that the branching is done depending on the least
2245 significant bit of the pfn. */
2247 gimple *branch = last_stmt (bb);
2248 if (!branch || gimple_code (branch) != GIMPLE_COND)
2249 return;
2251 if ((gimple_cond_code (branch) != NE_EXPR
2252 && gimple_cond_code (branch) != EQ_EXPR)
2253 || !integer_zerop (gimple_cond_rhs (branch)))
2254 return;
2256 tree cond = gimple_cond_lhs (branch);
2257 if (!ipa_is_ssa_with_stmt_def (cond))
2258 return;
2260 def = SSA_NAME_DEF_STMT (cond);
2261 if (!is_gimple_assign (def)
2262 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2263 || !integer_onep (gimple_assign_rhs2 (def)))
2264 return;
2266 cond = gimple_assign_rhs1 (def);
2267 if (!ipa_is_ssa_with_stmt_def (cond))
2268 return;
2270 def = SSA_NAME_DEF_STMT (cond);
2272 if (is_gimple_assign (def)
2273 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2275 cond = gimple_assign_rhs1 (def);
2276 if (!ipa_is_ssa_with_stmt_def (cond))
2277 return;
2278 def = SSA_NAME_DEF_STMT (cond);
2281 tree rec2;
2282 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2283 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2284 == ptrmemfunc_vbit_in_delta),
2285 NULL);
2286 if (rec != rec2)
2287 return;
2289 index = ipa_get_param_decl_index (info, rec);
2290 if (index >= 0
2291 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2293 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2294 cs->indirect_info->offset = offset;
2295 cs->indirect_info->agg_contents = 1;
2296 cs->indirect_info->member_ptr = 1;
2297 cs->indirect_info->guaranteed_unmodified = 1;
2300 return;
2303 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2304 object referenced in the expression is a formal parameter of the caller
2305 FBI->node (described by FBI->info), create a call note for the
2306 statement. */
2308 static void
2309 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2310 gcall *call, tree target)
2312 tree obj = OBJ_TYPE_REF_OBJECT (target);
2313 int index;
2314 HOST_WIDE_INT anc_offset;
2316 if (!flag_devirtualize)
2317 return;
2319 if (TREE_CODE (obj) != SSA_NAME)
2320 return;
2322 struct ipa_node_params *info = fbi->info;
2323 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2325 struct ipa_jump_func jfunc;
2326 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2327 return;
2329 anc_offset = 0;
2330 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2331 gcc_assert (index >= 0);
2332 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2333 call, &jfunc))
2334 return;
2336 else
2338 struct ipa_jump_func jfunc;
2339 gimple *stmt = SSA_NAME_DEF_STMT (obj);
2340 tree expr;
2342 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2343 if (!expr)
2344 return;
2345 index = ipa_get_param_decl_index (info,
2346 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2347 gcc_assert (index >= 0);
2348 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2349 call, &jfunc, anc_offset))
2350 return;
2353 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2354 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2355 ii->offset = anc_offset;
2356 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2357 ii->otr_type = obj_type_ref_class (target);
2358 ii->polymorphic = 1;
2361 /* Analyze the call statement CALL to determine whether and how it utilizes
2362 formal parameters of the caller (described by INFO). PARMS_AINFO is a pointer
2363 to a vector containing intermediate information about each formal parameter. */
2365 static void
2366 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2368 tree target = gimple_call_fn (call);
2370 if (!target
2371 || (TREE_CODE (target) != SSA_NAME
2372 && !virtual_method_call_p (target)))
2373 return;
2375 struct cgraph_edge *cs = fbi->node->get_edge (call);
2376 /* If we previously turned the call into a direct call, there is
2377 no need to analyze. */
2378 if (cs && !cs->indirect_unknown_callee)
2379 return;
2381 if (cs->indirect_info->polymorphic && flag_devirtualize)
2383 tree instance;
2384 tree target = gimple_call_fn (call);
2385 ipa_polymorphic_call_context context (current_function_decl,
2386 target, call, &instance);
2388 gcc_checking_assert (cs->indirect_info->otr_type
2389 == obj_type_ref_class (target));
2390 gcc_checking_assert (cs->indirect_info->otr_token
2391 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2393 cs->indirect_info->vptr_changed
2394 = !context.get_dynamic_type (instance,
2395 OBJ_TYPE_REF_OBJECT (target),
2396 obj_type_ref_class (target), call);
2397 cs->indirect_info->context = context;
2400 if (TREE_CODE (target) == SSA_NAME)
2401 ipa_analyze_indirect_call_uses (fbi, call, target);
2402 else if (virtual_method_call_p (target))
2403 ipa_analyze_virtual_call_uses (fbi, call, target);
2407 /* Analyze the call statement STMT with respect to formal parameters (described
2408 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2409 formal parameters are called. */
2411 static void
2412 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2414 if (is_gimple_call (stmt))
2415 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2418 /* Callback of walk_stmt_load_store_addr_ops, used for loads, stores and
2419 address operands alike. If OP is a parameter declaration, mark it as used
2420 in the info structure passed in DATA. */
2422 static bool
2423 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2425 struct ipa_node_params *info = (struct ipa_node_params *) data;
2427 op = get_base_address (op);
2428 if (op
2429 && TREE_CODE (op) == PARM_DECL)
2431 int index = ipa_get_param_decl_index (info, op);
2432 gcc_assert (index >= 0);
2433 ipa_set_param_used (info, index, true);
2436 return false;
2439 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2440 the findings in various structures of the associated ipa_node_params
2441 structure, such as parameter flags, notes etc. FBI holds various data about
2442 the function being analyzed. */
2444 static void
2445 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2447 gimple_stmt_iterator gsi;
2448 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2450 gimple *stmt = gsi_stmt (gsi);
2452 if (is_gimple_debug (stmt))
2453 continue;
2455 ipa_analyze_stmt_uses (fbi, stmt);
2456 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2457 visit_ref_for_mod_analysis,
2458 visit_ref_for_mod_analysis,
2459 visit_ref_for_mod_analysis);
2461 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2462 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2463 visit_ref_for_mod_analysis,
2464 visit_ref_for_mod_analysis,
2465 visit_ref_for_mod_analysis);
2468 /* Calculate controlled uses of parameters of NODE. */
2470 static void
2471 ipa_analyze_controlled_uses (struct cgraph_node *node)
2473 struct ipa_node_params *info = IPA_NODE_REF (node);
2475 for (int i = 0; i < ipa_get_param_count (info); i++)
2477 tree parm = ipa_get_param (info, i);
2478 int controlled_uses = 0;
2480 /* For SSA regs see if parameter is used. For non-SSA we compute
2481 the flag during modification analysis. */
2482 if (is_gimple_reg (parm))
2484 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2485 parm);
2486 if (ddef && !has_zero_uses (ddef))
2488 imm_use_iterator imm_iter;
2489 use_operand_p use_p;
2491 ipa_set_param_used (info, i, true);
2492 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2493 if (!is_gimple_call (USE_STMT (use_p)))
2495 if (!is_gimple_debug (USE_STMT (use_p)))
2497 controlled_uses = IPA_UNDESCRIBED_USE;
2498 break;
2501 else
2502 controlled_uses++;
2504 else
2505 controlled_uses = 0;
2507 else
2508 controlled_uses = IPA_UNDESCRIBED_USE;
2509 ipa_set_controlled_uses (info, i, controlled_uses);
2513 /* Free stuff in BI. */
2515 static void
2516 free_ipa_bb_info (struct ipa_bb_info *bi)
2518 bi->cg_edges.release ();
2519 bi->param_aa_statuses.release ();
2522 /* Dominator walker driving the analysis. */
2524 class analysis_dom_walker : public dom_walker
2526 public:
2527 analysis_dom_walker (struct ipa_func_body_info *fbi)
2528 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2530 virtual edge before_dom_children (basic_block);
2532 private:
2533 struct ipa_func_body_info *m_fbi;
2536 edge
2537 analysis_dom_walker::before_dom_children (basic_block bb)
2539 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2540 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2541 return NULL;
2544 /* Release body info FBI. */
2546 void
2547 ipa_release_body_info (struct ipa_func_body_info *fbi)
2549 int i;
2550 struct ipa_bb_info *bi;
2552 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2553 free_ipa_bb_info (bi);
2554 fbi->bb_infos.release ();
2557 /* Initialize the array describing properties of formal parameters
2558 of NODE, analyze their uses and compute jump functions associated
2559 with actual arguments of calls from within NODE. */
2561 void
2562 ipa_analyze_node (struct cgraph_node *node)
2564 struct ipa_func_body_info fbi;
2565 struct ipa_node_params *info;
2567 ipa_check_create_node_params ();
2568 ipa_check_create_edge_args ();
2569 info = IPA_NODE_REF (node);
2571 if (info->analysis_done)
2572 return;
2573 info->analysis_done = 1;
2575 if (ipa_func_spec_opts_forbid_analysis_p (node))
2577 for (int i = 0; i < ipa_get_param_count (info); i++)
2579 ipa_set_param_used (info, i, true);
2580 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2582 return;
2585 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2586 push_cfun (func);
2587 calculate_dominance_info (CDI_DOMINATORS);
2588 ipa_initialize_node_params (node);
2589 ipa_analyze_controlled_uses (node);
2591 fbi.node = node;
2592 fbi.info = IPA_NODE_REF (node);
2593 fbi.bb_infos = vNULL;
2594 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2595 fbi.param_count = ipa_get_param_count (info);
2596 fbi.aa_walked = 0;
2598 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2600 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2601 bi->cg_edges.safe_push (cs);
2604 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2606 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2607 bi->cg_edges.safe_push (cs);
2610 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2612 ipa_release_body_info (&fbi);
2613 free_dominance_info (CDI_DOMINATORS);
2614 pop_cfun ();
2617 /* Update the jump functions associated with call graph edge E when the call
2618 graph edge CS is being inlined, assuming that E->caller is already (possibly
2619 indirectly) inlined into CS->callee and that E has not been inlined. */
2621 static void
2622 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2623 struct cgraph_edge *e)
2625 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2626 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2627 int count = ipa_get_cs_argument_count (args);
2628 int i;
2630 for (i = 0; i < count; i++)
2632 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2633 struct ipa_polymorphic_call_context *dst_ctx
2634 = ipa_get_ith_polymorhic_call_context (args, i);
2636 if (dst->type == IPA_JF_ANCESTOR)
2638 struct ipa_jump_func *src;
2639 int dst_fid = dst->value.ancestor.formal_id;
2640 struct ipa_polymorphic_call_context *src_ctx
2641 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2643 /* A variable number of arguments can cause havoc if we try to access
2644 an argument that does not exist in the inlined edge, so make sure
2645 we don't. */
2646 if (dst_fid >= ipa_get_cs_argument_count (top))
2648 ipa_set_jf_unknown (dst);
2649 continue;
2652 src = ipa_get_ith_jump_func (top, dst_fid);
2654 if (src_ctx && !src_ctx->useless_p ())
2656 struct ipa_polymorphic_call_context ctx = *src_ctx;
2658 /* TODO: Make type preserved safe WRT contexts. */
2659 if (!ipa_get_jf_ancestor_type_preserved (dst))
2660 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2661 ctx.offset_by (dst->value.ancestor.offset);
2662 if (!ctx.useless_p ())
2664 if (!dst_ctx)
2666 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2667 count);
2668 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2671 dst_ctx->combine_with (ctx);
2675 if (src->agg.items
2676 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2678 struct ipa_agg_jf_item *item;
2679 int j;
2681 /* Currently we do not produce clobber aggregate jump functions,
2682 replace with merging when we do. */
2683 gcc_assert (!dst->agg.items);
2685 dst->agg.items = vec_safe_copy (src->agg.items);
2686 dst->agg.by_ref = src->agg.by_ref;
2687 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2688 item->offset -= dst->value.ancestor.offset;
2691 if (src->type == IPA_JF_PASS_THROUGH
2692 && src->value.pass_through.operation == NOP_EXPR)
2694 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2695 dst->value.ancestor.agg_preserved &=
2696 src->value.pass_through.agg_preserved;
2698 else if (src->type == IPA_JF_PASS_THROUGH
2699 && TREE_CODE_CLASS (src->value.pass_through.operation) == tcc_unary)
2701 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2702 dst->value.ancestor.agg_preserved = false;
2704 else if (src->type == IPA_JF_ANCESTOR)
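/* Two ancestor jump functions compose; the offsets into the object simply
add up.  */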
2706 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2707 dst->value.ancestor.offset += src->value.ancestor.offset;
2708 dst->value.ancestor.agg_preserved &=
2709 src->value.ancestor.agg_preserved;
2711 else
2712 ipa_set_jf_unknown (dst);
2714 else if (dst->type == IPA_JF_PASS_THROUGH)
2716 struct ipa_jump_func *src;
2717 /* We must check range due to calls with variable number of arguments
2718 and we cannot combine jump functions with operations. */
2719 if (dst->value.pass_through.operation == NOP_EXPR
2720 && (dst->value.pass_through.formal_id
2721 < ipa_get_cs_argument_count (top)))
2723 int dst_fid = dst->value.pass_through.formal_id;
2724 src = ipa_get_ith_jump_func (top, dst_fid);
2725 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2726 struct ipa_polymorphic_call_context *src_ctx
2727 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2729 if (src_ctx && !src_ctx->useless_p ())
2731 struct ipa_polymorphic_call_context ctx = *src_ctx;
2733 /* TODO: Make type preserved safe WRT contexts. */
2734 if (!ipa_get_jf_pass_through_type_preserved (dst))
2735 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2736 if (!ctx.useless_p ())
2738 if (!dst_ctx)
2740 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2741 count);
2742 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2744 dst_ctx->combine_with (ctx);
2747 switch (src->type)
2749 case IPA_JF_UNKNOWN:
2750 ipa_set_jf_unknown (dst);
2751 break;
2752 case IPA_JF_CONST:
2753 ipa_set_jf_cst_copy (dst, src);
2754 break;
2756 case IPA_JF_PASS_THROUGH:
2758 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2759 enum tree_code operation;
2760 operation = ipa_get_jf_pass_through_operation (src);
2762 if (operation == NOP_EXPR)
2764 bool agg_p;
2765 agg_p = dst_agg_p
2766 && ipa_get_jf_pass_through_agg_preserved (src);
2767 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2769 else if (TREE_CODE_CLASS (operation) == tcc_unary)
2770 ipa_set_jf_unary_pass_through (dst, formal_id, operation);
2771 else
2773 tree operand = ipa_get_jf_pass_through_operand (src);
2774 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2775 operation);
2777 break;
2779 case IPA_JF_ANCESTOR:
2781 bool agg_p;
2782 agg_p = dst_agg_p
2783 && ipa_get_jf_ancestor_agg_preserved (src);
2784 ipa_set_ancestor_jf (dst,
2785 ipa_get_jf_ancestor_offset (src),
2786 ipa_get_jf_ancestor_formal_id (src),
2787 agg_p);
2788 break;
2790 default:
2791 gcc_unreachable ();
2794 if (src->agg.items
2795 && (dst_agg_p || !src->agg.by_ref))
2797 /* Currently we do not produce clobber aggregate jump
2798 functions, replace with merging when we do. */
2799 gcc_assert (!dst->agg.items);
2801 dst->agg.by_ref = src->agg.by_ref;
2802 dst->agg.items = vec_safe_copy (src->agg.items);
2805 else
2806 ipa_set_jf_unknown (dst);
2811 /* If TARGET is an addr_expr of a function declaration, make it the
2812 (SPECULATIVE) destination of an indirect edge IE and return the edge.
2813 Otherwise, return NULL. */
2815 struct cgraph_edge *
2816 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2817 bool speculative)
2819 struct cgraph_node *callee;
2820 struct ipa_call_summary *es = ipa_call_summaries->get (ie);
2821 bool unreachable = false;
2823 if (TREE_CODE (target) == ADDR_EXPR)
2824 target = TREE_OPERAND (target, 0);
2825 if (TREE_CODE (target) != FUNCTION_DECL)
2827 target = canonicalize_constructor_val (target, NULL);
2828 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2830 /* Member pointer call that goes through a VMT lookup. */
2831 if (ie->indirect_info->member_ptr
2832 /* Or if target is not an invariant expression and we do not
2833 know whether it will evaluate to a function at runtime.
2834 This can happen when folding through &VAR, where &VAR
2835 is IP invariant, but VAR itself is not.
2837 TODO: Revisit this when GCC 5 is branched. It seems that
2838 member_ptr check is not needed and that we may try to fold
2839 the expression and see if VAR is readonly. */
2840 || !is_gimple_ip_invariant (target))
2842 if (dump_enabled_p ())
2844 location_t loc = gimple_location_safe (ie->call_stmt);
2845 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2846 "discovered direct call non-invariant %s\n",
2847 ie->caller->dump_name ());
2849 return NULL;
2853 if (dump_enabled_p ())
2855 location_t loc = gimple_location_safe (ie->call_stmt);
2856 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2857 "discovered direct call to non-function in %s, "
2858 "making it __builtin_unreachable\n",
2859 ie->caller->dump_name ());
2862 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2863 callee = cgraph_node::get_create (target);
2864 unreachable = true;
2866 else
2867 callee = cgraph_node::get (target);
2869 else
2870 callee = cgraph_node::get (target);
2872 /* Because may-edges are not explicitly represented and the vtable may be external,
2873 we may create the first reference to the object in the unit. */
2874 if (!callee || callee->global.inlined_to)
2877 /* We had better ensure we can refer to it.
2878 In the case of static functions we are out of luck, since we already
2879 removed its body. In the case of public functions we may or may
2880 not introduce the reference. */
2881 if (!canonicalize_constructor_val (target, NULL)
2882 || !TREE_PUBLIC (target))
2884 if (dump_file)
2885 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2886 "(%s -> %s) but can not refer to it. Giving up.\n",
2887 ie->caller->dump_name (),
2888 ie->callee->dump_name ());
2889 return NULL;
2891 callee = cgraph_node::get_create (target);
2894 /* If the edge is already speculative, just check whether the discovered target agrees with the existing speculation. */
2895 if (speculative && ie->speculative)
2897 struct cgraph_edge *e2;
2898 struct ipa_ref *ref;
2899 ie->speculative_call_info (e2, ie, ref);
2900 if (e2->callee->ultimate_alias_target ()
2901 != callee->ultimate_alias_target ())
2903 if (dump_file)
2904 fprintf (dump_file, "ipa-prop: Discovered call to a speculative "
2905 "target (%s -> %s) but the call is already "
2906 "speculated to %s. Giving up.\n",
2907 ie->caller->dump_name (), callee->dump_name (),
2908 e2->callee->dump_name ());
2910 else
2912 if (dump_file)
2913 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2914 "(%s -> %s) this agree with previous speculation.\n",
2915 ie->caller->dump_name (), callee->dump_name ());
2917 return NULL;
2920 if (!dbg_cnt (devirt))
2921 return NULL;
2923 ipa_check_create_node_params ();
2925 /* We cannot make edges to inline clones. It is a bug if someone removed
2926 the cgraph node too early. */
2927 gcc_assert (!callee->global.inlined_to);
2929 if (dump_file && !unreachable)
2931 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2932 "(%s -> %s), for stmt ",
2933 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2934 speculative ? "speculative" : "known",
2935 ie->caller->dump_name (),
2936 callee->dump_name ());
2937 if (ie->call_stmt)
2938 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2939 else
2940 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2942 if (dump_enabled_p ())
2944 location_t loc = gimple_location_safe (ie->call_stmt);
2946 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2947 "converting indirect call in %s to direct call to %s\n",
2948 ie->caller->name (), callee->name ());
2950 if (!speculative)
2952 struct cgraph_edge *orig = ie;
2953 ie = ie->make_direct (callee);
2954 /* If we resolved a speculative edge, the cost is already up to date
2955 for the direct call (adjusted by inline_edge_duplication_hook). */
2956 if (ie == orig)
2958 es = ipa_call_summaries->get (ie);
2959 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2960 - eni_size_weights.call_cost);
2961 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2962 - eni_time_weights.call_cost);
2965 else
2967 if (!callee->can_be_discarded_p ())
2969 cgraph_node *alias;
2970 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2971 if (alias)
2972 callee = alias;
2974 /* make_speculative will update ie's cost to direct call cost. */
2975 ie = ie->make_speculative
2976 (callee, ie->count.apply_scale (8, 10));
2979 return ie;
2982 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
2983 CONSTRUCTOR and return it. Return NULL if the search fails for some
2984 reason. */
2986 static tree
2987 find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
2989 tree type = TREE_TYPE (constructor);
2990 if (TREE_CODE (type) != ARRAY_TYPE
2991 && TREE_CODE (type) != RECORD_TYPE)
2992 return NULL;
2994 unsigned ix;
2995 tree index, val;
2996 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
2998 HOST_WIDE_INT elt_offset;
2999 if (TREE_CODE (type) == ARRAY_TYPE)
3001 offset_int off;
3002 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
3003 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3005 if (index)
3007 if (TREE_CODE (index) == RANGE_EXPR)
3008 off = wi::to_offset (TREE_OPERAND (index, 0));
3009 else
3010 off = wi::to_offset (index);
3011 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
3013 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
3014 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3015 off = wi::sext (off - wi::to_offset (low_bound),
3016 TYPE_PRECISION (TREE_TYPE (index)));
3018 off *= wi::to_offset (unit_size);
3019 /* ??? Handle more than just the first index of a
3020 RANGE_EXPR. */
3022 else
3023 off = wi::to_offset (unit_size) * ix;
3025 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
3026 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
3027 continue;
3028 elt_offset = off.to_shwi ();
3030 else if (TREE_CODE (type) == RECORD_TYPE)
3032 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
3033 if (DECL_BIT_FIELD (index))
3034 continue;
3035 elt_offset = int_bit_position (index);
3037 else
3038 gcc_unreachable ();
3040 if (elt_offset > req_offset)
3041 return NULL;
3043 if (TREE_CODE (val) == CONSTRUCTOR)
3044 return find_constructor_constant_at_offset (val,
3045 req_offset - elt_offset);
3047 if (elt_offset == req_offset
3048 && is_gimple_reg_type (TREE_TYPE (val))
3049 && is_gimple_ip_invariant (val))
3050 return val;
3052 return NULL;
3055 /* Check whether SCALAR could be used to look up an aggregate interprocedural
3056 invariant from a static constructor and if so, return it. Otherwise return
3057 NULL. */
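/* An illustrative example: given
static const int tab[2] = {42, 7};
and an argument passing &tab, a BY_REF lookup at bit offset 0 would return
the constant 42 and, with 32-bit ints, a lookup at bit offset 32 would
return 7.  */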
3059 static tree
3060 ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
3062 if (by_ref)
3064 if (TREE_CODE (scalar) != ADDR_EXPR)
3065 return NULL;
3066 scalar = TREE_OPERAND (scalar, 0);
3069 if (!VAR_P (scalar)
3070 || !is_global_var (scalar)
3071 || !TREE_READONLY (scalar)
3072 || !DECL_INITIAL (scalar)
3073 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
3074 return NULL;
3076 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
3079 /* Retrieve value from aggregate jump function AGG or static initializer of
3080 SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
3081 none. BY_REF specifies whether the value has to be passed by reference or
3082 by value. If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
3083 to is set to true if the value comes from an initializer of a constant. */
3085 tree
3086 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar,
3087 HOST_WIDE_INT offset, bool by_ref,
3088 bool *from_global_constant)
3090 struct ipa_agg_jf_item *item;
3091 int i;
3093 if (scalar)
3095 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
3096 if (res)
3098 if (from_global_constant)
3099 *from_global_constant = true;
3100 return res;
3104 if (!agg
3105 || by_ref != agg->by_ref)
3106 return NULL;
3108 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
3109 if (item->offset == offset)
3111 /* Currently we do not have clobber values, return NULL for them once
3112 we do. */
3113 gcc_checking_assert (is_gimple_ip_invariant (item->value));
3114 if (from_global_constant)
3115 *from_global_constant = false;
3116 return item->value;
3118 return NULL;
3121 /* Remove a reference to SYMBOL from the list of references of a node given by
3122 reference description RDESC. Return true if the reference has been
3123 successfully found and removed. */
3125 static bool
3126 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
3128 struct ipa_ref *to_del;
3129 struct cgraph_edge *origin;
3131 origin = rdesc->cs;
3132 if (!origin)
3133 return false;
3134 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3135 origin->lto_stmt_uid);
3136 if (!to_del)
3137 return false;
3139 to_del->remove_reference ();
3140 if (dump_file)
3141 fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
3142 origin->caller->dump_name (), xstrdup_for_dump (symbol->name ()));
3143 return true;
3146 /* If JFUNC has a reference description with refcount different from
3147 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3148 NULL. JFUNC must be a constant jump function. */
3150 static struct ipa_cst_ref_desc *
3151 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3153 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3154 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3155 return rdesc;
3156 else
3157 return NULL;
3160 /* If the value of constant jump function JFUNC is an address of a function
3161 declaration, return the associated call graph node. Otherwise return
3162 NULL. */
3164 static cgraph_node *
3165 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3167 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3168 tree cst = ipa_get_jf_constant (jfunc);
3169 if (TREE_CODE (cst) != ADDR_EXPR
3170 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3171 return NULL;
3173 return cgraph_node::get (TREE_OPERAND (cst, 0));
3177 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3178 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3179 the edge specified in the rdesc. Return false if either the symbol or the
3180 reference could not be found, otherwise return true. */
3182 static bool
3183 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3185 struct ipa_cst_ref_desc *rdesc;
3186 if (jfunc->type == IPA_JF_CONST
3187 && (rdesc = jfunc_rdesc_usable (jfunc))
3188 && --rdesc->refcount == 0)
3190 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3191 if (!symbol)
3192 return false;
3194 return remove_described_reference (symbol, rdesc);
3196 return true;
3199 /* Try to find a destination for indirect edge IE that corresponds to a simple
3200 call or a call of a member function pointer and where the destination is a
3201 pointer formal parameter described by jump function JFUNC. TARGET_TYPE is
3202 the type of the parameter to which the result of JFUNC is passed. If it can
3203 be determined, return the newly direct edge, otherwise return NULL.
3204 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
3206 static struct cgraph_edge *
3207 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3208 struct ipa_jump_func *jfunc, tree target_type,
3209 struct ipa_node_params *new_root_info)
3211 struct cgraph_edge *cs;
3212 tree target;
3213 bool agg_contents = ie->indirect_info->agg_contents;
3214 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
3215 if (agg_contents)
3217 bool from_global_constant;
3218 target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3219 ie->indirect_info->offset,
3220 ie->indirect_info->by_ref,
3221 &from_global_constant);
3222 if (target
3223 && !from_global_constant
3224 && !ie->indirect_info->guaranteed_unmodified)
3225 return NULL;
3227 else
3228 target = scalar;
3229 if (!target)
3230 return NULL;
3231 cs = ipa_make_edge_direct_to_target (ie, target);
3233 if (cs && !agg_contents)
3235 bool ok;
3236 gcc_checking_assert (cs->callee
3237 && (cs != ie
3238 || jfunc->type != IPA_JF_CONST
3239 || !cgraph_node_for_jfunc (jfunc)
3240 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3241 ok = try_decrement_rdesc_refcount (jfunc);
3242 gcc_checking_assert (ok);
3245 return cs;
3248 /* Return the target to be used in cases of impossible devirtualization. IE
3249 and target (the latter can be NULL) are dumped when dumping is enabled. */
3251 tree
3252 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3254 if (dump_file)
3256 if (target)
3257 fprintf (dump_file,
3258 "Type inconsistent devirtualization: %s->%s\n",
3259 ie->caller->dump_name (),
3260 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3261 else
3262 fprintf (dump_file,
3263 "No devirtualization target in %s\n",
3264 ie->caller->dump_name ());
3266 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3267 cgraph_node::get_create (new_target);
3268 return new_target;
3271 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3272 call based on a formal parameter which is described by jump function JFUNC
3273 and if it can be determined, make it direct and return the direct edge.
3274 Otherwise, return NULL. CTX describes the polymorphic context that the
3275 parameter the call is based on brings along with it. */
3277 static struct cgraph_edge *
3278 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3279 struct ipa_jump_func *jfunc,
3280 struct ipa_polymorphic_call_context ctx)
3282 tree target = NULL;
3283 bool speculative = false;
3285 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
3286 return NULL;
3288 gcc_assert (!ie->indirect_info->by_ref);
3290 /* Try to do lookup via known virtual table pointer value. */
3291 if (!ie->indirect_info->vptr_changed
3292 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
3294 tree vtable;
3295 unsigned HOST_WIDE_INT offset;
3296 tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3297 : NULL;
3298 tree t = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3299 ie->indirect_info->offset,
3300 true);
3301 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3303 bool can_refer;
3304 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3305 vtable, offset, &can_refer);
3306 if (can_refer)
3308 if (!t
3309 || (TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
3310 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
3311 || !possible_polymorphic_call_target_p
3312 (ie, cgraph_node::get (t)))
3314 /* Do not speculate builtin_unreachable, it is stupid! */
3315 if (!ie->indirect_info->vptr_changed)
3316 target = ipa_impossible_devirt_target (ie, target);
3317 else
3318 target = NULL;
3320 else
3322 target = t;
3323 speculative = ie->indirect_info->vptr_changed;
3329 ipa_polymorphic_call_context ie_context (ie);
3330 vec <cgraph_node *>targets;
3331 bool final;
3333 ctx.offset_by (ie->indirect_info->offset);
3334 if (ie->indirect_info->vptr_changed)
3335 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3336 ie->indirect_info->otr_type);
3337 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3338 targets = possible_polymorphic_call_targets
3339 (ie->indirect_info->otr_type,
3340 ie->indirect_info->otr_token,
3341 ctx, &final);
3342 if (final && targets.length () <= 1)
3344 speculative = false;
3345 if (targets.length () == 1)
3346 target = targets[0]->decl;
3347 else
3348 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3350 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3351 && !ie->speculative && ie->maybe_hot_p ())
3353 cgraph_node *n;
3354 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3355 ie->indirect_info->otr_token,
3356 ie->indirect_info->context);
3357 if (n)
3359 target = n->decl;
3360 speculative = true;
3364 if (target)
3366 if (!possible_polymorphic_call_target_p
3367 (ie, cgraph_node::get_create (target)))
3369 if (speculative)
3370 return NULL;
3371 target = ipa_impossible_devirt_target (ie, target);
3373 return ipa_make_edge_direct_to_target (ie, target, speculative);
3375 else
3376 return NULL;
3379 /* Update the param called notes associated with NODE when CS is being inlined,
3380 assuming NODE is (potentially indirectly) inlined into CS->callee.
3381 Moreover, if the callee is discovered to be constant, create a new cgraph
3382 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3383 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
3385 static bool
3386 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3387 struct cgraph_node *node,
3388 vec<cgraph_edge *> *new_edges)
3390 struct ipa_edge_args *top;
3391 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3392 struct ipa_node_params *new_root_info, *inlined_node_info;
3393 bool res = false;
3395 ipa_check_create_edge_args ();
3396 top = IPA_EDGE_REF (cs);
3397 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3398 ? cs->caller->global.inlined_to
3399 : cs->caller);
3400 inlined_node_info = IPA_NODE_REF (cs->callee->function_symbol ());
3402 for (ie = node->indirect_calls; ie; ie = next_ie)
3404 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3405 struct ipa_jump_func *jfunc;
3406 int param_index;
3407 cgraph_node *spec_target = NULL;
3409 next_ie = ie->next_callee;
3411 if (ici->param_index == -1)
3412 continue;
3414 /* We must check range due to calls with variable number of arguments: */
3415 if (ici->param_index >= ipa_get_cs_argument_count (top))
3417 ici->param_index = -1;
3418 continue;
3421 param_index = ici->param_index;
3422 jfunc = ipa_get_ith_jump_func (top, param_index);
3424 if (ie->speculative)
3426 struct cgraph_edge *de;
3427 struct ipa_ref *ref;
3428 ie->speculative_call_info (de, ie, ref);
3429 spec_target = de->callee;
3432 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3433 new_direct_edge = NULL;
3434 else if (ici->polymorphic)
3436 ipa_polymorphic_call_context ctx;
3437 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3438 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3440 else
3442 tree target_type = ipa_get_type (inlined_node_info, param_index);
3443 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3444 target_type,
3445 new_root_info);
3448 /* If speculation was removed, then we need to do nothing. */
3449 if (new_direct_edge && new_direct_edge != ie
3450 && new_direct_edge->callee == spec_target)
3452 new_direct_edge->indirect_inlining_edge = 1;
3453 top = IPA_EDGE_REF (cs);
3454 res = true;
3455 if (!new_direct_edge->speculative)
3456 continue;
3458 else if (new_direct_edge)
3460 new_direct_edge->indirect_inlining_edge = 1;
3461 if (new_direct_edge->call_stmt)
3462 new_direct_edge->call_stmt_cannot_inline_p
3463 = !gimple_check_call_matching_types (
3464 new_direct_edge->call_stmt,
3465 new_direct_edge->callee->decl, false);
3466 if (new_edges)
3468 new_edges->safe_push (new_direct_edge);
3469 res = true;
3471 top = IPA_EDGE_REF (cs);
3472 /* If a speculative edge was introduced, we still need to update
3473 the call info of the indirect edge. */
3474 if (!new_direct_edge->speculative)
3475 continue;
3477 if (jfunc->type == IPA_JF_PASS_THROUGH
3478 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3480 if (ici->agg_contents
3481 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3482 && !ici->polymorphic)
3483 ici->param_index = -1;
3484 else
3486 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3487 if (ici->polymorphic
3488 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3489 ici->vptr_changed = true;
3492 else if (jfunc->type == IPA_JF_ANCESTOR)
3494 if (ici->agg_contents
3495 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3496 && !ici->polymorphic)
3497 ici->param_index = -1;
3498 else
3500 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3501 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3502 if (ici->polymorphic
3503 && !ipa_get_jf_ancestor_type_preserved (jfunc))
3504 ici->vptr_changed = true;
3507 else
3508 /* Either we can find a destination for this edge now or never. */
3509 ici->param_index = -1;
3512 return res;
3515 /* Recursively traverse subtree of NODE (including node) made of inlined
3516 cgraph_edges when CS has been inlined and invoke
3517 update_indirect_edges_after_inlining on all nodes and
3518 update_jump_functions_after_inlining on all non-inlined edges that lead out
3519 of this subtree. Newly discovered indirect edges will be added to
3520 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
3521 created. */
3523 static bool
3524 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3525 struct cgraph_node *node,
3526 vec<cgraph_edge *> *new_edges)
3528 struct cgraph_edge *e;
3529 bool res;
3531 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3533 for (e = node->callees; e; e = e->next_callee)
3534 if (!e->inline_failed)
3535 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3536 else
3537 update_jump_functions_after_inlining (cs, e);
3538 for (e = node->indirect_calls; e; e = e->next_callee)
3539 update_jump_functions_after_inlining (cs, e);
3541 return res;
3544 /* Combine two controlled uses counts as done during inlining. */
3546 static int
3547 combine_controlled_uses_counters (int c, int d)
3549 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3550 return IPA_UNDESCRIBED_USE;
3551 else
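/* The call site being inlined accounted for one of the described uses and
that use has just disappeared, hence the minus one.  */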
3552 return c + d - 1;
3555 /* Propagate the number of controlled users from CS->callee to the new root
3556 of the tree of inlined nodes. */
3558 static void
3559 propagate_controlled_uses (struct cgraph_edge *cs)
3561 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3562 struct cgraph_node *new_root = cs->caller->global.inlined_to
3563 ? cs->caller->global.inlined_to : cs->caller;
3564 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3565 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3566 int count, i;
3568 count = MIN (ipa_get_cs_argument_count (args),
3569 ipa_get_param_count (old_root_info));
3570 for (i = 0; i < count; i++)
3572 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3573 struct ipa_cst_ref_desc *rdesc;
3575 if (jf->type == IPA_JF_PASS_THROUGH)
3577 int src_idx, c, d;
3578 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3579 c = ipa_get_controlled_uses (new_root_info, src_idx);
3580 d = ipa_get_controlled_uses (old_root_info, i);
3582 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3583 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3584 c = combine_controlled_uses_counters (c, d);
3585 ipa_set_controlled_uses (new_root_info, src_idx, c);
3586 if (c == 0 && new_root_info->ipcp_orig_node)
3588 struct cgraph_node *n;
3589 struct ipa_ref *ref;
3590 tree t = new_root_info->known_csts[src_idx];
3592 if (t && TREE_CODE (t) == ADDR_EXPR
3593 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3594 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3595 && (ref = new_root->find_reference (n, NULL, 0)))
3597 if (dump_file)
3598 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3599 "reference from %s to %s.\n",
3600 new_root->dump_name (),
3601 n->dump_name ());
3602 ref->remove_reference ();
3606 else if (jf->type == IPA_JF_CONST
3607 && (rdesc = jfunc_rdesc_usable (jf)))
3609 int d = ipa_get_controlled_uses (old_root_info, i);
3610 int c = rdesc->refcount;
3611 rdesc->refcount = combine_controlled_uses_counters (c, d);
3612 if (rdesc->refcount == 0)
3614 tree cst = ipa_get_jf_constant (jf);
3615 struct cgraph_node *n;
3616 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3617 && TREE_CODE (TREE_OPERAND (cst, 0))
3618 == FUNCTION_DECL);
3619 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3620 if (n)
3622 struct cgraph_node *clone;
3623 bool ok;
3624 ok = remove_described_reference (n, rdesc);
3625 gcc_checking_assert (ok);
3627 clone = cs->caller;
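/* Walk up the tree of inline clones created by cloning and remove the
reference each of them acquired.  */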
3628 while (clone->global.inlined_to
3629 && clone != rdesc->cs->caller
3630 && IPA_NODE_REF (clone)->ipcp_orig_node)
3632 struct ipa_ref *ref;
3633 ref = clone->find_reference (n, NULL, 0);
3634 if (ref)
3636 if (dump_file)
3637 fprintf (dump_file, "ipa-prop: Removing "
3638 "cloning-created reference "
3639 "from %s to %s.\n",
3640 clone->dump_name (),
3641 n->dump_name ());
3642 ref->remove_reference ();
3644 clone = clone->callers->caller;
3651 for (i = ipa_get_param_count (old_root_info);
3652 i < ipa_get_cs_argument_count (args);
3653 i++)
3655 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3657 if (jf->type == IPA_JF_CONST)
3659 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3660 if (rdesc)
3661 rdesc->refcount = IPA_UNDESCRIBED_USE;
3663 else if (jf->type == IPA_JF_PASS_THROUGH)
3664 ipa_set_controlled_uses (new_root_info,
3665 jf->value.pass_through.formal_id,
3666 IPA_UNDESCRIBED_USE);
3670 /* Update jump functions and call note functions on inlining the call site CS.
3671 CS is expected to lead to a node already cloned by
3672 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3673 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3674 created. */
3676 bool
3677 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3678 vec<cgraph_edge *> *new_edges)
3680 bool changed;
3681 /* Do nothing if the preparation phase has not been carried out yet
3682 (i.e. during early inlining). */
3683 if (!ipa_node_params_sum)
3684 return false;
3685 gcc_assert (ipa_edge_args_sum);
3687 propagate_controlled_uses (cs);
3688 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3690 return changed;
3693 /* Ensure that the array of edge argument infos is big enough to accommodate a
3694 structure for all edges, reallocating it if not. Also allocate the
3695 associated hash tables if they do not already exist. */
3697 void
3698 ipa_check_create_edge_args (void)
3700 if (!ipa_edge_args_sum)
3701 ipa_edge_args_sum
3702 = (new (ggc_cleared_alloc <ipa_edge_args_sum_t> ())
3703 ipa_edge_args_sum_t (symtab, true));
3704 if (!ipa_bits_hash_table)
3705 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3706 if (!ipa_vr_hash_table)
3707 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
3710 /* Frees all dynamically allocated structures that the argument info points
3711 to. */
3713 void
3714 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3716 vec_free (args->jump_functions);
3717 *args = ipa_edge_args ();
3720 /* Free all ipa_edge structures. */
3722 void
3723 ipa_free_all_edge_args (void)
3725 if (!ipa_edge_args_sum)
3726 return;
3728 ipa_edge_args_sum->release ();
3729 ipa_edge_args_sum = NULL;
3732 /* Free all ipa_node_params structures. */
3734 void
3735 ipa_free_all_node_params (void)
3737 ipa_node_params_sum->release ();
3738 ipa_node_params_sum = NULL;
3741 /* Grow ipcp_transformations if necessary. Also allocate any hash tables
3742 that do not already exist. */
3744 void
3745 ipcp_grow_transformations_if_necessary (void)
3747 if (vec_safe_length (ipcp_transformations)
3748 <= (unsigned) symtab->cgraph_max_uid)
3749 vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
3750 if (!ipa_bits_hash_table)
3751 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3752 if (!ipa_vr_hash_table)
3753 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
3756 /* Set the aggregate replacements of NODE to be AGGVALS. */
3758 void
3759 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3760 struct ipa_agg_replacement_value *aggvals)
3762 ipcp_grow_transformations_if_necessary ();
3763 (*ipcp_transformations)[node->uid].agg_values = aggvals;
3766 /* Hook that is called by cgraph.c when an edge is removed. Adjust reference
3767 count data structures accordingly. */
3769 void
3770 ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
3772 if (args->jump_functions)
3774 struct ipa_jump_func *jf;
3775 int i;
3776 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3778 struct ipa_cst_ref_desc *rdesc;
3779 try_decrement_rdesc_refcount (jf);
3780 if (jf->type == IPA_JF_CONST
3781 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3782 && rdesc->cs == cs)
3783 rdesc->cs = NULL;
3788 /* Method invoked when an edge is duplicated. Copy ipa_edge_args and adjust
3789 reference count data structures accordingly. */
3791 void
3792 ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
3793 ipa_edge_args *old_args, ipa_edge_args *new_args)
3795 unsigned int i;
3797 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3798 if (old_args->polymorphic_call_contexts)
3799 new_args->polymorphic_call_contexts
3800 = vec_safe_copy (old_args->polymorphic_call_contexts);
3802 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3804 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3805 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3807 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3809 if (src_jf->type == IPA_JF_CONST)
3811 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3813 if (!src_rdesc)
3814 dst_jf->value.constant.rdesc = NULL;
3815 else if (src->caller == dst->caller)
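/* The new edge has the same caller as the old one, e.g. when the call
statement is duplicated or a speculative call pair is created; clone the
underlying reference and give the new edge an rdesc of its own.  */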
3817 struct ipa_ref *ref;
3818 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3819 gcc_checking_assert (n);
3820 ref = src->caller->find_reference (n, src->call_stmt,
3821 src->lto_stmt_uid);
3822 gcc_checking_assert (ref);
3823 dst->caller->clone_reference (ref, ref->stmt);
3825 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3826 dst_rdesc->cs = dst;
3827 dst_rdesc->refcount = src_rdesc->refcount;
3828 dst_rdesc->next_duplicate = NULL;
3829 dst_jf->value.constant.rdesc = dst_rdesc;
3831 else if (src_rdesc->cs == src)
3833 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3834 dst_rdesc->cs = dst;
3835 dst_rdesc->refcount = src_rdesc->refcount;
3836 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3837 src_rdesc->next_duplicate = dst_rdesc;
3838 dst_jf->value.constant.rdesc = dst_rdesc;
3840 else
3842 struct ipa_cst_ref_desc *dst_rdesc;
3843 /* This can happen during inlining, when a JFUNC can refer to a
3844 reference taken in a function up in the tree of inline clones.
3845 We need to find the duplicate that refers to our tree of
3846 inline clones. */
3848 gcc_assert (dst->caller->global.inlined_to);
3849 for (dst_rdesc = src_rdesc->next_duplicate;
3850 dst_rdesc;
3851 dst_rdesc = dst_rdesc->next_duplicate)
3853 struct cgraph_node *top;
3854 top = dst_rdesc->cs->caller->global.inlined_to
3855 ? dst_rdesc->cs->caller->global.inlined_to
3856 : dst_rdesc->cs->caller;
3857 if (dst->caller->global.inlined_to == top)
3858 break;
3860 gcc_assert (dst_rdesc);
3861 dst_jf->value.constant.rdesc = dst_rdesc;
3864 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3865 && src->caller == dst->caller)
3867 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3868 ? dst->caller->global.inlined_to : dst->caller;
3869 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3870 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3872 int c = ipa_get_controlled_uses (root_info, idx);
3873 if (c != IPA_UNDESCRIBED_USE)
3875 c++;
3876 ipa_set_controlled_uses (root_info, idx, c);
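/* To summarize the reference-description handling above: when an edge
   with an IPA_JF_CONST jump function is duplicated, three cases are
   distinguished (a sketch of the dispatch, using the variables of the
   method above):

     src->caller == dst->caller
       -- the same caller gets a second copy of the edge: clone the
          underlying ipa_ref as well and give DST a fresh, independent
          rdesc.
     src_rdesc->cs == src
       -- SRC still owns its rdesc: allocate a new one for DST and
          chain it into SRC's next_duplicate list so the reference
          count stays shared.
     otherwise
       -- SRC is itself a duplicate (this happens during inlining):
          walk next_duplicate to find the rdesc that belongs to DST's
          tree of inline clones.  */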
3882 /* Analyze a function newly added to the callgraph. */
3884 static void
3885 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3887 if (node->has_gimple_body_p ())
3888 ipa_analyze_node (node);
3891 /* Hook that is called by the summary machinery when a node is duplicated. */
3893 void
3894 ipa_node_params_t::duplicate (cgraph_node *src, cgraph_node *dst,
3895 ipa_node_params *old_info,
3896 ipa_node_params *new_info)
3898 ipa_agg_replacement_value *old_av, *new_av;
3900 new_info->descriptors = vec_safe_copy (old_info->descriptors);
3901 new_info->lattices = NULL;
3902 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3903 new_info->known_csts = old_info->known_csts.copy ();
3904 new_info->known_contexts = old_info->known_contexts.copy ();
3906 new_info->analysis_done = old_info->analysis_done;
3907 new_info->node_enqueued = old_info->node_enqueued;
3908 new_info->versionable = old_info->versionable;
3910 old_av = ipa_get_agg_replacements_for_node (src);
3911 if (old_av)
3913 new_av = NULL;
3914 while (old_av)
3916 struct ipa_agg_replacement_value *v;
3918 v = ggc_alloc<ipa_agg_replacement_value> ();
3919 memcpy (v, old_av, sizeof (*v));
3920 v->next = new_av;
3921 new_av = v;
3922 old_av = old_av->next;
3924 ipa_set_node_agg_value_chain (dst, new_av);
3927 ipcp_transformation_summary *src_trans
3928 = ipcp_get_transformation_summary (src);
3930 if (src_trans)
3932 ipcp_grow_transformations_if_necessary ();
3933 src_trans = ipcp_get_transformation_summary (src);
3934 ipcp_transformation_summary *dst_trans
3935 = ipcp_get_transformation_summary (dst);
3937 dst_trans->bits = vec_safe_copy (src_trans->bits);
3939 const vec<ipa_vr, va_gc> *src_vr = src_trans->m_vr;
3940 vec<ipa_vr, va_gc> *&dst_vr
3941 = ipcp_get_transformation_summary (dst)->m_vr;
3942 if (vec_safe_length (src_trans->m_vr) > 0)
3944 vec_safe_reserve_exact (dst_vr, src_vr->length ());
3945 for (unsigned i = 0; i < src_vr->length (); ++i)
3946 dst_vr->quick_push ((*src_vr)[i]);
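/* Note that the while loop above copies the aggregate replacement chain
   by prepending, so the copy ends up in reverse order; nothing in this
   file appears to depend on the order, since consumers match entries by
   their (index, offset) pair.  A self-contained sketch of the same
   prepend-copy idiom (XNEW from libiberty stands in for ggc_alloc;
   illustrative only):

     struct node { int val; struct node *next; };

     static struct node *
     copy_prepending (const struct node *src)
     {
       struct node *copy = NULL;
       for (; src; src = src->next)
         {
           struct node *v = XNEW (struct node);
           v->val = src->val;
           v->next = copy;
           copy = v;
         }
       return copy;
     }  */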
3951 /* Register our cgraph hooks if they are not already there. */
3953 void
3954 ipa_register_cgraph_hooks (void)
3956 ipa_check_create_node_params ();
3957 ipa_check_create_edge_args ();
3959 function_insertion_hook_holder =
3960 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3963 /* Unregister our cgraph hooks. */
3965 static void
3966 ipa_unregister_cgraph_hooks (void)
3968 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3969 function_insertion_hook_holder = NULL;
3972 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3973 longer needed after ipa-cp. */
3975 void
3976 ipa_free_all_structures_after_ipa_cp (void)
3978 if (!optimize && !in_lto_p)
3980 ipa_free_all_edge_args ();
3981 ipa_free_all_node_params ();
3982 ipcp_sources_pool.release ();
3983 ipcp_cst_values_pool.release ();
3984 ipcp_poly_ctx_values_pool.release ();
3985 ipcp_agg_lattice_pool.release ();
3986 ipa_unregister_cgraph_hooks ();
3987 ipa_refdesc_pool.release ();
3991 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3992 longer needed after indirect inlining. */
3994 void
3995 ipa_free_all_structures_after_iinln (void)
3997 ipa_free_all_edge_args ();
3998 ipa_free_all_node_params ();
3999 ipa_unregister_cgraph_hooks ();
4000 ipcp_sources_pool.release ();
4001 ipcp_cst_values_pool.release ();
4002 ipcp_poly_ctx_values_pool.release ();
4003 ipcp_agg_lattice_pool.release ();
4004 ipa_refdesc_pool.release ();
4007 /* Print the parameter descriptors of NODE to F. */
4010 void
4011 ipa_print_node_params (FILE *f, struct cgraph_node *node)
4013 int i, count;
4014 struct ipa_node_params *info;
4016 if (!node->definition)
4017 return;
4018 info = IPA_NODE_REF (node);
4019 fprintf (f, " function %s parameter descriptors:\n", node->dump_name ());
4020 count = ipa_get_param_count (info);
4021 for (i = 0; i < count; i++)
4023 int c;
4025 fprintf (f, " ");
4026 ipa_dump_param (f, info, i);
4027 if (ipa_is_param_used (info, i))
4028 fprintf (f, " used");
4029 c = ipa_get_controlled_uses (info, i);
4030 if (c == IPA_UNDESCRIBED_USE)
4031 fprintf (f, " undescribed_use");
4032 else
4033 fprintf (f, " controlled_uses=%i", c);
4034 fprintf (f, "\n");
4038 /* Print the parameter descriptors of all functions in the
4039 callgraph to F. */
4041 void
4042 ipa_print_all_params (FILE *f)
4044 struct cgraph_node *node;
4046 fprintf (f, "\nFunction parameters:\n");
4047 FOR_EACH_FUNCTION (node)
4048 ipa_print_node_params (f, node);
4051 /* Dump the linked list of aggregate replacement values AV to F. */
4053 void
4054 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4056 bool comma = false;
4057 fprintf (f, " Aggregate replacements:");
4058 for (; av; av = av->next)
4060 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4061 av->index, av->offset);
4062 print_generic_expr (f, av->value);
4063 comma = true;
4065 fprintf (f, "\n");
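/* With hypothetical replacements "parameter 1, offset 16, value 4" and
   "parameter 2, offset 0, value 7", the function above would print
   (modulo leading whitespace):

     Aggregate replacements: 1[16]=4, 2[0]=7

   where each entry reads INDEX[OFFSET]=VALUE.  */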
4068 /* Stream out jump function JUMP_FUNC to OB. */
4070 static void
4071 ipa_write_jump_function (struct output_block *ob,
4072 struct ipa_jump_func *jump_func)
4074 struct ipa_agg_jf_item *item;
4075 struct bitpack_d bp;
4076 int i, count;
4078 streamer_write_uhwi (ob, jump_func->type);
4079 switch (jump_func->type)
4081 case IPA_JF_UNKNOWN:
4082 break;
4083 case IPA_JF_CONST:
4084 gcc_assert (
4085 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4086 stream_write_tree (ob, jump_func->value.constant.value, true);
4087 break;
4088 case IPA_JF_PASS_THROUGH:
4089 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4090 if (jump_func->value.pass_through.operation == NOP_EXPR)
4092 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4093 bp = bitpack_create (ob->main_stream);
4094 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4095 streamer_write_bitpack (&bp);
4097 else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
4098 == tcc_unary)
4099 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4100 else
4102 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4103 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4105 break;
4106 case IPA_JF_ANCESTOR:
4107 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4108 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4109 bp = bitpack_create (ob->main_stream);
4110 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4111 streamer_write_bitpack (&bp);
4112 break;
4115 count = vec_safe_length (jump_func->agg.items);
4116 streamer_write_uhwi (ob, count);
4117 if (count)
4119 bp = bitpack_create (ob->main_stream);
4120 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4121 streamer_write_bitpack (&bp);
4124 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4126 streamer_write_uhwi (ob, item->offset);
4127 stream_write_tree (ob, item->value, true);
4130 bp = bitpack_create (ob->main_stream);
4131 bp_pack_value (&bp, !!jump_func->bits, 1);
4132 streamer_write_bitpack (&bp);
4133 if (jump_func->bits)
4135 streamer_write_widest_int (ob, jump_func->bits->value);
4136 streamer_write_widest_int (ob, jump_func->bits->mask);
4138 bp_pack_value (&bp, !!jump_func->m_vr, 1);
4139 streamer_write_bitpack (&bp);
4140 if (jump_func->m_vr)
4142 streamer_write_enum (ob->main_stream, value_range_type,
4143 VR_LAST, jump_func->m_vr->type);
4144 stream_write_tree (ob, jump_func->m_vr->min, true);
4145 stream_write_tree (ob, jump_func->m_vr->max, true);
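/* For reference, the record emitted above has the following shape (a
   sketch reconstructed from the code, not a separately documented
   format):

     uhwi      jump function type
     payload   type-specific: a tree for IPA_JF_CONST; operation plus
               formal_id (and bitpacked agg_preserved, or an operand
               tree) for IPA_JF_PASS_THROUGH; offset, formal_id and
               bitpacked agg_preserved for IPA_JF_ANCESTOR
     uhwi      number of aggregate items
     bitpack   agg.by_ref -- only present if the count is nonzero
     per item  uhwi offset followed by a tree value
     bitpack   "bits known" flag, then two widest_ints if set
     bitpack   "value range known" flag, then enum and min/max trees
               if set

   ipa_read_jump_function below must consume exactly this sequence.  */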
4149 /* Read in jump function JUMP_FUNC from IB. */
4151 static void
4152 ipa_read_jump_function (struct lto_input_block *ib,
4153 struct ipa_jump_func *jump_func,
4154 struct cgraph_edge *cs,
4155 struct data_in *data_in)
4157 enum jump_func_type jftype;
4158 enum tree_code operation;
4159 int i, count;
4161 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4162 switch (jftype)
4164 case IPA_JF_UNKNOWN:
4165 ipa_set_jf_unknown (jump_func);
4166 break;
4167 case IPA_JF_CONST:
4168 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4169 break;
4170 case IPA_JF_PASS_THROUGH:
4171 operation = (enum tree_code) streamer_read_uhwi (ib);
4172 if (operation == NOP_EXPR)
4174 int formal_id = streamer_read_uhwi (ib);
4175 struct bitpack_d bp = streamer_read_bitpack (ib);
4176 bool agg_preserved = bp_unpack_value (&bp, 1);
4177 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4179 else if (TREE_CODE_CLASS (operation) == tcc_unary)
4181 int formal_id = streamer_read_uhwi (ib);
4182 ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4184 else
4186 tree operand = stream_read_tree (ib, data_in);
4187 int formal_id = streamer_read_uhwi (ib);
4188 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4189 operation);
4191 break;
4192 case IPA_JF_ANCESTOR:
4194 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4195 int formal_id = streamer_read_uhwi (ib);
4196 struct bitpack_d bp = streamer_read_bitpack (ib);
4197 bool agg_preserved = bp_unpack_value (&bp, 1);
4198 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4199 break;
4203 count = streamer_read_uhwi (ib);
4204 vec_alloc (jump_func->agg.items, count);
4205 if (count)
4207 struct bitpack_d bp = streamer_read_bitpack (ib);
4208 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4210 for (i = 0; i < count; i++)
4212 struct ipa_agg_jf_item item;
4213 item.offset = streamer_read_uhwi (ib);
4214 item.value = stream_read_tree (ib, data_in);
4215 jump_func->agg.items->quick_push (item);
4218 struct bitpack_d bp = streamer_read_bitpack (ib);
4219 bool bits_known = bp_unpack_value (&bp, 1);
4220 if (bits_known)
4222 widest_int value = streamer_read_widest_int (ib);
4223 widest_int mask = streamer_read_widest_int (ib);
4224 ipa_set_jfunc_bits (jump_func, value, mask);
4226 else
4227 jump_func->bits = NULL;
4229 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4230 bool vr_known = bp_unpack_value (&vr_bp, 1);
4231 if (vr_known)
4233 enum value_range_type type = streamer_read_enum (ib, value_range_type,
4234 VR_LAST);
4235 tree min = stream_read_tree (ib, data_in);
4236 tree max = stream_read_tree (ib, data_in);
4237 ipa_set_jfunc_vr (jump_func, type, min, max);
4239 else
4240 jump_func->m_vr = NULL;
4243 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4244 relevant to indirect inlining to OB. */
4246 static void
4247 ipa_write_indirect_edge_info (struct output_block *ob,
4248 struct cgraph_edge *cs)
4250 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4251 struct bitpack_d bp;
4253 streamer_write_hwi (ob, ii->param_index);
4254 bp = bitpack_create (ob->main_stream);
4255 bp_pack_value (&bp, ii->polymorphic, 1);
4256 bp_pack_value (&bp, ii->agg_contents, 1);
4257 bp_pack_value (&bp, ii->member_ptr, 1);
4258 bp_pack_value (&bp, ii->by_ref, 1);
4259 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
4260 bp_pack_value (&bp, ii->vptr_changed, 1);
4261 streamer_write_bitpack (&bp);
4262 if (ii->agg_contents || ii->polymorphic)
4263 streamer_write_hwi (ob, ii->offset);
4264 else
4265 gcc_assert (ii->offset == 0);
4267 if (ii->polymorphic)
4269 streamer_write_hwi (ob, ii->otr_token);
4270 stream_write_tree (ob, ii->otr_type, true);
4271 ii->context.stream_out (ob);
4275 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4276 relevant to indirect inlining from IB. */
4278 static void
4279 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4280 struct data_in *data_in,
4281 struct cgraph_edge *cs)
4283 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4284 struct bitpack_d bp;
4286 ii->param_index = (int) streamer_read_hwi (ib);
4287 bp = streamer_read_bitpack (ib);
4288 ii->polymorphic = bp_unpack_value (&bp, 1);
4289 ii->agg_contents = bp_unpack_value (&bp, 1);
4290 ii->member_ptr = bp_unpack_value (&bp, 1);
4291 ii->by_ref = bp_unpack_value (&bp, 1);
4292 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
4293 ii->vptr_changed = bp_unpack_value (&bp, 1);
4294 if (ii->agg_contents || ii->polymorphic)
4295 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4296 else
4297 ii->offset = 0;
4298 if (ii->polymorphic)
4300 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4301 ii->otr_type = stream_read_tree (ib, data_in);
4302 ii->context.stream_in (ib, data_in);
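/* The unpacking above must mirror ipa_write_indirect_edge_info bit for
   bit: flags come out in the same order and with the same widths as
   they were packed.  A minimal sketch of the idiom, with two
   hypothetical one-bit flags:

     Writer:
       struct bitpack_d bp = bitpack_create (ob->main_stream);
       bp_pack_value (&bp, flag_a, 1);
       bp_pack_value (&bp, flag_b, 1);
       streamer_write_bitpack (&bp);

     Reader:
       struct bitpack_d bp = streamer_read_bitpack (ib);
       bool flag_a = bp_unpack_value (&bp, 1);
       bool flag_b = bp_unpack_value (&bp, 1);  */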
4306 /* Stream out NODE info to OB. */
4308 static void
4309 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4311 int node_ref;
4312 lto_symtab_encoder_t encoder;
4313 struct ipa_node_params *info = IPA_NODE_REF (node);
4314 int j;
4315 struct cgraph_edge *e;
4316 struct bitpack_d bp;
4318 encoder = ob->decl_state->symtab_node_encoder;
4319 node_ref = lto_symtab_encoder_encode (encoder, node);
4320 streamer_write_uhwi (ob, node_ref);
4322 streamer_write_uhwi (ob, ipa_get_param_count (info));
4323 for (j = 0; j < ipa_get_param_count (info); j++)
4324 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4325 bp = bitpack_create (ob->main_stream);
4326 gcc_assert (info->analysis_done
4327 || ipa_get_param_count (info) == 0);
4328 gcc_assert (!info->node_enqueued);
4329 gcc_assert (!info->ipcp_orig_node);
4330 for (j = 0; j < ipa_get_param_count (info); j++)
4331 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4332 streamer_write_bitpack (&bp);
4333 for (j = 0; j < ipa_get_param_count (info); j++)
4335 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4336 stream_write_tree (ob, ipa_get_type (info, j), true);
4338 for (e = node->callees; e; e = e->next_callee)
4340 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4342 streamer_write_uhwi (ob,
4343 ipa_get_cs_argument_count (args) * 2
4344 + (args->polymorphic_call_contexts != NULL));
4345 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4347 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4348 if (args->polymorphic_call_contexts != NULL)
4349 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4352 for (e = node->indirect_calls; e; e = e->next_callee)
4354 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4356 streamer_write_uhwi (ob,
4357 ipa_get_cs_argument_count (args) * 2
4358 + (args->polymorphic_call_contexts != NULL));
4359 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4361 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4362 if (args->polymorphic_call_contexts != NULL)
4363 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4365 ipa_write_indirect_edge_info (ob, e);
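/* The argument-count encoding used above packs a boolean into the low
   bit: what is streamed is COUNT * 2 + HAVE_CONTEXTS, and the reader in
   ipa_read_node_info recovers both halves with "count & 1" and
   "count / 2".  A self-contained check with hypothetical numbers:

     #include <assert.h>

     int
     main (void)
     {
       unsigned count = 3, have_ctx = 1;
       unsigned n = count * 2 + have_ctx;
       assert ((n & 1) == have_ctx);
       assert (n / 2 == count);
       return 0;
     }

   Folding both values into one uhwi avoids a separate flag word per
   call site in the stream.  */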
4369 /* Stream in NODE info from IB. */
4371 static void
4372 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4373 struct data_in *data_in)
4375 struct ipa_node_params *info = IPA_NODE_REF (node);
4376 int k;
4377 struct cgraph_edge *e;
4378 struct bitpack_d bp;
4380 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4382 for (k = 0; k < ipa_get_param_count (info); k++)
4383 (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
4385 bp = streamer_read_bitpack (ib);
4386 if (ipa_get_param_count (info) != 0)
4387 info->analysis_done = true;
4388 info->node_enqueued = false;
4389 for (k = 0; k < ipa_get_param_count (info); k++)
4390 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4391 for (k = 0; k < ipa_get_param_count (info); k++)
4393 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4394 (*info->descriptors)[k].decl_or_type = stream_read_tree (ib, data_in);
4396 for (e = node->callees; e; e = e->next_callee)
4398 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4399 int count = streamer_read_uhwi (ib);
4400 bool contexts_computed = count & 1;
4401 count /= 2;
4403 if (!count)
4404 continue;
4405 vec_safe_grow_cleared (args->jump_functions, count);
4406 if (contexts_computed)
4407 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4409 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4411 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4412 data_in);
4413 if (contexts_computed)
4414 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4417 for (e = node->indirect_calls; e; e = e->next_callee)
4419 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4420 int count = streamer_read_uhwi (ib);
4421 bool contexts_computed = count & 1;
4422 count /= 2;
4424 if (count)
4426 vec_safe_grow_cleared (args->jump_functions, count);
4427 if (contexts_computed)
4428 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4429 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4431 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4432 data_in);
4433 if (contexts_computed)
4434 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4437 ipa_read_indirect_edge_info (ib, data_in, e);
4441 /* Write jump functions of all analyzed functions in the current partition. */
4443 void
4444 ipa_prop_write_jump_functions (void)
4446 struct cgraph_node *node;
4447 struct output_block *ob;
4448 unsigned int count = 0;
4449 lto_symtab_encoder_iterator lsei;
4450 lto_symtab_encoder_t encoder;
4452 if (!ipa_node_params_sum || !ipa_edge_args_sum)
4453 return;
4455 ob = create_output_block (LTO_section_jump_functions);
4456 encoder = ob->decl_state->symtab_node_encoder;
4457 ob->symbol = NULL;
4458 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4459 lsei_next_function_in_partition (&lsei))
4461 node = lsei_cgraph_node (lsei);
4462 if (node->has_gimple_body_p ()
4463 && IPA_NODE_REF (node) != NULL)
4464 count++;
4467 streamer_write_uhwi (ob, count);
4469 /* Process all of the functions. */
4470 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4471 lsei_next_function_in_partition (&lsei))
4473 node = lsei_cgraph_node (lsei);
4474 if (node->has_gimple_body_p ()
4475 && IPA_NODE_REF (node) != NULL)
4476 ipa_write_node_info (ob, node);
4478 streamer_write_char_stream (ob->main_stream, 0);
4479 produce_asm (ob, NULL);
4480 destroy_output_block (ob);
4483 /* Read the jump-function section in file FILE_DATA of length LEN with data DATA. */
4485 static void
4486 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4487 size_t len)
4489 const struct lto_function_header *header =
4490 (const struct lto_function_header *) data;
4491 const int cfg_offset = sizeof (struct lto_function_header);
4492 const int main_offset = cfg_offset + header->cfg_size;
4493 const int string_offset = main_offset + header->main_size;
4494 struct data_in *data_in;
4495 unsigned int i;
4496 unsigned int count;
4498 lto_input_block ib_main ((const char *) data + main_offset,
4499 header->main_size, file_data->mode_table);
4501 data_in =
4502 lto_data_in_create (file_data, (const char *) data + string_offset,
4503 header->string_size, vNULL);
4504 count = streamer_read_uhwi (&ib_main);
4506 for (i = 0; i < count; i++)
4508 unsigned int index;
4509 struct cgraph_node *node;
4510 lto_symtab_encoder_t encoder;
4512 index = streamer_read_uhwi (&ib_main);
4513 encoder = file_data->symtab_node_encoder;
4514 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4515 index));
4516 gcc_assert (node->definition);
4517 ipa_read_node_info (&ib_main, node, data_in);
4519 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4520 len);
4521 lto_data_in_delete (data_in);
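/* The offset arithmetic above assumes the usual layout of an LTO
   function section (a sketch derived from this function; the string
   table backs DATA_IN):

     data
     +---------------------+
     | lto_function_header |
     +---------------------+  data + cfg_offset
     | CFG stream          |  header->cfg_size bytes (unused here)
     +---------------------+  data + main_offset
     | main stream         |  header->main_size bytes
     +---------------------+  data + string_offset
     | string table        |  header->string_size bytes
     +---------------------+  */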
4524 /* Read IPA-CP jump functions. */
4526 void
4527 ipa_prop_read_jump_functions (void)
4529 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4530 struct lto_file_decl_data *file_data;
4531 unsigned int j = 0;
4533 ipa_check_create_node_params ();
4534 ipa_check_create_edge_args ();
4535 ipa_register_cgraph_hooks ();
4537 while ((file_data = file_data_vec[j++]))
4539 size_t len;
4540 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4542 if (data)
4543 ipa_prop_read_section (file_data, data, len);
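/* Stream out the IPA-CP transformation summary of NODE -- its aggregate
   replacement values and the known value ranges and known bits of its
   formal parameters -- to OB.  */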
4547 void
4548 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
4550 int node_ref;
4551 unsigned int count = 0;
4552 lto_symtab_encoder_t encoder;
4553 struct ipa_agg_replacement_value *aggvals, *av;
4555 aggvals = ipa_get_agg_replacements_for_node (node);
4556 encoder = ob->decl_state->symtab_node_encoder;
4557 node_ref = lto_symtab_encoder_encode (encoder, node);
4558 streamer_write_uhwi (ob, node_ref);
4560 for (av = aggvals; av; av = av->next)
4561 count++;
4562 streamer_write_uhwi (ob, count);
4564 for (av = aggvals; av; av = av->next)
4566 struct bitpack_d bp;
4568 streamer_write_uhwi (ob, av->offset);
4569 streamer_write_uhwi (ob, av->index);
4570 stream_write_tree (ob, av->value, true);
4572 bp = bitpack_create (ob->main_stream);
4573 bp_pack_value (&bp, av->by_ref, 1);
4574 streamer_write_bitpack (&bp);
4577 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
4578 if (ts && vec_safe_length (ts->m_vr) > 0)
4580 count = ts->m_vr->length ();
4581 streamer_write_uhwi (ob, count);
4582 for (unsigned i = 0; i < count; ++i)
4584 struct bitpack_d bp;
4585 ipa_vr *parm_vr = &(*ts->m_vr)[i];
4586 bp = bitpack_create (ob->main_stream);
4587 bp_pack_value (&bp, parm_vr->known, 1);
4588 streamer_write_bitpack (&bp);
4589 if (parm_vr->known)
4591 streamer_write_enum (ob->main_stream, value_range_type,
4592 VR_LAST, parm_vr->type);
4593 streamer_write_wide_int (ob, parm_vr->min);
4594 streamer_write_wide_int (ob, parm_vr->max);
4598 else
4599 streamer_write_uhwi (ob, 0);
4601 if (ts && vec_safe_length (ts->bits) > 0)
4603 count = ts->bits->length ();
4604 streamer_write_uhwi (ob, count);
4606 for (unsigned i = 0; i < count; ++i)
4608 const ipa_bits *bits_jfunc = (*ts->bits)[i];
4609 struct bitpack_d bp = bitpack_create (ob->main_stream);
4610 bp_pack_value (&bp, !!bits_jfunc, 1);
4611 streamer_write_bitpack (&bp);
4612 if (bits_jfunc)
4614 streamer_write_widest_int (ob, bits_jfunc->value);
4615 streamer_write_widest_int (ob, bits_jfunc->mask);
4619 else
4620 streamer_write_uhwi (ob, 0);
4623 /* Stream in the aggregate replacement chain, known value ranges and known bits for NODE from IB. */
4625 static void
4626 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
4627 data_in *data_in)
4629 struct ipa_agg_replacement_value *aggvals = NULL;
4630 unsigned int count, i;
4632 count = streamer_read_uhwi (ib);
4633 for (i = 0; i < count; i++)
4635 struct ipa_agg_replacement_value *av;
4636 struct bitpack_d bp;
4638 av = ggc_alloc<ipa_agg_replacement_value> ();
4639 av->offset = streamer_read_uhwi (ib);
4640 av->index = streamer_read_uhwi (ib);
4641 av->value = stream_read_tree (ib, data_in);
4642 bp = streamer_read_bitpack (ib);
4643 av->by_ref = bp_unpack_value (&bp, 1);
4644 av->next = aggvals;
4645 aggvals = av;
4647 ipa_set_node_agg_value_chain (node, aggvals);
4649 count = streamer_read_uhwi (ib);
4650 if (count > 0)
4652 ipcp_grow_transformations_if_necessary ();
4654 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
4655 vec_safe_grow_cleared (ts->m_vr, count);
4656 for (i = 0; i < count; i++)
4658 ipa_vr *parm_vr;
4659 parm_vr = &(*ts->m_vr)[i];
4660 struct bitpack_d bp;
4661 bp = streamer_read_bitpack (ib);
4662 parm_vr->known = bp_unpack_value (&bp, 1);
4663 if (parm_vr->known)
4665 parm_vr->type = streamer_read_enum (ib, value_range_type,
4666 VR_LAST);
4667 parm_vr->min = streamer_read_wide_int (ib);
4668 parm_vr->max = streamer_read_wide_int (ib);
4672 count = streamer_read_uhwi (ib);
4673 if (count > 0)
4675 ipcp_grow_transformations_if_necessary ();
4677 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
4678 vec_safe_grow_cleared (ts->bits, count);
4680 for (i = 0; i < count; i++)
4682 struct bitpack_d bp = streamer_read_bitpack (ib);
4683 bool known = bp_unpack_value (&bp, 1);
4684 if (known)
4686 ipa_bits *bits
4687 = ipa_get_ipa_bits_for_value (streamer_read_widest_int (ib),
4688 streamer_read_widest_int (ib));
4689 (*ts->bits)[i] = bits;
4695 /* Write the transformation summaries of all nodes in the current partition. */
4697 void
4698 ipcp_write_transformation_summaries (void)
4700 struct cgraph_node *node;
4701 struct output_block *ob;
4702 unsigned int count = 0;
4703 lto_symtab_encoder_iterator lsei;
4704 lto_symtab_encoder_t encoder;
4706 ob = create_output_block (LTO_section_ipcp_transform);
4707 encoder = ob->decl_state->symtab_node_encoder;
4708 ob->symbol = NULL;
4709 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4710 lsei_next_function_in_partition (&lsei))
4712 node = lsei_cgraph_node (lsei);
4713 if (node->has_gimple_body_p ())
4714 count++;
4717 streamer_write_uhwi (ob, count);
4719 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4720 lsei_next_function_in_partition (&lsei))
4722 node = lsei_cgraph_node (lsei);
4723 if (node->has_gimple_body_p ())
4724 write_ipcp_transformation_info (ob, node);
4726 streamer_write_char_stream (ob->main_stream, 0);
4727 produce_asm (ob, NULL);
4728 destroy_output_block (ob);
4731 /* Read replacements section in file FILE_DATA of length LEN with data
4732 DATA. */
4734 static void
4735 read_replacements_section (struct lto_file_decl_data *file_data,
4736 const char *data,
4737 size_t len)
4739 const struct lto_function_header *header =
4740 (const struct lto_function_header *) data;
4741 const int cfg_offset = sizeof (struct lto_function_header);
4742 const int main_offset = cfg_offset + header->cfg_size;
4743 const int string_offset = main_offset + header->main_size;
4744 struct data_in *data_in;
4745 unsigned int i;
4746 unsigned int count;
4748 lto_input_block ib_main ((const char *) data + main_offset,
4749 header->main_size, file_data->mode_table);
4751 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
4752 header->string_size, vNULL);
4753 count = streamer_read_uhwi (&ib_main);
4755 for (i = 0; i < count; i++)
4757 unsigned int index;
4758 struct cgraph_node *node;
4759 lto_symtab_encoder_t encoder;
4761 index = streamer_read_uhwi (&ib_main);
4762 encoder = file_data->symtab_node_encoder;
4763 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4764 index));
4765 gcc_assert (node->definition);
4766 read_ipcp_transformation_info (&ib_main, node, data_in);
4768 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
4769 len);
4770 lto_data_in_delete (data_in);
4773 /* Read IPA-CP aggregate replacements. */
4775 void
4776 ipcp_read_transformation_summaries (void)
4778 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4779 struct lto_file_decl_data *file_data;
4780 unsigned int j = 0;
4782 while ((file_data = file_data_vec[j++]))
4784 size_t len;
4785 const char *data = lto_get_section_data (file_data,
4786 LTO_section_ipcp_transform,
4787 NULL, &len);
4788 if (data)
4789 read_replacements_section (file_data, data, len);
4793 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4794 NODE. */
4796 static void
4797 adjust_agg_replacement_values (struct cgraph_node *node,
4798 struct ipa_agg_replacement_value *aggval)
4800 struct ipa_agg_replacement_value *v;
4801 int i, c = 0, d = 0, *adj;
4803 if (!node->clone.combined_args_to_skip)
4804 return;
4806 for (v = aggval; v; v = v->next)
4808 gcc_assert (v->index >= 0);
4809 if (c < v->index)
4810 c = v->index;
4812 c++;
4814 adj = XALLOCAVEC (int, c);
4815 for (i = 0; i < c; i++)
4816 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
4818 adj[i] = -1;
4819 d++;
4821 else
4822 adj[i] = i - d;
4824 for (v = aggval; v; v = v->next)
4825 v->index = adj[v->index];
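/* A worked example with hypothetical values: suppose NODE has four
   formal parameters, combined_args_to_skip contains just index 1, and
   AGGVAL carries entries for indices 0 and 3.  The loops above then
   build the remapping table

     adj = { 0, -1, 1, 2 }

   so the entry for index 0 stays put and the entry for index 3 is
   renumbered to 2 (index 1 maps to -1, but no surviving replacement
   may refer to a skipped parameter).  The same arithmetic as
   standalone C:

     int adj[4], d = 0;
     for (int i = 0; i < 4; i++)
       if (i == 1)
         {
           adj[i] = -1;
           d++;
         }
       else
         adj[i] = i - d;  */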
4828 /* Dominator walker driving the ipcp modification phase. */
4830 class ipcp_modif_dom_walker : public dom_walker
4832 public:
4833 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
4834 vec<ipa_param_descriptor, va_gc> *descs,
4835 struct ipa_agg_replacement_value *av,
4836 bool *sc, bool *cc)
4837 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
4838 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
4840 virtual edge before_dom_children (basic_block);
4842 private:
4843 struct ipa_func_body_info *m_fbi;
4844 vec<ipa_param_descriptor, va_gc> *m_descriptors;
4845 struct ipa_agg_replacement_value *m_aggval;
4846 bool *m_something_changed, *m_cfg_changed;
4849 edge
4850 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
4852 gimple_stmt_iterator gsi;
4853 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4855 struct ipa_agg_replacement_value *v;
4856 gimple *stmt = gsi_stmt (gsi);
4857 tree rhs, val, t;
4858 HOST_WIDE_INT offset, size;
4859 int index;
4860 bool by_ref, vce;
4862 if (!gimple_assign_load_p (stmt))
4863 continue;
4864 rhs = gimple_assign_rhs1 (stmt);
4865 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
4866 continue;
4868 vce = false;
4869 t = rhs;
4870 while (handled_component_p (t))
4872 /* V_C_E can do things like convert an array of integers to one
4873 bigger integer and similar things we do not handle below. */
4874 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
4876 vce = true;
4877 break;
4879 t = TREE_OPERAND (t, 0);
4881 if (vce)
4882 continue;
4884 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
4885 &offset, &size, &by_ref))
4886 continue;
4887 for (v = m_aggval; v; v = v->next)
4888 if (v->index == index
4889 && v->offset == offset)
4890 break;
4891 if (!v
4892 || v->by_ref != by_ref
4893 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
4894 continue;
4896 gcc_checking_assert (is_gimple_ip_invariant (v->value));
4897 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
4899 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
4900 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
4901 else if (TYPE_SIZE (TREE_TYPE (rhs))
4902 == TYPE_SIZE (TREE_TYPE (v->value)))
4903 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
4904 else
4906 if (dump_file)
4908 fprintf (dump_file, " const ");
4909 print_generic_expr (dump_file, v->value);
4910 fprintf (dump_file, " can't be converted to type of ");
4911 print_generic_expr (dump_file, rhs);
4912 fprintf (dump_file, "\n");
4914 continue;
4917 else
4918 val = v->value;
4920 if (dump_file && (dump_flags & TDF_DETAILS))
4922 fprintf (dump_file, "Modifying stmt:\n ");
4923 print_gimple_stmt (dump_file, stmt, 0);
4925 gimple_assign_set_rhs_from_tree (&gsi, val);
4926 update_stmt (stmt);
4928 if (dump_file && (dump_flags & TDF_DETAILS))
4930 fprintf (dump_file, "into:\n ");
4931 print_gimple_stmt (dump_file, stmt, 0);
4932 fprintf (dump_file, "\n");
4935 *m_something_changed = true;
4936 if (maybe_clean_eh_stmt (stmt)
4937 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4938 *m_cfg_changed = true;
4940 return NULL;
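/* The net effect of the walker above: loads from a parameter-passed
   aggregate whose contents IPA-CP proved constant are replaced by that
   constant.  On a hypothetical clone where the replacement list records
   that the 32-bit word at offset 16 of the first parameter is always 4,
   a statement would be rewritten roughly as

     before:  x_2 = MEM[(const int *)param_1 + 16B];
     after:   x_2 = 4;

   (GIMPLE dump syntax shown for illustration only.)  */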
4943 /* Update bits info of formal parameters as described in
4944 ipcp_transformation_summary. */
4946 static void
4947 ipcp_update_bits (struct cgraph_node *node)
4949 tree parm = DECL_ARGUMENTS (node->decl);
4950 tree next_parm = parm;
4951 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
4953 if (!ts || vec_safe_length (ts->bits) == 0)
4954 return;
4956 vec<ipa_bits *, va_gc> &bits = *ts->bits;
4957 unsigned count = bits.length ();
4959 for (unsigned i = 0; i < count; ++i, parm = next_parm)
4961 if (node->clone.combined_args_to_skip
4962 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
4963 continue;
4965 gcc_checking_assert (parm);
4966 next_parm = DECL_CHAIN (parm);
4968 if (!bits[i]
4969 || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
4970 || POINTER_TYPE_P (TREE_TYPE (parm)))
4971 || !is_gimple_reg (parm))
4972 continue;
4974 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
4975 if (!ddef)
4976 continue;
4978 if (dump_file)
4980 fprintf (dump_file, "Adjusting mask for param %u to ", i);
4981 print_hex (bits[i]->mask, dump_file);
4982 fprintf (dump_file, "\n");
4985 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
4987 unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
4988 signop sgn = TYPE_SIGN (TREE_TYPE (ddef));
4990 wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
4991 | wide_int::from (bits[i]->value, prec, sgn);
4992 set_nonzero_bits (ddef, nonzero_bits);
4994 else
4996 unsigned tem = bits[i]->mask.to_uhwi ();
4997 unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
4998 unsigned align = tem & -tem;
4999 unsigned misalign = bitpos & (align - 1);
5001 if (align > 1)
5003 if (dump_file)
5004 fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);
5006 unsigned old_align, old_misalign;
5007 struct ptr_info_def *pi = get_ptr_info (ddef);
5008 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5010 if (old_known
5011 && old_align > align)
5013 if (dump_file)
5015 fprintf (dump_file, "But alignment was already %u.\n", old_align);
5016 if ((old_misalign & (align - 1)) != misalign)
5017 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5018 old_misalign, misalign);
5020 continue;
5023 if (old_known
5024 && ((misalign & (old_align - 1)) != old_misalign)
5025 && dump_file)
5026 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5027 old_misalign, misalign);
5029 set_ptr_info_alignment (pi, align, misalign);
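/* The pointer branch above derives alignment from the known-bits pair:
   the lowest set bit of the mask (the first bit that is not known)
   bounds the alignment, and the value supplies the misalignment.  A
   self-contained example with hypothetical numbers, where the low four
   bits of a pointer are known and their value is 8:

     #include <assert.h>

     int
     main (void)
     {
       unsigned mask = 0xfffffff0;
       unsigned value = 0x8;
       unsigned align = mask & -mask;
       unsigned misalign = value & (align - 1);
       assert (align == 16 && misalign == 8);
       return 0;
     }

   I.e. such a pointer is known to equal 8 modulo 16.  */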
5035 /* Update value range of formal parameters as described in
5036 ipcp_transformation_summary. */
5038 static void
5039 ipcp_update_vr (struct cgraph_node *node)
5041 tree fndecl = node->decl;
5042 tree parm = DECL_ARGUMENTS (fndecl);
5043 tree next_parm = parm;
5044 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5045 if (!ts || vec_safe_length (ts->m_vr) == 0)
5046 return;
5047 const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
5048 unsigned count = vr.length ();
5050 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5052 if (node->clone.combined_args_to_skip
5053 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5054 continue;
5055 gcc_checking_assert (parm);
5056 next_parm = DECL_CHAIN (parm);
5057 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5059 if (!ddef || !is_gimple_reg (parm))
5060 continue;
5062 if (vr[i].known
5063 && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
5065 tree type = TREE_TYPE (ddef);
5066 unsigned prec = TYPE_PRECISION (type);
5067 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5069 if (dump_file)
5071 fprintf (dump_file, "Setting value range of param %u ", i);
5072 fprintf (dump_file, "%s[",
5073 (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
5074 print_decs (vr[i].min, dump_file);
5075 fprintf (dump_file, ", ");
5076 print_decs (vr[i].max, dump_file);
5077 fprintf (dump_file, "]\n");
5079 set_range_info (ddef, vr[i].type,
5080 wide_int_storage::from (vr[i].min, prec,
5081 TYPE_SIGN (type)),
5082 wide_int_storage::from (vr[i].max, prec,
5083 TYPE_SIGN (type)));
5085 else if (POINTER_TYPE_P (TREE_TYPE (ddef))
5086 && vr[i].type == VR_ANTI_RANGE
5087 && wi::eq_p (vr[i].min, 0)
5088 && wi::eq_p (vr[i].max, 0))
5090 if (dump_file)
5091 fprintf (dump_file, "Setting nonnull for %u\n", i);
5092 set_ptr_nonnull (ddef);
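/* A note on the pointer branch above: VRP encodes "this value is never
   zero" as the anti-range ~[0, 0], which is why min == 0 and max == 0
   with VR_ANTI_RANGE is recognized here.  For a pointer that is exactly
   a nonnull guarantee, so instead of storing a range the fact is
   recorded with set_ptr_nonnull on the parameter's default
   definition.  */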
5098 /* IPA-CP transformation phase: apply the recorded aggregate replacements, known bits and value ranges to the body of NODE. */
5100 unsigned int
5101 ipcp_transform_function (struct cgraph_node *node)
5103 vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
5104 struct ipa_func_body_info fbi;
5105 struct ipa_agg_replacement_value *aggval;
5106 int param_count;
5107 bool cfg_changed = false, something_changed = false;
5109 gcc_checking_assert (cfun);
5110 gcc_checking_assert (current_function_decl);
5112 if (dump_file)
5113 fprintf (dump_file, "Modification phase of node %s\n",
5114 node->dump_name ());
5116 ipcp_update_bits (node);
5117 ipcp_update_vr (node);
5118 aggval = ipa_get_agg_replacements_for_node (node);
5119 if (!aggval)
5120 return 0;
5121 param_count = count_formal_params (node->decl);
5122 if (param_count == 0)
5123 return 0;
5124 adjust_agg_replacement_values (node, aggval);
5125 if (dump_file)
5126 ipa_dump_agg_replacement_values (dump_file, aggval);
5128 fbi.node = node;
5129 fbi.info = NULL;
5130 fbi.bb_infos = vNULL;
5131 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5132 fbi.param_count = param_count;
5133 fbi.aa_walked = 0;
5135 vec_safe_grow_cleared (descriptors, param_count);
5136 ipa_populate_param_decls (node, *descriptors);
5137 calculate_dominance_info (CDI_DOMINATORS);
5138 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5139 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5141 int i;
5142 struct ipa_bb_info *bi;
5143 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5144 free_ipa_bb_info (bi);
5145 fbi.bb_infos.release ();
5146 free_dominance_info (CDI_DOMINATORS);
5147 (*ipcp_transformations)[node->uid].agg_values = NULL;
5148 (*ipcp_transformations)[node->uid].bits = NULL;
5149 (*ipcp_transformations)[node->uid].m_vr = NULL;
5151 vec_free (descriptors);
5153 if (!something_changed)
5154 return 0;
5155 else if (cfg_changed)
5156 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5157 else
5158 return TODO_update_ssa_only_virtuals;
5161 #include "gt-ipa-prop.h"