/* Interprocedural analyses.
   Copyright (C) 2005-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "calls.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-inline.h"
#include "ipa-fnsummary.h"
#include "gimple-pretty-print.h"
#include "params.h"
#include "ipa-utils.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"
/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;
/* Vector of IPA-CP transformation data for each clone.  */
vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
/* Edge summary for IPA-CP edge information.  */
ipa_edge_args_sum_t *ipa_edge_args_sum;
/* Traits for a hash table for reusing already existing ipa_bits.  */

struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
{
  typedef ipa_bits *value_type;
  typedef ipa_bits *compare_type;
  static hashval_t
  hash (const ipa_bits *p)
  {
    hashval_t t = (hashval_t) p->value.to_shwi ();
    return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
  }
  static bool
  equal (const ipa_bits *a, const ipa_bits *b)
  {
    return a->value == b->value && a->mask == b->mask;
  }
  static void
  mark_empty (ipa_bits *&p)
  {
    p = NULL;
  }
  static bool
  is_empty (const ipa_bits *p)
  {
    return p == NULL;
  }
  static bool
  is_deleted (const ipa_bits *p)
  {
    return p == reinterpret_cast<const ipa_bits *> (1);
  }
  static void
  mark_deleted (ipa_bits *&p)
  {
    p = reinterpret_cast<ipa_bits *> (1);
  }
};

/* Hash table to avoid repeated allocations of equal ipa_bits.  */
static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
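
/* Editor's note (a sketch, not from the original sources): the traits above
   make the table intern ipa_bits values, so that equal VALUE/MASK pairs share
   a single GC-allocated object.  The lookup-or-insert pattern is the usual
   hash_table one, e.g.:

     ipa_bits tmp;
     tmp.value = value;
     tmp.mask = mask;
     ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
     if (!*slot)
       *slot = allocate_ggc_copy (tmp);	  /+ hypothetical helper +/

   ipa_get_ipa_bits_for_value below implements exactly this scheme.  */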
/* Traits for a hash table for reusing value_ranges used for IPA.  Note that
   the equiv bitmap is not hashed and is expected to be NULL.  */

struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range *>
{
  typedef value_range *value_type;
  typedef value_range *compare_type;
  static hashval_t
  hash (const value_range *p)
  {
    gcc_checking_assert (!p->equiv);
    hashval_t t = (hashval_t) p->type;
    t = iterative_hash_expr (p->min, t);
    return iterative_hash_expr (p->max, t);
  }
  static bool
  equal (const value_range *a, const value_range *b)
  {
    return a->type == b->type && a->min == b->min && a->max == b->max;
  }
  static void
  mark_empty (value_range *&p)
  {
    p = NULL;
  }
  static bool
  is_empty (const value_range *p)
  {
    return p == NULL;
  }
  static bool
  is_deleted (const value_range *p)
  {
    return p == reinterpret_cast<const value_range *> (1);
  }
  static void
  mark_deleted (value_range *&p)
  {
    p = reinterpret_cast<value_range *> (1);
  }
};

/* Hash table to avoid repeated allocations of equal value_ranges.  */
static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
/* Holders of ipa cgraph hooks: */
static struct cgraph_node_hook_list *function_insertion_hook_holder;
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize)
	 || !opt_for_fn (node->decl, flag_ipa_cp);
}
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
			    tree ptree)
{
  int i, count;

  count = vec_safe_length (descriptors);
  for (i = 0; i < count; i++)
    if ((*descriptors)[i].decl_or_type == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor, va_gc> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl_or_type = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
							     true);
      param_num++;
    }
}
/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}
/* Dump the Ith formal parameter of the function corresponding to INFO to
   FILE.  Note there is no setter function as the descriptor array is built
   just once using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if ((*info->descriptors)[i].decl_or_type)
    {
      fprintf (file, " ");
      print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
    }
}
/* If necessary, allocate vector of parameter descriptors in info of NODE.
   Return true if they were allocated, false if not.  */

static bool
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors && param_count)
    {
      vec_safe_grow_cleared (info->descriptors, param_count);
      return true;
    }
  else
    return false;
}
/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors
      && ipa_alloc_node_params (node, count_formal_params (node->decl)))
    ipa_populate_param_decls (node, *info->descriptors);
}
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, " param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f, jump_func->value.pass_through.operand);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, " Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value);
		}
	      fprintf (f, "\n");
	    }
	}

      struct ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, " Context: ");
	  ctx->dump (dump_file);
	}

      if (jump_func->bits)
	{
	  fprintf (f, " value: ");
	  print_hex (jump_func->bits->value, f);
	  fprintf (f, ", mask: ");
	  print_hex (jump_func->bits->mask, f);
	  fprintf (f, "\n");
	}
      else
	fprintf (f, " Unknown bits\n");

      if (jump_func->m_vr)
	{
	  fprintf (f, " VR ");
	  fprintf (f, "%s[",
		   (jump_func->m_vr->type == VR_ANTI_RANGE) ? "~" : "");
	  print_decs (wi::to_wide (jump_func->m_vr->min), f);
	  fprintf (f, ", ");
	  print_decs (wi::to_wide (jump_func->m_vr->max), f);
	  fprintf (f, "]\n");
	}
      else
	fprintf (f, " Unknown VR\n");
    }
}
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, " Jump functions of caller %s:\n", node->dump_name ());
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, " callsite %s -> %s : \n",
	       node->dump_name (),
	       cs->callee->dump_name ());
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, " indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by_value");
      else
	fprintf (f, " indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}
/* Set JFUNC to be a know-nothing (IPA_JF_UNKNOWN) jump function.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
  jfunc->bits = NULL;
  jfunc->m_vr = NULL;
}
/* Set DST to be a copy of another jump function SRC (to be used by jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}
/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
/* Set JFUNC to be a simple pass-through jump function.  */
static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}
/* Set JFUNC to be a unary pass-through jump function.  */

static void
ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}
/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}
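
/* Editor's sketch (not from the original sources): for a call such as

     bar (a, a + 4);

   the first argument would typically get a simple pass-through jump function
   (formal_id referring to A, operation NOP_EXPR), while the second would get
   an arithmetic pass-through with operation PLUS_EXPR and operand 4, built by
   the setters above.  */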
/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type of the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.
  */
static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_ref_base_and_extent to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
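
/* Editor's C++ sketch of the constructor shape assumed by the comment above
   stmt_may_be_vtbl_ptr_store (an illustration, not from the original
   sources):

     struct B : A
     {
       B () : A ()	// 1) ancestor sub-object constructors run first
       {
	 // 2) at this point the VMT pointers of this object and its
	 //    ancestors have been set to B's tables
	 do_stuff ();	// 3) only now may user code run, possibly making
			//    virtual calls (do_stuff is hypothetical)
       }
     };
*/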
/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type may change
   in between the beginning of the function and the point where CALL is
   invoked.

   Generally functions are not allowed to change type of such instances,
   but they call destructors.  We assume that methods cannot destroy the THIS
   pointer.  Also as a special case, constructors and destructors may change
   type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that would require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inline cdtor is actually working on ARG, but we don't have
     an easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know the type
     of the instance, match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call);
	       block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gcall *call, struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
						call, jfunc, offset);
}
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
						call, jfunc, 0);
}
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}
/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct ipa_func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but which does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  if (TREE_READONLY (base))
    return true;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
			    vec<ipa_param_descriptor, va_gc> *descriptors,
			    gimple *stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
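
/* Editor's sketch (not from the original sources): given gimple such as

     foo (int a)
     {
       int a.0;
       a.0_2 = a;
       ...

   calling load_from_unmodified_param on the statement "a.0_2 = a;" returns
   the index of A, provided parm_preserved_before_stmt_p can show that no
   aliased store clobbered A beforehand.  */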
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
/* Return true if we can prove that OP is a memory reference loading
   data from an aggregate passed as a parameter.

   The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it
   returns false if it cannot prove that the value has not been modified
   before the load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will
   return true even if it cannot prove the value has not been modified; in
   that case it will store false to *GUARANTEED_UNMODIFIED, otherwise it will
   store true there.

   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor, va_gc> *descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
			bool *by_ref_p, bool *guaranteed_unmodified)
{
  int index;
  HOST_WIDE_INT size;
  bool reverse;
  tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);

  if (!base)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  if (guaranteed_unmodified)
	    *guaranteed_unmodified = true;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	   void (*<T2e4>) (struct S *) D.1867;
	   struct S * p.1;

	   <bb 2>:
	   p.1_1 = p;
	   D.1867_2 = p.1_1->f;
	   D.1867_2 ();
	   gdp = &p;
      */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0)
    {
      bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
      if (!data_preserved && !guaranteed_unmodified)
	return false;

      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      if (guaranteed_unmodified)
	*guaranteed_unmodified = data_preserved;
      return true;
    }
  return false;
}
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
	int a.0;

	a.0_2 = a;
	bar (a.0_2);
      }

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
	int D.2064;

	D.2064_4 = a.1(D) + 4;
	bar (D.2064_4);
      }

   This case can also occur in combination of the previous one, e.g.:

      foo (int a, int z)
      {
	int a.0;
	int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);
      }

   3) The passed value is an address of an object within another one (which
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);
     }

   INFO is the structure describing individual parameters access different
   stages of IPA optimizations.  PARMS_AINFO contains the information that is
   only needed for intraprocedural analysis.  */
static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      switch (gimple_assign_rhs_class (stmt))
	{
	case GIMPLE_BINARY_RHS:
	  {
	    tree op2 = gimple_assign_rhs2 (stmt);
	    if (!is_gimple_ip_invariant (op2)
		|| ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
		     != tcc_comparison)
		    && !useless_type_conversion_p (TREE_TYPE (name),
						   TREE_TYPE (op1))))
	      return;

	    ipa_set_jf_arith_pass_through (jfunc, index, op2,
					   gimple_assign_rhs_code (stmt));
	    break;
	  }
	case GIMPLE_SINGLE_RHS:
	  {
	    bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
						       tc_ssa);
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	    break;
	  }
	case GIMPLE_UNARY_RHS:
	  if (is_gimple_assign (stmt)
	      && gimple_assign_rhs_class (stmt) == GIMPLE_UNARY_RHS
	      && ! CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
	    ipa_set_jf_unary_pass_through (jfunc, index,
					   gimple_assign_rhs_code (stmt));
	default:;
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
  if (!base || TREE_CODE (base) != MEM_REF)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);

  if (!expr || TREE_CODE (expr) != MEM_REF)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
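
/* Editor's sketch (an assumption, not from the original sources): the layout
   matched above corresponds to the usual C++ member-function-pointer
   representation, roughly

     struct
     {
       void (S::*pfn) ();   // pointer to METHOD_TYPE - the first field
       ptrdiff_t delta;	    // this-pointer adjustment - the second field
     };

   with METHOD_PTR and DELTA receiving the respective FIELD_DECLs.  */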
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}
/* Simple linked list, describing known contents of an aggregate before a
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents are known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
				HOST_WIDE_INT lhs_offset,
				HOST_WIDE_INT lhs_size,
				bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
	return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	/* We already know this value is subsequently overwritten with
	   something else.  */
	*already_there = true;
      else
	/* Otherwise this is a partial overlap which we cannot
	   represent.  */
	return NULL;
    }
  return p;
}
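
/* Editor's sketch of the semantics above (not from the original sources):
   with a list already describing bytes [0, 4) and [8, 12),

     - offset 8, size 4 sets *ALREADY_THERE (exact match);
     - offset 6, size 4 returns NULL (partial overlap with [8, 12));
     - offset 4, size 4 returns the link between the two entries.  */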
/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET, and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int const_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
	{
	  struct ipa_agg_jf_item item;
	  item.offset = list->offset - arg_offset;
	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	  item.value = unshare_expr_without_location (list->constant);
	  jfunc->agg.items->quick_push (item);
	}
      list = list->next;
    }
}
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */

static void
determine_locally_known_aggregate_parts (gcall *call, tree arg,
					 tree arg_type,
					 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  if (PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS) == 0)
    return;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  bool reverse;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
						  &arg_size, &reverse);
	  if (!arg_base)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      bool reverse;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
					      &arg_size, &reverse);
      if (!arg_base)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
     structures describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple *stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size;
      tree lhs, rhs, lhs_base;
      bool reverse;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
	continue;
      if (!gimple_assign_single_p (stmt))
	break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
	  || TREE_CODE (lhs) == BIT_FIELD_REF
	  || contains_bitfld_component_ref_p (lhs))
	break;

      lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset,
					      &lhs_size, &reverse);
      if (!lhs_base)
	break;

      if (check_ref)
	{
	  if (TREE_CODE (lhs_base) != MEM_REF
	      || TREE_OPERAND (lhs_base, 0) != arg_base
	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	    break;
	}
      else if (lhs_base != arg_base)
	{
	  if (DECL_P (lhs_base))
	    continue;
	  else
	    break;
	}

      bool already_there = false;
      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
					  &already_there);
      if (!p)
	break;
      if (already_there)
	continue;

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
	{
	  n->constant = rhs;
	  const_count++;
	}
      else
	n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}
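
/* Editor's sketch (not from the original sources): for source like

     struct S s;
     s.a = 1;
     s.b = 2;
     foo (&s);

   the backward walk from the call records both constant stores, and the
   resulting jump function describes the aggregate contents at the call site:
   the offset of a maps to 1 and the offset of b to 2, with agg.by_ref set
   because the parameter is a pointer.  */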
/* Return the Ith param type of callee associated with call graph
   edge E.  */

tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
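
/* Editor's sketch (not from the original sources): for a callee declared as
   "void f (int a, char *b)", ipa_get_callee_param_type (e, 1) returns the
   type "char *" from TYPE_ARG_TYPES; for an unprototyped (K&R) callee the
   first loop runs out of TREE_CHAIN links and the function falls back to the
   types of the DECL_ARGUMENTS chain.  */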
/* Return ipa_bits with VALUE and MASK values, which can be either a newly
   allocated structure or a previously existing one shared with other jump
   functions and/or transformation summaries.  */

ipa_bits *
ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
{
  ipa_bits tmp;
  tmp.value = value;
  tmp.mask = mask;

  ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
  if (*slot)
    return *slot;

  ipa_bits *res = ggc_alloc<ipa_bits> ();
  res->value = value;
  res->mask = mask;
  *slot = res;

  return res;
}
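
/* Editor's usage sketch (an assumption, not from the original sources):
   equal VALUE/MASK pairs map to one shared GC allocation, so pointer
   comparison suffices:

     ipa_bits *b1 = ipa_get_ipa_bits_for_value (4, ~(widest_int) 7);
     ipa_bits *b2 = ipa_get_ipa_bits_for_value (4, ~(widest_int) 7);
     gcc_checking_assert (b1 == b2);

   Callers must not modify or free the returned object.  */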
/* Assign to JF a pointer to ipa_bits structure with VALUE and MASK.  Use hash
   table in order to avoid creating multiple same ipa_bits structures.  */

static void
ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
		    const widest_int &mask)
{
  jf->bits = ipa_get_ipa_bits_for_value (value, mask);
}
/* Return a pointer to a value_range just like *TMP, but either find it in
   ipa_vr_hash_table or allocate it in GC memory.  TMP->equiv must be NULL.  */

static value_range *
ipa_get_value_range (value_range *tmp)
{
  value_range **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
  if (*slot)
    return *slot;

  value_range *vr = ggc_alloc<value_range> ();
  *vr = *tmp;
  *slot = vr;

  return vr;
}
/* Return a pointer to a value range consisting of TYPE, MIN, MAX and an empty
   equiv set.  Use hash table in order to avoid creating multiple same copies
   of value_ranges.  */

static value_range *
ipa_get_value_range (enum value_range_type type, tree min, tree max)
{
  value_range tmp;
  tmp.type = type;
  tmp.min = min;
  tmp.max = max;
  tmp.equiv = NULL;
  return ipa_get_value_range (&tmp);
}
/* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
   a NULL equiv bitmap.  Use hash table in order to avoid creating multiple
   same value_range structures.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_type type,
		  tree min, tree max)
{
  jf->m_vr = ipa_get_value_range (type, min, max);
}
/* Assign to JF a pointer to a value_range just like TMP but either fetch a
   copy from ipa_vr_hash_table or allocate a new one in GC memory.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, value_range *tmp)
{
  jf->m_vr = ipa_get_value_range (tmp);
}
1814 /* Compute jump function for all arguments of callsite CS and insert the
1815 information in the jump_functions array in the ipa_edge_args corresponding
1816 to this callsite. */
1818 static void
1819 ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
1820 struct cgraph_edge *cs)
1822 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1823 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1824 gcall *call = cs->call_stmt;
1825 int n, arg_num = gimple_call_num_args (call);
1826 bool useful_context = false;
1828 if (arg_num == 0 || args->jump_functions)
1829 return;
1830 vec_safe_grow_cleared (args->jump_functions, arg_num);
1831 if (flag_devirtualize)
1832 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
1834 if (gimple_call_internal_p (call))
1835 return;
1836 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1837 return;
1839 for (n = 0; n < arg_num; n++)
1841 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1842 tree arg = gimple_call_arg (call, n);
1843 tree param_type = ipa_get_callee_param_type (cs, n);
1844 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
1846 tree instance;
1847 struct ipa_polymorphic_call_context context (cs->caller->decl,
1848 arg, cs->call_stmt,
1849 &instance);
1850 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
1851 *ipa_get_ith_polymorhic_call_context (args, n) = context;
1852 if (!context.useless_p ())
1853 useful_context = true;
1856 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1858 bool addr_nonzero = false;
1859 bool strict_overflow = false;
1861 if (TREE_CODE (arg) == SSA_NAME
1862 && param_type
1863 && get_ptr_nonnull (arg))
1864 addr_nonzero = true;
1865 else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
1866 addr_nonzero = true;
1868 if (addr_nonzero)
1870 tree z = build_int_cst (TREE_TYPE (arg), 0);
1871 ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
1873 else
1874 gcc_assert (!jfunc->m_vr);
1876 else
1878 wide_int min, max;
1879 value_range_type type;
1880 if (TREE_CODE (arg) == SSA_NAME
1881 && param_type
1882 && (type = get_range_info (arg, &min, &max))
1883 && (type == VR_RANGE || type == VR_ANTI_RANGE))
1885 value_range tmpvr,resvr;
1887 tmpvr.type = type;
1888 tmpvr.min = wide_int_to_tree (TREE_TYPE (arg), min);
1889 tmpvr.max = wide_int_to_tree (TREE_TYPE (arg), max);
1890 tmpvr.equiv = NULL;
1891 memset (&resvr, 0, sizeof (resvr));
1892 extract_range_from_unary_expr (&resvr, NOP_EXPR, param_type,
1893 &tmpvr, TREE_TYPE (arg));
1894 if (resvr.type == VR_RANGE || resvr.type == VR_ANTI_RANGE)
1895 ipa_set_jfunc_vr (jfunc, &resvr);
1896 else
1897 gcc_assert (!jfunc->m_vr);
1899 else
1900 gcc_assert (!jfunc->m_vr);
1903 if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
1904 && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
1906 if (TREE_CODE (arg) == SSA_NAME)
1907 ipa_set_jfunc_bits (jfunc, 0,
1908 widest_int::from (get_nonzero_bits (arg),
1909 TYPE_SIGN (TREE_TYPE (arg))));
1910 else
1911 ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
1913 else if (POINTER_TYPE_P (TREE_TYPE (arg)))
1915 unsigned HOST_WIDE_INT bitpos;
1916 unsigned align;
1918 get_pointer_alignment_1 (arg, &align, &bitpos);
1919 widest_int mask = wi::bit_and_not
1920 (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
1921 align / BITS_PER_UNIT - 1);
1922 widest_int value = bitpos / BITS_PER_UNIT;
1923 ipa_set_jfunc_bits (jfunc, value, mask);
1925 else
1926 gcc_assert (!jfunc->bits);
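/* An illustrative sketch with hypothetical values: if ARG is a pointer
   that get_pointer_alignment_1 reports as 16-byte aligned (ALIGN == 128)
   at bit position 32, the mask above has its low four bits cleared and
   VALUE is 4, so the callee learns that ARG % 16 == 4.  */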
1928 if (is_gimple_ip_invariant (arg)
1929 || (VAR_P (arg)
1930 && is_global_var (arg)
1931 && TREE_READONLY (arg)))
1932 ipa_set_jf_constant (jfunc, arg, cs);
1933 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1934 && TREE_CODE (arg) == PARM_DECL)
1936 int index = ipa_get_param_decl_index (info, arg);
1938 gcc_assert (index >= 0);
1939 /* Aggregate passed by value, check for pass-through, otherwise we
1940 will attempt to fill in aggregate contents later in this
1941 loop. */
1942 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1944 ipa_set_jf_simple_pass_through (jfunc, index, false);
1945 continue;
1948 else if (TREE_CODE (arg) == SSA_NAME)
1950 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1952 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1953 if (index >= 0)
1955 bool agg_p;
1956 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1957 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1960 else
1962 gimple *stmt = SSA_NAME_DEF_STMT (arg);
1963 if (is_gimple_assign (stmt))
1964 compute_complex_assign_jump_func (fbi, info, jfunc,
1965 call, stmt, arg, param_type);
1966 else if (gimple_code (stmt) == GIMPLE_PHI)
1967 compute_complex_ancestor_jump_func (fbi, info, jfunc,
1968 call,
1969 as_a <gphi *> (stmt));
1973 /* If ARG is a pointer, we cannot use its type to determine the type of the
1974 aggregate passed (because type conversions are ignored in gimple). Usually
1975 we can safely get the type from the function declaration, but in the case of
1976 K&R prototypes or variadic functions we can try our luck with the type of the pointer passed.
1977 TODO: Since we look for actual initialization of the memory object, we might
1978 do better to work out the type based on the memory stores we find. */
1979 if (!param_type)
1980 param_type = TREE_TYPE (arg);
1982 if ((jfunc->type != IPA_JF_PASS_THROUGH
1983 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1984 && (jfunc->type != IPA_JF_ANCESTOR
1985 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1986 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1987 || POINTER_TYPE_P (param_type)))
1988 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
1990 if (!useful_context)
1991 vec_free (args->polymorphic_call_contexts);
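/* A hedged sketch of the loop above (the caller below is hypothetical,
   not from this file): for

     void caller (int x) { callee (x, x + 1, 7); }

   the analysis would typically record a simple pass-through jump function
   for the first argument, an arithmetic pass-through (PLUS_EXPR, 1) for
   the second and a constant jump function for the third, subject to the
   early-out checks at the top of the function.  */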
1994 /* Compute jump functions for all edges - both direct and indirect - outgoing
1995 from BB. */
1997 static void
1998 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
2000 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2001 int i;
2002 struct cgraph_edge *cs;
2004 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
2006 struct cgraph_node *callee = cs->callee;
2008 if (callee)
2010 callee->ultimate_alias_target ();
2011 /* We do not need to bother analyzing calls to unknown functions
2012 unless they may become known during LTO/WHOPR. */
2013 if (!callee->definition && !flag_lto)
2014 continue;
2016 ipa_compute_jump_functions_for_edge (fbi, cs);
2020 /* If STMT looks like a statement loading a value from a member pointer formal
2021 parameter, return that parameter and store the offset of the field to
2022 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2023 might be clobbered). If USE_DELTA, then we look for a use of the delta
2024 field rather than the pfn. */
2026 static tree
2027 ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
2028 HOST_WIDE_INT *offset_p)
2030 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2032 if (!gimple_assign_single_p (stmt))
2033 return NULL_TREE;
2035 rhs = gimple_assign_rhs1 (stmt);
2036 if (TREE_CODE (rhs) == COMPONENT_REF)
2038 ref_field = TREE_OPERAND (rhs, 1);
2039 rhs = TREE_OPERAND (rhs, 0);
2041 else
2042 ref_field = NULL_TREE;
2043 if (TREE_CODE (rhs) != MEM_REF)
2044 return NULL_TREE;
2045 rec = TREE_OPERAND (rhs, 0);
2046 if (TREE_CODE (rec) != ADDR_EXPR)
2047 return NULL_TREE;
2048 rec = TREE_OPERAND (rec, 0);
2049 if (TREE_CODE (rec) != PARM_DECL
2050 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2051 return NULL_TREE;
2052 ref_offset = TREE_OPERAND (rhs, 1);
2054 if (use_delta)
2055 fld = delta_field;
2056 else
2057 fld = ptr_field;
2058 if (offset_p)
2059 *offset_p = int_bit_position (fld);
2061 if (ref_field)
2063 if (integer_nonzerop (ref_offset))
2064 return NULL_TREE;
2065 return ref_field == fld ? rec : NULL_TREE;
2067 else
2068 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2069 : NULL_TREE;
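/* For illustration, assuming a member-pointer parameter F laid out as
   { __pfn, __delta } with __delta at byte 4, the matcher above accepts
   either of the forms

     tmp_1 = MEM[(struct *)&f].__pfn;
     tmp_1 = MEM[(struct *)&f + 4B];

   and returns F when the field (or byte offset) loaded is the one
   selected by USE_DELTA (pfn by default, delta otherwise).  */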
2072 /* Returns true iff T is an SSA_NAME defined by a statement. */
2074 static bool
2075 ipa_is_ssa_with_stmt_def (tree t)
2077 if (TREE_CODE (t) == SSA_NAME
2078 && !SSA_NAME_IS_DEFAULT_DEF (t))
2079 return true;
2080 else
2081 return false;
2084 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2085 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2086 indirect call graph edge. */
2088 static struct cgraph_edge *
2089 ipa_note_param_call (struct cgraph_node *node, int param_index,
2090 gcall *stmt)
2092 struct cgraph_edge *cs;
2094 cs = node->get_edge (stmt);
2095 cs->indirect_info->param_index = param_index;
2096 cs->indirect_info->agg_contents = 0;
2097 cs->indirect_info->member_ptr = 0;
2098 cs->indirect_info->guaranteed_unmodified = 0;
2099 return cs;
2102 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
2103 (described by INFO). PARMS_AINFO is a pointer to a vector containing
2104 intermediate information about each formal parameter. Currently it checks
2105 whether the call calls a pointer that is a formal parameter and if so, the
2106 parameter is marked with the called flag and an indirect call graph edge
2107 describing the call is created. This is very simple for ordinary pointers
2108 represented in SSA but not-so-nice when it comes to member pointers. The
2109 ugly part of this function does nothing more than trying to match the
2110 pattern of such a call. An example of such a pattern is the gimple dump
2111 below; the call is on the last line:
2113 <bb 2>:
2114 f$__delta_5 = f.__delta;
2115 f$__pfn_24 = f.__pfn;
2117 or
2118 <bb 2>:
2119 f$__delta_5 = MEM[(struct *)&f];
2120 f$__pfn_24 = MEM[(struct *)&f + 4B];
2122 and a few lines below:
2124 <bb 5>
2125 D.2496_3 = (int) f$__pfn_24;
2126 D.2497_4 = D.2496_3 & 1;
2127 if (D.2497_4 != 0)
2128 goto <bb 3>;
2129 else
2130 goto <bb 4>;
2132 <bb 6>:
2133 D.2500_7 = (unsigned int) f$__delta_5;
2134 D.2501_8 = &S + D.2500_7;
2135 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2136 D.2503_10 = *D.2502_9;
2137 D.2504_12 = f$__pfn_24 + -1;
2138 D.2505_13 = (unsigned int) D.2504_12;
2139 D.2506_14 = D.2503_10 + D.2505_13;
2140 D.2507_15 = *D.2506_14;
2141 iftmp.11_16 = (String:: *) D.2507_15;
2143 <bb 7>:
2144 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2145 D.2500_19 = (unsigned int) f$__delta_5;
2146 D.2508_20 = &S + D.2500_19;
2147 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2149 Such patterns are the results of simple calls to a member pointer:
2151 int doprinting (int (MyString::* f)(int) const)
2153 MyString S ("somestring");
2155 return (S.*f)(4);
2158 Moreover, the function also looks for called pointers loaded from aggregates
2159 passed by value or reference. */
2161 static void
2162 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
2163 tree target)
2165 struct ipa_node_params *info = fbi->info;
2166 HOST_WIDE_INT offset;
2167 bool by_ref;
2169 if (SSA_NAME_IS_DEFAULT_DEF (target))
2171 tree var = SSA_NAME_VAR (target);
2172 int index = ipa_get_param_decl_index (info, var);
2173 if (index >= 0)
2174 ipa_note_param_call (fbi->node, index, call);
2175 return;
2178 int index;
2179 gimple *def = SSA_NAME_DEF_STMT (target);
2180 bool guaranteed_unmodified;
2181 if (gimple_assign_single_p (def)
2182 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2183 gimple_assign_rhs1 (def), &index, &offset,
2184 NULL, &by_ref, &guaranteed_unmodified))
2186 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2187 cs->indirect_info->offset = offset;
2188 cs->indirect_info->agg_contents = 1;
2189 cs->indirect_info->by_ref = by_ref;
2190 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
2191 return;
2194 /* Now we need to try to match the complex pattern of calling a member
2195 pointer. */
2196 if (gimple_code (def) != GIMPLE_PHI
2197 || gimple_phi_num_args (def) != 2
2198 || !POINTER_TYPE_P (TREE_TYPE (target))
2199 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2200 return;
2202 /* First, we need to check whether one of these is a load from a member
2203 pointer that is a parameter to this function. */
2204 tree n1 = PHI_ARG_DEF (def, 0);
2205 tree n2 = PHI_ARG_DEF (def, 1);
2206 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2207 return;
2208 gimple *d1 = SSA_NAME_DEF_STMT (n1);
2209 gimple *d2 = SSA_NAME_DEF_STMT (n2);
2211 tree rec;
2212 basic_block bb, virt_bb;
2213 basic_block join = gimple_bb (def);
2214 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2216 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2217 return;
2219 bb = EDGE_PRED (join, 0)->src;
2220 virt_bb = gimple_bb (d2);
2222 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2224 bb = EDGE_PRED (join, 1)->src;
2225 virt_bb = gimple_bb (d1);
2227 else
2228 return;
2230 /* Second, we need to check that the basic blocks are laid out in the way
2231 corresponding to the pattern. */
2233 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2234 || single_pred (virt_bb) != bb
2235 || single_succ (virt_bb) != join)
2236 return;
2238 /* Third, let's see that the branching is done depending on the least
2239 significant bit of the pfn. */
2241 gimple *branch = last_stmt (bb);
2242 if (!branch || gimple_code (branch) != GIMPLE_COND)
2243 return;
2245 if ((gimple_cond_code (branch) != NE_EXPR
2246 && gimple_cond_code (branch) != EQ_EXPR)
2247 || !integer_zerop (gimple_cond_rhs (branch)))
2248 return;
2250 tree cond = gimple_cond_lhs (branch);
2251 if (!ipa_is_ssa_with_stmt_def (cond))
2252 return;
2254 def = SSA_NAME_DEF_STMT (cond);
2255 if (!is_gimple_assign (def)
2256 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2257 || !integer_onep (gimple_assign_rhs2 (def)))
2258 return;
2260 cond = gimple_assign_rhs1 (def);
2261 if (!ipa_is_ssa_with_stmt_def (cond))
2262 return;
2264 def = SSA_NAME_DEF_STMT (cond);
2266 if (is_gimple_assign (def)
2267 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2269 cond = gimple_assign_rhs1 (def);
2270 if (!ipa_is_ssa_with_stmt_def (cond))
2271 return;
2272 def = SSA_NAME_DEF_STMT (cond);
2275 tree rec2;
2276 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2277 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2278 == ptrmemfunc_vbit_in_delta),
2279 NULL);
2280 if (rec != rec2)
2281 return;
2283 index = ipa_get_param_decl_index (info, rec);
2284 if (index >= 0
2285 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2287 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2288 cs->indirect_info->offset = offset;
2289 cs->indirect_info->agg_contents = 1;
2290 cs->indirect_info->member_ptr = 1;
2291 cs->indirect_info->guaranteed_unmodified = 1;
2294 return;
2297 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2298 object referenced in the expression is a formal parameter of the caller
2299 FBI->node (described by FBI->info), create a call note for the
2300 statement. */
2302 static void
2303 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2304 gcall *call, tree target)
2306 tree obj = OBJ_TYPE_REF_OBJECT (target);
2307 int index;
2308 HOST_WIDE_INT anc_offset;
2310 if (!flag_devirtualize)
2311 return;
2313 if (TREE_CODE (obj) != SSA_NAME)
2314 return;
2316 struct ipa_node_params *info = fbi->info;
2317 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2319 struct ipa_jump_func jfunc;
2320 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2321 return;
2323 anc_offset = 0;
2324 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2325 gcc_assert (index >= 0);
2326 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2327 call, &jfunc))
2328 return;
2330 else
2332 struct ipa_jump_func jfunc;
2333 gimple *stmt = SSA_NAME_DEF_STMT (obj);
2334 tree expr;
2336 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2337 if (!expr)
2338 return;
2339 index = ipa_get_param_decl_index (info,
2340 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2341 gcc_assert (index >= 0);
2342 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2343 call, &jfunc, anc_offset))
2344 return;
2347 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2348 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2349 ii->offset = anc_offset;
2350 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2351 ii->otr_type = obj_type_ref_class (target);
2352 ii->polymorphic = 1;
2355 /* Analyze a call statement CALL whether and how it utilizes formal parameters
2356 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
2357 containing intermediate information about each formal parameter. */
2359 static void
2360 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2362 tree target = gimple_call_fn (call);
2364 if (!target
2365 || (TREE_CODE (target) != SSA_NAME
2366 && !virtual_method_call_p (target)))
2367 return;
2369 struct cgraph_edge *cs = fbi->node->get_edge (call);
2370 /* If we previously turned the call into a direct call, there is
2371 no need to analyze. */
2372 if (cs && !cs->indirect_unknown_callee)
2373 return;
2375 if (cs->indirect_info->polymorphic && flag_devirtualize)
2377 tree instance;
2378 tree target = gimple_call_fn (call);
2379 ipa_polymorphic_call_context context (current_function_decl,
2380 target, call, &instance);
2382 gcc_checking_assert (cs->indirect_info->otr_type
2383 == obj_type_ref_class (target));
2384 gcc_checking_assert (cs->indirect_info->otr_token
2385 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2387 cs->indirect_info->vptr_changed
2388 = !context.get_dynamic_type (instance,
2389 OBJ_TYPE_REF_OBJECT (target),
2390 obj_type_ref_class (target), call);
2391 cs->indirect_info->context = context;
2394 if (TREE_CODE (target) == SSA_NAME)
2395 ipa_analyze_indirect_call_uses (fbi, call, target);
2396 else if (virtual_method_call_p (target))
2397 ipa_analyze_virtual_call_uses (fbi, call, target);
2401 /* Analyze the call statement STMT with respect to formal parameters (described
2402 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2403 formal parameters are called. */
2405 static void
2406 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2408 if (is_gimple_call (stmt))
2409 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2412 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2413 If OP is a parameter declaration, mark it as used in the info structure
2414 passed in DATA. */
2416 static bool
2417 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2419 struct ipa_node_params *info = (struct ipa_node_params *) data;
2421 op = get_base_address (op);
2422 if (op
2423 && TREE_CODE (op) == PARM_DECL)
2425 int index = ipa_get_param_decl_index (info, op);
2426 gcc_assert (index >= 0);
2427 ipa_set_param_used (info, index, true);
2430 return false;
2433 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2434 the findings in various structures of the associated ipa_node_params
2435 structure, such as parameter flags, notes etc. FBI holds various data about
2436 the function being analyzed. */
2438 static void
2439 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2441 gimple_stmt_iterator gsi;
2442 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2444 gimple *stmt = gsi_stmt (gsi);
2446 if (is_gimple_debug (stmt))
2447 continue;
2449 ipa_analyze_stmt_uses (fbi, stmt);
2450 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2451 visit_ref_for_mod_analysis,
2452 visit_ref_for_mod_analysis,
2453 visit_ref_for_mod_analysis);
2455 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2456 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2457 visit_ref_for_mod_analysis,
2458 visit_ref_for_mod_analysis,
2459 visit_ref_for_mod_analysis);
2462 /* Calculate controlled uses of parameters of NODE. */
2464 static void
2465 ipa_analyze_controlled_uses (struct cgraph_node *node)
2467 struct ipa_node_params *info = IPA_NODE_REF (node);
2469 for (int i = 0; i < ipa_get_param_count (info); i++)
2471 tree parm = ipa_get_param (info, i);
2472 int controlled_uses = 0;
2474 /* For SSA regs see if parameter is used. For non-SSA we compute
2475 the flag during modification analysis. */
2476 if (is_gimple_reg (parm))
2478 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2479 parm);
2480 if (ddef && !has_zero_uses (ddef))
2482 imm_use_iterator imm_iter;
2483 use_operand_p use_p;
2485 ipa_set_param_used (info, i, true);
2486 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2487 if (!is_gimple_call (USE_STMT (use_p)))
2489 if (!is_gimple_debug (USE_STMT (use_p)))
2491 controlled_uses = IPA_UNDESCRIBED_USE;
2492 break;
2495 else
2496 controlled_uses++;
2498 else
2499 controlled_uses = 0;
2501 else
2502 controlled_uses = IPA_UNDESCRIBED_USE;
2503 ipa_set_controlled_uses (info, i, controlled_uses);
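/* A worked example with hypothetical uses: if the default definition of
   PARM has exactly two uses and both are call arguments, CONTROLLED_USES
   ends up as 2; a single appearance in any other statement (a comparison,
   a store, ...) makes it IPA_UNDESCRIBED_USE instead.  */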
2507 /* Free stuff in BI. */
2509 static void
2510 free_ipa_bb_info (struct ipa_bb_info *bi)
2512 bi->cg_edges.release ();
2513 bi->param_aa_statuses.release ();
2516 /* Dominator walker driving the analysis. */
2518 class analysis_dom_walker : public dom_walker
2520 public:
2521 analysis_dom_walker (struct ipa_func_body_info *fbi)
2522 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2524 virtual edge before_dom_children (basic_block);
2526 private:
2527 struct ipa_func_body_info *m_fbi;
2530 edge
2531 analysis_dom_walker::before_dom_children (basic_block bb)
2533 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2534 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2535 return NULL;
2538 /* Release body info FBI. */
2540 void
2541 ipa_release_body_info (struct ipa_func_body_info *fbi)
2543 int i;
2544 struct ipa_bb_info *bi;
2546 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2547 free_ipa_bb_info (bi);
2548 fbi->bb_infos.release ();
2551 /* Initialize the array describing properties of formal parameters
2552 of NODE, analyze their uses and compute jump functions associated
2553 with actual arguments of calls from within NODE. */
2555 void
2556 ipa_analyze_node (struct cgraph_node *node)
2558 struct ipa_func_body_info fbi;
2559 struct ipa_node_params *info;
2561 ipa_check_create_node_params ();
2562 ipa_check_create_edge_args ();
2563 info = IPA_NODE_REF (node);
2565 if (info->analysis_done)
2566 return;
2567 info->analysis_done = 1;
2569 if (ipa_func_spec_opts_forbid_analysis_p (node))
2571 for (int i = 0; i < ipa_get_param_count (info); i++)
2573 ipa_set_param_used (info, i, true);
2574 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2576 return;
2579 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2580 push_cfun (func);
2581 calculate_dominance_info (CDI_DOMINATORS);
2582 ipa_initialize_node_params (node);
2583 ipa_analyze_controlled_uses (node);
2585 fbi.node = node;
2586 fbi.info = IPA_NODE_REF (node);
2587 fbi.bb_infos = vNULL;
2588 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2589 fbi.param_count = ipa_get_param_count (info);
2590 fbi.aa_walked = 0;
2592 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2594 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2595 bi->cg_edges.safe_push (cs);
2598 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2600 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2601 bi->cg_edges.safe_push (cs);
2604 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2606 ipa_release_body_info (&fbi);
2607 free_dominance_info (CDI_DOMINATORS);
2608 pop_cfun ();
2611 /* Update the jump functions associated with call graph edge E when the call
2612 graph edge CS is being inlined, assuming that E->caller is already (possibly
2613 indirectly) inlined into CS->callee and that E has not been inlined. */
2615 static void
2616 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2617 struct cgraph_edge *e)
2619 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2620 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2621 int count = ipa_get_cs_argument_count (args);
2622 int i;
2624 for (i = 0; i < count; i++)
2626 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2627 struct ipa_polymorphic_call_context *dst_ctx
2628 = ipa_get_ith_polymorhic_call_context (args, i);
2630 if (dst->type == IPA_JF_ANCESTOR)
2632 struct ipa_jump_func *src;
2633 int dst_fid = dst->value.ancestor.formal_id;
2634 struct ipa_polymorphic_call_context *src_ctx
2635 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2637 /* A variable number of arguments can cause havoc if we try to access
2638 one that does not exist in the inlined edge. So make sure we
2639 don't. */
2640 if (dst_fid >= ipa_get_cs_argument_count (top))
2642 ipa_set_jf_unknown (dst);
2643 continue;
2646 src = ipa_get_ith_jump_func (top, dst_fid);
2648 if (src_ctx && !src_ctx->useless_p ())
2650 struct ipa_polymorphic_call_context ctx = *src_ctx;
2652 /* TODO: Make type preserved safe WRT contexts. */
2653 if (!ipa_get_jf_ancestor_type_preserved (dst))
2654 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2655 ctx.offset_by (dst->value.ancestor.offset);
2656 if (!ctx.useless_p ())
2658 if (!dst_ctx)
2660 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2661 count);
2662 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2665 dst_ctx->combine_with (ctx);
2669 if (src->agg.items
2670 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2672 struct ipa_agg_jf_item *item;
2673 int j;
2675 /* Currently we do not produce clobber aggregate jump functions,
2676 replace with merging when we do. */
2677 gcc_assert (!dst->agg.items);
2679 dst->agg.items = vec_safe_copy (src->agg.items);
2680 dst->agg.by_ref = src->agg.by_ref;
2681 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2682 item->offset -= dst->value.ancestor.offset;
2685 if (src->type == IPA_JF_PASS_THROUGH
2686 && src->value.pass_through.operation == NOP_EXPR)
2688 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2689 dst->value.ancestor.agg_preserved &=
2690 src->value.pass_through.agg_preserved;
2692 else if (src->type == IPA_JF_PASS_THROUGH
2693 && TREE_CODE_CLASS (src->value.pass_through.operation) == tcc_unary)
2695 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2696 dst->value.ancestor.agg_preserved = false;
2698 else if (src->type == IPA_JF_ANCESTOR)
2700 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2701 dst->value.ancestor.offset += src->value.ancestor.offset;
2702 dst->value.ancestor.agg_preserved &=
2703 src->value.ancestor.agg_preserved;
2705 else
2706 ipa_set_jf_unknown (dst);
2708 else if (dst->type == IPA_JF_PASS_THROUGH)
2710 struct ipa_jump_func *src;
2711 /* We must check range due to calls with variable number of arguments
2712 and we cannot combine jump functions with operations. */
2713 if (dst->value.pass_through.operation == NOP_EXPR
2714 && (dst->value.pass_through.formal_id
2715 < ipa_get_cs_argument_count (top)))
2717 int dst_fid = dst->value.pass_through.formal_id;
2718 src = ipa_get_ith_jump_func (top, dst_fid);
2719 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2720 struct ipa_polymorphic_call_context *src_ctx
2721 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2723 if (src_ctx && !src_ctx->useless_p ())
2725 struct ipa_polymorphic_call_context ctx = *src_ctx;
2727 /* TODO: Make type preserved safe WRT contexts. */
2728 if (!ipa_get_jf_pass_through_type_preserved (dst))
2729 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2730 if (!ctx.useless_p ())
2732 if (!dst_ctx)
2734 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2735 count);
2736 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2738 dst_ctx->combine_with (ctx);
2741 switch (src->type)
2743 case IPA_JF_UNKNOWN:
2744 ipa_set_jf_unknown (dst);
2745 break;
2746 case IPA_JF_CONST:
2747 ipa_set_jf_cst_copy (dst, src);
2748 break;
2750 case IPA_JF_PASS_THROUGH:
2752 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2753 enum tree_code operation;
2754 operation = ipa_get_jf_pass_through_operation (src);
2756 if (operation == NOP_EXPR)
2758 bool agg_p;
2759 agg_p = dst_agg_p
2760 && ipa_get_jf_pass_through_agg_preserved (src);
2761 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2763 else if (TREE_CODE_CLASS (operation) == tcc_unary)
2764 ipa_set_jf_unary_pass_through (dst, formal_id, operation);
2765 else
2767 tree operand = ipa_get_jf_pass_through_operand (src);
2768 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2769 operation);
2771 break;
2773 case IPA_JF_ANCESTOR:
2775 bool agg_p;
2776 agg_p = dst_agg_p
2777 && ipa_get_jf_ancestor_agg_preserved (src);
2778 ipa_set_ancestor_jf (dst,
2779 ipa_get_jf_ancestor_offset (src),
2780 ipa_get_jf_ancestor_formal_id (src),
2781 agg_p);
2782 break;
2784 default:
2785 gcc_unreachable ();
2788 if (src->agg.items
2789 && (dst_agg_p || !src->agg.by_ref))
2791 /* Currently we do not produce clobber aggregate jump
2792 functions, replace with merging when we do. */
2793 gcc_assert (!dst->agg.items);
2795 dst->agg.by_ref = src->agg.by_ref;
2796 dst->agg.items = vec_safe_copy (src->agg.items);
2799 else
2800 ipa_set_jf_unknown (dst);
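/* A minimal sketch of the composition above, with hypothetical jump
   functions: if E's function for some argument is a simple pass-through
   of the inlined callee's parameter 2, and CS passes the constant 5 in
   that position, the pass-through is rewritten into a constant 5; an
   ancestor function composed with a simple pass-through keeps its offset
   but takes over the formal id of the outer caller's parameter.  */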
2805 /* If TARGET is an addr_expr of a function declaration, make it the
2806 (SPECULATIVE) destination of an indirect edge IE and return the edge.
2807 Otherwise, return NULL. */
2809 struct cgraph_edge *
2810 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2811 bool speculative)
2813 struct cgraph_node *callee;
2814 struct ipa_call_summary *es = ipa_call_summaries->get (ie);
2815 bool unreachable = false;
2817 if (TREE_CODE (target) == ADDR_EXPR)
2818 target = TREE_OPERAND (target, 0);
2819 if (TREE_CODE (target) != FUNCTION_DECL)
2821 target = canonicalize_constructor_val (target, NULL);
2822 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2824 /* Member pointer call that goes through a VMT lookup. */
2825 if (ie->indirect_info->member_ptr
2826 /* Or if target is not an invariant expression and we do not
2827 know if it will evaluate to a function at runtime.
2828 This can happen when folding through &VAR, where &VAR
2829 is IP invariant, but VAR itself is not.
2831 TODO: Revisit this when GCC 5 is branched. It seems that
2832 member_ptr check is not needed and that we may try to fold
2833 the expression and see if VAR is readonly. */
2834 || !is_gimple_ip_invariant (target))
2836 if (dump_enabled_p ())
2838 location_t loc = gimple_location_safe (ie->call_stmt);
2839 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2840 "discovered direct call non-invariant %s\n",
2841 ie->caller->dump_name ());
2843 return NULL;
2847 if (dump_enabled_p ())
2849 location_t loc = gimple_location_safe (ie->call_stmt);
2850 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2851 "discovered direct call to non-function in %s, "
2852 "making it __builtin_unreachable\n",
2853 ie->caller->dump_name ());
2856 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2857 callee = cgraph_node::get_create (target);
2858 unreachable = true;
2860 else
2861 callee = cgraph_node::get (target);
2863 else
2864 callee = cgraph_node::get (target);
2866 /* Because may-edges are not explicitly represented and the vtable may be external,
2867 we may create the first reference to the object in the unit. */
2868 if (!callee || callee->global.inlined_to)
2871 /* We had better ensure we can refer to it.
2872 In the case of static functions we are out of luck, since we have already
2873 removed its body. In the case of public functions we may or may
2874 not introduce the reference. */
2875 if (!canonicalize_constructor_val (target, NULL)
2876 || !TREE_PUBLIC (target))
2878 if (dump_file)
2879 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2880 "(%s -> %s) but can not refer to it. Giving up.\n",
2881 ie->caller->dump_name (),
2882 ie->callee->dump_name ());
2883 return NULL;
2885 callee = cgraph_node::get_create (target);
2888 /* If the edge is already speculated. */
2889 if (speculative && ie->speculative)
2891 struct cgraph_edge *e2;
2892 struct ipa_ref *ref;
2893 ie->speculative_call_info (e2, ie, ref);
2894 if (e2->callee->ultimate_alias_target ()
2895 != callee->ultimate_alias_target ())
2897 if (dump_file)
2898 fprintf (dump_file, "ipa-prop: Discovered call to a speculative "
2899 "target (%s -> %s) but the call is already "
2900 "speculated to %s. Giving up.\n",
2901 ie->caller->dump_name (), callee->dump_name (),
2902 e2->callee->dump_name ());
2904 else
2906 if (dump_file)
2907 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2908 "(%s -> %s) this agree with previous speculation.\n",
2909 ie->caller->dump_name (), callee->dump_name ());
2911 return NULL;
2914 if (!dbg_cnt (devirt))
2915 return NULL;
2917 ipa_check_create_node_params ();
2919 /* We cannot make edges to inline clones. It is a bug that someone removed
2920 the cgraph node too early. */
2921 gcc_assert (!callee->global.inlined_to);
2923 if (dump_file && !unreachable)
2925 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2926 "(%s -> %s), for stmt ",
2927 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2928 speculative ? "speculative" : "known",
2929 ie->caller->dump_name (),
2930 callee->dump_name ());
2931 if (ie->call_stmt)
2932 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2933 else
2934 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2936 if (dump_enabled_p ())
2938 location_t loc = gimple_location_safe (ie->call_stmt);
2940 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2941 "converting indirect call in %s to direct call to %s\n",
2942 ie->caller->name (), callee->name ());
2944 if (!speculative)
2946 struct cgraph_edge *orig = ie;
2947 ie = ie->make_direct (callee);
2948 /* If we resolved a speculative edge, the cost is already up to date
2949 for the direct call (adjusted by inline_edge_duplication_hook). */
2950 if (ie == orig)
2952 es = ipa_call_summaries->get (ie);
2953 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2954 - eni_size_weights.call_cost);
2955 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2956 - eni_time_weights.call_cost);
2959 else
2961 if (!callee->can_be_discarded_p ())
2963 cgraph_node *alias;
2964 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2965 if (alias)
2966 callee = alias;
2968 /* make_speculative will update ie's cost to direct call cost. */
2969 ie = ie->make_speculative
2970 (callee, ie->count.apply_scale (8, 10));
2973 return ie;
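/* For illustration with a hypothetical call "ptr (x)": once a jump
   function proves ptr == &foo, the code above redirects the edge to a
   direct call to foo; with SPECULATIVE set it instead creates a
   speculative edge carrying 8/10 of the original count, leaving the
   indirect call in place as the fallback.  */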
2976 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
2977 CONSTRUCTOR and return it. Return NULL if the search fails for some
2978 reason. */
2980 static tree
2981 find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
2983 tree type = TREE_TYPE (constructor);
2984 if (TREE_CODE (type) != ARRAY_TYPE
2985 && TREE_CODE (type) != RECORD_TYPE)
2986 return NULL;
2988 unsigned ix;
2989 tree index, val;
2990 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
2992 HOST_WIDE_INT elt_offset;
2993 if (TREE_CODE (type) == ARRAY_TYPE)
2995 offset_int off;
2996 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
2997 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
2999 if (index)
3001 if (TREE_CODE (index) == RANGE_EXPR)
3002 off = wi::to_offset (TREE_OPERAND (index, 0));
3003 else
3004 off = wi::to_offset (index);
3005 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
3007 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
3008 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3009 off = wi::sext (off - wi::to_offset (low_bound),
3010 TYPE_PRECISION (TREE_TYPE (index)));
3012 off *= wi::to_offset (unit_size);
3013 /* ??? Handle more than just the first index of a
3014 RANGE_EXPR. */
3016 else
3017 off = wi::to_offset (unit_size) * ix;
3019 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
3020 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
3021 continue;
3022 elt_offset = off.to_shwi ();
3024 else if (TREE_CODE (type) == RECORD_TYPE)
3026 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
3027 if (DECL_BIT_FIELD (index))
3028 continue;
3029 elt_offset = int_bit_position (index);
3031 else
3032 gcc_unreachable ();
3034 if (elt_offset > req_offset)
3035 return NULL;
3037 if (TREE_CODE (val) == CONSTRUCTOR)
3038 return find_constructor_constant_at_offset (val,
3039 req_offset - elt_offset);
3041 if (elt_offset == req_offset
3042 && is_gimple_reg_type (TREE_TYPE (val))
3043 && is_gimple_ip_invariant (val))
3044 return val;
3046 return NULL;
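/* A worked example with a hypothetical initializer: for

     static const int a[4] = { 10, 20, 30, 40 };

   a REQ_OFFSET of 64 bits (two 4-byte elements) selects index 2, so the
   function returns the constant 30.  */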
3049 /* Check whether SCALAR could be used to look up an aggregate interprocedural
3050 invariant from a static constructor and if so, return it. Otherwise return
3051 NULL. */
3053 static tree
3054 ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
3056 if (by_ref)
3058 if (TREE_CODE (scalar) != ADDR_EXPR)
3059 return NULL;
3060 scalar = TREE_OPERAND (scalar, 0);
3063 if (!VAR_P (scalar)
3064 || !is_global_var (scalar)
3065 || !TREE_READONLY (scalar)
3066 || !DECL_INITIAL (scalar)
3067 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
3068 return NULL;
3070 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
3073 /* Retrieve value from aggregate jump function AGG or static initializer of
3074 SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
3075 none. BY_REF specifies whether the value has to be passed by reference or
3076 by value. If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
3077 to is set to true if the value comes from an initializer of a constant. */
3079 tree
3080 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar,
3081 HOST_WIDE_INT offset, bool by_ref,
3082 bool *from_global_constant)
3084 struct ipa_agg_jf_item *item;
3085 int i;
3087 if (scalar)
3089 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
3090 if (res)
3092 if (from_global_constant)
3093 *from_global_constant = true;
3094 return res;
3098 if (!agg
3099 || by_ref != agg->by_ref)
3100 return NULL;
3102 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
3103 if (item->offset == offset)
3105 /* Currently we do not have clobber values, return NULL for them once
3106 we do. */
3107 gcc_checking_assert (is_gimple_ip_invariant (item->value));
3108 if (from_global_constant)
3109 *from_global_constant = false;
3110 return item->value;
3112 return NULL;
3115 /* Remove a reference to SYMBOL from the list of references of a node given by
3116 reference description RDESC. Return true if the reference has been
3117 successfully found and removed. */
3119 static bool
3120 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
3122 struct ipa_ref *to_del;
3123 struct cgraph_edge *origin;
3125 origin = rdesc->cs;
3126 if (!origin)
3127 return false;
3128 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3129 origin->lto_stmt_uid);
3130 if (!to_del)
3131 return false;
3133 to_del->remove_reference ();
3134 if (dump_file)
3135 fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
3136 origin->caller->dump_name (), xstrdup_for_dump (symbol->name ()));
3137 return true;
3140 /* If JFUNC has a reference description with refcount different from
3141 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3142 NULL. JFUNC must be a constant jump function. */
3144 static struct ipa_cst_ref_desc *
3145 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3147 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3148 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3149 return rdesc;
3150 else
3151 return NULL;
3154 /* If the value of constant jump function JFUNC is an address of a function
3155 declaration, return the associated call graph node. Otherwise return
3156 NULL. */
3158 static cgraph_node *
3159 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3161 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3162 tree cst = ipa_get_jf_constant (jfunc);
3163 if (TREE_CODE (cst) != ADDR_EXPR
3164 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3165 return NULL;
3167 return cgraph_node::get (TREE_OPERAND (cst, 0));
3171 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3172 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3173 the edge specified in the rdesc. Return false if either the symbol or the
3174 reference could not be found, otherwise return true. */
3176 static bool
3177 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3179 struct ipa_cst_ref_desc *rdesc;
3180 if (jfunc->type == IPA_JF_CONST
3181 && (rdesc = jfunc_rdesc_usable (jfunc))
3182 && --rdesc->refcount == 0)
3184 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3185 if (!symbol)
3186 return false;
3188 return remove_described_reference (symbol, rdesc);
3190 return true;
3193 /* Try to find a destination for indirect edge IE that corresponds to a simple
3194 call or a call of a member function pointer and where the destination is a
3195 pointer formal parameter described by jump function JFUNC. TARGET_TYPE is
3196 the type of the parameter to which the result of JFUNC is passed. If it can
3197 be determined, return the newly direct edge, otherwise return NULL.
3198 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
3200 static struct cgraph_edge *
3201 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3202 struct ipa_jump_func *jfunc, tree target_type,
3203 struct ipa_node_params *new_root_info)
3205 struct cgraph_edge *cs;
3206 tree target;
3207 bool agg_contents = ie->indirect_info->agg_contents;
3208 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
3209 if (agg_contents)
3211 bool from_global_constant;
3212 target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3213 ie->indirect_info->offset,
3214 ie->indirect_info->by_ref,
3215 &from_global_constant);
3216 if (target
3217 && !from_global_constant
3218 && !ie->indirect_info->guaranteed_unmodified)
3219 return NULL;
3221 else
3222 target = scalar;
3223 if (!target)
3224 return NULL;
3225 cs = ipa_make_edge_direct_to_target (ie, target);
3227 if (cs && !agg_contents)
3229 bool ok;
3230 gcc_checking_assert (cs->callee
3231 && (cs != ie
3232 || jfunc->type != IPA_JF_CONST
3233 || !cgraph_node_for_jfunc (jfunc)
3234 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3235 ok = try_decrement_rdesc_refcount (jfunc);
3236 gcc_checking_assert (ok);
3239 return cs;
3242 /* Return the target to be used in cases of impossible devirtualization. IE
3243 and target (the latter can be NULL) are dumped when dumping is enabled. */
3245 tree
3246 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3248 if (dump_file)
3250 if (target)
3251 fprintf (dump_file,
3252 "Type inconsistent devirtualization: %s->%s\n",
3253 ie->caller->dump_name (),
3254 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3255 else
3256 fprintf (dump_file,
3257 "No devirtualization target in %s\n",
3258 ie->caller->dump_name ());
3260 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3261 cgraph_node::get_create (new_target);
3262 return new_target;
3265 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3266 call based on a formal parameter which is described by jump function JFUNC
3267 and if it can be determined, make it direct and return the direct edge.
3268 Otherwise, return NULL. CTX describes the polymorphic context that the
3269 parameter the call is based on brings along with it. */
3271 static struct cgraph_edge *
3272 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3273 struct ipa_jump_func *jfunc,
3274 struct ipa_polymorphic_call_context ctx)
3276 tree target = NULL;
3277 bool speculative = false;
3279 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
3280 return NULL;
3282 gcc_assert (!ie->indirect_info->by_ref);
3284 /* Try to do lookup via known virtual table pointer value. */
3285 if (!ie->indirect_info->vptr_changed
3286 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
3288 tree vtable;
3289 unsigned HOST_WIDE_INT offset;
3290 tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3291 : NULL;
3292 tree t = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3293 ie->indirect_info->offset,
3294 true);
3295 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3297 bool can_refer;
3298 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3299 vtable, offset, &can_refer);
3300 if (can_refer)
3302 if (!t
3303 || (TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
3304 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
3305 || !possible_polymorphic_call_target_p
3306 (ie, cgraph_node::get (t)))
3308 /* Do not speculate builtin_unreachable, it is stupid! */
3309 if (!ie->indirect_info->vptr_changed)
3310 target = ipa_impossible_devirt_target (ie, target);
3311 else
3312 target = NULL;
3314 else
3316 target = t;
3317 speculative = ie->indirect_info->vptr_changed;
3323 ipa_polymorphic_call_context ie_context (ie);
3324 vec <cgraph_node *>targets;
3325 bool final;
3327 ctx.offset_by (ie->indirect_info->offset);
3328 if (ie->indirect_info->vptr_changed)
3329 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3330 ie->indirect_info->otr_type);
3331 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3332 targets = possible_polymorphic_call_targets
3333 (ie->indirect_info->otr_type,
3334 ie->indirect_info->otr_token,
3335 ctx, &final);
3336 if (final && targets.length () <= 1)
3338 speculative = false;
3339 if (targets.length () == 1)
3340 target = targets[0]->decl;
3341 else
3342 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3344 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3345 && !ie->speculative && ie->maybe_hot_p ())
3347 cgraph_node *n;
3348 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3349 ie->indirect_info->otr_token,
3350 ie->indirect_info->context);
3351 if (n)
3353 target = n->decl;
3354 speculative = true;
3358 if (target)
3360 if (!possible_polymorphic_call_target_p
3361 (ie, cgraph_node::get_create (target)))
3363 if (speculative)
3364 return NULL;
3365 target = ipa_impossible_devirt_target (ie, target);
3367 return ipa_make_edge_direct_to_target (ie, target, speculative);
3369 else
3370 return NULL;
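/* An illustrative sketch with a hypothetical hierarchy: if the aggregate
   jump function proves that the object's vtable pointer is &_ZTV1S + 16,
   gimple_get_virt_method_for_vtable can map OTR_TOKEN directly to the
   corresponding method of S; failing that, the polymorphic context walk
   above may still narrow the set of possible targets down to a single
   decl.  */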
3373 /* Update the param called notes associated with NODE when CS is being inlined,
3374 assuming NODE is (potentially indirectly) inlined into CS->callee.
3375 Moreover, if the callee is discovered to be constant, create a new cgraph
3376 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3377 unless NEW_EDGES is NULL. Return true iff any new edges were created. */
3379 static bool
3380 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3381 struct cgraph_node *node,
3382 vec<cgraph_edge *> *new_edges)
3384 struct ipa_edge_args *top;
3385 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3386 struct ipa_node_params *new_root_info, *inlined_node_info;
3387 bool res = false;
3389 ipa_check_create_edge_args ();
3390 top = IPA_EDGE_REF (cs);
3391 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3392 ? cs->caller->global.inlined_to
3393 : cs->caller);
3394 inlined_node_info = IPA_NODE_REF (cs->callee->function_symbol ());
3396 for (ie = node->indirect_calls; ie; ie = next_ie)
3398 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3399 struct ipa_jump_func *jfunc;
3400 int param_index;
3401 cgraph_node *spec_target = NULL;
3403 next_ie = ie->next_callee;
3405 if (ici->param_index == -1)
3406 continue;
3408 /* We must check range due to calls with variable number of arguments: */
3409 if (ici->param_index >= ipa_get_cs_argument_count (top))
3411 ici->param_index = -1;
3412 continue;
3415 param_index = ici->param_index;
3416 jfunc = ipa_get_ith_jump_func (top, param_index);
3418 if (ie->speculative)
3420 struct cgraph_edge *de;
3421 struct ipa_ref *ref;
3422 ie->speculative_call_info (de, ie, ref);
3423 spec_target = de->callee;
3426 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3427 new_direct_edge = NULL;
3428 else if (ici->polymorphic)
3430 ipa_polymorphic_call_context ctx;
3431 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3432 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3434 else
3436 tree target_type = ipa_get_type (inlined_node_info, param_index);
3437 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3438 target_type,
3439 new_root_info);
3442 /* If speculation was removed, then we need to do nothing. */
3443 if (new_direct_edge && new_direct_edge != ie
3444 && new_direct_edge->callee == spec_target)
3446 new_direct_edge->indirect_inlining_edge = 1;
3447 top = IPA_EDGE_REF (cs);
3448 res = true;
3449 if (!new_direct_edge->speculative)
3450 continue;
3452 else if (new_direct_edge)
3454 new_direct_edge->indirect_inlining_edge = 1;
3455 if (new_direct_edge->call_stmt)
3456 new_direct_edge->call_stmt_cannot_inline_p
3457 = !gimple_check_call_matching_types (
3458 new_direct_edge->call_stmt,
3459 new_direct_edge->callee->decl, false);
3460 if (new_edges)
3462 new_edges->safe_push (new_direct_edge);
3463 res = true;
3465 top = IPA_EDGE_REF (cs);
3466 /* If a speculative edge was introduced, we still need to update
3467 the call info of the indirect edge. */
3468 if (!new_direct_edge->speculative)
3469 continue;
3471 if (jfunc->type == IPA_JF_PASS_THROUGH
3472 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3474 if (ici->agg_contents
3475 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3476 && !ici->polymorphic)
3477 ici->param_index = -1;
3478 else
3480 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3481 if (ici->polymorphic
3482 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3483 ici->vptr_changed = true;
3486 else if (jfunc->type == IPA_JF_ANCESTOR)
3488 if (ici->agg_contents
3489 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3490 && !ici->polymorphic)
3491 ici->param_index = -1;
3492 else
3494 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3495 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3496 if (ici->polymorphic
3497 && !ipa_get_jf_ancestor_type_preserved (jfunc))
3498 ici->vptr_changed = true;
3501 else
3502 /* Either we can find a destination for this edge now or never. */
3503 ici->param_index = -1;
3506 return res;
3509 /* Recursively traverse subtree of NODE (including node) made of inlined
3510 cgraph_edges when CS has been inlined and invoke
3511 update_indirect_edges_after_inlining on all nodes and
3512 update_jump_functions_after_inlining on all non-inlined edges that lead out
3513 of this subtree. Newly discovered indirect edges will be added to
3514 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
3515 created. */
3517 static bool
3518 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3519 struct cgraph_node *node,
3520 vec<cgraph_edge *> *new_edges)
3522 struct cgraph_edge *e;
3523 bool res;
3525 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3527 for (e = node->callees; e; e = e->next_callee)
3528 if (!e->inline_failed)
3529 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3530 else
3531 update_jump_functions_after_inlining (cs, e);
3532 for (e = node->indirect_calls; e; e = e->next_callee)
3533 update_jump_functions_after_inlining (cs, e);
3535 return res;
3538 /* Combine two controlled uses counts as done during inlining. */
3540 static int
3541 combine_controlled_uses_counters (int c, int d)
3543 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3544 return IPA_UNDESCRIBED_USE;
3545 else
3546 return c + d - 1;
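/* A worked example with hypothetical counts: combining C == 2 uses at
   the new root with D == 3 uses in the inlined callee yields 4, the
   minus one accounting for the inlined call site itself, which no
   longer exists as a use.  */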
3549 /* Propagate the number of controlled uses from CS->callee to the new root of the
3550 tree of inlined nodes. */
3552 static void
3553 propagate_controlled_uses (struct cgraph_edge *cs)
3555 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3556 struct cgraph_node *new_root = cs->caller->global.inlined_to
3557 ? cs->caller->global.inlined_to : cs->caller;
3558 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3559 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3560 int count, i;
3562 count = MIN (ipa_get_cs_argument_count (args),
3563 ipa_get_param_count (old_root_info));
3564 for (i = 0; i < count; i++)
3566 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3567 struct ipa_cst_ref_desc *rdesc;
3569 if (jf->type == IPA_JF_PASS_THROUGH)
3571 int src_idx, c, d;
3572 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3573 c = ipa_get_controlled_uses (new_root_info, src_idx);
3574 d = ipa_get_controlled_uses (old_root_info, i);
3576 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3577 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3578 c = combine_controlled_uses_counters (c, d);
3579 ipa_set_controlled_uses (new_root_info, src_idx, c);
3580 if (c == 0 && new_root_info->ipcp_orig_node)
3582 struct cgraph_node *n;
3583 struct ipa_ref *ref;
3584 tree t = new_root_info->known_csts[src_idx];
3586 if (t && TREE_CODE (t) == ADDR_EXPR
3587 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3588 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3589 && (ref = new_root->find_reference (n, NULL, 0)))
3591 if (dump_file)
3592 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3593 "reference from %s to %s.\n",
3594 new_root->dump_name (),
3595 n->dump_name ());
3596 ref->remove_reference ();
3600 else if (jf->type == IPA_JF_CONST
3601 && (rdesc = jfunc_rdesc_usable (jf)))
3603 int d = ipa_get_controlled_uses (old_root_info, i);
3604 int c = rdesc->refcount;
3605 rdesc->refcount = combine_controlled_uses_counters (c, d);
3606 if (rdesc->refcount == 0)
3608 tree cst = ipa_get_jf_constant (jf);
3609 struct cgraph_node *n;
3610 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3611 && TREE_CODE (TREE_OPERAND (cst, 0))
3612 == FUNCTION_DECL);
3613 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3614 if (n)
3616 struct cgraph_node *clone;
3617 bool ok;
3618 ok = remove_described_reference (n, rdesc);
3619 gcc_checking_assert (ok);
3621 clone = cs->caller;
3622 while (clone->global.inlined_to
3623 && clone != rdesc->cs->caller
3624 && IPA_NODE_REF (clone)->ipcp_orig_node)
3626 struct ipa_ref *ref;
3627 ref = clone->find_reference (n, NULL, 0);
3628 if (ref)
3630 if (dump_file)
3631 fprintf (dump_file, "ipa-prop: Removing "
3632 "cloning-created reference "
3633 "from %s to %s.\n",
3634 clone->dump_name (),
3635 n->dump_name ());
3636 ref->remove_reference ();
3638 clone = clone->callers->caller;
3645 for (i = ipa_get_param_count (old_root_info);
3646 i < ipa_get_cs_argument_count (args);
3647 i++)
3649 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3651 if (jf->type == IPA_JF_CONST)
3653 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3654 if (rdesc)
3655 rdesc->refcount = IPA_UNDESCRIBED_USE;
3657 else if (jf->type == IPA_JF_PASS_THROUGH)
3658 ipa_set_controlled_uses (new_root_info,
3659 jf->value.pass_through.formal_id,
3660 IPA_UNDESCRIBED_USE);
3664 /* Update jump functions and call note functions on inlining the call site CS.
3665 CS is expected to lead to a node already cloned by
3666 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3667 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
3668 created. */
3670 bool
3671 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3672 vec<cgraph_edge *> *new_edges)
3674 bool changed;
3675 /* Do nothing if the preparation phase has not been carried out yet
3676 (i.e. during early inlining). */
3677 if (!ipa_node_params_sum)
3678 return false;
3679 gcc_assert (ipa_edge_args_sum);
3681 propagate_controlled_uses (cs);
3682 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3684 return changed;
3687 /* Ensure that the array of edge argument infos is big enough to accommodate a
3688 structure for all edges, and reallocate it if not. Also, allocate the
3689 associated hash tables if they do not already exist. */
3691 void
3692 ipa_check_create_edge_args (void)
3694 if (!ipa_edge_args_sum)
3695 ipa_edge_args_sum
3696 = (new (ggc_cleared_alloc <ipa_edge_args_sum_t> ())
3697 ipa_edge_args_sum_t (symtab, true));
3698 if (!ipa_bits_hash_table)
3699 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3700 if (!ipa_vr_hash_table)
3701 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
3704 /* Frees all dynamically allocated structures that the argument info points
3705 to. */
3707 void
3708 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3710 vec_free (args->jump_functions);
3711 *args = ipa_edge_args ();
3714 /* Free all ipa_edge structures. */
3716 void
3717 ipa_free_all_edge_args (void)
3719 if (!ipa_edge_args_sum)
3720 return;
3722 ipa_edge_args_sum->release ();
3723 ipa_edge_args_sum = NULL;
3726 /* Free all ipa_node_params structures. */
3728 void
3729 ipa_free_all_node_params (void)
3731 ipa_node_params_sum->release ();
3732 ipa_node_params_sum = NULL;
3735 /* Grow ipcp_transformations if necessary. Also allocate any necessary hash
3736 tables if they do not already exist. */
3738 void
3739 ipcp_grow_transformations_if_necessary (void)
3741 if (vec_safe_length (ipcp_transformations)
3742 <= (unsigned) symtab->cgraph_max_uid)
3743 vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
3744 if (!ipa_bits_hash_table)
3745 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3746 if (!ipa_vr_hash_table)
3747 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
3750 /* Set the aggregate replacements of NODE to be AGGVALS. */
3752 void
3753 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3754 struct ipa_agg_replacement_value *aggvals)
3756 ipcp_grow_transformations_if_necessary ();
3757 (*ipcp_transformations)[node->uid].agg_values = aggvals;
3760 /* Hook that is called by cgraph.c when an edge is removed. Adjust reference
3761 count data structures accordingly. */
3763 void
3764 ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
3766 if (args->jump_functions)
3768 struct ipa_jump_func *jf;
3769 int i;
3770 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3772 struct ipa_cst_ref_desc *rdesc;
3773 try_decrement_rdesc_refcount (jf);
3774 if (jf->type == IPA_JF_CONST
3775 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3776 && rdesc->cs == cs)
3777 rdesc->cs = NULL;
3782 /* Method invoked when an edge is duplicated. Copy ipa_edge_args and adjust
3783 reference count data structures accordingly. */
3785 void
3786 ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
3787 ipa_edge_args *old_args, ipa_edge_args *new_args)
3789 unsigned int i;
3791 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3792 if (old_args->polymorphic_call_contexts)
3793 new_args->polymorphic_call_contexts
3794 = vec_safe_copy (old_args->polymorphic_call_contexts);
3796 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3798 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3799 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3801 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3803 if (src_jf->type == IPA_JF_CONST)
3805 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3807 if (!src_rdesc)
3808 dst_jf->value.constant.rdesc = NULL;
3809 else if (src->caller == dst->caller)
3811 struct ipa_ref *ref;
3812 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3813 gcc_checking_assert (n);
3814 ref = src->caller->find_reference (n, src->call_stmt,
3815 src->lto_stmt_uid);
3816 gcc_checking_assert (ref);
3817 dst->caller->clone_reference (ref, ref->stmt);
3819 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3820 dst_rdesc->cs = dst;
3821 dst_rdesc->refcount = src_rdesc->refcount;
3822 dst_rdesc->next_duplicate = NULL;
3823 dst_jf->value.constant.rdesc = dst_rdesc;
3825 else if (src_rdesc->cs == src)
3827 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3828 dst_rdesc->cs = dst;
3829 dst_rdesc->refcount = src_rdesc->refcount;
3830 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3831 src_rdesc->next_duplicate = dst_rdesc;
3832 dst_jf->value.constant.rdesc = dst_rdesc;
3834 else
3836 struct ipa_cst_ref_desc *dst_rdesc;
3837 /* This can happen during inlining, when a JFUNC can refer to a
3838 reference taken in a function up in the tree of inline clones.
3839 We need to find the duplicate that refers to our tree of
3840 inline clones. */
3842 gcc_assert (dst->caller->global.inlined_to);
3843 for (dst_rdesc = src_rdesc->next_duplicate;
3844 dst_rdesc;
3845 dst_rdesc = dst_rdesc->next_duplicate)
3847 struct cgraph_node *top;
3848 top = dst_rdesc->cs->caller->global.inlined_to
3849 ? dst_rdesc->cs->caller->global.inlined_to
3850 : dst_rdesc->cs->caller;
3851 if (dst->caller->global.inlined_to == top)
3852 break;
3854 gcc_assert (dst_rdesc);
3855 dst_jf->value.constant.rdesc = dst_rdesc;
3858 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3859 && src->caller == dst->caller)
3861 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3862 ? dst->caller->global.inlined_to : dst->caller;
3863 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3864 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3866 int c = ipa_get_controlled_uses (root_info, idx);
3867 if (c != IPA_UNDESCRIBED_USE)
3869 c++;
3870 ipa_set_controlled_uses (root_info, idx, c);
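/* For illustration, a sketch of the three constant-rdesc cases handled
   above, on a hypothetical chain.  Let R be the rdesc of SRC's jump
   function:

     same caller:     DST gets a fresh rdesc with next_duplicate == NULL
                      and a cloned ipa_ref;
     R->cs == SRC:    a fresh rdesc for DST is spliced right after R,
                      i.e. R->next_duplicate = new rdesc;
     anything else:   walk R->next_duplicate until reaching the duplicate
                      whose caller's tree of inline clones matches DST's.  */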
3876 /* Analyze a function newly added to the callgraph. */
3878 static void
3879 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3881 if (node->has_gimple_body_p ())
3882 ipa_analyze_node (node);
3885 /* Hook that is called by the summary machinery when a node is duplicated. */
3887 void
3888 ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
3889 ipa_node_params *old_info,
3890 ipa_node_params *new_info)
3892 ipa_agg_replacement_value *old_av, *new_av;
3894 new_info->descriptors = vec_safe_copy (old_info->descriptors);
3895 new_info->lattices = NULL;
3896 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3897 new_info->known_csts = old_info->known_csts.copy ();
3898 new_info->known_contexts = old_info->known_contexts.copy ();
3900 new_info->analysis_done = old_info->analysis_done;
3901 new_info->node_enqueued = old_info->node_enqueued;
3902 new_info->versionable = old_info->versionable;
3904 old_av = ipa_get_agg_replacements_for_node (src);
3905 if (old_av)
3907 new_av = NULL;
3908 while (old_av)
3910 struct ipa_agg_replacement_value *v;
3912 v = ggc_alloc<ipa_agg_replacement_value> ();
3913 memcpy (v, old_av, sizeof (*v));
3914 v->next = new_av;
3915 new_av = v;
3916 old_av = old_av->next;
3918 ipa_set_node_agg_value_chain (dst, new_av);
3921 ipcp_transformation_summary *src_trans
3922 = ipcp_get_transformation_summary (src);
3924 if (src_trans)
3926 ipcp_grow_transformations_if_necessary ();
3927 src_trans = ipcp_get_transformation_summary (src);
3928 ipcp_transformation_summary *dst_trans
3929 = ipcp_get_transformation_summary (dst);
3931 dst_trans->bits = vec_safe_copy (src_trans->bits);
3933 const vec<ipa_vr, va_gc> *src_vr = src_trans->m_vr;
3934 vec<ipa_vr, va_gc> *&dst_vr
3935 = ipcp_get_transformation_summary (dst)->m_vr;
3936 if (vec_safe_length (src_trans->m_vr) > 0)
3938 vec_safe_reserve_exact (dst_vr, src_vr->length ());
3939 for (unsigned i = 0; i < src_vr->length (); ++i)
3940 dst_vr->quick_push ((*src_vr)[i]);
3945 /* Register our cgraph hooks if they are not already there. */
3947 void
3948 ipa_register_cgraph_hooks (void)
3950 ipa_check_create_node_params ();
3951 ipa_check_create_edge_args ();
3953 function_insertion_hook_holder =
3954 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3957 /* Unregister our cgraph hooks if they are registered. */
3959 static void
3960 ipa_unregister_cgraph_hooks (void)
3962 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3963 function_insertion_hook_holder = NULL;
3966 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3967 longer needed after ipa-cp. */
3969 void
3970 ipa_free_all_structures_after_ipa_cp (void)
3972 if (!optimize && !in_lto_p)
3974 ipa_free_all_edge_args ();
3975 ipa_free_all_node_params ();
3976 ipcp_sources_pool.release ();
3977 ipcp_cst_values_pool.release ();
3978 ipcp_poly_ctx_values_pool.release ();
3979 ipcp_agg_lattice_pool.release ();
3980 ipa_unregister_cgraph_hooks ();
3981 ipa_refdesc_pool.release ();
3985 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3986 longer needed after indirect inlining. */
3988 void
3989 ipa_free_all_structures_after_iinln (void)
3991 ipa_free_all_edge_args ();
3992 ipa_free_all_node_params ();
3993 ipa_unregister_cgraph_hooks ();
3994 ipcp_sources_pool.release ();
3995 ipcp_cst_values_pool.release ();
3996 ipcp_poly_ctx_values_pool.release ();
3997 ipcp_agg_lattice_pool.release ();
3998 ipa_refdesc_pool.release ();
4001 /* Print the parameter descriptors of function NODE to F. */
4004 void
4005 ipa_print_node_params (FILE *f, struct cgraph_node *node)
4007 int i, count;
4008 struct ipa_node_params *info;
4010 if (!node->definition)
4011 return;
4012 info = IPA_NODE_REF (node);
4013 fprintf (f, " function %s parameter descriptors:\n", node->dump_name ());
4014 count = ipa_get_param_count (info);
4015 for (i = 0; i < count; i++)
4017 int c;
4019 fprintf (f, " ");
4020 ipa_dump_param (f, info, i);
4021 if (ipa_is_param_used (info, i))
4022 fprintf (f, " used");
4023 c = ipa_get_controlled_uses (info, i);
4024 if (c == IPA_UNDESCRIBED_USE)
4025 fprintf (f, " undescribed_use");
4026 else
4027 fprintf (f, " controlled_uses=%i", c);
4028 fprintf (f, "\n");
4032 /* Print the parameter descriptors of all functions in the
4033 callgraph to F. */
4035 void
4036 ipa_print_all_params (FILE * f)
4038 struct cgraph_node *node;
4040 fprintf (f, "\nFunction parameters:\n");
4041 FOR_EACH_FUNCTION (node)
4042 ipa_print_node_params (f, node);
4045 /* Dump the AV linked list. */
4047 void
4048 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4050 bool comma = false;
4051 fprintf (f, " Aggregate replacements:");
4052 for (; av; av = av->next)
4054 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4055 av->index, av->offset);
4056 print_generic_expr (f, av->value);
4057 comma = true;
4059 fprintf (f, "\n");
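/* For illustration, a chain describing parameter 1 at offsets 0 and 32 with
   hypothetical constant values would be dumped by the loop above as:

     Aggregate replacements: 1[0]=7, 1[32]=9  */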
4062 /* Stream out jump function JUMP_FUNC to OB. */
4064 static void
4065 ipa_write_jump_function (struct output_block *ob,
4066 struct ipa_jump_func *jump_func)
4068 struct ipa_agg_jf_item *item;
4069 struct bitpack_d bp;
4070 int i, count;
4072 streamer_write_uhwi (ob, jump_func->type);
4073 switch (jump_func->type)
4075 case IPA_JF_UNKNOWN:
4076 break;
4077 case IPA_JF_CONST:
4078 gcc_assert (
4079 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4080 stream_write_tree (ob, jump_func->value.constant.value, true);
4081 break;
4082 case IPA_JF_PASS_THROUGH:
4083 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4084 if (jump_func->value.pass_through.operation == NOP_EXPR)
4086 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4087 bp = bitpack_create (ob->main_stream);
4088 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4089 streamer_write_bitpack (&bp);
4091 else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
4092 == tcc_unary)
4093 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4094 else
4096 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4097 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4099 break;
4100 case IPA_JF_ANCESTOR:
4101 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4102 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4103 bp = bitpack_create (ob->main_stream);
4104 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4105 streamer_write_bitpack (&bp);
4106 break;
4109 count = vec_safe_length (jump_func->agg.items);
4110 streamer_write_uhwi (ob, count);
4111 if (count)
4113 bp = bitpack_create (ob->main_stream);
4114 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4115 streamer_write_bitpack (&bp);
4118 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4120 streamer_write_uhwi (ob, item->offset);
4121 stream_write_tree (ob, item->value, true);
4124 bp = bitpack_create (ob->main_stream);
4125 bp_pack_value (&bp, !!jump_func->bits, 1);
4126 streamer_write_bitpack (&bp);
4127 if (jump_func->bits)
4129 streamer_write_widest_int (ob, jump_func->bits->value);
4130 streamer_write_widest_int (ob, jump_func->bits->mask);
4132 bp_pack_value (&bp, !!jump_func->m_vr, 1);
4133 streamer_write_bitpack (&bp);
4134 if (jump_func->m_vr)
4136 streamer_write_enum (ob->main_stream, value_range_type,
4137 VR_LAST, jump_func->m_vr->type);
4138 stream_write_tree (ob, jump_func->m_vr->min, true);
4139 stream_write_tree (ob, jump_func->m_vr->max, true);
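/* For reference, the record emitted above (which ipa_read_jump_function
   below must mirror field for field) is, in stream order:

     uhwi     jump function type
     ...      type-specific payload (trees, formal_id, bitpacked flags)
     uhwi     aggregate item count, then a by_ref bitpack if non-zero,
              then one (uhwi offset, tree value) pair per item
     bitpack  bits-known flag, then two widest_ints (value, mask)
     bitpack  vr-known flag, then the range type enum and min/max trees  */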
4143 /* Read in jump function JUMP_FUNC from IB. */
4145 static void
4146 ipa_read_jump_function (struct lto_input_block *ib,
4147 struct ipa_jump_func *jump_func,
4148 struct cgraph_edge *cs,
4149 struct data_in *data_in)
4151 enum jump_func_type jftype;
4152 enum tree_code operation;
4153 int i, count;
4155 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4156 switch (jftype)
4158 case IPA_JF_UNKNOWN:
4159 ipa_set_jf_unknown (jump_func);
4160 break;
4161 case IPA_JF_CONST:
4162 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4163 break;
4164 case IPA_JF_PASS_THROUGH:
4165 operation = (enum tree_code) streamer_read_uhwi (ib);
4166 if (operation == NOP_EXPR)
4168 int formal_id = streamer_read_uhwi (ib);
4169 struct bitpack_d bp = streamer_read_bitpack (ib);
4170 bool agg_preserved = bp_unpack_value (&bp, 1);
4171 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4173 else if (TREE_CODE_CLASS (operation) == tcc_unary)
4175 int formal_id = streamer_read_uhwi (ib);
4176 ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4178 else
4180 tree operand = stream_read_tree (ib, data_in);
4181 int formal_id = streamer_read_uhwi (ib);
4182 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4183 operation);
4185 break;
4186 case IPA_JF_ANCESTOR:
4188 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4189 int formal_id = streamer_read_uhwi (ib);
4190 struct bitpack_d bp = streamer_read_bitpack (ib);
4191 bool agg_preserved = bp_unpack_value (&bp, 1);
4192 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4193 break;
4197 count = streamer_read_uhwi (ib);
4198 vec_alloc (jump_func->agg.items, count);
4199 if (count)
4201 struct bitpack_d bp = streamer_read_bitpack (ib);
4202 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4204 for (i = 0; i < count; i++)
4206 struct ipa_agg_jf_item item;
4207 item.offset = streamer_read_uhwi (ib);
4208 item.value = stream_read_tree (ib, data_in);
4209 jump_func->agg.items->quick_push (item);
4212 struct bitpack_d bp = streamer_read_bitpack (ib);
4213 bool bits_known = bp_unpack_value (&bp, 1);
4214 if (bits_known)
4216 widest_int value = streamer_read_widest_int (ib);
4217 widest_int mask = streamer_read_widest_int (ib);
4218 ipa_set_jfunc_bits (jump_func, value, mask);
4220 else
4221 jump_func->bits = NULL;
4223 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4224 bool vr_known = bp_unpack_value (&vr_bp, 1);
4225 if (vr_known)
4227 enum value_range_type type = streamer_read_enum (ib, value_range_type,
4228 VR_LAST);
4229 tree min = stream_read_tree (ib, data_in);
4230 tree max = stream_read_tree (ib, data_in);
4231 ipa_set_jfunc_vr (jump_func, type, min, max);
4233 else
4234 jump_func->m_vr = NULL;
4237 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4238 relevant to indirect inlining to OB. */
4240 static void
4241 ipa_write_indirect_edge_info (struct output_block *ob,
4242 struct cgraph_edge *cs)
4244 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4245 struct bitpack_d bp;
4247 streamer_write_hwi (ob, ii->param_index);
4248 bp = bitpack_create (ob->main_stream);
4249 bp_pack_value (&bp, ii->polymorphic, 1);
4250 bp_pack_value (&bp, ii->agg_contents, 1);
4251 bp_pack_value (&bp, ii->member_ptr, 1);
4252 bp_pack_value (&bp, ii->by_ref, 1);
4253 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
4254 bp_pack_value (&bp, ii->vptr_changed, 1);
4255 streamer_write_bitpack (&bp);
4256 if (ii->agg_contents || ii->polymorphic)
4257 streamer_write_hwi (ob, ii->offset);
4258 else
4259 gcc_assert (ii->offset == 0);
4261 if (ii->polymorphic)
4263 streamer_write_hwi (ob, ii->otr_token);
4264 stream_write_tree (ob, ii->otr_type, true);
4265 ii->context.stream_out (ob);
4269 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4270 relevant to indirect inlining from IB. */
4272 static void
4273 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4274 struct data_in *data_in,
4275 struct cgraph_edge *cs)
4277 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4278 struct bitpack_d bp;
4280 ii->param_index = (int) streamer_read_hwi (ib);
4281 bp = streamer_read_bitpack (ib);
4282 ii->polymorphic = bp_unpack_value (&bp, 1);
4283 ii->agg_contents = bp_unpack_value (&bp, 1);
4284 ii->member_ptr = bp_unpack_value (&bp, 1);
4285 ii->by_ref = bp_unpack_value (&bp, 1);
4286 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
4287 ii->vptr_changed = bp_unpack_value (&bp, 1);
4288 if (ii->agg_contents || ii->polymorphic)
4289 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4290 else
4291 ii->offset = 0;
4292 if (ii->polymorphic)
4294 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4295 ii->otr_type = stream_read_tree (ib, data_in);
4296 ii->context.stream_in (ib, data_in);
4300 /* Stream out NODE info to OB. */
4302 static void
4303 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4305 int node_ref;
4306 lto_symtab_encoder_t encoder;
4307 struct ipa_node_params *info = IPA_NODE_REF (node);
4308 int j;
4309 struct cgraph_edge *e;
4310 struct bitpack_d bp;
4312 encoder = ob->decl_state->symtab_node_encoder;
4313 node_ref = lto_symtab_encoder_encode (encoder, node);
4314 streamer_write_uhwi (ob, node_ref);
4316 streamer_write_uhwi (ob, ipa_get_param_count (info));
4317 for (j = 0; j < ipa_get_param_count (info); j++)
4318 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4319 bp = bitpack_create (ob->main_stream);
4320 gcc_assert (info->analysis_done
4321 || ipa_get_param_count (info) == 0);
4322 gcc_assert (!info->node_enqueued);
4323 gcc_assert (!info->ipcp_orig_node);
4324 for (j = 0; j < ipa_get_param_count (info); j++)
4325 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4326 streamer_write_bitpack (&bp);
4327 for (j = 0; j < ipa_get_param_count (info); j++)
4329 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4330 stream_write_tree (ob, ipa_get_type (info, j), true);
4332 for (e = node->callees; e; e = e->next_callee)
4334 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4336 streamer_write_uhwi (ob,
4337 ipa_get_cs_argument_count (args) * 2
4338 + (args->polymorphic_call_contexts != NULL));
4339 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4341 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4342 if (args->polymorphic_call_contexts != NULL)
4343 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4346 for (e = node->indirect_calls; e; e = e->next_callee)
4348 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4350 streamer_write_uhwi (ob,
4351 ipa_get_cs_argument_count (args) * 2
4352 + (args->polymorphic_call_contexts != NULL));
4353 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4355 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4356 if (args->polymorphic_call_contexts != NULL)
4357 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4359 ipa_write_indirect_edge_info (ob, e);
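/* For illustration, the "* 2 + (polymorphic_call_contexts != NULL)" above
   packs a boolean into the low bit of the streamed argument count.  For a
   hypothetical edge with 3 arguments and contexts present, 7 is streamed,
   and the reader recovers both values:

     bool contexts_computed = count & 1;    == 1
     count /= 2;                            == 3

   which is exactly what ipa_read_node_info does below.  */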
4363 /* Stream in NODE info from IB. */
4365 static void
4366 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4367 struct data_in *data_in)
4369 struct ipa_node_params *info = IPA_NODE_REF (node);
4370 int k;
4371 struct cgraph_edge *e;
4372 struct bitpack_d bp;
4374 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4376 for (k = 0; k < ipa_get_param_count (info); k++)
4377 (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
4379 bp = streamer_read_bitpack (ib);
4380 if (ipa_get_param_count (info) != 0)
4381 info->analysis_done = true;
4382 info->node_enqueued = false;
4383 for (k = 0; k < ipa_get_param_count (info); k++)
4384 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4385 for (k = 0; k < ipa_get_param_count (info); k++)
4387 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4388 (*info->descriptors)[k].decl_or_type = stream_read_tree (ib, data_in);
4390 for (e = node->callees; e; e = e->next_callee)
4392 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4393 int count = streamer_read_uhwi (ib);
4394 bool contexts_computed = count & 1;
4395 count /= 2;
4397 if (!count)
4398 continue;
4399 vec_safe_grow_cleared (args->jump_functions, count);
4400 if (contexts_computed)
4401 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4403 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4405 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4406 data_in);
4407 if (contexts_computed)
4408 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4411 for (e = node->indirect_calls; e; e = e->next_callee)
4413 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4414 int count = streamer_read_uhwi (ib);
4415 bool contexts_computed = count & 1;
4416 count /= 2;
4418 if (count)
4420 vec_safe_grow_cleared (args->jump_functions, count);
4421 if (contexts_computed)
4422 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4423 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4425 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4426 data_in);
4427 if (contexts_computed)
4428 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4431 ipa_read_indirect_edge_info (ib, data_in, e);
4435 /* Write jump functions of all functions in the current partition. */
4437 void
4438 ipa_prop_write_jump_functions (void)
4440 struct cgraph_node *node;
4441 struct output_block *ob;
4442 unsigned int count = 0;
4443 lto_symtab_encoder_iterator lsei;
4444 lto_symtab_encoder_t encoder;
4446 if (!ipa_node_params_sum || !ipa_edge_args_sum)
4447 return;
4449 ob = create_output_block (LTO_section_jump_functions);
4450 encoder = ob->decl_state->symtab_node_encoder;
4451 ob->symbol = NULL;
4452 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4453 lsei_next_function_in_partition (&lsei))
4455 node = lsei_cgraph_node (lsei);
4456 if (node->has_gimple_body_p ()
4457 && IPA_NODE_REF (node) != NULL)
4458 count++;
4461 streamer_write_uhwi (ob, count);
4463 /* Process all of the functions. */
4464 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4465 lsei_next_function_in_partition (&lsei))
4467 node = lsei_cgraph_node (lsei);
4468 if (node->has_gimple_body_p ()
4469 && IPA_NODE_REF (node) != NULL)
4470 ipa_write_node_info (ob, node);
4472 streamer_write_char_stream (ob->main_stream, 0);
4473 produce_asm (ob, NULL);
4474 destroy_output_block (ob);
4477 /* Read section in file FILE_DATA of length LEN with data DATA. */
4479 static void
4480 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4481 size_t len)
4483 const struct lto_function_header *header =
4484 (const struct lto_function_header *) data;
4485 const int cfg_offset = sizeof (struct lto_function_header);
4486 const int main_offset = cfg_offset + header->cfg_size;
4487 const int string_offset = main_offset + header->main_size;
4488 struct data_in *data_in;
4489 unsigned int i;
4490 unsigned int count;
4492 lto_input_block ib_main ((const char *) data + main_offset,
4493 header->main_size, file_data->mode_table);
4495 data_in =
4496 lto_data_in_create (file_data, (const char *) data + string_offset,
4497 header->string_size, vNULL);
4498 count = streamer_read_uhwi (&ib_main);
4500 for (i = 0; i < count; i++)
4502 unsigned int index;
4503 struct cgraph_node *node;
4504 lto_symtab_encoder_t encoder;
4506 index = streamer_read_uhwi (&ib_main);
4507 encoder = file_data->symtab_node_encoder;
4508 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4509 index));
4510 gcc_assert (node->definition);
4511 ipa_read_node_info (&ib_main, node, data_in);
4513 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4514 len);
4515 lto_data_in_delete (data_in);
4518 /* Read ipcp jump functions. */
4520 void
4521 ipa_prop_read_jump_functions (void)
4523 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4524 struct lto_file_decl_data *file_data;
4525 unsigned int j = 0;
4527 ipa_check_create_node_params ();
4528 ipa_check_create_edge_args ();
4529 ipa_register_cgraph_hooks ();
4531 while ((file_data = file_data_vec[j++]))
4533 size_t len;
4534 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4536 if (data)
4537 ipa_prop_read_section (file_data, data, len);
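/* Stream out the aggregate value replacement chain, known value ranges and
   bits information of NODE to OB.  */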
4541 void
4542 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
4544 int node_ref;
4545 unsigned int count = 0;
4546 lto_symtab_encoder_t encoder;
4547 struct ipa_agg_replacement_value *aggvals, *av;
4549 aggvals = ipa_get_agg_replacements_for_node (node);
4550 encoder = ob->decl_state->symtab_node_encoder;
4551 node_ref = lto_symtab_encoder_encode (encoder, node);
4552 streamer_write_uhwi (ob, node_ref);
4554 for (av = aggvals; av; av = av->next)
4555 count++;
4556 streamer_write_uhwi (ob, count);
4558 for (av = aggvals; av; av = av->next)
4560 struct bitpack_d bp;
4562 streamer_write_uhwi (ob, av->offset);
4563 streamer_write_uhwi (ob, av->index);
4564 stream_write_tree (ob, av->value, true);
4566 bp = bitpack_create (ob->main_stream);
4567 bp_pack_value (&bp, av->by_ref, 1);
4568 streamer_write_bitpack (&bp);
4571 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
4572 if (ts && vec_safe_length (ts->m_vr) > 0)
4574 count = ts->m_vr->length ();
4575 streamer_write_uhwi (ob, count);
4576 for (unsigned i = 0; i < count; ++i)
4578 struct bitpack_d bp;
4579 ipa_vr *parm_vr = &(*ts->m_vr)[i];
4580 bp = bitpack_create (ob->main_stream);
4581 bp_pack_value (&bp, parm_vr->known, 1);
4582 streamer_write_bitpack (&bp);
4583 if (parm_vr->known)
4585 streamer_write_enum (ob->main_stream, value_range_type,
4586 VR_LAST, parm_vr->type);
4587 streamer_write_wide_int (ob, parm_vr->min);
4588 streamer_write_wide_int (ob, parm_vr->max);
4592 else
4593 streamer_write_uhwi (ob, 0);
4595 if (ts && vec_safe_length (ts->bits) > 0)
4597 count = ts->bits->length ();
4598 streamer_write_uhwi (ob, count);
4600 for (unsigned i = 0; i < count; ++i)
4602 const ipa_bits *bits_jfunc = (*ts->bits)[i];
4603 struct bitpack_d bp = bitpack_create (ob->main_stream);
4604 bp_pack_value (&bp, !!bits_jfunc, 1);
4605 streamer_write_bitpack (&bp);
4606 if (bits_jfunc)
4608 streamer_write_widest_int (ob, bits_jfunc->value);
4609 streamer_write_widest_int (ob, bits_jfunc->mask);
4613 else
4614 streamer_write_uhwi (ob, 0);
4617 /* Stream in the aggregate value replacement chain, value ranges and bits info for NODE from IB. */
4619 static void
4620 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
4621 data_in *data_in)
4623 struct ipa_agg_replacement_value *aggvals = NULL;
4624 unsigned int count, i;
4626 count = streamer_read_uhwi (ib);
4627 for (i = 0; i < count; i++)
4629 struct ipa_agg_replacement_value *av;
4630 struct bitpack_d bp;
4632 av = ggc_alloc<ipa_agg_replacement_value> ();
4633 av->offset = streamer_read_uhwi (ib);
4634 av->index = streamer_read_uhwi (ib);
4635 av->value = stream_read_tree (ib, data_in);
4636 bp = streamer_read_bitpack (ib);
4637 av->by_ref = bp_unpack_value (&bp, 1);
4638 av->next = aggvals;
4639 aggvals = av;
4641 ipa_set_node_agg_value_chain (node, aggvals);
4643 count = streamer_read_uhwi (ib);
4644 if (count > 0)
4646 ipcp_grow_transformations_if_necessary ();
4648 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
4649 vec_safe_grow_cleared (ts->m_vr, count);
4650 for (i = 0; i < count; i++)
4652 ipa_vr *parm_vr;
4653 parm_vr = &(*ts->m_vr)[i];
4654 struct bitpack_d bp;
4655 bp = streamer_read_bitpack (ib);
4656 parm_vr->known = bp_unpack_value (&bp, 1);
4657 if (parm_vr->known)
4659 parm_vr->type = streamer_read_enum (ib, value_range_type,
4660 VR_LAST);
4661 parm_vr->min = streamer_read_wide_int (ib);
4662 parm_vr->max = streamer_read_wide_int (ib);
4666 count = streamer_read_uhwi (ib);
4667 if (count > 0)
4669 ipcp_grow_transformations_if_necessary ();
4671 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
4672 vec_safe_grow_cleared (ts->bits, count);
4674 for (i = 0; i < count; i++)
4676 struct bitpack_d bp = streamer_read_bitpack (ib);
4677 bool known = bp_unpack_value (&bp, 1);
4678 if (known)
4680 ipa_bits *bits
4681 = ipa_get_ipa_bits_for_value (streamer_read_widest_int (ib),
4682 streamer_read_widest_int (ib));
4683 (*ts->bits)[i] = bits;
4689 /* Write all aggregate replacements and other transformation summaries for nodes in the set. */
4691 void
4692 ipcp_write_transformation_summaries (void)
4694 struct cgraph_node *node;
4695 struct output_block *ob;
4696 unsigned int count = 0;
4697 lto_symtab_encoder_iterator lsei;
4698 lto_symtab_encoder_t encoder;
4700 ob = create_output_block (LTO_section_ipcp_transform);
4701 encoder = ob->decl_state->symtab_node_encoder;
4702 ob->symbol = NULL;
4703 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4704 lsei_next_function_in_partition (&lsei))
4706 node = lsei_cgraph_node (lsei);
4707 if (node->has_gimple_body_p ())
4708 count++;
4711 streamer_write_uhwi (ob, count);
4713 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4714 lsei_next_function_in_partition (&lsei))
4716 node = lsei_cgraph_node (lsei);
4717 if (node->has_gimple_body_p ())
4718 write_ipcp_transformation_info (ob, node);
4720 streamer_write_char_stream (ob->main_stream, 0);
4721 produce_asm (ob, NULL);
4722 destroy_output_block (ob);
4725 /* Read replacements section in file FILE_DATA of length LEN with data
4726 DATA. */
4728 static void
4729 read_replacements_section (struct lto_file_decl_data *file_data,
4730 const char *data,
4731 size_t len)
4733 const struct lto_function_header *header =
4734 (const struct lto_function_header *) data;
4735 const int cfg_offset = sizeof (struct lto_function_header);
4736 const int main_offset = cfg_offset + header->cfg_size;
4737 const int string_offset = main_offset + header->main_size;
4738 struct data_in *data_in;
4739 unsigned int i;
4740 unsigned int count;
4742 lto_input_block ib_main ((const char *) data + main_offset,
4743 header->main_size, file_data->mode_table);
4745 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
4746 header->string_size, vNULL);
4747 count = streamer_read_uhwi (&ib_main);
4749 for (i = 0; i < count; i++)
4751 unsigned int index;
4752 struct cgraph_node *node;
4753 lto_symtab_encoder_t encoder;
4755 index = streamer_read_uhwi (&ib_main);
4756 encoder = file_data->symtab_node_encoder;
4757 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4758 index));
4759 gcc_assert (node->definition);
4760 read_ipcp_transformation_info (&ib_main, node, data_in);
4762 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
4763 len);
4764 lto_data_in_delete (data_in);
4767 /* Read IPA-CP aggregate replacements. */
4769 void
4770 ipcp_read_transformation_summaries (void)
4772 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4773 struct lto_file_decl_data *file_data;
4774 unsigned int j = 0;
4776 while ((file_data = file_data_vec[j++]))
4778 size_t len;
4779 const char *data = lto_get_section_data (file_data,
4780 LTO_section_ipcp_transform,
4781 NULL, &len);
4782 if (data)
4783 read_replacements_section (file_data, data, len);
4787 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4788 NODE. */
4790 static void
4791 adjust_agg_replacement_values (struct cgraph_node *node,
4792 struct ipa_agg_replacement_value *aggval)
4794 struct ipa_agg_replacement_value *v;
4795 int i, c = 0, d = 0, *adj;
4797 if (!node->clone.combined_args_to_skip)
4798 return;
4800 for (v = aggval; v; v = v->next)
4802 gcc_assert (v->index >= 0);
4803 if (c < v->index)
4804 c = v->index;
4806 c++;
4808 adj = XALLOCAVEC (int, c);
4809 for (i = 0; i < c; i++)
4810 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
4812 adj[i] = -1;
4813 d++;
4815 else
4816 adj[i] = i - d;
4818 for (v = aggval; v; v = v->next)
4819 v->index = adj[v->index];
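/* For illustration with hypothetical values: if the clone skips parameter 1
   of an original signature with parameters 0..3, the table built above is

     adj[0] = 0, adj[1] = -1, adj[2] = 1, adj[3] = 2

   so a replacement for original parameter 2 is re-indexed to 1.  */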
4822 /* Dominator walker driving the ipcp modification phase. */
4824 class ipcp_modif_dom_walker : public dom_walker
4826 public:
4827 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
4828 vec<ipa_param_descriptor, va_gc> *descs,
4829 struct ipa_agg_replacement_value *av,
4830 bool *sc, bool *cc)
4831 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
4832 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
4834 virtual edge before_dom_children (basic_block);
4836 private:
4837 struct ipa_func_body_info *m_fbi;
4838 vec<ipa_param_descriptor, va_gc> *m_descriptors;
4839 struct ipa_agg_replacement_value *m_aggval;
4840 bool *m_something_changed, *m_cfg_changed;
4843 edge
4844 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
4846 gimple_stmt_iterator gsi;
4847 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4849 struct ipa_agg_replacement_value *v;
4850 gimple *stmt = gsi_stmt (gsi);
4851 tree rhs, val, t;
4852 HOST_WIDE_INT offset, size;
4853 int index;
4854 bool by_ref, vce;
4856 if (!gimple_assign_load_p (stmt))
4857 continue;
4858 rhs = gimple_assign_rhs1 (stmt);
4859 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
4860 continue;
4862 vce = false;
4863 t = rhs;
4864 while (handled_component_p (t))
4866 /* V_C_E can do things like convert an array of integers to one
4867 bigger integer and similar things we do not handle below. */
4868 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
4870 vce = true;
4871 break;
4873 t = TREE_OPERAND (t, 0);
4875 if (vce)
4876 continue;
4878 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
4879 &offset, &size, &by_ref))
4880 continue;
4881 for (v = m_aggval; v; v = v->next)
4882 if (v->index == index
4883 && v->offset == offset)
4884 break;
4885 if (!v
4886 || v->by_ref != by_ref
4887 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
4888 continue;
4890 gcc_checking_assert (is_gimple_ip_invariant (v->value));
4891 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
4893 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
4894 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
4895 else if (TYPE_SIZE (TREE_TYPE (rhs))
4896 == TYPE_SIZE (TREE_TYPE (v->value)))
4897 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
4898 else
4900 if (dump_file)
4902 fprintf (dump_file, " const ");
4903 print_generic_expr (dump_file, v->value);
4904 fprintf (dump_file, " can't be converted to type of ");
4905 print_generic_expr (dump_file, rhs);
4906 fprintf (dump_file, "\n");
4908 continue;
4911 else
4912 val = v->value;
4914 if (dump_file && (dump_flags & TDF_DETAILS))
4916 fprintf (dump_file, "Modifying stmt:\n ");
4917 print_gimple_stmt (dump_file, stmt, 0);
4919 gimple_assign_set_rhs_from_tree (&gsi, val);
4920 update_stmt (stmt);
4922 if (dump_file && (dump_flags & TDF_DETAILS))
4924 fprintf (dump_file, "into:\n ");
4925 print_gimple_stmt (dump_file, stmt, 0);
4926 fprintf (dump_file, "\n");
4929 *m_something_changed = true;
4930 if (maybe_clean_eh_stmt (stmt)
4931 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4932 *m_cfg_changed = true;
4934 return NULL;
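/* For illustration, a sketch of the rewrite performed above.  Assuming a
   hypothetical clone in which IPA-CP proved that the aggregate pointed to
   by parameter p always holds 7 at offset 0, a load such as

     x_1 = p_2(D)->field;

   (matching index, offset, by_ref flag and size) is turned into

     x_1 = 7;

   and dead EH edges are purged if the original load could throw.  */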
4937 /* Update bits info of formal parameters as described in
4938 ipcp_transformation_summary. */
4940 static void
4941 ipcp_update_bits (struct cgraph_node *node)
4943 tree parm = DECL_ARGUMENTS (node->decl);
4944 tree next_parm = parm;
4945 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
4947 if (!ts || vec_safe_length (ts->bits) == 0)
4948 return;
4950 vec<ipa_bits *, va_gc> &bits = *ts->bits;
4951 unsigned count = bits.length ();
4953 for (unsigned i = 0; i < count; ++i, parm = next_parm)
4955 if (node->clone.combined_args_to_skip
4956 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
4957 continue;
4959 gcc_checking_assert (parm);
4960 next_parm = DECL_CHAIN (parm);
4962 if (!bits[i]
4963 || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
4964 || POINTER_TYPE_P (TREE_TYPE (parm)))
4965 || !is_gimple_reg (parm))
4966 continue;
4968 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
4969 if (!ddef)
4970 continue;
4972 if (dump_file)
4974 fprintf (dump_file, "Adjusting mask for param %u to ", i);
4975 print_hex (bits[i]->mask, dump_file);
4976 fprintf (dump_file, "\n");
4979 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
4981 unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
4982 signop sgn = TYPE_SIGN (TREE_TYPE (ddef));
4984 wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
4985 | wide_int::from (bits[i]->value, prec, sgn);
4986 set_nonzero_bits (ddef, nonzero_bits);
4988 else
4990 unsigned tem = bits[i]->mask.to_uhwi ();
4991 unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
4992 unsigned align = tem & -tem;
4993 unsigned misalign = bitpos & (align - 1);
4995 if (align > 1)
4997 if (dump_file)
4998 fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);
5000 unsigned old_align, old_misalign;
5001 struct ptr_info_def *pi = get_ptr_info (ddef);
5002 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5004 if (old_known
5005 && old_align > align)
5007 if (dump_file)
5009 fprintf (dump_file, "But alignment was already %u.\n", old_align);
5010 if ((old_misalign & (align - 1)) != misalign)
5011 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5012 old_misalign, misalign);
5014 continue;
5017 if (old_known
5018 && ((misalign & (old_align - 1)) != old_misalign)
5019 && dump_file)
5020 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5021 old_misalign, misalign);
5023 set_ptr_info_alignment (pi, align, misalign);
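/* For illustration with a hypothetical mask: in ipa_bits, set mask bits are
   unknown, so mask == 0xfffffff8 says the low three bits are known.  Then
   align = mask & -mask == 8, and with value == 4 the code above computes
   misalign = 4 & (8 - 1) == 4, i.e. the pointer sits 4 bytes past an
   8-byte boundary.  */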
5029 /* Update value range of formal parameters as described in
5030 ipcp_transformation_summary. */
5032 static void
5033 ipcp_update_vr (struct cgraph_node *node)
5035 tree fndecl = node->decl;
5036 tree parm = DECL_ARGUMENTS (fndecl);
5037 tree next_parm = parm;
5038 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5039 if (!ts || vec_safe_length (ts->m_vr) == 0)
5040 return;
5041 const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
5042 unsigned count = vr.length ();
5044 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5046 if (node->clone.combined_args_to_skip
5047 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5048 continue;
5049 gcc_checking_assert (parm);
5050 next_parm = DECL_CHAIN (parm);
5051 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5053 if (!ddef || !is_gimple_reg (parm))
5054 continue;
5056 if (vr[i].known
5057 && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
5059 tree type = TREE_TYPE (ddef);
5060 unsigned prec = TYPE_PRECISION (type);
5061 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5063 if (dump_file)
5065 fprintf (dump_file, "Setting value range of param %u ", i);
5066 fprintf (dump_file, "%s[",
5067 (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
5068 print_decs (vr[i].min, dump_file);
5069 fprintf (dump_file, ", ");
5070 print_decs (vr[i].max, dump_file);
5071 fprintf (dump_file, "]\n");
5073 set_range_info (ddef, vr[i].type,
5074 wide_int_storage::from (vr[i].min, prec,
5075 TYPE_SIGN (type)),
5076 wide_int_storage::from (vr[i].max, prec,
5077 TYPE_SIGN (type)));
5079 else if (POINTER_TYPE_P (TREE_TYPE (ddef))
5080 && vr[i].type == VR_ANTI_RANGE
5081 && wi::eq_p (vr[i].min, 0)
5082 && wi::eq_p (vr[i].max, 0))
5084 if (dump_file)
5085 fprintf (dump_file, "Setting nonnull for %u\n", i);
5086 set_ptr_nonnull (ddef);
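/* For illustration: the anti-range ~[0, 0] on a pointer excludes only the
   null value, hence the set_ptr_nonnull call above, while an integral range
   such as a hypothetical [1, 100] is recorded as SSA range info through
   set_range_info.  */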
5092 /* IPCP transformation phase: apply the propagated bits, value ranges and aggregate value replacements to the function body. */
5094 unsigned int
5095 ipcp_transform_function (struct cgraph_node *node)
5097 vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
5098 struct ipa_func_body_info fbi;
5099 struct ipa_agg_replacement_value *aggval;
5100 int param_count;
5101 bool cfg_changed = false, something_changed = false;
5103 gcc_checking_assert (cfun);
5104 gcc_checking_assert (current_function_decl);
5106 if (dump_file)
5107 fprintf (dump_file, "Modification phase of node %s\n",
5108 node->dump_name ());
5110 ipcp_update_bits (node);
5111 ipcp_update_vr (node);
5112 aggval = ipa_get_agg_replacements_for_node (node);
5113 if (!aggval)
5114 return 0;
5115 param_count = count_formal_params (node->decl);
5116 if (param_count == 0)
5117 return 0;
5118 adjust_agg_replacement_values (node, aggval);
5119 if (dump_file)
5120 ipa_dump_agg_replacement_values (dump_file, aggval);
5122 fbi.node = node;
5123 fbi.info = NULL;
5124 fbi.bb_infos = vNULL;
5125 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5126 fbi.param_count = param_count;
5127 fbi.aa_walked = 0;
5129 vec_safe_grow_cleared (descriptors, param_count);
5130 ipa_populate_param_decls (node, *descriptors);
5131 calculate_dominance_info (CDI_DOMINATORS);
5132 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5133 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5135 int i;
5136 struct ipa_bb_info *bi;
5137 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5138 free_ipa_bb_info (bi);
5139 fbi.bb_infos.release ();
5140 free_dominance_info (CDI_DOMINATORS);
5141 (*ipcp_transformations)[node->uid].agg_values = NULL;
5142 (*ipcp_transformations)[node->uid].bits = NULL;
5143 (*ipcp_transformations)[node->uid].m_vr = NULL;
5145 vec_free (descriptors);
5147 if (!something_changed)
5148 return 0;
5149 else if (cfg_changed)
5150 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5151 else
5152 return TODO_update_ssa_only_virtuals;
5155 #include "gt-ipa-prop.h"