/* Interprocedural analyses.
   Copyright (C) 2005-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "alloc-pool.h"
28 #include "tree-pass.h"
29 #include "ssa.h"
30 #include "tree-streamer.h"
31 #include "cgraph.h"
32 #include "diagnostic.h"
33 #include "fold-const.h"
34 #include "gimple-fold.h"
35 #include "tree-eh.h"
36 #include "calls.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "gimplify-me.h"
42 #include "gimple-walk.h"
43 #include "symbol-summary.h"
44 #include "ipa-prop.h"
45 #include "tree-cfg.h"
46 #include "tree-dfa.h"
47 #include "tree-inline.h"
48 #include "ipa-fnsummary.h"
49 #include "gimple-pretty-print.h"
50 #include "ipa-utils.h"
51 #include "dbgcnt.h"
52 #include "domwalk.h"
53 #include "builtins.h"
54 #include "tree-cfgcleanup.h"
/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;

function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;

/* Edge summary for IPA-CP edge information.  */
ipa_edge_args_sum_t *ipa_edge_args_sum;
/* Traits for a hash table for reusing already existing ipa_bits.  */

struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
{
  typedef ipa_bits *value_type;
  typedef ipa_bits *compare_type;
  static hashval_t
  hash (const ipa_bits *p)
  {
    hashval_t t = (hashval_t) p->value.to_shwi ();
    return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
  }
  static bool
  equal (const ipa_bits *a, const ipa_bits *b)
  {
    return a->value == b->value && a->mask == b->mask;
  }
  static void
  mark_empty (ipa_bits *&p)
  {
    p = NULL;
  }
  static bool
  is_empty (const ipa_bits *p)
  {
    return p == NULL;
  }
  static bool
  is_deleted (const ipa_bits *p)
  {
    return p == reinterpret_cast<const ipa_bits *> (1);
  }
  static void
  mark_deleted (ipa_bits *&p)
  {
    p = reinterpret_cast<ipa_bits *> (1);
  }
};

/* Hash table for avoiding repeated allocations of equal ipa_bits.  */
static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
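/* A minimal usage sketch for the cache above (V and M are hypothetical
   values; the canonicalizing lookup ipa_get_ipa_bits_for_value is defined
   later in this file):

     widest_int v = 4, m = -8;
     ipa_bits *b1 = ipa_get_ipa_bits_for_value (v, m);
     ipa_bits *b2 = ipa_get_ipa_bits_for_value (v, m);
     gcc_checking_assert (b1 == b2);   // equal pairs share one instance

   Note that hash () above mixes only the low HOST_WIDE_INT of value and
   mask; wider ipa_bits that differ only in high bits land in the same
   bucket but still compare unequal in equal ().  */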
/* Traits for a hash table for reusing value_ranges used for IPA.  Note that
   the equiv bitmap is not hashed and is expected to be NULL.  */

struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range *>
{
  typedef value_range *value_type;
  typedef value_range *compare_type;
  static hashval_t
  hash (const value_range *p)
  {
    inchash::hash hstate (p->kind ());
    inchash::add_expr (p->min (), hstate);
    inchash::add_expr (p->max (), hstate);
    return hstate.end ();
  }
  static bool
  equal (const value_range *a, const value_range *b)
  {
    return a->equal_p (*b);
  }
  static void
  mark_empty (value_range *&p)
  {
    p = NULL;
  }
  static bool
  is_empty (const value_range *p)
  {
    return p == NULL;
  }
  static bool
  is_deleted (const value_range *p)
  {
    return p == reinterpret_cast<const value_range *> (1);
  }
  static void
  mark_deleted (value_range *&p)
  {
    p = reinterpret_cast<value_range *> (1);
  }
};

/* Hash table for avoiding repeated allocations of equal value_ranges.  */
static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
/* Holders of ipa cgraph hooks: */
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize)
	 || !opt_for_fn (node->decl, flag_ipa_cp);
}
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
			    tree ptree)
{
  int i, count;

  count = vec_safe_length (descriptors);
  for (i = 0; i < count; i++)
    if ((*descriptors)[i].decl_or_type == ptree)
      return i;

  return -1;
}
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (class ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
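/* For illustration, given a function such as

     int foo (int a, int b);

   the descriptors are laid out in declaration order, so with INFO
   describing foo, ipa_get_param_decl_index returns 0 for A's PARM_DECL,
   1 for B's, and -1 for any tree that is not among foo's parameters.  */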
/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor, va_gc> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl_or_type = parm;
      unsigned int cost = estimate_move_cost (TREE_TYPE (parm), true);
      descriptors[param_num].move_cost = cost;
      /* Watch overflow, move_cost is a bitfield.  */
      gcc_checking_assert (cost == descriptors[param_num].move_cost);
      param_num++;
    }
}
/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}
/* Dump a printable representation of the Ith formal parameter of the function
   corresponding to INFO into FILE.  Note there is no setter function as the
   descriptor array is built just once using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, class ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if ((*info->descriptors)[i].decl_or_type)
    {
      fprintf (file, " ");
      print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
    }
}
/* If necessary, allocate vector of parameter descriptors in info of NODE.
   Return true if they were allocated, false if not.  */

static bool
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  class ipa_node_params *info = IPA_NODE_REF_GET_CREATE (node);

  if (!info->descriptors && param_count)
    {
      vec_safe_grow_cleared (info->descriptors, param_count);
      return true;
    }
  else
    return false;
}
/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  class ipa_node_params *info = IPA_NODE_REF_GET_CREATE (node);

  if (!info->descriptors
      && ipa_alloc_node_params (node, count_formal_params (node->decl)))
    ipa_populate_param_decls (node, *info->descriptors);
}
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name(jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f, jump_func->value.pass_through.operand);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_ELT (*jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      fprintf (f, "type: ");
	      print_generic_expr (f, item->type);
	      fprintf (f, ", ");
	      if (item->jftype == IPA_JF_PASS_THROUGH)
		fprintf (f, "PASS THROUGH: %d,",
			 item->value.pass_through.formal_id);
	      else if (item->jftype == IPA_JF_LOAD_AGG)
		{
		  fprintf (f, "LOAD AGG: %d",
			   item->value.pass_through.formal_id);
		  fprintf (f, " [offset: " HOST_WIDE_INT_PRINT_DEC ", by %s],",
			   item->value.load_agg.offset,
			   item->value.load_agg.by_ref ? "reference"
						       : "value");
		}

	      if (item->jftype == IPA_JF_PASS_THROUGH
		  || item->jftype == IPA_JF_LOAD_AGG)
		{
		  fprintf (f, " op %s",
			   get_tree_code_name (item->value.pass_through.operation));
		  if (item->value.pass_through.operation != NOP_EXPR)
		    {
		      fprintf (f, " ");
		      print_generic_expr (f, item->value.pass_through.operand);
		    }
		}
	      else if (item->jftype == IPA_JF_CONST)
		{
		  fprintf (f, "CONST: ");
		  print_generic_expr (f, item->value.constant);
		}
	      else if (item->jftype == IPA_JF_UNKNOWN)
		fprintf (f, "UNKNOWN: " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->type)));
	      fprintf (f, "\n");
	    }
	}

      class ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, "         Context: ");
	  ctx->dump (dump_file);
	}

      if (jump_func->bits)
	{
	  fprintf (f, "         value: ");
	  print_hex (jump_func->bits->value, f);
	  fprintf (f, ", mask: ");
	  print_hex (jump_func->bits->mask, f);
	  fprintf (f, "\n");
	}
      else
	fprintf (f, "         Unknown bits\n");

      if (jump_func->m_vr)
	{
	  fprintf (f, "         VR  ");
	  fprintf (f, "%s[",
		   (jump_func->m_vr->kind () == VR_ANTI_RANGE) ? "~" : "");
	  print_decs (wi::to_wide (jump_func->m_vr->min ()), f);
	  fprintf (f, ", ");
	  print_decs (wi::to_wide (jump_func->m_vr->max ()), f);
	  fprintf (f, "]\n");
	}
      else
	fprintf (f, "         Unknown VR\n");
    }
}
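/* Pieced together from the fprintf calls above, a dumped edge looks
   roughly like this (a sketch, not verbatim compiler output):

       param 0: CONST: 4
       param 1: PASS THROUGH: 0, op nop_expr, agg_preserved
       param 2: ANCESTOR: 1, offset 64
         Aggregate passed by reference:
           offset: 0, type: int, CONST: 7
*/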
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s:\n", node->dump_name ());
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      fprintf (f, "    callsite  %s -> %s : \n",
	       node->dump_name (),
	       cs->callee->dump_name ());
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	fprintf (f, "       no arg info\n");
      else
	ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      class cgraph_indirect_call_info *ii;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	fprintf (f, "       no arg info\n");
      else
	ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}
/* Set JFUNC to be a jump function carrying no knowledge about the actual
   argument (IPA_JF_UNKNOWN).  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
  jfunc->bits = NULL;
  jfunc->m_vr = NULL;
}
/* Set DST to be a copy of another jump function SRC (to be used by jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)

{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}
/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
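/* A hedged sketch of when the reference description above comes into play:

     extern void callee (void (*fp) (void));
     void g (void);
     void f (void) { callee (g); }      // jump function: CONST &g

   Only such ADDR_EXPRs of FUNCTION_DECLs get an ipa_cst_ref_desc; for all
   other constants (integers, addresses of variables, ...) rdesc stays NULL
   and no reference counting is needed.  */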
/* Set JFUNC to be a simple pass-through jump function.  */
static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}
/* Set JFUNC to be a unary pass through jump function.  */

static void
ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}
/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}
/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_ref_base_and_extent to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
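/* For instance, a constructor typically contains a statement like

     this_3(D)->_vptr.A = &_ZTV1A + 16;

   i.e. a COMPONENT_REF store into a DECL_VIRTUAL_P field, which is
   precisely the kind of assignment the predicate above must keep reporting
   as a potential VMT pointer store (a sketch of typical GIMPLE; the exact
   form varies by target and type).  */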
/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type may change
   between the beginning of the function and the point where CALL is invoked.

   Generally functions are not allowed to change the dynamic type of such
   instances, but they may call destructors.  We assume that methods cannot
   destroy the THIS pointer.  Also, as a special case, constructors and
   destructors may change the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that would require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inline cdtor is actually working on ARG, but we don't have
     an easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know the type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call);
	       block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}
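/* A sketch of the common negative case:

     struct A { virtual void f (); };
     void foo (A *a) { a->f (); }

   Here ARG is the default definition SSA name of the parameter A, FOO is
   neither a constructor nor a destructor and the call is not inside an
   inlined cdtor, so param_type_may_change_p returns false and the dynamic
   type observed at the call site can be trusted.  */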
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (ipa_func_body_info *fbi, tree arg,
				       tree base, tree comp_type, gcall *call,
				       struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  int walked
    = walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
			  &tci, NULL, NULL, fbi->aa_walk_budget + 1);

  if (walked >= 0 && !tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (ipa_func_body_info *fbi, tree arg, tree base,
		    tree comp_type, gcall *call, struct ipa_jump_func *jfunc,
		    HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (fbi, arg, base, comp_type,
						call, jfunc, offset);
}
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (ipa_func_body_info *fbi, tree arg, tree comp_type,
			gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (fbi, arg, arg, comp_type,
						call, jfunc, 0);
}
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered that does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  if (TREE_READONLY (base))
    return true;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  if (paa->parm_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget + 1);
  if (walked < 0)
    {
      modified = true;
      if (fbi)
	fbi->aa_walk_budget = 0;
    }
  else if (fbi)
    fbi->aa_walk_budget -= walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
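/* An illustrative sketch (hypothetical source): in

     void foo (int a)
     {
       use (&a);    // A is addressable, so reads of it stay memory loads
       int t = a;   // the PARM_LOAD handed to this predicate
       bar (t);
     }

   the walk over virtual definitions between the load and the function entry
   finds that use (&a) may modify A, so the predicate returns false; had
   there been no aliased store, it would return true and the fact would be
   cached in the per-BB aa status.  */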
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
			    vec<ipa_param_descriptor, va_gc> *descriptors,
			    gimple *stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  if (paa->ref_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget + 1);
  if (walked < 0)
    {
      modified = true;
      fbi->aa_walk_budget = 0;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (modified)
    paa->ref_modified = true;
  return !modified;
}
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm)))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget + 1);
  if (walked < 0)
    {
      fbi->aa_walk_budget = 0;
      modified = true;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
/* Return true if we can prove that OP is a memory reference loading
   data from an aggregate passed as a parameter.

   The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it
   returns false if it cannot prove that the value has not been modified
   before the load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will
   return true even if it cannot prove the value has not been modified, in
   that case it will store false to *GUARANTEED_UNMODIFIED, otherwise it will
   store true there.

   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If the function returns
   true, *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter
   index, offset within the aggregate and whether it is a load from a value
   passed by reference respectively.  */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor, va_gc> *descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, poly_int64 *size_p,
			bool *by_ref_p, bool *guaranteed_unmodified)
{
  int index;
  HOST_WIDE_INT size;
  bool reverse;
  tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);

  if (!base)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  if (guaranteed_unmodified)
	    *guaranteed_unmodified = true;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	   void (*<T2e4>) (struct S *) D.1867;
	   struct S * p.1;

	   <bb 2>:
	   p.1_1 = p;
	   D.1867_2 = p.1_1->f;
	   D.1867_2 ();
	   gdp = &p;
      */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0)
    {
      bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
      if (!data_preserved && !guaranteed_unmodified)
	return false;

      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      if (guaranteed_unmodified)
	*guaranteed_unmodified = data_preserved;
      return true;
    }
  return false;
}
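/* A usage sketch with hypothetical caller-side code:

     struct S { int f; };
     void foo (struct S *p) { bar (p->f); }

   For the load p->f, the function would set *INDEX_P to 0, *OFFSET_P to the
   bit offset of F within S (0 here) and *BY_REF_P to true, provided the
   walk above can show that the pointed-to data were not modified between
   the beginning of foo and the load.  */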
/* If STMT is an assignment that loads a value from a parameter declaration,
   or from an aggregate passed as the parameter either by value or reference,
   return the index of the parameter in ipa_node_params.  Otherwise return -1.

   FBI holds gathered information about the function.  INFO describes
   parameters of the function, STMT is the assignment statement.  If it is a
   memory load from an aggregate, *OFFSET_P is filled with offset within the
   aggregate, and *BY_REF_P specifies whether the aggregate is passed by
   reference.  */

static int
load_from_unmodified_param_or_agg (struct ipa_func_body_info *fbi,
				   class ipa_node_params *info,
				   gimple *stmt,
				   HOST_WIDE_INT *offset_p,
				   bool *by_ref_p)
{
  int index = load_from_unmodified_param (fbi, info->descriptors, stmt);
  poly_int64 size;

  /* Load value from a parameter declaration.  */
  if (index >= 0)
    {
      *offset_p = -1;
      return index;
    }

  if (!gimple_assign_load_p (stmt))
    return -1;

  tree rhs = gimple_assign_rhs1 (stmt);

  /* Skip memory reference containing VIEW_CONVERT_EXPR.  */
  for (tree t = rhs; handled_component_p (t); t = TREE_OPERAND (t, 0))
    if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
      return -1;

  /* Skip memory reference containing bit-field.  */
  if (TREE_CODE (rhs) == BIT_FIELD_REF
      || contains_bitfld_component_ref_p (rhs))
    return -1;

  if (!ipa_load_from_parm_agg (fbi, info->descriptors, stmt, rhs, &index,
			       offset_p, &size, by_ref_p))
    return -1;

  gcc_assert (!maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (rhs))),
			 size));
  if (!*by_ref_p)
    {
      tree param_type = ipa_get_type (info, index);

      if (!param_type || !AGGREGATE_TYPE_P (param_type))
	return -1;
    }
  else if (TREE_THIS_VOLATILE (rhs))
    return -1;

  return index;
}
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
	int a.0;

	a.0_2 = a;
	bar (a.0_2);
      }

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
	int D.2064;

	D.2064_4 = a.1(D) + 4;
	bar (D.2064_4);
      }

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
	int a.0;
	int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);
      }

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);
     }

   INFO is the structure describing individual parameters across different
   stages of IPA optimizations.  PARMS_AINFO contains the information that is
   only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  class ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      switch (gimple_assign_rhs_class (stmt))
	{
	case GIMPLE_BINARY_RHS:
	  {
	    tree op2 = gimple_assign_rhs2 (stmt);
	    if (!is_gimple_ip_invariant (op2)
		|| ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
		     != tcc_comparison)
		    && !useless_type_conversion_p (TREE_TYPE (name),
						   TREE_TYPE (op1))))
	      return;

	    ipa_set_jf_arith_pass_through (jfunc, index, op2,
					   gimple_assign_rhs_code (stmt));
	    break;
	  }
	case GIMPLE_SINGLE_RHS:
	  {
	    bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
						       tc_ssa);
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	    break;
	  }
	case GIMPLE_UNARY_RHS:
	  if (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
	    ipa_set_jf_unary_pass_through (jfunc, index,
					   gimple_assign_rhs_code (stmt));
	default:;
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
  offset_int mem_offset;
  if (!base
      || TREE_CODE (base) != MEM_REF
      || !mem_ref_offset (base).is_constant (&mem_offset))
    return;
  offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);

  offset_int mem_offset;
  if (!expr
      || TREE_CODE (expr) != MEM_REF
      || !mem_ref_offset (expr).is_constant (&mem_offset))
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    class ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
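/* The layout matched above corresponds to the usual Itanium C++ ABI
   representation of a pointer to member function, roughly (a sketch for
   illustration; GCC sees the FIELD_DECLs of the RECORD_TYPE, not these
   names):

     struct {
       void (*__pfn) ();      // POINTER_TYPE to a METHOD_TYPE
       ptrdiff_t __delta;     // adjustment to add to THIS
     };
*/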
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement, and this statement is stored in
   *RHS_STMT.  Otherwise return RHS as it is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs, gimple **rhs_stmt)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
      *rhs_stmt = def_stmt;
    }
  return rhs;
}
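/* E.g. given the copy chain

     b_2 = a_1(D);
     c_3 = b_2;

   get_ssa_def_if_simple_copy (c_3, &stmt) follows the single-rhs
   assignments back to a_1(D) (a default definition, where the walk stops)
   and leaves the last visited defining statement in *RHS_STMT.  */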
/* Simple linked list, describing contents of an aggregate before call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;

  /* Type of the described part of the aggregate.  */
  tree type;

  /* Known constant value or jump function data describing contents.  */
  struct ipa_load_agg_data value;

  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
/* Add an aggregate content item into a linked list of
   ipa_known_agg_contents_list structures, in which all elements
   are sorted in ascending order by offset.  */

static inline void
add_to_agg_contents_list (struct ipa_known_agg_contents_list **plist,
			  struct ipa_known_agg_contents_list *item)
{
  struct ipa_known_agg_contents_list *list = *plist;

  for (; list; list = list->next)
    {
      if (list->offset >= item->offset)
	break;

      plist = &list->next;
    }

  item->next = list;
  *plist = item;
}
/* Check whether a given aggregate content is clobbered by some element in
   a linked list of ipa_known_agg_contents_list.  */

static inline bool
clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list *list,
				struct ipa_known_agg_contents_list *item)
{
  for (; list; list = list->next)
    {
      if (list->offset >= item->offset)
	return list->offset < item->offset + item->size;

      if (list->offset + list->size > item->offset)
	return true;
    }

  return false;
}
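/* For example, with LIST holding parts at bit ranges [0, 32) and [64, 96),
   an ITEM at offset 32 with size 32 does not overlap either part and is not
   clobbered, whereas an ITEM at offset 48 with size 32 overlaps the second
   part and the function returns true.  */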
/* Build aggregate jump function from LIST, assuming there are exactly
   VALUE_COUNT entries there and that offset of the passed argument
   is ARG_OFFSET and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int value_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, value_count);
  for (; list; list = list->next)
    {
      struct ipa_agg_jf_item item;
      tree operand = list->value.pass_through.operand;

      if (list->value.pass_through.formal_id >= 0)
	{
	  /* Content value is derived from some formal parameter.  */
	  if (list->value.offset >= 0)
	    item.jftype = IPA_JF_LOAD_AGG;
	  else
	    item.jftype = IPA_JF_PASS_THROUGH;

	  item.value.load_agg = list->value;
	  if (operand)
	    item.value.pass_through.operand
	      = unshare_expr_without_location (operand);
	}
      else if (operand)
	{
	  /* Content value is known constant.  */
	  item.jftype = IPA_JF_CONST;
	  item.value.constant = unshare_expr_without_location (operand);
	}
      else
	continue;

      item.type = list->type;
      gcc_assert (tree_to_shwi (TYPE_SIZE (list->type)) == list->size);

      item.offset = list->offset - arg_offset;
      gcc_assert ((item.offset % BITS_PER_UNIT) == 0);

      jfunc->agg.items->quick_push (item);
    }
}
/* Given an assignment statement STMT, try to collect information into
   AGG_VALUE that will be used to construct a jump function for the RHS of the
   assignment, from which the content value of an aggregate part comes.

   Besides constant and simple pass-through jump functions, also try to
   identify whether it matches the following pattern that can be described by
   a load-value-from-aggregate jump function, which is a derivative of simple
   pass-through jump function.

     foo (int *p)
     {
       ...

       *(q_5 + 4) = *(p_3(D) + 28) op 1;
       bar (q_5);
     }

   Here the IPA_LOAD_AGG_DATA data structure is informative enough to describe
   constant, simple pass-through and load-value-from-aggregate.  If the value
   is constant, it will be kept in field OPERAND, and field FORMAL_ID is
   set to -1.  For simple pass-through and load-value-from-aggregate, field
   FORMAL_ID specifies the related formal parameter index, and field
   OFFSET can be used to distinguish them, -1 means simple pass-through,
   otherwise means load-value-from-aggregate.  */

static void
analyze_agg_content_value (struct ipa_func_body_info *fbi,
			   struct ipa_load_agg_data *agg_value,
			   gimple *stmt)
{
  tree lhs = gimple_assign_lhs (stmt);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  enum tree_code code;
  int index = -1;

  /* Initialize jump function data for the aggregate part.  */
  memset (agg_value, 0, sizeof (*agg_value));
  agg_value->pass_through.operation = NOP_EXPR;
  agg_value->pass_through.formal_id = -1;
  agg_value->offset = -1;

  if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))  /* TODO: Support aggregate type.  */
      || TREE_THIS_VOLATILE (lhs)
      || TREE_CODE (lhs) == BIT_FIELD_REF
      || contains_bitfld_component_ref_p (lhs))
    return;

  /* Skip SSA copies.  */
  while (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
    {
      if (TREE_CODE (rhs1) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (rhs1))
	break;

      stmt = SSA_NAME_DEF_STMT (rhs1);
      if (!is_gimple_assign (stmt))
	return;

      rhs1 = gimple_assign_rhs1 (stmt);
    }

  code = gimple_assign_rhs_code (stmt);
  switch (gimple_assign_rhs_class (stmt))
    {
    case GIMPLE_SINGLE_RHS:
      if (is_gimple_ip_invariant (rhs1))
	{
	  agg_value->pass_through.operand = rhs1;
	  return;
	}
      code = NOP_EXPR;
      break;

    case GIMPLE_UNARY_RHS:
      /* NOTE: A GIMPLE_UNARY_RHS operation might not be tcc_unary
	 (TRUTH_NOT_EXPR is an example), and GIMPLE_BINARY_RHS does not imply
	 tcc_binary; this subtlety is somewhat misleading.

	 Since tcc_unary is widely used in IPA-CP code to check an operation
	 with one operand, here we only allow tcc_unary operations to avoid
	 possible problems.  Then we can use (opclass == tcc_unary) or not to
	 distinguish unary and binary.  */
      if (TREE_CODE_CLASS (code) != tcc_unary || CONVERT_EXPR_CODE_P (code))
	return;

      rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
      break;

    case GIMPLE_BINARY_RHS:
      {
	gimple *rhs1_stmt = stmt;
	gimple *rhs2_stmt = stmt;
	tree rhs2 = gimple_assign_rhs2 (stmt);

	rhs1 = get_ssa_def_if_simple_copy (rhs1, &rhs1_stmt);
	rhs2 = get_ssa_def_if_simple_copy (rhs2, &rhs2_stmt);

	if (is_gimple_ip_invariant (rhs2))
	  {
	    agg_value->pass_through.operand = rhs2;
	    stmt = rhs1_stmt;
	  }
	else if (is_gimple_ip_invariant (rhs1))
	  {
	    if (TREE_CODE_CLASS (code) == tcc_comparison)
	      code = swap_tree_comparison (code);
	    else if (!commutative_tree_code (code))
	      return;

	    agg_value->pass_through.operand = rhs1;
	    stmt = rhs2_stmt;
	    rhs1 = rhs2;
	  }
	else
	  return;

	if (TREE_CODE_CLASS (code) != tcc_comparison
	    && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs1)))
	  return;
      }
      break;

    default:
      return;
    }

  if (TREE_CODE (rhs1) != SSA_NAME)
    index = load_from_unmodified_param_or_agg (fbi, fbi->info, stmt,
					       &agg_value->offset,
					       &agg_value->by_ref);
  else if (SSA_NAME_IS_DEFAULT_DEF (rhs1))
    index = ipa_get_param_decl_index (fbi->info, SSA_NAME_VAR (rhs1));

  if (index >= 0)
    {
      if (agg_value->offset >= 0)
	agg_value->type = TREE_TYPE (rhs1);
      agg_value->pass_through.formal_id = index;
      agg_value->pass_through.operation = code;
    }
  else
    agg_value->pass_through.operand = NULL_TREE;
}
/* If STMT is a memory store to the object whose address is BASE, extract
   information (offset, size, and value) into CONTENT, and return true,
   otherwise we conservatively assume the whole object is modified with
   unknown content, and return false.  CHECK_REF means that access to object
   is expected to be in form of MEM_REF expression.  */

static bool
extract_mem_content (struct ipa_func_body_info *fbi,
		     gimple *stmt, tree base, bool check_ref,
		     struct ipa_known_agg_contents_list *content)
{
  HOST_WIDE_INT lhs_offset, lhs_size;
  bool reverse;

  if (!is_gimple_assign (stmt))
    return false;

  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset, &lhs_size,
					       &reverse);
  if (!lhs_base)
    return false;

  if (check_ref)
    {
      if (TREE_CODE (lhs_base) != MEM_REF
	  || TREE_OPERAND (lhs_base, 0) != base
	  || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	return false;
    }
  else if (lhs_base != base)
    return false;

  content->offset = lhs_offset;
  content->size = lhs_size;
  content->type = TREE_TYPE (lhs);
  content->next = NULL;

  analyze_agg_content_value (fbi, &content->value, stmt);
  return true;
}
1841 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1842 in ARG is filled in constants or values that are derived from caller's
1843 formal parameter in the way described by some kinds of jump functions. FBI
1844 is the context of the caller function for interprocedural analysis. ARG can
1845 either be an aggregate expression or a pointer to an aggregate. ARG_TYPE is
1846 the type of the aggregate, JFUNC is the jump function for the aggregate. */
1848 static void
1849 determine_known_aggregate_parts (struct ipa_func_body_info *fbi,
1850 gcall *call, tree arg,
1851 tree arg_type,
1852 struct ipa_jump_func *jfunc)
1854 struct ipa_known_agg_contents_list *list = NULL, *all_list = NULL;
1855 bitmap visited = NULL;
1856 int item_count = 0, value_count = 0;
1857 HOST_WIDE_INT arg_offset, arg_size;
1858 tree arg_base;
1859 bool check_ref, by_ref;
1860 ao_ref r;
1862 if (param_ipa_max_agg_items == 0)
1863 return;
1865 /* The function operates in three stages. First, we prepare check_ref, r,
1866 arg_base and arg_offset based on what is actually passed as an actual
1867 argument. */
1869 if (POINTER_TYPE_P (arg_type))
1871 by_ref = true;
1872 if (TREE_CODE (arg) == SSA_NAME)
1874 tree type_size;
1875 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type)))
1876 || !POINTER_TYPE_P (TREE_TYPE (arg)))
1877 return;
1878 check_ref = true;
1879 arg_base = arg;
1880 arg_offset = 0;
1881 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1882 arg_size = tree_to_uhwi (type_size);
1883 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1885 else if (TREE_CODE (arg) == ADDR_EXPR)
1887 bool reverse;
1889 arg = TREE_OPERAND (arg, 0);
1890 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
1891 &arg_size, &reverse);
1892 if (!arg_base)
1893 return;
1894 if (DECL_P (arg_base))
1896 check_ref = false;
1897 ao_ref_init (&r, arg_base);
1899 else
1900 return;
1902 else
1903 return;
1905 else
1907 bool reverse;
1909 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1911 by_ref = false;
1912 check_ref = false;
1913 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
1914 &arg_size, &reverse);
1915 if (!arg_base)
1916 return;
1918 ao_ref_init (&r, arg);
1921 /* The second stage traverses the virtual SSA web backwards from the call
1922 statement, looking only at individual dominating virtual operands (those
1923 whose definition dominates the call). As long as it is confident that the
1924 content of the aggregate is affected by the definition of such an operand,
1925 it builds a sorted linked list of ipa_agg_jf_list entries describing that. */
1927 for (tree dom_vuse = gimple_vuse (call); dom_vuse;)
1929 gimple *stmt = SSA_NAME_DEF_STMT (dom_vuse);
1931 if (gimple_code (stmt) == GIMPLE_PHI)
1933 dom_vuse = get_continuation_for_phi (stmt, &r, true,
1934 fbi->aa_walk_budget,
1935 &visited, false, NULL, NULL);
1936 continue;
1939 if (stmt_may_clobber_ref_p_1 (stmt, &r))
1941 struct ipa_known_agg_contents_list *content
1942 = XALLOCA (struct ipa_known_agg_contents_list);
1944 if (!extract_mem_content (fbi, stmt, arg_base, check_ref, content))
1945 break;
1947 /* Now we have a dominating virtual operand, and need to check
1948 whether its value is clobbered by any other dominating one. */
1949 if ((content->value.pass_through.formal_id >= 0
1950 || content->value.pass_through.operand)
1951 && !clobber_by_agg_contents_list_p (all_list, content))
1953 struct ipa_known_agg_contents_list *copy
1954 = XALLOCA (struct ipa_known_agg_contents_list);
1956 /* Add to the list consisting of only dominating virtual
1957 operands, whose definitions can finally reach the call. */
1958 add_to_agg_contents_list (&list, (*copy = *content, copy));
1960 if (++value_count == param_ipa_max_agg_items)
1961 break;
1964 /* Add to the list consisting of all dominating virtual operands. */
1965 add_to_agg_contents_list (&all_list, content);
1967 if (++item_count == 2 * param_ipa_max_agg_items)
1968 break;
1970 dom_vuse = gimple_vuse (stmt);
1973 if (visited)
1974 BITMAP_FREE (visited);
1976 /* Third stage just goes over the list and creates an appropriate vector of
1977 ipa_agg_jf_item structures out of it, of course only if there are
1978 any meaningful items to begin with. */
1980 if (value_count)
1982 jfunc->agg.by_ref = by_ref;
1983 build_agg_jump_func_from_list (list, value_count, arg_offset, jfunc);
1988 /* Return the Ith param type of callee associated with call graph
1989 edge E. */
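/* E.g. (purely illustrative): for a callee declared as
   void f (int i, struct S *p), calling this function with I equal to 1
   would return the type struct S *, taken from TYPE_ARG_TYPES or, failing
   that, from DECL_ARGUMENTS of the callee.  */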
1991 tree
1992 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1994 int n;
1995 tree type = (e->callee
1996 ? TREE_TYPE (e->callee->decl)
1997 : gimple_call_fntype (e->call_stmt));
1998 tree t = TYPE_ARG_TYPES (type);
2000 for (n = 0; n < i; n++)
2002 if (!t)
2003 break;
2004 t = TREE_CHAIN (t);
2006 if (t)
2007 return TREE_VALUE (t);
2008 if (!e->callee)
2009 return NULL;
2010 t = DECL_ARGUMENTS (e->callee->decl);
2011 for (n = 0; n < i; n++)
2013 if (!t)
2014 return NULL;
2015 t = TREE_CHAIN (t);
2017 if (t)
2018 return TREE_TYPE (t);
2019 return NULL;
2022 /* Return ipa_bits with VALUE and MASK values, which can be either a newly
2023 allocated structure or a previously existing one shared with other jump
2024 functions and/or transformation summaries. */
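/* Thanks to the hash-consing, two hypothetical back-to-back calls such as

     ipa_bits *a = ipa_get_ipa_bits_for_value (value, mask);
     ipa_bits *b = ipa_get_ipa_bits_for_value (value, mask);

   would be expected to return the same GC-allocated structure, so that
   a == b holds.  */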
2026 ipa_bits *
2027 ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
2029 ipa_bits tmp;
2030 tmp.value = value;
2031 tmp.mask = mask;
2033 ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
2034 if (*slot)
2035 return *slot;
2037 ipa_bits *res = ggc_alloc<ipa_bits> ();
2038 res->value = value;
2039 res->mask = mask;
2040 *slot = res;
2042 return res;
2045 /* Assign to JF a pointer to ipa_bits structure with VALUE and MASK. Use hash
2046 table in order to avoid creating multiple same ipa_bits structures. */
2048 static void
2049 ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
2050 const widest_int &mask)
2052 jf->bits = ipa_get_ipa_bits_for_value (value, mask);
2055 /* Return a pointer to a value_range just like *TMP, but either find it in
2056 ipa_vr_hash_table or allocate it in GC memory. TMP->equiv must be NULL. */
2058 static value_range *
2059 ipa_get_value_range (value_range *tmp)
2061 value_range **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
2062 if (*slot)
2063 return *slot;
2065 value_range *vr = ggc_alloc<value_range> ();
2066 *vr = *tmp;
2067 *slot = vr;
2069 return vr;
2072 /* Return a pointer to a value range consisting of TYPE, MIN, MAX and an empty
2073 equiv set. Use hash table in order to avoid creating multiple same copies of
2074 value_ranges. */
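/* Illustrative note: the non-NULL pointer range ~[0, 0] requested for many
   pointer arguments further below is a typical user; repeated requests with
   an equal kind and equal bounds return the same GC-allocated value_range
   rather than a fresh copy.  */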
2076 static value_range *
2077 ipa_get_value_range (enum value_range_kind kind, tree min, tree max)
2079 value_range tmp (min, max, kind);
2080 return ipa_get_value_range (&tmp);
2083 /* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
2084 a NULL equiv bitmap. Use hash table in order to avoid creating multiple
2085 same value_range structures. */
2087 static void
2088 ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_kind type,
2089 tree min, tree max)
2091 jf->m_vr = ipa_get_value_range (type, min, max);
2094 /* Assign to JF a pointer to a value_range just like TMP but either fetch a
2095 copy from ipa_vr_hash_table or allocate a new one in GC memory. */
2097 static void
2098 ipa_set_jfunc_vr (ipa_jump_func *jf, value_range *tmp)
2100 jf->m_vr = ipa_get_value_range (tmp);
2103 /* Compute jump function for all arguments of callsite CS and insert the
2104 information in the jump_functions array in the ipa_edge_args corresponding
2105 to this callsite. */
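/* A purely illustrative sketch of the outcome: for a hypothetical call
   bar (7, arg_3, &q) the loop below would typically produce an IPA_JF_CONST
   jump function for the first argument, a pass-through for the second if
   arg_3 is (derived from) a formal parameter of the caller, and attach
   value range, known-bits and aggregate information where the analysis can
   derive them.  */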
2107 static void
2108 ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
2109 struct cgraph_edge *cs)
2111 class ipa_node_params *info = IPA_NODE_REF (cs->caller);
2112 class ipa_edge_args *args = IPA_EDGE_REF_GET_CREATE (cs);
2113 gcall *call = cs->call_stmt;
2114 int n, arg_num = gimple_call_num_args (call);
2115 bool useful_context = false;
2117 if (arg_num == 0 || args->jump_functions)
2118 return;
2119 vec_safe_grow_cleared (args->jump_functions, arg_num);
2120 if (flag_devirtualize)
2121 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
2123 if (gimple_call_internal_p (call))
2124 return;
2125 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
2126 return;
2128 for (n = 0; n < arg_num; n++)
2130 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
2131 tree arg = gimple_call_arg (call, n);
2132 tree param_type = ipa_get_callee_param_type (cs, n);
2133 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
2135 tree instance;
2136 class ipa_polymorphic_call_context context (cs->caller->decl,
2137 arg, cs->call_stmt,
2138 &instance);
2139 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt,
2140 &fbi->aa_walk_budget);
2141 *ipa_get_ith_polymorhic_call_context (args, n) = context;
2142 if (!context.useless_p ())
2143 useful_context = true;
2146 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2148 bool addr_nonzero = false;
2149 bool strict_overflow = false;
2151 if (TREE_CODE (arg) == SSA_NAME
2152 && param_type
2153 && get_ptr_nonnull (arg))
2154 addr_nonzero = true;
2155 else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
2156 addr_nonzero = true;
2158 if (addr_nonzero)
2160 tree z = build_int_cst (TREE_TYPE (arg), 0);
2161 ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
2163 else
2164 gcc_assert (!jfunc->m_vr);
2166 else
2168 wide_int min, max;
2169 value_range_kind kind;
2170 if (TREE_CODE (arg) == SSA_NAME
2171 && param_type
2172 && (kind = get_range_info (arg, &min, &max))
2173 && (kind == VR_RANGE || kind == VR_ANTI_RANGE))
2175 value_range resvr;
2176 value_range tmpvr (wide_int_to_tree (TREE_TYPE (arg), min),
2177 wide_int_to_tree (TREE_TYPE (arg), max),
2178 kind);
2179 range_fold_unary_expr (&resvr, NOP_EXPR, param_type,
2180 &tmpvr, TREE_TYPE (arg));
2181 if (!resvr.undefined_p () && !resvr.varying_p ())
2182 ipa_set_jfunc_vr (jfunc, &resvr);
2183 else
2184 gcc_assert (!jfunc->m_vr);
2186 else
2187 gcc_assert (!jfunc->m_vr);
2190 if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
2191 && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
2193 if (TREE_CODE (arg) == SSA_NAME)
2194 ipa_set_jfunc_bits (jfunc, 0,
2195 widest_int::from (get_nonzero_bits (arg),
2196 TYPE_SIGN (TREE_TYPE (arg))));
2197 else
2198 ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
2200 else if (POINTER_TYPE_P (TREE_TYPE (arg)))
2202 unsigned HOST_WIDE_INT bitpos;
2203 unsigned align;
2205 get_pointer_alignment_1 (arg, &align, &bitpos);
2206 widest_int mask = wi::bit_and_not
2207 (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
2208 align / BITS_PER_UNIT - 1);
2209 widest_int value = bitpos / BITS_PER_UNIT;
2210 ipa_set_jfunc_bits (jfunc, value, mask);
2212 else
2213 gcc_assert (!jfunc->bits);
2215 if (is_gimple_ip_invariant (arg)
2216 || (VAR_P (arg)
2217 && is_global_var (arg)
2218 && TREE_READONLY (arg)))
2219 ipa_set_jf_constant (jfunc, arg, cs);
2220 else if (!is_gimple_reg_type (TREE_TYPE (arg))
2221 && TREE_CODE (arg) == PARM_DECL)
2223 int index = ipa_get_param_decl_index (info, arg);
2225 gcc_assert (index >= 0);
2226 /* Aggregate passed by value, check for pass-through, otherwise we
2227 will attempt to fill in aggregate contents later in this
2228 for cycle. */
2229 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
2231 ipa_set_jf_simple_pass_through (jfunc, index, false);
2232 continue;
2235 else if (TREE_CODE (arg) == SSA_NAME)
2237 if (SSA_NAME_IS_DEFAULT_DEF (arg))
2239 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
2240 if (index >= 0)
2242 bool agg_p;
2243 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
2244 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
2247 else
2249 gimple *stmt = SSA_NAME_DEF_STMT (arg);
2250 if (is_gimple_assign (stmt))
2251 compute_complex_assign_jump_func (fbi, info, jfunc,
2252 call, stmt, arg, param_type);
2253 else if (gimple_code (stmt) == GIMPLE_PHI)
2254 compute_complex_ancestor_jump_func (fbi, info, jfunc,
2255 call,
2256 as_a <gphi *> (stmt));
2260 /* If ARG is a pointer, we cannot use its type to determine the type of the
2261 aggregate passed (because type conversions are ignored in gimple). Usually
2262 we can safely get the type from the function declaration, but for K&R
2263 prototypes or variadic functions we can try our luck with the type of the
2264 pointer passed. TODO: Since we look for actual initialization of the memory
2265 object, we might do better to work out the type based on the stores we find. */
2266 if (!param_type)
2267 param_type = TREE_TYPE (arg);
2269 if ((jfunc->type != IPA_JF_PASS_THROUGH
2270 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
2271 && (jfunc->type != IPA_JF_ANCESTOR
2272 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
2273 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
2274 || POINTER_TYPE_P (param_type)))
2275 determine_known_aggregate_parts (fbi, call, arg, param_type, jfunc);
2277 if (!useful_context)
2278 vec_free (args->polymorphic_call_contexts);
2281 /* Compute jump functions for all edges - both direct and indirect - outgoing
2282 from BB. */
2284 static void
2285 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
2287 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2288 int i;
2289 struct cgraph_edge *cs;
2291 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
2293 struct cgraph_node *callee = cs->callee;
2295 if (callee)
2297 callee = callee->ultimate_alias_target ();
2298 /* We do not need to bother analyzing calls to unknown functions
2299 unless they may become known during lto/whopr. */
2300 if (!callee->definition && !flag_lto)
2301 continue;
2303 ipa_compute_jump_functions_for_edge (fbi, cs);
2307 /* If STMT looks like a statement loading a value from a member pointer formal
2308 parameter, return that parameter and store the offset of the field to
2309 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2310 might be clobbered). If USE_DELTA, then we look for a use of the delta
2311 field rather than the pfn. */
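/* For instance (an illustrative sketch mirroring the pattern documented
   before ipa_analyze_indirect_call_uses below), a load such as

     f$__pfn_24 = MEM[(struct *)&f + 4B];

   where f is a PARM_DECL of a member-pointer type would return f, with the
   bit position of the looked-up field stored in *OFFSET_P.  */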
2313 static tree
2314 ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
2315 HOST_WIDE_INT *offset_p)
2317 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2319 if (!gimple_assign_single_p (stmt))
2320 return NULL_TREE;
2322 rhs = gimple_assign_rhs1 (stmt);
2323 if (TREE_CODE (rhs) == COMPONENT_REF)
2325 ref_field = TREE_OPERAND (rhs, 1);
2326 rhs = TREE_OPERAND (rhs, 0);
2328 else
2329 ref_field = NULL_TREE;
2330 if (TREE_CODE (rhs) != MEM_REF)
2331 return NULL_TREE;
2332 rec = TREE_OPERAND (rhs, 0);
2333 if (TREE_CODE (rec) != ADDR_EXPR)
2334 return NULL_TREE;
2335 rec = TREE_OPERAND (rec, 0);
2336 if (TREE_CODE (rec) != PARM_DECL
2337 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2338 return NULL_TREE;
2339 ref_offset = TREE_OPERAND (rhs, 1);
2341 if (use_delta)
2342 fld = delta_field;
2343 else
2344 fld = ptr_field;
2345 if (offset_p)
2346 *offset_p = int_bit_position (fld);
2348 if (ref_field)
2350 if (integer_nonzerop (ref_offset))
2351 return NULL_TREE;
2352 return ref_field == fld ? rec : NULL_TREE;
2354 else
2355 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2356 : NULL_TREE;
2359 /* Returns true iff T is an SSA_NAME defined by a statement. */
2361 static bool
2362 ipa_is_ssa_with_stmt_def (tree t)
2364 if (TREE_CODE (t) == SSA_NAME
2365 && !SSA_NAME_IS_DEFAULT_DEF (t))
2366 return true;
2367 else
2368 return false;
2371 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2372 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2373 indirect call graph edge.
2374 If POLYMORPHIC is true, record it as a destination of a polymorphic call. */
2376 static struct cgraph_edge *
2377 ipa_note_param_call (struct cgraph_node *node, int param_index,
2378 gcall *stmt, bool polymorphic)
2380 struct cgraph_edge *cs;
2382 cs = node->get_edge (stmt);
2383 cs->indirect_info->param_index = param_index;
2384 cs->indirect_info->agg_contents = 0;
2385 cs->indirect_info->member_ptr = 0;
2386 cs->indirect_info->guaranteed_unmodified = 0;
2387 ipa_set_param_used_by_indirect_call (IPA_NODE_REF (node),
2388 param_index, true);
2389 if (cs->indirect_info->polymorphic || polymorphic)
2390 ipa_set_param_used_by_polymorphic_call
2391 (IPA_NODE_REF (node), param_index, true);
2392 return cs;
2395 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
2396 (described by INFO). PARMS_AINFO is a pointer to a vector containing
2397 intermediate information about each formal parameter. Currently it checks
2398 whether the call calls a pointer that is a formal parameter and if so, the
2399 parameter is marked with the called flag and an indirect call graph edge
2400 describing the call is created. This is very simple for ordinary pointers
2401 represented in SSA but not-so-nice when it comes to member pointers. The
2402 ugly part of this function does nothing more than trying to match the
2403 pattern of such a call. An example of such a pattern is the gimple dump
2404 below, the call is on the last line:
2406 <bb 2>:
2407 f$__delta_5 = f.__delta;
2408 f$__pfn_24 = f.__pfn;
2411 <bb 2>:
2412 f$__delta_5 = MEM[(struct *)&f];
2413 f$__pfn_24 = MEM[(struct *)&f + 4B];
2415 and a few lines below:
2417 <bb 5>
2418 D.2496_3 = (int) f$__pfn_24;
2419 D.2497_4 = D.2496_3 & 1;
2420 if (D.2497_4 != 0)
2421 goto <bb 3>;
2422 else
2423 goto <bb 4>;
2425 <bb 6>:
2426 D.2500_7 = (unsigned int) f$__delta_5;
2427 D.2501_8 = &S + D.2500_7;
2428 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2429 D.2503_10 = *D.2502_9;
2430 D.2504_12 = f$__pfn_24 + -1;
2431 D.2505_13 = (unsigned int) D.2504_12;
2432 D.2506_14 = D.2503_10 + D.2505_13;
2433 D.2507_15 = *D.2506_14;
2434 iftmp.11_16 = (String:: *) D.2507_15;
2436 <bb 7>:
2437 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2438 D.2500_19 = (unsigned int) f$__delta_5;
2439 D.2508_20 = &S + D.2500_19;
2440 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2442 Such patterns are results of simple calls to a member pointer:
2444 int doprinting (int (MyString::* f)(int) const)
2446 MyString S ("somestring");
2448 return (S.*f)(4);
2451 Moreover, the function also looks for called pointers loaded from aggregates
2452 passed by value or reference. */
2454 static void
2455 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
2456 tree target)
2458 class ipa_node_params *info = fbi->info;
2459 HOST_WIDE_INT offset;
2460 bool by_ref;
2462 if (SSA_NAME_IS_DEFAULT_DEF (target))
2464 tree var = SSA_NAME_VAR (target);
2465 int index = ipa_get_param_decl_index (info, var);
2466 if (index >= 0)
2467 ipa_note_param_call (fbi->node, index, call, false);
2468 return;
2471 int index;
2472 gimple *def = SSA_NAME_DEF_STMT (target);
2473 bool guaranteed_unmodified;
2474 if (gimple_assign_single_p (def)
2475 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2476 gimple_assign_rhs1 (def), &index, &offset,
2477 NULL, &by_ref, &guaranteed_unmodified))
2479 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2480 call, false);
2481 cs->indirect_info->offset = offset;
2482 cs->indirect_info->agg_contents = 1;
2483 cs->indirect_info->by_ref = by_ref;
2484 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
2485 return;
2488 /* Now we need to try to match the complex pattern of calling a member
2489 pointer. */
2490 if (gimple_code (def) != GIMPLE_PHI
2491 || gimple_phi_num_args (def) != 2
2492 || !POINTER_TYPE_P (TREE_TYPE (target))
2493 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2494 return;
2496 /* First, we need to check whether one of these is a load from a member
2497 pointer that is a parameter to this function. */
2498 tree n1 = PHI_ARG_DEF (def, 0);
2499 tree n2 = PHI_ARG_DEF (def, 1);
2500 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2501 return;
2502 gimple *d1 = SSA_NAME_DEF_STMT (n1);
2503 gimple *d2 = SSA_NAME_DEF_STMT (n2);
2505 tree rec;
2506 basic_block bb, virt_bb;
2507 basic_block join = gimple_bb (def);
2508 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2510 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2511 return;
2513 bb = EDGE_PRED (join, 0)->src;
2514 virt_bb = gimple_bb (d2);
2516 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2518 bb = EDGE_PRED (join, 1)->src;
2519 virt_bb = gimple_bb (d1);
2521 else
2522 return;
2524 /* Second, we need to check that the basic blocks are laid out in the way
2525 corresponding to the pattern. */
2527 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2528 || single_pred (virt_bb) != bb
2529 || single_succ (virt_bb) != join)
2530 return;
2532 /* Third, let's see that the branching is done depending on the least
2533 significant bit of the pfn. */
2535 gimple *branch = last_stmt (bb);
2536 if (!branch || gimple_code (branch) != GIMPLE_COND)
2537 return;
2539 if ((gimple_cond_code (branch) != NE_EXPR
2540 && gimple_cond_code (branch) != EQ_EXPR)
2541 || !integer_zerop (gimple_cond_rhs (branch)))
2542 return;
2544 tree cond = gimple_cond_lhs (branch);
2545 if (!ipa_is_ssa_with_stmt_def (cond))
2546 return;
2548 def = SSA_NAME_DEF_STMT (cond);
2549 if (!is_gimple_assign (def)
2550 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2551 || !integer_onep (gimple_assign_rhs2 (def)))
2552 return;
2554 cond = gimple_assign_rhs1 (def);
2555 if (!ipa_is_ssa_with_stmt_def (cond))
2556 return;
2558 def = SSA_NAME_DEF_STMT (cond);
2560 if (is_gimple_assign (def)
2561 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2563 cond = gimple_assign_rhs1 (def);
2564 if (!ipa_is_ssa_with_stmt_def (cond))
2565 return;
2566 def = SSA_NAME_DEF_STMT (cond);
2569 tree rec2;
2570 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2571 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2572 == ptrmemfunc_vbit_in_delta),
2573 NULL);
2574 if (rec != rec2)
2575 return;
2577 index = ipa_get_param_decl_index (info, rec);
2578 if (index >= 0
2579 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2581 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2582 call, false);
2583 cs->indirect_info->offset = offset;
2584 cs->indirect_info->agg_contents = 1;
2585 cs->indirect_info->member_ptr = 1;
2586 cs->indirect_info->guaranteed_unmodified = 1;
2589 return;
2592 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2593 object referenced in the expression is a formal parameter of the caller
2594 FBI->node (described by FBI->info), create a call note for the
2595 statement. */
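/* Illustrative case: for a C++ virtual call this->foo () in which this is
   the first formal parameter, OBJ_TYPE_REF_OBJECT (TARGET) is the default
   definition of this and the code below notes a polymorphic call on that
   parameter with a zero offset.  */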
2597 static void
2598 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2599 gcall *call, tree target)
2601 tree obj = OBJ_TYPE_REF_OBJECT (target);
2602 int index;
2603 HOST_WIDE_INT anc_offset;
2605 if (!flag_devirtualize)
2606 return;
2608 if (TREE_CODE (obj) != SSA_NAME)
2609 return;
2611 class ipa_node_params *info = fbi->info;
2612 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2614 struct ipa_jump_func jfunc;
2615 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2616 return;
2618 anc_offset = 0;
2619 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2620 gcc_assert (index >= 0);
2621 if (detect_type_change_ssa (fbi, obj, obj_type_ref_class (target),
2622 call, &jfunc))
2623 return;
2625 else
2627 struct ipa_jump_func jfunc;
2628 gimple *stmt = SSA_NAME_DEF_STMT (obj);
2629 tree expr;
2631 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2632 if (!expr)
2633 return;
2634 index = ipa_get_param_decl_index (info,
2635 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2636 gcc_assert (index >= 0);
2637 if (detect_type_change (fbi, obj, expr, obj_type_ref_class (target),
2638 call, &jfunc, anc_offset))
2639 return;
2642 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2643 call, true);
2644 class cgraph_indirect_call_info *ii = cs->indirect_info;
2645 ii->offset = anc_offset;
2646 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2647 ii->otr_type = obj_type_ref_class (target);
2648 ii->polymorphic = 1;
2651 /* Analyze a call statement CALL whether and how it utilizes formal parameters
2652 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
2653 containing intermediate information about each formal parameter. */
2655 static void
2656 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2658 tree target = gimple_call_fn (call);
2660 if (!target
2661 || (TREE_CODE (target) != SSA_NAME
2662 && !virtual_method_call_p (target)))
2663 return;
2665 struct cgraph_edge *cs = fbi->node->get_edge (call);
2666 /* If we previously turned the call into a direct call, there is
2667 no need to analyze. */
2668 if (cs && !cs->indirect_unknown_callee)
2669 return;
2671 if (cs->indirect_info->polymorphic && flag_devirtualize)
2673 tree instance;
2674 tree target = gimple_call_fn (call);
2675 ipa_polymorphic_call_context context (current_function_decl,
2676 target, call, &instance);
2678 gcc_checking_assert (cs->indirect_info->otr_type
2679 == obj_type_ref_class (target));
2680 gcc_checking_assert (cs->indirect_info->otr_token
2681 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2683 cs->indirect_info->vptr_changed
2684 = !context.get_dynamic_type (instance,
2685 OBJ_TYPE_REF_OBJECT (target),
2686 obj_type_ref_class (target), call,
2687 &fbi->aa_walk_budget);
2688 cs->indirect_info->context = context;
2691 if (TREE_CODE (target) == SSA_NAME)
2692 ipa_analyze_indirect_call_uses (fbi, call, target);
2693 else if (virtual_method_call_p (target))
2694 ipa_analyze_virtual_call_uses (fbi, call, target);
2698 /* Analyze the call statement STMT with respect to formal parameters (described
2699 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2700 formal parameters are called. */
2702 static void
2703 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2705 if (is_gimple_call (stmt))
2706 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2709 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2710 If OP is a parameter declaration, mark it as used in the info structure
2711 passed in DATA. */
2713 static bool
2714 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2716 class ipa_node_params *info = (class ipa_node_params *) data;
2718 op = get_base_address (op);
2719 if (op
2720 && TREE_CODE (op) == PARM_DECL)
2722 int index = ipa_get_param_decl_index (info, op);
2723 gcc_assert (index >= 0);
2724 ipa_set_param_used (info, index, true);
2727 return false;
2730 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2731 the findings in various structures of the associated ipa_node_params
2732 structure, such as parameter flags, notes etc. FBI holds various data about
2733 the function being analyzed. */
2735 static void
2736 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2738 gimple_stmt_iterator gsi;
2739 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2741 gimple *stmt = gsi_stmt (gsi);
2743 if (is_gimple_debug (stmt))
2744 continue;
2746 ipa_analyze_stmt_uses (fbi, stmt);
2747 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2748 visit_ref_for_mod_analysis,
2749 visit_ref_for_mod_analysis,
2750 visit_ref_for_mod_analysis);
2752 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2753 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2754 visit_ref_for_mod_analysis,
2755 visit_ref_for_mod_analysis,
2756 visit_ref_for_mod_analysis);
2759 /* Calculate controlled uses of parameters of NODE. */
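/* Illustrative note: a parameter whose SSA default definition is used only
   as an argument of calls gets a positive count of such controlled uses;
   any other non-debug use makes the count IPA_UNDESCRIBED_USE.  */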
2761 static void
2762 ipa_analyze_controlled_uses (struct cgraph_node *node)
2764 class ipa_node_params *info = IPA_NODE_REF (node);
2766 for (int i = 0; i < ipa_get_param_count (info); i++)
2768 tree parm = ipa_get_param (info, i);
2769 int controlled_uses = 0;
2771 /* For SSA regs see if parameter is used. For non-SSA we compute
2772 the flag during modification analysis. */
2773 if (is_gimple_reg (parm))
2775 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2776 parm);
2777 if (ddef && !has_zero_uses (ddef))
2779 imm_use_iterator imm_iter;
2780 use_operand_p use_p;
2782 ipa_set_param_used (info, i, true);
2783 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2784 if (!is_gimple_call (USE_STMT (use_p)))
2786 if (!is_gimple_debug (USE_STMT (use_p)))
2788 controlled_uses = IPA_UNDESCRIBED_USE;
2789 break;
2792 else
2793 controlled_uses++;
2795 else
2796 controlled_uses = 0;
2798 else
2799 controlled_uses = IPA_UNDESCRIBED_USE;
2800 ipa_set_controlled_uses (info, i, controlled_uses);
2804 /* Free stuff in BI. */
2806 static void
2807 free_ipa_bb_info (struct ipa_bb_info *bi)
2809 bi->cg_edges.release ();
2810 bi->param_aa_statuses.release ();
2813 /* Dominator walker driving the analysis. */
2815 class analysis_dom_walker : public dom_walker
2817 public:
2818 analysis_dom_walker (struct ipa_func_body_info *fbi)
2819 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2821 virtual edge before_dom_children (basic_block);
2823 private:
2824 struct ipa_func_body_info *m_fbi;
2827 edge
2828 analysis_dom_walker::before_dom_children (basic_block bb)
2830 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2831 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2832 return NULL;
2835 /* Release body info FBI. */
2837 void
2838 ipa_release_body_info (struct ipa_func_body_info *fbi)
2840 int i;
2841 struct ipa_bb_info *bi;
2843 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2844 free_ipa_bb_info (bi);
2845 fbi->bb_infos.release ();
2848 /* Initialize the array describing properties of formal parameters
2849 of NODE, analyze their uses and compute jump functions associated
2850 with actual arguments of calls from within NODE. */
2852 void
2853 ipa_analyze_node (struct cgraph_node *node)
2855 struct ipa_func_body_info fbi;
2856 class ipa_node_params *info;
2858 ipa_check_create_node_params ();
2859 ipa_check_create_edge_args ();
2860 info = IPA_NODE_REF_GET_CREATE (node);
2862 if (info->analysis_done)
2863 return;
2864 info->analysis_done = 1;
2866 if (ipa_func_spec_opts_forbid_analysis_p (node))
2868 for (int i = 0; i < ipa_get_param_count (info); i++)
2870 ipa_set_param_used (info, i, true);
2871 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2873 return;
2876 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2877 push_cfun (func);
2878 calculate_dominance_info (CDI_DOMINATORS);
2879 ipa_initialize_node_params (node);
2880 ipa_analyze_controlled_uses (node);
2882 fbi.node = node;
2883 fbi.info = IPA_NODE_REF (node);
2884 fbi.bb_infos = vNULL;
2885 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2886 fbi.param_count = ipa_get_param_count (info);
2887 fbi.aa_walk_budget = param_ipa_max_aa_steps;
2889 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2891 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2892 bi->cg_edges.safe_push (cs);
2895 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2897 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2898 bi->cg_edges.safe_push (cs);
2901 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2903 ipa_release_body_info (&fbi);
2904 free_dominance_info (CDI_DOMINATORS);
2905 pop_cfun ();
2908 /* Update the jump functions associated with call graph edge E when the call
2909 graph edge CS is being inlined, assuming that E->caller is already (possibly
2910 indirectly) inlined into CS->callee and that E has not been inlined. */
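/* A hedged example of the composition performed below: if E passes on
   formal parameter #0 of the inlined callee via a simple pass-through jump
   function and CS's jump function for argument #0 is the constant 5, then
   after inlining E's jump function becomes the constant 5 as well.  */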
2912 static void
2913 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2914 struct cgraph_edge *e)
2916 class ipa_edge_args *top = IPA_EDGE_REF (cs);
2917 class ipa_edge_args *args = IPA_EDGE_REF (e);
2918 if (!args)
2919 return;
2920 int count = ipa_get_cs_argument_count (args);
2921 int i;
2923 for (i = 0; i < count; i++)
2925 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2926 class ipa_polymorphic_call_context *dst_ctx
2927 = ipa_get_ith_polymorhic_call_context (args, i);
2929 if (dst->agg.items)
2931 struct ipa_agg_jf_item *item;
2932 int j;
2934 FOR_EACH_VEC_ELT (*dst->agg.items, j, item)
2936 int dst_fid;
2937 struct ipa_jump_func *src;
2939 if (item->jftype != IPA_JF_PASS_THROUGH
2940 && item->jftype != IPA_JF_LOAD_AGG)
2941 continue;
2943 dst_fid = item->value.pass_through.formal_id;
2944 if (!top || dst_fid >= ipa_get_cs_argument_count (top))
2946 item->jftype = IPA_JF_UNKNOWN;
2947 continue;
2950 item->value.pass_through.formal_id = -1;
2951 src = ipa_get_ith_jump_func (top, dst_fid);
2952 if (src->type == IPA_JF_CONST)
2954 if (item->jftype == IPA_JF_PASS_THROUGH
2955 && item->value.pass_through.operation == NOP_EXPR)
2957 item->jftype = IPA_JF_CONST;
2958 item->value.constant = src->value.constant.value;
2959 continue;
2962 else if (src->type == IPA_JF_PASS_THROUGH
2963 && src->value.pass_through.operation == NOP_EXPR)
2965 if (item->jftype == IPA_JF_PASS_THROUGH
2966 || !item->value.load_agg.by_ref
2967 || src->value.pass_through.agg_preserved)
2968 item->value.pass_through.formal_id
2969 = src->value.pass_through.formal_id;
2971 else if (src->type == IPA_JF_ANCESTOR)
2973 if (item->jftype == IPA_JF_PASS_THROUGH)
2975 if (!src->value.ancestor.offset)
2976 item->value.pass_through.formal_id
2977 = src->value.ancestor.formal_id;
2979 else if (src->value.ancestor.agg_preserved)
2981 gcc_checking_assert (item->value.load_agg.by_ref);
2983 item->value.pass_through.formal_id
2984 = src->value.ancestor.formal_id;
2985 item->value.load_agg.offset
2986 += src->value.ancestor.offset;
2990 if (item->value.pass_through.formal_id < 0)
2991 item->jftype = IPA_JF_UNKNOWN;
2995 if (!top)
2997 ipa_set_jf_unknown (dst);
2998 continue;
3001 if (dst->type == IPA_JF_ANCESTOR)
3003 struct ipa_jump_func *src;
3004 int dst_fid = dst->value.ancestor.formal_id;
3005 class ipa_polymorphic_call_context *src_ctx
3006 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
3008 /* Variable number of arguments can cause havoc if we try to access
3009 one that does not exist in the inlined edge. So make sure we
3010 don't. */
3011 if (dst_fid >= ipa_get_cs_argument_count (top))
3013 ipa_set_jf_unknown (dst);
3014 continue;
3017 src = ipa_get_ith_jump_func (top, dst_fid);
3019 if (src_ctx && !src_ctx->useless_p ())
3021 class ipa_polymorphic_call_context ctx = *src_ctx;
3023 /* TODO: Make type preserved safe WRT contexts. */
3024 if (!ipa_get_jf_ancestor_type_preserved (dst))
3025 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
3026 ctx.offset_by (dst->value.ancestor.offset);
3027 if (!ctx.useless_p ())
3029 if (!dst_ctx)
3031 vec_safe_grow_cleared (args->polymorphic_call_contexts,
3032 count);
3033 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3036 dst_ctx->combine_with (ctx);
3040 /* Parameter and argument in ancestor jump function must be pointer
3041 type, which means access to aggregate must be by-reference. */
3042 gcc_assert (!src->agg.items || src->agg.by_ref);
3044 if (src->agg.items && dst->value.ancestor.agg_preserved)
3046 struct ipa_agg_jf_item *item;
3047 int j;
3049 /* Currently we do not produce clobber aggregate jump functions,
3050 replace with merging when we do. */
3051 gcc_assert (!dst->agg.items);
3053 dst->agg.items = vec_safe_copy (src->agg.items);
3054 dst->agg.by_ref = src->agg.by_ref;
3055 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
3056 item->offset -= dst->value.ancestor.offset;
3059 if (src->type == IPA_JF_PASS_THROUGH
3060 && src->value.pass_through.operation == NOP_EXPR)
3062 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
3063 dst->value.ancestor.agg_preserved &=
3064 src->value.pass_through.agg_preserved;
3066 else if (src->type == IPA_JF_ANCESTOR)
3068 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
3069 dst->value.ancestor.offset += src->value.ancestor.offset;
3070 dst->value.ancestor.agg_preserved &=
3071 src->value.ancestor.agg_preserved;
3073 else
3074 ipa_set_jf_unknown (dst);
3076 else if (dst->type == IPA_JF_PASS_THROUGH)
3078 struct ipa_jump_func *src;
3079 /* We must check range due to calls with variable number of arguments
3080 and we cannot combine jump functions with operations. */
3081 if (dst->value.pass_through.operation == NOP_EXPR
3082 && (top && dst->value.pass_through.formal_id
3083 < ipa_get_cs_argument_count (top)))
3085 int dst_fid = dst->value.pass_through.formal_id;
3086 src = ipa_get_ith_jump_func (top, dst_fid);
3087 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
3088 class ipa_polymorphic_call_context *src_ctx
3089 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
3091 if (src_ctx && !src_ctx->useless_p ())
3093 class ipa_polymorphic_call_context ctx = *src_ctx;
3095 /* TODO: Make type preserved safe WRT contexts. */
3096 if (!ipa_get_jf_pass_through_type_preserved (dst))
3097 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
3098 if (!ctx.useless_p ())
3100 if (!dst_ctx)
3102 vec_safe_grow_cleared (args->polymorphic_call_contexts,
3103 count);
3104 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3106 dst_ctx->combine_with (ctx);
3109 switch (src->type)
3111 case IPA_JF_UNKNOWN:
3112 ipa_set_jf_unknown (dst);
3113 break;
3114 case IPA_JF_CONST:
3115 ipa_set_jf_cst_copy (dst, src);
3116 break;
3118 case IPA_JF_PASS_THROUGH:
3120 int formal_id = ipa_get_jf_pass_through_formal_id (src);
3121 enum tree_code operation;
3122 operation = ipa_get_jf_pass_through_operation (src);
3124 if (operation == NOP_EXPR)
3126 bool agg_p;
3127 agg_p = dst_agg_p
3128 && ipa_get_jf_pass_through_agg_preserved (src);
3129 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
3131 else if (TREE_CODE_CLASS (operation) == tcc_unary)
3132 ipa_set_jf_unary_pass_through (dst, formal_id, operation);
3133 else
3135 tree operand = ipa_get_jf_pass_through_operand (src);
3136 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
3137 operation);
3139 break;
3141 case IPA_JF_ANCESTOR:
3143 bool agg_p;
3144 agg_p = dst_agg_p
3145 && ipa_get_jf_ancestor_agg_preserved (src);
3146 ipa_set_ancestor_jf (dst,
3147 ipa_get_jf_ancestor_offset (src),
3148 ipa_get_jf_ancestor_formal_id (src),
3149 agg_p);
3150 break;
3152 default:
3153 gcc_unreachable ();
3156 if (src->agg.items
3157 && (dst_agg_p || !src->agg.by_ref))
3159 /* Currently we do not produce clobber aggregate jump
3160 functions, replace with merging when we do. */
3161 gcc_assert (!dst->agg.items);
3163 dst->agg.by_ref = src->agg.by_ref;
3164 dst->agg.items = vec_safe_copy (src->agg.items);
3167 else
3168 ipa_set_jf_unknown (dst);
3173 /* If TARGET is an addr_expr of a function declaration, make it the
3174 (SPECULATIVE) destination of an indirect edge IE and return the edge.
3175 Otherwise, return NULL. */
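/* For example (illustrative only): if TARGET is an ADDR_EXPR of the
   declaration of a function foo, IE is redirected to the cgraph node of
   foo; if TARGET cannot possibly evaluate to a function, the call is
   redirected to __builtin_unreachable instead.  */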
3177 struct cgraph_edge *
3178 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
3179 bool speculative)
3181 struct cgraph_node *callee;
3182 bool unreachable = false;
3184 if (TREE_CODE (target) == ADDR_EXPR)
3185 target = TREE_OPERAND (target, 0);
3186 if (TREE_CODE (target) != FUNCTION_DECL)
3188 target = canonicalize_constructor_val (target, NULL);
3189 if (!target || TREE_CODE (target) != FUNCTION_DECL)
3191 /* Member pointer call that goes through a VMT lookup. */
3192 if (ie->indirect_info->member_ptr
3193 /* Or if target is not an invariant expression and we do not
3194 know if it will evaluate to a function at runtime.
3195 This can happen when folding through &VAR, where &VAR
3196 is IP invariant, but VAR itself is not.
3198 TODO: Revisit this when GCC 5 is branched. It seems that
3199 member_ptr check is not needed and that we may try to fold
3200 the expression and see if VAR is readonly. */
3201 || !is_gimple_ip_invariant (target))
3203 if (dump_enabled_p ())
3205 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3206 "discovered direct call non-invariant %s\n",
3207 ie->caller->dump_name ());
3209 return NULL;
3213 if (dump_enabled_p ())
3215 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3216 "discovered direct call to non-function in %s, "
3217 "making it __builtin_unreachable\n",
3218 ie->caller->dump_name ());
3221 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3222 callee = cgraph_node::get_create (target);
3223 unreachable = true;
3225 else
3226 callee = cgraph_node::get (target);
3228 else
3229 callee = cgraph_node::get (target);
3231 /* Because may-edges are not explicitly represented and the vtable may be external,
3232 we may create the first reference to the object in the unit. */
3233 if (!callee || callee->inlined_to)
3236 /* We had better ensure we can refer to it.
3237 In the case of static functions we are out of luck, since we already
3238 removed its body. In the case of public functions we may or may
3239 not introduce the reference. */
3240 if (!canonicalize_constructor_val (target, NULL)
3241 || !TREE_PUBLIC (target))
3243 if (dump_file)
3244 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
3245 "(%s -> %s) but cannot refer to it. Giving up.\n",
3246 ie->caller->dump_name (),
3247 ie->callee->dump_name ());
3248 return NULL;
3250 callee = cgraph_node::get_create (target);
3253 /* If the edge is already speculated. */
3254 if (speculative && ie->speculative)
3256 struct cgraph_edge *e2;
3257 struct ipa_ref *ref;
3258 ie->speculative_call_info (e2, ie, ref);
3259 if (e2->callee->ultimate_alias_target ()
3260 != callee->ultimate_alias_target ())
3262 if (dump_file)
3263 fprintf (dump_file, "ipa-prop: Discovered call to a speculative "
3264 "target (%s -> %s) but the call is already "
3265 "speculated to %s. Giving up.\n",
3266 ie->caller->dump_name (), callee->dump_name (),
3267 e2->callee->dump_name ());
3269 else
3271 if (dump_file)
3272 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
3273 "(%s -> %s) this agree with previous speculation.\n",
3274 ie->caller->dump_name (), callee->dump_name ());
3276 return NULL;
3279 if (!dbg_cnt (devirt))
3280 return NULL;
3282 ipa_check_create_node_params ();
3284 /* We cannot make edges to inline clones. It is a bug that someone removed
3285 the cgraph node too early. */
3286 gcc_assert (!callee->inlined_to);
3288 if (dump_file && !unreachable)
3290 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
3291 "(%s -> %s), for stmt ",
3292 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
3293 speculative ? "speculative" : "known",
3294 ie->caller->dump_name (),
3295 callee->dump_name ());
3296 if (ie->call_stmt)
3297 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
3298 else
3299 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
3301 if (dump_enabled_p ())
3303 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3304 "converting indirect call in %s to direct call to %s\n",
3305 ie->caller->name (), callee->name ());
3307 if (!speculative)
3309 struct cgraph_edge *orig = ie;
3310 ie = ie->make_direct (callee);
3311 /* If we resolved speculative edge the cost is already up to date
3312 for direct call (adjusted by inline_edge_duplication_hook). */
3313 if (ie == orig)
3315 ipa_call_summary *es = ipa_call_summaries->get (ie);
3316 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
3317 - eni_size_weights.call_cost);
3318 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
3319 - eni_time_weights.call_cost);
3322 else
3324 if (!callee->can_be_discarded_p ())
3326 cgraph_node *alias;
3327 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
3328 if (alias)
3329 callee = alias;
3331 /* make_speculative will update ie's cost to direct call cost. */
3332 ie = ie->make_speculative
3333 (callee, ie->count.apply_scale (8, 10));
3336 return ie;
3339 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
3340 CONSTRUCTOR and return it. Return NULL if the search fails for some
3341 reason. */
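/* Hypothetical example: for

     static const struct { int a; int b; } cst = { 1, 2 };

   and REQ_OFFSET equal to the bit position of field b, the function would
   return the INTEGER_CST 2, recursing into nested constructors and array
   elements as needed.  */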
3343 static tree
3344 find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
3346 tree type = TREE_TYPE (constructor);
3347 if (TREE_CODE (type) != ARRAY_TYPE
3348 && TREE_CODE (type) != RECORD_TYPE)
3349 return NULL;
3351 unsigned ix;
3352 tree index, val;
3353 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
3355 HOST_WIDE_INT elt_offset;
3356 if (TREE_CODE (type) == ARRAY_TYPE)
3358 offset_int off;
3359 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
3360 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3362 if (index)
3364 if (TREE_CODE (index) == RANGE_EXPR)
3365 off = wi::to_offset (TREE_OPERAND (index, 0));
3366 else
3367 off = wi::to_offset (index);
3368 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
3370 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
3371 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3372 off = wi::sext (off - wi::to_offset (low_bound),
3373 TYPE_PRECISION (TREE_TYPE (index)));
3375 off *= wi::to_offset (unit_size);
3376 /* ??? Handle more than just the first index of a
3377 RANGE_EXPR. */
3379 else
3380 off = wi::to_offset (unit_size) * ix;
3382 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
3383 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
3384 continue;
3385 elt_offset = off.to_shwi ();
3387 else if (TREE_CODE (type) == RECORD_TYPE)
3389 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
3390 if (DECL_BIT_FIELD (index))
3391 continue;
3392 elt_offset = int_bit_position (index);
3394 else
3395 gcc_unreachable ();
3397 if (elt_offset > req_offset)
3398 return NULL;
3400 if (TREE_CODE (val) == CONSTRUCTOR)
3401 return find_constructor_constant_at_offset (val,
3402 req_offset - elt_offset);
3404 if (elt_offset == req_offset
3405 && is_gimple_reg_type (TREE_TYPE (val))
3406 && is_gimple_ip_invariant (val))
3407 return val;
3409 return NULL;
3412 /* Check whether SCALAR could be used to look up an aggregate interprocedural
3413 invariant from a static constructor and if so, return it. Otherwise return
3414 NULL. */
3416 static tree
3417 ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
3419 if (by_ref)
3421 if (TREE_CODE (scalar) != ADDR_EXPR)
3422 return NULL;
3423 scalar = TREE_OPERAND (scalar, 0);
3426 if (!VAR_P (scalar)
3427 || !is_global_var (scalar)
3428 || !TREE_READONLY (scalar)
3429 || !DECL_INITIAL (scalar)
3430 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
3431 return NULL;
3433 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
3436 /* Retrieve value from AGG, a set of known offset/value for an aggregate or
3437 static initializer of SCALAR (which can be NULL) for the given OFFSET or
3438 return NULL if there is none. BY_REF specifies whether the value has to be
3439 passed by reference or by value. If FROM_GLOBAL_CONSTANT is non-NULL, then
3440 the boolean it points to is set to true if the value comes from an
3441 initializer of a constant. */
3443 tree
3444 ipa_find_agg_cst_for_param (struct ipa_agg_value_set *agg, tree scalar,
3445 HOST_WIDE_INT offset, bool by_ref,
3446 bool *from_global_constant)
3448 struct ipa_agg_value *item;
3449 int i;
3451 if (scalar)
3453 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
3454 if (res)
3456 if (from_global_constant)
3457 *from_global_constant = true;
3458 return res;
3462 if (!agg
3463 || by_ref != agg->by_ref)
3464 return NULL;
3466 FOR_EACH_VEC_ELT (agg->items, i, item)
3467 if (item->offset == offset)
3469 /* Currently we do not have clobber values, return NULL for them once
3470 we do. */
3471 gcc_checking_assert (is_gimple_ip_invariant (item->value));
3472 if (from_global_constant)
3473 *from_global_constant = false;
3474 return item->value;
3476 return NULL;
3479 /* Remove a reference to SYMBOL from the list of references of a node given by
3480 reference description RDESC. Return true if the reference has been
3481 successfully found and removed. */
3483 static bool
3484 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
3486 struct ipa_ref *to_del;
3487 struct cgraph_edge *origin;
3489 origin = rdesc->cs;
3490 if (!origin)
3491 return false;
3492 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3493 origin->lto_stmt_uid);
3494 if (!to_del)
3495 return false;
3497 to_del->remove_reference ();
3498 if (dump_file)
3499 fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
3500 origin->caller->dump_name (), xstrdup_for_dump (symbol->name ()));
3501 return true;
3504 /* If JFUNC has a reference description with refcount different from
3505 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3506 NULL. JFUNC must be a constant jump function. */
3508 static struct ipa_cst_ref_desc *
3509 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3511 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3512 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3513 return rdesc;
3514 else
3515 return NULL;
3518 /* If the value of constant jump function JFUNC is an address of a function
3519 declaration, return the associated call graph node. Otherwise return
3520 NULL. */
3522 static cgraph_node *
3523 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3525 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3526 tree cst = ipa_get_jf_constant (jfunc);
3527 if (TREE_CODE (cst) != ADDR_EXPR
3528 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3529 return NULL;
3531 return cgraph_node::get (TREE_OPERAND (cst, 0));
3535 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3536 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3537 the edge specified in the rdesc. Return false if either the symbol or the
3538 reference could not be found, otherwise return true. */
3540 static bool
3541 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3543 struct ipa_cst_ref_desc *rdesc;
3544 if (jfunc->type == IPA_JF_CONST
3545 && (rdesc = jfunc_rdesc_usable (jfunc))
3546 && --rdesc->refcount == 0)
3548 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3549 if (!symbol)
3550 return false;
3552 return remove_described_reference (symbol, rdesc);
3554 return true;
3557 /* Try to find a destination for indirect edge IE that corresponds to a simple
3558 call or a call of a member function pointer and where the destination is a
3559 pointer formal parameter described by jump function JFUNC. TARGET_TYPE is
3560 the type of the parameter to which the result of JFUNC is passed. If it can
3561 be determined, return the newly direct edge, otherwise return NULL.
3562 NEW_ROOT and NEW_ROOT_INFO is the node and its info that JFUNC lattices are
3563 relative to. */
3565 static struct cgraph_edge *
3566 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3567 struct ipa_jump_func *jfunc, tree target_type,
3568 struct cgraph_node *new_root,
3569 class ipa_node_params *new_root_info)
3571 struct cgraph_edge *cs;
3572 tree target;
3573 bool agg_contents = ie->indirect_info->agg_contents;
3574 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
3575 if (agg_contents)
3577 bool from_global_constant;
3578 ipa_agg_value_set agg = ipa_agg_value_set_from_jfunc (new_root_info,
3579 new_root,
3580 &jfunc->agg);
3581 target = ipa_find_agg_cst_for_param (&agg, scalar,
3582 ie->indirect_info->offset,
3583 ie->indirect_info->by_ref,
3584 &from_global_constant);
3585 agg.release ();
3586 if (target
3587 && !from_global_constant
3588 && !ie->indirect_info->guaranteed_unmodified)
3589 return NULL;
3591 else
3592 target = scalar;
3593 if (!target)
3594 return NULL;
3595 cs = ipa_make_edge_direct_to_target (ie, target);
3597 if (cs && !agg_contents)
3599 bool ok;
3600 gcc_checking_assert (cs->callee
3601 && (cs != ie
3602 || jfunc->type != IPA_JF_CONST
3603 || !cgraph_node_for_jfunc (jfunc)
3604 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3605 ok = try_decrement_rdesc_refcount (jfunc);
3606 gcc_checking_assert (ok);
3609 return cs;
3612 /* Return the target to be used in cases of impossible devirtualization. IE
3613 and target (the latter can be NULL) are dumped when dumping is enabled. */
3615 tree
3616 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3618 if (dump_file)
3620 if (target)
3621 fprintf (dump_file,
3622 "Type inconsistent devirtualization: %s->%s\n",
3623 ie->caller->dump_name (),
3624 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3625 else
3626 fprintf (dump_file,
3627 "No devirtualization target in %s\n",
3628 ie->caller->dump_name ());
3630 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3631 cgraph_node::get_create (new_target);
3632 return new_target;
3635 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3636 call based on a formal parameter which is described by jump function JFUNC
3637 and if it can be determined, make it direct and return the direct edge.
3638 Otherwise, return NULL. CTX describes the polymorphic context that the
3639 parameter the call is based on brings along with it. NEW_ROOT and
3640 NEW_ROOT_INFO is the node and its info that JFUNC lattices are relative
3641 to. */
3643 static struct cgraph_edge *
3644 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3645 struct ipa_jump_func *jfunc,
3646 class ipa_polymorphic_call_context ctx,
3647 struct cgraph_node *new_root,
3648 class ipa_node_params *new_root_info)
3650 tree target = NULL;
3651 bool speculative = false;
3653 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
3654 return NULL;
3656 gcc_assert (!ie->indirect_info->by_ref);
3658 /* Try to do lookup via known virtual table pointer value. */
3659 if (!ie->indirect_info->vptr_changed
3660 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
3662 tree vtable;
3663 unsigned HOST_WIDE_INT offset;
3664 tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3665 : NULL;
3666 ipa_agg_value_set agg = ipa_agg_value_set_from_jfunc (new_root_info,
3667 new_root,
3668 &jfunc->agg);
3669 tree t = ipa_find_agg_cst_for_param (&agg, scalar,
3670 ie->indirect_info->offset,
3671 true);
3672 agg.release ();
3673 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3675 bool can_refer;
3676 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3677 vtable, offset, &can_refer);
3678 if (can_refer)
3680 if (!t
3681 || fndecl_built_in_p (t, BUILT_IN_UNREACHABLE)
3682 || !possible_polymorphic_call_target_p
3683 (ie, cgraph_node::get (t)))
3685 /* Do not speculate builtin_unreachable, it is stupid! */
3686 if (!ie->indirect_info->vptr_changed)
3687 target = ipa_impossible_devirt_target (ie, target);
3688 else
3689 target = NULL;
3691 else
3693 target = t;
3694 speculative = ie->indirect_info->vptr_changed;
3700 ipa_polymorphic_call_context ie_context (ie);
3701 vec <cgraph_node *>targets;
3702 bool final;
3704 ctx.offset_by (ie->indirect_info->offset);
3705 if (ie->indirect_info->vptr_changed)
3706 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3707 ie->indirect_info->otr_type);
3708 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3709 targets = possible_polymorphic_call_targets
3710 (ie->indirect_info->otr_type,
3711 ie->indirect_info->otr_token,
3712 ctx, &final);
3713 if (final && targets.length () <= 1)
3715 speculative = false;
3716 if (targets.length () == 1)
3717 target = targets[0]->decl;
3718 else
3719 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3721 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3722 && !ie->speculative && ie->maybe_hot_p ())
3724 cgraph_node *n;
3725 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3726 ie->indirect_info->otr_token,
3727 ie->indirect_info->context);
3728 if (n)
3730 target = n->decl;
3731 speculative = true;
3735 if (target)
3737 if (!possible_polymorphic_call_target_p
3738 (ie, cgraph_node::get_create (target)))
3740 if (speculative)
3741 return NULL;
3742 target = ipa_impossible_devirt_target (ie, target);
3744 return ipa_make_edge_direct_to_target (ie, target, speculative);
3746 else
3747 return NULL;
3750 /* Update the param called notes associated with NODE when CS is being inlined,
3751 assuming NODE is (potentially indirectly) inlined into CS->callee.
3752 Moreover, if the callee is discovered to be constant, create a new cgraph
3753 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3754 unless NEW_EDGES is NULL. Return true iff any new edges were created. */
3756 static bool
3757 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3758 struct cgraph_node *node,
3759 vec<cgraph_edge *> *new_edges)
3761 class ipa_edge_args *top;
3762 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3763 struct cgraph_node *new_root;
3764 class ipa_node_params *new_root_info, *inlined_node_info;
3765 bool res = false;
3767 ipa_check_create_edge_args ();
3768 top = IPA_EDGE_REF (cs);
3769 new_root = cs->caller->inlined_to
3770 ? cs->caller->inlined_to : cs->caller;
3771 new_root_info = IPA_NODE_REF (new_root);
3772 inlined_node_info = IPA_NODE_REF (cs->callee->function_symbol ());
3774 for (ie = node->indirect_calls; ie; ie = next_ie)
3776 class cgraph_indirect_call_info *ici = ie->indirect_info;
3777 struct ipa_jump_func *jfunc;
3778 int param_index;
3779 cgraph_node *spec_target = NULL;
3781 next_ie = ie->next_callee;
3783 if (ici->param_index == -1)
3784 continue;
3786 /* We must check range due to calls with variable number of arguments: */
3787 if (!top || ici->param_index >= ipa_get_cs_argument_count (top))
3789 ici->param_index = -1;
3790 continue;
3793 param_index = ici->param_index;
3794 jfunc = ipa_get_ith_jump_func (top, param_index);
3796 if (ie->speculative)
3798 struct cgraph_edge *de;
3799 struct ipa_ref *ref;
3800 ie->speculative_call_info (de, ie, ref);
3801 spec_target = de->callee;
3804 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3805 new_direct_edge = NULL;
3806 else if (ici->polymorphic)
3808 ipa_polymorphic_call_context ctx;
3809 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3810 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx,
3811 new_root,
3812 new_root_info);
3814 else
3816 tree target_type = ipa_get_type (inlined_node_info, param_index);
3817 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3818 target_type,
3819 new_root,
3820 new_root_info);
3823 /* If speculation was removed, then we need to do nothing. */
3824 if (new_direct_edge && new_direct_edge != ie
3825 && new_direct_edge->callee == spec_target)
3827 new_direct_edge->indirect_inlining_edge = 1;
3828 top = IPA_EDGE_REF (cs);
3829 res = true;
3830 if (!new_direct_edge->speculative)
3831 continue;
3833 else if (new_direct_edge)
3835 new_direct_edge->indirect_inlining_edge = 1;
3836 if (new_edges)
3838 new_edges->safe_push (new_direct_edge);
3839 res = true;
3841 top = IPA_EDGE_REF (cs);
3842 /* If a speculative edge was introduced we still need to update
3843 the call info of the indirect edge. */
3844 if (!new_direct_edge->speculative)
3845 continue;
3847 if (jfunc->type == IPA_JF_PASS_THROUGH
3848 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3850 if (ici->agg_contents
3851 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3852 && !ici->polymorphic)
3853 ici->param_index = -1;
3854 else
3856 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3857 if (ici->polymorphic
3858 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3859 ici->vptr_changed = true;
3860 ipa_set_param_used_by_indirect_call (new_root_info,
3861 ici->param_index, true);
3862 if (ici->polymorphic)
3863 ipa_set_param_used_by_polymorphic_call (new_root_info,
3864 ici->param_index, true);
3867 else if (jfunc->type == IPA_JF_ANCESTOR)
3869 if (ici->agg_contents
3870 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3871 && !ici->polymorphic)
3872 ici->param_index = -1;
3873 else
3875 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3876 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3877 if (ici->polymorphic
3878 && !ipa_get_jf_ancestor_type_preserved (jfunc))
3879 ici->vptr_changed = true;
3880 ipa_set_param_used_by_indirect_call (new_root_info,
3881 ici->param_index, true);
3882 if (ici->polymorphic)
3883 ipa_set_param_used_by_polymorphic_call (new_root_info,
3884 ici->param_index, true);
3887 else
3888 /* Either we can find a destination for this edge now or never. */
3889 ici->param_index = -1;
3892 return res;
3895 /* Recursively traverse subtree of NODE (including node) made of inlined
3896 cgraph_edges when CS has been inlined and invoke
3897 update_indirect_edges_after_inlining on all nodes and
3898 update_jump_functions_after_inlining on all non-inlined edges that lead out
3899 of this subtree. Newly discovered indirect edges will be added to
3900 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
3901 created. */
3903 static bool
3904 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3905 struct cgraph_node *node,
3906 vec<cgraph_edge *> *new_edges)
3908 struct cgraph_edge *e;
3909 bool res;
3911 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3913 for (e = node->callees; e; e = e->next_callee)
3914 if (!e->inline_failed)
3915 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3916 else
3917 update_jump_functions_after_inlining (cs, e);
3918 for (e = node->indirect_calls; e; e = e->next_callee)
3919 update_jump_functions_after_inlining (cs, e);
3921 return res;
3924 /* Combine two controlled uses counts as done during inlining. */
3926 static int
3927 combine_controlled_uses_counters (int c, int d)
3929 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3930 return IPA_UNDESCRIBED_USE;
3931 else
3932 return c + d - 1;
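/* For illustration (values chosen arbitrarily): if the caller's parameter
   had 3 described uses, one of which was the call site just inlined, and
   the callee's parameter had 2 described uses, the combined count is
   3 + 2 - 1 == 4, since the inlined call site no longer exists as a use
   of its own.  */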
3935 /* Propagate the number of controlled uses from CS->callee to the new root
3936 of the tree of inlined nodes. */
3938 static void
3939 propagate_controlled_uses (struct cgraph_edge *cs)
3941 class ipa_edge_args *args = IPA_EDGE_REF (cs);
3942 if (!args)
3943 return;
3944 struct cgraph_node *new_root = cs->caller->inlined_to
3945 ? cs->caller->inlined_to : cs->caller;
3946 class ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3947 class ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3948 int count, i;
3950 if (!old_root_info)
3951 return;
3953 count = MIN (ipa_get_cs_argument_count (args),
3954 ipa_get_param_count (old_root_info));
3955 for (i = 0; i < count; i++)
3957 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3958 struct ipa_cst_ref_desc *rdesc;
3960 if (jf->type == IPA_JF_PASS_THROUGH)
3962 int src_idx, c, d;
3963 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3964 c = ipa_get_controlled_uses (new_root_info, src_idx);
3965 d = ipa_get_controlled_uses (old_root_info, i);
3967 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3968 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3969 c = combine_controlled_uses_counters (c, d);
3970 ipa_set_controlled_uses (new_root_info, src_idx, c);
3971 if (c == 0 && new_root_info->ipcp_orig_node)
3973 struct cgraph_node *n;
3974 struct ipa_ref *ref;
3975 tree t = new_root_info->known_csts[src_idx];
3977 if (t && TREE_CODE (t) == ADDR_EXPR
3978 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3979 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3980 && (ref = new_root->find_reference (n, NULL, 0)))
3982 if (dump_file)
3983 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3984 "reference from %s to %s.\n",
3985 new_root->dump_name (),
3986 n->dump_name ());
3987 ref->remove_reference ();
3991 else if (jf->type == IPA_JF_CONST
3992 && (rdesc = jfunc_rdesc_usable (jf)))
3994 int d = ipa_get_controlled_uses (old_root_info, i);
3995 int c = rdesc->refcount;
3996 rdesc->refcount = combine_controlled_uses_counters (c, d);
3997 if (rdesc->refcount == 0)
3999 tree cst = ipa_get_jf_constant (jf);
4000 struct cgraph_node *n;
4001 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
4002 && TREE_CODE (TREE_OPERAND (cst, 0))
4003 == FUNCTION_DECL);
4004 n = cgraph_node::get (TREE_OPERAND (cst, 0));
4005 if (n)
4007 struct cgraph_node *clone;
4008 bool ok;
4009 ok = remove_described_reference (n, rdesc);
4010 gcc_checking_assert (ok);
4012 clone = cs->caller;
4013 while (clone->inlined_to
4014 && clone->ipcp_clone
4015 && clone != rdesc->cs->caller)
4017 struct ipa_ref *ref;
4018 ref = clone->find_reference (n, NULL, 0);
4019 if (ref)
4021 if (dump_file)
4022 fprintf (dump_file, "ipa-prop: Removing "
4023 "cloning-created reference "
4024 "from %s to %s.\n",
4025 clone->dump_name (),
4026 n->dump_name ());
4027 ref->remove_reference ();
4029 clone = clone->callers->caller;
4036 for (i = ipa_get_param_count (old_root_info);
4037 i < ipa_get_cs_argument_count (args);
4038 i++)
4040 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
4042 if (jf->type == IPA_JF_CONST)
4044 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
4045 if (rdesc)
4046 rdesc->refcount = IPA_UNDESCRIBED_USE;
4048 else if (jf->type == IPA_JF_PASS_THROUGH)
4049 ipa_set_controlled_uses (new_root_info,
4050 jf->value.pass_through.formal_id,
4051 IPA_UNDESCRIBED_USE);
4055 /* Update jump functions and call note functions on inlining the call site CS.
4056 CS is expected to lead to a node already cloned by
4057 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
4058 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
4059 created. */
4061 bool
4062 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
4063 vec<cgraph_edge *> *new_edges)
4065 bool changed;
4066 /* Do nothing if the preparation phase has not been carried out yet
4067 (i.e. during early inlining). */
4068 if (!ipa_node_params_sum)
4069 return false;
4070 gcc_assert (ipa_edge_args_sum);
4072 propagate_controlled_uses (cs);
4073 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
4074 ipa_node_params_sum->remove (cs->callee);
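/* Remove the edge summary now unless some of its constant jump functions
   still carry a reference descriptor; in that case the summary is kept so
   that those descriptors remain reachable.  */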
4076 class ipa_edge_args *args = IPA_EDGE_REF (cs);
4077 if (args)
4079 bool ok = true;
4080 if (args->jump_functions)
4082 struct ipa_jump_func *jf;
4083 int i;
4084 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
4085 if (jf->type == IPA_JF_CONST
4086 && ipa_get_jf_constant_rdesc (jf))
4088 ok = false;
4089 break;
4092 if (ok)
4093 ipa_edge_args_sum->remove (cs);
4095 if (ipcp_transformation_sum)
4096 ipcp_transformation_sum->remove (cs->callee);
4098 return changed;
4101 /* Ensure that the array of edge argument infos is big enough to accommodate
4102 a structure for all edges, and reallocate it if not. Also allocate the
4103 associated hash tables if they do not already exist. */
4105 void
4106 ipa_check_create_edge_args (void)
4108 if (!ipa_edge_args_sum)
4109 ipa_edge_args_sum
4110 = (new (ggc_alloc_no_dtor<ipa_edge_args_sum_t> ())
4111 ipa_edge_args_sum_t (symtab, true));
4112 if (!ipa_bits_hash_table)
4113 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
4114 if (!ipa_vr_hash_table)
4115 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
4118 /* Free all ipa_edge structures. */
4120 void
4121 ipa_free_all_edge_args (void)
4123 if (!ipa_edge_args_sum)
4124 return;
4126 ggc_delete (ipa_edge_args_sum);
4127 ipa_edge_args_sum = NULL;
4130 /* Free all ipa_node_params structures. */
4132 void
4133 ipa_free_all_node_params (void)
4135 ggc_delete (ipa_node_params_sum);
4136 ipa_node_params_sum = NULL;
4139 /* Initialize IPA CP transformation summary and also allocate any necessary hash
4140 tables if they do not already exist. */
4142 void
4143 ipcp_transformation_initialize (void)
4145 if (!ipa_bits_hash_table)
4146 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
4147 if (!ipa_vr_hash_table)
4148 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
4149 if (ipcp_transformation_sum == NULL)
4150 ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
4153 /* Release the IPA CP transformation summary. */
4155 void
4156 ipcp_free_transformation_sum (void)
4158 if (!ipcp_transformation_sum)
4159 return;
4161 ipcp_transformation_sum->~function_summary<ipcp_transformation *> ();
4162 ggc_free (ipcp_transformation_sum);
4163 ipcp_transformation_sum = NULL;
4166 /* Set the aggregate replacements of NODE to be AGGVALS. */
4168 void
4169 ipa_set_node_agg_value_chain (struct cgraph_node *node,
4170 struct ipa_agg_replacement_value *aggvals)
4172 ipcp_transformation_initialize ();
4173 ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
4174 s->agg_values = aggvals;
4177 /* Hook that is called by cgraph.c when an edge is removed. Adjust reference
4178 count data structures accordingly. */
4180 void
4181 ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
4183 if (args->jump_functions)
4185 struct ipa_jump_func *jf;
4186 int i;
4187 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
4189 struct ipa_cst_ref_desc *rdesc;
4190 try_decrement_rdesc_refcount (jf);
4191 if (jf->type == IPA_JF_CONST
4192 && (rdesc = ipa_get_jf_constant_rdesc (jf))
4193 && rdesc->cs == cs)
4194 rdesc->cs = NULL;
4199 /* Method invoked when an edge is duplicated. Copy ipa_edge_args and adjust
4200 reference count data structures accordingly. */
4202 void
4203 ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
4204 ipa_edge_args *old_args, ipa_edge_args *new_args)
4206 unsigned int i;
4208 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
4209 if (old_args->polymorphic_call_contexts)
4210 new_args->polymorphic_call_contexts
4211 = vec_safe_copy (old_args->polymorphic_call_contexts);
4213 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
4215 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
4216 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
4218 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
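/* Constant jump functions may carry a reference descriptor (rdesc) that
   counts uses of the constant. The three cases below keep descriptors
   consistent on duplication: a copy within the same caller gets a fresh
   descriptor and a cloned reference, a copy of the very edge a descriptor
   points to is chained onto its duplicate list, and otherwise the existing
   duplicate belonging to our tree of inline clones is looked up.  */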
4220 if (src_jf->type == IPA_JF_CONST)
4222 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
4224 if (!src_rdesc)
4225 dst_jf->value.constant.rdesc = NULL;
4226 else if (src->caller == dst->caller)
4228 struct ipa_ref *ref;
4229 symtab_node *n = cgraph_node_for_jfunc (src_jf);
4230 gcc_checking_assert (n);
4231 ref = src->caller->find_reference (n, src->call_stmt,
4232 src->lto_stmt_uid);
4233 gcc_checking_assert (ref);
4234 dst->caller->clone_reference (ref, ref->stmt);
4236 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
4237 dst_rdesc->cs = dst;
4238 dst_rdesc->refcount = src_rdesc->refcount;
4239 dst_rdesc->next_duplicate = NULL;
4240 dst_jf->value.constant.rdesc = dst_rdesc;
4242 else if (src_rdesc->cs == src)
4244 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
4245 dst_rdesc->cs = dst;
4246 dst_rdesc->refcount = src_rdesc->refcount;
4247 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
4248 src_rdesc->next_duplicate = dst_rdesc;
4249 dst_jf->value.constant.rdesc = dst_rdesc;
4251 else
4253 struct ipa_cst_ref_desc *dst_rdesc;
4254 /* This can happen during inlining, when a JFUNC can refer to a
4255 reference taken in a function up in the tree of inline clones.
4256 We need to find the duplicate that refers to our tree of
4257 inline clones. */
4259 gcc_assert (dst->caller->inlined_to);
4260 for (dst_rdesc = src_rdesc->next_duplicate;
4261 dst_rdesc;
4262 dst_rdesc = dst_rdesc->next_duplicate)
4264 struct cgraph_node *top;
4265 top = dst_rdesc->cs->caller->inlined_to
4266 ? dst_rdesc->cs->caller->inlined_to
4267 : dst_rdesc->cs->caller;
4268 if (dst->caller->inlined_to == top)
4269 break;
4271 gcc_assert (dst_rdesc);
4272 dst_jf->value.constant.rdesc = dst_rdesc;
4275 else if (dst_jf->type == IPA_JF_PASS_THROUGH
4276 && src->caller == dst->caller)
4278 struct cgraph_node *inline_root = dst->caller->inlined_to
4279 ? dst->caller->inlined_to : dst->caller;
4280 class ipa_node_params *root_info = IPA_NODE_REF (inline_root);
4281 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
4283 int c = ipa_get_controlled_uses (root_info, idx);
4284 if (c != IPA_UNDESCRIBED_USE)
4286 c++;
4287 ipa_set_controlled_uses (root_info, idx, c);
4293 /* Analyze a function newly added to the callgraph. */
4295 static void
4296 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
4298 if (node->has_gimple_body_p ())
4299 ipa_analyze_node (node);
4302 /* Hook that is called by summary when a node is duplicated. */
4304 void
4305 ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
4306 ipa_node_params *old_info,
4307 ipa_node_params *new_info)
4309 ipa_agg_replacement_value *old_av, *new_av;
4311 new_info->descriptors = vec_safe_copy (old_info->descriptors);
4312 new_info->lattices = NULL;
4313 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
4314 new_info->known_csts = old_info->known_csts.copy ();
4315 new_info->known_contexts = old_info->known_contexts.copy ();
4317 new_info->analysis_done = old_info->analysis_done;
4318 new_info->node_enqueued = old_info->node_enqueued;
4319 new_info->versionable = old_info->versionable;
4321 old_av = ipa_get_agg_replacements_for_node (src);
4322 if (old_av)
4324 new_av = NULL;
4325 while (old_av)
4327 struct ipa_agg_replacement_value *v;
4329 v = ggc_alloc<ipa_agg_replacement_value> ();
4330 memcpy (v, old_av, sizeof (*v));
4331 v->next = new_av;
4332 new_av = v;
4333 old_av = old_av->next;
4335 ipa_set_node_agg_value_chain (dst, new_av);
4339 /* Duplication of ipcp transformation summaries. */
4341 void
4342 ipcp_transformation_t::duplicate(cgraph_node *, cgraph_node *dst,
4343 ipcp_transformation *src_trans,
4344 ipcp_transformation *dst_trans)
4346 /* Avoid redundant work of duplicating vectors we will never use. */
4347 if (dst->inlined_to)
4348 return;
4349 dst_trans->bits = vec_safe_copy (src_trans->bits);
4350 dst_trans->m_vr = vec_safe_copy (src_trans->m_vr);
4351 ipa_agg_replacement_value *agg = src_trans->agg_values,
4352 **aggptr = &dst_trans->agg_values;
4353 while (agg)
4355 *aggptr = ggc_alloc<ipa_agg_replacement_value> ();
4356 **aggptr = *agg;
4357 agg = agg->next;
4358 aggptr = &(*aggptr)->next;
4362 /* Register our cgraph hooks if they are not already there. */
4364 void
4365 ipa_register_cgraph_hooks (void)
4367 ipa_check_create_node_params ();
4368 ipa_check_create_edge_args ();
4370 function_insertion_hook_holder =
4371 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
4374 /* Unregister our cgraph hooks if they have been registered. */
4376 static void
4377 ipa_unregister_cgraph_hooks (void)
4379 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
4380 function_insertion_hook_holder = NULL;
4383 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4384 longer needed after ipa-cp. */
4386 void
4387 ipa_free_all_structures_after_ipa_cp (void)
4389 if (!optimize && !in_lto_p)
4391 ipa_free_all_edge_args ();
4392 ipa_free_all_node_params ();
4393 ipcp_sources_pool.release ();
4394 ipcp_cst_values_pool.release ();
4395 ipcp_poly_ctx_values_pool.release ();
4396 ipcp_agg_lattice_pool.release ();
4397 ipa_unregister_cgraph_hooks ();
4398 ipa_refdesc_pool.release ();
4402 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4403 longer needed after indirect inlining. */
4405 void
4406 ipa_free_all_structures_after_iinln (void)
4408 ipa_free_all_edge_args ();
4409 ipa_free_all_node_params ();
4410 ipa_unregister_cgraph_hooks ();
4411 ipcp_sources_pool.release ();
4412 ipcp_cst_values_pool.release ();
4413 ipcp_poly_ctx_values_pool.release ();
4414 ipcp_agg_lattice_pool.release ();
4415 ipa_refdesc_pool.release ();
4418 /* Print ipa_tree_map data structures of function NODE to F. */
4421 void
4422 ipa_print_node_params (FILE *f, struct cgraph_node *node)
4424 int i, count;
4425 class ipa_node_params *info;
4427 if (!node->definition)
4428 return;
4429 info = IPA_NODE_REF (node);
4430 fprintf (f, " function %s parameter descriptors:\n", node->dump_name ());
4431 if (!info)
4433 fprintf (f, " no params return\n");
4434 return;
4436 count = ipa_get_param_count (info);
4437 for (i = 0; i < count; i++)
4439 int c;
4441 fprintf (f, " ");
4442 ipa_dump_param (f, info, i);
4443 if (ipa_is_param_used (info, i))
4444 fprintf (f, " used");
4445 if (ipa_is_param_used_by_ipa_predicates (info, i))
4446 fprintf (f, " used_by_ipa_predicates");
4447 if (ipa_is_param_used_by_indirect_call (info, i))
4448 fprintf (f, " used_by_indirect_call");
4449 if (ipa_is_param_used_by_polymorphic_call (info, i))
4450 fprintf (f, " used_by_polymorphic_call");
4451 c = ipa_get_controlled_uses (info, i);
4452 if (c == IPA_UNDESCRIBED_USE)
4453 fprintf (f, " undescribed_use");
4454 else
4455 fprintf (f, " controlled_uses=%i", c);
4456 fprintf (f, "\n");
4460 /* Print ipa_tree_map data structures of all functions in the
4461 callgraph to F. */
4463 void
4464 ipa_print_all_params (FILE * f)
4466 struct cgraph_node *node;
4468 fprintf (f, "\nFunction parameters:\n");
4469 FOR_EACH_FUNCTION (node)
4470 ipa_print_node_params (f, node);
4473 /* Dump the AV linked list. */
4475 void
4476 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4478 bool comma = false;
4479 fprintf (f, " Aggregate replacements:");
4480 for (; av; av = av->next)
4482 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4483 av->index, av->offset);
4484 print_generic_expr (f, av->value);
4485 comma = true;
4487 fprintf (f, "\n");
4490 /* Stream out jump function JUMP_FUNC to OB. */
4492 static void
4493 ipa_write_jump_function (struct output_block *ob,
4494 struct ipa_jump_func *jump_func)
4496 struct ipa_agg_jf_item *item;
4497 struct bitpack_d bp;
4498 int i, count;
4499 int flag = 0;
4501 /* ADDR_EXPRs are very common IP invariants; save some streamer data
4502 as well as WPA memory by handling them specially. */
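/* For instance, since the reader recovers the flag as VAL & 1 and the type
   as VAL / 2, an IPA_JF_CONST wrapping an ADDR_EXPR is streamed as
   jump_func->type * 2 + 1, an odd value, and only the operand of the
   ADDR_EXPR needs to be written.  */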
4503 if (jump_func->type == IPA_JF_CONST
4504 && TREE_CODE (jump_func->value.constant.value) == ADDR_EXPR)
4505 flag = 1;
4507 streamer_write_uhwi (ob, jump_func->type * 2 + flag);
4508 switch (jump_func->type)
4510 case IPA_JF_UNKNOWN:
4511 break;
4512 case IPA_JF_CONST:
4513 gcc_assert (
4514 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4515 stream_write_tree (ob,
4516 flag
4517 ? TREE_OPERAND (jump_func->value.constant.value, 0)
4518 : jump_func->value.constant.value, true);
4519 break;
4520 case IPA_JF_PASS_THROUGH:
4521 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4522 if (jump_func->value.pass_through.operation == NOP_EXPR)
4524 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4525 bp = bitpack_create (ob->main_stream);
4526 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4527 streamer_write_bitpack (&bp);
4529 else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
4530 == tcc_unary)
4531 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4532 else
4534 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4535 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4537 break;
4538 case IPA_JF_ANCESTOR:
4539 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4540 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4541 bp = bitpack_create (ob->main_stream);
4542 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4543 streamer_write_bitpack (&bp);
4544 break;
4545 default:
4546 fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
4549 count = vec_safe_length (jump_func->agg.items);
4550 streamer_write_uhwi (ob, count);
4551 if (count)
4553 bp = bitpack_create (ob->main_stream);
4554 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4555 streamer_write_bitpack (&bp);
4558 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4560 stream_write_tree (ob, item->type, true);
4561 streamer_write_uhwi (ob, item->offset);
4562 streamer_write_uhwi (ob, item->jftype);
4563 switch (item->jftype)
4565 case IPA_JF_UNKNOWN:
4566 break;
4567 case IPA_JF_CONST:
4568 stream_write_tree (ob, item->value.constant, true);
4569 break;
4570 case IPA_JF_PASS_THROUGH:
4571 case IPA_JF_LOAD_AGG:
4572 streamer_write_uhwi (ob, item->value.pass_through.operation);
4573 streamer_write_uhwi (ob, item->value.pass_through.formal_id);
4574 if (TREE_CODE_CLASS (item->value.pass_through.operation)
4575 != tcc_unary)
4576 stream_write_tree (ob, item->value.pass_through.operand, true);
4577 if (item->jftype == IPA_JF_LOAD_AGG)
4579 stream_write_tree (ob, item->value.load_agg.type, true);
4580 streamer_write_uhwi (ob, item->value.load_agg.offset);
4581 bp = bitpack_create (ob->main_stream);
4582 bp_pack_value (&bp, item->value.load_agg.by_ref, 1);
4583 streamer_write_bitpack (&bp);
4585 break;
4586 default:
4587 fatal_error (UNKNOWN_LOCATION,
4588 "invalid jump function in LTO stream");
4592 bp = bitpack_create (ob->main_stream);
4593 bp_pack_value (&bp, !!jump_func->bits, 1);
4594 streamer_write_bitpack (&bp);
4595 if (jump_func->bits)
4597 streamer_write_widest_int (ob, jump_func->bits->value);
4598 streamer_write_widest_int (ob, jump_func->bits->mask);
4600 bp_pack_value (&bp, !!jump_func->m_vr, 1);
4601 streamer_write_bitpack (&bp);
4602 if (jump_func->m_vr)
4604 streamer_write_enum (ob->main_stream, value_range_kind,
4605 VR_LAST, jump_func->m_vr->kind ());
4606 stream_write_tree (ob, jump_func->m_vr->min (), true);
4607 stream_write_tree (ob, jump_func->m_vr->max (), true);
4611 /* Read in jump function JUMP_FUNC from IB. */
4613 static void
4614 ipa_read_jump_function (class lto_input_block *ib,
4615 struct ipa_jump_func *jump_func,
4616 struct cgraph_edge *cs,
4617 class data_in *data_in,
4618 bool prevails)
4620 enum jump_func_type jftype;
4621 enum tree_code operation;
4622 int i, count;
4623 int val = streamer_read_uhwi (ib);
4624 bool flag = val & 1;
4626 jftype = (enum jump_func_type) (val / 2);
4627 switch (jftype)
4629 case IPA_JF_UNKNOWN:
4630 ipa_set_jf_unknown (jump_func);
4631 break;
4632 case IPA_JF_CONST:
4634 tree t = stream_read_tree (ib, data_in);
4635 if (flag && prevails)
4636 t = build_fold_addr_expr (t);
4637 ipa_set_jf_constant (jump_func, t, cs);
4639 break;
4640 case IPA_JF_PASS_THROUGH:
4641 operation = (enum tree_code) streamer_read_uhwi (ib);
4642 if (operation == NOP_EXPR)
4644 int formal_id = streamer_read_uhwi (ib);
4645 struct bitpack_d bp = streamer_read_bitpack (ib);
4646 bool agg_preserved = bp_unpack_value (&bp, 1);
4647 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4649 else if (TREE_CODE_CLASS (operation) == tcc_unary)
4651 int formal_id = streamer_read_uhwi (ib);
4652 ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4654 else
4656 tree operand = stream_read_tree (ib, data_in);
4657 int formal_id = streamer_read_uhwi (ib);
4658 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4659 operation);
4661 break;
4662 case IPA_JF_ANCESTOR:
4664 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4665 int formal_id = streamer_read_uhwi (ib);
4666 struct bitpack_d bp = streamer_read_bitpack (ib);
4667 bool agg_preserved = bp_unpack_value (&bp, 1);
4668 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4669 break;
4671 default:
4672 fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
4675 count = streamer_read_uhwi (ib);
4676 if (prevails)
4677 vec_alloc (jump_func->agg.items, count);
4678 if (count)
4680 struct bitpack_d bp = streamer_read_bitpack (ib);
4681 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4683 for (i = 0; i < count; i++)
4685 struct ipa_agg_jf_item item;
4686 item.type = stream_read_tree (ib, data_in);
4687 item.offset = streamer_read_uhwi (ib);
4688 item.jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4690 switch (item.jftype)
4692 case IPA_JF_UNKNOWN:
4693 break;
4694 case IPA_JF_CONST:
4695 item.value.constant = stream_read_tree (ib, data_in);
4696 break;
4697 case IPA_JF_PASS_THROUGH:
4698 case IPA_JF_LOAD_AGG:
4699 operation = (enum tree_code) streamer_read_uhwi (ib);
4700 item.value.pass_through.operation = operation;
4701 item.value.pass_through.formal_id = streamer_read_uhwi (ib);
4702 if (TREE_CODE_CLASS (operation) == tcc_unary)
4703 item.value.pass_through.operand = NULL_TREE;
4704 else
4705 item.value.pass_through.operand = stream_read_tree (ib, data_in);
4706 if (item.jftype == IPA_JF_LOAD_AGG)
4708 struct bitpack_d bp;
4709 item.value.load_agg.type = stream_read_tree (ib, data_in);
4710 item.value.load_agg.offset = streamer_read_uhwi (ib);
4711 bp = streamer_read_bitpack (ib);
4712 item.value.load_agg.by_ref = bp_unpack_value (&bp, 1);
4714 break;
4715 default:
4716 fatal_error (UNKNOWN_LOCATION,
4717 "invalid jump function in LTO stream");
4719 if (prevails)
4720 jump_func->agg.items->quick_push (item);
4723 struct bitpack_d bp = streamer_read_bitpack (ib);
4724 bool bits_known = bp_unpack_value (&bp, 1);
4725 if (bits_known)
4727 widest_int value = streamer_read_widest_int (ib);
4728 widest_int mask = streamer_read_widest_int (ib);
4729 if (prevails)
4730 ipa_set_jfunc_bits (jump_func, value, mask);
4732 else
4733 jump_func->bits = NULL;
4735 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4736 bool vr_known = bp_unpack_value (&vr_bp, 1);
4737 if (vr_known)
4739 enum value_range_kind type = streamer_read_enum (ib, value_range_kind,
4740 VR_LAST);
4741 tree min = stream_read_tree (ib, data_in);
4742 tree max = stream_read_tree (ib, data_in);
4743 if (prevails)
4744 ipa_set_jfunc_vr (jump_func, type, min, max);
4746 else
4747 jump_func->m_vr = NULL;
4750 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4751 relevant to indirect inlining to OB. */
4753 static void
4754 ipa_write_indirect_edge_info (struct output_block *ob,
4755 struct cgraph_edge *cs)
4757 class cgraph_indirect_call_info *ii = cs->indirect_info;
4758 struct bitpack_d bp;
4760 streamer_write_hwi (ob, ii->param_index);
4761 bp = bitpack_create (ob->main_stream);
4762 bp_pack_value (&bp, ii->polymorphic, 1);
4763 bp_pack_value (&bp, ii->agg_contents, 1);
4764 bp_pack_value (&bp, ii->member_ptr, 1);
4765 bp_pack_value (&bp, ii->by_ref, 1);
4766 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
4767 bp_pack_value (&bp, ii->vptr_changed, 1);
4768 streamer_write_bitpack (&bp);
4769 if (ii->agg_contents || ii->polymorphic)
4770 streamer_write_hwi (ob, ii->offset);
4771 else
4772 gcc_assert (ii->offset == 0);
4774 if (ii->polymorphic)
4776 streamer_write_hwi (ob, ii->otr_token);
4777 stream_write_tree (ob, ii->otr_type, true);
4778 ii->context.stream_out (ob);
4782 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4783 relevant to indirect inlining from IB. */
4785 static void
4786 ipa_read_indirect_edge_info (class lto_input_block *ib,
4787 class data_in *data_in,
4788 struct cgraph_edge *cs,
4789 class ipa_node_params *info)
4791 class cgraph_indirect_call_info *ii = cs->indirect_info;
4792 struct bitpack_d bp;
4794 ii->param_index = (int) streamer_read_hwi (ib);
4795 bp = streamer_read_bitpack (ib);
4796 ii->polymorphic = bp_unpack_value (&bp, 1);
4797 ii->agg_contents = bp_unpack_value (&bp, 1);
4798 ii->member_ptr = bp_unpack_value (&bp, 1);
4799 ii->by_ref = bp_unpack_value (&bp, 1);
4800 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
4801 ii->vptr_changed = bp_unpack_value (&bp, 1);
4802 if (ii->agg_contents || ii->polymorphic)
4803 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4804 else
4805 ii->offset = 0;
4806 if (ii->polymorphic)
4808 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4809 ii->otr_type = stream_read_tree (ib, data_in);
4810 ii->context.stream_in (ib, data_in);
4812 if (info && ii->param_index >= 0)
4814 if (ii->polymorphic)
4815 ipa_set_param_used_by_polymorphic_call (info,
4816 ii->param_index , true);
4817 ipa_set_param_used_by_indirect_call (info,
4818 ii->param_index, true);
4822 /* Stream out NODE info to OB. */
4824 static void
4825 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4827 int node_ref;
4828 lto_symtab_encoder_t encoder;
4829 class ipa_node_params *info = IPA_NODE_REF (node);
4830 int j;
4831 struct cgraph_edge *e;
4832 struct bitpack_d bp;
4834 encoder = ob->decl_state->symtab_node_encoder;
4835 node_ref = lto_symtab_encoder_encode (encoder, node);
4836 streamer_write_uhwi (ob, node_ref);
4838 streamer_write_uhwi (ob, ipa_get_param_count (info));
4839 for (j = 0; j < ipa_get_param_count (info); j++)
4840 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4841 bp = bitpack_create (ob->main_stream);
4842 gcc_assert (info->analysis_done
4843 || ipa_get_param_count (info) == 0);
4844 gcc_assert (!info->node_enqueued);
4845 gcc_assert (!info->ipcp_orig_node);
4846 for (j = 0; j < ipa_get_param_count (info); j++)
4847 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4848 streamer_write_bitpack (&bp);
4849 for (j = 0; j < ipa_get_param_count (info); j++)
4851 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4852 stream_write_tree (ob, ipa_get_type (info, j), true);
4854 for (e = node->callees; e; e = e->next_callee)
4856 class ipa_edge_args *args = IPA_EDGE_REF (e);
4858 if (!args)
4860 streamer_write_uhwi (ob, 0);
4861 continue;
4864 streamer_write_uhwi (ob,
4865 ipa_get_cs_argument_count (args) * 2
4866 + (args->polymorphic_call_contexts != NULL));
4867 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4869 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4870 if (args->polymorphic_call_contexts != NULL)
4871 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4874 for (e = node->indirect_calls; e; e = e->next_callee)
4876 class ipa_edge_args *args = IPA_EDGE_REF (e);
4877 if (!args)
4878 streamer_write_uhwi (ob, 0);
4879 else
4881 streamer_write_uhwi (ob,
4882 ipa_get_cs_argument_count (args) * 2
4883 + (args->polymorphic_call_contexts != NULL));
4884 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4886 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4887 if (args->polymorphic_call_contexts != NULL)
4888 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4891 ipa_write_indirect_edge_info (ob, e);
4895 /* Stream in edge E from IB. */
4897 static void
4898 ipa_read_edge_info (class lto_input_block *ib,
4899 class data_in *data_in,
4900 struct cgraph_edge *e, bool prevails)
4902 int count = streamer_read_uhwi (ib);
4903 bool contexts_computed = count & 1;
4905 count /= 2;
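/* The writer streamed ipa_get_cs_argument_count (args) * 2 plus one bit
   saying whether polymorphic call contexts follow; e.g. three argument
   jump functions with contexts arrive as the value 7.  */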
4906 if (!count)
4907 return;
4908 if (prevails && e->possibly_call_in_translation_unit_p ())
4910 class ipa_edge_args *args = IPA_EDGE_REF_GET_CREATE (e);
4911 vec_safe_grow_cleared (args->jump_functions, count);
4912 if (contexts_computed)
4913 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4914 for (int k = 0; k < count; k++)
4916 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4917 data_in, prevails);
4918 if (contexts_computed)
4919 ipa_get_ith_polymorhic_call_context (args, k)->stream_in
4920 (ib, data_in);
4923 else
4925 for (int k = 0; k < count; k++)
4927 struct ipa_jump_func dummy;
4928 ipa_read_jump_function (ib, &dummy, e,
4929 data_in, prevails);
4930 if (contexts_computed)
4932 class ipa_polymorphic_call_context ctx;
4933 ctx.stream_in (ib, data_in);
4939 /* Stream in NODE info from IB. */
4941 static void
4942 ipa_read_node_info (class lto_input_block *ib, struct cgraph_node *node,
4943 class data_in *data_in)
4945 int k;
4946 struct cgraph_edge *e;
4947 struct bitpack_d bp;
4948 bool prevails = node->prevailing_p ();
4949 class ipa_node_params *info = prevails
4950 ? IPA_NODE_REF_GET_CREATE (node) : NULL;
4952 int param_count = streamer_read_uhwi (ib);
4953 if (prevails)
4955 ipa_alloc_node_params (node, param_count);
4956 for (k = 0; k < param_count; k++)
4957 (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
4958 if (ipa_get_param_count (info) != 0)
4959 info->analysis_done = true;
4960 info->node_enqueued = false;
4962 else
4963 for (k = 0; k < param_count; k++)
4964 streamer_read_uhwi (ib);
4966 bp = streamer_read_bitpack (ib);
4967 for (k = 0; k < param_count; k++)
4969 bool used = bp_unpack_value (&bp, 1);
4971 if (prevails)
4972 ipa_set_param_used (info, k, used);
4974 for (k = 0; k < param_count; k++)
4976 int nuses = streamer_read_hwi (ib);
4977 tree type = stream_read_tree (ib, data_in);
4979 if (prevails)
4981 ipa_set_controlled_uses (info, k, nuses);
4982 (*info->descriptors)[k].decl_or_type = type;
4985 for (e = node->callees; e; e = e->next_callee)
4986 ipa_read_edge_info (ib, data_in, e, prevails);
4987 for (e = node->indirect_calls; e; e = e->next_callee)
4989 ipa_read_edge_info (ib, data_in, e, prevails);
4990 ipa_read_indirect_edge_info (ib, data_in, e, info);
4994 /* Write jump functions for all functions in the current LTO partition. */
4996 void
4997 ipa_prop_write_jump_functions (void)
4999 struct cgraph_node *node;
5000 struct output_block *ob;
5001 unsigned int count = 0;
5002 lto_symtab_encoder_iterator lsei;
5003 lto_symtab_encoder_t encoder;
5005 if (!ipa_node_params_sum || !ipa_edge_args_sum)
5006 return;
5008 ob = create_output_block (LTO_section_jump_functions);
5009 encoder = ob->decl_state->symtab_node_encoder;
5010 ob->symbol = NULL;
5011 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5012 lsei_next_function_in_partition (&lsei))
5014 node = lsei_cgraph_node (lsei);
5015 if (node->has_gimple_body_p ()
5016 && IPA_NODE_REF (node) != NULL)
5017 count++;
5020 streamer_write_uhwi (ob, count);
5022 /* Process all of the functions. */
5023 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5024 lsei_next_function_in_partition (&lsei))
5026 node = lsei_cgraph_node (lsei);
5027 if (node->has_gimple_body_p ()
5028 && IPA_NODE_REF (node) != NULL)
5029 ipa_write_node_info (ob, node);
5031 streamer_write_char_stream (ob->main_stream, 0);
5032 produce_asm (ob, NULL);
5033 destroy_output_block (ob);
5036 /* Read section in file FILE_DATA of length LEN with data DATA. */
5038 static void
5039 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
5040 size_t len)
5042 const struct lto_function_header *header =
5043 (const struct lto_function_header *) data;
5044 const int cfg_offset = sizeof (struct lto_function_header);
5045 const int main_offset = cfg_offset + header->cfg_size;
5046 const int string_offset = main_offset + header->main_size;
5047 class data_in *data_in;
5048 unsigned int i;
5049 unsigned int count;
5051 lto_input_block ib_main ((const char *) data + main_offset,
5052 header->main_size, file_data->mode_table);
5054 data_in =
5055 lto_data_in_create (file_data, (const char *) data + string_offset,
5056 header->string_size, vNULL);
5057 count = streamer_read_uhwi (&ib_main);
5059 for (i = 0; i < count; i++)
5061 unsigned int index;
5062 struct cgraph_node *node;
5063 lto_symtab_encoder_t encoder;
5065 index = streamer_read_uhwi (&ib_main);
5066 encoder = file_data->symtab_node_encoder;
5067 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5068 index));
5069 gcc_assert (node->definition);
5070 ipa_read_node_info (&ib_main, node, data_in);
5072 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5073 len);
5074 lto_data_in_delete (data_in);
5077 /* Read ipcp jump functions. */
5079 void
5080 ipa_prop_read_jump_functions (void)
5082 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5083 struct lto_file_decl_data *file_data;
5084 unsigned int j = 0;
5086 ipa_check_create_node_params ();
5087 ipa_check_create_edge_args ();
5088 ipa_register_cgraph_hooks ();
5090 while ((file_data = file_data_vec[j++]))
5092 size_t len;
5093 const char *data
5094 = lto_get_summary_section_data (file_data, LTO_section_jump_functions,
5095 &len);
5096 if (data)
5097 ipa_prop_read_section (file_data, data, len);
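/* Stream out the IPA-CP transformation summary for NODE to OB: the chain of
   aggregate replacement values followed by the recorded value ranges and
   known bits of its parameters.  */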
5101 void
5102 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
5104 int node_ref;
5105 unsigned int count = 0;
5106 lto_symtab_encoder_t encoder;
5107 struct ipa_agg_replacement_value *aggvals, *av;
5109 aggvals = ipa_get_agg_replacements_for_node (node);
5110 encoder = ob->decl_state->symtab_node_encoder;
5111 node_ref = lto_symtab_encoder_encode (encoder, node);
5112 streamer_write_uhwi (ob, node_ref);
5114 for (av = aggvals; av; av = av->next)
5115 count++;
5116 streamer_write_uhwi (ob, count);
5118 for (av = aggvals; av; av = av->next)
5120 struct bitpack_d bp;
5122 streamer_write_uhwi (ob, av->offset);
5123 streamer_write_uhwi (ob, av->index);
5124 stream_write_tree (ob, av->value, true);
5126 bp = bitpack_create (ob->main_stream);
5127 bp_pack_value (&bp, av->by_ref, 1);
5128 streamer_write_bitpack (&bp);
5131 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5132 if (ts && vec_safe_length (ts->m_vr) > 0)
5134 count = ts->m_vr->length ();
5135 streamer_write_uhwi (ob, count);
5136 for (unsigned i = 0; i < count; ++i)
5138 struct bitpack_d bp;
5139 ipa_vr *parm_vr = &(*ts->m_vr)[i];
5140 bp = bitpack_create (ob->main_stream);
5141 bp_pack_value (&bp, parm_vr->known, 1);
5142 streamer_write_bitpack (&bp);
5143 if (parm_vr->known)
5145 streamer_write_enum (ob->main_stream, value_range_kind,
5146 VR_LAST, parm_vr->type);
5147 streamer_write_wide_int (ob, parm_vr->min);
5148 streamer_write_wide_int (ob, parm_vr->max);
5152 else
5153 streamer_write_uhwi (ob, 0);
5155 if (ts && vec_safe_length (ts->bits) > 0)
5157 count = ts->bits->length ();
5158 streamer_write_uhwi (ob, count);
5160 for (unsigned i = 0; i < count; ++i)
5162 const ipa_bits *bits_jfunc = (*ts->bits)[i];
5163 struct bitpack_d bp = bitpack_create (ob->main_stream);
5164 bp_pack_value (&bp, !!bits_jfunc, 1);
5165 streamer_write_bitpack (&bp);
5166 if (bits_jfunc)
5168 streamer_write_widest_int (ob, bits_jfunc->value);
5169 streamer_write_widest_int (ob, bits_jfunc->mask);
5173 else
5174 streamer_write_uhwi (ob, 0);
5177 /* Stream in the aggregate value replacement chain and other recorded transformation data (value ranges and known bits) for NODE from IB. */
5179 static void
5180 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
5181 data_in *data_in)
5183 struct ipa_agg_replacement_value *aggvals = NULL;
5184 unsigned int count, i;
5186 count = streamer_read_uhwi (ib);
5187 for (i = 0; i < count; i++)
5189 struct ipa_agg_replacement_value *av;
5190 struct bitpack_d bp;
5192 av = ggc_alloc<ipa_agg_replacement_value> ();
5193 av->offset = streamer_read_uhwi (ib);
5194 av->index = streamer_read_uhwi (ib);
5195 av->value = stream_read_tree (ib, data_in);
5196 bp = streamer_read_bitpack (ib);
5197 av->by_ref = bp_unpack_value (&bp, 1);
5198 av->next = aggvals;
5199 aggvals = av;
5201 ipa_set_node_agg_value_chain (node, aggvals);
5203 count = streamer_read_uhwi (ib);
5204 if (count > 0)
5206 ipcp_transformation_initialize ();
5207 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
5208 vec_safe_grow_cleared (ts->m_vr, count);
5209 for (i = 0; i < count; i++)
5211 ipa_vr *parm_vr;
5212 parm_vr = &(*ts->m_vr)[i];
5213 struct bitpack_d bp;
5214 bp = streamer_read_bitpack (ib);
5215 parm_vr->known = bp_unpack_value (&bp, 1);
5216 if (parm_vr->known)
5218 parm_vr->type = streamer_read_enum (ib, value_range_kind,
5219 VR_LAST);
5220 parm_vr->min = streamer_read_wide_int (ib);
5221 parm_vr->max = streamer_read_wide_int (ib);
5225 count = streamer_read_uhwi (ib);
5226 if (count > 0)
5228 ipcp_transformation_initialize ();
5229 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
5230 vec_safe_grow_cleared (ts->bits, count);
5232 for (i = 0; i < count; i++)
5234 struct bitpack_d bp = streamer_read_bitpack (ib);
5235 bool known = bp_unpack_value (&bp, 1);
5236 if (known)
5238 ipa_bits *bits
5239 = ipa_get_ipa_bits_for_value (streamer_read_widest_int (ib),
5240 streamer_read_widest_int (ib));
5241 (*ts->bits)[i] = bits;
5247 /* Write aggregate replacements and other transformation summaries for all nodes in the current partition. */
5249 void
5250 ipcp_write_transformation_summaries (void)
5252 struct cgraph_node *node;
5253 struct output_block *ob;
5254 unsigned int count = 0;
5255 lto_symtab_encoder_iterator lsei;
5256 lto_symtab_encoder_t encoder;
5258 ob = create_output_block (LTO_section_ipcp_transform);
5259 encoder = ob->decl_state->symtab_node_encoder;
5260 ob->symbol = NULL;
5261 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5262 lsei_next_function_in_partition (&lsei))
5264 node = lsei_cgraph_node (lsei);
5265 if (node->has_gimple_body_p ())
5266 count++;
5269 streamer_write_uhwi (ob, count);
5271 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5272 lsei_next_function_in_partition (&lsei))
5274 node = lsei_cgraph_node (lsei);
5275 if (node->has_gimple_body_p ())
5276 write_ipcp_transformation_info (ob, node);
5278 streamer_write_char_stream (ob->main_stream, 0);
5279 produce_asm (ob, NULL);
5280 destroy_output_block (ob);
5283 /* Read replacements section in file FILE_DATA of length LEN with data
5284 DATA. */
5286 static void
5287 read_replacements_section (struct lto_file_decl_data *file_data,
5288 const char *data,
5289 size_t len)
5291 const struct lto_function_header *header =
5292 (const struct lto_function_header *) data;
5293 const int cfg_offset = sizeof (struct lto_function_header);
5294 const int main_offset = cfg_offset + header->cfg_size;
5295 const int string_offset = main_offset + header->main_size;
5296 class data_in *data_in;
5297 unsigned int i;
5298 unsigned int count;
5300 lto_input_block ib_main ((const char *) data + main_offset,
5301 header->main_size, file_data->mode_table);
5303 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5304 header->string_size, vNULL);
5305 count = streamer_read_uhwi (&ib_main);
5307 for (i = 0; i < count; i++)
5309 unsigned int index;
5310 struct cgraph_node *node;
5311 lto_symtab_encoder_t encoder;
5313 index = streamer_read_uhwi (&ib_main);
5314 encoder = file_data->symtab_node_encoder;
5315 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5316 index));
5317 gcc_assert (node->definition);
5318 read_ipcp_transformation_info (&ib_main, node, data_in);
5320 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
5321 len);
5322 lto_data_in_delete (data_in);
5325 /* Read IPA-CP aggregate replacements. */
5327 void
5328 ipcp_read_transformation_summaries (void)
5330 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5331 struct lto_file_decl_data *file_data;
5332 unsigned int j = 0;
5334 while ((file_data = file_data_vec[j++]))
5336 size_t len;
5337 const char *data
5338 = lto_get_summary_section_data (file_data, LTO_section_ipcp_transform,
5339 &len);
5340 if (data)
5341 read_replacements_section (file_data, data, len);
5345 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5346 NODE. */
5348 static void
5349 adjust_agg_replacement_values (struct cgraph_node *node,
5350 struct ipa_agg_replacement_value *aggval)
5352 struct ipa_agg_replacement_value *v;
5354 if (!node->clone.param_adjustments)
5355 return;
5357 auto_vec<int, 16> new_indices;
5358 node->clone.param_adjustments->get_updated_indices (&new_indices);
5359 for (v = aggval; v; v = v->next)
5361 gcc_checking_assert (v->index >= 0);
5363 if ((unsigned) v->index < new_indices.length ())
5364 v->index = new_indices[v->index];
5365 else
5366 /* This can happen if we know about a constant that is passed by
5367 reference by an argument which is never actually used for anything,
5368 let alone for loading that constant. */
5369 v->index = -1;
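/* For example (assuming the updated-index map yields -1 for removed
   parameters): if a clone dropped the middle one of three parameters,
   new_indices would be {0, -1, 1}, so replacements for the first and third
   original parameters are renumbered while those for the dropped one get
   index -1 and are ignored from then on.  */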
5373 /* Dominator walker driving the ipcp modification phase. */
5375 class ipcp_modif_dom_walker : public dom_walker
5377 public:
5378 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
5379 vec<ipa_param_descriptor, va_gc> *descs,
5380 struct ipa_agg_replacement_value *av,
5381 bool *sc, bool *cc)
5382 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5383 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5385 virtual edge before_dom_children (basic_block);
5387 private:
5388 struct ipa_func_body_info *m_fbi;
5389 vec<ipa_param_descriptor, va_gc> *m_descriptors;
5390 struct ipa_agg_replacement_value *m_aggval;
5391 bool *m_something_changed, *m_cfg_changed;
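/* Scan the statements of BB and replace loads from parameter aggregates
   with the constants recorded in the aggregate replacement list whenever a
   load matches a recorded index, offset and size.  */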
5394 edge
5395 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5397 gimple_stmt_iterator gsi;
5398 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5400 struct ipa_agg_replacement_value *v;
5401 gimple *stmt = gsi_stmt (gsi);
5402 tree rhs, val, t;
5403 HOST_WIDE_INT offset;
5404 poly_int64 size;
5405 int index;
5406 bool by_ref, vce;
5408 if (!gimple_assign_load_p (stmt))
5409 continue;
5410 rhs = gimple_assign_rhs1 (stmt);
5411 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5412 continue;
5414 vce = false;
5415 t = rhs;
5416 while (handled_component_p (t))
5418 /* A V_C_E (VIEW_CONVERT_EXPR) can do things like convert an array of
5419 integers to one bigger integer and similar things we do not handle below. */
5420 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5422 vce = true;
5423 break;
5425 t = TREE_OPERAND (t, 0);
5427 if (vce)
5428 continue;
5430 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
5431 &offset, &size, &by_ref))
5432 continue;
5433 for (v = m_aggval; v; v = v->next)
5434 if (v->index == index
5435 && v->offset == offset)
5436 break;
5437 if (!v
5438 || v->by_ref != by_ref
5439 || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v->value))),
5440 size))
5441 continue;
5443 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5444 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5446 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5447 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5448 else if (TYPE_SIZE (TREE_TYPE (rhs))
5449 == TYPE_SIZE (TREE_TYPE (v->value)))
5450 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5451 else
5453 if (dump_file)
5455 fprintf (dump_file, " const ");
5456 print_generic_expr (dump_file, v->value);
5457 fprintf (dump_file, " can't be converted to type of ");
5458 print_generic_expr (dump_file, rhs);
5459 fprintf (dump_file, "\n");
5461 continue;
5464 else
5465 val = v->value;
5467 if (dump_file && (dump_flags & TDF_DETAILS))
5469 fprintf (dump_file, "Modifying stmt:\n ");
5470 print_gimple_stmt (dump_file, stmt, 0);
5472 gimple_assign_set_rhs_from_tree (&gsi, val);
5473 update_stmt (stmt);
5475 if (dump_file && (dump_flags & TDF_DETAILS))
5477 fprintf (dump_file, "into:\n ");
5478 print_gimple_stmt (dump_file, stmt, 0);
5479 fprintf (dump_file, "\n");
5482 *m_something_changed = true;
5483 if (maybe_clean_eh_stmt (stmt)
5484 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5485 *m_cfg_changed = true;
5487 return NULL;
5490 /* Update bits info of formal parameters as described in
5491 ipcp_transformation. */
5493 static void
5494 ipcp_update_bits (struct cgraph_node *node)
5496 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5498 if (!ts || vec_safe_length (ts->bits) == 0)
5499 return;
5500 vec<ipa_bits *, va_gc> &bits = *ts->bits;
5501 unsigned count = bits.length ();
5502 if (!count)
5503 return;
5505 auto_vec<int, 16> new_indices;
5506 bool need_remapping = false;
5507 if (node->clone.param_adjustments)
5509 node->clone.param_adjustments->get_updated_indices (&new_indices);
5510 need_remapping = true;
5512 auto_vec <tree, 16> parm_decls;
5513 push_function_arg_decls (&parm_decls, node->decl);
5515 for (unsigned i = 0; i < count; ++i)
5517 tree parm;
5518 if (need_remapping)
5520 if (i >= new_indices.length ())
5521 continue;
5522 int idx = new_indices[i];
5523 if (idx < 0)
5524 continue;
5525 parm = parm_decls[idx];
5527 else
5528 parm = parm_decls[i];
5529 gcc_checking_assert (parm);
5532 if (!bits[i]
5533 || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
5534 || POINTER_TYPE_P (TREE_TYPE (parm)))
5535 || !is_gimple_reg (parm))
5536 continue;
5538 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5539 if (!ddef)
5540 continue;
5542 if (dump_file)
5544 fprintf (dump_file, "Adjusting mask for param %u to ", i);
5545 print_hex (bits[i]->mask, dump_file);
5546 fprintf (dump_file, "\n");
5549 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5551 unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
5552 signop sgn = TYPE_SIGN (TREE_TYPE (ddef));
5554 wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
5555 | wide_int::from (bits[i]->value, prec, sgn);
5556 set_nonzero_bits (ddef, nonzero_bits);
5558 else
5560 unsigned tem = bits[i]->mask.to_uhwi ();
5561 unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
5562 unsigned align = tem & -tem;
5563 unsigned misalign = bitpos & (align - 1);
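/* The expression tem & -tem above isolates the lowest set bit of the mask,
   which bounds the provable alignment: e.g. (values chosen arbitrarily)
   a mask of 0xf0 gives align == 0x10 == 16, and a value of 0x38 then gives
   misalign == 0x38 & 15 == 8.  */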
5565 if (align > 1)
5567 if (dump_file)
5568 fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);
5570 unsigned old_align, old_misalign;
5571 struct ptr_info_def *pi = get_ptr_info (ddef);
5572 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5574 if (old_known
5575 && old_align > align)
5577 if (dump_file)
5579 fprintf (dump_file, "But alignment was already %u.\n", old_align);
5580 if ((old_misalign & (align - 1)) != misalign)
5581 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5582 old_misalign, misalign);
5584 continue;
5587 if (old_known
5588 && ((misalign & (old_align - 1)) != old_misalign)
5589 && dump_file)
5590 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5591 old_misalign, misalign);
5593 set_ptr_info_alignment (pi, align, misalign);
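/* Return true if this ipa_vr describes a range that cannot contain zero:
   either the anti-range ~[0, 0] or, for an unsigned EXPR_TYPE, the range
   [1, TYPE_MAX].  */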
5599 bool
5600 ipa_vr::nonzero_p (tree expr_type) const
5602 if (type == VR_ANTI_RANGE && wi::eq_p (min, 0) && wi::eq_p (max, 0))
5603 return true;
5605 unsigned prec = TYPE_PRECISION (expr_type);
5606 return (type == VR_RANGE
5607 && TYPE_UNSIGNED (expr_type)
5608 && wi::eq_p (min, wi::one (prec))
5609 && wi::eq_p (max, wi::max_value (prec, TYPE_SIGN (expr_type))));
5612 /* Update value range of formal parameters as described in
5613 ipcp_transformation. */
5615 static void
5616 ipcp_update_vr (struct cgraph_node *node)
5618 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5619 if (!ts || vec_safe_length (ts->m_vr) == 0)
5620 return;
5621 const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
5622 unsigned count = vr.length ();
5623 if (!count)
5624 return;
5626 auto_vec<int, 16> new_indices;
5627 bool need_remapping = false;
5628 if (node->clone.param_adjustments)
5630 node->clone.param_adjustments->get_updated_indices (&new_indices);
5631 need_remapping = true;
5633 auto_vec <tree, 16> parm_decls;
5634 push_function_arg_decls (&parm_decls, node->decl);
5636 for (unsigned i = 0; i < count; ++i)
5638 tree parm;
5639 int remapped_idx;
5640 if (need_remapping)
5642 if (i >= new_indices.length ())
5643 continue;
5644 remapped_idx = new_indices[i];
5645 if (remapped_idx < 0)
5646 continue;
5648 else
5649 remapped_idx = i;
5651 parm = parm_decls[remapped_idx];
5653 gcc_checking_assert (parm);
5654 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5656 if (!ddef || !is_gimple_reg (parm))
5657 continue;
5659 if (vr[i].known
5660 && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
5662 tree type = TREE_TYPE (ddef);
5663 unsigned prec = TYPE_PRECISION (type);
5664 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5666 if (dump_file)
5668 fprintf (dump_file, "Setting value range of param %u "
5669 "(now %i) ", i, remapped_idx);
5670 fprintf (dump_file, "%s[",
5671 (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
5672 print_decs (vr[i].min, dump_file);
5673 fprintf (dump_file, ", ");
5674 print_decs (vr[i].max, dump_file);
5675 fprintf (dump_file, "]\n");
5677 set_range_info (ddef, vr[i].type,
5678 wide_int_storage::from (vr[i].min, prec,
5679 TYPE_SIGN (type)),
5680 wide_int_storage::from (vr[i].max, prec,
5681 TYPE_SIGN (type)));
5683 else if (POINTER_TYPE_P (TREE_TYPE (ddef))
5684 && vr[i].nonzero_p (TREE_TYPE (ddef)))
5686 if (dump_file)
5687 fprintf (dump_file, "Setting nonnull for %u\n", i);
5688 set_ptr_nonnull (ddef);
5694 /* IPCP transformation phase: apply recorded aggregate replacements, known bits and value ranges to the body of NODE. */
5696 unsigned int
5697 ipcp_transform_function (struct cgraph_node *node)
5699 vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
5700 struct ipa_func_body_info fbi;
5701 struct ipa_agg_replacement_value *aggval;
5702 int param_count;
5703 bool cfg_changed = false, something_changed = false;
5705 gcc_checking_assert (cfun);
5706 gcc_checking_assert (current_function_decl);
5708 if (dump_file)
5709 fprintf (dump_file, "Modification phase of node %s\n",
5710 node->dump_name ());
5712 ipcp_update_bits (node);
5713 ipcp_update_vr (node);
5714 aggval = ipa_get_agg_replacements_for_node (node);
5715 if (!aggval)
5716 return 0;
5717 param_count = count_formal_params (node->decl);
5718 if (param_count == 0)
5719 return 0;
5720 adjust_agg_replacement_values (node, aggval);
5721 if (dump_file)
5722 ipa_dump_agg_replacement_values (dump_file, aggval);
5724 fbi.node = node;
5725 fbi.info = NULL;
5726 fbi.bb_infos = vNULL;
5727 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5728 fbi.param_count = param_count;
5729 fbi.aa_walk_budget = param_ipa_max_aa_steps;
5731 vec_safe_grow_cleared (descriptors, param_count);
5732 ipa_populate_param_decls (node, *descriptors);
5733 calculate_dominance_info (CDI_DOMINATORS);
5734 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5735 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5737 int i;
5738 struct ipa_bb_info *bi;
5739 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5740 free_ipa_bb_info (bi);
5741 fbi.bb_infos.release ();
5742 free_dominance_info (CDI_DOMINATORS);
5744 ipcp_transformation *s = ipcp_transformation_sum->get (node);
5745 s->agg_values = NULL;
5746 s->bits = NULL;
5747 s->m_vr = NULL;
5749 vec_free (descriptors);
5751 if (!something_changed)
5752 return 0;
5754 if (cfg_changed)
5755 delete_unreachable_blocks_update_callgraph (node, false);
5757 return TODO_update_ssa_only_virtuals;
5761 /* Return true if OTHER describes the same aggregate value. */
5762 bool
5763 ipa_agg_value::equal_to (const ipa_agg_value &other)
5765 return offset == other.offset
5766 && operand_equal_p (value, other.value, 0);
5768 #include "gt-ipa-prop.h"