/* Interprocedural analyses.
   Copyright (C) 2005-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "calls.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-inline.h"
#include "ipa-fnsummary.h"
#include "gimple-pretty-print.h"
#include "ipa-utils.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"
#include "tree-cfgcleanup.h"
#include "options.h"
/* Function summary where the parameter infos are actually stored. */
ipa_node_params_t *ipa_node_params_sum = NULL;

function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;

/* Edge summary for IPA-CP edge information. */
ipa_edge_args_sum_t *ipa_edge_args_sum;
/* Traits for a hash table for reusing already existing ipa_bits. */

struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
{
  typedef ipa_bits *value_type;
  typedef ipa_bits *compare_type;
  static hashval_t
  hash (const ipa_bits *p)
  {
    hashval_t t = (hashval_t) p->value.to_shwi ();
    return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
  }
  static bool
  equal (const ipa_bits *a, const ipa_bits *b)
  {
    return a->value == b->value && a->mask == b->mask;
  }
  static const bool empty_zero_p = true;
  static void
  mark_empty (ipa_bits *&p)
  {
    p = NULL;
  }
  static bool
  is_empty (const ipa_bits *p)
  {
    return p == NULL;
  }
  static bool
  is_deleted (const ipa_bits *p)
  {
    return p == reinterpret_cast<const ipa_bits *> (1);
  }
  static void
  mark_deleted (ipa_bits *&p)
  {
    p = reinterpret_cast<ipa_bits *> (1);
  }
};

/* Hash table for avoiding repeated allocations of equal ipa_bits. */
static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
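
/* A minimal sketch (not part of the original file) of how such a cache is
   meant to be used: build a temporary ipa_bits on the stack, look it up with
   find_slot, and allocate a GC-managed copy only on a miss, so equal
   value/mask pairs are shared.  The helper name below is hypothetical; this
   file defines a similar lookup helper further down, beyond this excerpt.

     static ipa_bits *
     lookup_or_create_ipa_bits (const widest_int &value, const widest_int &mask)
     {
       ipa_bits tmp;
       tmp.value = value;
       tmp.mask = mask;

       ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
       if (*slot)
         return *slot;            // reuse an existing, equal ipa_bits

       ipa_bits *res = ggc_alloc<ipa_bits> ();
       res->value = value;
       res->mask = mask;
       *slot = res;               // cache the freshly allocated one
       return res;
     }  */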
/* Traits for a hash table for reusing value_ranges used for IPA.  Note that
   the equiv bitmap is not hashed and is expected to be NULL. */

struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range *>
{
  typedef value_range *value_type;
  typedef value_range *compare_type;
  static hashval_t
  hash (const value_range *p)
  {
    inchash::hash hstate (p->kind ());
    inchash::add_expr (p->min (), hstate);
    inchash::add_expr (p->max (), hstate);
    return hstate.end ();
  }
  static bool
  equal (const value_range *a, const value_range *b)
  {
    return (a->equal_p (*b)
	    && types_compatible_p (a->type (), b->type ()));
  }
  static const bool empty_zero_p = true;
  static void
  mark_empty (value_range *&p)
  {
    p = NULL;
  }
  static bool
  is_empty (const value_range *p)
  {
    return p == NULL;
  }
  static bool
  is_deleted (const value_range *p)
  {
    return p == reinterpret_cast<const value_range *> (1);
  }
  static void
  mark_deleted (value_range *&p)
  {
    p = reinterpret_cast<value_range *> (1);
  }
};

/* Hash table for avoiding repeated allocations of equal value_ranges. */
static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
/* Holders of ipa cgraph hooks: */
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant. */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference. */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned. */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control. */
  int refcount;
};

/* Allocation pool for reference descriptions. */

static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP. */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize)
	 || !opt_for_fn (node->decl, flag_ipa_cp);
}
/* Return the index of the formal whose tree is PTREE in the function whose
   parameters are described by DESCRIPTORS. */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
			    tree ptree)
{
  int i, count;

  count = vec_safe_length (descriptors);
  for (i = 0; i < count; i++)
    if ((*descriptors)[i].decl_or_type == ptree)
      return i;

  return -1;
}

/* Return the index of the formal whose tree is PTREE in the function which
   corresponds to INFO. */

int
ipa_get_param_decl_index (class ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE. */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor, va_gc> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl_or_type = parm;
      unsigned int cost = estimate_move_cost (TREE_TYPE (parm), true);
      descriptors[param_num].move_cost = cost;
      /* Watch overflow, move_cost is a bitfield. */
      gcc_checking_assert (cost == descriptors[param_num].move_cost);
      param_num++;
    }
}
/* Return how many formal parameters FNDECL has. */

static int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;

  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}
/* Dump the Ith formal parameter of the function corresponding to INFO to
   FILE. */

void
ipa_dump_param (FILE *file, class ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if ((*info->descriptors)[i].decl_or_type)
    {
      fprintf (file, " ");
      print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
    }
}
/* If necessary, allocate vector of parameter descriptors in info of NODE.
   Return true if they were allocated, false if not. */

static bool
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  class ipa_node_params *info = IPA_NODE_REF_GET_CREATE (node);

  if (!info->descriptors && param_count)
    {
      vec_safe_grow_cleared (info->descriptors, param_count, true);
      return true;
    }
  else
    return false;
}
/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls. */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  class ipa_node_params *info = IPA_NODE_REF_GET_CREATE (node);

  if (!info->descriptors
      && ipa_alloc_node_params (node, count_formal_params (node->decl)))
    ipa_populate_param_decls (node, *info->descriptors);
}
/* Print the jump functions associated with call graph edge CS to file F. */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f, jump_func->value.pass_through.operand);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_ELT (*jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      fprintf (f, "type: ");
	      print_generic_expr (f, item->type);
	      fprintf (f, ", ");
	      if (item->jftype == IPA_JF_PASS_THROUGH)
		fprintf (f, "PASS THROUGH: %d,",
			 item->value.pass_through.formal_id);
	      else if (item->jftype == IPA_JF_LOAD_AGG)
		{
		  fprintf (f, "LOAD AGG: %d",
			   item->value.pass_through.formal_id);
		  fprintf (f, " [offset: " HOST_WIDE_INT_PRINT_DEC ", by %s],",
			   item->value.load_agg.offset,
			   item->value.load_agg.by_ref ? "reference"
						       : "value");
		}

	      if (item->jftype == IPA_JF_PASS_THROUGH
		  || item->jftype == IPA_JF_LOAD_AGG)
		{
		  fprintf (f, " op %s",
			   get_tree_code_name (item->value.pass_through.operation));
		  if (item->value.pass_through.operation != NOP_EXPR)
		    {
		      fprintf (f, " ");
		      print_generic_expr (f, item->value.pass_through.operand);
		    }
		}
	      else if (item->jftype == IPA_JF_CONST)
		{
		  fprintf (f, "CONST: ");
		  print_generic_expr (f, item->value.constant);
		}
	      else if (item->jftype == IPA_JF_UNKNOWN)
		fprintf (f, "UNKNOWN: " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->type)));
	      fprintf (f, "\n");
	    }
	}

      class ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, "         Context: ");
	  ctx->dump (f);
	}

      if (jump_func->bits)
	{
	  fprintf (f, "         value: ");
	  print_hex (jump_func->bits->value, f);
	  fprintf (f, ", mask: ");
	  print_hex (jump_func->bits->mask, f);
	  fprintf (f, "\n");
	}
      else
	fprintf (f, "         Unknown bits\n");

      if (jump_func->m_vr)
	{
	  fprintf (f, "         VR  ");
	  fprintf (f, "%s[",
		   (jump_func->m_vr->kind () == VR_ANTI_RANGE) ? "~" : "");
	  print_decs (wi::to_wide (jump_func->m_vr->min ()), f);
	  fprintf (f, ", ");
	  print_decs (wi::to_wide (jump_func->m_vr->max ()), f);
	  fprintf (f, "]\n");
	}
      else
	fprintf (f, "         Unknown VR\n");
    }
}
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F. */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s:\n", node->dump_name ());
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      fprintf (f, "    callsite  %s -> %s : \n",
	       node->dump_name (),
	       cs->callee->dump_name ());
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	fprintf (f, "       no arg info\n");
      else
	ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      class cgraph_indirect_call_info *ii;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	fprintf (f, "       no arg info\n");
      else
	ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
/* Print ipa_jump_func data structures of all nodes in the call graph to F. */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

/* Set JFUNC to be a jump function that knows nothing about the actual
   argument. */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
}
/* Set DST to be a copy of another jump function SRC (to be used by jump
   function combination code).  The two functions will share their rdesc. */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}
/* Set JFUNC to be a constant jump function. */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
/* Set JFUNC to be a simple pass-through jump function. */
static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}

/* Set JFUNC to be a unary pass-through jump function. */

static void
ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an arithmetic pass-through jump function. */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an ancestor jump function. */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body. */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change. */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for. */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change. */
  tree object;
  /* Set to true if dynamic type change has been detected. */
  bool type_maybe_changed;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can
   try to derive the new type.  That is enough and we can stop, we will never
   see the calls into constructors of sub-objects in this code.  Therefore we
   can safely ignore all call statements that we traverse. */
static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_ref_base_and_extent to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref. */
	}
    }
  return true;
}
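
/* For illustration only (not part of the original source), a hypothetical
   constructor to which the three-section assumption above applies:

     struct A { virtual void f (); int i; };
     struct B : A { virtual void f (); int j; };

     B::B () : A ()     // section 1: ancestor constructors run first
     {
       // section 2: stores of B's VMT pointer(s) are emitted here
       j = 0;           // section 3: user code; may call virtuals
     }

   A backward walk from a statement in section 3 can therefore skip over any
   calls it meets before reaching the VMT stores of section 2. */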
/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure. */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type may change
   between the beginning of the function and the point where CALL is invoked.

   Generally, functions are not allowed to change the dynamic type of such
   instances, but they may call destructors.  We assume that methods cannot
   destroy the THIS pointer.  Also, as a special case, constructors and
   destructors may change the type of the THIS pointer. */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that would require writing to memory. */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inlined cdtor is actually working on ARG, but we don't have
     an easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and, once we know the type
     of the instance, match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization. */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument. */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type. */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors. */
	  for (tree block = gimple_block (call);
	       block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it has, return true.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary. */

static bool
detect_type_change_from_memory_writes (ipa_func_body_info *fbi, tree arg,
				       tree base, tree comp_type, gcall *call,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter. */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic. */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  int walked
    = walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
			  &tci, NULL, NULL, fbi->aa_walk_budget + 1);

  if (walked >= 0 && !tci.type_maybe_changed)
    return false;

  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it may have, return true.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset. */

static bool
detect_type_change (ipa_func_body_info *fbi, tree arg, tree base,
		    tree comp_type, gcall *call,
		    HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (fbi, arg, base, comp_type,
						call, offset);
}
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero). */

static bool
detect_type_change_ssa (ipa_func_body_info *fbi, tree arg, tree comp_type,
			gcall *call)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (fbi, arg, arg, comp_type,
						call, 0);
}
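
/* Illustrative note (not in the original source): for a pointer argument
   arg_1(D), the build2 call above synthesizes the tree equivalent of the
   GIMPLE reference

     MEM[(void *) arg_1(D) + 0]

   so that detect_type_change_from_memory_writes, which expects a memory
   reference rather than a bare pointer, can be reused with offset zero. */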
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA. */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB. */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary. */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count, true);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but which does not survive the summary building stage. */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  if (TREE_READONLY (base))
    return true;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  if (paa->parm_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget + 1);
  if (walked < 0)
    {
      modified = true;
      if (fbi)
	fbi->aa_walk_budget = 0;
    }
  else if (fbi)
    fbi->aa_walk_budget -= walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params, provided it has not
   been modified.  Otherwise return -1. */

static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
			    vec<ipa_param_descriptor, va_gc> *descriptors,
			    gimple *stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
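
/* Illustrative note (not in the original source): for a function like

     foo (int a)
     {
       int a.0;

       a.0_2 = a;      <- load_from_unmodified_param returns the index of "a"
       bar (a.0_2);
     }

   the load is recognized only when parm_preserved_before_stmt_p proves that
   "a" was not written to between the start of the function and the load. */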
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT. */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  if (paa->ref_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget + 1);
  if (walked < 0)
    {
      modified = true;
      fbi->aa_walk_budget = 0;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (modified)
    paa->ref_modified = true;
  return !modified;
}
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body. */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers. */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm)))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget + 1);
  if (walked < 0)
    {
      fbi->aa_walk_budget = 0;
      modified = true;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
/* Return true if we can prove that OP is a memory reference loading
   data from an aggregate passed as a parameter.

   The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it
   returns false if it cannot prove that the value has not been modified
   before the load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will
   return true even if it cannot prove the value has not been modified; in
   that case it will store false to *GUARANTEED_UNMODIFIED, otherwise it will
   store true there.

   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If the function returns
   true, *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter
   index, the offset within the aggregate and whether it is a load from a
   value passed by reference, respectively. */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor, va_gc> *descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, poly_int64 *size_p,
			bool *by_ref_p, bool *guaranteed_unmodified)
{
  int index;
  HOST_WIDE_INT size;
  bool reverse;
  tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);

  if (!base)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  if (guaranteed_unmodified)
	    *guaranteed_unmodified = true;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	   void (*<T2e4>) (struct S *) D.1867;
	   struct S * p.1;

	   <bb 2>:
	   p.1_1 = p;
	   D.1867_2 = p.1_1->f;
	   D.1867_2 ();
	   gdp = &p;
      */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0)
    {
      bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
      if (!data_preserved && !guaranteed_unmodified)
	return false;

      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      if (guaranteed_unmodified)
	*guaranteed_unmodified = data_preserved;
      return true;
    }
  return false;
}
/* If STMT is an assignment that loads a value from a parameter declaration,
   or from an aggregate passed as the parameter either by value or reference,
   return the index of the parameter in ipa_node_params.  Otherwise return -1.

   FBI holds gathered information about the function.  INFO describes
   parameters of the function, STMT is the assignment statement.  If it is a
   memory load from an aggregate, *OFFSET_P is filled with offset within the
   aggregate, and *BY_REF_P specifies whether the aggregate is passed by
   reference. */

static int
load_from_unmodified_param_or_agg (struct ipa_func_body_info *fbi,
				   class ipa_node_params *info,
				   gimple *stmt,
				   HOST_WIDE_INT *offset_p,
				   bool *by_ref_p)
{
  int index = load_from_unmodified_param (fbi, info->descriptors, stmt);
  poly_int64 size;

  /* Load value from a parameter declaration. */
  if (index >= 0)
    {
      *offset_p = -1;
      return index;
    }

  if (!gimple_assign_load_p (stmt))
    return -1;

  tree rhs = gimple_assign_rhs1 (stmt);

  /* Skip memory reference containing VIEW_CONVERT_EXPR. */
  for (tree t = rhs; handled_component_p (t); t = TREE_OPERAND (t, 0))
    if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
      return -1;

  /* Skip memory reference containing bit-field. */
  if (TREE_CODE (rhs) == BIT_FIELD_REF
      || contains_bitfld_component_ref_p (rhs))
    return -1;

  if (!ipa_load_from_parm_agg (fbi, info->descriptors, stmt, rhs, &index,
			       offset_p, &size, by_ref_p))
    return -1;

  gcc_assert (!maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (rhs))),
			 size));
  if (!*by_ref_p)
    {
      tree param_type = ipa_get_type (info, index);

      if (!param_type || !AGGREGATE_TYPE_P (param_type))
	return -1;
    }
  else if (TREE_THIS_VOLATILE (rhs))
    return -1;

  return index;
}
/* Walk pointer adjustments from OP (such as POINTER_PLUS and ADDR_EXPR)
   to find the original pointer.  Initialize RET to the pointer which results
   from the walk.
   If the offset is known, return true and initialize OFFSET_RET. */

bool
unadjusted_ptr_and_unit_offset (tree op, tree *ret, poly_int64 *offset_ret)
{
  poly_int64 offset = 0;
  bool offset_known = true;
  int i;

  for (i = 0; i < param_ipa_jump_function_lookups; i++)
    {
      if (TREE_CODE (op) == ADDR_EXPR)
	{
	  poly_int64 extra_offset = 0;
	  tree base = get_addr_base_and_unit_offset (TREE_OPERAND (op, 0),
						     &extra_offset);
	  if (!base)
	    {
	      base = get_base_address (TREE_OPERAND (op, 0));
	      if (TREE_CODE (base) != MEM_REF)
		break;
	      offset_known = false;
	    }
	  else
	    {
	      if (TREE_CODE (base) != MEM_REF)
		break;
	      offset += extra_offset;
	    }
	  op = TREE_OPERAND (base, 0);
	  if (mem_ref_offset (base).to_shwi (&extra_offset))
	    offset += extra_offset;
	  else
	    offset_known = false;
	}
      else if (TREE_CODE (op) == SSA_NAME
	       && !SSA_NAME_IS_DEFAULT_DEF (op))
	{
	  gimple *pstmt = SSA_NAME_DEF_STMT (op);

	  if (gimple_assign_single_p (pstmt))
	    op = gimple_assign_rhs1 (pstmt);
	  else if (is_gimple_assign (pstmt)
		   && gimple_assign_rhs_code (pstmt) == POINTER_PLUS_EXPR)
	    {
	      poly_int64 extra_offset = 0;
	      if (ptrdiff_tree_p (gimple_assign_rhs2 (pstmt),
				  &extra_offset))
		offset += extra_offset;
	      else
		offset_known = false;
	      op = gimple_assign_rhs1 (pstmt);
	    }
	  else
	    break;
	}
      else
	break;
    }
  *ret = op;
  *offset_ret = offset;
  return offset_known;
}
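
/* Illustrative note (not in the original source): on a GIMPLE chain such as

     q_1 = p_2(D) + 16;   <- POINTER_PLUS_EXPR, offset += 16
     r_3 = q_1;           <- plain SSA copy, offset unchanged

   the walk above reports the original pointer p_2(D) with a total unit
   offset of 16, and gives up (without claiming a known offset) after
   param_ipa_jump_function_lookups iterations or at the first statement it
   does not understand. */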
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
	int a.0;

	a.0_2 = a;
	bar (a.0_2);
      }

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
	int D.2064;

	D.2064_4 = a.1(D) + 4;
	bar (D.2064_4);
      }

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
	int a.0;
	int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);
      }

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);
     }

   INFO is the structure describing individual parameters at different stages
   of IPA optimizations.  PARMS_AINFO contains the information that is only
   needed for intraprocedural analysis. */

static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  class ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      switch (gimple_assign_rhs_class (stmt))
	{
	case GIMPLE_BINARY_RHS:
	  {
	    tree op2 = gimple_assign_rhs2 (stmt);
	    if (!is_gimple_ip_invariant (op2)
		|| ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
		     != tcc_comparison)
		    && !useless_type_conversion_p (TREE_TYPE (name),
						   TREE_TYPE (op1))))
	      return;

	    ipa_set_jf_arith_pass_through (jfunc, index, op2,
					   gimple_assign_rhs_code (stmt));
	    break;
	  }
	case GIMPLE_SINGLE_RHS:
	  {
	    bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
						       tc_ssa);
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	    break;
	  }
	case GIMPLE_UNARY_RHS:
	  if (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
	    ipa_set_jf_unary_pass_through (jfunc, index,
					   gimple_assign_rhs_code (stmt));
	default:;
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
  offset_int mem_offset;
  if (!base
      || TREE_CODE (base) != MEM_REF
      || !mem_ref_offset (base).is_constant (&mem_offset))
    return;
  offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors. */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success,
   the whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The
   whole RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);

  offset_int mem_offset;
  if (!expr
      || TREE_CODE (expr) != MEM_REF
      || !mem_ref_offset (expr).is_constant (&mem_offset))
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    class ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there. */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  /* The second field must have an integral type; check the type of the
     FIELD_DECL, not the decl itself. */
  if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
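
/* For reference (not part of the original source), the layout this predicate
   accepts matches the common Itanium-ABI representation of a pointer to
   member function, conceptually:

     struct member_ptr
     {
       void (SomeClass::*ptr) ();   // method pointer (or vtable offset)
       ptrdiff_t delta;             // adjustment added to THIS
     };

   i.e. a RECORD_TYPE with exactly two fields: a METHOD_TYPE pointer followed
   by an integral delta. */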
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement, and this statement is stored in
   *RHS_STMT.  Otherwise return RHS as it is. */

static inline tree
get_ssa_def_if_simple_copy (tree rhs, gimple **rhs_stmt)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
      *rhs_stmt = def_stmt;
    }
  return rhs;
}
/* Simple linked list, describing contents of an aggregate before call. */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate. */
  HOST_WIDE_INT offset, size;

  /* Type of the described part of the aggregate. */
  tree type;

  /* Known constant value or jump function data describing contents. */
  struct ipa_load_agg_data value;

  /* Pointer to the next structure in the list. */
  struct ipa_known_agg_contents_list *next;
};

/* Add an aggregate content item into a linked list of
   ipa_known_agg_contents_list structures, in which all elements
   are sorted by ascending offset. */

static inline void
add_to_agg_contents_list (struct ipa_known_agg_contents_list **plist,
			  struct ipa_known_agg_contents_list *item)
{
  struct ipa_known_agg_contents_list *list = *plist;

  for (; list; list = list->next)
    {
      if (list->offset >= item->offset)
	break;

      plist = &list->next;
    }

  item->next = list;
  *plist = item;
}
/* Check whether a given aggregate content is clobbered by some element in
   a linked list of ipa_known_agg_contents_list. */

static inline bool
clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list *list,
				struct ipa_known_agg_contents_list *item)
{
  for (; list; list = list->next)
    {
      if (list->offset >= item->offset)
	return list->offset < item->offset + item->size;

      if (list->offset + list->size > item->offset)
	return true;
    }

  return false;
}
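
/* Illustrative note (not in the original source): since the list is sorted
   by ascending offset, the check above is a plain interval-overlap test.
   For instance, with an existing entry covering offsets [8, 16), an ITEM
   covering [12, 20) overlaps and is reported as clobbered, while an ITEM
   covering [16, 24) is not. */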
/* Build aggregate jump function from LIST, assuming there are exactly
   VALUE_COUNT entries there and that the offset of the passed argument
   is ARG_OFFSET, and store it into JFUNC. */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int value_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, value_count);
  for (; list; list = list->next)
    {
      struct ipa_agg_jf_item item;
      tree operand = list->value.pass_through.operand;

      if (list->value.pass_through.formal_id >= 0)
	{
	  /* Content value is derived from some formal parameter. */
	  if (list->value.offset >= 0)
	    item.jftype = IPA_JF_LOAD_AGG;
	  else
	    item.jftype = IPA_JF_PASS_THROUGH;

	  item.value.load_agg = list->value;
	  if (operand)
	    item.value.pass_through.operand
	      = unshare_expr_without_location (operand);
	}
      else if (operand)
	{
	  /* Content value is known constant. */
	  item.jftype = IPA_JF_CONST;
	  item.value.constant = unshare_expr_without_location (operand);
	}
      else
	continue;

      item.type = list->type;
      gcc_assert (tree_to_shwi (TYPE_SIZE (list->type)) == list->size);

      item.offset = list->offset - arg_offset;
      gcc_assert ((item.offset % BITS_PER_UNIT) == 0);

      jfunc->agg.items->quick_push (item);
    }
}
/* Given an assignment statement STMT, try to collect information into
   AGG_VALUE that will be used to construct a jump function for the RHS of the
   assignment, from which the content value of an aggregate part comes.

   Besides constant and simple pass-through jump functions, also try to
   identify whether it matches the following pattern that can be described by
   a load-value-from-aggregate jump function, which is a derivative of simple
   pass-through jump function.

     foo (int *p)
       {
	 ...

	 *(q_5 + 4) = *(p_3(D) + 28) op 1;
	 bar (q_5);
       }

   Here the IPA_LOAD_AGG_DATA data structure is informative enough to describe
   constant, simple pass-through and load-value-from-aggregate.  If the value
   is constant, it will be kept in field OPERAND, and field FORMAL_ID is
   set to -1.  For simple pass-through and load-value-from-aggregate, field
   FORMAL_ID specifies the related formal parameter index, and field
   OFFSET can be used to distinguish them: -1 means simple pass-through,
   otherwise it means load-value-from-aggregate. */

static void
analyze_agg_content_value (struct ipa_func_body_info *fbi,
			   struct ipa_load_agg_data *agg_value,
			   gimple *stmt)
{
  tree lhs = gimple_assign_lhs (stmt);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  enum tree_code code;
  int index = -1;

  /* Initialize jump function data for the aggregate part. */
  memset (agg_value, 0, sizeof (*agg_value));
  agg_value->pass_through.operation = NOP_EXPR;
  agg_value->pass_through.formal_id = -1;
  agg_value->offset = -1;

  if (AGGREGATE_TYPE_P (TREE_TYPE (lhs)) /* TODO: Support aggregate type. */
      || TREE_THIS_VOLATILE (lhs)
      || TREE_CODE (lhs) == BIT_FIELD_REF
      || contains_bitfld_component_ref_p (lhs))
    return;

  /* Skip SSA copies. */
  while (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
    {
      if (TREE_CODE (rhs1) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (rhs1))
	break;

      stmt = SSA_NAME_DEF_STMT (rhs1);
      if (!is_gimple_assign (stmt))
	return;

      rhs1 = gimple_assign_rhs1 (stmt);
    }

  code = gimple_assign_rhs_code (stmt);
  switch (gimple_assign_rhs_class (stmt))
    {
    case GIMPLE_SINGLE_RHS:
      if (is_gimple_ip_invariant (rhs1))
	{
	  agg_value->pass_through.operand = rhs1;
	  return;
	}
      code = NOP_EXPR;
      break;

    case GIMPLE_UNARY_RHS:
      /* NOTE: A GIMPLE_UNARY_RHS operation might not be tcc_unary
	 (truth_not_expr is an example), and GIMPLE_BINARY_RHS does not imply
	 tcc_binary; this subtleness is somewhat misleading.

	 Since tcc_unary is widely used in IPA-CP code to check an operation
	 with one operand, here we only allow tcc_unary operations to avoid
	 possible problems.  Then we can use (opclass == tcc_unary) or not to
	 distinguish unary and binary. */
      if (TREE_CODE_CLASS (code) != tcc_unary || CONVERT_EXPR_CODE_P (code))
	return;

      rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
      break;

    case GIMPLE_BINARY_RHS:
      {
	gimple *rhs1_stmt = stmt;
	gimple *rhs2_stmt = stmt;
	tree rhs2 = gimple_assign_rhs2 (stmt);

	rhs1 = get_ssa_def_if_simple_copy (rhs1, &rhs1_stmt);
	rhs2 = get_ssa_def_if_simple_copy (rhs2, &rhs2_stmt);

	if (is_gimple_ip_invariant (rhs2))
	  {
	    agg_value->pass_through.operand = rhs2;
	    stmt = rhs1_stmt;
	  }
	else if (is_gimple_ip_invariant (rhs1))
	  {
	    if (TREE_CODE_CLASS (code) == tcc_comparison)
	      code = swap_tree_comparison (code);
	    else if (!commutative_tree_code (code))
	      return;

	    agg_value->pass_through.operand = rhs1;
	    stmt = rhs2_stmt;
	    rhs1 = rhs2;
	  }
	else
	  return;

	if (TREE_CODE_CLASS (code) != tcc_comparison
	    && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs1)))
	  return;
      }
      break;

    default:
      return;
    }

  if (TREE_CODE (rhs1) != SSA_NAME)
    index = load_from_unmodified_param_or_agg (fbi, fbi->info, stmt,
					       &agg_value->offset,
					       &agg_value->by_ref);
  else if (SSA_NAME_IS_DEFAULT_DEF (rhs1))
    index = ipa_get_param_decl_index (fbi->info, SSA_NAME_VAR (rhs1));

  if (index >= 0)
    {
      if (agg_value->offset >= 0)
	agg_value->type = TREE_TYPE (rhs1);
      agg_value->pass_through.formal_id = index;
      agg_value->pass_through.operation = code;
    }
  else
    agg_value->pass_through.operand = NULL_TREE;
}
/* If STMT is a memory store to the object whose address is BASE, extract
   information (offset, size, and value) into CONTENT, and return true,
   otherwise we conservatively assume the whole object is modified with
   unknown content, and return false.  CHECK_REF means that access to object
   is expected to be in form of MEM_REF expression. */

static bool
extract_mem_content (struct ipa_func_body_info *fbi,
		     gimple *stmt, tree base, bool check_ref,
		     struct ipa_known_agg_contents_list *content)
{
  HOST_WIDE_INT lhs_offset, lhs_size;
  bool reverse;

  if (!is_gimple_assign (stmt))
    return false;

  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset, &lhs_size,
					       &reverse);
  if (!lhs_base)
    return false;

  if (check_ref)
    {
      if (TREE_CODE (lhs_base) != MEM_REF
	  || TREE_OPERAND (lhs_base, 0) != base
	  || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	return false;
    }
  else if (lhs_base != base)
    return false;

  content->offset = lhs_offset;
  content->size = lhs_size;
  content->type = TREE_TYPE (lhs);
  content->next = NULL;

  analyze_agg_content_value (fbi, &content->value, stmt);
  return true;
}
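
/* Illustrative note (not in the original source): for a store like

     *(p_3(D) + 8) = 42;

   with BASE being p_3(D) and CHECK_REF true, the extraction above fills
   CONTENT with a bit offset of 64, the size and type of the stored value,
   and analyze_agg_content_value then classifies the RHS as the constant
   42. */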
1906 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1907 in ARG is filled in constants or values that are derived from caller's
1908 formal parameter in the way described by some kinds of jump functions. FBI
1909 is the context of the caller function for interprocedural analysis. ARG can
1910 either be an aggregate expression or a pointer to an aggregate. ARG_TYPE is
1911 the type of the aggregate, JFUNC is the jump function for the aggregate. */
1913 static void
1914 determine_known_aggregate_parts (struct ipa_func_body_info *fbi,
1915 gcall *call, tree arg,
1916 tree arg_type,
1917 struct ipa_jump_func *jfunc)
1919 struct ipa_known_agg_contents_list *list = NULL, *all_list = NULL;
1920 bitmap visited = NULL;
1921 int item_count = 0, value_count = 0;
1922 HOST_WIDE_INT arg_offset, arg_size;
1923 tree arg_base;
1924 bool check_ref, by_ref;
1925 ao_ref r;
1926 int max_agg_items = opt_for_fn (fbi->node->decl, param_ipa_max_agg_items);
1928 if (max_agg_items == 0)
1929 return;
1931 /* The function operates in three stages. First, we prepare check_ref, r,
1932 arg_base and arg_offset based on what is actually passed as an actual
1933 argument. */
1935 if (POINTER_TYPE_P (arg_type))
1937 by_ref = true;
1938 if (TREE_CODE (arg) == SSA_NAME)
1940 tree type_size;
1941 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type)))
1942 || !POINTER_TYPE_P (TREE_TYPE (arg)))
1943 return;
1944 check_ref = true;
1945 arg_base = arg;
1946 arg_offset = 0;
1947 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1948 arg_size = tree_to_uhwi (type_size);
1949 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1951 else if (TREE_CODE (arg) == ADDR_EXPR)
1953 bool reverse;
1955 arg = TREE_OPERAND (arg, 0);
1956 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
1957 &arg_size, &reverse);
1958 if (!arg_base)
1959 return;
1960 if (DECL_P (arg_base))
1962 check_ref = false;
1963 ao_ref_init (&r, arg_base);
1965 else
1966 return;
1968 else
1969 return;
1971 else
1973 bool reverse;
1975 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1977 by_ref = false;
1978 check_ref = false;
1979 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
1980 &arg_size, &reverse);
1981 if (!arg_base)
1982 return;
1984 ao_ref_init (&r, arg);
1987 /* The second stage traverses the virtual SSA web backwards from the call
1988 statement, looking only at individual dominating virtual operands (those
1989 whose definitions dominate the call). As long as it is confident that the
1990 aggregate's content is affected by the definition of a virtual operand, it
1991 builds a sorted linked list of ipa_known_agg_contents_list describing that. */
1993 for (tree dom_vuse = gimple_vuse (call); dom_vuse;)
1995 gimple *stmt = SSA_NAME_DEF_STMT (dom_vuse);
1997 if (gimple_code (stmt) == GIMPLE_PHI)
1999 dom_vuse = get_continuation_for_phi (stmt, &r, true,
2000 fbi->aa_walk_budget,
2001 &visited, false, NULL, NULL);
2002 continue;
2005 if (stmt_may_clobber_ref_p_1 (stmt, &r))
2007 struct ipa_known_agg_contents_list *content
2008 = XALLOCA (struct ipa_known_agg_contents_list);
2010 if (!extract_mem_content (fbi, stmt, arg_base, check_ref, content))
2011 break;
2013 /* Now we have a dominating virtual operand, and need to check
2014 whether its value is clobbered by any other dominating one. */
2015 if ((content->value.pass_through.formal_id >= 0
2016 || content->value.pass_through.operand)
2017 && !clobber_by_agg_contents_list_p (all_list, content))
2019 struct ipa_known_agg_contents_list *copy
2020 = XALLOCA (struct ipa_known_agg_contents_list);
2022 /* Add to the list consisting of only dominating virtual
2023 operands, whose definitions can finally reach the call. */
2024 add_to_agg_contents_list (&list, (*copy = *content, copy));
2026 if (++value_count == max_agg_items)
2027 break;
2030 /* Add to the list consisting of all dominating virtual operands. */
2031 add_to_agg_contents_list (&all_list, content);
2033 if (++item_count == 2 * max_agg_items)
2034 break;
2036 dom_vuse = gimple_vuse (stmt);
2039 if (visited)
2040 BITMAP_FREE (visited);
2042 /* Third stage just goes over the list and creates an appropriate vector of
2043 ipa_agg_jf_item structures out of it, of course only if there are
2044 any meaningful items to begin with. */
2046 if (value_count)
2048 jfunc->agg.by_ref = by_ref;
2049 build_agg_jump_func_from_list (list, value_count, arg_offset, jfunc);
2054 /* Return the Ith param type of callee associated with call graph
2055 edge E. */
2057 tree
2058 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
2060 int n;
2061 tree type = (e->callee
2062 ? TREE_TYPE (e->callee->decl)
2063 : gimple_call_fntype (e->call_stmt));
2064 tree t = TYPE_ARG_TYPES (type);
2066 for (n = 0; n < i; n++)
2068 if (!t)
2069 break;
2070 t = TREE_CHAIN (t);
2072 if (t)
2073 return TREE_VALUE (t);
2074 if (!e->callee)
2075 return NULL;
2076 t = DECL_ARGUMENTS (e->callee->decl);
2077 for (n = 0; n < i; n++)
2079 if (!t)
2080 return NULL;
2081 t = TREE_CHAIN (t);
2083 if (t)
2084 return TREE_TYPE (t);
2085 return NULL;
2088 /* Return ipa_bits with VALUE and MASK values, which can be either a newly
2089 allocated structure or a previously existing one shared with other jump
2090 functions and/or transformation summaries. */
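/* A minimal usage sketch (illustrative only, not code that appears in GCC):
both lookups below yield the same GC-allocated structure, so equal
known-bits information is shared rather than duplicated:

     ipa_bits *a = ipa_get_ipa_bits_for_value (0, 3);
     ipa_bits *b = ipa_get_ipa_bits_for_value (0, 3);
     gcc_checking_assert (a == b);  */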
2092 ipa_bits *
2093 ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
2095 ipa_bits tmp;
2096 tmp.value = value;
2097 tmp.mask = mask;
2099 ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
2100 if (*slot)
2101 return *slot;
2103 ipa_bits *res = ggc_alloc<ipa_bits> ();
2104 res->value = value;
2105 res->mask = mask;
2106 *slot = res;
2108 return res;
2111 /* Assign to JF a pointer to an ipa_bits structure with VALUE and MASK. Use a
2112 hash table in order to avoid creating multiple identical ipa_bits structures. */
2114 static void
2115 ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
2116 const widest_int &mask)
2118 jf->bits = ipa_get_ipa_bits_for_value (value, mask);
2121 /* Return a pointer to a value_range just like *TMP, but either find it in
2122 ipa_vr_hash_table or allocate it in GC memory. TMP->equiv must be NULL. */
2124 static value_range *
2125 ipa_get_value_range (value_range *tmp)
2127 value_range **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
2128 if (*slot)
2129 return *slot;
2131 value_range *vr = new (ggc_alloc<value_range> ()) value_range;
2132 *vr = *tmp;
2133 *slot = vr;
2135 return vr;
2138 /* Return a pointer to a value range consisting of KIND, MIN, MAX and an empty
2139 equiv set. Use a hash table in order to avoid creating multiple identical
2140 copies of value_ranges. */
2142 static value_range *
2143 ipa_get_value_range (enum value_range_kind kind, tree min, tree max)
2145 value_range tmp (min, max, kind);
2146 return ipa_get_value_range (&tmp);
2149 /* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
2150 a NULL equiv bitmap. Use a hash table in order to avoid creating multiple
2151 identical value_range structures. */
2153 static void
2154 ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_kind type,
2155 tree min, tree max)
2157 jf->m_vr = ipa_get_value_range (type, min, max);
2160 /* Assign to JF a pointer to a value_range just like TMP but either fetch a
2161 copy from ipa_vr_hash_table or allocate a new one in GC memory. */
2163 static void
2164 ipa_set_jfunc_vr (ipa_jump_func *jf, value_range *tmp)
2166 jf->m_vr = ipa_get_value_range (tmp);
2169 /* Compute jump function for all arguments of callsite CS and insert the
2170 information in the jump_functions array in the ipa_edge_args corresponding
2171 to this callsite. */
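/* For example (schematic, hypothetical names): for a call "foo (i_1, 7,
&gvar)" where I_1 is an unmodified copy of the caller's first formal
parameter and GVAR is a global variable, the loop below produces a simple
pass-through jump function for the first argument and constant jump
functions for the other two. */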
2173 static void
2174 ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
2175 struct cgraph_edge *cs)
2177 class ipa_node_params *info = IPA_NODE_REF (cs->caller);
2178 class ipa_edge_args *args = IPA_EDGE_REF_GET_CREATE (cs);
2179 gcall *call = cs->call_stmt;
2180 int n, arg_num = gimple_call_num_args (call);
2181 bool useful_context = false;
2183 if (arg_num == 0 || args->jump_functions)
2184 return;
2185 vec_safe_grow_cleared (args->jump_functions, arg_num, true);
2186 if (flag_devirtualize)
2187 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num, true);
2189 if (gimple_call_internal_p (call))
2190 return;
2191 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
2192 return;
2194 for (n = 0; n < arg_num; n++)
2196 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
2197 tree arg = gimple_call_arg (call, n);
2198 tree param_type = ipa_get_callee_param_type (cs, n);
2199 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
2201 tree instance;
2202 class ipa_polymorphic_call_context context (cs->caller->decl,
2203 arg, cs->call_stmt,
2204 &instance);
2205 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt,
2206 &fbi->aa_walk_budget);
2207 *ipa_get_ith_polymorhic_call_context (args, n) = context;
2208 if (!context.useless_p ())
2209 useful_context = true;
2212 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2214 bool addr_nonzero = false;
2215 bool strict_overflow = false;
2217 if (TREE_CODE (arg) == SSA_NAME
2218 && param_type
2219 && get_ptr_nonnull (arg))
2220 addr_nonzero = true;
2221 else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
2222 addr_nonzero = true;
2224 if (addr_nonzero)
2226 tree z = build_int_cst (TREE_TYPE (arg), 0);
2227 ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
2229 else
2230 gcc_assert (!jfunc->m_vr);
2232 else
2234 wide_int min, max;
2235 value_range_kind kind;
2236 if (TREE_CODE (arg) == SSA_NAME
2237 && param_type
2238 && (kind = get_range_info (arg, &min, &max))
2239 && (kind == VR_RANGE || kind == VR_ANTI_RANGE))
2241 value_range resvr;
2242 value_range tmpvr (wide_int_to_tree (TREE_TYPE (arg), min),
2243 wide_int_to_tree (TREE_TYPE (arg), max),
2244 kind);
2245 range_fold_unary_expr (&resvr, NOP_EXPR, param_type,
2246 &tmpvr, TREE_TYPE (arg));
2247 if (!resvr.undefined_p () && !resvr.varying_p ())
2248 ipa_set_jfunc_vr (jfunc, &resvr);
2249 else
2250 gcc_assert (!jfunc->m_vr);
2252 else
2253 gcc_assert (!jfunc->m_vr);
2256 if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
2257 && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
2259 if (TREE_CODE (arg) == SSA_NAME)
2260 ipa_set_jfunc_bits (jfunc, 0,
2261 widest_int::from (get_nonzero_bits (arg),
2262 TYPE_SIGN (TREE_TYPE (arg))));
2263 else
2264 ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
2266 else if (POINTER_TYPE_P (TREE_TYPE (arg)))
2268 unsigned HOST_WIDE_INT bitpos;
2269 unsigned align;
2271 get_pointer_alignment_1 (arg, &align, &bitpos);
2272 widest_int mask = wi::bit_and_not
2273 (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
2274 align / BITS_PER_UNIT - 1);
2275 widest_int value = bitpos / BITS_PER_UNIT;
2276 ipa_set_jfunc_bits (jfunc, value, mask);
2278 else
2279 gcc_assert (!jfunc->bits);
2281 if (is_gimple_ip_invariant (arg)
2282 || (VAR_P (arg)
2283 && is_global_var (arg)
2284 && TREE_READONLY (arg)))
2285 ipa_set_jf_constant (jfunc, arg, cs);
2286 else if (!is_gimple_reg_type (TREE_TYPE (arg))
2287 && TREE_CODE (arg) == PARM_DECL)
2289 int index = ipa_get_param_decl_index (info, arg);
2291 gcc_assert (index >= 0);
2292 /* Aggregate passed by value, check for pass-through, otherwise we
2293 will attempt to fill in aggregate contents later in this
2294 for cycle. */
2295 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
2297 ipa_set_jf_simple_pass_through (jfunc, index, false);
2298 continue;
2301 else if (TREE_CODE (arg) == SSA_NAME)
2303 if (SSA_NAME_IS_DEFAULT_DEF (arg))
2305 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
2306 if (index >= 0)
2308 bool agg_p;
2309 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
2310 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
2313 else
2315 gimple *stmt = SSA_NAME_DEF_STMT (arg);
2316 if (is_gimple_assign (stmt))
2317 compute_complex_assign_jump_func (fbi, info, jfunc,
2318 call, stmt, arg, param_type);
2319 else if (gimple_code (stmt) == GIMPLE_PHI)
2320 compute_complex_ancestor_jump_func (fbi, info, jfunc,
2321 call,
2322 as_a <gphi *> (stmt));
2326 /* If ARG is pointer, we cannot use its type to determine the type of aggregate
2327 passed (because type conversions are ignored in gimple). Usually we can
2328 safely get type from function declaration, but in case of K&R prototypes or
2329 variadic functions we can try our luck with type of the pointer passed.
2330 TODO: Since we look for the actual initialization of the memory object, we
2331 might do better to work out the type based on the memory stores we find. */
2332 if (!param_type)
2333 param_type = TREE_TYPE (arg);
2335 if ((jfunc->type != IPA_JF_PASS_THROUGH
2336 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
2337 && (jfunc->type != IPA_JF_ANCESTOR
2338 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
2339 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
2340 || POINTER_TYPE_P (param_type)))
2341 determine_known_aggregate_parts (fbi, call, arg, param_type, jfunc);
2343 if (!useful_context)
2344 vec_free (args->polymorphic_call_contexts);
2347 /* Compute jump functions for all edges - both direct and indirect - outgoing
2348 from BB. */
2350 static void
2351 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
2353 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2354 int i;
2355 struct cgraph_edge *cs;
2357 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
2359 struct cgraph_node *callee = cs->callee;
2361 if (callee)
2363 callee = callee->ultimate_alias_target ();
2364 /* We do not need to bother analyzing calls to unknown functions
2365 unless they may become known during lto/whopr. */
2366 if (!callee->definition && !flag_lto)
2367 continue;
2369 ipa_compute_jump_functions_for_edge (fbi, cs);
2373 /* If STMT looks like a statement loading a value from a member pointer formal
2374 parameter, return that parameter and store the offset of the field to
2375 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2376 might be clobbered). If USE_DELTA, then we look for a use of the delta
2377 field rather than the pfn. */
2379 static tree
2380 ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
2381 HOST_WIDE_INT *offset_p)
2383 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2385 if (!gimple_assign_single_p (stmt))
2386 return NULL_TREE;
2388 rhs = gimple_assign_rhs1 (stmt);
2389 if (TREE_CODE (rhs) == COMPONENT_REF)
2391 ref_field = TREE_OPERAND (rhs, 1);
2392 rhs = TREE_OPERAND (rhs, 0);
2394 else
2395 ref_field = NULL_TREE;
2396 if (TREE_CODE (rhs) != MEM_REF)
2397 return NULL_TREE;
2398 rec = TREE_OPERAND (rhs, 0);
2399 if (TREE_CODE (rec) != ADDR_EXPR)
2400 return NULL_TREE;
2401 rec = TREE_OPERAND (rec, 0);
2402 if (TREE_CODE (rec) != PARM_DECL
2403 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2404 return NULL_TREE;
2405 ref_offset = TREE_OPERAND (rhs, 1);
2407 if (use_delta)
2408 fld = delta_field;
2409 else
2410 fld = ptr_field;
2411 if (offset_p)
2412 *offset_p = int_bit_position (fld);
2414 if (ref_field)
2416 if (integer_nonzerop (ref_offset))
2417 return NULL_TREE;
2418 return ref_field == fld ? rec : NULL_TREE;
2420 else
2421 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2422 : NULL_TREE;
2425 /* Returns true iff T is an SSA_NAME defined by a statement. */
2427 static bool
2428 ipa_is_ssa_with_stmt_def (tree t)
2430 if (TREE_CODE (t) == SSA_NAME
2431 && !SSA_NAME_IS_DEFAULT_DEF (t))
2432 return true;
2433 else
2434 return false;
2437 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2438 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2439 indirect call graph edge.
2440 If POLYMORPHIC is true, record it as a destination of a polymorphic call. */
2442 static struct cgraph_edge *
2443 ipa_note_param_call (struct cgraph_node *node, int param_index,
2444 gcall *stmt, bool polymorphic)
2446 struct cgraph_edge *cs;
2448 cs = node->get_edge (stmt);
2449 cs->indirect_info->param_index = param_index;
2450 cs->indirect_info->agg_contents = 0;
2451 cs->indirect_info->member_ptr = 0;
2452 cs->indirect_info->guaranteed_unmodified = 0;
2453 ipa_set_param_used_by_indirect_call (IPA_NODE_REF (node),
2454 param_index, true);
2455 if (cs->indirect_info->polymorphic || polymorphic)
2456 ipa_set_param_used_by_polymorphic_call
2457 (IPA_NODE_REF (node), param_index, true);
2458 return cs;
2461 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
2462 (described by INFO). PARMS_AINFO is a pointer to a vector containing
2463 intermediate information about each formal parameter. Currently it checks
2464 whether the call calls a pointer that is a formal parameter and if so, the
2465 parameter is marked with the called flag and an indirect call graph edge
2466 describing the call is created. This is very simple for ordinary pointers
2467 represented in SSA but not-so-nice when it comes to member pointers. The
2468 ugly part of this function does nothing more than trying to match the
2469 pattern of such a call. An example of such a pattern is the gimple dump
2470 below, the call is on the last line:
2472 <bb 2>:
2473 f$__delta_5 = f.__delta;
2474 f$__pfn_24 = f.__pfn;
2477 <bb 2>:
2478 f$__delta_5 = MEM[(struct *)&f];
2479 f$__pfn_24 = MEM[(struct *)&f + 4B];
2481 and a few lines below:
2483 <bb 5>
2484 D.2496_3 = (int) f$__pfn_24;
2485 D.2497_4 = D.2496_3 & 1;
2486 if (D.2497_4 != 0)
2487 goto <bb 3>;
2488 else
2489 goto <bb 4>;
2491 <bb 6>:
2492 D.2500_7 = (unsigned int) f$__delta_5;
2493 D.2501_8 = &S + D.2500_7;
2494 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2495 D.2503_10 = *D.2502_9;
2496 D.2504_12 = f$__pfn_24 + -1;
2497 D.2505_13 = (unsigned int) D.2504_12;
2498 D.2506_14 = D.2503_10 + D.2505_13;
2499 D.2507_15 = *D.2506_14;
2500 iftmp.11_16 = (String:: *) D.2507_15;
2502 <bb 7>:
2503 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2504 D.2500_19 = (unsigned int) f$__delta_5;
2505 D.2508_20 = &S + D.2500_19;
2506 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2508 Such patterns are results of simple calls to a member pointer:
2510 int doprinting (int (MyString::* f)(int) const)
2512 MyString S ("somestring");
2514 return (S.*f)(4);
2517 Moreover, the function also looks for called pointers loaded from aggregates
2518 passed by value or reference. */
2520 static void
2521 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
2522 tree target)
2524 class ipa_node_params *info = fbi->info;
2525 HOST_WIDE_INT offset;
2526 bool by_ref;
2528 if (SSA_NAME_IS_DEFAULT_DEF (target))
2530 tree var = SSA_NAME_VAR (target);
2531 int index = ipa_get_param_decl_index (info, var);
2532 if (index >= 0)
2533 ipa_note_param_call (fbi->node, index, call, false);
2534 return;
2537 int index;
2538 gimple *def = SSA_NAME_DEF_STMT (target);
2539 bool guaranteed_unmodified;
2540 if (gimple_assign_single_p (def)
2541 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2542 gimple_assign_rhs1 (def), &index, &offset,
2543 NULL, &by_ref, &guaranteed_unmodified))
2545 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2546 call, false);
2547 cs->indirect_info->offset = offset;
2548 cs->indirect_info->agg_contents = 1;
2549 cs->indirect_info->by_ref = by_ref;
2550 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
2551 return;
2554 /* Now we need to try to match the complex pattern of calling a member
2555 pointer. */
2556 if (gimple_code (def) != GIMPLE_PHI
2557 || gimple_phi_num_args (def) != 2
2558 || !POINTER_TYPE_P (TREE_TYPE (target))
2559 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2560 return;
2562 /* First, we need to check whether one of these is a load from a member
2563 pointer that is a parameter to this function. */
2564 tree n1 = PHI_ARG_DEF (def, 0);
2565 tree n2 = PHI_ARG_DEF (def, 1);
2566 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2567 return;
2568 gimple *d1 = SSA_NAME_DEF_STMT (n1);
2569 gimple *d2 = SSA_NAME_DEF_STMT (n2);
2571 tree rec;
2572 basic_block bb, virt_bb;
2573 basic_block join = gimple_bb (def);
2574 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2576 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2577 return;
2579 bb = EDGE_PRED (join, 0)->src;
2580 virt_bb = gimple_bb (d2);
2582 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2584 bb = EDGE_PRED (join, 1)->src;
2585 virt_bb = gimple_bb (d1);
2587 else
2588 return;
2590 /* Second, we need to check that the basic blocks are laid out in the way
2591 corresponding to the pattern. */
2593 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2594 || single_pred (virt_bb) != bb
2595 || single_succ (virt_bb) != join)
2596 return;
2598 /* Third, let's see that the branching is done depending on the least
2599 significant bit of the pfn. */
2601 gimple *branch = last_stmt (bb);
2602 if (!branch || gimple_code (branch) != GIMPLE_COND)
2603 return;
2605 if ((gimple_cond_code (branch) != NE_EXPR
2606 && gimple_cond_code (branch) != EQ_EXPR)
2607 || !integer_zerop (gimple_cond_rhs (branch)))
2608 return;
2610 tree cond = gimple_cond_lhs (branch);
2611 if (!ipa_is_ssa_with_stmt_def (cond))
2612 return;
2614 def = SSA_NAME_DEF_STMT (cond);
2615 if (!is_gimple_assign (def)
2616 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2617 || !integer_onep (gimple_assign_rhs2 (def)))
2618 return;
2620 cond = gimple_assign_rhs1 (def);
2621 if (!ipa_is_ssa_with_stmt_def (cond))
2622 return;
2624 def = SSA_NAME_DEF_STMT (cond);
2626 if (is_gimple_assign (def)
2627 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2629 cond = gimple_assign_rhs1 (def);
2630 if (!ipa_is_ssa_with_stmt_def (cond))
2631 return;
2632 def = SSA_NAME_DEF_STMT (cond);
2635 tree rec2;
2636 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2637 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2638 == ptrmemfunc_vbit_in_delta),
2639 NULL);
2640 if (rec != rec2)
2641 return;
2643 index = ipa_get_param_decl_index (info, rec);
2644 if (index >= 0
2645 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2647 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2648 call, false);
2649 cs->indirect_info->offset = offset;
2650 cs->indirect_info->agg_contents = 1;
2651 cs->indirect_info->member_ptr = 1;
2652 cs->indirect_info->guaranteed_unmodified = 1;
2655 return;
2658 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2659 object referenced in the expression is a formal parameter of the caller
2660 FBI->node (described by FBI->info), create a call note for the
2661 statement. */
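/* Schematically (a simplified gimple dump, illustrative only): for a
virtual call such as

     OBJ_TYPE_REF (_3; obj_1 -> 2) (obj_1);

where OBJ_1 is based on a formal parameter, the created call note records
that parameter's index, the OBJ_TYPE_REF token (2 here) and the offset of
the ancestor within the parameter. */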
2663 static void
2664 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2665 gcall *call, tree target)
2667 tree obj = OBJ_TYPE_REF_OBJECT (target);
2668 int index;
2669 HOST_WIDE_INT anc_offset;
2671 if (!flag_devirtualize)
2672 return;
2674 if (TREE_CODE (obj) != SSA_NAME)
2675 return;
2677 class ipa_node_params *info = fbi->info;
2678 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2680 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2681 return;
2683 anc_offset = 0;
2684 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2685 gcc_assert (index >= 0);
2686 if (detect_type_change_ssa (fbi, obj, obj_type_ref_class (target),
2687 call))
2688 return;
2690 else
2692 gimple *stmt = SSA_NAME_DEF_STMT (obj);
2693 tree expr;
2695 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2696 if (!expr)
2697 return;
2698 index = ipa_get_param_decl_index (info,
2699 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2700 gcc_assert (index >= 0);
2701 if (detect_type_change (fbi, obj, expr, obj_type_ref_class (target),
2702 call, anc_offset))
2703 return;
2706 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2707 call, true);
2708 class cgraph_indirect_call_info *ii = cs->indirect_info;
2709 ii->offset = anc_offset;
2710 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2711 ii->otr_type = obj_type_ref_class (target);
2712 ii->polymorphic = 1;
2715 /* Analyze a call statement CALL whether and how it utilizes formal parameters
2716 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
2717 containing intermediate information about each formal parameter. */
2719 static void
2720 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2722 tree target = gimple_call_fn (call);
2724 if (!target
2725 || (TREE_CODE (target) != SSA_NAME
2726 && !virtual_method_call_p (target)))
2727 return;
2729 struct cgraph_edge *cs = fbi->node->get_edge (call);
2730 /* If we previously turned the call into a direct call, there is
2731 no need to analyze. */
2732 if (cs && !cs->indirect_unknown_callee)
2733 return;
2735 if (cs->indirect_info->polymorphic && flag_devirtualize)
2737 tree instance;
2738 tree target = gimple_call_fn (call);
2739 ipa_polymorphic_call_context context (current_function_decl,
2740 target, call, &instance);
2742 gcc_checking_assert (cs->indirect_info->otr_type
2743 == obj_type_ref_class (target));
2744 gcc_checking_assert (cs->indirect_info->otr_token
2745 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2747 cs->indirect_info->vptr_changed
2748 = !context.get_dynamic_type (instance,
2749 OBJ_TYPE_REF_OBJECT (target),
2750 obj_type_ref_class (target), call,
2751 &fbi->aa_walk_budget);
2752 cs->indirect_info->context = context;
2755 if (TREE_CODE (target) == SSA_NAME)
2756 ipa_analyze_indirect_call_uses (fbi, call, target);
2757 else if (virtual_method_call_p (target))
2758 ipa_analyze_virtual_call_uses (fbi, call, target);
2762 /* Analyze the call statement STMT with respect to formal parameters (described
2763 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2764 formal parameters are called. */
2766 static void
2767 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2769 if (is_gimple_call (stmt))
2770 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2773 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2774 If OP is a parameter declaration, mark it as used in the info structure
2775 passed in DATA. */
2777 static bool
2778 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2780 class ipa_node_params *info = (class ipa_node_params *) data;
2782 op = get_base_address (op);
2783 if (op
2784 && TREE_CODE (op) == PARM_DECL)
2786 int index = ipa_get_param_decl_index (info, op);
2787 gcc_assert (index >= 0);
2788 ipa_set_param_used (info, index, true);
2791 return false;
2794 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2795 the findings in various structures of the associated ipa_node_params
2796 structure, such as parameter flags, notes etc. FBI holds various data about
2797 the function being analyzed. */
2799 static void
2800 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2802 gimple_stmt_iterator gsi;
2803 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2805 gimple *stmt = gsi_stmt (gsi);
2807 if (is_gimple_debug (stmt))
2808 continue;
2810 ipa_analyze_stmt_uses (fbi, stmt);
2811 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2812 visit_ref_for_mod_analysis,
2813 visit_ref_for_mod_analysis,
2814 visit_ref_for_mod_analysis);
2816 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2817 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2818 visit_ref_for_mod_analysis,
2819 visit_ref_for_mod_analysis,
2820 visit_ref_for_mod_analysis);
2823 /* Calculate controlled uses of parameters of NODE. */
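/* For example (illustrative source, not from GCC):

     extern void use_ptr (void *);

     static void
     foo (void *p)
     {
       use_ptr (p);
       use_ptr (p);
     }

Parameter P of FOO has exactly two uses, both of them arguments of calls,
so its number of controlled uses is 2. If P were also stored to memory or
used in arithmetic, it would get IPA_UNDESCRIBED_USE instead. */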
2825 static void
2826 ipa_analyze_controlled_uses (struct cgraph_node *node)
2828 class ipa_node_params *info = IPA_NODE_REF (node);
2830 for (int i = 0; i < ipa_get_param_count (info); i++)
2832 tree parm = ipa_get_param (info, i);
2833 int controlled_uses = 0;
2835 /* For SSA regs see if parameter is used. For non-SSA we compute
2836 the flag during modification analysis. */
2837 if (is_gimple_reg (parm))
2839 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2840 parm);
2841 if (ddef && !has_zero_uses (ddef))
2843 imm_use_iterator imm_iter;
2844 use_operand_p use_p;
2846 ipa_set_param_used (info, i, true);
2847 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2848 if (!is_gimple_call (USE_STMT (use_p)))
2850 if (!is_gimple_debug (USE_STMT (use_p)))
2852 controlled_uses = IPA_UNDESCRIBED_USE;
2853 break;
2856 else
2857 controlled_uses++;
2859 else
2860 controlled_uses = 0;
2862 else
2863 controlled_uses = IPA_UNDESCRIBED_USE;
2864 ipa_set_controlled_uses (info, i, controlled_uses);
2868 /* Free stuff in BI. */
2870 static void
2871 free_ipa_bb_info (struct ipa_bb_info *bi)
2873 bi->cg_edges.release ();
2874 bi->param_aa_statuses.release ();
2877 /* Dominator walker driving the analysis. */
2879 class analysis_dom_walker : public dom_walker
2881 public:
2882 analysis_dom_walker (struct ipa_func_body_info *fbi)
2883 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2885 virtual edge before_dom_children (basic_block);
2887 private:
2888 struct ipa_func_body_info *m_fbi;
2891 edge
2892 analysis_dom_walker::before_dom_children (basic_block bb)
2894 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2895 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2896 return NULL;
2899 /* Release body info FBI. */
2901 void
2902 ipa_release_body_info (struct ipa_func_body_info *fbi)
2904 int i;
2905 struct ipa_bb_info *bi;
2907 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2908 free_ipa_bb_info (bi);
2909 fbi->bb_infos.release ();
2912 /* Initialize the array describing properties of formal parameters
2913 of NODE, analyze their uses and compute jump functions associated
2914 with actual arguments of calls from within NODE. */
2916 void
2917 ipa_analyze_node (struct cgraph_node *node)
2919 struct ipa_func_body_info fbi;
2920 class ipa_node_params *info;
2922 ipa_check_create_node_params ();
2923 ipa_check_create_edge_args ();
2924 info = IPA_NODE_REF_GET_CREATE (node);
2926 if (info->analysis_done)
2927 return;
2928 info->analysis_done = 1;
2930 if (ipa_func_spec_opts_forbid_analysis_p (node))
2932 for (int i = 0; i < ipa_get_param_count (info); i++)
2934 ipa_set_param_used (info, i, true);
2935 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2937 return;
2940 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2941 push_cfun (func);
2942 calculate_dominance_info (CDI_DOMINATORS);
2943 ipa_initialize_node_params (node);
2944 ipa_analyze_controlled_uses (node);
2946 fbi.node = node;
2947 fbi.info = IPA_NODE_REF (node);
2948 fbi.bb_infos = vNULL;
2949 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
2950 fbi.param_count = ipa_get_param_count (info);
2951 fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);
2953 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2955 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2956 bi->cg_edges.safe_push (cs);
2959 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2961 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2962 bi->cg_edges.safe_push (cs);
2965 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2967 ipa_release_body_info (&fbi);
2968 free_dominance_info (CDI_DOMINATORS);
2969 pop_cfun ();
2972 /* Update the jump functions associated with call graph edge E when the call
2973 graph edge CS is being inlined, assuming that E->caller is already (possibly
2974 indirectly) inlined into CS->callee and that E has not been inlined. */
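/* For example (illustrative): if the i-th jump function of E is a simple
pass-through of the inlined callee's parameter 0, and the jump function
for argument 0 at CS is the constant 5, then after inlining the i-th jump
function of E becomes the constant 5. If the jump function at CS is itself
a pass-through, the formal_id is instead remapped to refer to the
corresponding parameter of the new root. */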
2976 static void
2977 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2978 struct cgraph_edge *e)
2980 class ipa_edge_args *top = IPA_EDGE_REF (cs);
2981 class ipa_edge_args *args = IPA_EDGE_REF (e);
2982 if (!args)
2983 return;
2984 int count = ipa_get_cs_argument_count (args);
2985 int i;
2987 for (i = 0; i < count; i++)
2989 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2990 class ipa_polymorphic_call_context *dst_ctx
2991 = ipa_get_ith_polymorhic_call_context (args, i);
2993 if (dst->agg.items)
2995 struct ipa_agg_jf_item *item;
2996 int j;
2998 FOR_EACH_VEC_ELT (*dst->agg.items, j, item)
3000 int dst_fid;
3001 struct ipa_jump_func *src;
3003 if (item->jftype != IPA_JF_PASS_THROUGH
3004 && item->jftype != IPA_JF_LOAD_AGG)
3005 continue;
3007 dst_fid = item->value.pass_through.formal_id;
3008 if (!top || dst_fid >= ipa_get_cs_argument_count (top))
3010 item->jftype = IPA_JF_UNKNOWN;
3011 continue;
3014 item->value.pass_through.formal_id = -1;
3015 src = ipa_get_ith_jump_func (top, dst_fid);
3016 if (src->type == IPA_JF_CONST)
3018 if (item->jftype == IPA_JF_PASS_THROUGH
3019 && item->value.pass_through.operation == NOP_EXPR)
3021 item->jftype = IPA_JF_CONST;
3022 item->value.constant = src->value.constant.value;
3023 continue;
3026 else if (src->type == IPA_JF_PASS_THROUGH
3027 && src->value.pass_through.operation == NOP_EXPR)
3029 if (item->jftype == IPA_JF_PASS_THROUGH
3030 || !item->value.load_agg.by_ref
3031 || src->value.pass_through.agg_preserved)
3032 item->value.pass_through.formal_id
3033 = src->value.pass_through.formal_id;
3035 else if (src->type == IPA_JF_ANCESTOR)
3037 if (item->jftype == IPA_JF_PASS_THROUGH)
3039 if (!src->value.ancestor.offset)
3040 item->value.pass_through.formal_id
3041 = src->value.ancestor.formal_id;
3043 else if (src->value.ancestor.agg_preserved)
3045 gcc_checking_assert (item->value.load_agg.by_ref);
3047 item->value.pass_through.formal_id
3048 = src->value.ancestor.formal_id;
3049 item->value.load_agg.offset
3050 += src->value.ancestor.offset;
3054 if (item->value.pass_through.formal_id < 0)
3055 item->jftype = IPA_JF_UNKNOWN;
3059 if (!top)
3061 ipa_set_jf_unknown (dst);
3062 continue;
3065 if (dst->type == IPA_JF_ANCESTOR)
3067 struct ipa_jump_func *src;
3068 int dst_fid = dst->value.ancestor.formal_id;
3069 class ipa_polymorphic_call_context *src_ctx
3070 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
3072 /* A variable number of arguments can cause havoc if we try to access
3073 an argument that does not exist in the inlined edge. So make sure we
3074 don't. */
3075 if (dst_fid >= ipa_get_cs_argument_count (top))
3077 ipa_set_jf_unknown (dst);
3078 continue;
3081 src = ipa_get_ith_jump_func (top, dst_fid);
3083 if (src_ctx && !src_ctx->useless_p ())
3085 class ipa_polymorphic_call_context ctx = *src_ctx;
3087 /* TODO: Make type preserved safe WRT contexts. */
3088 if (!ipa_get_jf_ancestor_type_preserved (dst))
3089 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
3090 ctx.offset_by (dst->value.ancestor.offset);
3091 if (!ctx.useless_p ())
3093 if (!dst_ctx)
3095 vec_safe_grow_cleared (args->polymorphic_call_contexts,
3096 count, true);
3097 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3100 dst_ctx->combine_with (ctx);
3104 /* Parameter and argument in ancestor jump function must be pointer
3105 type, which means access to aggregate must be by-reference. */
3106 gcc_assert (!src->agg.items || src->agg.by_ref);
3108 if (src->agg.items && dst->value.ancestor.agg_preserved)
3110 struct ipa_agg_jf_item *item;
3111 int j;
3113 /* Currently we do not produce clobber aggregate jump functions,
3114 replace with merging when we do. */
3115 gcc_assert (!dst->agg.items);
3117 dst->agg.items = vec_safe_copy (src->agg.items);
3118 dst->agg.by_ref = src->agg.by_ref;
3119 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
3120 item->offset -= dst->value.ancestor.offset;
3123 if (src->type == IPA_JF_PASS_THROUGH
3124 && src->value.pass_through.operation == NOP_EXPR)
3126 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
3127 dst->value.ancestor.agg_preserved &=
3128 src->value.pass_through.agg_preserved;
3130 else if (src->type == IPA_JF_ANCESTOR)
3132 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
3133 dst->value.ancestor.offset += src->value.ancestor.offset;
3134 dst->value.ancestor.agg_preserved &=
3135 src->value.ancestor.agg_preserved;
3137 else
3138 ipa_set_jf_unknown (dst);
3140 else if (dst->type == IPA_JF_PASS_THROUGH)
3142 struct ipa_jump_func *src;
3143 /* We must check the range due to calls with a variable number of arguments,
3144 and we cannot combine jump functions with operations. */
3145 if (dst->value.pass_through.operation == NOP_EXPR
3146 && (top && dst->value.pass_through.formal_id
3147 < ipa_get_cs_argument_count (top)))
3149 int dst_fid = dst->value.pass_through.formal_id;
3150 src = ipa_get_ith_jump_func (top, dst_fid);
3151 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
3152 class ipa_polymorphic_call_context *src_ctx
3153 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
3155 if (src_ctx && !src_ctx->useless_p ())
3157 class ipa_polymorphic_call_context ctx = *src_ctx;
3159 /* TODO: Make type preserved safe WRT contexts. */
3160 if (!ipa_get_jf_pass_through_type_preserved (dst))
3161 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
3162 if (!ctx.useless_p ())
3164 if (!dst_ctx)
3166 vec_safe_grow_cleared (args->polymorphic_call_contexts,
3167 count, true);
3168 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3170 dst_ctx->combine_with (ctx);
3173 switch (src->type)
3175 case IPA_JF_UNKNOWN:
3176 ipa_set_jf_unknown (dst);
3177 break;
3178 case IPA_JF_CONST:
3179 ipa_set_jf_cst_copy (dst, src);
3180 break;
3182 case IPA_JF_PASS_THROUGH:
3184 int formal_id = ipa_get_jf_pass_through_formal_id (src);
3185 enum tree_code operation;
3186 operation = ipa_get_jf_pass_through_operation (src);
3188 if (operation == NOP_EXPR)
3190 bool agg_p;
3191 agg_p = dst_agg_p
3192 && ipa_get_jf_pass_through_agg_preserved (src);
3193 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
3195 else if (TREE_CODE_CLASS (operation) == tcc_unary)
3196 ipa_set_jf_unary_pass_through (dst, formal_id, operation);
3197 else
3199 tree operand = ipa_get_jf_pass_through_operand (src);
3200 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
3201 operation);
3203 break;
3205 case IPA_JF_ANCESTOR:
3207 bool agg_p;
3208 agg_p = dst_agg_p
3209 && ipa_get_jf_ancestor_agg_preserved (src);
3210 ipa_set_ancestor_jf (dst,
3211 ipa_get_jf_ancestor_offset (src),
3212 ipa_get_jf_ancestor_formal_id (src),
3213 agg_p);
3214 break;
3216 default:
3217 gcc_unreachable ();
3220 if (src->agg.items
3221 && (dst_agg_p || !src->agg.by_ref))
3223 /* Currently we do not produce clobber aggregate jump
3224 functions, replace with merging when we do. */
3225 gcc_assert (!dst->agg.items);
3227 dst->agg.by_ref = src->agg.by_ref;
3228 dst->agg.items = vec_safe_copy (src->agg.items);
3231 else
3232 ipa_set_jf_unknown (dst);
3237 /* If TARGET is an addr_expr of a function declaration, make it the
3238 (SPECULATIVE) destination of an indirect edge IE and return the edge.
3239 Otherwise, return NULL. */
3241 struct cgraph_edge *
3242 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
3243 bool speculative)
3245 struct cgraph_node *callee;
3246 bool unreachable = false;
3248 if (TREE_CODE (target) == ADDR_EXPR)
3249 target = TREE_OPERAND (target, 0);
3250 if (TREE_CODE (target) != FUNCTION_DECL)
3252 target = canonicalize_constructor_val (target, NULL);
3253 if (!target || TREE_CODE (target) != FUNCTION_DECL)
3255 /* Member pointer call that goes through a VMT lookup. */
3256 if (ie->indirect_info->member_ptr
3257 /* Or if target is not an invariant expression and we do not
3258 know if it will evaluate to a function at runtime.
3259 This can happen when folding through &VAR, where &VAR
3260 is IP invariant, but VAR itself is not.
3262 TODO: Revisit this when GCC 5 is branched. It seems that
3263 member_ptr check is not needed and that we may try to fold
3264 the expression and see if VAR is readonly. */
3265 || !is_gimple_ip_invariant (target))
3267 if (dump_enabled_p ())
3269 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3270 "discovered direct call non-invariant %s\n",
3271 ie->caller->dump_name ());
3273 return NULL;
3277 if (dump_enabled_p ())
3279 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3280 "discovered direct call to non-function in %s, "
3281 "making it __builtin_unreachable\n",
3282 ie->caller->dump_name ());
3285 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3286 callee = cgraph_node::get_create (target);
3287 unreachable = true;
3289 else
3290 callee = cgraph_node::get (target);
3292 else
3293 callee = cgraph_node::get (target);
3295 /* Because may-edges are not explicitly represented and the vtable may be
3296 external, we may create the first reference to the object in the unit. */
3297 if (!callee || callee->inlined_to)
3300 /* We had better ensure we can refer to it.
3301 In the case of static functions we are out of luck, since we have already
3302 removed the body. In the case of public functions we may or may
3303 not introduce the reference. */
3304 if (!canonicalize_constructor_val (target, NULL)
3305 || !TREE_PUBLIC (target))
3307 if (dump_file)
3308 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
3309 "(%s -> %s) but cannot refer to it. Giving up.\n",
3310 ie->caller->dump_name (),
3311 ie->callee->dump_name ());
3312 return NULL;
3314 callee = cgraph_node::get_create (target);
3317 /* If the edge is already speculated. */
3318 if (speculative && ie->speculative)
3320 if (dump_file)
3322 cgraph_edge *e2 = ie->speculative_call_for_target (callee);
3323 if (!e2)
3325 if (dump_file)
3326 fprintf (dump_file, "ipa-prop: Discovered call to a "
3327 "speculative target (%s -> %s) but the call is "
3328 "already speculated to different target. "
3329 "Giving up.\n",
3330 ie->caller->dump_name (), callee->dump_name ());
3332 else
3334 if (dump_file)
3335 fprintf (dump_file,
3336 "ipa-prop: Discovered call to a speculative target "
3337 "(%s -> %s) this agree with previous speculation.\n",
3338 ie->caller->dump_name (), callee->dump_name ());
3341 return NULL;
3344 if (!dbg_cnt (devirt))
3345 return NULL;
3347 ipa_check_create_node_params ();
3349 /* We cannot make edges to inline clones. It is a bug that someone removed
3350 the cgraph node too early. */
3351 gcc_assert (!callee->inlined_to);
3353 if (dump_file && !unreachable)
3355 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
3356 "(%s -> %s), for stmt ",
3357 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
3358 speculative ? "speculative" : "known",
3359 ie->caller->dump_name (),
3360 callee->dump_name ());
3361 if (ie->call_stmt)
3362 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
3363 else
3364 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
3366 if (dump_enabled_p ())
3368 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3369 "converting indirect call in %s to direct call to %s\n",
3370 ie->caller->dump_name (), callee->dump_name ());
3372 if (!speculative)
3374 struct cgraph_edge *orig = ie;
3375 ie = cgraph_edge::make_direct (ie, callee);
3376 /* If we resolved speculative edge the cost is already up to date
3377 for direct call (adjusted by inline_edge_duplication_hook). */
3378 if (ie == orig)
3380 ipa_call_summary *es = ipa_call_summaries->get (ie);
3381 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
3382 - eni_size_weights.call_cost);
3383 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
3384 - eni_time_weights.call_cost);
3387 else
3389 if (!callee->can_be_discarded_p ())
3391 cgraph_node *alias;
3392 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
3393 if (alias)
3394 callee = alias;
3396 /* make_speculative will update ie's cost to direct call cost. */
3397 ie = ie->make_speculative
3398 (callee, ie->count.apply_scale (8, 10));
3401 return ie;
3404 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
3405 CONSTRUCTOR and return it. Return NULL if the search fails for some
3406 reason. */
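/* For example (illustrative): given

     static const int arr[3] = { 1, 2, 3 };

a request for REQ_OFFSET 64 on the CONSTRUCTOR of ARR returns the
INTEGER_CST 3, since offsets here are measured in bits and with 32-bit
int the element arr[2] starts at bit 64. */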
3408 static tree
3409 find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
3411 tree type = TREE_TYPE (constructor);
3412 if (TREE_CODE (type) != ARRAY_TYPE
3413 && TREE_CODE (type) != RECORD_TYPE)
3414 return NULL;
3416 unsigned ix;
3417 tree index, val;
3418 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
3420 HOST_WIDE_INT elt_offset;
3421 if (TREE_CODE (type) == ARRAY_TYPE)
3423 offset_int off;
3424 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
3425 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3427 if (index)
3429 if (TREE_CODE (index) == RANGE_EXPR)
3430 off = wi::to_offset (TREE_OPERAND (index, 0));
3431 else
3432 off = wi::to_offset (index);
3433 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
3435 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
3436 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3437 off = wi::sext (off - wi::to_offset (low_bound),
3438 TYPE_PRECISION (TREE_TYPE (index)));
3440 off *= wi::to_offset (unit_size);
3441 /* ??? Handle more than just the first index of a
3442 RANGE_EXPR. */
3444 else
3445 off = wi::to_offset (unit_size) * ix;
3447 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
3448 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
3449 continue;
3450 elt_offset = off.to_shwi ();
3452 else if (TREE_CODE (type) == RECORD_TYPE)
3454 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
3455 if (DECL_BIT_FIELD (index))
3456 continue;
3457 elt_offset = int_bit_position (index);
3459 else
3460 gcc_unreachable ();
3462 if (elt_offset > req_offset)
3463 return NULL;
3465 if (TREE_CODE (val) == CONSTRUCTOR)
3466 return find_constructor_constant_at_offset (val,
3467 req_offset - elt_offset);
3469 if (elt_offset == req_offset
3470 && is_gimple_reg_type (TREE_TYPE (val))
3471 && is_gimple_ip_invariant (val))
3472 return val;
3474 return NULL;
3477 /* Check whether SCALAR could be used to look up an aggregate interprocedural
3478 invariant from a static constructor and if so, return it. Otherwise return
3479 NULL. */
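/* For instance (illustrative, hypothetical variable): with

     static const struct { int x; } g = { 42 };

and SCALAR being &g, a lookup with OFFSET 0 and BY_REF true strips the
ADDR_EXPR, verifies that G is a read-only global with a CONSTRUCTOR
initializer, and returns the constant 42. */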
3481 static tree
3482 ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
3484 if (by_ref)
3486 if (TREE_CODE (scalar) != ADDR_EXPR)
3487 return NULL;
3488 scalar = TREE_OPERAND (scalar, 0);
3491 if (!VAR_P (scalar)
3492 || !is_global_var (scalar)
3493 || !TREE_READONLY (scalar)
3494 || !DECL_INITIAL (scalar)
3495 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
3496 return NULL;
3498 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
3501 /* Retrieve value from AGG, a set of known offset/value for an aggregate or
3502 static initializer of SCALAR (which can be NULL) for the given OFFSET or
3503 return NULL if there is none. BY_REF specifies whether the value has to be
3504 passed by reference or by value. If FROM_GLOBAL_CONSTANT is non-NULL, then
3505 the boolean it points to is set to true if the value comes from an
3506 initializer of a constant. */
3508 tree
3509 ipa_find_agg_cst_for_param (struct ipa_agg_value_set *agg, tree scalar,
3510 HOST_WIDE_INT offset, bool by_ref,
3511 bool *from_global_constant)
3513 struct ipa_agg_value *item;
3514 int i;
3516 if (scalar)
3518 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
3519 if (res)
3521 if (from_global_constant)
3522 *from_global_constant = true;
3523 return res;
3527 if (!agg
3528 || by_ref != agg->by_ref)
3529 return NULL;
3531 FOR_EACH_VEC_ELT (agg->items, i, item)
3532 if (item->offset == offset)
3534 /* Currently we do not have clobber values, return NULL for them once
3535 we do. */
3536 gcc_checking_assert (is_gimple_ip_invariant (item->value));
3537 if (from_global_constant)
3538 *from_global_constant = false;
3539 return item->value;
3541 return NULL;
3544 /* Remove a reference to SYMBOL from the list of references of a node given by
3545 reference description RDESC. Return true if the reference has been
3546 successfully found and removed. */
3548 static bool
3549 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
3551 struct ipa_ref *to_del;
3552 struct cgraph_edge *origin;
3554 origin = rdesc->cs;
3555 if (!origin)
3556 return false;
3557 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3558 origin->lto_stmt_uid);
3559 if (!to_del)
3560 return false;
3562 to_del->remove_reference ();
3563 if (dump_file)
3564 fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
3565 origin->caller->dump_name (), symbol->dump_name ());
3566 return true;
3569 /* If JFUNC has a reference description with refcount different from
3570 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3571 NULL. JFUNC must be a constant jump function. */
3573 static struct ipa_cst_ref_desc *
3574 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3576 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3577 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3578 return rdesc;
3579 else
3580 return NULL;
3583 /* If the value of constant jump function JFUNC is an address of a function
3584 declaration, return the associated call graph node. Otherwise return
3585 NULL. */
3587 static cgraph_node *
3588 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3590 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3591 tree cst = ipa_get_jf_constant (jfunc);
3592 if (TREE_CODE (cst) != ADDR_EXPR
3593 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3594 return NULL;
3596 return cgraph_node::get (TREE_OPERAND (cst, 0));
3600 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3601 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3602 the edge specified in the rdesc. Return false if either the symbol or the
3603 reference could not be found, otherwise return true. */
3605 static bool
3606 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3608 struct ipa_cst_ref_desc *rdesc;
3609 if (jfunc->type == IPA_JF_CONST
3610 && (rdesc = jfunc_rdesc_usable (jfunc))
3611 && --rdesc->refcount == 0)
3613 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3614 if (!symbol)
3615 return false;
3617 return remove_described_reference (symbol, rdesc);
3619 return true;
3622 /* Try to find a destination for indirect edge IE that corresponds to a simple
3623 call or a call of a member function pointer and where the destination is a
3624 pointer formal parameter described by jump function JFUNC. TARGET_TYPE is
3625 the type of the parameter to which the result of JFUNC is passed. If it can
3626 be determined, return the newly direct edge, otherwise return NULL.
3627 NEW_ROOT and NEW_ROOT_INFO is the node and its info that JFUNC lattices are
3628 relative to. */
3630 static struct cgraph_edge *
3631 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3632 struct ipa_jump_func *jfunc, tree target_type,
3633 struct cgraph_node *new_root,
3634 class ipa_node_params *new_root_info)
3636 struct cgraph_edge *cs;
3637 tree target;
3638 bool agg_contents = ie->indirect_info->agg_contents;
3639 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
3640 if (agg_contents)
3642 bool from_global_constant;
3643 ipa_agg_value_set agg = ipa_agg_value_set_from_jfunc (new_root_info,
3644 new_root,
3645 &jfunc->agg);
3646 target = ipa_find_agg_cst_for_param (&agg, scalar,
3647 ie->indirect_info->offset,
3648 ie->indirect_info->by_ref,
3649 &from_global_constant);
3650 agg.release ();
3651 if (target
3652 && !from_global_constant
3653 && !ie->indirect_info->guaranteed_unmodified)
3654 return NULL;
3656 else
3657 target = scalar;
3658 if (!target)
3659 return NULL;
3660 cs = ipa_make_edge_direct_to_target (ie, target);
3662 if (cs && !agg_contents)
3664 bool ok;
3665 gcc_checking_assert (cs->callee
3666 && (cs != ie
3667 || jfunc->type != IPA_JF_CONST
3668 || !cgraph_node_for_jfunc (jfunc)
3669 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3670 ok = try_decrement_rdesc_refcount (jfunc);
3671 gcc_checking_assert (ok);
3674 return cs;
3677 /* Return the target to be used in cases of impossible devirtualization. IE
3678 and target (the latter can be NULL) are dumped when dumping is enabled. */
3680 tree
3681 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3683 if (dump_file)
3685 if (target)
3686 fprintf (dump_file,
3687 "Type inconsistent devirtualization: %s->%s\n",
3688 ie->caller->dump_name (),
3689 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3690 else
3691 fprintf (dump_file,
3692 "No devirtualization target in %s\n",
3693 ie->caller->dump_name ());
3695 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3696 cgraph_node::get_create (new_target);
3697 return new_target;
3700 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3701 call based on a formal parameter which is described by jump function JFUNC
3702 and if it can be determined, make it direct and return the direct edge.
3703 Otherwise, return NULL. CTX describes the polymorphic context that the
3704 parameter the call is based on brings along with it. NEW_ROOT and
3705 NEW_ROOT_INFO is the node and its info that JFUNC lattices are relative
3706 to. */
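/* For example (illustrative C++ source, not from GCC):

     struct A { virtual int f (); };

     int
     call_f (A *a)
     {
       return a->f ();
     }

If interprocedural propagation determines that the pointer bound to A
always points to an object whose dynamic type is A itself, the virtual
call can be turned into a direct call to A::f by the code below. */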
3708 static struct cgraph_edge *
3709 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3710 struct ipa_jump_func *jfunc,
3711 class ipa_polymorphic_call_context ctx,
3712 struct cgraph_node *new_root,
3713 class ipa_node_params *new_root_info)
3715 tree target = NULL;
3716 bool speculative = false;
3718 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
3719 return NULL;
3721 gcc_assert (!ie->indirect_info->by_ref);
3723 /* Try to do lookup via known virtual table pointer value. */
3724 if (!ie->indirect_info->vptr_changed
3725 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
3727 tree vtable;
3728 unsigned HOST_WIDE_INT offset;
3729 tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3730 : NULL;
3731 ipa_agg_value_set agg = ipa_agg_value_set_from_jfunc (new_root_info,
3732 new_root,
3733 &jfunc->agg);
3734 tree t = ipa_find_agg_cst_for_param (&agg, scalar,
3735 ie->indirect_info->offset,
3736 true);
3737 agg.release ();
3738 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3740 bool can_refer;
3741 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3742 vtable, offset, &can_refer);
3743 if (can_refer)
3745 if (!t
3746 || fndecl_built_in_p (t, BUILT_IN_UNREACHABLE)
3747 || !possible_polymorphic_call_target_p
3748 (ie, cgraph_node::get (t)))
3750 /* Do not speculate builtin_unreachable, it is stupid! */
3751 if (!ie->indirect_info->vptr_changed)
3752 target = ipa_impossible_devirt_target (ie, target);
3753 else
3754 target = NULL;
3756 else
3758 target = t;
3759 speculative = ie->indirect_info->vptr_changed;
3765 ipa_polymorphic_call_context ie_context (ie);
3766 vec <cgraph_node *> targets;
3767 bool final;
3769 ctx.offset_by (ie->indirect_info->offset);
3770 if (ie->indirect_info->vptr_changed)
3771 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3772 ie->indirect_info->otr_type);
3773 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3774 targets = possible_polymorphic_call_targets
3775 (ie->indirect_info->otr_type,
3776 ie->indirect_info->otr_token,
3777 ctx, &final);
3778 if (final && targets.length () <= 1)
3780 speculative = false;
3781 if (targets.length () == 1)
3782 target = targets[0]->decl;
3783 else
3784 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3786 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3787 && !ie->speculative && ie->maybe_hot_p ())
3789 cgraph_node *n;
3790 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3791 ie->indirect_info->otr_token,
3792 ie->indirect_info->context);
3793 if (n)
3795 target = n->decl;
3796 speculative = true;
3800 if (target)
3802 if (!possible_polymorphic_call_target_p
3803 (ie, cgraph_node::get_create (target)))
3805 if (speculative)
3806 return NULL;
3807 target = ipa_impossible_devirt_target (ie, target);
3809 return ipa_make_edge_direct_to_target (ie, target, speculative);
3811 else
3812 return NULL;
3815 /* Update the param called notes associated with NODE when CS is being inlined,
3816 assuming NODE is (potentially indirectly) inlined into CS->callee.
3817 Moreover, if the callee is discovered to be constant, create a new cgraph
3818 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3819 unless NEW_EDGES is NULL. Return true iff new edge(s) were created. */
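/* For example (illustrative, hypothetical target): if NODE contains an
indirect call through its second formal parameter and, after inlining,
the jump function for the corresponding argument at CS turns out to be
the constant &foo, the indirect edge can be converted into a direct (or
speculative) edge to FOO. */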
3821 static bool
3822 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3823 struct cgraph_node *node,
3824 vec<cgraph_edge *> *new_edges)
3826 class ipa_edge_args *top;
3827 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3828 struct cgraph_node *new_root;
3829 class ipa_node_params *new_root_info, *inlined_node_info;
3830 bool res = false;
3832 ipa_check_create_edge_args ();
3833 top = IPA_EDGE_REF (cs);
3834 new_root = cs->caller->inlined_to
3835 ? cs->caller->inlined_to : cs->caller;
3836 new_root_info = IPA_NODE_REF (new_root);
3837 inlined_node_info = IPA_NODE_REF (cs->callee->function_symbol ());
3839 for (ie = node->indirect_calls; ie; ie = next_ie)
3841 class cgraph_indirect_call_info *ici = ie->indirect_info;
3842 struct ipa_jump_func *jfunc;
3843 int param_index;
3845 next_ie = ie->next_callee;
3847 if (ici->param_index == -1)
3848 continue;
3850 /* We must check range due to calls with variable number of arguments: */
3851 if (!top || ici->param_index >= ipa_get_cs_argument_count (top))
3853 ici->param_index = -1;
3854 continue;
3857 param_index = ici->param_index;
3858 jfunc = ipa_get_ith_jump_func (top, param_index);
3860 auto_vec<cgraph_node *, 4> spec_targets;
3861 if (ie->speculative)
3862 for (cgraph_edge *direct = ie->first_speculative_call_target ();
3863 direct;
3864 direct = direct->next_speculative_call_target ())
3865 spec_targets.safe_push (direct->callee);
3867 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3868 new_direct_edge = NULL;
3869 else if (ici->polymorphic)
3871 ipa_polymorphic_call_context ctx;
3872 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3873 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx,
3874 new_root,
3875 new_root_info);
3877 else
3879 tree target_type = ipa_get_type (inlined_node_info, param_index);
3880 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3881 target_type,
3882 new_root,
3883 new_root_info);
3886 /* If speculation was removed, then we need to do nothing. */
3887 if (new_direct_edge && new_direct_edge != ie
3888 && spec_targets.contains (new_direct_edge->callee))
3890 new_direct_edge->indirect_inlining_edge = 1;
3891 top = IPA_EDGE_REF (cs);
3892 res = true;
3893 if (!new_direct_edge->speculative)
3894 continue;
3896 else if (new_direct_edge)
3898 new_direct_edge->indirect_inlining_edge = 1;
3899 if (new_edges)
3901 new_edges->safe_push (new_direct_edge);
3902 res = true;
3904 top = IPA_EDGE_REF (cs);
3905 /* If a speculative edge was introduced, we still need to update the
3906 call info of the indirect edge. */
3907 if (!new_direct_edge->speculative)
3908 continue;
3910 if (jfunc->type == IPA_JF_PASS_THROUGH
3911 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3913 if (ici->agg_contents
3914 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3915 && !ici->polymorphic)
3916 ici->param_index = -1;
3917 else
3919 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3920 if (ici->polymorphic
3921 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3922 ici->vptr_changed = true;
3923 ipa_set_param_used_by_indirect_call (new_root_info,
3924 ici->param_index, true);
3925 if (ici->polymorphic)
3926 ipa_set_param_used_by_polymorphic_call (new_root_info,
3927 ici->param_index, true);
3930 else if (jfunc->type == IPA_JF_ANCESTOR)
3932 if (ici->agg_contents
3933 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3934 && !ici->polymorphic)
3935 ici->param_index = -1;
3936 else
3938 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3939 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3940 if (ici->polymorphic
3941 && !ipa_get_jf_ancestor_type_preserved (jfunc))
3942 ici->vptr_changed = true;
3943 ipa_set_param_used_by_indirect_call (new_root_info,
3944 ici->param_index, true);
3945 if (ici->polymorphic)
3946 ipa_set_param_used_by_polymorphic_call (new_root_info,
3947 ici->param_index, true);
3950 else
3951 /* Either we can find a destination for this edge now or never. */
3952 ici->param_index = -1;
3955 return res;
3958 /* Recursively traverse subtree of NODE (including node) made of inlined
3959 cgraph_edges when CS has been inlined and invoke
3960 update_indirect_edges_after_inlining on all nodes and
3961 update_jump_functions_after_inlining on all non-inlined edges that lead out
3962 of this subtree. Newly discovered indirect edges will be added to
3963 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges
3964 were created. */
3966 static bool
3967 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3968 struct cgraph_node *node,
3969 vec<cgraph_edge *> *new_edges)
3971 struct cgraph_edge *e;
3972 bool res;
3974 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3976 for (e = node->callees; e; e = e->next_callee)
3977 if (!e->inline_failed)
3978 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3979 else
3980 update_jump_functions_after_inlining (cs, e);
3981 for (e = node->indirect_calls; e; e = e->next_callee)
3982 update_jump_functions_after_inlining (cs, e);
3984 return res;
3987 /* Combine two controlled-uses counts as done during inlining. */
3989 static int
3990 combine_controlled_uses_counters (int c, int d)
3992 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3993 return IPA_UNDESCRIBED_USE;
3994 else
3995 return c + d - 1;
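/* For instance, combining C == 3 controlled uses in the caller with
   D == 2 uses of the corresponding parameter in the callee yields
   3 + 2 - 1 == 4: the single use at the inlined call site is replaced
   by the callee's uses.  IPA_UNDESCRIBED_USE is absorbing in both
   operands.  */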
3998 /* Propagate the number of controlled uses from CS->callee to the new root
3999 of the tree of inlined nodes. */
4001 static void
4002 propagate_controlled_uses (struct cgraph_edge *cs)
4004 class ipa_edge_args *args = IPA_EDGE_REF (cs);
4005 if (!args)
4006 return;
4007 struct cgraph_node *new_root = cs->caller->inlined_to
4008 ? cs->caller->inlined_to : cs->caller;
4009 class ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
4010 class ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
4011 int count, i;
4013 if (!old_root_info)
4014 return;
4016 count = MIN (ipa_get_cs_argument_count (args),
4017 ipa_get_param_count (old_root_info));
4018 for (i = 0; i < count; i++)
4020 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
4021 struct ipa_cst_ref_desc *rdesc;
4023 if (jf->type == IPA_JF_PASS_THROUGH)
4025 int src_idx, c, d;
4026 src_idx = ipa_get_jf_pass_through_formal_id (jf);
4027 c = ipa_get_controlled_uses (new_root_info, src_idx);
4028 d = ipa_get_controlled_uses (old_root_info, i);
4030 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
4031 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
4032 c = combine_controlled_uses_counters (c, d);
4033 ipa_set_controlled_uses (new_root_info, src_idx, c);
4034 if (c == 0 && new_root_info->ipcp_orig_node)
4036 struct cgraph_node *n;
4037 struct ipa_ref *ref;
4038 tree t = new_root_info->known_csts[src_idx];
4040 if (t && TREE_CODE (t) == ADDR_EXPR
4041 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
4042 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
4043 && (ref = new_root->find_reference (n, NULL, 0)))
4045 if (dump_file)
4046 fprintf (dump_file, "ipa-prop: Removing cloning-created "
4047 "reference from %s to %s.\n",
4048 new_root->dump_name (),
4049 n->dump_name ());
4050 ref->remove_reference ();
4054 else if (jf->type == IPA_JF_CONST
4055 && (rdesc = jfunc_rdesc_usable (jf)))
4057 int d = ipa_get_controlled_uses (old_root_info, i);
4058 int c = rdesc->refcount;
4059 rdesc->refcount = combine_controlled_uses_counters (c, d);
4060 if (rdesc->refcount == 0)
4062 tree cst = ipa_get_jf_constant (jf);
4063 struct cgraph_node *n;
4064 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
4065 && TREE_CODE (TREE_OPERAND (cst, 0))
4066 == FUNCTION_DECL);
4067 n = cgraph_node::get (TREE_OPERAND (cst, 0));
4068 if (n)
4070 struct cgraph_node *clone;
4071 bool ok;
4072 ok = remove_described_reference (n, rdesc);
4073 gcc_checking_assert (ok);
4075 clone = cs->caller;
4076 while (clone->inlined_to
4077 && clone->ipcp_clone
4078 && clone != rdesc->cs->caller)
4080 struct ipa_ref *ref;
4081 ref = clone->find_reference (n, NULL, 0);
4082 if (ref)
4084 if (dump_file)
4085 fprintf (dump_file, "ipa-prop: Removing "
4086 "cloning-created reference "
4087 "from %s to %s.\n",
4088 clone->dump_name (),
4089 n->dump_name ());
4090 ref->remove_reference ();
4092 clone = clone->callers->caller;
4099 for (i = ipa_get_param_count (old_root_info);
4100 i < ipa_get_cs_argument_count (args);
4101 i++)
4103 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
4105 if (jf->type == IPA_JF_CONST)
4107 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
4108 if (rdesc)
4109 rdesc->refcount = IPA_UNDESCRIBED_USE;
4111 else if (jf->type == IPA_JF_PASS_THROUGH)
4112 ipa_set_controlled_uses (new_root_info,
4113 jf->value.pass_through.formal_id,
4114 IPA_UNDESCRIBED_USE);
4118 /* Update jump functions and call note functions on inlining the call site CS.
4119 CS is expected to lead to a node already cloned by
4120 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
4121 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
4122 created. */
4124 bool
4125 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
4126 vec<cgraph_edge *> *new_edges)
4128 bool changed;
4129 /* Do nothing if the preparation phase has not been carried out yet
4130 (i.e. during early inlining). */
4131 if (!ipa_node_params_sum)
4132 return false;
4133 gcc_assert (ipa_edge_args_sum);
4135 propagate_controlled_uses (cs);
4136 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
4137 ipa_node_params_sum->remove (cs->callee);
4139 class ipa_edge_args *args = IPA_EDGE_REF (cs);
4140 if (args)
4142 bool ok = true;
4143 if (args->jump_functions)
4145 struct ipa_jump_func *jf;
4146 int i;
4147 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
4148 if (jf->type == IPA_JF_CONST
4149 && ipa_get_jf_constant_rdesc (jf))
4151 ok = false;
4152 break;
4155 if (ok)
4156 ipa_edge_args_sum->remove (cs);
4158 if (ipcp_transformation_sum)
4159 ipcp_transformation_sum->remove (cs->callee);
4161 return changed;
4164 /* Ensure that the array of edge argument infos is big enough to accommodate
4165 a structure for all edges, and reallocate it if not. Also allocate the
4166 associated hash tables if they do not already exist. */
4168 void
4169 ipa_check_create_edge_args (void)
4171 if (!ipa_edge_args_sum)
4172 ipa_edge_args_sum
4173 = (new (ggc_alloc_no_dtor<ipa_edge_args_sum_t> ())
4174 ipa_edge_args_sum_t (symtab, true));
4175 if (!ipa_bits_hash_table)
4176 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
4177 if (!ipa_vr_hash_table)
4178 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
4181 /* Free all ipa_edge structures. */
4183 void
4184 ipa_free_all_edge_args (void)
4186 if (!ipa_edge_args_sum)
4187 return;
4189 ggc_delete (ipa_edge_args_sum);
4190 ipa_edge_args_sum = NULL;
4193 /* Free all ipa_node_params structures. */
4195 void
4196 ipa_free_all_node_params (void)
4198 if (ipa_node_params_sum)
4199 ggc_delete (ipa_node_params_sum);
4200 ipa_node_params_sum = NULL;
4203 /* Initialize IPA CP transformation summary and also allocate any necessary hash
4204 tables if they do not already exist. */
4206 void
4207 ipcp_transformation_initialize (void)
4209 if (!ipa_bits_hash_table)
4210 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
4211 if (!ipa_vr_hash_table)
4212 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
4213 if (ipcp_transformation_sum == NULL)
4214 ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
4217 /* Release the IPA CP transformation summary. */
4219 void
4220 ipcp_free_transformation_sum (void)
4222 if (!ipcp_transformation_sum)
4223 return;
4225 ipcp_transformation_sum->~function_summary<ipcp_transformation *> ();
4226 ggc_free (ipcp_transformation_sum);
4227 ipcp_transformation_sum = NULL;
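/* Note that the explicit destructor call above is necessary: the garbage
   collector only reclaims storage and never runs destructors, so ggc_free
   alone would leak anything the summary owns outside of GC memory.  */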
4230 /* Set the aggregate replacements of NODE to be AGGVALS. */
4232 void
4233 ipa_set_node_agg_value_chain (struct cgraph_node *node,
4234 struct ipa_agg_replacement_value *aggvals)
4236 ipcp_transformation_initialize ();
4237 ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
4238 s->agg_values = aggvals;
4241 /* Hook that is called by cgraph.c when an edge is removed. Adjust reference
4242 count data structures accordingly. */
4244 void
4245 ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
4247 if (args->jump_functions)
4249 struct ipa_jump_func *jf;
4250 int i;
4251 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
4253 struct ipa_cst_ref_desc *rdesc;
4254 try_decrement_rdesc_refcount (jf);
4255 if (jf->type == IPA_JF_CONST
4256 && (rdesc = ipa_get_jf_constant_rdesc (jf))
4257 && rdesc->cs == cs)
4258 rdesc->cs = NULL;
4263 /* Method invoked when an edge is duplicated. Copy ipa_edge_args and adjust
4264 reference count data structures accordingly. */
4266 void
4267 ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
4268 ipa_edge_args *old_args, ipa_edge_args *new_args)
4270 unsigned int i;
4272 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
4273 if (old_args->polymorphic_call_contexts)
4274 new_args->polymorphic_call_contexts
4275 = vec_safe_copy (old_args->polymorphic_call_contexts);
4277 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
4279 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
4280 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
4282 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
4284 if (src_jf->type == IPA_JF_CONST)
4286 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
4288 if (!src_rdesc)
4289 dst_jf->value.constant.rdesc = NULL;
4290 else if (src->caller == dst->caller)
4292 struct ipa_ref *ref;
4293 symtab_node *n = cgraph_node_for_jfunc (src_jf);
4294 gcc_checking_assert (n);
4295 ref = src->caller->find_reference (n, src->call_stmt,
4296 src->lto_stmt_uid);
4297 gcc_checking_assert (ref);
4298 dst->caller->clone_reference (ref, ref->stmt);
4300 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
4301 dst_rdesc->cs = dst;
4302 dst_rdesc->refcount = src_rdesc->refcount;
4303 dst_rdesc->next_duplicate = NULL;
4304 dst_jf->value.constant.rdesc = dst_rdesc;
4306 else if (src_rdesc->cs == src)
4308 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
4309 dst_rdesc->cs = dst;
4310 dst_rdesc->refcount = src_rdesc->refcount;
4311 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
4312 src_rdesc->next_duplicate = dst_rdesc;
4313 dst_jf->value.constant.rdesc = dst_rdesc;
4315 else
4317 struct ipa_cst_ref_desc *dst_rdesc;
4318 /* This can happen during inlining, when a JFUNC can refer to a
4319 reference taken in a function up in the tree of inline clones.
4320 We need to find the duplicate that refers to our tree of
4321 inline clones. */
4323 gcc_assert (dst->caller->inlined_to);
4324 for (dst_rdesc = src_rdesc->next_duplicate;
4325 dst_rdesc;
4326 dst_rdesc = dst_rdesc->next_duplicate)
4328 struct cgraph_node *top;
4329 top = dst_rdesc->cs->caller->inlined_to
4330 ? dst_rdesc->cs->caller->inlined_to
4331 : dst_rdesc->cs->caller;
4332 if (dst->caller->inlined_to == top)
4333 break;
4335 gcc_assert (dst_rdesc);
4336 dst_jf->value.constant.rdesc = dst_rdesc;
4339 else if (dst_jf->type == IPA_JF_PASS_THROUGH
4340 && src->caller == dst->caller)
4342 struct cgraph_node *inline_root = dst->caller->inlined_to
4343 ? dst->caller->inlined_to : dst->caller;
4344 class ipa_node_params *root_info = IPA_NODE_REF (inline_root);
4345 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
4347 int c = ipa_get_controlled_uses (root_info, idx);
4348 if (c != IPA_UNDESCRIBED_USE)
4350 c++;
4351 ipa_set_controlled_uses (root_info, idx, c);
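/* To summarize the rdesc handling above: a duplicate within the same
   caller (e.g. when an edge becomes speculative) gets a fresh rdesc and a
   cloned reference; duplicating the edge that owns the rdesc chains a new
   rdesc onto the next_duplicate list; and a duplicate created by inlining
   looks up the existing rdesc duplicate that belongs to its own tree of
   inline clones.  */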
4357 /* Analyze a function newly added to the callgraph. */
4359 static void
4360 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
4362 if (node->has_gimple_body_p ())
4363 ipa_analyze_node (node);
4366 /* Hook that is called by summary when a node is duplicated. */
4368 void
4369 ipa_node_params_t::duplicate (cgraph_node *src, cgraph_node *dst,
4370 ipa_node_params *old_info,
4371 ipa_node_params *new_info)
4373 ipa_agg_replacement_value *old_av, *new_av;
4375 new_info->descriptors = vec_safe_copy (old_info->descriptors);
4376 new_info->lattices = NULL;
4377 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
4378 new_info->known_csts = old_info->known_csts.copy ();
4379 new_info->known_contexts = old_info->known_contexts.copy ();
4381 new_info->analysis_done = old_info->analysis_done;
4382 new_info->node_enqueued = old_info->node_enqueued;
4383 new_info->versionable = old_info->versionable;
4385 old_av = ipa_get_agg_replacements_for_node (src);
4386 if (old_av)
4388 new_av = NULL;
4389 while (old_av)
4391 struct ipa_agg_replacement_value *v;
4393 v = ggc_alloc<ipa_agg_replacement_value> ();
4394 memcpy (v, old_av, sizeof (*v));
4395 v->next = new_av;
4396 new_av = v;
4397 old_av = old_av->next;
4399 ipa_set_node_agg_value_chain (dst, new_av);
4403 /* Duplication of ipcp transformation summaries. */
4405 void
4406 ipcp_transformation_t::duplicate (cgraph_node *, cgraph_node *dst,
4407 ipcp_transformation *src_trans,
4408 ipcp_transformation *dst_trans)
4410 /* Avoid redundant work of duplicating vectors we will never use. */
4411 if (dst->inlined_to)
4412 return;
4413 dst_trans->bits = vec_safe_copy (src_trans->bits);
4414 dst_trans->m_vr = vec_safe_copy (src_trans->m_vr);
4415 ipa_agg_replacement_value *agg = src_trans->agg_values,
4416 **aggptr = &dst_trans->agg_values;
4417 while (agg)
4419 *aggptr = ggc_alloc<ipa_agg_replacement_value> ();
4420 **aggptr = *agg;
4421 agg = agg->next;
4422 aggptr = &(*aggptr)->next;
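/* AGGPTR always points at the link to be filled next, so the copied
   chain preserves the original order without a separate reversal pass;
   the terminating NULL is inherited when the last node is
   struct-copied.  */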
4426 /* Register our cgraph hooks if they are not already there. */
4428 void
4429 ipa_register_cgraph_hooks (void)
4431 ipa_check_create_node_params ();
4432 ipa_check_create_edge_args ();
4434 function_insertion_hook_holder =
4435 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
4438 /* Unregister our cgraph hooks if they are registered. */
4440 static void
4441 ipa_unregister_cgraph_hooks (void)
4443 if (function_insertion_hook_holder)
4444 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
4445 function_insertion_hook_holder = NULL;
4448 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4449 longer needed after ipa-cp. */
4451 void
4452 ipa_free_all_structures_after_ipa_cp (void)
4454 if (!optimize && !in_lto_p)
4456 ipa_free_all_edge_args ();
4457 ipa_free_all_node_params ();
4458 ipcp_sources_pool.release ();
4459 ipcp_cst_values_pool.release ();
4460 ipcp_poly_ctx_values_pool.release ();
4461 ipcp_agg_lattice_pool.release ();
4462 ipa_unregister_cgraph_hooks ();
4463 ipa_refdesc_pool.release ();
4467 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4468 longer needed after indirect inlining. */
4470 void
4471 ipa_free_all_structures_after_iinln (void)
4473 ipa_free_all_edge_args ();
4474 ipa_free_all_node_params ();
4475 ipa_unregister_cgraph_hooks ();
4476 ipcp_sources_pool.release ();
4477 ipcp_cst_values_pool.release ();
4478 ipcp_poly_ctx_values_pool.release ();
4479 ipcp_agg_lattice_pool.release ();
4480 ipa_refdesc_pool.release ();
4483 /* Print the ipa_tree_map data structures of function NODE to F. */
4486 void
4487 ipa_print_node_params (FILE *f, struct cgraph_node *node)
4489 int i, count;
4490 class ipa_node_params *info;
4492 if (!node->definition)
4493 return;
4494 info = IPA_NODE_REF (node);
4495 fprintf (f, " function %s parameter descriptors:\n", node->dump_name ());
4496 if (!info)
4498 fprintf (f, " no params return\n");
4499 return;
4501 count = ipa_get_param_count (info);
4502 for (i = 0; i < count; i++)
4504 int c;
4506 fprintf (f, " ");
4507 ipa_dump_param (f, info, i);
4508 if (ipa_is_param_used (info, i))
4509 fprintf (f, " used");
4510 if (ipa_is_param_used_by_ipa_predicates (info, i))
4511 fprintf (f, " used_by_ipa_predicates");
4512 if (ipa_is_param_used_by_indirect_call (info, i))
4513 fprintf (f, " used_by_indirect_call");
4514 if (ipa_is_param_used_by_polymorphic_call (info, i))
4515 fprintf (f, " used_by_polymorphic_call");
4516 c = ipa_get_controlled_uses (info, i);
4517 if (c == IPA_UNDESCRIBED_USE)
4518 fprintf (f, " undescribed_use");
4519 else
4520 fprintf (f, " controlled_uses=%i", c);
4521 fprintf (f, "\n");
4525 /* Print ipa_tree_map data structures of all functions in the
4526 callgraph to F. */
4528 void
4529 ipa_print_all_params (FILE * f)
4531 struct cgraph_node *node;
4533 fprintf (f, "\nFunction parameters:\n");
4534 FOR_EACH_FUNCTION (node)
4535 ipa_print_node_params (f, node);
4538 /* Dump the AV linked list. */
4540 void
4541 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4543 bool comma = false;
4544 fprintf (f, " Aggregate replacements:");
4545 for (; av; av = av->next)
4547 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4548 av->index, av->offset);
4549 print_generic_expr (f, av->value);
4550 comma = true;
4552 fprintf (f, "\n");
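/* The output has the form (values purely illustrative):
     Aggregate replacements: 0[0]=42, 1[32]=&var
   i.e. a comma-separated list of INDEX[OFFSET]=VALUE entries.  */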
4555 /* Stream out jump function JUMP_FUNC to OB. */
4557 static void
4558 ipa_write_jump_function (struct output_block *ob,
4559 struct ipa_jump_func *jump_func)
4561 struct ipa_agg_jf_item *item;
4562 struct bitpack_d bp;
4563 int i, count;
4564 int flag = 0;
4566 /* ADDR_EXPRs are very common IP invariants; save some streamer data
4567 as well as WPA memory by handling them specially. */
4568 if (jump_func->type == IPA_JF_CONST
4569 && TREE_CODE (jump_func->value.constant.value) == ADDR_EXPR)
4570 flag = 1;
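/* The jump function type and the ADDR_EXPR flag are packed into a single
   unsigned HWI; ipa_read_jump_function recovers the flag as VAL & 1 and
   the type as VAL / 2.  */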
4572 streamer_write_uhwi (ob, jump_func->type * 2 + flag);
4573 switch (jump_func->type)
4575 case IPA_JF_UNKNOWN:
4576 break;
4577 case IPA_JF_CONST:
4578 gcc_assert (
4579 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4580 stream_write_tree (ob,
4581 flag
4582 ? TREE_OPERAND (jump_func->value.constant.value, 0)
4583 : jump_func->value.constant.value, true);
4584 break;
4585 case IPA_JF_PASS_THROUGH:
4586 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4587 if (jump_func->value.pass_through.operation == NOP_EXPR)
4589 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4590 bp = bitpack_create (ob->main_stream);
4591 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4592 streamer_write_bitpack (&bp);
4594 else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
4595 == tcc_unary)
4596 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4597 else
4599 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4600 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4602 break;
4603 case IPA_JF_ANCESTOR:
4604 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4605 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4606 bp = bitpack_create (ob->main_stream);
4607 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4608 streamer_write_bitpack (&bp);
4609 break;
4610 default:
4611 fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
4614 count = vec_safe_length (jump_func->agg.items);
4615 streamer_write_uhwi (ob, count);
4616 if (count)
4618 bp = bitpack_create (ob->main_stream);
4619 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4620 streamer_write_bitpack (&bp);
4623 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4625 stream_write_tree (ob, item->type, true);
4626 streamer_write_uhwi (ob, item->offset);
4627 streamer_write_uhwi (ob, item->jftype);
4628 switch (item->jftype)
4630 case IPA_JF_UNKNOWN:
4631 break;
4632 case IPA_JF_CONST:
4633 stream_write_tree (ob, item->value.constant, true);
4634 break;
4635 case IPA_JF_PASS_THROUGH:
4636 case IPA_JF_LOAD_AGG:
4637 streamer_write_uhwi (ob, item->value.pass_through.operation);
4638 streamer_write_uhwi (ob, item->value.pass_through.formal_id);
4639 if (TREE_CODE_CLASS (item->value.pass_through.operation)
4640 != tcc_unary)
4641 stream_write_tree (ob, item->value.pass_through.operand, true);
4642 if (item->jftype == IPA_JF_LOAD_AGG)
4644 stream_write_tree (ob, item->value.load_agg.type, true);
4645 streamer_write_uhwi (ob, item->value.load_agg.offset);
4646 bp = bitpack_create (ob->main_stream);
4647 bp_pack_value (&bp, item->value.load_agg.by_ref, 1);
4648 streamer_write_bitpack (&bp);
4650 break;
4651 default:
4652 fatal_error (UNKNOWN_LOCATION,
4653 "invalid jump function in LTO stream");
4657 bp = bitpack_create (ob->main_stream);
4658 bp_pack_value (&bp, !!jump_func->bits, 1);
4659 streamer_write_bitpack (&bp);
4660 if (jump_func->bits)
4662 streamer_write_widest_int (ob, jump_func->bits->value);
4663 streamer_write_widest_int (ob, jump_func->bits->mask);
4665 bp_pack_value (&bp, !!jump_func->m_vr, 1);
4666 streamer_write_bitpack (&bp);
4667 if (jump_func->m_vr)
4669 streamer_write_enum (ob->main_stream, value_range_kind,
4670 VR_LAST, jump_func->m_vr->kind ());
4671 stream_write_tree (ob, jump_func->m_vr->min (), true);
4672 stream_write_tree (ob, jump_func->m_vr->max (), true);
4676 /* Read in jump function JUMP_FUNC from IB. */
4678 static void
4679 ipa_read_jump_function (class lto_input_block *ib,
4680 struct ipa_jump_func *jump_func,
4681 struct cgraph_edge *cs,
4682 class data_in *data_in,
4683 bool prevails)
4685 enum jump_func_type jftype;
4686 enum tree_code operation;
4687 int i, count;
4688 int val = streamer_read_uhwi (ib);
4689 bool flag = val & 1;
4691 jftype = (enum jump_func_type) (val / 2);
4692 switch (jftype)
4694 case IPA_JF_UNKNOWN:
4695 ipa_set_jf_unknown (jump_func);
4696 break;
4697 case IPA_JF_CONST:
4699 tree t = stream_read_tree (ib, data_in);
4700 if (flag && prevails)
4701 t = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (t)), t);
4702 ipa_set_jf_constant (jump_func, t, cs);
4704 break;
4705 case IPA_JF_PASS_THROUGH:
4706 operation = (enum tree_code) streamer_read_uhwi (ib);
4707 if (operation == NOP_EXPR)
4709 int formal_id = streamer_read_uhwi (ib);
4710 struct bitpack_d bp = streamer_read_bitpack (ib);
4711 bool agg_preserved = bp_unpack_value (&bp, 1);
4712 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4714 else if (TREE_CODE_CLASS (operation) == tcc_unary)
4716 int formal_id = streamer_read_uhwi (ib);
4717 ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4719 else
4721 tree operand = stream_read_tree (ib, data_in);
4722 int formal_id = streamer_read_uhwi (ib);
4723 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4724 operation);
4726 break;
4727 case IPA_JF_ANCESTOR:
4729 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4730 int formal_id = streamer_read_uhwi (ib);
4731 struct bitpack_d bp = streamer_read_bitpack (ib);
4732 bool agg_preserved = bp_unpack_value (&bp, 1);
4733 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4734 break;
4736 default:
4737 fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
4740 count = streamer_read_uhwi (ib);
4741 if (prevails)
4742 vec_alloc (jump_func->agg.items, count);
4743 if (count)
4745 struct bitpack_d bp = streamer_read_bitpack (ib);
4746 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4748 for (i = 0; i < count; i++)
4750 struct ipa_agg_jf_item item;
4751 item.type = stream_read_tree (ib, data_in);
4752 item.offset = streamer_read_uhwi (ib);
4753 item.jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4755 switch (item.jftype)
4757 case IPA_JF_UNKNOWN:
4758 break;
4759 case IPA_JF_CONST:
4760 item.value.constant = stream_read_tree (ib, data_in);
4761 break;
4762 case IPA_JF_PASS_THROUGH:
4763 case IPA_JF_LOAD_AGG:
4764 operation = (enum tree_code) streamer_read_uhwi (ib);
4765 item.value.pass_through.operation = operation;
4766 item.value.pass_through.formal_id = streamer_read_uhwi (ib);
4767 if (TREE_CODE_CLASS (operation) == tcc_unary)
4768 item.value.pass_through.operand = NULL_TREE;
4769 else
4770 item.value.pass_through.operand = stream_read_tree (ib, data_in);
4771 if (item.jftype == IPA_JF_LOAD_AGG)
4773 struct bitpack_d bp;
4774 item.value.load_agg.type = stream_read_tree (ib, data_in);
4775 item.value.load_agg.offset = streamer_read_uhwi (ib);
4776 bp = streamer_read_bitpack (ib);
4777 item.value.load_agg.by_ref = bp_unpack_value (&bp, 1);
4779 break;
4780 default:
4781 fatal_error (UNKNOWN_LOCATION,
4782 "invalid jump function in LTO stream");
4784 if (prevails)
4785 jump_func->agg.items->quick_push (item);
4788 struct bitpack_d bp = streamer_read_bitpack (ib);
4789 bool bits_known = bp_unpack_value (&bp, 1);
4790 if (bits_known)
4792 widest_int value = streamer_read_widest_int (ib);
4793 widest_int mask = streamer_read_widest_int (ib);
4794 if (prevails)
4795 ipa_set_jfunc_bits (jump_func, value, mask);
4797 else
4798 jump_func->bits = NULL;
4800 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4801 bool vr_known = bp_unpack_value (&vr_bp, 1);
4802 if (vr_known)
4804 enum value_range_kind type = streamer_read_enum (ib, value_range_kind,
4805 VR_LAST);
4806 tree min = stream_read_tree (ib, data_in);
4807 tree max = stream_read_tree (ib, data_in);
4808 if (prevails)
4809 ipa_set_jfunc_vr (jump_func, type, min, max);
4811 else
4812 jump_func->m_vr = NULL;
4815 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4816 relevant to indirect inlining to OB. */
4818 static void
4819 ipa_write_indirect_edge_info (struct output_block *ob,
4820 struct cgraph_edge *cs)
4822 class cgraph_indirect_call_info *ii = cs->indirect_info;
4823 struct bitpack_d bp;
4825 streamer_write_hwi (ob, ii->param_index);
4826 bp = bitpack_create (ob->main_stream);
4827 bp_pack_value (&bp, ii->polymorphic, 1);
4828 bp_pack_value (&bp, ii->agg_contents, 1);
4829 bp_pack_value (&bp, ii->member_ptr, 1);
4830 bp_pack_value (&bp, ii->by_ref, 1);
4831 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
4832 bp_pack_value (&bp, ii->vptr_changed, 1);
4833 streamer_write_bitpack (&bp);
4834 if (ii->agg_contents || ii->polymorphic)
4835 streamer_write_hwi (ob, ii->offset);
4836 else
4837 gcc_assert (ii->offset == 0);
4839 if (ii->polymorphic)
4841 streamer_write_hwi (ob, ii->otr_token);
4842 stream_write_tree (ob, ii->otr_type, true);
4843 ii->context.stream_out (ob);
4847 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4848 relevant to indirect inlining from IB. */
4850 static void
4851 ipa_read_indirect_edge_info (class lto_input_block *ib,
4852 class data_in *data_in,
4853 struct cgraph_edge *cs,
4854 class ipa_node_params *info)
4856 class cgraph_indirect_call_info *ii = cs->indirect_info;
4857 struct bitpack_d bp;
4859 ii->param_index = (int) streamer_read_hwi (ib);
4860 bp = streamer_read_bitpack (ib);
4861 ii->polymorphic = bp_unpack_value (&bp, 1);
4862 ii->agg_contents = bp_unpack_value (&bp, 1);
4863 ii->member_ptr = bp_unpack_value (&bp, 1);
4864 ii->by_ref = bp_unpack_value (&bp, 1);
4865 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
4866 ii->vptr_changed = bp_unpack_value (&bp, 1);
4867 if (ii->agg_contents || ii->polymorphic)
4868 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4869 else
4870 ii->offset = 0;
4871 if (ii->polymorphic)
4873 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4874 ii->otr_type = stream_read_tree (ib, data_in);
4875 ii->context.stream_in (ib, data_in);
4877 if (info && ii->param_index >= 0)
4879 if (ii->polymorphic)
4880 ipa_set_param_used_by_polymorphic_call (info,
4881 ii->param_index , true);
4882 ipa_set_param_used_by_indirect_call (info,
4883 ii->param_index, true);
4887 /* Stream out NODE info to OB. */
4889 static void
4890 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4892 int node_ref;
4893 lto_symtab_encoder_t encoder;
4894 class ipa_node_params *info = IPA_NODE_REF (node);
4895 int j;
4896 struct cgraph_edge *e;
4897 struct bitpack_d bp;
4899 encoder = ob->decl_state->symtab_node_encoder;
4900 node_ref = lto_symtab_encoder_encode (encoder, node);
4901 streamer_write_uhwi (ob, node_ref);
4903 streamer_write_uhwi (ob, ipa_get_param_count (info));
4904 for (j = 0; j < ipa_get_param_count (info); j++)
4905 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4906 bp = bitpack_create (ob->main_stream);
4907 gcc_assert (info->analysis_done
4908 || ipa_get_param_count (info) == 0);
4909 gcc_assert (!info->node_enqueued);
4910 gcc_assert (!info->ipcp_orig_node);
4911 for (j = 0; j < ipa_get_param_count (info); j++)
4912 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4913 streamer_write_bitpack (&bp);
4914 for (j = 0; j < ipa_get_param_count (info); j++)
4916 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4917 stream_write_tree (ob, ipa_get_type (info, j), true);
4919 for (e = node->callees; e; e = e->next_callee)
4921 class ipa_edge_args *args = IPA_EDGE_REF (e);
4923 if (!args)
4925 streamer_write_uhwi (ob, 0);
4926 continue;
4929 streamer_write_uhwi (ob,
4930 ipa_get_cs_argument_count (args) * 2
4931 + (args->polymorphic_call_contexts != NULL));
4932 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4934 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4935 if (args->polymorphic_call_contexts != NULL)
4936 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4939 for (e = node->indirect_calls; e; e = e->next_callee)
4941 class ipa_edge_args *args = IPA_EDGE_REF (e);
4942 if (!args)
4943 streamer_write_uhwi (ob, 0);
4944 else
4946 streamer_write_uhwi (ob,
4947 ipa_get_cs_argument_count (args) * 2
4948 + (args->polymorphic_call_contexts != NULL));
4949 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4951 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4952 if (args->polymorphic_call_contexts != NULL)
4953 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4956 ipa_write_indirect_edge_info (ob, e);
4960 /* Stream in edge E from IB. */
4962 static void
4963 ipa_read_edge_info (class lto_input_block *ib,
4964 class data_in *data_in,
4965 struct cgraph_edge *e, bool prevails)
4967 int count = streamer_read_uhwi (ib);
4968 bool contexts_computed = count & 1;
4970 count /= 2;
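/* The streamed count is twice the argument count plus one bit saying
   whether polymorphic call contexts follow each jump function; see the
   writer side in ipa_write_node_info.  */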
4971 if (!count)
4972 return;
4973 if (prevails && e->possibly_call_in_translation_unit_p ())
4975 class ipa_edge_args *args = IPA_EDGE_REF_GET_CREATE (e);
4976 vec_safe_grow_cleared (args->jump_functions, count, true);
4977 if (contexts_computed)
4978 vec_safe_grow_cleared (args->polymorphic_call_contexts, count, true);
4979 for (int k = 0; k < count; k++)
4981 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4982 data_in, prevails);
4983 if (contexts_computed)
4984 ipa_get_ith_polymorhic_call_context (args, k)->stream_in
4985 (ib, data_in);
4988 else
4990 for (int k = 0; k < count; k++)
4992 struct ipa_jump_func dummy;
4993 ipa_read_jump_function (ib, &dummy, e,
4994 data_in, prevails);
4995 if (contexts_computed)
4997 class ipa_polymorphic_call_context ctx;
4998 ctx.stream_in (ib, data_in);
5004 /* Stream in NODE info from IB. */
5006 static void
5007 ipa_read_node_info (class lto_input_block *ib, struct cgraph_node *node,
5008 class data_in *data_in)
5010 int k;
5011 struct cgraph_edge *e;
5012 struct bitpack_d bp;
5013 bool prevails = node->prevailing_p ();
5014 class ipa_node_params *info = prevails
5015 ? IPA_NODE_REF_GET_CREATE (node) : NULL;
5017 int param_count = streamer_read_uhwi (ib);
5018 if (prevails)
5020 ipa_alloc_node_params (node, param_count);
5021 for (k = 0; k < param_count; k++)
5022 (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
5023 if (ipa_get_param_count (info) != 0)
5024 info->analysis_done = true;
5025 info->node_enqueued = false;
5027 else
5028 for (k = 0; k < param_count; k++)
5029 streamer_read_uhwi (ib);
5031 bp = streamer_read_bitpack (ib);
5032 for (k = 0; k < param_count; k++)
5034 bool used = bp_unpack_value (&bp, 1);
5036 if (prevails)
5037 ipa_set_param_used (info, k, used);
5039 for (k = 0; k < param_count; k++)
5041 int nuses = streamer_read_hwi (ib);
5042 tree type = stream_read_tree (ib, data_in);
5044 if (prevails)
5046 ipa_set_controlled_uses (info, k, nuses);
5047 (*info->descriptors)[k].decl_or_type = type;
5050 for (e = node->callees; e; e = e->next_callee)
5051 ipa_read_edge_info (ib, data_in, e, prevails);
5052 for (e = node->indirect_calls; e; e = e->next_callee)
5054 ipa_read_edge_info (ib, data_in, e, prevails);
5055 ipa_read_indirect_edge_info (ib, data_in, e, info);
5061 /* Write jump functions for all functions in the current partition. */
5061 void
5062 ipa_prop_write_jump_functions (void)
5064 struct cgraph_node *node;
5065 struct output_block *ob;
5066 unsigned int count = 0;
5067 lto_symtab_encoder_iterator lsei;
5068 lto_symtab_encoder_t encoder;
5070 if (!ipa_node_params_sum || !ipa_edge_args_sum)
5071 return;
5073 ob = create_output_block (LTO_section_jump_functions);
5074 encoder = ob->decl_state->symtab_node_encoder;
5075 ob->symbol = NULL;
5076 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5077 lsei_next_function_in_partition (&lsei))
5079 node = lsei_cgraph_node (lsei);
5080 if (node->has_gimple_body_p ()
5081 && IPA_NODE_REF (node) != NULL)
5082 count++;
5085 streamer_write_uhwi (ob, count);
5087 /* Process all of the functions. */
5088 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5089 lsei_next_function_in_partition (&lsei))
5091 node = lsei_cgraph_node (lsei);
5092 if (node->has_gimple_body_p ()
5093 && IPA_NODE_REF (node) != NULL)
5094 ipa_write_node_info (ob, node);
5096 streamer_write_char_stream (ob->main_stream, 0);
5097 produce_asm (ob, NULL);
5098 destroy_output_block (ob);
5101 /* Read section in file FILE_DATA of length LEN with data DATA. */
5103 static void
5104 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
5105 size_t len)
5107 const struct lto_function_header *header =
5108 (const struct lto_function_header *) data;
5109 const int cfg_offset = sizeof (struct lto_function_header);
5110 const int main_offset = cfg_offset + header->cfg_size;
5111 const int string_offset = main_offset + header->main_size;
5112 class data_in *data_in;
5113 unsigned int i;
5114 unsigned int count;
5116 lto_input_block ib_main ((const char *) data + main_offset,
5117 header->main_size, file_data->mode_table);
5119 data_in =
5120 lto_data_in_create (file_data, (const char *) data + string_offset,
5121 header->string_size, vNULL);
5122 count = streamer_read_uhwi (&ib_main);
5124 for (i = 0; i < count; i++)
5126 unsigned int index;
5127 struct cgraph_node *node;
5128 lto_symtab_encoder_t encoder;
5130 index = streamer_read_uhwi (&ib_main);
5131 encoder = file_data->symtab_node_encoder;
5132 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5133 index));
5134 gcc_assert (node->definition);
5135 ipa_read_node_info (&ib_main, node, data_in);
5137 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5138 len);
5139 lto_data_in_delete (data_in);
5142 /* Read IPA-CP jump functions. */
5144 void
5145 ipa_prop_read_jump_functions (void)
5147 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5148 struct lto_file_decl_data *file_data;
5149 unsigned int j = 0;
5151 ipa_check_create_node_params ();
5152 ipa_check_create_edge_args ();
5153 ipa_register_cgraph_hooks ();
5155 while ((file_data = file_data_vec[j++]))
5157 size_t len;
5158 const char *data
5159 = lto_get_summary_section_data (file_data, LTO_section_jump_functions,
5160 &len);
5161 if (data)
5162 ipa_prop_read_section (file_data, data, len);
5166 void
5167 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
5169 int node_ref;
5170 unsigned int count = 0;
5171 lto_symtab_encoder_t encoder;
5172 struct ipa_agg_replacement_value *aggvals, *av;
5174 aggvals = ipa_get_agg_replacements_for_node (node);
5175 encoder = ob->decl_state->symtab_node_encoder;
5176 node_ref = lto_symtab_encoder_encode (encoder, node);
5177 streamer_write_uhwi (ob, node_ref);
5179 for (av = aggvals; av; av = av->next)
5180 count++;
5181 streamer_write_uhwi (ob, count);
5183 for (av = aggvals; av; av = av->next)
5185 struct bitpack_d bp;
5187 streamer_write_uhwi (ob, av->offset);
5188 streamer_write_uhwi (ob, av->index);
5189 stream_write_tree (ob, av->value, true);
5191 bp = bitpack_create (ob->main_stream);
5192 bp_pack_value (&bp, av->by_ref, 1);
5193 streamer_write_bitpack (&bp);
5196 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5197 if (ts && vec_safe_length (ts->m_vr) > 0)
5199 count = ts->m_vr->length ();
5200 streamer_write_uhwi (ob, count);
5201 for (unsigned i = 0; i < count; ++i)
5203 struct bitpack_d bp;
5204 ipa_vr *parm_vr = &(*ts->m_vr)[i];
5205 bp = bitpack_create (ob->main_stream);
5206 bp_pack_value (&bp, parm_vr->known, 1);
5207 streamer_write_bitpack (&bp);
5208 if (parm_vr->known)
5210 streamer_write_enum (ob->main_stream, value_range_kind,
5211 VR_LAST, parm_vr->type);
5212 streamer_write_wide_int (ob, parm_vr->min);
5213 streamer_write_wide_int (ob, parm_vr->max);
5217 else
5218 streamer_write_uhwi (ob, 0);
5220 if (ts && vec_safe_length (ts->bits) > 0)
5222 count = ts->bits->length ();
5223 streamer_write_uhwi (ob, count);
5225 for (unsigned i = 0; i < count; ++i)
5227 const ipa_bits *bits_jfunc = (*ts->bits)[i];
5228 struct bitpack_d bp = bitpack_create (ob->main_stream);
5229 bp_pack_value (&bp, !!bits_jfunc, 1);
5230 streamer_write_bitpack (&bp);
5231 if (bits_jfunc)
5233 streamer_write_widest_int (ob, bits_jfunc->value);
5234 streamer_write_widest_int (ob, bits_jfunc->mask);
5238 else
5239 streamer_write_uhwi (ob, 0);
5242 /* Stream in the transformation summary for NODE from IB. */
5244 static void
5245 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
5246 data_in *data_in)
5248 struct ipa_agg_replacement_value *aggvals = NULL;
5249 unsigned int count, i;
5251 count = streamer_read_uhwi (ib);
5252 for (i = 0; i < count; i++)
5254 struct ipa_agg_replacement_value *av;
5255 struct bitpack_d bp;
5257 av = ggc_alloc<ipa_agg_replacement_value> ();
5258 av->offset = streamer_read_uhwi (ib);
5259 av->index = streamer_read_uhwi (ib);
5260 av->value = stream_read_tree (ib, data_in);
5261 bp = streamer_read_bitpack (ib);
5262 av->by_ref = bp_unpack_value (&bp, 1);
5263 av->next = aggvals;
5264 aggvals = av;
5266 ipa_set_node_agg_value_chain (node, aggvals);
5268 count = streamer_read_uhwi (ib);
5269 if (count > 0)
5271 ipcp_transformation_initialize ();
5272 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
5273 vec_safe_grow_cleared (ts->m_vr, count, true);
5274 for (i = 0; i < count; i++)
5276 ipa_vr *parm_vr;
5277 parm_vr = &(*ts->m_vr)[i];
5278 struct bitpack_d bp;
5279 bp = streamer_read_bitpack (ib);
5280 parm_vr->known = bp_unpack_value (&bp, 1);
5281 if (parm_vr->known)
5283 parm_vr->type = streamer_read_enum (ib, value_range_kind,
5284 VR_LAST);
5285 parm_vr->min = streamer_read_wide_int (ib);
5286 parm_vr->max = streamer_read_wide_int (ib);
5290 count = streamer_read_uhwi (ib);
5291 if (count > 0)
5293 ipcp_transformation_initialize ();
5294 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
5295 vec_safe_grow_cleared (ts->bits, count, true);
5297 for (i = 0; i < count; i++)
5299 struct bitpack_d bp = streamer_read_bitpack (ib);
5300 bool known = bp_unpack_value (&bp, 1);
5301 if (known)
5303 const widest_int value = streamer_read_widest_int (ib);
5304 const widest_int mask = streamer_read_widest_int (ib);
5305 ipa_bits *bits
5306 = ipa_get_ipa_bits_for_value (value, mask);
5307 (*ts->bits)[i] = bits;
5313 /* Write transformation summaries for all nodes in the current partition. */
5315 void
5316 ipcp_write_transformation_summaries (void)
5318 struct cgraph_node *node;
5319 struct output_block *ob;
5320 unsigned int count = 0;
5321 lto_symtab_encoder_iterator lsei;
5322 lto_symtab_encoder_t encoder;
5324 ob = create_output_block (LTO_section_ipcp_transform);
5325 encoder = ob->decl_state->symtab_node_encoder;
5326 ob->symbol = NULL;
5327 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5328 lsei_next_function_in_partition (&lsei))
5330 node = lsei_cgraph_node (lsei);
5331 if (node->has_gimple_body_p ())
5332 count++;
5335 streamer_write_uhwi (ob, count);
5337 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5338 lsei_next_function_in_partition (&lsei))
5340 node = lsei_cgraph_node (lsei);
5341 if (node->has_gimple_body_p ())
5342 write_ipcp_transformation_info (ob, node);
5344 streamer_write_char_stream (ob->main_stream, 0);
5345 produce_asm (ob, NULL);
5346 destroy_output_block (ob);
5349 /* Read replacements section in file FILE_DATA of length LEN with data
5350 DATA. */
5352 static void
5353 read_replacements_section (struct lto_file_decl_data *file_data,
5354 const char *data,
5355 size_t len)
5357 const struct lto_function_header *header =
5358 (const struct lto_function_header *) data;
5359 const int cfg_offset = sizeof (struct lto_function_header);
5360 const int main_offset = cfg_offset + header->cfg_size;
5361 const int string_offset = main_offset + header->main_size;
5362 class data_in *data_in;
5363 unsigned int i;
5364 unsigned int count;
5366 lto_input_block ib_main ((const char *) data + main_offset,
5367 header->main_size, file_data->mode_table);
5369 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5370 header->string_size, vNULL);
5371 count = streamer_read_uhwi (&ib_main);
5373 for (i = 0; i < count; i++)
5375 unsigned int index;
5376 struct cgraph_node *node;
5377 lto_symtab_encoder_t encoder;
5379 index = streamer_read_uhwi (&ib_main);
5380 encoder = file_data->symtab_node_encoder;
5381 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5382 index));
5383 gcc_assert (node->definition);
5384 read_ipcp_transformation_info (&ib_main, node, data_in);
5386 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5387 len);
5388 lto_data_in_delete (data_in);
5391 /* Read IPA-CP aggregate replacements. */
5393 void
5394 ipcp_read_transformation_summaries (void)
5396 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5397 struct lto_file_decl_data *file_data;
5398 unsigned int j = 0;
5400 while ((file_data = file_data_vec[j++]))
5402 size_t len;
5403 const char *data
5404 = lto_get_summary_section_data (file_data, LTO_section_ipcp_transform,
5405 &len);
5406 if (data)
5407 read_replacements_section (file_data, data, len);
5411 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5412 NODE. */
5414 static void
5415 adjust_agg_replacement_values (struct cgraph_node *node,
5416 struct ipa_agg_replacement_value *aggval)
5418 struct ipa_agg_replacement_value *v;
5420 if (!node->clone.param_adjustments)
5421 return;
5423 auto_vec<int, 16> new_indices;
5424 node->clone.param_adjustments->get_updated_indices (&new_indices);
5425 for (v = aggval; v; v = v->next)
5427 gcc_checking_assert (v->index >= 0);
5429 if ((unsigned) v->index < new_indices.length ())
5430 v->index = new_indices[v->index];
5431 else
5432 /* This can happen if we know about a constant passed by reference by
5433 an argument which is never actually used for anything, let alone
5434 loading that constant. */
5435 v->index = -1;
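/* As a hypothetical illustration: if the clone dropped the second of
   three parameters, NEW_INDICES would contain {0, -1, 1}, and a
   replacement for original parameter 2 is remapped to index 1.  */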
5439 /* Dominator walker driving the ipcp modification phase. */
5441 class ipcp_modif_dom_walker : public dom_walker
5443 public:
5444 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
5445 vec<ipa_param_descriptor, va_gc> *descs,
5446 struct ipa_agg_replacement_value *av,
5447 bool *sc, bool *cc)
5448 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5449 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5451 virtual edge before_dom_children (basic_block);
5453 private:
5454 struct ipa_func_body_info *m_fbi;
5455 vec<ipa_param_descriptor, va_gc> *m_descriptors;
5456 struct ipa_agg_replacement_value *m_aggval;
5457 bool *m_something_changed, *m_cfg_changed;
5460 edge
5461 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5463 gimple_stmt_iterator gsi;
5464 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5466 struct ipa_agg_replacement_value *v;
5467 gimple *stmt = gsi_stmt (gsi);
5468 tree rhs, val, t;
5469 HOST_WIDE_INT offset;
5470 poly_int64 size;
5471 int index;
5472 bool by_ref, vce;
5474 if (!gimple_assign_load_p (stmt))
5475 continue;
5476 rhs = gimple_assign_rhs1 (stmt);
5477 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5478 continue;
5480 vce = false;
5481 t = rhs;
5482 while (handled_component_p (t))
5484 /* A V_C_E can do things like convert an array of integers to one
5485 bigger integer, and similar things we do not handle below. */
5486 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5488 vce = true;
5489 break;
5491 t = TREE_OPERAND (t, 0);
5493 if (vce)
5494 continue;
5496 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
5497 &offset, &size, &by_ref))
5498 continue;
5499 for (v = m_aggval; v; v = v->next)
5500 if (v->index == index
5501 && v->offset == offset)
5502 break;
5503 if (!v
5504 || v->by_ref != by_ref
5505 || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v->value))),
5506 size))
5507 continue;
5509 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5510 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5512 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5513 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5514 else if (TYPE_SIZE (TREE_TYPE (rhs))
5515 == TYPE_SIZE (TREE_TYPE (v->value)))
5516 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5517 else
5519 if (dump_file)
5521 fprintf (dump_file, " const ");
5522 print_generic_expr (dump_file, v->value);
5523 fprintf (dump_file, " can't be converted to type of ");
5524 print_generic_expr (dump_file, rhs);
5525 fprintf (dump_file, "\n");
5527 continue;
5530 else
5531 val = v->value;
5533 if (dump_file && (dump_flags & TDF_DETAILS))
5535 fprintf (dump_file, "Modifying stmt:\n ");
5536 print_gimple_stmt (dump_file, stmt, 0);
5538 gimple_assign_set_rhs_from_tree (&gsi, val);
5539 update_stmt (stmt);
5541 if (dump_file && (dump_flags & TDF_DETAILS))
5543 fprintf (dump_file, "into:\n ");
5544 print_gimple_stmt (dump_file, stmt, 0);
5545 fprintf (dump_file, "\n");
5548 *m_something_changed = true;
5549 if (maybe_clean_eh_stmt (stmt)
5550 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5551 *m_cfg_changed = true;
5553 return NULL;
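/* As an illustration, given an aggregate replacement 0[32]=42, a load
   from the corresponding offset of the first parameter such as

     tmp_1 = param_0(D)->field;

   is rewritten to tmp_1 = 42, provided the access size matches and the
   constant can be converted to the type of the load (the names above are
   made up for the example).  */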
5556 /* Return true if we have recorded VALUE and MASK about PARM.
5557 Set VALUE and MASK accordingly. */
5559 bool
5560 ipcp_get_parm_bits (tree parm, tree *value, widest_int *mask)
5562 cgraph_node *cnode = cgraph_node::get (current_function_decl);
5563 ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
5564 if (!ts || vec_safe_length (ts->bits) == 0)
5565 return false;
5567 int i = 0;
5568 for (tree p = DECL_ARGUMENTS (current_function_decl);
5569 p != parm; p = DECL_CHAIN (p))
5571 i++;
5572 /* Ignore static chain. */
5573 if (!p)
5574 return false;
5577 if (cnode->clone.param_adjustments)
5579 i = cnode->clone.param_adjustments->get_original_index (i);
5580 if (i < 0)
5581 return false;
5584 vec<ipa_bits *, va_gc> &bits = *ts->bits;
5585 if (!bits[i])
5586 return false;
5587 *mask = bits[i]->mask;
5588 *value = wide_int_to_tree (TREE_TYPE (parm), bits[i]->value);
5589 return true;
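/* A pass can query the recorded bits roughly as follows (sketch only):

     widest_int value, mask;
     if (ipcp_get_parm_bits (parm, &value, &mask))
       ...

   Bits set in MASK are unknown; where MASK is clear, the bit equals the
   corresponding bit of VALUE (cf. the use of MASK | VALUE as the nonzero
   bits in ipcp_update_bits).  */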
5593 /* Update bits info of formal parameters as described in
5594 ipcp_transformation. */
5596 static void
5597 ipcp_update_bits (struct cgraph_node *node)
5599 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5601 if (!ts || vec_safe_length (ts->bits) == 0)
5602 return;
5603 vec<ipa_bits *, va_gc> &bits = *ts->bits;
5604 unsigned count = bits.length ();
5605 if (!count)
5606 return;
5608 auto_vec<int, 16> new_indices;
5609 bool need_remapping = false;
5610 if (node->clone.param_adjustments)
5612 node->clone.param_adjustments->get_updated_indices (&new_indices);
5613 need_remapping = true;
5615 auto_vec <tree, 16> parm_decls;
5616 push_function_arg_decls (&parm_decls, node->decl);
5618 for (unsigned i = 0; i < count; ++i)
5620 tree parm;
5621 if (need_remapping)
5623 if (i >= new_indices.length ())
5624 continue;
5625 int idx = new_indices[i];
5626 if (idx < 0)
5627 continue;
5628 parm = parm_decls[idx];
5630 else
5631 parm = parm_decls[i];
5632 gcc_checking_assert (parm);
5635 if (!bits[i]
5636 || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
5637 || POINTER_TYPE_P (TREE_TYPE (parm)))
5638 || !is_gimple_reg (parm))
5639 continue;
5641 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5642 if (!ddef)
5643 continue;
5645 if (dump_file)
5647 fprintf (dump_file, "Adjusting mask for param %u to ", i);
5648 print_hex (bits[i]->mask, dump_file);
5649 fprintf (dump_file, "\n");
5652 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5654 unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
5655 signop sgn = TYPE_SIGN (TREE_TYPE (ddef));
5657 wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
5658 | wide_int::from (bits[i]->value, prec, sgn);
5659 set_nonzero_bits (ddef, nonzero_bits);
5661 else
5663 unsigned tem = bits[i]->mask.to_uhwi ();
5664 unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
5665 unsigned align = tem & -tem;
5666 unsigned misalign = bitpos & (align - 1);
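/* For example, a mask of 0xfffffff0 with value 0x4 gives TEM & -TEM
   == 0x10, i.e. ALIGN == 16, and MISALIGN == 4: the pointer is known to
   equal 4 modulo 16.  */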
5668 if (align > 1)
5670 if (dump_file)
5671 fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);
5673 unsigned old_align, old_misalign;
5674 struct ptr_info_def *pi = get_ptr_info (ddef);
5675 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5677 if (old_known
5678 && old_align > align)
5680 if (dump_file)
5682 fprintf (dump_file, "But alignment was already %u.\n", old_align);
5683 if ((old_misalign & (align - 1)) != misalign)
5684 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5685 old_misalign, misalign);
5687 continue;
5690 if (old_known
5691 && ((misalign & (old_align - 1)) != old_misalign)
5692 && dump_file)
5693 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5694 old_misalign, misalign);
5696 set_ptr_info_alignment (pi, align, misalign);
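/* Return true if this value range is known to exclude zero: either the
   anti-range ~[0, 0], or, for an unsigned EXPR_TYPE, the range
   [1, maximum representable value].  */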
5702 bool
5703 ipa_vr::nonzero_p (tree expr_type) const
5705 if (type == VR_ANTI_RANGE && wi::eq_p (min, 0) && wi::eq_p (max, 0))
5706 return true;
5708 unsigned prec = TYPE_PRECISION (expr_type);
5709 return (type == VR_RANGE
5710 && TYPE_UNSIGNED (expr_type)
5711 && wi::eq_p (min, wi::one (prec))
5712 && wi::eq_p (max, wi::max_value (prec, TYPE_SIGN (expr_type))));
5715 /* Update value range of formal parameters as described in
5716 ipcp_transformation. */
5718 static void
5719 ipcp_update_vr (struct cgraph_node *node)
5721 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5722 if (!ts || vec_safe_length (ts->m_vr) == 0)
5723 return;
5724 const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
5725 unsigned count = vr.length ();
5726 if (!count)
5727 return;
5729 auto_vec<int, 16> new_indices;
5730 bool need_remapping = false;
5731 if (node->clone.param_adjustments)
5733 node->clone.param_adjustments->get_updated_indices (&new_indices);
5734 need_remapping = true;
5736 auto_vec <tree, 16> parm_decls;
5737 push_function_arg_decls (&parm_decls, node->decl);
5739 for (unsigned i = 0; i < count; ++i)
5741 tree parm;
5742 int remapped_idx;
5743 if (need_remapping)
5745 if (i >= new_indices.length ())
5746 continue;
5747 remapped_idx = new_indices[i];
5748 if (remapped_idx < 0)
5749 continue;
5751 else
5752 remapped_idx = i;
5754 parm = parm_decls[remapped_idx];
5756 gcc_checking_assert (parm);
5757 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5759 if (!ddef || !is_gimple_reg (parm))
5760 continue;
5762 if (vr[i].known
5763 && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
5765 tree type = TREE_TYPE (ddef);
5766 unsigned prec = TYPE_PRECISION (type);
5767 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5769 if (dump_file)
5771 fprintf (dump_file, "Setting value range of param %u "
5772 "(now %i) ", i, remapped_idx);
5773 fprintf (dump_file, "%s[",
5774 (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
5775 print_decs (vr[i].min, dump_file);
5776 fprintf (dump_file, ", ");
5777 print_decs (vr[i].max, dump_file);
5778 fprintf (dump_file, "]\n");
5780 set_range_info (ddef, vr[i].type,
5781 wide_int_storage::from (vr[i].min, prec,
5782 TYPE_SIGN (type)),
5783 wide_int_storage::from (vr[i].max, prec,
5784 TYPE_SIGN (type)));
5786 else if (POINTER_TYPE_P (TREE_TYPE (ddef))
5787 && vr[i].nonzero_p (TREE_TYPE (ddef)))
5789 if (dump_file)
5790 fprintf (dump_file, "Setting nonnull for %u\n", i);
5791 set_ptr_nonnull (ddef);
5797 /* IPCP transformation phase doing propagation of aggregate values. */
5799 unsigned int
5800 ipcp_transform_function (struct cgraph_node *node)
5802 vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
5803 struct ipa_func_body_info fbi;
5804 struct ipa_agg_replacement_value *aggval;
5805 int param_count;
5806 bool cfg_changed = false, something_changed = false;
5808 gcc_checking_assert (cfun);
5809 gcc_checking_assert (current_function_decl);
5811 if (dump_file)
5812 fprintf (dump_file, "Modification phase of node %s\n",
5813 node->dump_name ());
5815 ipcp_update_bits (node);
5816 ipcp_update_vr (node);
5817 aggval = ipa_get_agg_replacements_for_node (node);
5818 if (!aggval)
5819 return 0;
5820 param_count = count_formal_params (node->decl);
5821 if (param_count == 0)
5822 return 0;
5823 adjust_agg_replacement_values (node, aggval);
5824 if (dump_file)
5825 ipa_dump_agg_replacement_values (dump_file, aggval);
5827 fbi.node = node;
5828 fbi.info = NULL;
5829 fbi.bb_infos = vNULL;
5830 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
5831 fbi.param_count = param_count;
5832 fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);
5834 vec_safe_grow_cleared (descriptors, param_count, true);
5835 ipa_populate_param_decls (node, *descriptors);
5836 calculate_dominance_info (CDI_DOMINATORS);
5837 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5838 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5840 int i;
5841 struct ipa_bb_info *bi;
5842 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5843 free_ipa_bb_info (bi);
5844 fbi.bb_infos.release ();
5845 free_dominance_info (CDI_DOMINATORS);
5847 ipcp_transformation *s = ipcp_transformation_sum->get (node);
5848 s->agg_values = NULL;
5849 s->bits = NULL;
5850 s->m_vr = NULL;
5852 vec_free (descriptors);
5854 if (!something_changed)
5855 return 0;
5857 if (cfg_changed)
5858 delete_unreachable_blocks_update_callgraph (node, false);
5860 return TODO_update_ssa_only_virtuals;
5864 /* Return true if OTHER describes the same aggregate value. */
5865 bool
5866 ipa_agg_value::equal_to (const ipa_agg_value &other)
5868 return offset == other.offset
5869 && operand_equal_p (value, other.value, 0);
5872 /* Destructor that also releases the individual aggregate values. */
5874 ipa_auto_call_arg_values::~ipa_auto_call_arg_values ()
5876 ipa_release_agg_values (m_known_aggs, false);
5881 #include "gt-ipa-prop.h"