/* Interprocedural analyses.
   Copyright (C) 2005-2023 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "calls.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-inline.h"
#include "ipa-fnsummary.h"
#include "gimple-pretty-print.h"
#include "ipa-utils.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"
#include "tree-cfgcleanup.h"
#include "options.h"
#include "symtab-clones.h"
#include "attr-fnspec.h"
#include "gimple-range.h"
#include "value-range-storage.h"

/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;

function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;

/* Edge summary for IPA-CP edge information.  */
ipa_edge_args_sum_t *ipa_edge_args_sum;

/* Traits for a hash table for reusing ranges.  */

struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <ipa_vr *>
{
  typedef ipa_vr *value_type;
  typedef const vrange *compare_type;
  static hashval_t
  hash (const ipa_vr *p)
    {
      // This never gets called, except in the verification code, as
      // ipa_get_value_range() calculates the hash itself.  This
      // function is mostly here for completeness' sake.
      Value_Range vr;
      p->get_vrange (vr);
      inchash::hash hstate;
      add_vrange (vr, hstate);
      return hstate.end ();
    }
  static bool
  equal (const ipa_vr *a, const vrange *b)
    {
      return a->equal_p (*b);
    }
  static const bool empty_zero_p = true;
  static void
  mark_empty (ipa_vr *&p)
    {
      p = NULL;
    }
  static bool
  is_empty (const ipa_vr *p)
    {
      return p == NULL;
    }
  static bool
  is_deleted (const ipa_vr *p)
    {
      return p == reinterpret_cast<const ipa_vr *> (1);
    }
  static void
  mark_deleted (ipa_vr *&p)
    {
      p = reinterpret_cast<ipa_vr *> (1);
    }
};

/* Hash table to avoid repeated allocations of equal ranges.  */
static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;

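/* A minimal sketch of how the cache is meant to be consulted (illustrative
   only, not code from this file; the actual consumer is ipa_get_value_range,
   which computes the hash itself as noted above):

     inchash::hash hstate;
     add_vrange (vr, hstate);
     ipa_vr **slot
       = ipa_vr_hash_table->find_slot_with_hash (&vr, hstate.end (), INSERT);
     if (!*slot)
       *slot = new (ggc_alloc<ipa_vr> ()) ipa_vr (vr);
     return *slot;

   so structurally equal ranges end up sharing a single GC-allocated
   ipa_vr.  */
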
/* Holders of ipa cgraph hooks: */
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");

ipa_vr::ipa_vr ()
  : m_storage (NULL),
    m_type (NULL)
{
}

ipa_vr::ipa_vr (const vrange &r)
  : m_storage (ggc_alloc_vrange_storage (r)),
    m_type (r.type ())
{
}

bool
ipa_vr::equal_p (const vrange &r) const
{
  gcc_checking_assert (!r.undefined_p ());
  return (types_compatible_p (m_type, r.type ()) && m_storage->equal_p (r));
}

void
ipa_vr::get_vrange (Value_Range &r) const
{
  r.set_type (m_type);
  m_storage->get_vrange (r, m_type);
}

void
ipa_vr::set_unknown ()
{
  if (m_storage)
    ggc_free (m_storage);

  m_storage = NULL;
}

void
ipa_vr::streamer_read (lto_input_block *ib, data_in *data_in)
{
  struct bitpack_d bp = streamer_read_bitpack (ib);
  bool known = bp_unpack_value (&bp, 1);
  if (known)
    {
      Value_Range vr;
      streamer_read_value_range (ib, data_in, vr);
      if (!m_storage || !m_storage->fits_p (vr))
	{
	  if (m_storage)
	    ggc_free (m_storage);
	  m_storage = ggc_alloc_vrange_storage (vr);
	}
      m_storage->set_vrange (vr);
      m_type = vr.type ();
    }
  else
    {
      m_storage = NULL;
      m_type = NULL;
    }
}

void
ipa_vr::streamer_write (output_block *ob) const
{
  struct bitpack_d bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, !!m_storage, 1);
  streamer_write_bitpack (&bp);
  if (m_storage)
    {
      Value_Range vr (m_type);
      m_storage->get_vrange (vr, m_type);
      streamer_write_vrange (ob, vr);
    }
}

void
ipa_vr::dump (FILE *out) const
{
  if (known_p ())
    {
      Value_Range vr (m_type);
      m_storage->get_vrange (vr, m_type);
      vr.dump (out);
    }
  else
    fprintf (out, "NO RANGE");
}

// These stubs are because we use an ipa_vr in a hash_traits and
// hash-traits.h defines an extern of gt_ggc_mx (T &) instead of
// picking up the gt_ggc_mx (T *) version.
void
gt_pch_nx (ipa_vr *&x)
{
  return gt_pch_nx ((ipa_vr *) x);
}

void
gt_ggc_mx (ipa_vr *&x)
{
  return gt_ggc_mx ((ipa_vr *) x);
}

/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
			    tree ptree)
{
  int i, count;

  count = vec_safe_length (descriptors);
  for (i = 0; i < count; i++)
    if ((*descriptors)[i].decl_or_type == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (class ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}

/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor, va_gc> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl_or_type = parm;
      unsigned int cost = estimate_move_cost (TREE_TYPE (parm), true);
      descriptors[param_num].move_cost = cost;
      /* Watch overflow, move_cost is a bitfield.  */
      gcc_checking_assert (cost == descriptors[param_num].move_cost);
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Dump the Ith formal parameter of the function corresponding to INFO to
   FILE.  Note there is no setter function as the descriptor array is built
   just once using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, class ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if ((*info->descriptors)[i].decl_or_type)
    {
      fprintf (file, " ");
      print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
    }
}

/* If necessary, allocate vector of parameter descriptors in info of NODE.
   Return true if they were allocated, false if not.  */

static bool
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  ipa_node_params *info = ipa_node_params_sum->get_create (node);

  if (!info->descriptors && param_count)
    {
      vec_safe_grow_cleared (info->descriptors, param_count, true);
      return true;
    }
  else
    return false;
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  ipa_node_params *info = ipa_node_params_sum->get_create (node);

  if (!info->descriptors
      && ipa_alloc_node_params (node, count_formal_params (node->decl)))
    ipa_populate_param_decls (node, *info->descriptors);
}

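/* Hedged usage sketch (not code from this file; actual callers live in the
   IPA passes such as ipa-cp.cc): once the descriptors exist, parameters are
   accessed through the summary:

     ipa_initialize_node_params (node);
     ipa_node_params *info = ipa_node_params_sum->get (node);
     for (int i = 0; i < ipa_get_param_count (info); i++)
       ipa_dump_param (dump_file, info, i);
*/
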
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  ipa_edge_args *args = ipa_edge_args_sum->get (cs);
  int count = ipa_get_cs_argument_count (args);

  for (int i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (args, i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f, jump_func->value.pass_through.operand);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  if (jump_func->value.pass_through.refdesc_decremented)
	    fprintf (f, ", refdesc_decremented");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  if (jump_func->value.ancestor.keep_null)
	    fprintf (f, ", keep_null");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_ELT (*jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      fprintf (f, "type: ");
	      print_generic_expr (f, item->type);
	      fprintf (f, ", ");
	      if (item->jftype == IPA_JF_PASS_THROUGH)
		fprintf (f, "PASS THROUGH: %d,",
			 item->value.pass_through.formal_id);
	      else if (item->jftype == IPA_JF_LOAD_AGG)
		{
		  fprintf (f, "LOAD AGG: %d",
			   item->value.pass_through.formal_id);
		  fprintf (f, " [offset: " HOST_WIDE_INT_PRINT_DEC ", by %s],",
			   item->value.load_agg.offset,
			   item->value.load_agg.by_ref ? "reference"
						       : "value");
		}

	      if (item->jftype == IPA_JF_PASS_THROUGH
		  || item->jftype == IPA_JF_LOAD_AGG)
		{
		  fprintf (f, " op %s",
			   get_tree_code_name (item->value.pass_through.operation));
		  if (item->value.pass_through.operation != NOP_EXPR)
		    {
		      fprintf (f, " ");
		      print_generic_expr (f, item->value.pass_through.operand);
		    }
		}
	      else if (item->jftype == IPA_JF_CONST)
		{
		  fprintf (f, "CONST: ");
		  print_generic_expr (f, item->value.constant);
		}
	      else if (item->jftype == IPA_JF_UNKNOWN)
		fprintf (f, "UNKNOWN: " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->type)));
	      fprintf (f, "\n");
	    }
	}

      class ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (args, i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, "         Context: ");
	  ctx->dump (dump_file);
	}

      if (jump_func->m_vr)
	{
	  jump_func->m_vr->dump (f);
	  fprintf (f, "\n");
	}
      else
	fprintf (f, "         Unknown VR\n");
    }
}

/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s:\n", node->dump_name ());
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      fprintf (f, "    callsite  %s -> %s : \n",
	       node->dump_name (),
	       cs->callee->dump_name ());
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	fprintf (f, "       no arg info\n");
      else
	ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      class cgraph_indirect_call_info *ii;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by_value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	fprintf (f, "       no arg info\n");
      else
	ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

/* Set JFUNC to be a know-really-nothing jump function.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
}

/* Set JFUNC to be a copy of another jump function (to be used by jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && (TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL
	  || (VAR_P (TREE_OPERAND (constant, 0))
	      && TREE_STATIC (TREE_OPERAND (constant, 0)))))
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}

/* Set JFUNC to be a simple pass-through jump function.  */
static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
  jfunc->value.pass_through.refdesc_decremented = false;
}

/* Set JFUNC to be a unary pass-through jump function.  */

static void
ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
  jfunc->value.pass_through.refdesc_decremented = false;
}

/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
  jfunc->value.pass_through.refdesc_decremented = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved, bool keep_null)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
  jfunc->value.ancestor.keep_null = keep_null;
}

/* Get IPA BB information about the given BB.  FBI is the context of the
   analysis of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructor of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

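/* For illustration (a hand-written sketch, not compiler output), a
   constructor of "struct B : A" following the scheme above lowers roughly
   to:

     B::B (struct B * const this)
     {
       A::A (this);                   // 1) ancestor constructors run first
       this->_vptr.B = &_ZTV1B + 16;  // 2) then all VMT pointers are set
       this->user_field = 5;          // 3) only then user code executes
     }
*/
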
static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_ref_base_and_extent to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}

/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}

/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return true if the dynamic type may change
   between the beginning of the function and the point where CALL is invoked.

   Generally functions are not allowed to change type of such instances,
   but they may call destructors.  We assume that methods cannot destroy the
   THIS pointer.  Also as a special case, constructors and destructors may
   change the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that would require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inline cdtor is actually working on ARG, but we don't have
     easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and, once we know the type
     of the instance, match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it has changed, return true.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (ipa_func_body_info *fbi, tree arg,
				       tree base, tree comp_type, gcall *call,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  if (fbi->aa_walk_budget == 0)
    return false;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  int walked
    = walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
			  &tci, NULL, NULL, fbi->aa_walk_budget);
  if (walked >= 0)
    fbi->aa_walk_budget -= walked;
  else
    fbi->aa_walk_budget = 0;

  if (walked >= 0 && !tci.type_maybe_changed)
    return false;

  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it has, return true.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (ipa_func_body_info *fbi, tree arg, tree base,
		    tree comp_type, gcall *call,
		    HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (fbi, arg, base, comp_type,
						call, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (ipa_func_body_info *fbi, tree arg, tree comp_type,
			gcall *call)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (fbi, arg, arg, comp_type,
						call, 0);
}

/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}

/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count, true);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}

/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds the information about the function we have
   gathered so far, which does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  if (TREE_READONLY (base))
    return true;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  if (paa->parm_modified || fbi->aa_walk_budget == 0)
    return false;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget);
  if (walked < 0)
    {
      modified = true;
      fbi->aa_walk_budget = 0;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}

/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
			    vec<ipa_param_descriptor, va_gc> *descriptors,
			    gimple *stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}

/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  if (paa->ref_modified || fbi->aa_walk_budget == 0)
    return false;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget);
  if (walked < 0)
    {
      modified = true;
      fbi->aa_walk_budget = 0;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (modified)
    paa->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm)))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  if (paa->pt_modified || fbi->aa_walk_budget == 0)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget);
  if (walked < 0)
    {
      fbi->aa_walk_budget = 0;
      modified = true;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading
   data from an aggregate passed as a parameter.

   The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it
   returns false if it cannot prove that the value has not been modified
   before the load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will
   return true even if it cannot prove the value has not been modified, in
   that case it will store false to *GUARANTEED_UNMODIFIED, otherwise it will
   store true there.

   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index,
   offset within the aggregate and whether it is a load from a value passed by
   reference respectively.

   Return false if the offset divided by BITS_PER_UNIT would not fit into an
   unsigned int.  */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor, va_gc> *descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, poly_int64 *size_p,
			bool *by_ref_p, bool *guaranteed_unmodified)
{
  int index;
  HOST_WIDE_INT size;
  bool reverse;
  tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);

  if (!base
      || (*offset_p / BITS_PER_UNIT) > UINT_MAX)
    return false;

  /* We cannot propagate across volatile loads.  */
  if (TREE_THIS_VOLATILE (op))
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  if (guaranteed_unmodified)
	    *guaranteed_unmodified = true;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	   void (*<T2e4>) (struct S *) D.1867;
	   struct S * p.1;

	   <bb 2>:
	   p.1_1 = p;
	   D.1867_2 = p.1_1->f;
	   D.1867_2 ();
	   gdp = &p;
	 */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0)
    {
      bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
      if (!data_preserved && !guaranteed_unmodified)
	return false;

      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      if (guaranteed_unmodified)
	*guaranteed_unmodified = data_preserved;
      return true;
    }
  return false;
}

/* If STMT is an assignment that loads a value from a parameter declaration,
   or from an aggregate passed as the parameter either by value or reference,
   return the index of the parameter in ipa_node_params.  Otherwise return -1.

   FBI holds gathered information about the function.  INFO describes
   parameters of the function, STMT is the assignment statement.  If it is a
   memory load from an aggregate, *OFFSET_P is filled with offset within the
   aggregate, and *BY_REF_P specifies whether the aggregate is passed by
   reference.  */

static int
load_from_unmodified_param_or_agg (struct ipa_func_body_info *fbi,
				   class ipa_node_params *info,
				   gimple *stmt,
				   HOST_WIDE_INT *offset_p,
				   bool *by_ref_p)
{
  int index = load_from_unmodified_param (fbi, info->descriptors, stmt);
  poly_int64 size;

  /* Load value from a parameter declaration.  */
  if (index >= 0)
    {
      *offset_p = -1;
      return index;
    }

  if (!gimple_assign_load_p (stmt))
    return -1;

  tree rhs = gimple_assign_rhs1 (stmt);

  /* Skip memory reference containing VIEW_CONVERT_EXPR.  */
  for (tree t = rhs; handled_component_p (t); t = TREE_OPERAND (t, 0))
    if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
      return -1;

  /* Skip memory reference containing bit-field.  */
  if (TREE_CODE (rhs) == BIT_FIELD_REF
      || contains_bitfld_component_ref_p (rhs))
    return -1;

  if (!ipa_load_from_parm_agg (fbi, info->descriptors, stmt, rhs, &index,
			       offset_p, &size, by_ref_p))
    return -1;

  gcc_assert (!maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (rhs))),
			 size));
  if (!*by_ref_p)
    {
      tree param_type = ipa_get_type (info, index);

      if (!param_type || !AGGREGATE_TYPE_P (param_type))
	return -1;
    }
  else if (TREE_THIS_VOLATILE (rhs))
    return -1;

  return index;
}

/* Walk pointer adjustments from OP (such as POINTER_PLUS and ADDR_EXPR)
   to find the original pointer.  Initialize RET to the pointer which results
   from the walk.
   If the offset is known, return true and initialize OFFSET_RET.  */

bool
unadjusted_ptr_and_unit_offset (tree op, tree *ret, poly_int64 *offset_ret)
{
  poly_int64 offset = 0;
  bool offset_known = true;
  int i;

  for (i = 0; i < param_ipa_jump_function_lookups; i++)
    {
      if (TREE_CODE (op) == ADDR_EXPR)
	{
	  poly_int64 extra_offset = 0;
	  tree base = get_addr_base_and_unit_offset (TREE_OPERAND (op, 0),
						     &extra_offset);
	  if (!base)
	    {
	      base = get_base_address (TREE_OPERAND (op, 0));
	      if (TREE_CODE (base) != MEM_REF)
		break;
	      offset_known = false;
	    }
	  else
	    {
	      if (TREE_CODE (base) != MEM_REF)
		break;
	      offset += extra_offset;
	    }
	  op = TREE_OPERAND (base, 0);
	  if (mem_ref_offset (base).to_shwi (&extra_offset))
	    offset += extra_offset;
	  else
	    offset_known = false;
	}
      else if (TREE_CODE (op) == SSA_NAME
	       && !SSA_NAME_IS_DEFAULT_DEF (op))
	{
	  gimple *pstmt = SSA_NAME_DEF_STMT (op);

	  if (gimple_assign_single_p (pstmt))
	    op = gimple_assign_rhs1 (pstmt);
	  else if (is_gimple_assign (pstmt)
		   && gimple_assign_rhs_code (pstmt) == POINTER_PLUS_EXPR)
	    {
	      poly_int64 extra_offset = 0;
	      if (ptrdiff_tree_p (gimple_assign_rhs2 (pstmt),
				  &extra_offset))
		offset += extra_offset;
	      else
		offset_known = false;
	      op = gimple_assign_rhs1 (pstmt);
	    }
	  else
	    break;
	}
      else
	break;
    }
  *ret = op;
  *offset_ret = offset;
  return offset_known;
}

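/* Worked example (hypothetical GIMPLE): for

     b_2 = a_1(D) + 16;
     c_3 = b_2 + 8;

   a walk starting at c_3 follows both POINTER_PLUS_EXPR statements, stores
   a_1(D) into *RET and 24 into *OFFSET_RET, and returns true, provided
   param_ipa_jump_function_lookups allows at least two iterations.  */
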
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
	int a.0;

	a.0_2 = a;
	bar (a.0_2);
      }

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
	int D.2064;

	D.2064_4 = a.1(D) + 4;
	bar (D.2064_4);
      }

   This case can also occur in combination of the previous one, e.g.:

      foo (int a, int z)
      {
	int a.0;
	int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);
      }

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);
     }

   INFO is the structure describing individual parameters, accessed at
   different stages of IPA optimizations.  PARMS_AINFO contains the information
   that is only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  class ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      switch (gimple_assign_rhs_class (stmt))
	{
	case GIMPLE_BINARY_RHS:
	  {
	    tree op2 = gimple_assign_rhs2 (stmt);
	    if (!is_gimple_ip_invariant (op2)
		|| ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
		     != tcc_comparison)
		    && !useless_type_conversion_p (TREE_TYPE (name),
						   TREE_TYPE (op1))))
	      return;

	    ipa_set_jf_arith_pass_through (jfunc, index, op2,
					   gimple_assign_rhs_code (stmt));
	    break;
	  }
	case GIMPLE_SINGLE_RHS:
	  {
	    bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
						       tc_ssa);
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	    break;
	  }
	case GIMPLE_UNARY_RHS:
	  if (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
	    ipa_set_jf_unary_pass_through (jfunc, index,
					   gimple_assign_rhs_code (stmt));
	default:;
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
  offset_int mem_offset;
  if (!base
      || TREE_CODE (base) != MEM_REF
      || !mem_ref_offset (base).is_constant (&mem_offset))
    return;
  offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa),
			 false);
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);

  offset_int mem_offset;
  if (!expr
      || TREE_CODE (expr) != MEM_REF
      || !mem_ref_offset (expr).is_constant (&mem_offset))
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}

/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    class ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  gcond *cond = safe_dyn_cast <gcond *> (*gsi_last_bb (cond_bb));
  if (!cond
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm),
		       true);
}

/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}

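/* For orientation, the shape being matched is the usual C++
   pointer-to-member-function pair (an illustrative declaration, not one used
   by this code):

     struct like_member_ptr
     {
       void (S::*__pfn) ();   // pointer to METHOD_TYPE, the first field
       long __delta;          // integral this-adjustment, the second field
     };
*/
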
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement, and this statement is stored in
   *RHS_STMT.  Otherwise return RHS as it is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs, gimple **rhs_stmt)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
      *rhs_stmt = def_stmt;
    }
  return rhs;
}

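/* Example (hypothetical GIMPLE): given the copy chain

     b_2 = a_1(D);
     c_3 = b_2;

   get_ssa_def_if_simple_copy (c_3, &stmt) returns a_1(D) and leaves *RHS_STMT
   pointing at "b_2 = a_1(D)", the last defining statement visited.  */
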
/* Simple linked list, describing contents of an aggregate before call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;

  /* Type of the described part of the aggregate.  */
  tree type;

  /* Known constant value or jump function data describing contents.  */
  struct ipa_load_agg_data value;

  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};

/* Add an aggregate content item into a linked list of
   ipa_known_agg_contents_list structure, in which all elements
   are sorted by increasing offset.  */

static inline void
add_to_agg_contents_list (struct ipa_known_agg_contents_list **plist,
			  struct ipa_known_agg_contents_list *item)
{
  struct ipa_known_agg_contents_list *list = *plist;

  for (; list; list = list->next)
    {
      if (list->offset >= item->offset)
	break;

      plist = &list->next;
    }

  item->next = list;
  *plist = item;
}

/* Check whether a given aggregate content is clobbered by certain element in
   a linked list of ipa_known_agg_contents_list.  */

static inline bool
clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list *list,
				struct ipa_known_agg_contents_list *item)
{
  for (; list; list = list->next)
    {
      if (list->offset >= item->offset)
	return list->offset < item->offset + item->size;

      if (list->offset + list->size > item->offset)
	return true;
    }

  return false;
}

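/* Example: if LIST already describes bit ranges [0, 32) and [64, 96), an
   ITEM with offset 64 and size 32 overlaps the second entry and is reported
   as clobbered, whereas an ITEM with offset 32 and size 32 falls into the
   gap and the function returns false.  */
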
/* Build aggregate jump function from LIST, assuming there are exactly
   VALUE_COUNT entries there and that offset of the passed argument
   is ARG_OFFSET and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int value_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_safe_reserve (jfunc->agg.items, value_count, true);
  for (; list; list = list->next)
    {
      struct ipa_agg_jf_item item;
      tree operand = list->value.pass_through.operand;

      if (list->value.pass_through.formal_id >= 0)
	{
	  /* Content value is derived from some formal parameter.  */
	  if (list->value.offset >= 0)
	    item.jftype = IPA_JF_LOAD_AGG;
	  else
	    item.jftype = IPA_JF_PASS_THROUGH;

	  item.value.load_agg = list->value;
	  if (operand)
	    item.value.pass_through.operand
	      = unshare_expr_without_location (operand);
	}
      else if (operand)
	{
	  /* Content value is known constant.  */
	  item.jftype = IPA_JF_CONST;
	  item.value.constant = unshare_expr_without_location (operand);
	}
      else
	continue;

      item.type = list->type;
      gcc_assert (tree_to_shwi (TYPE_SIZE (list->type)) == list->size);

      item.offset = list->offset - arg_offset;
      gcc_assert ((item.offset % BITS_PER_UNIT) == 0);

      jfunc->agg.items->quick_push (item);
    }
}

/* Given an assignment statement STMT, try to collect information into
   AGG_VALUE that will be used to construct jump function for RHS of the
   assignment, from which content value of an aggregate part comes.

   Besides constant and simple pass-through jump functions, also try to
   identify whether it matches the following pattern that can be described by
   a load-value-from-aggregate jump function, which is a derivative of simple
   pass-through jump function.

     foo (int *p)
     {
       ...

       *(q_5 + 4) = *(p_3(D) + 28) op 1;
       bar (q_5);
     }

   Here IPA_LOAD_AGG_DATA data structure is informative enough to describe
   constant, simple pass-through and load-value-from-aggregate.  If value
   is constant, it will be kept in field OPERAND, and field FORMAL_ID is
   set to -1.  For simple pass-through and load-value-from-aggregate, field
   FORMAL_ID specifies the related formal parameter index, and field
   OFFSET can be used to distinguish them, -1 means simple pass-through,
   otherwise means load-value-from-aggregate.  */

1819 static void
1820 analyze_agg_content_value (struct ipa_func_body_info *fbi,
1821 struct ipa_load_agg_data *agg_value,
1822 gimple *stmt)
1824 tree lhs = gimple_assign_lhs (stmt);
1825 tree rhs1 = gimple_assign_rhs1 (stmt);
1826 enum tree_code code;
1827 int index = -1;
1829 /* Initialize jump function data for the aggregate part. */
1830 memset (agg_value, 0, sizeof (*agg_value));
1831 agg_value->pass_through.operation = NOP_EXPR;
1832 agg_value->pass_through.formal_id = -1;
1833 agg_value->offset = -1;
1835 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs)) /* TODO: Support aggregate type. */
1836 || TREE_THIS_VOLATILE (lhs)
1837 || TREE_CODE (lhs) == BIT_FIELD_REF
1838 || contains_bitfld_component_ref_p (lhs))
1839 return;
1841 /* Skip SSA copies. */
1842 while (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
1844 if (TREE_CODE (rhs1) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (rhs1))
1845 break;
1847 stmt = SSA_NAME_DEF_STMT (rhs1);
1848 if (!is_gimple_assign (stmt))
1849 break;
1851 rhs1 = gimple_assign_rhs1 (stmt);
1854 if (gphi *phi = dyn_cast<gphi *> (stmt))
1856 /* Also special case like the following (a is a formal parameter):
1858 _12 = *a_11(D).dim[0].stride;
1860 # iftmp.22_9 = PHI <_12(2), 1(3)>
1862 parm.6.dim[0].stride = iftmp.22_9;
1864 __x_MOD_foo (&parm.6, b_31(D));
1866 The aggregate function describing parm.6.dim[0].stride is encoded as a
1867 PASS-THROUGH jump function with ASSERT_EXPR operation whith operand 1
1868 (the constant from the PHI node). */
1870 if (gimple_phi_num_args (phi) != 2)
1871 return;
1872 tree arg0 = gimple_phi_arg_def (phi, 0);
1873 tree arg1 = gimple_phi_arg_def (phi, 1);
1874 tree operand;
1876 if (is_gimple_ip_invariant (arg1))
1878 operand = arg1;
1879 rhs1 = arg0;
1881 else if (is_gimple_ip_invariant (arg0))
1883 operand = arg0;
1884 rhs1 = arg1;
1886 else
1887 return;
1889 rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
1890 if (!is_gimple_assign (stmt))
1891 return;
1893 code = ASSERT_EXPR;
1894 agg_value->pass_through.operand = operand;
1896 else if (is_gimple_assign (stmt))
1898 code = gimple_assign_rhs_code (stmt);
1899 switch (gimple_assign_rhs_class (stmt))
1901 case GIMPLE_SINGLE_RHS:
1902 if (is_gimple_ip_invariant (rhs1))
1904 agg_value->pass_through.operand = rhs1;
1905 return;
1907 code = NOP_EXPR;
1908 break;
1910 case GIMPLE_UNARY_RHS:
1911 /* NOTE: A GIMPLE_UNARY_RHS operation might not be tcc_unary
1912 (truth_not_expr is example), GIMPLE_BINARY_RHS does not imply
1913 tcc_binary, this subtleness is somewhat misleading.
1915 Since tcc_unary is widely used in IPA-CP code to check an operation
1916 with one operand, here we only allow tc_unary operation to avoid
1917 possible problem. Then we can use (opclass == tc_unary) or not to
1918 distinguish unary and binary. */
1919 if (TREE_CODE_CLASS (code) != tcc_unary || CONVERT_EXPR_CODE_P (code))
1920 return;
1922 rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
1923 break;
1925 case GIMPLE_BINARY_RHS:
1927 gimple *rhs1_stmt = stmt;
1928 gimple *rhs2_stmt = stmt;
1929 tree rhs2 = gimple_assign_rhs2 (stmt);
1931 rhs1 = get_ssa_def_if_simple_copy (rhs1, &rhs1_stmt);
1932 rhs2 = get_ssa_def_if_simple_copy (rhs2, &rhs2_stmt);
1934 if (is_gimple_ip_invariant (rhs2))
1936 agg_value->pass_through.operand = rhs2;
1937 stmt = rhs1_stmt;
1939 else if (is_gimple_ip_invariant (rhs1))
1941 if (TREE_CODE_CLASS (code) == tcc_comparison)
1942 code = swap_tree_comparison (code);
1943 else if (!commutative_tree_code (code))
1944 return;
1946 agg_value->pass_through.operand = rhs1;
1947 stmt = rhs2_stmt;
1948 rhs1 = rhs2;
1950 else
1951 return;
1953 if (TREE_CODE_CLASS (code) != tcc_comparison
1954 && !useless_type_conversion_p (TREE_TYPE (lhs),
1955 TREE_TYPE (rhs1)))
1956 return;
1958 break;
1960 default:
1961 return;
1964 else
1965 return;
1967 if (TREE_CODE (rhs1) != SSA_NAME)
1968 index = load_from_unmodified_param_or_agg (fbi, fbi->info, stmt,
1969 &agg_value->offset,
1970 &agg_value->by_ref);
1971 else if (SSA_NAME_IS_DEFAULT_DEF (rhs1))
1972 index = ipa_get_param_decl_index (fbi->info, SSA_NAME_VAR (rhs1));
1974 if (index >= 0)
1976 if (agg_value->offset >= 0)
1977 agg_value->type = TREE_TYPE (rhs1);
1978 agg_value->pass_through.formal_id = index;
1979 agg_value->pass_through.operation = code;
1981 else
1982 agg_value->pass_through.operand = NULL_TREE;
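/* A hypothetical illustration of the analysis above: given a store whose
   value comes from arithmetic on the first formal parameter X,

     _3 = x_1(D) + 7;
     s.f = _3;

   the stored value is described as a PLUS_EXPR pass-through of formal 0
   with constant operand 7 (the names and numbers here are made up).  */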
1985 /* If STMT is a memory store to the object whose address is BASE, extract
1986 information (offset, size, and value) into CONTENT, and return true,
1987 otherwise we conservatively assume the whole object is modified with
1988 unknown content, and return false. CHECK_REF means that access to the
1989 object is expected to be in the form of a MEM_REF expression. */
1991 static bool
1992 extract_mem_content (struct ipa_func_body_info *fbi,
1993 gimple *stmt, tree base, bool check_ref,
1994 struct ipa_known_agg_contents_list *content)
1996 HOST_WIDE_INT lhs_offset, lhs_size;
1997 bool reverse;
1999 if (!is_gimple_assign (stmt))
2000 return false;
2002 tree lhs = gimple_assign_lhs (stmt);
2003 tree lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset, &lhs_size,
2004 &reverse);
2005 if (!lhs_base)
2006 return false;
2008 if (check_ref)
2010 if (TREE_CODE (lhs_base) != MEM_REF
2011 || TREE_OPERAND (lhs_base, 0) != base
2012 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
2013 return false;
2015 else if (lhs_base != base)
2016 return false;
2018 content->offset = lhs_offset;
2019 content->size = lhs_size;
2020 content->type = TREE_TYPE (lhs);
2021 content->next = NULL;
2023 analyze_agg_content_value (fbi, &content->value, stmt);
2024 return true;
2027 /* Traverse statements from CALL backwards, scanning whether an aggregate given
2028 in ARG is filled in with constants or values that are derived from the caller's
2029 formal parameter in the way described by some kinds of jump functions. FBI
2030 is the context of the caller function for interprocedural analysis. ARG can
2031 either be an aggregate expression or a pointer to an aggregate. ARG_TYPE is
2032 the type of the aggregate, JFUNC is the jump function for the aggregate. */
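/* As a sketch of what the stages below recover, consider a hypothetical
   caller (assuming foo takes a struct S * parameter):

     struct S { int a; int b; };
     void bar (int x)
     {
       struct S s;
       s.a = 5;     /* becomes a constant aggregate item */
       s.b = x;     /* becomes a pass-through item of formal 0 */
       foo (&s);
     }
*/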
2034 static void
2035 determine_known_aggregate_parts (struct ipa_func_body_info *fbi,
2036 gcall *call, tree arg,
2037 tree arg_type,
2038 struct ipa_jump_func *jfunc)
2040 struct ipa_known_agg_contents_list *list = NULL, *all_list = NULL;
2041 bitmap visited = NULL;
2042 int item_count = 0, value_count = 0;
2043 HOST_WIDE_INT arg_offset, arg_size;
2044 tree arg_base;
2045 bool check_ref, by_ref;
2046 ao_ref r;
2047 int max_agg_items = opt_for_fn (fbi->node->decl, param_ipa_max_agg_items);
2049 if (max_agg_items == 0)
2050 return;
2052 /* The function operates in three stages. First, we prepare check_ref, r,
2053 arg_base and arg_offset based on the form of the actual argument. */
2056 if (POINTER_TYPE_P (arg_type))
2058 by_ref = true;
2059 if (TREE_CODE (arg) == SSA_NAME)
2061 tree type_size;
2062 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type)))
2063 || !POINTER_TYPE_P (TREE_TYPE (arg)))
2064 return;
2065 check_ref = true;
2066 arg_base = arg;
2067 arg_offset = 0;
2068 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
2069 arg_size = tree_to_uhwi (type_size);
2070 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
2072 else if (TREE_CODE (arg) == ADDR_EXPR)
2074 bool reverse;
2076 arg = TREE_OPERAND (arg, 0);
2077 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
2078 &arg_size, &reverse);
2079 if (!arg_base)
2080 return;
2081 if (DECL_P (arg_base))
2083 check_ref = false;
2084 ao_ref_init (&r, arg_base);
2086 else
2087 return;
2089 else
2090 return;
2092 else
2094 bool reverse;
2096 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
2098 by_ref = false;
2099 check_ref = false;
2100 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
2101 &arg_size, &reverse);
2102 if (!arg_base)
2103 return;
2105 ao_ref_init (&r, arg);
2108 /* The second stage traverses the virtual SSA web backwards starting from
2109 the call statement, looking only at individual dominating virtual operands
2110 (those whose definition dominates the call). As long as it is confident
2111 that the content of the aggregate is affected by the definition of the
2112 virtual operand, it builds a sorted linked list of
2113 ipa_known_agg_contents_list entries describing that. */
2114 for (tree dom_vuse = gimple_vuse (call);
2115 dom_vuse && fbi->aa_walk_budget > 0;)
2117 gimple *stmt = SSA_NAME_DEF_STMT (dom_vuse);
2119 if (gimple_code (stmt) == GIMPLE_PHI)
2121 dom_vuse = get_continuation_for_phi (stmt, &r, true,
2122 fbi->aa_walk_budget,
2123 &visited, false, NULL, NULL);
2124 continue;
2127 fbi->aa_walk_budget--;
2128 if (stmt_may_clobber_ref_p_1 (stmt, &r))
2130 struct ipa_known_agg_contents_list *content
2131 = XALLOCA (struct ipa_known_agg_contents_list);
2133 if (!extract_mem_content (fbi, stmt, arg_base, check_ref, content))
2134 break;
2136 /* Now we have a dominating virtual operand, and need to check
2137 whether its value is clobbered by any other dominating one. */
2138 if ((content->value.pass_through.formal_id >= 0
2139 || content->value.pass_through.operand)
2140 && !clobber_by_agg_contents_list_p (all_list, content)
2141 /* Since IPA-CP stores results with unsigned int offsets, we can
2142 discard those which would not fit now before we stream them to
2143 WPA. */
2144 && (content->offset + content->size - arg_offset
2145 <= (HOST_WIDE_INT) UINT_MAX * BITS_PER_UNIT))
2147 struct ipa_known_agg_contents_list *copy
2148 = XALLOCA (struct ipa_known_agg_contents_list);
2150 /* Add to the list consisting only of dominating virtual
2151 operands whose definitions can actually reach the call. */
2152 add_to_agg_contents_list (&list, (*copy = *content, copy));
2154 if (++value_count == max_agg_items)
2155 break;
2158 /* Add to the list consisting of all dominating virtual operands. */
2159 add_to_agg_contents_list (&all_list, content);
2161 if (++item_count == 2 * max_agg_items)
2162 break;
2164 dom_vuse = gimple_vuse (stmt);
2167 if (visited)
2168 BITMAP_FREE (visited);
2170 /* The third stage just goes over the list and creates an appropriate vector
2171 of ipa_agg_jf_item structures out of it, of course only if there are
2172 any meaningful items to begin with. */
2174 if (value_count)
2176 jfunc->agg.by_ref = by_ref;
2177 build_agg_jump_func_from_list (list, value_count, arg_offset, jfunc);
2182 /* Return the type of the Ith formal parameter of the callee associated
2183 with call graph edge E. */
2185 tree
2186 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
2188 int n;
2189 tree type = (e->callee
2190 ? TREE_TYPE (e->callee->decl)
2191 : gimple_call_fntype (e->call_stmt));
2192 tree t = TYPE_ARG_TYPES (type);
2194 for (n = 0; n < i; n++)
2196 if (!t)
2197 break;
2198 t = TREE_CHAIN (t);
2200 if (t && t != void_list_node)
2201 return TREE_VALUE (t);
2202 if (!e->callee)
2203 return NULL;
2204 t = DECL_ARGUMENTS (e->callee->decl);
2205 for (n = 0; n < i; n++)
2207 if (!t)
2208 return NULL;
2209 t = TREE_CHAIN (t);
2211 if (t)
2212 return TREE_TYPE (t);
2213 return NULL;
2216 /* Return a pointer to an ipa_vr just like TMP, but either find it in
2217 ipa_vr_hash_table or allocate it in GC memory. */
2219 static ipa_vr *
2220 ipa_get_value_range (const vrange &tmp)
2222 inchash::hash hstate;
2223 inchash::add_vrange (tmp, hstate);
2224 hashval_t hash = hstate.end ();
2225 ipa_vr **slot = ipa_vr_hash_table->find_slot_with_hash (&tmp, hash, INSERT);
2226 if (*slot)
2227 return *slot;
2229 ipa_vr *vr = new (ggc_alloc<ipa_vr> ()) ipa_vr (tmp);
2230 *slot = vr;
2231 return vr;
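/* Note that the hash-consing above means that equal ranges share a single
   ipa_vr, e.g. (hypothetically, with vr2 equal to vr1):

     ipa_vr *a = ipa_get_value_range (vr1);
     ipa_vr *b = ipa_get_value_range (vr2);
     gcc_checking_assert (a == b);
*/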
2234 /* Assign to JF a pointer to a range just like TMP but either fetch a
2235 copy from ipa_vr_hash_table or allocate a new one in GC memory. */
2237 static void
2238 ipa_set_jfunc_vr (ipa_jump_func *jf, const vrange &tmp)
2240 jf->m_vr = ipa_get_value_range (tmp);
2243 static void
2244 ipa_set_jfunc_vr (ipa_jump_func *jf, const ipa_vr &vr)
2246 Value_Range tmp;
2247 vr.get_vrange (tmp);
2248 ipa_set_jfunc_vr (jf, tmp);
2251 /* Compute jump function for all arguments of callsite CS and insert the
2252 information in the jump_functions array in the ipa_edge_args corresponding
2253 to this callsite. */
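/* For instance, for a hypothetical caller

     void caller (int x) { callee (x, 7); }

   the first argument yields a simple pass-through jump function of formal 0
   and the second a constant jump function holding 7.  */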
2255 static void
2256 ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
2257 struct cgraph_edge *cs)
2259 ipa_node_params *info = ipa_node_params_sum->get (cs->caller);
2260 ipa_edge_args *args = ipa_edge_args_sum->get_create (cs);
2261 gcall *call = cs->call_stmt;
2262 int n, arg_num = gimple_call_num_args (call);
2263 bool useful_context = false;
2265 if (arg_num == 0 || args->jump_functions)
2266 return;
2267 vec_safe_grow_cleared (args->jump_functions, arg_num, true);
2268 if (flag_devirtualize)
2269 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num, true);
2271 if (gimple_call_internal_p (call))
2272 return;
2273 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
2274 return;
2276 for (n = 0; n < arg_num; n++)
2278 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
2279 tree arg = gimple_call_arg (call, n);
2280 tree param_type = ipa_get_callee_param_type (cs, n);
2281 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
2283 tree instance;
2284 class ipa_polymorphic_call_context context (cs->caller->decl,
2285 arg, cs->call_stmt,
2286 &instance);
2287 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt,
2288 &fbi->aa_walk_budget);
2289 *ipa_get_ith_polymorhic_call_context (args, n) = context;
2290 if (!context.useless_p ())
2291 useful_context = true;
2294 Value_Range vr (TREE_TYPE (arg));
2295 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2297 bool addr_nonzero = false;
2298 bool strict_overflow = false;
2300 if (TREE_CODE (arg) == SSA_NAME
2301 && param_type
2302 && get_range_query (cfun)->range_of_expr (vr, arg, cs->call_stmt)
2303 && vr.nonzero_p ())
2304 addr_nonzero = true;
2305 else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
2306 addr_nonzero = true;
2308 if (addr_nonzero)
2309 vr.set_nonzero (TREE_TYPE (arg));
2311 unsigned HOST_WIDE_INT bitpos;
2312 unsigned align, prec = TYPE_PRECISION (TREE_TYPE (arg));
2314 get_pointer_alignment_1 (arg, &align, &bitpos);
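/* Worked example with hypothetical numbers: if the alignment is 64 bits and
   BITPOS is 32, then for a 32-bit pointer the mask below marks the low three
   bits as known and the value records 4, i.e. the pointer is known to be
   congruent to 4 mod 8.  */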
2316 if (align > BITS_PER_UNIT
2317 && opt_for_fn (cs->caller->decl, flag_ipa_bit_cp))
2319 wide_int mask
2320 = wi::bit_and_not (wi::mask (prec, false, prec),
2321 wide_int::from (align / BITS_PER_UNIT - 1,
2322 prec, UNSIGNED));
2323 wide_int value = wide_int::from (bitpos / BITS_PER_UNIT, prec,
2324 UNSIGNED);
2325 irange_bitmask bm (value, mask);
2326 if (!addr_nonzero)
2327 vr.set_varying (TREE_TYPE (arg));
2328 irange &r = as_a <irange> (vr);
2329 r.update_bitmask (bm);
2330 ipa_set_jfunc_vr (jfunc, vr);
2332 else if (addr_nonzero)
2333 ipa_set_jfunc_vr (jfunc, vr);
2334 else
2335 gcc_assert (!jfunc->m_vr);
2337 else
2339 if (param_type
2340 && Value_Range::supports_type_p (TREE_TYPE (arg))
2341 && Value_Range::supports_type_p (param_type)
2342 && irange::supports_p (TREE_TYPE (arg))
2343 && irange::supports_p (param_type)
2344 && get_range_query (cfun)->range_of_expr (vr, arg, cs->call_stmt)
2345 && !vr.undefined_p ())
2347 Value_Range resvr (vr);
2348 range_cast (resvr, param_type);
2349 if (!resvr.undefined_p () && !resvr.varying_p ())
2350 ipa_set_jfunc_vr (jfunc, resvr);
2351 else
2352 gcc_assert (!jfunc->m_vr);
2354 else
2355 gcc_assert (!jfunc->m_vr);
2358 if (is_gimple_ip_invariant (arg)
2359 || (VAR_P (arg)
2360 && is_global_var (arg)
2361 && TREE_READONLY (arg)))
2362 ipa_set_jf_constant (jfunc, arg, cs);
2363 else if (!is_gimple_reg_type (TREE_TYPE (arg))
2364 && TREE_CODE (arg) == PARM_DECL)
2366 int index = ipa_get_param_decl_index (info, arg);
2368 gcc_assert (index >= 0);
2369 /* Aggregate passed by value, check for pass-through, otherwise we
2370 will attempt to fill in aggregate contents later in this
2371 loop. */
2372 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
2374 ipa_set_jf_simple_pass_through (jfunc, index, false);
2375 continue;
2378 else if (TREE_CODE (arg) == SSA_NAME)
2380 if (SSA_NAME_IS_DEFAULT_DEF (arg))
2382 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
2383 if (index >= 0)
2385 bool agg_p;
2386 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
2387 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
2390 else
2392 gimple *stmt = SSA_NAME_DEF_STMT (arg);
2393 if (is_gimple_assign (stmt))
2394 compute_complex_assign_jump_func (fbi, info, jfunc,
2395 call, stmt, arg, param_type);
2396 else if (gimple_code (stmt) == GIMPLE_PHI)
2397 compute_complex_ancestor_jump_func (fbi, info, jfunc,
2398 call,
2399 as_a <gphi *> (stmt));
2403 /* If ARG is a pointer, we cannot use its type to determine the type of the
2404 aggregate passed (because type conversions are ignored in gimple). Usually
2405 we can safely get the type from the function declaration, but in the case of
2406 K&R prototypes or variadic functions we can try our luck with the type of the
2407 pointer passed. TODO: Since we look for actual initialization of the memory
2408 object, we might do better to work out the type based on the memory stores we find. */
2409 if (!param_type)
2410 param_type = TREE_TYPE (arg);
2412 if ((jfunc->type != IPA_JF_PASS_THROUGH
2413 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
2414 && (jfunc->type != IPA_JF_ANCESTOR
2415 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
2416 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
2417 || POINTER_TYPE_P (param_type)))
2418 determine_known_aggregate_parts (fbi, call, arg, param_type, jfunc);
2420 if (!useful_context)
2421 vec_free (args->polymorphic_call_contexts);
2424 /* Compute jump functions for all edges - both direct and indirect - outgoing
2425 from BB. */
2427 static void
2428 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
2430 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2431 int i;
2432 struct cgraph_edge *cs;
2434 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
2436 struct cgraph_node *callee = cs->callee;
2438 if (callee)
2440 callee = callee->ultimate_alias_target ();
2441 /* We do not need to bother analyzing calls to unknown functions
2442 unless they may become known during lto/whopr. */
2443 if (!callee->definition && !flag_lto
2444 && !gimple_call_fnspec (cs->call_stmt).known_p ())
2445 continue;
2447 ipa_compute_jump_functions_for_edge (fbi, cs);
2451 /* If STMT looks like a statement loading a value from a member pointer formal
2452 parameter, return that parameter and store the offset of the field to
2453 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2454 might be clobbered). If USE_DELTA, then we look for a use of the delta
2455 field rather than the pfn. */
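/* For instance, hypothetical loads such as

     f$__pfn_3 = MEM[(struct  *)&f].__pfn;

   or, with the COMPONENT_REF folded into the offset,

     f$__pfn_3 = MEM[(struct  *)&f + 4B];

   match when f is a PARM_DECL whose type looks like a member pointer and
   the pfn field sits at the respective offset.  */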
2457 static tree
2458 ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
2459 HOST_WIDE_INT *offset_p)
2461 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2463 if (!gimple_assign_single_p (stmt))
2464 return NULL_TREE;
2466 rhs = gimple_assign_rhs1 (stmt);
2467 if (TREE_CODE (rhs) == COMPONENT_REF)
2469 ref_field = TREE_OPERAND (rhs, 1);
2470 rhs = TREE_OPERAND (rhs, 0);
2472 else
2473 ref_field = NULL_TREE;
2474 if (TREE_CODE (rhs) != MEM_REF)
2475 return NULL_TREE;
2476 rec = TREE_OPERAND (rhs, 0);
2477 if (TREE_CODE (rec) != ADDR_EXPR)
2478 return NULL_TREE;
2479 rec = TREE_OPERAND (rec, 0);
2480 if (TREE_CODE (rec) != PARM_DECL
2481 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2482 return NULL_TREE;
2483 ref_offset = TREE_OPERAND (rhs, 1);
2485 if (use_delta)
2486 fld = delta_field;
2487 else
2488 fld = ptr_field;
2489 if (offset_p)
2490 *offset_p = int_bit_position (fld);
2492 if (ref_field)
2494 if (integer_nonzerop (ref_offset))
2495 return NULL_TREE;
2496 return ref_field == fld ? rec : NULL_TREE;
2498 else
2499 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2500 : NULL_TREE;
2503 /* Returns true iff T is an SSA_NAME defined by a statement. */
2505 static bool
2506 ipa_is_ssa_with_stmt_def (tree t)
2508 if (TREE_CODE (t) == SSA_NAME
2509 && !SSA_NAME_IS_DEFAULT_DEF (t))
2510 return true;
2511 else
2512 return false;
2515 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2516 call to parameter number PARAM_INDEX. NODE is the caller. Return the
2517 indirect call graph edge.
2518 If POLYMORPHIC is true, record it as the destination of a polymorphic call. */
2520 static struct cgraph_edge *
2521 ipa_note_param_call (struct cgraph_node *node, int param_index,
2522 gcall *stmt, bool polymorphic)
2524 struct cgraph_edge *cs;
2526 cs = node->get_edge (stmt);
2527 cs->indirect_info->param_index = param_index;
2528 cs->indirect_info->agg_contents = 0;
2529 cs->indirect_info->member_ptr = 0;
2530 cs->indirect_info->guaranteed_unmodified = 0;
2531 ipa_node_params *info = ipa_node_params_sum->get (node);
2532 ipa_set_param_used_by_indirect_call (info, param_index, true);
2533 if (cs->indirect_info->polymorphic || polymorphic)
2534 ipa_set_param_used_by_polymorphic_call (info, param_index, true);
2535 return cs;
2538 /* Analyze the CALL and examine uses of formal parameters of the caller
2539 FBI->node (described by FBI->info). Currently it checks
2541 whether the call calls a pointer that is a formal parameter and if so, the
2542 parameter is marked with the called flag and an indirect call graph edge
2543 describing the call is created. This is very simple for ordinary pointers
2544 represented in SSA but not-so-nice when it comes to member pointers. The
2545 ugly part of this function does nothing more than trying to match the
2546 pattern of such a call. An example of such a pattern is the gimple dump
2547 below, the call is on the last line:
2549 <bb 2>:
2550 f$__delta_5 = f.__delta;
2551 f$__pfn_24 = f.__pfn;
2553 or
2554 <bb 2>:
2555 f$__delta_5 = MEM[(struct *)&f];
2556 f$__pfn_24 = MEM[(struct *)&f + 4B];
2558 and a few lines below:
2560 <bb 5>
2561 D.2496_3 = (int) f$__pfn_24;
2562 D.2497_4 = D.2496_3 & 1;
2563 if (D.2497_4 != 0)
2564 goto <bb 3>;
2565 else
2566 goto <bb 4>;
2568 <bb 6>:
2569 D.2500_7 = (unsigned int) f$__delta_5;
2570 D.2501_8 = &S + D.2500_7;
2571 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2572 D.2503_10 = *D.2502_9;
2573 D.2504_12 = f$__pfn_24 + -1;
2574 D.2505_13 = (unsigned int) D.2504_12;
2575 D.2506_14 = D.2503_10 + D.2505_13;
2576 D.2507_15 = *D.2506_14;
2577 iftmp.11_16 = (String:: *) D.2507_15;
2579 <bb 7>:
2580 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2581 D.2500_19 = (unsigned int) f$__delta_5;
2582 D.2508_20 = &S + D.2500_19;
2583 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2585 Such patterns are the results of simple calls to a member pointer:
2587 int doprinting (int (MyString::* f)(int) const)
2589 MyString S ("somestring");
2591 return (S.*f)(4);
2594 Moreover, the function also looks for called pointers loaded from aggregates
2595 passed by value or reference. */
2597 static void
2598 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
2599 tree target)
2601 class ipa_node_params *info = fbi->info;
2602 HOST_WIDE_INT offset;
2603 bool by_ref;
2605 if (SSA_NAME_IS_DEFAULT_DEF (target))
2607 tree var = SSA_NAME_VAR (target);
2608 int index = ipa_get_param_decl_index (info, var);
2609 if (index >= 0)
2610 ipa_note_param_call (fbi->node, index, call, false);
2611 return;
2614 int index;
2615 gimple *def = SSA_NAME_DEF_STMT (target);
2616 bool guaranteed_unmodified;
2617 if (gimple_assign_single_p (def)
2618 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2619 gimple_assign_rhs1 (def), &index, &offset,
2620 NULL, &by_ref, &guaranteed_unmodified))
2622 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2623 call, false);
2624 cs->indirect_info->offset = offset;
2625 cs->indirect_info->agg_contents = 1;
2626 cs->indirect_info->by_ref = by_ref;
2627 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
2628 return;
2631 /* Now we need to try to match the complex pattern of calling a member
2632 pointer. */
2633 if (gimple_code (def) != GIMPLE_PHI
2634 || gimple_phi_num_args (def) != 2
2635 || !POINTER_TYPE_P (TREE_TYPE (target))
2636 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2637 return;
2639 /* First, we need to check whether one of these is a load from a member
2640 pointer that is a parameter to this function. */
2641 tree n1 = PHI_ARG_DEF (def, 0);
2642 tree n2 = PHI_ARG_DEF (def, 1);
2643 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2644 return;
2645 gimple *d1 = SSA_NAME_DEF_STMT (n1);
2646 gimple *d2 = SSA_NAME_DEF_STMT (n2);
2648 tree rec;
2649 basic_block bb, virt_bb;
2650 basic_block join = gimple_bb (def);
2651 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2653 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2654 return;
2656 bb = EDGE_PRED (join, 0)->src;
2657 virt_bb = gimple_bb (d2);
2659 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2661 bb = EDGE_PRED (join, 1)->src;
2662 virt_bb = gimple_bb (d1);
2664 else
2665 return;
2667 /* Second, we need to check that the basic blocks are laid out in the way
2668 corresponding to the pattern. */
2670 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2671 || single_pred (virt_bb) != bb
2672 || single_succ (virt_bb) != join)
2673 return;
2675 /* Third, let's see that the branching is done depending on the least
2676 significant bit of the pfn. */
2678 gcond *branch = safe_dyn_cast <gcond *> (*gsi_last_bb (bb));
2679 if (!branch)
2680 return;
2682 if ((gimple_cond_code (branch) != NE_EXPR
2683 && gimple_cond_code (branch) != EQ_EXPR)
2684 || !integer_zerop (gimple_cond_rhs (branch)))
2685 return;
2687 tree cond = gimple_cond_lhs (branch);
2688 if (!ipa_is_ssa_with_stmt_def (cond))
2689 return;
2691 def = SSA_NAME_DEF_STMT (cond);
2692 if (!is_gimple_assign (def)
2693 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2694 || !integer_onep (gimple_assign_rhs2 (def)))
2695 return;
2697 cond = gimple_assign_rhs1 (def);
2698 if (!ipa_is_ssa_with_stmt_def (cond))
2699 return;
2701 def = SSA_NAME_DEF_STMT (cond);
2703 if (is_gimple_assign (def)
2704 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2706 cond = gimple_assign_rhs1 (def);
2707 if (!ipa_is_ssa_with_stmt_def (cond))
2708 return;
2709 def = SSA_NAME_DEF_STMT (cond);
2712 tree rec2;
2713 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2714 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2715 == ptrmemfunc_vbit_in_delta),
2716 NULL);
2717 if (rec != rec2)
2718 return;
2720 index = ipa_get_param_decl_index (info, rec);
2721 if (index >= 0
2722 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2724 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2725 call, false);
2726 cs->indirect_info->offset = offset;
2727 cs->indirect_info->agg_contents = 1;
2728 cs->indirect_info->member_ptr = 1;
2729 cs->indirect_info->guaranteed_unmodified = 1;
2732 return;
2735 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2736 object referenced in the expression is a formal parameter of the caller
2737 FBI->node (described by FBI->info), create a call note for the
2738 statement. */
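/* E.g. for a hypothetical gimple call such as

     OBJ_TYPE_REF (_5; (struct A) this_2(D)->0) (this_2(D));

   where this is a formal parameter, a polymorphic call note with OTR token 0
   is created, provided no dynamic type change is detected.  */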
2740 static void
2741 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2742 gcall *call, tree target)
2744 tree obj = OBJ_TYPE_REF_OBJECT (target);
2745 int index;
2746 HOST_WIDE_INT anc_offset;
2748 if (!flag_devirtualize)
2749 return;
2751 if (TREE_CODE (obj) != SSA_NAME)
2752 return;
2754 class ipa_node_params *info = fbi->info;
2755 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2757 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2758 return;
2760 anc_offset = 0;
2761 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2762 gcc_assert (index >= 0);
2763 if (detect_type_change_ssa (fbi, obj, obj_type_ref_class (target),
2764 call))
2765 return;
2767 else
2769 gimple *stmt = SSA_NAME_DEF_STMT (obj);
2770 tree expr;
2772 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2773 if (!expr)
2774 return;
2775 index = ipa_get_param_decl_index (info,
2776 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2777 gcc_assert (index >= 0);
2778 if (detect_type_change (fbi, obj, expr, obj_type_ref_class (target),
2779 call, anc_offset))
2780 return;
2783 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2784 call, true);
2785 class cgraph_indirect_call_info *ii = cs->indirect_info;
2786 ii->offset = anc_offset;
2787 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2788 ii->otr_type = obj_type_ref_class (target);
2789 ii->polymorphic = 1;
2792 /* Analyze the call statement CALL to determine whether and how it utilizes
2793 formal parameters of the caller FBI->node (described by FBI->info). */
2796 static void
2797 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2799 tree target = gimple_call_fn (call);
2801 if (!target
2802 || (TREE_CODE (target) != SSA_NAME
2803 && !virtual_method_call_p (target)))
2804 return;
2806 struct cgraph_edge *cs = fbi->node->get_edge (call);
2807 /* If we previously turned the call into a direct call, there is
2808 no need to analyze it again. */
2809 if (cs && !cs->indirect_unknown_callee)
2810 return;
2812 if (cs->indirect_info->polymorphic && flag_devirtualize)
2814 tree instance;
2815 tree target = gimple_call_fn (call);
2816 ipa_polymorphic_call_context context (current_function_decl,
2817 target, call, &instance);
2819 gcc_checking_assert (cs->indirect_info->otr_type
2820 == obj_type_ref_class (target));
2821 gcc_checking_assert (cs->indirect_info->otr_token
2822 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2824 cs->indirect_info->vptr_changed
2825 = !context.get_dynamic_type (instance,
2826 OBJ_TYPE_REF_OBJECT (target),
2827 obj_type_ref_class (target), call,
2828 &fbi->aa_walk_budget);
2829 cs->indirect_info->context = context;
2832 if (TREE_CODE (target) == SSA_NAME)
2833 ipa_analyze_indirect_call_uses (fbi, call, target);
2834 else if (virtual_method_call_p (target))
2835 ipa_analyze_virtual_call_uses (fbi, call, target);
2839 /* Analyze the statement STMT with respect to formal parameters (described
2840 in FBI->info) of the caller given by FBI->node. Currently it only checks
2841 whether formal parameters are called. */
2843 static void
2844 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2846 if (is_gimple_call (stmt))
2847 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2850 /* Callback of walk_stmt_load_store_addr_ops, used for loads, stores and
2851 address-taking alike. If OP is a parameter declaration, mark it as used
2852 in the info structure passed in DATA. */
2854 static bool
2855 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2857 class ipa_node_params *info = (class ipa_node_params *) data;
2859 op = get_base_address (op);
2860 if (op
2861 && TREE_CODE (op) == PARM_DECL)
2863 int index = ipa_get_param_decl_index (info, op);
2864 gcc_assert (index >= 0);
2865 ipa_set_param_used (info, index, true);
2868 return false;
2871 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2872 the findings in various structures of the associated ipa_node_params
2873 structure, such as parameter flags, notes etc. FBI holds various data about
2874 the function being analyzed. */
2876 static void
2877 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2879 gimple_stmt_iterator gsi;
2880 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2882 gimple *stmt = gsi_stmt (gsi);
2884 if (is_gimple_debug (stmt))
2885 continue;
2887 ipa_analyze_stmt_uses (fbi, stmt);
2888 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2889 visit_ref_for_mod_analysis,
2890 visit_ref_for_mod_analysis,
2891 visit_ref_for_mod_analysis);
2893 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2894 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2895 visit_ref_for_mod_analysis,
2896 visit_ref_for_mod_analysis,
2897 visit_ref_for_mod_analysis);
2900 /* Return true if EXPR is a load from a dereference of SSA_NAME NAME. */
2902 static bool
2903 load_from_dereferenced_name (tree expr, tree name)
2905 tree base = get_base_address (expr);
2906 return (TREE_CODE (base) == MEM_REF
2907 && TREE_OPERAND (base, 0) == name);
2910 /* Calculate controlled uses of parameters of NODE. */
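/* A use is "controlled" when we can describe all of them: the parameter only
   appears directly as a call argument, or is merely loaded from through a
   dereference. For example, in the hypothetical

     static int helper (int *p);
     int caller (int *p) { return helper (p) + helper (p); }

   p has two described call uses; storing p to memory or doing pointer
   arithmetic on it would instead yield IPA_UNDESCRIBED_USE.  */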
2912 static void
2913 ipa_analyze_controlled_uses (struct cgraph_node *node)
2915 ipa_node_params *info = ipa_node_params_sum->get (node);
2917 for (int i = 0; i < ipa_get_param_count (info); i++)
2919 tree parm = ipa_get_param (info, i);
2920 int call_uses = 0;
2921 bool load_dereferenced = false;
2923 /* For SSA regs, see if the parameter is used. For non-SSA parameters
2924 we compute the flag during modification analysis. */
2925 if (is_gimple_reg (parm))
2927 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2928 parm);
2929 if (ddef && !has_zero_uses (ddef))
2931 imm_use_iterator imm_iter;
2932 gimple *stmt;
2934 ipa_set_param_used (info, i, true);
2935 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, ddef)
2937 if (is_gimple_debug (stmt))
2938 continue;
2940 int all_stmt_uses = 0;
2941 use_operand_p use_p;
2942 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
2943 all_stmt_uses++;
2945 if (is_gimple_call (stmt))
2947 if (gimple_call_internal_p (stmt))
2949 call_uses = IPA_UNDESCRIBED_USE;
2950 break;
2952 int recognized_stmt_uses;
2953 if (gimple_call_fn (stmt) == ddef)
2954 recognized_stmt_uses = 1;
2955 else
2956 recognized_stmt_uses = 0;
2957 unsigned arg_count = gimple_call_num_args (stmt);
2958 for (unsigned i = 0; i < arg_count; i++)
2960 tree arg = gimple_call_arg (stmt, i);
2961 if (arg == ddef)
2962 recognized_stmt_uses++;
2963 else if (load_from_dereferenced_name (arg, ddef))
2965 load_dereferenced = true;
2966 recognized_stmt_uses++;
2970 if (recognized_stmt_uses != all_stmt_uses)
2972 call_uses = IPA_UNDESCRIBED_USE;
2973 break;
2975 if (call_uses >= 0)
2976 call_uses += all_stmt_uses;
2978 else if (gimple_assign_single_p (stmt))
2980 tree rhs = gimple_assign_rhs1 (stmt);
2981 if (all_stmt_uses != 1
2982 || !load_from_dereferenced_name (rhs, ddef))
2984 call_uses = IPA_UNDESCRIBED_USE;
2985 break;
2987 load_dereferenced = true;
2989 else
2991 call_uses = IPA_UNDESCRIBED_USE;
2992 break;
2996 else
2997 call_uses = 0;
2999 else
3000 call_uses = IPA_UNDESCRIBED_USE;
3001 ipa_set_controlled_uses (info, i, call_uses);
3002 ipa_set_param_load_dereferenced (info, i, load_dereferenced);
3006 /* Free stuff in BI. */
3008 static void
3009 free_ipa_bb_info (struct ipa_bb_info *bi)
3011 bi->cg_edges.release ();
3012 bi->param_aa_statuses.release ();
3015 /* Dominator walker driving the analysis. */
3017 class analysis_dom_walker : public dom_walker
3019 public:
3020 analysis_dom_walker (struct ipa_func_body_info *fbi)
3021 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
3023 edge before_dom_children (basic_block) final override;
3025 private:
3026 struct ipa_func_body_info *m_fbi;
3029 edge
3030 analysis_dom_walker::before_dom_children (basic_block bb)
3032 ipa_analyze_params_uses_in_bb (m_fbi, bb);
3033 ipa_compute_jump_functions_for_bb (m_fbi, bb);
3034 return NULL;
3037 /* Release body info FBI. */
3039 void
3040 ipa_release_body_info (struct ipa_func_body_info *fbi)
3042 int i;
3043 struct ipa_bb_info *bi;
3045 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
3046 free_ipa_bb_info (bi);
3047 fbi->bb_infos.release ();
3050 /* Initialize the array describing properties of formal parameters
3051 of NODE, analyze their uses and compute jump functions associated
3052 with actual arguments of calls from within NODE. */
3054 void
3055 ipa_analyze_node (struct cgraph_node *node)
3057 struct ipa_func_body_info fbi;
3058 class ipa_node_params *info;
3060 ipa_check_create_node_params ();
3061 ipa_check_create_edge_args ();
3062 info = ipa_node_params_sum->get_create (node);
3064 if (info->analysis_done)
3065 return;
3066 info->analysis_done = 1;
3068 if (ipa_func_spec_opts_forbid_analysis_p (node)
3069 || (count_formal_params (node->decl)
3070 >= (1 << IPA_PROP_ARG_INDEX_LIMIT_BITS)))
3072 gcc_assert (!ipa_get_param_count (info));
3073 return;
3076 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
3077 push_cfun (func);
3078 calculate_dominance_info (CDI_DOMINATORS);
3079 ipa_initialize_node_params (node);
3080 ipa_analyze_controlled_uses (node);
3082 fbi.node = node;
3083 fbi.info = info;
3084 fbi.bb_infos = vNULL;
3085 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
3086 fbi.param_count = ipa_get_param_count (info);
3087 fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);
3089 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
3091 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
3092 bi->cg_edges.safe_push (cs);
3095 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
3097 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
3098 bi->cg_edges.safe_push (cs);
3101 enable_ranger (cfun, false);
3102 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
3103 disable_ranger (cfun);
3105 ipa_release_body_info (&fbi);
3106 free_dominance_info (CDI_DOMINATORS);
3107 pop_cfun ();
3110 /* Update the jump functions associated with call graph edge E when the call
3111 graph edge CS is being inlined, assuming that E->caller is already (possibly
3112 indirectly) inlined into CS->callee and that E has not been inlined. */
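/* A hypothetical illustration: if B merely forwards its parameter,

     void B (int x) { C (x); }      /* E = B->C: pass-through of formal 0 */

   and A calls B with a constant,

     void A (void) { B (5); }       /* CS = A->B: constant 5 */

   then after inlining B into A, E's jump function for C's argument is
   combined with CS's and becomes the constant 5.  */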
3114 static void
3115 update_jump_functions_after_inlining (struct cgraph_edge *cs,
3116 struct cgraph_edge *e)
3118 ipa_edge_args *top = ipa_edge_args_sum->get (cs);
3119 ipa_edge_args *args = ipa_edge_args_sum->get (e);
3120 if (!args)
3121 return;
3122 int count = ipa_get_cs_argument_count (args);
3123 int i;
3125 for (i = 0; i < count; i++)
3127 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
3128 class ipa_polymorphic_call_context *dst_ctx
3129 = ipa_get_ith_polymorhic_call_context (args, i);
3131 if (dst->agg.items)
3133 struct ipa_agg_jf_item *item;
3134 int j;
3136 FOR_EACH_VEC_ELT (*dst->agg.items, j, item)
3138 int dst_fid;
3139 struct ipa_jump_func *src;
3141 if (item->jftype != IPA_JF_PASS_THROUGH
3142 && item->jftype != IPA_JF_LOAD_AGG)
3143 continue;
3145 dst_fid = item->value.pass_through.formal_id;
3146 if (!top || dst_fid >= ipa_get_cs_argument_count (top))
3148 item->jftype = IPA_JF_UNKNOWN;
3149 continue;
3152 item->value.pass_through.formal_id = -1;
3153 src = ipa_get_ith_jump_func (top, dst_fid);
3154 if (src->type == IPA_JF_CONST)
3156 if (item->jftype == IPA_JF_PASS_THROUGH
3157 && item->value.pass_through.operation == NOP_EXPR)
3159 item->jftype = IPA_JF_CONST;
3160 item->value.constant = src->value.constant.value;
3161 continue;
3164 else if (src->type == IPA_JF_PASS_THROUGH
3165 && src->value.pass_through.operation == NOP_EXPR)
3167 if (item->jftype == IPA_JF_PASS_THROUGH
3168 || !item->value.load_agg.by_ref
3169 || src->value.pass_through.agg_preserved)
3170 item->value.pass_through.formal_id
3171 = src->value.pass_through.formal_id;
3173 else if (src->type == IPA_JF_ANCESTOR)
3175 if (item->jftype == IPA_JF_PASS_THROUGH)
3177 if (!src->value.ancestor.offset)
3178 item->value.pass_through.formal_id
3179 = src->value.ancestor.formal_id;
3181 else if (src->value.ancestor.agg_preserved)
3183 gcc_checking_assert (item->value.load_agg.by_ref);
3185 item->value.pass_through.formal_id
3186 = src->value.ancestor.formal_id;
3187 item->value.load_agg.offset
3188 += src->value.ancestor.offset;
3192 if (item->value.pass_through.formal_id < 0)
3193 item->jftype = IPA_JF_UNKNOWN;
3197 if (!top)
3199 ipa_set_jf_unknown (dst);
3200 continue;
3203 if (dst->type == IPA_JF_ANCESTOR)
3205 struct ipa_jump_func *src;
3206 int dst_fid = dst->value.ancestor.formal_id;
3207 class ipa_polymorphic_call_context *src_ctx
3208 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
3210 /* A variable number of arguments can cause havoc if we try to access
3211 an argument that does not exist on the inlined edge. So make sure we
3212 don't. */
3213 if (dst_fid >= ipa_get_cs_argument_count (top))
3215 ipa_set_jf_unknown (dst);
3216 continue;
3219 src = ipa_get_ith_jump_func (top, dst_fid);
3221 if (src_ctx && !src_ctx->useless_p ())
3223 class ipa_polymorphic_call_context ctx = *src_ctx;
3225 /* TODO: Make type preserved safe WRT contexts. */
3226 if (!ipa_get_jf_ancestor_type_preserved (dst))
3227 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
3228 ctx.offset_by (dst->value.ancestor.offset);
3229 if (!ctx.useless_p ())
3231 if (!dst_ctx)
3233 vec_safe_grow_cleared (args->polymorphic_call_contexts,
3234 count, true);
3235 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3238 dst_ctx->combine_with (ctx);
3242 /* The parameter and argument in an ancestor jump function must be of
3243 pointer type, which means access to the aggregate must be by reference. */
3244 gcc_assert (!src->agg.items || src->agg.by_ref);
3246 if (src->agg.items && dst->value.ancestor.agg_preserved)
3248 struct ipa_agg_jf_item *item;
3249 int j;
3251 /* Currently we do not produce clobber aggregate jump functions,
3252 replace with merging when we do. */
3253 gcc_assert (!dst->agg.items);
3255 dst->agg.items = vec_safe_copy (src->agg.items);
3256 dst->agg.by_ref = src->agg.by_ref;
3257 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
3258 item->offset -= dst->value.ancestor.offset;
3261 if (src->type == IPA_JF_PASS_THROUGH
3262 && src->value.pass_through.operation == NOP_EXPR)
3264 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
3265 dst->value.ancestor.agg_preserved &=
3266 src->value.pass_through.agg_preserved;
3268 else if (src->type == IPA_JF_ANCESTOR)
3270 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
3271 dst->value.ancestor.offset += src->value.ancestor.offset;
3272 dst->value.ancestor.agg_preserved &=
3273 src->value.ancestor.agg_preserved;
3274 dst->value.ancestor.keep_null |= src->value.ancestor.keep_null;
3276 else
3277 ipa_set_jf_unknown (dst);
3279 else if (dst->type == IPA_JF_PASS_THROUGH)
3281 struct ipa_jump_func *src;
3282 /* We must check the range due to calls with a variable number of
3283 arguments, and we cannot combine jump functions with operations. */
3284 if (dst->value.pass_through.operation == NOP_EXPR
3285 && (top && dst->value.pass_through.formal_id
3286 < ipa_get_cs_argument_count (top)))
3288 int dst_fid = dst->value.pass_through.formal_id;
3289 src = ipa_get_ith_jump_func (top, dst_fid);
3290 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
3291 class ipa_polymorphic_call_context *src_ctx
3292 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
3294 if (src_ctx && !src_ctx->useless_p ())
3296 class ipa_polymorphic_call_context ctx = *src_ctx;
3298 /* TODO: Make type preserved safe WRT contexts. */
3299 if (!ipa_get_jf_pass_through_type_preserved (dst))
3300 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
3301 if (!ctx.useless_p ())
3303 if (!dst_ctx)
3305 vec_safe_grow_cleared (args->polymorphic_call_contexts,
3306 count, true);
3307 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3309 dst_ctx->combine_with (ctx);
3312 switch (src->type)
3314 case IPA_JF_UNKNOWN:
3315 ipa_set_jf_unknown (dst);
3316 break;
3317 case IPA_JF_CONST:
3319 bool rd = ipa_get_jf_pass_through_refdesc_decremented (dst);
3320 ipa_set_jf_cst_copy (dst, src);
3321 if (rd)
3322 ipa_zap_jf_refdesc (dst);
3325 break;
3327 case IPA_JF_PASS_THROUGH:
3329 int formal_id = ipa_get_jf_pass_through_formal_id (src);
3330 enum tree_code operation;
3331 operation = ipa_get_jf_pass_through_operation (src);
3333 if (operation == NOP_EXPR)
3335 bool agg_p;
3336 agg_p = dst_agg_p
3337 && ipa_get_jf_pass_through_agg_preserved (src);
3338 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
3340 else if (TREE_CODE_CLASS (operation) == tcc_unary)
3341 ipa_set_jf_unary_pass_through (dst, formal_id, operation);
3342 else
3344 tree operand = ipa_get_jf_pass_through_operand (src);
3345 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
3346 operation);
3348 break;
3350 case IPA_JF_ANCESTOR:
3352 bool agg_p;
3353 agg_p = dst_agg_p
3354 && ipa_get_jf_ancestor_agg_preserved (src);
3355 ipa_set_ancestor_jf (dst,
3356 ipa_get_jf_ancestor_offset (src),
3357 ipa_get_jf_ancestor_formal_id (src),
3358 agg_p,
3359 ipa_get_jf_ancestor_keep_null (src));
3360 break;
3362 default:
3363 gcc_unreachable ();
3366 if (src->agg.items
3367 && (dst_agg_p || !src->agg.by_ref))
3369 /* Currently we do not produce clobber aggregate jump
3370 functions, replace with merging when we do. */
3371 gcc_assert (!dst->agg.items);
3373 dst->agg.by_ref = src->agg.by_ref;
3374 dst->agg.items = vec_safe_copy (src->agg.items);
3377 else
3378 ipa_set_jf_unknown (dst);
3383 /* If TARGET is an addr_expr of a function declaration, make it the
3384 (speculative) destination of an indirect edge IE and return the edge.
3385 Otherwise, return NULL. */
3387 struct cgraph_edge *
3388 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
3389 bool speculative)
3391 struct cgraph_node *callee;
3392 bool unreachable = false;
3394 if (TREE_CODE (target) == ADDR_EXPR)
3395 target = TREE_OPERAND (target, 0);
3396 if (TREE_CODE (target) != FUNCTION_DECL)
3398 target = canonicalize_constructor_val (target, NULL);
3399 if (!target || TREE_CODE (target) != FUNCTION_DECL)
3401 /* Member pointer call that goes through a VMT lookup. */
3402 if (ie->indirect_info->member_ptr
3403 /* Or if target is not an invariant expression and we do not
3404 know if it will evaluate to a function at runtime.
3405 This can happen when folding through &VAR, where &VAR
3406 is IP invariant, but VAR itself is not.
3408 TODO: Revisit this when GCC 5 is branched. It seems that
3409 member_ptr check is not needed and that we may try to fold
3410 the expression and see if VAR is readonly. */
3411 || !is_gimple_ip_invariant (target))
3413 if (dump_enabled_p ())
3415 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3416 "discovered direct call non-invariant %s\n",
3417 ie->caller->dump_name ());
3419 return NULL;
3423 if (dump_enabled_p ())
3425 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3426 "discovered direct call to non-function in %s, "
3427 "making it __builtin_unreachable\n",
3428 ie->caller->dump_name ());
3431 target = builtin_decl_unreachable ();
3432 callee = cgraph_node::get_create (target);
3433 unreachable = true;
3435 else
3436 callee = cgraph_node::get (target);
3438 else
3439 callee = cgraph_node::get (target);
3441 /* Because may-edges are not explicitly represented and the vtable may be
3442 external, we may create the first reference to the object in the unit. */
3443 if (!callee || callee->inlined_to)
3446 /* We had better ensure we can refer to it.
3447 In the case of static functions we are out of luck, since we have already
3448 removed the body. In the case of public functions we may or may
3449 not introduce the reference. */
3450 if (!canonicalize_constructor_val (target, NULL)
3451 || !TREE_PUBLIC (target))
3453 if (dump_file)
3454 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
3455 "(%s -> %s) but cannot refer to it. Giving up.\n",
3456 ie->caller->dump_name (),
3457 ie->callee->dump_name ());
3458 return NULL;
3460 callee = cgraph_node::get_create (target);
3463 /* Handle the case when the edge is already speculative. */
3464 if (speculative && ie->speculative)
3466 if (dump_file)
3468 cgraph_edge *e2 = ie->speculative_call_for_target (callee);
3469 if (!e2)
3471 if (dump_file)
3472 fprintf (dump_file, "ipa-prop: Discovered call to a "
3473 "speculative target (%s -> %s) but the call is "
3474 "already speculated to different target. "
3475 "Giving up.\n",
3476 ie->caller->dump_name (), callee->dump_name ());
3478 else
3480 if (dump_file)
3481 fprintf (dump_file,
3482 "ipa-prop: Discovered call to a speculative target "
3483 "(%s -> %s) this agree with previous speculation.\n",
3484 ie->caller->dump_name (), callee->dump_name ());
3487 return NULL;
3490 if (!dbg_cnt (devirt))
3491 return NULL;
3493 ipa_check_create_node_params ();
3495 /* We cannot make edges to inline clones. It is a bug that someone removed
3496 the cgraph node too early. */
3497 gcc_assert (!callee->inlined_to);
3499 if (dump_file && !unreachable)
3501 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
3502 "(%s -> %s), for stmt ",
3503 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
3504 speculative ? "speculative" : "known",
3505 ie->caller->dump_name (),
3506 callee->dump_name ());
3507 if (ie->call_stmt)
3508 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
3509 else
3510 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
3512 if (dump_enabled_p ())
3514 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3515 "converting indirect call in %s to direct call to %s\n",
3516 ie->caller->dump_name (), callee->dump_name ());
3518 if (!speculative)
3520 struct cgraph_edge *orig = ie;
3521 ie = cgraph_edge::make_direct (ie, callee);
3522 /* If we resolved speculative edge the cost is already up to date
3523 for direct call (adjusted by inline_edge_duplication_hook). */
3524 if (ie == orig)
3526 ipa_call_summary *es = ipa_call_summaries->get (ie);
3527 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
3528 - eni_size_weights.call_cost);
3529 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
3530 - eni_time_weights.call_cost);
3533 else
3535 if (!callee->can_be_discarded_p ())
3537 cgraph_node *alias;
3538 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
3539 if (alias)
3540 callee = alias;
3542 /* make_speculative will update ie's cost to the direct call cost. */
3543 ie = ie->make_speculative
3544 (callee, ie->count.apply_scale (8, 10));
3547 return ie;
3550 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
3551 CONSTRUCTOR and return it. Return NULL if the search fails for some
3552 reason. */
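/* For example (hypothetical, assuming 32-bit int and no padding), given

     static const struct { int a; int b; } cst = { 1, 2 };

   a lookup in DECL_INITIAL (cst) with a REQ_OFFSET of 32 bits would return
   the INTEGER_CST 2.  */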
3554 static tree
3555 find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
3557 tree type = TREE_TYPE (constructor);
3558 if (TREE_CODE (type) != ARRAY_TYPE
3559 && TREE_CODE (type) != RECORD_TYPE)
3560 return NULL;
3562 unsigned ix;
3563 tree index, val;
3564 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
3566 HOST_WIDE_INT elt_offset;
3567 if (TREE_CODE (type) == ARRAY_TYPE)
3569 offset_int off;
3570 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
3571 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3573 if (index)
3575 if (TREE_CODE (index) == RANGE_EXPR)
3576 off = wi::to_offset (TREE_OPERAND (index, 0));
3577 else
3578 off = wi::to_offset (index);
3579 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
3581 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
3582 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3583 off = wi::sext (off - wi::to_offset (low_bound),
3584 TYPE_PRECISION (TREE_TYPE (index)));
3586 off *= wi::to_offset (unit_size);
3587 /* ??? Handle more than just the first index of a
3588 RANGE_EXPR. */
3590 else
3591 off = wi::to_offset (unit_size) * ix;
3593 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
3594 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
3595 continue;
3596 elt_offset = off.to_shwi ();
3598 else if (TREE_CODE (type) == RECORD_TYPE)
3600 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
3601 if (DECL_BIT_FIELD (index))
3602 continue;
3603 elt_offset = int_bit_position (index);
3605 else
3606 gcc_unreachable ();
3608 if (elt_offset > req_offset)
3609 return NULL;
3611 if (TREE_CODE (val) == CONSTRUCTOR)
3612 return find_constructor_constant_at_offset (val,
3613 req_offset - elt_offset);
3615 if (elt_offset == req_offset
3616 && is_gimple_reg_type (TREE_TYPE (val))
3617 && is_gimple_ip_invariant (val))
3618 return val;
3620 return NULL;
3623 /* Check whether SCALAR could be used to look up an aggregate interprocedural
3624 invariant from a static constructor and if so, return it. Otherwise return
3625 NULL. */
3627 tree
3628 ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
3630 if (by_ref)
3632 if (TREE_CODE (scalar) != ADDR_EXPR)
3633 return NULL;
3634 scalar = TREE_OPERAND (scalar, 0);
3637 if (!VAR_P (scalar)
3638 || !is_global_var (scalar)
3639 || !TREE_READONLY (scalar)
3640 || !DECL_INITIAL (scalar)
3641 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
3642 return NULL;
3644 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
3647 /* Retrieve value from AGG_JFUNC for the given OFFSET or return NULL if there
3648 is none. BY_REF specifies whether the value has to be passed by reference
3649 or by value. */
3651 static tree
3652 ipa_find_agg_cst_from_jfunc_items (struct ipa_agg_jump_function *agg_jfunc,
3653 ipa_node_params *src_info,
3654 cgraph_node *src_node,
3655 HOST_WIDE_INT offset, bool by_ref)
3657 if (by_ref != agg_jfunc->by_ref)
3658 return NULL_TREE;
3660 for (const ipa_agg_jf_item &item : agg_jfunc->items)
3661 if (item.offset == offset)
3662 return ipa_agg_value_from_jfunc (src_info, src_node, &item);
3664 return NULL_TREE;
3667 /* Remove a reference to SYMBOL from the list of references of a node given by
3668 reference description RDESC. Return true if the reference has been
3669 successfully found and removed. */
3671 static bool
3672 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
3674 struct ipa_ref *to_del;
3675 struct cgraph_edge *origin;
3677 origin = rdesc->cs;
3678 if (!origin)
3679 return false;
3680 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3681 origin->lto_stmt_uid, IPA_REF_ADDR);
3682 if (!to_del)
3683 return false;
3685 to_del->remove_reference ();
3686 if (dump_file)
3687 fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
3688 origin->caller->dump_name (), symbol->dump_name ());
3689 return true;
3692 /* If JFUNC has a reference description with refcount different from
3693 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3694 NULL. JFUNC must be a constant jump function. */
3696 static struct ipa_cst_ref_desc *
3697 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3699 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3700 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3701 return rdesc;
3702 else
3703 return NULL;
3706 /* If the value of constant jump function JFUNC is an address of a function
3707 or variable declaration, return the associated symbol table node. Otherwise
3708 return NULL. */
3710 static symtab_node *
3711 symtab_node_for_jfunc (struct ipa_jump_func *jfunc)
3713 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3714 tree cst = ipa_get_jf_constant (jfunc);
3715 if (TREE_CODE (cst) != ADDR_EXPR
3716 || (TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL
3717 && TREE_CODE (TREE_OPERAND (cst, 0)) != VAR_DECL))
3718 return NULL;
3720 return symtab_node::get (TREE_OPERAND (cst, 0));
3724 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3725 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3726 the edge specified in the rdesc. Return false if either the symbol or the
3727 reference could not be found, otherwise return true. */
3729 static bool
3730 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3732 struct ipa_cst_ref_desc *rdesc;
3733 if (jfunc->type == IPA_JF_CONST
3734 && (rdesc = jfunc_rdesc_usable (jfunc))
3735 && --rdesc->refcount == 0)
3737 symtab_node *symbol = symtab_node_for_jfunc (jfunc);
3738 if (!symbol)
3739 return false;
3741 return remove_described_reference (symbol, rdesc);
3743 return true;
3746 /* Try to find a destination for indirect edge IE that corresponds to a simple
3747 call or a call of a member function pointer and where the destination is a
3748 pointer formal parameter described by jump function JFUNC. TARGET_TYPE is
3749 the type of the parameter to which the result of JFUNC is passed. If it can
3750 be determined, return the newly direct edge, otherwise return NULL.
3751 NEW_ROOT and NEW_ROOT_INFO are the node and its info that the JFUNC lattices
3752 are relative to. */
3754 static struct cgraph_edge *
3755 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3756 struct ipa_jump_func *jfunc, tree target_type,
3757 struct cgraph_node *new_root,
3758 class ipa_node_params *new_root_info)
3760 struct cgraph_edge *cs;
3761 tree target = NULL_TREE;
3762 bool agg_contents = ie->indirect_info->agg_contents;
3763 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
3764 if (agg_contents)
3766 if (scalar)
3767 target = ipa_find_agg_cst_from_init (scalar, ie->indirect_info->offset,
3768 ie->indirect_info->by_ref);
3769 if (!target && ie->indirect_info->guaranteed_unmodified)
3770 target = ipa_find_agg_cst_from_jfunc_items (&jfunc->agg, new_root_info,
3771 new_root,
3772 ie->indirect_info->offset,
3773 ie->indirect_info->by_ref);
3775 else
3776 target = scalar;
3777 if (!target)
3778 return NULL;
3779 cs = ipa_make_edge_direct_to_target (ie, target);
3781 if (cs && !agg_contents)
3783 bool ok;
3784 gcc_checking_assert (cs->callee
3785 && (cs != ie
3786 || jfunc->type != IPA_JF_CONST
3787 || !symtab_node_for_jfunc (jfunc)
3788 || cs->callee == symtab_node_for_jfunc (jfunc)));
3789 ok = try_decrement_rdesc_refcount (jfunc);
3790 gcc_checking_assert (ok);
3793 return cs;
3796 /* Return the target to be used in cases of impossible devirtualization. IE
3797 and target (the latter can be NULL) are dumped when dumping is enabled. */
3799 tree
3800 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3802 if (dump_file)
3804 if (target)
3805 fprintf (dump_file,
3806 "Type inconsistent devirtualization: %s->%s\n",
3807 ie->caller->dump_name (),
3808 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3809 else
3810 fprintf (dump_file,
3811 "No devirtualization target in %s\n",
3812 ie->caller->dump_name ());
3814 tree new_target = builtin_decl_unreachable ();
3815 cgraph_node::get_create (new_target);
3816 return new_target;
3819 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3820 call based on a formal parameter which is described by jump function JFUNC
3821 and if it can be determined, make it direct and return the direct edge.
3822 Otherwise, return NULL. CTX describes the polymorphic context that the
3823 parameter the call is based on brings along with it. NEW_ROOT and
3824 NEW_ROOT_INFO are the node and its info that the JFUNC lattices are
3825 relative to. */
3827 static struct cgraph_edge *
3828 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3829 struct ipa_jump_func *jfunc,
3830 class ipa_polymorphic_call_context ctx,
3831 struct cgraph_node *new_root,
3832 class ipa_node_params *new_root_info)
3834 tree target = NULL;
3835 bool speculative = false;
3837 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
3838 return NULL;
3840 gcc_assert (!ie->indirect_info->by_ref);
3842 /* Try to do lookup via known virtual table pointer value. */
3843 if (!ie->indirect_info->vptr_changed
3844 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
3846 tree vtable;
3847 unsigned HOST_WIDE_INT offset;
3848 tree t = NULL_TREE;
3849 if (jfunc->type == IPA_JF_CONST)
3850 t = ipa_find_agg_cst_from_init (ipa_get_jf_constant (jfunc),
3851 ie->indirect_info->offset, true);
3852 if (!t)
3853 t = ipa_find_agg_cst_from_jfunc_items (&jfunc->agg, new_root_info,
3854 new_root,
3855 ie->indirect_info->offset, true);
3856 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3858 bool can_refer;
3859 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3860 vtable, offset, &can_refer);
3861 if (can_refer)
3863 if (!t
3864 || fndecl_built_in_p (t, BUILT_IN_UNREACHABLE,
3865 BUILT_IN_UNREACHABLE_TRAP)
3866 || !possible_polymorphic_call_target_p
3867 (ie, cgraph_node::get (t)))
3869 /* Do not speculate builtin_unreachable, it is stupid! */
3870 if (!ie->indirect_info->vptr_changed)
3871 target = ipa_impossible_devirt_target (ie, target);
3872 else
3873 target = NULL;
3875 else
3877 target = t;
3878 speculative = ie->indirect_info->vptr_changed;
3884 ipa_polymorphic_call_context ie_context (ie);
3885 vec <cgraph_node *>targets;
3886 bool final;
3888 ctx.offset_by (ie->indirect_info->offset);
3889 if (ie->indirect_info->vptr_changed)
3890 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3891 ie->indirect_info->otr_type);
3892 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3893 targets = possible_polymorphic_call_targets
3894 (ie->indirect_info->otr_type,
3895 ie->indirect_info->otr_token,
3896 ctx, &final);
3897 if (final && targets.length () <= 1)
3899 speculative = false;
3900 if (targets.length () == 1)
3901 target = targets[0]->decl;
3902 else
3903 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3905 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3906 && !ie->speculative && ie->maybe_hot_p ())
3908 cgraph_node *n;
3909 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3910 ie->indirect_info->otr_token,
3911 ie->indirect_info->context);
3912 if (n)
3914 target = n->decl;
3915 speculative = true;
3919 if (target)
3921 if (!possible_polymorphic_call_target_p
3922 (ie, cgraph_node::get_create (target)))
3924 if (speculative)
3925 return NULL;
3926 target = ipa_impossible_devirt_target (ie, target);
3928 return ipa_make_edge_direct_to_target (ie, target, speculative);
3930 else
3931 return NULL;
3934 /* Update the param called notes associated with NODE when CS is being inlined,
3935 assuming NODE is (potentially indirectly) inlined into CS->callee.
3936 Moreover, if the callee is discovered to be constant, create a new cgraph
3937 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3938 unless NEW_EDGES is NULL. Return true iff new edges were created. */
3940 static bool
3941 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3942 struct cgraph_node *node,
3943 vec<cgraph_edge *> *new_edges)
3945 class ipa_edge_args *top;
3946 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3947 struct cgraph_node *new_root;
3948 class ipa_node_params *new_root_info, *inlined_node_info;
3949 bool res = false;
3951 ipa_check_create_edge_args ();
3952 top = ipa_edge_args_sum->get (cs);
3953 new_root = cs->caller->inlined_to
3954 ? cs->caller->inlined_to : cs->caller;
3955 new_root_info = ipa_node_params_sum->get (new_root);
3956 inlined_node_info = ipa_node_params_sum->get (cs->callee->function_symbol ());
3958 for (ie = node->indirect_calls; ie; ie = next_ie)
3960 class cgraph_indirect_call_info *ici = ie->indirect_info;
3961 struct ipa_jump_func *jfunc;
3962 int param_index;
3964 next_ie = ie->next_callee;
3966 if (ici->param_index == -1)
3967 continue;
3969 /* We must check the range due to calls with a variable number of arguments: */
3970 if (!top || ici->param_index >= ipa_get_cs_argument_count (top))
3972 ici->param_index = -1;
3973 continue;
3976 param_index = ici->param_index;
3977 jfunc = ipa_get_ith_jump_func (top, param_index);
3979 auto_vec<cgraph_node *, 4> spec_targets;
3980 if (ie->speculative)
3981 for (cgraph_edge *direct = ie->first_speculative_call_target ();
3982 direct;
3983 direct = direct->next_speculative_call_target ())
3984 spec_targets.safe_push (direct->callee);
3986 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3987 new_direct_edge = NULL;
3988 else if (ici->polymorphic)
3990 ipa_polymorphic_call_context ctx;
3991 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3992 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx,
3993 new_root,
3994 new_root_info);
3996 else
3998 tree target_type = ipa_get_type (inlined_node_info, param_index);
3999 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
4000 target_type,
4001 new_root,
4002 new_root_info);
4005 /* If speculation was removed, then we need to do nothing. */
4006 if (new_direct_edge && new_direct_edge != ie
4007 && spec_targets.contains (new_direct_edge->callee))
4009 new_direct_edge->indirect_inlining_edge = 1;
4010 res = true;
4011 if (!new_direct_edge->speculative)
4012 continue;
4014 else if (new_direct_edge)
4016 new_direct_edge->indirect_inlining_edge = 1;
4017 if (new_edges)
4019 new_edges->safe_push (new_direct_edge);
4020 res = true;
4022 /* If a speculative edge was introduced, we still need to update the
4023 call info of the indirect edge. */
4024 if (!new_direct_edge->speculative)
4025 continue;
4027 if (jfunc->type == IPA_JF_PASS_THROUGH
4028 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
4030 if (ici->agg_contents
4031 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
4032 && !ici->polymorphic)
4033 ici->param_index = -1;
4034 else
4036 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
4037 if (ici->polymorphic
4038 && !ipa_get_jf_pass_through_type_preserved (jfunc))
4039 ici->vptr_changed = true;
4040 ipa_set_param_used_by_indirect_call (new_root_info,
4041 ici->param_index, true);
4042 if (ici->polymorphic)
4043 ipa_set_param_used_by_polymorphic_call (new_root_info,
4044 ici->param_index, true);
4047 else if (jfunc->type == IPA_JF_ANCESTOR)
4049 if (ici->agg_contents
4050 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
4051 && !ici->polymorphic)
4052 ici->param_index = -1;
4053 else
4055 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
4056 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
4057 if (ici->polymorphic
4058 && !ipa_get_jf_ancestor_type_preserved (jfunc))
4059 ici->vptr_changed = true;
4060 ipa_set_param_used_by_indirect_call (new_root_info,
4061 ici->param_index, true);
4062 if (ici->polymorphic)
4063 ipa_set_param_used_by_polymorphic_call (new_root_info,
4064 ici->param_index, true);
4067 else
4068 /* Either we can find a destination for this edge now or never. */
4069 ici->param_index = -1;
4072 return res;
4075 /* Recursively traverse subtree of NODE (including node) made of inlined
4076 cgraph_edges when CS has been inlined and invoke
4077 update_indirect_edges_after_inlining on all nodes and
4078 update_jump_functions_after_inlining on all non-inlined edges that lead out
4079 of this subtree. Newly discovered indirect edges will be added to
4080 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
4081 created. */
4083 static bool
4084 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
4085 struct cgraph_node *node,
4086 vec<cgraph_edge *> *new_edges)
4088 struct cgraph_edge *e;
4089 bool res;
4091 res = update_indirect_edges_after_inlining (cs, node, new_edges);
4093 for (e = node->callees; e; e = e->next_callee)
4094 if (!e->inline_failed)
4095 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
4096 else
4097 update_jump_functions_after_inlining (cs, e);
4098 for (e = node->indirect_calls; e; e = e->next_callee)
4099 update_jump_functions_after_inlining (cs, e);
4101 return res;
4104 /* Combine two controlled-uses counters as done during inlining. */
4106 static int
4107 combine_controlled_uses_counters (int c, int d)
4109 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
4110 return IPA_UNDESCRIBED_USE;
4111 else
4112 return c + d - 1;
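/* Illustrative sketch, not part of GCC: a worked example of the rule above.
   Combining a caller counter of 3 with a callee counter of 2 yields 4,
   because the argument of the inlined call itself was one of the counted
   uses and it no longer exists after inlining.  */

static ATTRIBUTE_UNUSED void
example_combine_controlled_uses (void)
{
  gcc_checking_assert (combine_controlled_uses_counters (3, 2) == 4);
  /* An undescribed use on either side poisons the result.  */
  gcc_checking_assert (combine_controlled_uses_counters (IPA_UNDESCRIBED_USE, 2)
		       == IPA_UNDESCRIBED_USE);
}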
4115 /* Propagate the number of controlled uses from CS->callee to the new root of the
4116 tree of inlined nodes. */
4118 static void
4119 propagate_controlled_uses (struct cgraph_edge *cs)
4121 ipa_edge_args *args = ipa_edge_args_sum->get (cs);
4122 if (!args)
4123 return;
4124 struct cgraph_node *new_root = cs->caller->inlined_to
4125 ? cs->caller->inlined_to : cs->caller;
4126 ipa_node_params *new_root_info = ipa_node_params_sum->get (new_root);
4127 ipa_node_params *old_root_info = ipa_node_params_sum->get (cs->callee);
4128 int count, i;
4130 if (!old_root_info)
4131 return;
4133 count = MIN (ipa_get_cs_argument_count (args),
4134 ipa_get_param_count (old_root_info));
4135 for (i = 0; i < count; i++)
4137 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
4138 struct ipa_cst_ref_desc *rdesc;
4140 if (jf->type == IPA_JF_PASS_THROUGH
4141 && !ipa_get_jf_pass_through_refdesc_decremented (jf))
4143 int src_idx, c, d;
4144 src_idx = ipa_get_jf_pass_through_formal_id (jf);
4145 c = ipa_get_controlled_uses (new_root_info, src_idx);
4146 d = ipa_get_controlled_uses (old_root_info, i);
4148 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
4149 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
4150 c = combine_controlled_uses_counters (c, d);
4151 ipa_set_controlled_uses (new_root_info, src_idx, c);
4152 bool lderef = true;
4153 if (c != IPA_UNDESCRIBED_USE)
4155 lderef = (ipa_get_param_load_dereferenced (new_root_info, src_idx)
4156 || ipa_get_param_load_dereferenced (old_root_info, i));
4157 ipa_set_param_load_dereferenced (new_root_info, src_idx, lderef);
4160 if (c == 0 && !lderef && new_root_info->ipcp_orig_node)
4162 struct cgraph_node *n;
4163 struct ipa_ref *ref;
4164 tree t = new_root_info->known_csts[src_idx];
4166 if (t && TREE_CODE (t) == ADDR_EXPR
4167 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
4168 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
4169 && (ref = new_root->find_reference (n, NULL, 0,
4170 IPA_REF_ADDR)))
4172 if (dump_file)
4173 fprintf (dump_file, "ipa-prop: Removing cloning-created "
4174 "reference from %s to %s.\n",
4175 new_root->dump_name (),
4176 n->dump_name ());
4177 ref->remove_reference ();
4181 else if (jf->type == IPA_JF_CONST
4182 && (rdesc = jfunc_rdesc_usable (jf)))
4184 int d = ipa_get_controlled_uses (old_root_info, i);
4185 int c = rdesc->refcount;
4186 tree cst = ipa_get_jf_constant (jf);
4187 rdesc->refcount = combine_controlled_uses_counters (c, d);
4188 if (rdesc->refcount != IPA_UNDESCRIBED_USE
4189 && ipa_get_param_load_dereferenced (old_root_info, i)
4190 && TREE_CODE (cst) == ADDR_EXPR
4191 && VAR_P (TREE_OPERAND (cst, 0)))
4193 symtab_node *n = symtab_node::get (TREE_OPERAND (cst, 0));
4194 new_root->create_reference (n, IPA_REF_LOAD, NULL);
4195 if (dump_file)
4196 fprintf (dump_file, "ipa-prop: Address IPA constant will reach "
4197 "a load so adding LOAD reference from %s to %s.\n",
4198 new_root->dump_name (), n->dump_name ());
4200 if (rdesc->refcount == 0)
4202 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
4203 && ((TREE_CODE (TREE_OPERAND (cst, 0))
4204 == FUNCTION_DECL)
4205 || VAR_P (TREE_OPERAND (cst, 0))));
4207 symtab_node *n = symtab_node::get (TREE_OPERAND (cst, 0));
4208 if (n)
4210 remove_described_reference (n, rdesc);
4211 cgraph_node *clone = cs->caller;
4212 while (clone->inlined_to
4213 && clone->ipcp_clone
4214 && clone != rdesc->cs->caller)
4216 struct ipa_ref *ref;
4217 ref = clone->find_reference (n, NULL, 0, IPA_REF_ADDR);
4218 if (ref)
4220 if (dump_file)
4221 fprintf (dump_file, "ipa-prop: Removing "
4222 "cloning-created reference "
4223 "from %s to %s.\n",
4224 clone->dump_name (),
4225 n->dump_name ());
4226 ref->remove_reference ();
4228 clone = clone->callers->caller;
4235 for (i = ipa_get_param_count (old_root_info);
4236 i < ipa_get_cs_argument_count (args);
4237 i++)
4239 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
4241 if (jf->type == IPA_JF_CONST)
4243 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
4244 if (rdesc)
4245 rdesc->refcount = IPA_UNDESCRIBED_USE;
4247 else if (jf->type == IPA_JF_PASS_THROUGH)
4248 ipa_set_controlled_uses (new_root_info,
4249 jf->value.pass_through.formal_id,
4250 IPA_UNDESCRIBED_USE);
4254 /* Update jump functions and call note functions on inlining the call site CS.
4255 CS is expected to lead to a node already cloned by
4256 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
4257 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
4258 created. */
4260 bool
4261 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
4262 vec<cgraph_edge *> *new_edges)
4264 bool changed;
4265 /* Do nothing if the preparation phase has not been carried out yet
4266 (i.e. during early inlining). */
4267 if (!ipa_node_params_sum)
4268 return false;
4269 gcc_assert (ipa_edge_args_sum);
4271 propagate_controlled_uses (cs);
4272 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
4273 ipa_node_params_sum->remove (cs->callee);
4275 ipa_edge_args *args = ipa_edge_args_sum->get (cs);
4276 if (args)
4278 bool ok = true;
4279 if (args->jump_functions)
4281 struct ipa_jump_func *jf;
4282 int i;
4283 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
4284 if (jf->type == IPA_JF_CONST
4285 && ipa_get_jf_constant_rdesc (jf))
4287 ok = false;
4288 break;
4291 if (ok)
4292 ipa_edge_args_sum->remove (cs);
4294 if (ipcp_transformation_sum)
4295 ipcp_transformation_sum->remove (cs->callee);
4297 return changed;
4300 /* Ensure that the array of edge argument infos is big enough to accommodate
4301 a structure for all edges, reallocating it if not. Also allocate the
4302 associated hash tables if they do not already exist. */
4304 void
4305 ipa_check_create_edge_args (void)
4307 if (!ipa_edge_args_sum)
4308 ipa_edge_args_sum
4309 = (new (ggc_alloc_no_dtor<ipa_edge_args_sum_t> ())
4310 ipa_edge_args_sum_t (symtab, true));
4311 if (!ipa_vr_hash_table)
4312 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
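/* Illustrative sketch, not part of GCC: the lazy-creation contract of the
   function above.  Code that needs edge argument summaries calls it first,
   so ipa_edge_args_sum and the VR hash table exist on demand.  */

static ATTRIBUTE_UNUSED ipa_edge_args *
example_get_or_create_edge_args (struct cgraph_edge *cs)
{
  ipa_check_create_edge_args ();
  return ipa_edge_args_sum->get_create (cs);
}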
4315 /* Free all ipa_edge structures. */
4317 void
4318 ipa_free_all_edge_args (void)
4320 if (!ipa_edge_args_sum)
4321 return;
4323 ggc_delete (ipa_edge_args_sum);
4324 ipa_edge_args_sum = NULL;
4327 /* Free all ipa_node_params structures. */
4329 void
4330 ipa_free_all_node_params (void)
4332 if (ipa_node_params_sum)
4333 ggc_delete (ipa_node_params_sum);
4334 ipa_node_params_sum = NULL;
4337 /* Initialize IPA CP transformation summary and also allocate any necessary hash
4338 tables if they do not already exist. */
4340 void
4341 ipcp_transformation_initialize (void)
4343 if (!ipa_vr_hash_table)
4344 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
4345 if (ipcp_transformation_sum == NULL)
4347 ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
4348 ipcp_transformation_sum->disable_insertion_hook ();
4352 /* Release the IPA CP transformation summary. */
4354 void
4355 ipcp_free_transformation_sum (void)
4357 if (!ipcp_transformation_sum)
4358 return;
4360 ipcp_transformation_sum->~function_summary<ipcp_transformation *> ();
4361 ggc_free (ipcp_transformation_sum);
4362 ipcp_transformation_sum = NULL;
4365 /* Set the aggregate replacements of NODE to be AGGS. */
4367 void
4368 ipa_set_node_agg_value_chain (struct cgraph_node *node,
4369 vec<ipa_argagg_value, va_gc> *aggs)
4371 ipcp_transformation_initialize ();
4372 ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
4373 s->m_agg_values = aggs;
4376 /* Hook that is called by cgraph.cc when an edge is removed. Adjust reference
4377 count data structures accordingly. */
4379 void
4380 ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
4382 if (args->jump_functions)
4384 struct ipa_jump_func *jf;
4385 int i;
4386 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
4388 struct ipa_cst_ref_desc *rdesc;
4389 try_decrement_rdesc_refcount (jf);
4390 if (jf->type == IPA_JF_CONST
4391 && (rdesc = ipa_get_jf_constant_rdesc (jf))
4392 && rdesc->cs == cs)
4393 rdesc->cs = NULL;
4398 /* Method invoked when an edge is duplicated. Copy ipa_edge_args and adjust
4399 reference count data structures accordingly. */
4401 void
4402 ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
4403 ipa_edge_args *old_args, ipa_edge_args *new_args)
4405 unsigned int i;
4407 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
4408 if (old_args->polymorphic_call_contexts)
4409 new_args->polymorphic_call_contexts
4410 = vec_safe_copy (old_args->polymorphic_call_contexts);
4412 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
4414 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
4415 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
4417 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
4419 if (src_jf->type == IPA_JF_CONST)
4421 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
4423 if (!src_rdesc)
4424 dst_jf->value.constant.rdesc = NULL;
4425 else if (src->caller == dst->caller)
4427 /* Creation of a speculative edge. If the source edge is the one
4428 grabbing a reference, we must create a new (duplicate)
4429 reference description. Otherwise they refer to the same
4430 description corresponding to a reference taken in a function
4431 src->caller is inlined to. In that case we must simply
4432 increment the refcount. */
4433 if (src_rdesc->cs == src)
4435 symtab_node *n = symtab_node_for_jfunc (src_jf);
4436 gcc_checking_assert (n);
4437 ipa_ref *ref
4438 = src->caller->find_reference (n, src->call_stmt,
4439 src->lto_stmt_uid,
4440 IPA_REF_ADDR);
4441 gcc_checking_assert (ref);
4442 dst->caller->clone_reference (ref, ref->stmt);
4444 ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
4445 dst_rdesc->cs = dst;
4446 dst_rdesc->refcount = src_rdesc->refcount;
4447 dst_rdesc->next_duplicate = NULL;
4448 dst_jf->value.constant.rdesc = dst_rdesc;
4450 else
4452 src_rdesc->refcount++;
4453 dst_jf->value.constant.rdesc = src_rdesc;
4456 else if (src_rdesc->cs == src)
4458 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
4459 dst_rdesc->cs = dst;
4460 dst_rdesc->refcount = src_rdesc->refcount;
4461 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
4462 src_rdesc->next_duplicate = dst_rdesc;
4463 dst_jf->value.constant.rdesc = dst_rdesc;
4465 else
4467 struct ipa_cst_ref_desc *dst_rdesc;
4468 /* This can happen during inlining, when a JFUNC can refer to a
4469 reference taken in a function up in the tree of inline clones.
4470 We need to find the duplicate that refers to our tree of
4471 inline clones. */
4473 gcc_assert (dst->caller->inlined_to);
4474 for (dst_rdesc = src_rdesc->next_duplicate;
4475 dst_rdesc;
4476 dst_rdesc = dst_rdesc->next_duplicate)
4478 struct cgraph_node *top;
4479 top = dst_rdesc->cs->caller->inlined_to
4480 ? dst_rdesc->cs->caller->inlined_to
4481 : dst_rdesc->cs->caller;
4482 if (dst->caller->inlined_to == top)
4483 break;
4485 gcc_assert (dst_rdesc);
4486 dst_jf->value.constant.rdesc = dst_rdesc;
4489 else if (dst_jf->type == IPA_JF_PASS_THROUGH
4490 && src->caller == dst->caller)
4492 struct cgraph_node *inline_root = dst->caller->inlined_to
4493 ? dst->caller->inlined_to : dst->caller;
4494 ipa_node_params *root_info = ipa_node_params_sum->get (inline_root);
4495 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
4497 int c = ipa_get_controlled_uses (root_info, idx);
4498 if (c != IPA_UNDESCRIBED_USE)
4500 c++;
4501 ipa_set_controlled_uses (root_info, idx, c);
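/* Illustrative sketch, not part of GCC: the NEXT_DUPLICATE chain walked in
   the method above.  All duplicates of a reference description hang off the
   original; this helper finds the one whose edge belongs to the tree of
   inline clones rooted at ROOT, mirroring the loop above.  */

static ATTRIBUTE_UNUSED struct ipa_cst_ref_desc *
example_find_rdesc_duplicate (struct ipa_cst_ref_desc *src_rdesc,
			      struct cgraph_node *root)
{
  for (ipa_cst_ref_desc *d = src_rdesc->next_duplicate;
       d;
       d = d->next_duplicate)
    {
      struct cgraph_node *top = d->cs->caller->inlined_to
	? d->cs->caller->inlined_to : d->cs->caller;
      if (top == root)
	return d;
    }
  return NULL;
}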
4507 /* Analyze newly added function into callgraph. */
4509 static void
4510 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
4512 if (node->has_gimple_body_p ())
4513 ipa_analyze_node (node);
4516 /* Hook that is called by summary when a node is duplicated. */
4518 void
4519 ipa_node_params_t::duplicate(cgraph_node *, cgraph_node *,
4520 ipa_node_params *old_info,
4521 ipa_node_params *new_info)
4523 new_info->descriptors = vec_safe_copy (old_info->descriptors);
4524 new_info->lattices = NULL;
4525 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
4526 new_info->known_csts = old_info->known_csts.copy ();
4527 new_info->known_contexts = old_info->known_contexts.copy ();
4529 new_info->analysis_done = old_info->analysis_done;
4530 new_info->node_enqueued = old_info->node_enqueued;
4531 new_info->versionable = old_info->versionable;
4534 /* Duplication of ipcp transformation summaries. */
4536 void
4537 ipcp_transformation_t::duplicate(cgraph_node *, cgraph_node *dst,
4538 ipcp_transformation *src_trans,
4539 ipcp_transformation *dst_trans)
4541 /* Avoid redundant work of duplicating vectors we will never use. */
4542 if (dst->inlined_to)
4543 return;
4544 dst_trans->m_agg_values = vec_safe_copy (src_trans->m_agg_values);
4545 dst_trans->m_vr = vec_safe_copy (src_trans->m_vr);
4548 /* Register our cgraph hooks if they are not already there. */
4550 void
4551 ipa_register_cgraph_hooks (void)
4553 ipa_check_create_node_params ();
4554 ipa_check_create_edge_args ();
4556 function_insertion_hook_holder =
4557 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
4560 /* Unregister our cgraph hooks if they are registered. */
4562 static void
4563 ipa_unregister_cgraph_hooks (void)
4565 if (function_insertion_hook_holder)
4566 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
4567 function_insertion_hook_holder = NULL;
4570 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4571 longer needed after ipa-cp. */
4573 void
4574 ipa_free_all_structures_after_ipa_cp (void)
4576 if (!optimize && !in_lto_p)
4578 ipa_free_all_edge_args ();
4579 ipa_free_all_node_params ();
4580 ipcp_sources_pool.release ();
4581 ipcp_cst_values_pool.release ();
4582 ipcp_poly_ctx_values_pool.release ();
4583 ipcp_agg_lattice_pool.release ();
4584 ipa_unregister_cgraph_hooks ();
4585 ipa_refdesc_pool.release ();
4589 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4590 longer needed after indirect inlining. */
4592 void
4593 ipa_free_all_structures_after_iinln (void)
4595 ipa_free_all_edge_args ();
4596 ipa_free_all_node_params ();
4597 ipa_unregister_cgraph_hooks ();
4598 ipcp_sources_pool.release ();
4599 ipcp_cst_values_pool.release ();
4600 ipcp_poly_ctx_values_pool.release ();
4601 ipcp_agg_lattice_pool.release ();
4602 ipa_refdesc_pool.release ();
4605 /* Print ipa_tree_map data structures of function NODE to F. */
4608 void
4609 ipa_print_node_params (FILE *f, struct cgraph_node *node)
4611 int i, count;
4612 class ipa_node_params *info;
4614 if (!node->definition)
4615 return;
4616 info = ipa_node_params_sum->get (node);
4617 fprintf (f, " function %s parameter descriptors:\n", node->dump_name ());
4618 if (!info)
4620 fprintf (f, " no params return\n");
4621 return;
4623 count = ipa_get_param_count (info);
4624 for (i = 0; i < count; i++)
4626 int c;
4628 fprintf (f, " ");
4629 ipa_dump_param (f, info, i);
4630 if (ipa_is_param_used (info, i))
4631 fprintf (f, " used");
4632 if (ipa_is_param_used_by_ipa_predicates (info, i))
4633 fprintf (f, " used_by_ipa_predicates");
4634 if (ipa_is_param_used_by_indirect_call (info, i))
4635 fprintf (f, " used_by_indirect_call");
4636 if (ipa_is_param_used_by_polymorphic_call (info, i))
4637 fprintf (f, " used_by_polymorphic_call");
4638 c = ipa_get_controlled_uses (info, i);
4639 if (c == IPA_UNDESCRIBED_USE)
4640 fprintf (f, " undescribed_use");
4641 else
4642 fprintf (f, " controlled_uses=%i %s", c,
4643 ipa_get_param_load_dereferenced (info, i)
4644 ? "(load_dereferenced)" : "");
4645 fprintf (f, "\n");
4649 /* Print ipa_tree_map data structures of all functions in the
4650 callgraph to F. */
4652 void
4653 ipa_print_all_params (FILE * f)
4655 struct cgraph_node *node;
4657 fprintf (f, "\nFunction parameters:\n");
4658 FOR_EACH_FUNCTION (node)
4659 ipa_print_node_params (f, node);
4662 /* Stream out jump function JUMP_FUNC to OB. */
4664 static void
4665 ipa_write_jump_function (struct output_block *ob,
4666 struct ipa_jump_func *jump_func)
4668 struct ipa_agg_jf_item *item;
4669 struct bitpack_d bp;
4670 int i, count;
4671 int flag = 0;
4673 /* ADDR_EXPRs are very common IP invariants; save some streamer data
4674 as well as WPA memory by handling them specially. */
4675 if (jump_func->type == IPA_JF_CONST
4676 && TREE_CODE (jump_func->value.constant.value) == ADDR_EXPR)
4677 flag = 1;
4679 streamer_write_uhwi (ob, jump_func->type * 2 + flag);
4680 switch (jump_func->type)
4682 case IPA_JF_UNKNOWN:
4683 break;
4684 case IPA_JF_CONST:
4685 gcc_assert (
4686 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4687 stream_write_tree (ob,
4688 flag
4689 ? TREE_OPERAND (jump_func->value.constant.value, 0)
4690 : jump_func->value.constant.value, true);
4691 break;
4692 case IPA_JF_PASS_THROUGH:
4693 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4694 if (jump_func->value.pass_through.operation == NOP_EXPR)
4696 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4697 bp = bitpack_create (ob->main_stream);
4698 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4699 gcc_assert (!jump_func->value.pass_through.refdesc_decremented);
4700 streamer_write_bitpack (&bp);
4702 else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
4703 == tcc_unary)
4704 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4705 else
4707 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4708 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4710 break;
4711 case IPA_JF_ANCESTOR:
4712 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4713 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4714 bp = bitpack_create (ob->main_stream);
4715 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4716 bp_pack_value (&bp, jump_func->value.ancestor.keep_null, 1);
4717 streamer_write_bitpack (&bp);
4718 break;
4719 default:
4720 fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
4723 count = vec_safe_length (jump_func->agg.items);
4724 streamer_write_uhwi (ob, count);
4725 if (count)
4727 bp = bitpack_create (ob->main_stream);
4728 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4729 streamer_write_bitpack (&bp);
4732 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4734 stream_write_tree (ob, item->type, true);
4735 streamer_write_uhwi (ob, item->offset);
4736 streamer_write_uhwi (ob, item->jftype);
4737 switch (item->jftype)
4739 case IPA_JF_UNKNOWN:
4740 break;
4741 case IPA_JF_CONST:
4742 stream_write_tree (ob, item->value.constant, true);
4743 break;
4744 case IPA_JF_PASS_THROUGH:
4745 case IPA_JF_LOAD_AGG:
4746 streamer_write_uhwi (ob, item->value.pass_through.operation);
4747 streamer_write_uhwi (ob, item->value.pass_through.formal_id);
4748 if (TREE_CODE_CLASS (item->value.pass_through.operation)
4749 != tcc_unary)
4750 stream_write_tree (ob, item->value.pass_through.operand, true);
4751 if (item->jftype == IPA_JF_LOAD_AGG)
4753 stream_write_tree (ob, item->value.load_agg.type, true);
4754 streamer_write_uhwi (ob, item->value.load_agg.offset);
4755 bp = bitpack_create (ob->main_stream);
4756 bp_pack_value (&bp, item->value.load_agg.by_ref, 1);
4757 streamer_write_bitpack (&bp);
4759 break;
4760 default:
4761 fatal_error (UNKNOWN_LOCATION,
4762 "invalid jump function in LTO stream");
4766 bp = bitpack_create (ob->main_stream);
4767 if (jump_func->m_vr)
4768 jump_func->m_vr->streamer_write (ob);
4769 else
4771 bp_pack_value (&bp, false, 1);
4772 streamer_write_bitpack (&bp);
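/* Illustrative sketch, not part of GCC: the TYPE * 2 + FLAG trick used above
   packs the "constant is an ADDR_EXPR" flag into the low bit of one uhwi,
   and ipa_read_jump_function below undoes it the same way.  */

static ATTRIBUTE_UNUSED void
example_jump_function_tag_roundtrip (void)
{
  int tag = IPA_JF_CONST * 2 + 1;	/* A constant that is an ADDR_EXPR.  */
  bool flag = tag & 1;
  enum jump_func_type t = (enum jump_func_type) (tag / 2);
  gcc_checking_assert (flag && t == IPA_JF_CONST);
}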
4776 /* Read in jump function JUMP_FUNC from IB. */
4778 static void
4779 ipa_read_jump_function (class lto_input_block *ib,
4780 struct ipa_jump_func *jump_func,
4781 struct cgraph_edge *cs,
4782 class data_in *data_in,
4783 bool prevails)
4785 enum jump_func_type jftype;
4786 enum tree_code operation;
4787 int i, count;
4788 int val = streamer_read_uhwi (ib);
4789 bool flag = val & 1;
4791 jftype = (enum jump_func_type) (val / 2);
4792 switch (jftype)
4794 case IPA_JF_UNKNOWN:
4795 ipa_set_jf_unknown (jump_func);
4796 break;
4797 case IPA_JF_CONST:
4799 tree t = stream_read_tree (ib, data_in);
4800 if (flag && prevails)
4801 t = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (t)), t);
4802 ipa_set_jf_constant (jump_func, t, cs);
4804 break;
4805 case IPA_JF_PASS_THROUGH:
4806 operation = (enum tree_code) streamer_read_uhwi (ib);
4807 if (operation == NOP_EXPR)
4809 int formal_id = streamer_read_uhwi (ib);
4810 struct bitpack_d bp = streamer_read_bitpack (ib);
4811 bool agg_preserved = bp_unpack_value (&bp, 1);
4812 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4814 else if (TREE_CODE_CLASS (operation) == tcc_unary)
4816 int formal_id = streamer_read_uhwi (ib);
4817 ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4819 else
4821 tree operand = stream_read_tree (ib, data_in);
4822 int formal_id = streamer_read_uhwi (ib);
4823 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4824 operation);
4826 break;
4827 case IPA_JF_ANCESTOR:
4829 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4830 int formal_id = streamer_read_uhwi (ib);
4831 struct bitpack_d bp = streamer_read_bitpack (ib);
4832 bool agg_preserved = bp_unpack_value (&bp, 1);
4833 bool keep_null = bp_unpack_value (&bp, 1);
4834 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved,
4835 keep_null);
4836 break;
4838 default:
4839 fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
4842 count = streamer_read_uhwi (ib);
4843 if (prevails)
4845 jump_func->agg.items = NULL;
4846 vec_safe_reserve (jump_func->agg.items, count, true);
4848 if (count)
4850 struct bitpack_d bp = streamer_read_bitpack (ib);
4851 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4853 for (i = 0; i < count; i++)
4855 struct ipa_agg_jf_item item;
4856 item.type = stream_read_tree (ib, data_in);
4857 item.offset = streamer_read_uhwi (ib);
4858 item.jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4860 switch (item.jftype)
4862 case IPA_JF_UNKNOWN:
4863 break;
4864 case IPA_JF_CONST:
4865 item.value.constant = stream_read_tree (ib, data_in);
4866 break;
4867 case IPA_JF_PASS_THROUGH:
4868 case IPA_JF_LOAD_AGG:
4869 operation = (enum tree_code) streamer_read_uhwi (ib);
4870 item.value.pass_through.operation = operation;
4871 item.value.pass_through.formal_id = streamer_read_uhwi (ib);
4872 if (TREE_CODE_CLASS (operation) == tcc_unary)
4873 item.value.pass_through.operand = NULL_TREE;
4874 else
4875 item.value.pass_through.operand = stream_read_tree (ib, data_in);
4876 if (item.jftype == IPA_JF_LOAD_AGG)
4878 struct bitpack_d bp;
4879 item.value.load_agg.type = stream_read_tree (ib, data_in);
4880 item.value.load_agg.offset = streamer_read_uhwi (ib);
4881 bp = streamer_read_bitpack (ib);
4882 item.value.load_agg.by_ref = bp_unpack_value (&bp, 1);
4884 break;
4885 default:
4886 fatal_error (UNKNOWN_LOCATION,
4887 "invalid jump function in LTO stream");
4889 if (prevails)
4890 jump_func->agg.items->quick_push (item);
4893 ipa_vr vr;
4894 vr.streamer_read (ib, data_in);
4895 if (vr.known_p ())
4897 if (prevails)
4898 ipa_set_jfunc_vr (jump_func, vr);
4900 else
4901 jump_func->m_vr = NULL;
4904 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4905 relevant to indirect inlining to OB. */
4907 static void
4908 ipa_write_indirect_edge_info (struct output_block *ob,
4909 struct cgraph_edge *cs)
4911 class cgraph_indirect_call_info *ii = cs->indirect_info;
4912 struct bitpack_d bp;
4914 streamer_write_hwi (ob, ii->param_index);
4915 bp = bitpack_create (ob->main_stream);
4916 bp_pack_value (&bp, ii->polymorphic, 1);
4917 bp_pack_value (&bp, ii->agg_contents, 1);
4918 bp_pack_value (&bp, ii->member_ptr, 1);
4919 bp_pack_value (&bp, ii->by_ref, 1);
4920 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
4921 bp_pack_value (&bp, ii->vptr_changed, 1);
4922 streamer_write_bitpack (&bp);
4923 if (ii->agg_contents || ii->polymorphic)
4924 streamer_write_hwi (ob, ii->offset);
4925 else
4926 gcc_assert (ii->offset == 0);
4928 if (ii->polymorphic)
4930 streamer_write_hwi (ob, ii->otr_token);
4931 stream_write_tree (ob, ii->otr_type, true);
4932 ii->context.stream_out (ob);
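/* Illustrative sketch, not part of GCC: the six flags above travel in a
   single bitpack, and ipa_read_indirect_edge_info below must unpack them in
   exactly the same order.  A manual one-bit-per-flag equivalent (the real
   bit layout is the streamer's business; only the ordering matters):  */

static ATTRIBUTE_UNUSED void
example_flag_word_roundtrip (void)
{
  bool flags[6] = { true, false, false, true, true, false };
  unsigned word = 0;
  for (int i = 0; i < 6; i++)
    word |= (unsigned) flags[i] << i;	/* Pack in declaration order.  */
  for (int i = 0; i < 6; i++)
    gcc_checking_assert ((bool) ((word >> i) & 1) == flags[i]);
}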
4936 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4937 relevant to indirect inlining from IB. */
4939 static void
4940 ipa_read_indirect_edge_info (class lto_input_block *ib,
4941 class data_in *data_in,
4942 struct cgraph_edge *cs,
4943 class ipa_node_params *info)
4945 class cgraph_indirect_call_info *ii = cs->indirect_info;
4946 struct bitpack_d bp;
4948 ii->param_index = (int) streamer_read_hwi (ib);
4949 bp = streamer_read_bitpack (ib);
4950 ii->polymorphic = bp_unpack_value (&bp, 1);
4951 ii->agg_contents = bp_unpack_value (&bp, 1);
4952 ii->member_ptr = bp_unpack_value (&bp, 1);
4953 ii->by_ref = bp_unpack_value (&bp, 1);
4954 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
4955 ii->vptr_changed = bp_unpack_value (&bp, 1);
4956 if (ii->agg_contents || ii->polymorphic)
4957 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4958 else
4959 ii->offset = 0;
4960 if (ii->polymorphic)
4962 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4963 ii->otr_type = stream_read_tree (ib, data_in);
4964 ii->context.stream_in (ib, data_in);
4966 if (info && ii->param_index >= 0)
4968 if (ii->polymorphic)
4969 ipa_set_param_used_by_polymorphic_call (info,
4970 ii->param_index , true);
4971 ipa_set_param_used_by_indirect_call (info,
4972 ii->param_index, true);
4976 /* Stream out NODE info to OB. */
4978 static void
4979 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4981 int node_ref;
4982 lto_symtab_encoder_t encoder;
4983 ipa_node_params *info = ipa_node_params_sum->get (node);
4984 int j;
4985 struct cgraph_edge *e;
4986 struct bitpack_d bp;
4988 encoder = ob->decl_state->symtab_node_encoder;
4989 node_ref = lto_symtab_encoder_encode (encoder, node);
4990 streamer_write_uhwi (ob, node_ref);
4992 streamer_write_uhwi (ob, ipa_get_param_count (info));
4993 for (j = 0; j < ipa_get_param_count (info); j++)
4994 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4995 bp = bitpack_create (ob->main_stream);
4996 gcc_assert (info->analysis_done
4997 || ipa_get_param_count (info) == 0);
4998 gcc_assert (!info->node_enqueued);
4999 gcc_assert (!info->ipcp_orig_node);
5000 for (j = 0; j < ipa_get_param_count (info); j++)
5002 /* TODO: We could just not stream the bit in the undescribed case. */
5003 bool d = (ipa_get_controlled_uses (info, j) != IPA_UNDESCRIBED_USE)
5004 ? ipa_get_param_load_dereferenced (info, j) : true;
5005 bp_pack_value (&bp, d, 1);
5006 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
5008 streamer_write_bitpack (&bp);
5009 for (j = 0; j < ipa_get_param_count (info); j++)
5011 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
5012 stream_write_tree (ob, ipa_get_type (info, j), true);
5014 for (e = node->callees; e; e = e->next_callee)
5016 ipa_edge_args *args = ipa_edge_args_sum->get (e);
5018 if (!args)
5020 streamer_write_uhwi (ob, 0);
5021 continue;
5024 streamer_write_uhwi (ob,
5025 ipa_get_cs_argument_count (args) * 2
5026 + (args->polymorphic_call_contexts != NULL));
5027 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
5029 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
5030 if (args->polymorphic_call_contexts != NULL)
5031 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
5034 for (e = node->indirect_calls; e; e = e->next_callee)
5036 ipa_edge_args *args = ipa_edge_args_sum->get (e);
5037 if (!args)
5038 streamer_write_uhwi (ob, 0);
5039 else
5041 streamer_write_uhwi (ob,
5042 ipa_get_cs_argument_count (args) * 2
5043 + (args->polymorphic_call_contexts != NULL));
5044 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
5046 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
5047 if (args->polymorphic_call_contexts != NULL)
5048 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
5051 ipa_write_indirect_edge_info (ob, e);
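/* Illustrative sketch, not part of GCC: argument counts are streamed above
   as COUNT * 2 + HAVE_CONTEXTS so that ipa_read_edge_info below can recover
   both values from a single uhwi.  */

static ATTRIBUTE_UNUSED void
example_argument_count_roundtrip (void)
{
  unsigned encoded = 3 * 2 + 1;		/* Three arguments, with contexts.  */
  bool contexts_computed = encoded & 1;
  unsigned count = encoded / 2;
  gcc_checking_assert (contexts_computed && count == 3);
}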
5055 /* Stream in edge E from IB. */
5057 static void
5058 ipa_read_edge_info (class lto_input_block *ib,
5059 class data_in *data_in,
5060 struct cgraph_edge *e, bool prevails)
5062 int count = streamer_read_uhwi (ib);
5063 bool contexts_computed = count & 1;
5065 count /= 2;
5066 if (!count)
5067 return;
5068 if (prevails
5069 && (e->possibly_call_in_translation_unit_p ()
5070 /* Also stream in jump functions to builtins in the hope that they
5071 will get fnspecs. */
5072 || fndecl_built_in_p (e->callee->decl, BUILT_IN_NORMAL)))
5074 ipa_edge_args *args = ipa_edge_args_sum->get_create (e);
5075 vec_safe_grow_cleared (args->jump_functions, count, true);
5076 if (contexts_computed)
5077 vec_safe_grow_cleared (args->polymorphic_call_contexts, count, true);
5078 for (int k = 0; k < count; k++)
5080 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5081 data_in, prevails);
5082 if (contexts_computed)
5083 ipa_get_ith_polymorhic_call_context (args, k)->stream_in
5084 (ib, data_in);
5087 else
5089 for (int k = 0; k < count; k++)
5091 struct ipa_jump_func dummy;
5092 ipa_read_jump_function (ib, &dummy, e,
5093 data_in, prevails);
5094 if (contexts_computed)
5096 class ipa_polymorphic_call_context ctx;
5097 ctx.stream_in (ib, data_in);
5103 /* Stream in NODE info from IB. */
5105 static void
5106 ipa_read_node_info (class lto_input_block *ib, struct cgraph_node *node,
5107 class data_in *data_in)
5109 int k;
5110 struct cgraph_edge *e;
5111 struct bitpack_d bp;
5112 bool prevails = node->prevailing_p ();
5113 ipa_node_params *info
5114 = prevails ? ipa_node_params_sum->get_create (node) : NULL;
5116 int param_count = streamer_read_uhwi (ib);
5117 if (prevails)
5119 ipa_alloc_node_params (node, param_count);
5120 for (k = 0; k < param_count; k++)
5121 (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
5122 if (ipa_get_param_count (info) != 0)
5123 info->analysis_done = true;
5124 info->node_enqueued = false;
5126 else
5127 for (k = 0; k < param_count; k++)
5128 streamer_read_uhwi (ib);
5130 bp = streamer_read_bitpack (ib);
5131 for (k = 0; k < param_count; k++)
5133 bool load_dereferenced = bp_unpack_value (&bp, 1);
5134 bool used = bp_unpack_value (&bp, 1);
5136 if (prevails)
5138 ipa_set_param_load_dereferenced (info, k, load_dereferenced);
5139 ipa_set_param_used (info, k, used);
5142 for (k = 0; k < param_count; k++)
5144 int nuses = streamer_read_hwi (ib);
5145 tree type = stream_read_tree (ib, data_in);
5147 if (prevails)
5149 ipa_set_controlled_uses (info, k, nuses);
5150 (*info->descriptors)[k].decl_or_type = type;
5153 for (e = node->callees; e; e = e->next_callee)
5154 ipa_read_edge_info (ib, data_in, e, prevails);
5155 for (e = node->indirect_calls; e; e = e->next_callee)
5157 ipa_read_edge_info (ib, data_in, e, prevails);
5158 ipa_read_indirect_edge_info (ib, data_in, e, info);
5164 /* Write jump functions of all functions in the current LTO partition. */
5164 void
5165 ipa_prop_write_jump_functions (void)
5167 struct output_block *ob;
5168 unsigned int count = 0;
5169 lto_symtab_encoder_iterator lsei;
5170 lto_symtab_encoder_t encoder;
5172 if (!ipa_node_params_sum || !ipa_edge_args_sum)
5173 return;
5175 ob = create_output_block (LTO_section_jump_functions);
5176 encoder = ob->decl_state->symtab_node_encoder;
5177 ob->symbol = NULL;
5178 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5179 lsei_next_function_in_partition (&lsei))
5181 cgraph_node *node = lsei_cgraph_node (lsei);
5182 if (node->has_gimple_body_p ()
5183 && ipa_node_params_sum->get (node) != NULL)
5184 count++;
5187 streamer_write_uhwi (ob, count);
5189 /* Process all of the functions. */
5190 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5191 lsei_next_function_in_partition (&lsei))
5193 cgraph_node *node = lsei_cgraph_node (lsei);
5194 if (node->has_gimple_body_p ()
5195 && ipa_node_params_sum->get (node) != NULL)
5196 ipa_write_node_info (ob, node);
5198 streamer_write_char_stream (ob->main_stream, 0);
5199 produce_asm (ob, NULL);
5200 destroy_output_block (ob);
5203 /* Read section in file FILE_DATA of length LEN with data DATA. */
5205 static void
5206 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
5207 size_t len)
5209 const struct lto_function_header *header =
5210 (const struct lto_function_header *) data;
5211 const int cfg_offset = sizeof (struct lto_function_header);
5212 const int main_offset = cfg_offset + header->cfg_size;
5213 const int string_offset = main_offset + header->main_size;
5214 class data_in *data_in;
5215 unsigned int i;
5216 unsigned int count;
5218 lto_input_block ib_main ((const char *) data + main_offset,
5219 header->main_size, file_data);
5221 data_in =
5222 lto_data_in_create (file_data, (const char *) data + string_offset,
5223 header->string_size, vNULL);
5224 count = streamer_read_uhwi (&ib_main);
5226 for (i = 0; i < count; i++)
5228 unsigned int index;
5229 struct cgraph_node *node;
5230 lto_symtab_encoder_t encoder;
5232 index = streamer_read_uhwi (&ib_main);
5233 encoder = file_data->symtab_node_encoder;
5234 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5235 index));
5236 gcc_assert (node->definition);
5237 ipa_read_node_info (&ib_main, node, data_in);
5239 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5240 len);
5241 lto_data_in_delete (data_in);
5244 /* Read ipcp jump functions. */
5246 void
5247 ipa_prop_read_jump_functions (void)
5249 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5250 struct lto_file_decl_data *file_data;
5251 unsigned int j = 0;
5253 ipa_check_create_node_params ();
5254 ipa_check_create_edge_args ();
5255 ipa_register_cgraph_hooks ();
5257 while ((file_data = file_data_vec[j++]))
5259 size_t len;
5260 const char *data
5261 = lto_get_summary_section_data (file_data, LTO_section_jump_functions,
5262 &len);
5263 if (data)
5264 ipa_prop_read_section (file_data, data, len);
5268 /* Return true if the IPA-CP transformation summary TS is non-NULL and contains
5269 useful info. */
5270 static bool
5271 useful_ipcp_transformation_info_p (ipcp_transformation *ts)
5273 if (!ts)
5274 return false;
5275 if (!vec_safe_is_empty (ts->m_agg_values)
5276 || !vec_safe_is_empty (ts->m_vr))
5277 return true;
5278 return false;
5281 /* Write into OB the IPA-CP transformation summary TS describing NODE. */
5283 void
5284 write_ipcp_transformation_info (output_block *ob, cgraph_node *node,
5285 ipcp_transformation *ts)
5287 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
5288 int node_ref = lto_symtab_encoder_encode (encoder, node);
5289 streamer_write_uhwi (ob, node_ref);
5291 streamer_write_uhwi (ob, vec_safe_length (ts->m_agg_values));
5292 for (const ipa_argagg_value &av : ts->m_agg_values)
5294 struct bitpack_d bp;
5296 stream_write_tree (ob, av.value, true);
5297 streamer_write_uhwi (ob, av.unit_offset);
5298 streamer_write_uhwi (ob, av.index);
5300 bp = bitpack_create (ob->main_stream);
5301 bp_pack_value (&bp, av.by_ref, 1);
5302 streamer_write_bitpack (&bp);
5305 streamer_write_uhwi (ob, vec_safe_length (ts->m_vr));
5306 for (const ipa_vr &parm_vr : ts->m_vr)
5307 parm_vr.streamer_write (ob);
5310 /* Stream in the aggregate value replacements and value ranges for NODE from IB. */
5312 static void
5313 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
5314 data_in *data_in)
5316 unsigned int count, i;
5317 ipcp_transformation_initialize ();
5318 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
5320 count = streamer_read_uhwi (ib);
5321 if (count > 0)
5323 vec_safe_grow_cleared (ts->m_agg_values, count, true);
5324 for (i = 0; i < count; i++)
5326 ipa_argagg_value *av = &(*ts->m_agg_values)[i];
5328 av->value = stream_read_tree (ib, data_in);
5329 av->unit_offset = streamer_read_uhwi (ib);
5330 av->index = streamer_read_uhwi (ib);
5332 bitpack_d bp = streamer_read_bitpack (ib);
5333 av->by_ref = bp_unpack_value (&bp, 1);
5337 count = streamer_read_uhwi (ib);
5338 if (count > 0)
5340 vec_safe_grow_cleared (ts->m_vr, count, true);
5341 for (i = 0; i < count; i++)
5343 ipa_vr *parm_vr;
5344 parm_vr = &(*ts->m_vr)[i];
5345 parm_vr->streamer_read (ib, data_in);
5352 /* Write IPA-CP transformation summaries of all functions in the set. */
5352 void
5353 ipcp_write_transformation_summaries (void)
5355 struct output_block *ob;
5356 unsigned int count = 0;
5357 lto_symtab_encoder_t encoder;
5359 ob = create_output_block (LTO_section_ipcp_transform);
5360 encoder = ob->decl_state->symtab_node_encoder;
5361 ob->symbol = NULL;
5363 for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
5365 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
5366 cgraph_node *cnode = dyn_cast <cgraph_node *> (snode);
5367 if (!cnode)
5368 continue;
5369 ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
5370 if (useful_ipcp_transformation_info_p (ts)
5371 && lto_symtab_encoder_encode_body_p (encoder, cnode))
5372 count++;
5375 streamer_write_uhwi (ob, count);
5377 for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
5379 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
5380 cgraph_node *cnode = dyn_cast <cgraph_node *> (snode);
5381 if (!cnode)
5382 continue;
5383 ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
5384 if (useful_ipcp_transformation_info_p (ts)
5385 && lto_symtab_encoder_encode_body_p (encoder, cnode))
5386 write_ipcp_transformation_info (ob, cnode, ts);
5388 streamer_write_char_stream (ob->main_stream, 0);
5389 produce_asm (ob, NULL);
5390 destroy_output_block (ob);
5393 /* Read replacements section in file FILE_DATA of length LEN with data
5394 DATA. */
5396 static void
5397 read_replacements_section (struct lto_file_decl_data *file_data,
5398 const char *data,
5399 size_t len)
5401 const struct lto_function_header *header =
5402 (const struct lto_function_header *) data;
5403 const int cfg_offset = sizeof (struct lto_function_header);
5404 const int main_offset = cfg_offset + header->cfg_size;
5405 const int string_offset = main_offset + header->main_size;
5406 class data_in *data_in;
5407 unsigned int i;
5408 unsigned int count;
5410 lto_input_block ib_main ((const char *) data + main_offset,
5411 header->main_size, file_data);
5413 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5414 header->string_size, vNULL);
5415 count = streamer_read_uhwi (&ib_main);
5417 for (i = 0; i < count; i++)
5419 unsigned int index;
5420 struct cgraph_node *node;
5421 lto_symtab_encoder_t encoder;
5423 index = streamer_read_uhwi (&ib_main);
5424 encoder = file_data->symtab_node_encoder;
5425 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5426 index));
5427 read_ipcp_transformation_info (&ib_main, node, data_in);
5429 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5430 len);
5431 lto_data_in_delete (data_in);
5434 /* Read IPA-CP transformation summaries. */
5436 void
5437 ipcp_read_transformation_summaries (void)
5439 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5440 struct lto_file_decl_data *file_data;
5441 unsigned int j = 0;
5443 while ((file_data = file_data_vec[j++]))
5445 size_t len;
5446 const char *data
5447 = lto_get_summary_section_data (file_data, LTO_section_ipcp_transform,
5448 &len);
5449 if (data)
5450 read_replacements_section (file_data, data, len);
5454 /* Adjust the aggregate replacements in TS to reflect any parameter removals
5455 which might have already taken place. If after adjustments there are no
5456 aggregate replacements left, the m_agg_values vector will be set to NULL.
5457 Otherwise it may be shrunk. */
5459 static void
5460 adjust_agg_replacement_values (cgraph_node *node, ipcp_transformation *ts)
5462 clone_info *cinfo = clone_info::get (node);
5463 if (!cinfo || !cinfo->param_adjustments)
5464 return;
5466 auto_vec<int, 16> new_indices;
5467 cinfo->param_adjustments->get_updated_indices (&new_indices);
5468 bool removed_item = false;
5469 unsigned dst_index = 0;
5470 unsigned count = ts->m_agg_values->length ();
5471 for (unsigned i = 0; i < count; i++)
5473 ipa_argagg_value *v = &(*ts->m_agg_values)[i];
5474 gcc_checking_assert (v->index >= 0);
5476 int new_idx = -1;
5477 if ((unsigned) v->index < new_indices.length ())
5478 new_idx = new_indices[v->index];
5480 if (new_idx >= 0)
5482 v->index = new_idx;
5483 if (removed_item)
5484 (*ts->m_agg_values)[dst_index] = *v;
5485 dst_index++;
5487 else
5488 removed_item = true;
5491 if (dst_index == 0)
5493 ggc_free (ts->m_agg_values);
5494 ts->m_agg_values = NULL;
5496 else if (removed_item)
5497 ts->m_agg_values->truncate (dst_index);
5499 return;
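/* Illustrative sketch, not part of GCC: the remap-and-compact pattern used
   above, over a plain array of parameter indices.  REMAP[i] is the new
   index of original index I, or -1 if the parameter was removed; the
   returned value is the new length the caller truncates to.  */

static ATTRIBUTE_UNUSED unsigned
example_remap_and_compact (int *indices, unsigned count,
			   const int *remap, unsigned remap_len)
{
  unsigned dst = 0;
  for (unsigned i = 0; i < count; i++)
    {
      int new_idx = ((unsigned) indices[i] < remap_len
		     ? remap[indices[i]] : -1);
      if (new_idx >= 0)
	indices[dst++] = new_idx;	/* Keep, possibly shifted down.  */
    }
  return dst;
}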
5502 /* Dominator walker driving the ipcp modification phase. */
5504 class ipcp_modif_dom_walker : public dom_walker
5506 public:
5507 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
5508 vec<ipa_param_descriptor, va_gc> *descs,
5509 ipcp_transformation *ts, bool *sc)
5510 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5511 m_ts (ts), m_something_changed (sc) {}
5513 edge before_dom_children (basic_block) final override;
5514 bool cleanup_eh ()
5515 { return gimple_purge_all_dead_eh_edges (m_need_eh_cleanup); }
5517 private:
5518 struct ipa_func_body_info *m_fbi;
5519 vec<ipa_param_descriptor, va_gc> *m_descriptors;
5520 ipcp_transformation *m_ts;
5521 bool *m_something_changed;
5522 auto_bitmap m_need_eh_cleanup;
5525 edge
5526 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5528 gimple_stmt_iterator gsi;
5529 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5531 gimple *stmt = gsi_stmt (gsi);
5532 tree rhs, val, t;
5533 HOST_WIDE_INT bit_offset;
5534 poly_int64 size;
5535 int index;
5536 bool by_ref, vce;
5538 if (!gimple_assign_load_p (stmt))
5539 continue;
5540 rhs = gimple_assign_rhs1 (stmt);
5541 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5542 continue;
5544 vce = false;
5545 t = rhs;
5546 while (handled_component_p (t))
5548 /* V_C_E can do things like convert an array of integers to one
5549 bigger integer and similar things we do not handle below. */
5550 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5552 vce = true;
5553 break;
5555 t = TREE_OPERAND (t, 0);
5557 if (vce)
5558 continue;
5560 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
5561 &bit_offset, &size, &by_ref))
5562 continue;
5563 unsigned unit_offset = bit_offset / BITS_PER_UNIT;
5564 ipa_argagg_value_list avl (m_ts);
5565 tree v = avl.get_value (index, unit_offset, by_ref);
5567 if (!v
5568 || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v))), size))
5569 continue;
5571 gcc_checking_assert (is_gimple_ip_invariant (v));
5572 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v)))
5574 if (fold_convertible_p (TREE_TYPE (rhs), v))
5575 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v);
5576 else if (TYPE_SIZE (TREE_TYPE (rhs))
5577 == TYPE_SIZE (TREE_TYPE (v)))
5578 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v);
5579 else
5581 if (dump_file)
5583 fprintf (dump_file, " const ");
5584 print_generic_expr (dump_file, v);
5585 fprintf (dump_file, " can't be converted to type of ");
5586 print_generic_expr (dump_file, rhs);
5587 fprintf (dump_file, "\n");
5589 continue;
5592 else
5593 val = v;
5595 if (dump_file && (dump_flags & TDF_DETAILS))
5597 fprintf (dump_file, "Modifying stmt:\n ");
5598 print_gimple_stmt (dump_file, stmt, 0);
5600 gimple_assign_set_rhs_from_tree (&gsi, val);
5601 update_stmt (stmt);
5603 if (dump_file && (dump_flags & TDF_DETAILS))
5605 fprintf (dump_file, "into:\n ");
5606 print_gimple_stmt (dump_file, stmt, 0);
5607 fprintf (dump_file, "\n");
5610 *m_something_changed = true;
5611 if (maybe_clean_eh_stmt (stmt))
5612 bitmap_set_bit (m_need_eh_cleanup, bb->index);
5614 return NULL;
5617 /* If IPA-CP discovered a constant in parameter PARM at OFFSET of a given SIZE
5618 - whether passed by reference or not is given by BY_REF - return that
5619 constant. Otherwise return NULL_TREE. */
5621 tree
5622 ipcp_get_aggregate_const (struct function *func, tree parm, bool by_ref,
5623 HOST_WIDE_INT bit_offset, HOST_WIDE_INT bit_size)
5625 cgraph_node *node = cgraph_node::get (func->decl);
5626 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5628 if (!ts || !ts->m_agg_values)
5629 return NULL_TREE;
5631 int index = ts->get_param_index (func->decl, parm);
5632 if (index < 0)
5633 return NULL_TREE;
5635 ipa_argagg_value_list avl (ts);
5636 unsigned unit_offset = bit_offset / BITS_PER_UNIT;
5637 tree v = avl.get_value (index, unit_offset, by_ref);
5638 if (!v
5639 || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v))), bit_size))
5640 return NULL_TREE;
5642 return v;
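/* Illustrative sketch, not part of GCC: how a consumer might query the
   function above for a constant in the first four bytes of the memory PARM
   points to.  FUNC and PARM are hypothetical inputs.  */

static ATTRIBUTE_UNUSED tree
example_query_aggregate_const (struct function *func, tree parm)
{
  /* BY_REF is true because the constant lives in memory pointed to by PARM
     rather than in an aggregate passed by value.  */
  return ipcp_get_aggregate_const (func, parm, true, 0, 32);
}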
5645 /* Return true if we have recorded VALUE and MASK information for PARM.
5646 Set VALUE and MASK accordingly. */
5648 bool
5649 ipcp_get_parm_bits (tree parm, tree *value, widest_int *mask)
5651 cgraph_node *cnode = cgraph_node::get (current_function_decl);
5652 ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
5653 if (!ts
5654 || vec_safe_length (ts->m_vr) == 0
5655 || !irange::supports_p (TREE_TYPE (parm)))
5656 return false;
5658 int i = ts->get_param_index (current_function_decl, parm);
5659 if (i < 0)
5660 return false;
5661 clone_info *cinfo = clone_info::get (cnode);
5662 if (cinfo && cinfo->param_adjustments)
5664 i = cinfo->param_adjustments->get_original_index (i);
5665 if (i < 0)
5666 return false;
5669 vec<ipa_vr, va_gc> &vr = *ts->m_vr;
5670 if (!vr[i].known_p ())
5671 return false;
5672 Value_Range tmp;
5673 vr[i].get_vrange (tmp);
5674 if (tmp.undefined_p () || tmp.varying_p ())
5675 return false;
5676 irange &r = as_a <irange> (tmp);
5677 irange_bitmask bm = r.get_bitmask ();
5678 *mask = widest_int::from (bm.mask (), TYPE_SIGN (TREE_TYPE (parm)));
5679 *value = wide_int_to_tree (TREE_TYPE (parm), bm.value ());
5680 return true;
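/* Illustrative sketch, not part of GCC: interpreting the pair returned by
   the function above.  A set bit in MASK means the corresponding bit of
   PARM is unknown; a clear bit means it equals the bit in VALUE.  */

static ATTRIBUTE_UNUSED bool
example_parm_low_bit_is_zero (tree parm)
{
  tree value;
  widest_int mask;
  if (!ipcp_get_parm_bits (parm, &value, &mask))
    return false;
  /* Bit 0 must be known (mask bit clear) and be zero in VALUE.  */
  return (mask & 1) == 0 && (wi::to_widest (value) & 1) == 0;
}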
5683 /* Update value ranges of formal parameters of NODE as described in TS. */
5685 static void
5686 ipcp_update_vr (struct cgraph_node *node, ipcp_transformation *ts)
5688 if (vec_safe_is_empty (ts->m_vr))
5689 return;
5690 const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
5691 unsigned count = vr.length ();
5692 if (!count)
5693 return;
5695 auto_vec<int, 16> new_indices;
5696 bool need_remapping = false;
5697 clone_info *cinfo = clone_info::get (node);
5698 if (cinfo && cinfo->param_adjustments)
5700 cinfo->param_adjustments->get_updated_indices (&new_indices);
5701 need_remapping = true;
5703 auto_vec <tree, 16> parm_decls;
5704 push_function_arg_decls (&parm_decls, node->decl);
5706 for (unsigned i = 0; i < count; ++i)
5708 tree parm;
5709 int remapped_idx;
5710 if (need_remapping)
5712 if (i >= new_indices.length ())
5713 continue;
5714 remapped_idx = new_indices[i];
5715 if (remapped_idx < 0)
5716 continue;
5718 else
5719 remapped_idx = i;
5721 parm = parm_decls[remapped_idx];
5723 gcc_checking_assert (parm);
5724 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5726 if (!ddef || !is_gimple_reg (parm))
5727 continue;
5729 if (vr[i].known_p ())
5731 Value_Range tmp;
5732 vr[i].get_vrange (tmp);
5734 if (!tmp.undefined_p () && !tmp.varying_p ())
5736 if (dump_file)
5738 fprintf (dump_file, "Setting value range of param %u "
5739 "(now %i) ", i, remapped_idx);
5740 tmp.dump (dump_file);
5741 fprintf (dump_file, "]\n");
5743 set_range_info (ddef, tmp);
5745 if (POINTER_TYPE_P (TREE_TYPE (parm))
5746 && opt_for_fn (node->decl, flag_ipa_bit_cp))
5748 irange &r = as_a<irange> (tmp);
5749 irange_bitmask bm = r.get_bitmask ();
5750 unsigned tem = bm.mask ().to_uhwi ();
5751 unsigned HOST_WIDE_INT bitpos = bm.value ().to_uhwi ();
5752 unsigned align = tem & -tem;
5753 unsigned misalign = bitpos & (align - 1);
5755 if (align > 1)
5757 if (dump_file)
5759 fprintf (dump_file,
5760 "Adjusting mask for param %u to ", i);
5761 print_hex (bm.mask (), dump_file);
5762 fprintf (dump_file, "\n");
5765 if (dump_file)
5766 fprintf (dump_file,
5767 "Adjusting align: %u, misalign: %u\n",
5768 align, misalign);
5770 unsigned old_align, old_misalign;
5771 struct ptr_info_def *pi = get_ptr_info (ddef);
5772 bool old_known = get_ptr_info_alignment (pi, &old_align,
5773 &old_misalign);
5775 if (old_known && old_align > align)
5777 if (dump_file)
5779 fprintf (dump_file,
5780 "But alignment was already %u.\n",
5781 old_align);
5782 if ((old_misalign & (align - 1)) != misalign)
5783 fprintf (dump_file,
5784 "old_misalign (%u) and misalign "
5785 "(%u) mismatch\n",
5786 old_misalign, misalign);
5788 continue;
5791 if (dump_file
5792 && old_known
5793 && ((misalign & (old_align - 1)) != old_misalign))
5794 fprintf (dump_file,
5795 "old_misalign (%u) and misalign (%u) "
5796 "mismatch\n",
5797 old_misalign, misalign);
5799 set_ptr_info_alignment (pi, align, misalign);
5802 else if (dump_file && INTEGRAL_TYPE_P (TREE_TYPE (parm)))
5804 irange &r = as_a<irange> (tmp);
5805 irange_bitmask bm = r.get_bitmask ();
5806 unsigned prec = TYPE_PRECISION (TREE_TYPE (parm));
5807 if (wi::ne_p (bm.mask (), wi::shwi (-1, prec)))
5809 fprintf (dump_file,
5810 "Adjusting mask for param %u to ", i);
5811 print_hex (bm.mask (), dump_file);
5812 fprintf (dump_file, "\n");
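/* Illustrative sketch, not part of GCC: the ALIGN = MASK & -MASK step above
   on concrete numbers.  A mask of 0xfffffff8 says the low three bits are
   known; with known value 0x4 that means align 8, misalign 4.  */

static ATTRIBUTE_UNUSED void
example_align_from_known_bits (void)
{
  unsigned mask = 0xfffffff8;	/* Set bits are unknown.  */
  unsigned value = 0x4;		/* Known bits of the pointer.  */
  unsigned align = mask & -mask;	/* Lowest unknown bit -> 8.  */
  unsigned misalign = value & (align - 1);	/* -> 4.  */
  gcc_checking_assert (align == 8 && misalign == 4);
}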
5820 /* IPCP transformation phase doing propagation of aggregate values. */
5822 unsigned int
5823 ipcp_transform_function (struct cgraph_node *node)
5825 struct ipa_func_body_info fbi;
5826 int param_count;
5828 gcc_checking_assert (cfun);
5829 gcc_checking_assert (current_function_decl);
5831 if (dump_file)
5832 fprintf (dump_file, "Modification phase of node %s\n",
5833 node->dump_name ());
5835 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5836 if (!ts
5837 || (vec_safe_is_empty (ts->m_agg_values)
5838 && vec_safe_is_empty (ts->m_vr)))
5839 return 0;
5841 ts->maybe_create_parm_idx_map (cfun->decl);
5842 ipcp_update_vr (node, ts);
5843 if (vec_safe_is_empty (ts->m_agg_values))
5844 return 0;
5845 param_count = count_formal_params (node->decl);
5846 if (param_count == 0)
5847 return 0;
5849 adjust_agg_replacement_values (node, ts);
5850 if (vec_safe_is_empty (ts->m_agg_values))
5852 if (dump_file)
5853 fprintf (dump_file, " All affected aggregate parameters were either "
5854 "removed or converted into scalars, phase done.\n");
5855 return 0;
5857 if (dump_file)
5859 fprintf (dump_file, " Aggregate replacements:");
5860 ipa_argagg_value_list avs (ts);
5861 avs.dump (dump_file);
5864 fbi.node = node;
5865 fbi.info = NULL;
5866 fbi.bb_infos = vNULL;
5867 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
5868 fbi.param_count = param_count;
5869 fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);
5871 vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
5872 vec_safe_grow_cleared (descriptors, param_count, true);
5873 ipa_populate_param_decls (node, *descriptors);
5874 bool modified_mem_access = false;
5875 calculate_dominance_info (CDI_DOMINATORS);
5876 ipcp_modif_dom_walker walker (&fbi, descriptors, ts, &modified_mem_access);
5877 walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5878 free_dominance_info (CDI_DOMINATORS);
5879 bool cfg_changed = walker.cleanup_eh ();
5881 int i;
5882 struct ipa_bb_info *bi;
5883 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5884 free_ipa_bb_info (bi);
5885 fbi.bb_infos.release ();
5887 vec_free (descriptors);
5888 if (cfg_changed)
5889 delete_unreachable_blocks_update_callgraph (node, false);
5891 return modified_mem_access ? TODO_update_ssa_only_virtuals : 0;
5895 #include "gt-ipa-prop.h"