gcc/ipa-prop.cc
1 /* Interprocedural analyses.
2 Copyright (C) 2005-2024 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "alloc-pool.h"
28 #include "tree-pass.h"
29 #include "ssa.h"
30 #include "tree-streamer.h"
31 #include "cgraph.h"
32 #include "diagnostic.h"
33 #include "fold-const.h"
34 #include "gimple-iterator.h"
35 #include "gimple-fold.h"
36 #include "tree-eh.h"
37 #include "calls.h"
38 #include "stor-layout.h"
39 #include "print-tree.h"
40 #include "gimplify.h"
41 #include "gimplify-me.h"
42 #include "gimple-walk.h"
43 #include "symbol-summary.h"
44 #include "ipa-prop.h"
45 #include "tree-cfg.h"
46 #include "tree-dfa.h"
47 #include "tree-inline.h"
48 #include "ipa-fnsummary.h"
49 #include "gimple-pretty-print.h"
50 #include "ipa-utils.h"
51 #include "dbgcnt.h"
52 #include "domwalk.h"
53 #include "builtins.h"
54 #include "tree-cfgcleanup.h"
55 #include "options.h"
56 #include "symtab-clones.h"
57 #include "attr-fnspec.h"
58 #include "gimple-range.h"
59 #include "value-range-storage.h"
61 /* Function summary where the parameter infos are actually stored. */
62 ipa_node_params_t *ipa_node_params_sum = NULL;
64 function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;
66 /* Edge summary for IPA-CP edge information. */
67 ipa_edge_args_sum_t *ipa_edge_args_sum;
69 /* Traits for a hash table for reusing ranges. */
71 struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <ipa_vr *>
73 typedef ipa_vr *value_type;
74 typedef const vrange *compare_type;
75 static hashval_t
76 hash (const ipa_vr *p)
78 // This never gets called, except in the verification code, as
79 // ipa_get_value_range() calculates the hash itself. This
80 // function is mostly here for completeness' sake.
81 Value_Range vr;
82 p->get_vrange (vr);
83 inchash::hash hstate;
84 add_vrange (vr, hstate);
85 return hstate.end ();
87 static bool
88 equal (const ipa_vr *a, const vrange *b)
90 return a->equal_p (*b);
92 static const bool empty_zero_p = true;
93 static void
94 mark_empty (ipa_vr *&p)
96 p = NULL;
98 static bool
99 is_empty (const ipa_vr *p)
101 return p == NULL;
103 static bool
104 is_deleted (const ipa_vr *p)
106 return p == reinterpret_cast<const ipa_vr *> (1);
108 static void
109 mark_deleted (ipa_vr *&p)
111 p = reinterpret_cast<ipa_vr *> (1);
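/* An illustrative sketch (editor's example, not part of the original file)
   of how the traits above are meant to be consumed.  Callers hash the
   vrange themselves and probe the table, so equal is the workhorse and
   hash above is essentially only exercised by verification:

     // Hypothetical helper; the name is for illustration only.
     static ipa_vr *
     lookup_or_create_ipa_vr (const vrange &r)
     {
       inchash::hash hstate;
       add_vrange (r, hstate);
       hashval_t hash = hstate.end ();
       ipa_vr **slot
         = ipa_vr_hash_table->find_slot_with_hash (&r, hash, INSERT);
       if (!*slot)
         *slot = new (ggc_alloc<ipa_vr> ()) ipa_vr (r);
       return *slot;
     }  */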
115 /* Hash table for avoiding repeated allocations of equal ranges. */
116 static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
118 /* Holders of ipa cgraph hooks: */
119 static struct cgraph_node_hook_list *function_insertion_hook_holder;
121 /* Description of a reference to an IPA constant. */
122 struct ipa_cst_ref_desc
124 /* Edge that corresponds to the statement which took the reference. */
125 struct cgraph_edge *cs;
126 /* Linked list of duplicates created when call graph edges are cloned. */
127 struct ipa_cst_ref_desc *next_duplicate;
128 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
129 is out of control. */
130 int refcount;
133 /* Allocation pool for reference descriptions. */
135 static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
136 ("IPA-PROP ref descriptions");
138 ipa_vr::ipa_vr ()
139 : m_storage (NULL),
140 m_type (NULL)
144 ipa_vr::ipa_vr (const vrange &r)
145 : m_storage (ggc_alloc_vrange_storage (r)),
146 m_type (r.type ())
150 bool
151 ipa_vr::equal_p (const vrange &r) const
153 gcc_checking_assert (!r.undefined_p ());
154 return (types_compatible_p (m_type, r.type ()) && m_storage->equal_p (r));
157 void
158 ipa_vr::get_vrange (Value_Range &r) const
160 r.set_type (m_type);
161 m_storage->get_vrange (r, m_type);
164 void
165 ipa_vr::set_unknown ()
167 if (m_storage)
168 ggc_free (m_storage);
170 m_storage = NULL;
173 void
174 ipa_vr::streamer_read (lto_input_block *ib, data_in *data_in)
176 struct bitpack_d bp = streamer_read_bitpack (ib);
177 bool known = bp_unpack_value (&bp, 1);
178 if (known)
180 Value_Range vr;
181 streamer_read_value_range (ib, data_in, vr);
182 if (!m_storage || !m_storage->fits_p (vr))
184 if (m_storage)
185 ggc_free (m_storage);
186 m_storage = ggc_alloc_vrange_storage (vr);
188 m_storage->set_vrange (vr);
189 m_type = vr.type ();
191 else
193 m_storage = NULL;
194 m_type = NULL;
198 void
199 ipa_vr::streamer_write (output_block *ob) const
201 struct bitpack_d bp = bitpack_create (ob->main_stream);
202 bp_pack_value (&bp, !!m_storage, 1);
203 streamer_write_bitpack (&bp);
204 if (m_storage)
206 Value_Range vr (m_type);
207 m_storage->get_vrange (vr, m_type);
208 streamer_write_vrange (ob, vr);
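/* A summary of the stream layout shared by the two streamer functions above
   (editor's sketch, derived from the code itself):

     bitpack:  [1 bit]  "known" flag, i.e. whether m_storage is non-NULL
     payload:  [vrange] the value range itself, present only when the flag
                        is set

   streamer_read unpacks the same flag and, when it is set, reads the vrange
   back and (re)allocates m_storage so that it fits.  */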
212 void
213 ipa_vr::dump (FILE *out) const
215 if (known_p ())
217 Value_Range vr (m_type);
218 m_storage->get_vrange (vr, m_type);
219 vr.dump (out);
221 else
222 fprintf (out, "NO RANGE");
225 // These stubs are because we use an ipa_vr in a hash_traits and
226 // hash-traits.h defines an extern of gt_ggc_mx (T &) instead of
227 // picking up the gt_ggc_mx (T *) version.
228 void
229 gt_pch_nx (ipa_vr *&x)
231 return gt_pch_nx ((ipa_vr *) x);
234 void
235 gt_ggc_mx (ipa_vr *&x)
237 return gt_ggc_mx ((ipa_vr *) x);
240 /* Analysis summary of a function call's return value. */
241 struct GTY(()) ipa_return_value_summary
243 /* Known value range.
244 This needs to be wrapped in a structure due to the specific way
245 we allocate ipa_vr. */
246 ipa_vr *vr;
249 /* Function summary for return values. */
250 class ipa_return_value_sum_t : public function_summary <ipa_return_value_summary *>
252 public:
253 ipa_return_value_sum_t (symbol_table *table, bool ggc):
254 function_summary <ipa_return_value_summary *> (table, ggc) { }
256 /* Hook that is called by summary when a node is duplicated. */
257 void duplicate (cgraph_node *,
258 cgraph_node *,
259 ipa_return_value_summary *data,
260 ipa_return_value_summary *data2) final override
262 *data2 = *data;
266 /* Variable holding the return value summary. */
267 static GTY(()) function_summary <ipa_return_value_summary *> *ipa_return_value_sum;
270 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
271 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
273 static bool
274 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
276 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
278 if (!fs_opts)
279 return false;
280 return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
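/* For example (an illustrative snippet, not from this file), the predicate
   above returns true for a function whose function-specific options disable
   optimization altogether:

     __attribute__ ((optimize ("O0")))
     int not_analyzed (int x) { return x + 1; }

   because opt_for_fn (decl, optimize) is 0 for it.  The same happens when
   IPA-CP is disabled for the function, e.g. with optimize ("no-ipa-cp").  */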
283 /* Return the index of the formal parameter whose tree is PTREE, looked up
284 in the parameter DESCRIPTORS. */
286 static int
287 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
288 tree ptree)
290 int i, count;
292 count = vec_safe_length (descriptors);
293 for (i = 0; i < count; i++)
294 if ((*descriptors)[i].decl_or_type == ptree)
295 return i;
297 return -1;
300 /* Return the index of the formal parameter whose tree is PTREE in the
301 function which corresponds to INFO. */
303 int
304 ipa_get_param_decl_index (class ipa_node_params *info, tree ptree)
306 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
309 /* Populate the param_decl field in parameter DESCRIPTORS that correspond to
310 NODE. */
312 static void
313 ipa_populate_param_decls (struct cgraph_node *node,
314 vec<ipa_param_descriptor, va_gc> &descriptors)
316 tree fndecl;
317 tree fnargs;
318 tree parm;
319 int param_num;
321 fndecl = node->decl;
322 gcc_assert (gimple_has_body_p (fndecl));
323 fnargs = DECL_ARGUMENTS (fndecl);
324 param_num = 0;
325 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
327 descriptors[param_num].decl_or_type = parm;
328 unsigned int cost = estimate_move_cost (TREE_TYPE (parm), true);
329 descriptors[param_num].move_cost = cost;
330 /* Watch overflow, move_cost is a bitfield. */
331 gcc_checking_assert (cost == descriptors[param_num].move_cost);
332 param_num++;
336 /* Return how many formal parameters FNDECL has. */
338 int
339 count_formal_params (tree fndecl)
341 tree parm;
342 int count = 0;
343 gcc_assert (gimple_has_body_p (fndecl));
345 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
346 count++;
348 return count;
351 /* Dump information about the Ith formal parameter of the function
352 corresponding to INFO to FILE. Note there is no setter function as the
353 descriptor array is built just once using ipa_initialize_node_params. */
355 void
356 ipa_dump_param (FILE *file, class ipa_node_params *info, int i)
358 fprintf (file, "param #%i", i);
359 if ((*info->descriptors)[i].decl_or_type)
361 fprintf (file, " ");
362 print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
366 /* If necessary, allocate vector of parameter descriptors in info of NODE.
367 Return true if they were allocated, false if not. */
369 static bool
370 ipa_alloc_node_params (struct cgraph_node *node, int param_count)
372 ipa_node_params *info = ipa_node_params_sum->get_create (node);
374 if (!info->descriptors && param_count)
376 vec_safe_grow_cleared (info->descriptors, param_count, true);
377 return true;
379 else
380 return false;
383 /* Initialize the ipa_node_params structure associated with NODE by counting
384 the function parameters, creating the descriptors and populating their
385 param_decls. */
387 void
388 ipa_initialize_node_params (struct cgraph_node *node)
390 ipa_node_params *info = ipa_node_params_sum->get_create (node);
392 if (!info->descriptors
393 && ipa_alloc_node_params (node, count_formal_params (node->decl)))
394 ipa_populate_param_decls (node, *info->descriptors);
397 /* Print VAL which is extracted from a jump function to F. */
399 static void
400 ipa_print_constant_value (FILE *f, tree val)
402 print_generic_expr (f, val);
404 /* This is in keeping with values_equal_for_ipcp_p. */
405 if (TREE_CODE (val) == ADDR_EXPR
406 && (TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL
407 || (TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
408 && DECL_IN_CONSTANT_POOL (TREE_OPERAND (val, 0)))))
410 fputs (" -> ", f);
411 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
415 /* Print the jump functions associated with call graph edge CS to file F. */
417 static void
418 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
420 ipa_edge_args *args = ipa_edge_args_sum->get (cs);
421 int count = ipa_get_cs_argument_count (args);
423 for (int i = 0; i < count; i++)
425 struct ipa_jump_func *jump_func;
426 enum jump_func_type type;
428 jump_func = ipa_get_ith_jump_func (args, i);
429 type = jump_func->type;
431 fprintf (f, " param %d: ", i);
432 if (type == IPA_JF_UNKNOWN)
433 fprintf (f, "UNKNOWN\n");
434 else if (type == IPA_JF_CONST)
436 fprintf (f, "CONST: ");
437 ipa_print_constant_value (f, jump_func->value.constant.value);
438 fprintf (f, "\n");
440 else if (type == IPA_JF_PASS_THROUGH)
442 fprintf (f, "PASS THROUGH: ");
443 fprintf (f, "%d, op %s",
444 jump_func->value.pass_through.formal_id,
445 get_tree_code_name (jump_func->value.pass_through.operation));
446 if (jump_func->value.pass_through.operation != NOP_EXPR)
448 fprintf (f, " ");
449 print_generic_expr (f, jump_func->value.pass_through.operand);
451 if (jump_func->value.pass_through.agg_preserved)
452 fprintf (f, ", agg_preserved");
453 if (jump_func->value.pass_through.refdesc_decremented)
454 fprintf (f, ", refdesc_decremented");
455 fprintf (f, "\n");
457 else if (type == IPA_JF_ANCESTOR)
459 fprintf (f, "ANCESTOR: ");
460 fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
461 jump_func->value.ancestor.formal_id,
462 jump_func->value.ancestor.offset);
463 if (jump_func->value.ancestor.agg_preserved)
464 fprintf (f, ", agg_preserved");
465 if (jump_func->value.ancestor.keep_null)
466 fprintf (f, ", keep_null");
467 fprintf (f, "\n");
470 if (jump_func->agg.items)
472 struct ipa_agg_jf_item *item;
473 int j;
475 fprintf (f, " Aggregate passed by %s:\n",
476 jump_func->agg.by_ref ? "reference" : "value");
477 FOR_EACH_VEC_ELT (*jump_func->agg.items, j, item)
479 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
480 item->offset);
481 fprintf (f, "type: ");
482 print_generic_expr (f, item->type);
483 fprintf (f, ", ");
484 if (item->jftype == IPA_JF_PASS_THROUGH)
485 fprintf (f, "PASS THROUGH: %d,",
486 item->value.pass_through.formal_id);
487 else if (item->jftype == IPA_JF_LOAD_AGG)
489 fprintf (f, "LOAD AGG: %d",
490 item->value.pass_through.formal_id);
491 fprintf (f, " [offset: " HOST_WIDE_INT_PRINT_DEC ", by %s],",
492 item->value.load_agg.offset,
493 item->value.load_agg.by_ref ? "reference"
494 : "value");
497 if (item->jftype == IPA_JF_PASS_THROUGH
498 || item->jftype == IPA_JF_LOAD_AGG)
500 fprintf (f, " op %s",
501 get_tree_code_name (item->value.pass_through.operation));
502 if (item->value.pass_through.operation != NOP_EXPR)
504 fprintf (f, " ");
505 print_generic_expr (f, item->value.pass_through.operand);
508 else if (item->jftype == IPA_JF_CONST)
510 fprintf (f, "CONST: ");
511 ipa_print_constant_value (f, item->value.constant);
513 else if (item->jftype == IPA_JF_UNKNOWN)
514 fprintf (f, "UNKNOWN: " HOST_WIDE_INT_PRINT_DEC " bits",
515 tree_to_uhwi (TYPE_SIZE (item->type)));
516 fprintf (f, "\n");
520 class ipa_polymorphic_call_context *ctx
521 = ipa_get_ith_polymorhic_call_context (args, i);
522 if (ctx && !ctx->useless_p ())
524 fprintf (f, " Context: ");
525 ctx->dump (f);
528 if (jump_func->m_vr)
530 jump_func->m_vr->dump (f);
531 fprintf (f, "\n");
533 else
534 fprintf (f, " Unknown VR\n");
539 /* Print the jump functions of all arguments on all call graph edges going from
540 NODE to file F. */
542 void
543 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
545 struct cgraph_edge *cs;
547 fprintf (f, " Jump functions of caller %s:\n", node->dump_name ());
548 for (cs = node->callees; cs; cs = cs->next_callee)
551 fprintf (f, " callsite %s -> %s : \n",
552 node->dump_name (),
553 cs->callee->dump_name ());
554 if (!ipa_edge_args_info_available_for_edge_p (cs))
555 fprintf (f, " no arg info\n");
556 else
557 ipa_print_node_jump_functions_for_edge (f, cs);
560 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
562 class cgraph_indirect_call_info *ii;
564 ii = cs->indirect_info;
565 if (ii->agg_contents)
566 fprintf (f, " indirect %s callsite, calling param %i, "
567 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
568 ii->member_ptr ? "member ptr" : "aggregate",
569 ii->param_index, ii->offset,
570 ii->by_ref ? "by reference" : "by value");
571 else
572 fprintf (f, " indirect %s callsite, calling param %i, "
573 "offset " HOST_WIDE_INT_PRINT_DEC,
574 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
575 ii->offset);
577 if (cs->call_stmt)
579 fprintf (f, ", for stmt ");
580 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
582 else
583 fprintf (f, "\n");
584 if (ii->polymorphic)
585 ii->context.dump (f);
586 if (!ipa_edge_args_info_available_for_edge_p (cs))
587 fprintf (f, " no arg info\n");
588 else
589 ipa_print_node_jump_functions_for_edge (f, cs);
593 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
595 void
596 ipa_print_all_jump_functions (FILE *f)
598 struct cgraph_node *node;
600 fprintf (f, "\nJump functions:\n");
601 FOR_EACH_FUNCTION (node)
603 ipa_print_node_jump_functions (f, node);
607 /* Set JFUNC to be an unknown jump function, i.e. one saying we really know nothing about the actual value. */
609 static void
610 ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
612 jfunc->type = IPA_JF_UNKNOWN;
615 /* Set JFUNC to be a copy of another jump function (to be used by jump function
616 combination code). The two functions will share their rdesc. */
618 static void
619 ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
620 struct ipa_jump_func *src)
623 gcc_checking_assert (src->type == IPA_JF_CONST);
624 dst->type = IPA_JF_CONST;
625 dst->value.constant = src->value.constant;
628 /* Set JFUNC to be a constant jump function. */
630 static void
631 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
632 struct cgraph_edge *cs)
634 jfunc->type = IPA_JF_CONST;
635 jfunc->value.constant.value = unshare_expr_without_location (constant);
637 if (TREE_CODE (constant) == ADDR_EXPR
638 && (TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL
639 || (VAR_P (TREE_OPERAND (constant, 0))
640 && TREE_STATIC (TREE_OPERAND (constant, 0)))))
642 struct ipa_cst_ref_desc *rdesc;
644 rdesc = ipa_refdesc_pool.allocate ();
645 rdesc->cs = cs;
646 rdesc->next_duplicate = NULL;
647 rdesc->refcount = 1;
648 jfunc->value.constant.rdesc = rdesc;
650 else
651 jfunc->value.constant.rdesc = NULL;
654 /* Set JFUNC to be a simple pass-through jump function. */
655 static void
656 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
657 bool agg_preserved)
659 jfunc->type = IPA_JF_PASS_THROUGH;
660 jfunc->value.pass_through.operand = NULL_TREE;
661 jfunc->value.pass_through.formal_id = formal_id;
662 jfunc->value.pass_through.operation = NOP_EXPR;
663 jfunc->value.pass_through.agg_preserved = agg_preserved;
664 jfunc->value.pass_through.refdesc_decremented = false;
667 /* Set JFUNC to be a unary pass-through jump function. */
669 static void
670 ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
671 enum tree_code operation)
673 jfunc->type = IPA_JF_PASS_THROUGH;
674 jfunc->value.pass_through.operand = NULL_TREE;
675 jfunc->value.pass_through.formal_id = formal_id;
676 jfunc->value.pass_through.operation = operation;
677 jfunc->value.pass_through.agg_preserved = false;
678 jfunc->value.pass_through.refdesc_decremented = false;
680 /* Set JFUNC to be an arithmetic pass-through jump function. */
682 static void
683 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
684 tree operand, enum tree_code operation)
686 jfunc->type = IPA_JF_PASS_THROUGH;
687 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
688 jfunc->value.pass_through.formal_id = formal_id;
689 jfunc->value.pass_through.operation = operation;
690 jfunc->value.pass_through.agg_preserved = false;
691 jfunc->value.pass_through.refdesc_decremented = false;
694 /* Set JFUNC to be an ancestor jump function. */
696 static void
697 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
698 int formal_id, bool agg_preserved, bool keep_null)
700 jfunc->type = IPA_JF_ANCESTOR;
701 jfunc->value.ancestor.formal_id = formal_id;
702 jfunc->value.ancestor.offset = offset;
703 jfunc->value.ancestor.agg_preserved = agg_preserved;
704 jfunc->value.ancestor.keep_null = keep_null;
707 /* Get IPA BB information about the given BB. FBI is the context of analysis
708 of this function body. */
710 static struct ipa_bb_info *
711 ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
713 gcc_checking_assert (fbi);
714 return &fbi->bb_infos[bb->index];
717 /* Structure to be passed in between detect_type_change and
718 check_stmt_for_type_change. */
720 struct prop_type_change_info
722 /* Offset into the object where there is the virtual method pointer we are
723 looking for. */
724 HOST_WIDE_INT offset;
725 /* The declaration or SSA_NAME pointer of the base that we are checking for
726 type change. */
727 tree object;
728 /* Set to true if dynamic type change has been detected. */
729 bool type_maybe_changed;
732 /* Return true if STMT can modify a virtual method table pointer.
734 This function makes special assumptions about both constructors and
735 destructors which are all the functions that are allowed to alter the VMT
736 pointers. It assumes that destructors begin with assignment into all VMT
737 pointers and that constructors essentially look in the following way:
739 1) The very first thing they do is that they call constructors of ancestor
740 sub-objects that have them.
742 2) Then VMT pointers of this and all its ancestors are set to new values
743 corresponding to the type of the constructor.
745 3) Only afterwards, other stuff such as constructors of member sub-objects
746 and the code written by the user is run. Only this may include calling
747 virtual functions, directly or indirectly.
749 There is no way to call a constructor of an ancestor sub-object in any
750 other way.
752 This means that we do not have to care whether constructors get the correct
753 type information because they will always change it (in fact, if we define
754 the type to be given by the VMT pointer, it is undefined).
756 The most important fact to derive from the above is that if, for some
757 statement in section 3, we try to detect whether the dynamic type has
758 changed, we can safely ignore all calls as we examine the function body
759 backwards until we reach statements in section 2 because these calls cannot
760 be ancestor constructors or destructors (if the input is not bogus) and so
761 do not change the dynamic type (this holds true only for automatically
762 allocated objects but at the moment we devirtualize only these). We then
763 must detect that statements in section 2 change the dynamic type and can try
764 to derive the new type. That is enough and we can stop, we will never see
765 the calls into constructors of sub-objects in this code. Therefore we can
766 safely ignore all call statements that we traverse.
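/* An illustrative C++ sketch (editor's example, not from the original
   sources) of the constructor shape assumed above:

     struct A { virtual void f (); A (); };
     struct B : A { virtual void f (); B (); };

     B::B () : A ()   // 1) ancestor constructors run first
     {
       // 2) the compiler then stores B's VMT pointer into *this
       //    (conceptually: this->_vptr = &vtable-for-B;)
       // 3) only afterwards does user-written code run, and only it
       //    may call virtual functions, directly or indirectly
     }

   A backward walk starting in section 3 can therefore ignore calls until it
   reaches the VMT stores of section 2.  */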
769 static bool
770 stmt_may_be_vtbl_ptr_store (gimple *stmt)
772 if (is_gimple_call (stmt))
773 return false;
774 if (gimple_clobber_p (stmt))
775 return false;
776 else if (is_gimple_assign (stmt))
778 tree lhs = gimple_assign_lhs (stmt);
780 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
782 if (flag_strict_aliasing
783 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
784 return false;
786 if (TREE_CODE (lhs) == COMPONENT_REF
787 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
788 return false;
789 /* In the future we might want to use get_ref_base_and_extent to find
790 if there is a field corresponding to the offset and if so, proceed
791 almost like if it was a component ref. */
794 return true;
797 /* Callback of walk_aliased_vdefs and a helper function for detect_type_change
798 to check whether a particular statement may modify the virtual table
799 pointer. It stores its result into DATA, which points to a
800 prop_type_change_info structure. */
802 static bool
803 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
805 gimple *stmt = SSA_NAME_DEF_STMT (vdef);
806 struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
808 if (stmt_may_be_vtbl_ptr_store (stmt))
810 tci->type_maybe_changed = true;
811 return true;
813 else
814 return false;
817 /* See if ARG is a PARAM_DECL describing an instance passed by pointer
818 or reference in FUNCTION. Return true if the dynamic type may change
819 between the beginning of the function and the point where CALL is invoked.
821 Generally functions are not allowed to change the type of such instances,
822 but they may call destructors. We assume that methods cannot destroy the
823 THIS pointer. Also, as a special case, constructors and destructors may
824 change the type of the THIS pointer. */
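/* An illustrative C++ case (editor's example, not from the original
   sources) of why inlined cdtors matter here:

     struct B { virtual ~B (); };
     struct D : B { ~D (); };

   After the body of D::~D finishes, the VMT pointer of *this is switched
   to B's vtable before B::~B runs, so the dynamic type of THIS really does
   change inside destructors.  This is why the inline stack is scanned for
   ctor/dtor blocks below.  */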
826 static bool
827 param_type_may_change_p (tree function, tree arg, gimple *call)
829 /* Pure functions cannot change the dynamic type;
830 that requires writing to memory. */
831 if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
832 return false;
833 /* We need to check if we are within an inlined constructor
834 or destructor (ideally we would have a way to check that the
835 inlined cdtor is actually working on ARG, but we don't have an
836 easy way to tie this down, so punt on all non-pure cdtors.
837 We may also record the types of cdtors and, once we know the type
838 of the instance, match them.
840 Also code unification optimizations may merge calls from
841 different blocks making return values unreliable. So
842 do nothing during late optimization. */
843 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
844 return true;
845 if (TREE_CODE (arg) == SSA_NAME
846 && SSA_NAME_IS_DEFAULT_DEF (arg)
847 && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
849 /* Normal (non-THIS) argument. */
850 if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
851 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
852 /* THIS pointer of a method - here we want to watch constructors
853 and destructors as those definitely may change the dynamic
854 type. */
855 || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
856 && !DECL_CXX_CONSTRUCTOR_P (function)
857 && !DECL_CXX_DESTRUCTOR_P (function)
858 && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
860 /* Walk the inline stack and watch out for ctors/dtors. */
861 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
862 block = BLOCK_SUPERCONTEXT (block))
863 if (inlined_polymorphic_ctor_dtor_block_p (block, false))
864 return true;
865 return false;
868 return true;
871 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
872 callsite CALL) by looking for assignments to its virtual table pointer. If
873 it has, return true. ARG is the object itself (not a pointer
874 to it, unless dereferenced). BASE is the base of the memory access as
875 returned by get_ref_base_and_extent, as is the offset.
877 This is a helper function for detect_type_change and detect_type_change_ssa
878 that does the heavy work, which is usually unnecessary. */
880 static bool
881 detect_type_change_from_memory_writes (ipa_func_body_info *fbi, tree arg,
882 tree base, tree comp_type, gcall *call,
883 HOST_WIDE_INT offset)
885 struct prop_type_change_info tci;
886 ao_ref ao;
888 gcc_checking_assert (DECL_P (arg)
889 || TREE_CODE (arg) == MEM_REF
890 || handled_component_p (arg));
892 comp_type = TYPE_MAIN_VARIANT (comp_type);
894 /* Const calls cannot call virtual methods through VMT and so type changes do
895 not matter. */
896 if (!flag_devirtualize || !gimple_vuse (call)
897 /* Be sure expected_type is polymorphic. */
898 || !comp_type
899 || TREE_CODE (comp_type) != RECORD_TYPE
900 || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
901 || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
902 return true;
904 if (fbi->aa_walk_budget == 0)
905 return false;
907 ao_ref_init (&ao, arg);
908 ao.base = base;
909 ao.offset = offset;
910 ao.size = POINTER_SIZE;
911 ao.max_size = ao.size;
913 tci.offset = offset;
914 tci.object = get_base_address (arg);
915 tci.type_maybe_changed = false;
917 int walked
918 = walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
919 &tci, NULL, NULL, fbi->aa_walk_budget);
920 if (walked >= 0)
921 fbi->aa_walk_budget -= walked;
922 else
923 fbi->aa_walk_budget = 0;
925 if (walked >= 0 && !tci.type_maybe_changed)
926 return false;
928 return true;
931 /* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
932 If so, return true. ARG is the object itself (not a pointer
933 to it, unless dereferenced). BASE is the base of the memory access as
934 returned by get_ref_base_and_extent, as is the offset. */
936 static bool
937 detect_type_change (ipa_func_body_info *fbi, tree arg, tree base,
938 tree comp_type, gcall *call,
939 HOST_WIDE_INT offset)
941 if (!flag_devirtualize)
942 return false;
944 if (TREE_CODE (base) == MEM_REF
945 && !param_type_may_change_p (current_function_decl,
946 TREE_OPERAND (base, 0),
947 call))
948 return false;
949 return detect_type_change_from_memory_writes (fbi, arg, base, comp_type,
950 call, offset);
953 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
954 SSA name (its dereference will become the base and the offset is assumed to
955 be zero). */
957 static bool
958 detect_type_change_ssa (ipa_func_body_info *fbi, tree arg, tree comp_type,
959 gcall *call)
961 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
962 if (!flag_devirtualize
963 || !POINTER_TYPE_P (TREE_TYPE (arg)))
964 return false;
966 if (!param_type_may_change_p (current_function_decl, arg, call))
967 return false;
969 arg = build2 (MEM_REF, ptr_type_node, arg,
970 build_int_cst (ptr_type_node, 0));
972 return detect_type_change_from_memory_writes (fbi, arg, arg, comp_type,
973 call, 0);
976 /* Callback of walk_aliased_vdefs. Records that it has been invoked by
977 setting the boolean variable pointed to by DATA. */
979 static bool
980 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
981 void *data)
983 bool *b = (bool *) data;
984 *b = true;
985 return true;
988 /* Find the nearest valid aa status for parameter specified by INDEX that
989 dominates BB. */
991 static struct ipa_param_aa_status *
992 find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
993 int index)
995 while (true)
997 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
998 if (!bb)
999 return NULL;
1000 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
1001 if (!bi->param_aa_statuses.is_empty ()
1002 && bi->param_aa_statuses[index].valid)
1003 return &bi->param_aa_statuses[index];
1007 /* Get AA status structure for the given BB and parameter with INDEX. Allocate
1008 structures and/or initialize the result with a dominating description as
1009 necessary. */
1011 static struct ipa_param_aa_status *
1012 parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
1013 int index)
1015 gcc_checking_assert (fbi);
1016 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
1017 if (bi->param_aa_statuses.is_empty ())
1018 bi->param_aa_statuses.safe_grow_cleared (fbi->param_count, true);
1019 struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
1020 if (!paa->valid)
1022 gcc_checking_assert (!paa->parm_modified
1023 && !paa->ref_modified
1024 && !paa->pt_modified);
1025 struct ipa_param_aa_status *dom_paa;
1026 dom_paa = find_dominating_aa_status (fbi, bb, index);
1027 if (dom_paa)
1028 *paa = *dom_paa;
1029 else
1030 paa->valid = true;
1033 return paa;
1036 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
1037 a value known not to be modified in this function before reaching the
1038 statement STMT. FBI holds information about the function we have gathered
1039 so far, which does not survive the summary building stage. */
1041 static bool
1042 parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
1043 gimple *stmt, tree parm_load)
1045 struct ipa_param_aa_status *paa;
1046 bool modified = false;
1047 ao_ref refd;
1049 tree base = get_base_address (parm_load);
1050 gcc_assert (TREE_CODE (base) == PARM_DECL);
1051 if (TREE_READONLY (base))
1052 return true;
1054 gcc_checking_assert (fbi);
1055 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
1056 if (paa->parm_modified || fbi->aa_walk_budget == 0)
1057 return false;
1059 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
1060 ao_ref_init (&refd, parm_load);
1061 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
1062 &modified, NULL, NULL,
1063 fbi->aa_walk_budget);
1064 if (walked < 0)
1066 modified = true;
1067 fbi->aa_walk_budget = 0;
1069 else
1070 fbi->aa_walk_budget -= walked;
1071 if (paa && modified)
1072 paa->parm_modified = true;
1073 return !modified;
1076 /* If STMT is an assignment that loads a value from a parameter declaration,
1077 return the index of the parameter in ipa_node_params which has not been
1078 modified. Otherwise return -1. */
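/* For illustration (hypothetical GIMPLE, not from the sources), the shape
   recognized here is a plain load from an addressable parameter:

     foo (int a)
     {
       int a.0;
       a.0_2 = a;   <- yields the index of parameter 'a', provided the
                       parameter is not modified before this statement
     }  */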
1080 static int
1081 load_from_unmodified_param (struct ipa_func_body_info *fbi,
1082 vec<ipa_param_descriptor, va_gc> *descriptors,
1083 gimple *stmt)
1085 int index;
1086 tree op1;
1088 if (!gimple_assign_single_p (stmt))
1089 return -1;
1091 op1 = gimple_assign_rhs1 (stmt);
1092 if (TREE_CODE (op1) != PARM_DECL)
1093 return -1;
1095 index = ipa_get_param_decl_index_1 (descriptors, op1);
1096 if (index < 0
1097 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
1098 return -1;
1100 return index;
1103 /* Return true if memory reference REF (which must be a load through parameter
1104 with INDEX) loads data that are known to be unmodified in this function
1105 before reaching statement STMT. */
1107 static bool
1108 parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
1109 int index, gimple *stmt, tree ref)
1111 struct ipa_param_aa_status *paa;
1112 bool modified = false;
1113 ao_ref refd;
1115 gcc_checking_assert (fbi);
1116 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
1117 if (paa->ref_modified || fbi->aa_walk_budget == 0)
1118 return false;
1120 gcc_checking_assert (gimple_vuse (stmt));
1121 ao_ref_init (&refd, ref);
1122 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
1123 &modified, NULL, NULL,
1124 fbi->aa_walk_budget);
1125 if (walked < 0)
1127 modified = true;
1128 fbi->aa_walk_budget = 0;
1130 else
1131 fbi->aa_walk_budget -= walked;
1132 if (modified)
1133 paa->ref_modified = true;
1134 return !modified;
1137 /* Return true if the data pointed to by PARM (which is a parameter with INDEX)
1138 is known to be unmodified in this function before reaching call statement
1139 CALL into which it is passed. FBI describes the function body. */
1141 static bool
1142 parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
1143 gimple *call, tree parm)
1145 bool modified = false;
1146 ao_ref refd;
1148 /* It's unnecessary to calculate anything about memory contents for a const
1149 function because it is not going to use it. But do not cache the result
1150 either. Also, no such calculations for non-pointers. */
1151 if (!gimple_vuse (call)
1152 || !POINTER_TYPE_P (TREE_TYPE (parm)))
1153 return false;
1155 struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
1156 gimple_bb (call),
1157 index);
1158 if (paa->pt_modified || fbi->aa_walk_budget == 0)
1159 return false;
1161 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
1162 int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
1163 &modified, NULL, NULL,
1164 fbi->aa_walk_budget);
1165 if (walked < 0)
1167 fbi->aa_walk_budget = 0;
1168 modified = true;
1170 else
1171 fbi->aa_walk_budget -= walked;
1172 if (modified)
1173 paa->pt_modified = true;
1174 return !modified;
1177 /* Return true if we can prove that OP is a memory reference loading
1178 data from an aggregate passed as a parameter.
1180 The function works in two modes. If GUARANTEED_UNMODIFIED is NULL, it returns
1181 false if it cannot prove that the value has not been modified before the
1182 load in STMT. If GUARANTEED_UNMODIFIED is not NULL, it will return true even
1183 if it cannot prove the value has not been modified, in that case it will
1184 store false to *GUARANTEED_UNMODIFIED, otherwise it will store true there.
1186 INFO and PARMS_AINFO describe parameters of the current function (but the
1187 latter can be NULL), STMT is the load statement. If function returns true,
1188 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
1189 within the aggregate and whether it is a load from a value passed by
1190 reference respectively.
1192 Return false if the offset divided by BITS_PER_UNIT would not fit into an
1193 unsigned int. */
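/* For example (hypothetical GIMPLE, not from the sources), given
   'void f (struct S *p)' and the load

     x_1 = p_2(D)->field;

   this function returns true with *INDEX_P set to the index of 'p',
   *OFFSET_P set to the bit offset of 'field' and *BY_REF_P set to true,
   assuming the pointed-to data is not modified before the load.  */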
1195 bool
1196 ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
1197 vec<ipa_param_descriptor, va_gc> *descriptors,
1198 gimple *stmt, tree op, int *index_p,
1199 HOST_WIDE_INT *offset_p, poly_int64 *size_p,
1200 bool *by_ref_p, bool *guaranteed_unmodified)
1202 int index;
1203 HOST_WIDE_INT size;
1204 bool reverse;
1205 tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);
1207 if (!base
1208 || (*offset_p / BITS_PER_UNIT) > UINT_MAX)
1209 return false;
1211 /* We cannot propagate across volatile loads. */
1212 if (TREE_THIS_VOLATILE (op))
1213 return false;
1215 if (DECL_P (base))
1217 int index = ipa_get_param_decl_index_1 (descriptors, base);
1218 if (index >= 0
1219 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
1221 *index_p = index;
1222 *by_ref_p = false;
1223 if (size_p)
1224 *size_p = size;
1225 if (guaranteed_unmodified)
1226 *guaranteed_unmodified = true;
1227 return true;
1229 return false;
1232 if (TREE_CODE (base) != MEM_REF
1233 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
1234 || !integer_zerop (TREE_OPERAND (base, 1)))
1235 return false;
1237 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
1239 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
1240 index = ipa_get_param_decl_index_1 (descriptors, parm);
1242 else
1244 /* This branch catches situations where a pointer parameter is not a
1245 gimple register, for example:
1247 void hip7(S*) (struct S * p)
1249 void (*<T2e4>) (struct S *) D.1867;
1250 struct S * p.1;
1252 <bb 2>:
1253 p.1_1 = p;
1254 D.1867_2 = p.1_1->f;
1255 D.1867_2 ();
1256 gdp = &p;
1259 gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
1260 index = load_from_unmodified_param (fbi, descriptors, def);
1263 if (index >= 0)
1265 bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
1266 if (!data_preserved && !guaranteed_unmodified)
1267 return false;
1269 *index_p = index;
1270 *by_ref_p = true;
1271 if (size_p)
1272 *size_p = size;
1273 if (guaranteed_unmodified)
1274 *guaranteed_unmodified = data_preserved;
1275 return true;
1277 return false;
1280 /* If STMT is an assignment that loads a value from a parameter declaration,
1281 or from an aggregate passed as the parameter either by value or reference,
1282 return the index of the parameter in ipa_node_params. Otherwise return -1.
1284 FBI holds gathered information about the function. INFO describes
1285 parameters of the function, STMT is the assignment statement. If it is a
1286 memory load from an aggregate, *OFFSET_P is filled with offset within the
1287 aggregate, and *BY_REF_P specifies whether the aggregate is passed by
1288 reference. */
1290 static int
1291 load_from_unmodified_param_or_agg (struct ipa_func_body_info *fbi,
1292 class ipa_node_params *info,
1293 gimple *stmt,
1294 HOST_WIDE_INT *offset_p,
1295 bool *by_ref_p)
1297 int index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1298 poly_int64 size;
1300 /* Load value from a parameter declaration. */
1301 if (index >= 0)
1303 *offset_p = -1;
1304 return index;
1307 if (!gimple_assign_load_p (stmt))
1308 return -1;
1310 tree rhs = gimple_assign_rhs1 (stmt);
1312 /* Skip memory reference containing VIEW_CONVERT_EXPR. */
1313 for (tree t = rhs; handled_component_p (t); t = TREE_OPERAND (t, 0))
1314 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
1315 return -1;
1317 /* Skip memory reference containing bit-field. */
1318 if (TREE_CODE (rhs) == BIT_FIELD_REF
1319 || contains_bitfld_component_ref_p (rhs))
1320 return -1;
1322 if (!ipa_load_from_parm_agg (fbi, info->descriptors, stmt, rhs, &index,
1323 offset_p, &size, by_ref_p))
1324 return -1;
1326 gcc_assert (!maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (rhs))),
1327 size));
1328 if (!*by_ref_p)
1330 tree param_type = ipa_get_type (info, index);
1332 if (!param_type || !AGGREGATE_TYPE_P (param_type))
1333 return -1;
1335 else if (TREE_THIS_VOLATILE (rhs))
1336 return -1;
1338 return index;
1341 /* Walk pointer adjustments from OP (such as POINTER_PLUS and ADDR_EXPR)
1342 to find the original pointer. Initialize RET to the pointer which results
1343 from the walk.
1344 If the offset is known, return true and initialize OFFSET_RET. */
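/* For illustration (hypothetical GIMPLE, not from the sources):

     q_1 = p_2(D) + 16;   POINTER_PLUS_EXPR, adds 16 bytes
     r_3 = q_1;           simple copy, offset unchanged

   Starting from r_3, the walk below sets *RET to p_2(D) and *OFFSET_RET
   to 16, and returns true because each step had a known offset.  */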
1346 bool
1347 unadjusted_ptr_and_unit_offset (tree op, tree *ret, poly_int64 *offset_ret)
1349 poly_int64 offset = 0;
1350 bool offset_known = true;
1351 int i;
1353 for (i = 0; i < param_ipa_jump_function_lookups; i++)
1355 if (TREE_CODE (op) == ADDR_EXPR)
1357 poly_int64 extra_offset = 0;
1358 tree base = get_addr_base_and_unit_offset (TREE_OPERAND (op, 0),
1359 &offset);
1360 if (!base)
1362 base = get_base_address (TREE_OPERAND (op, 0));
1363 if (TREE_CODE (base) != MEM_REF)
1364 break;
1365 offset_known = false;
1367 else
1369 if (TREE_CODE (base) != MEM_REF)
1370 break;
1371 offset += extra_offset;
1373 op = TREE_OPERAND (base, 0);
1374 if (mem_ref_offset (base).to_shwi (&extra_offset))
1375 offset += extra_offset;
1376 else
1377 offset_known = false;
1379 else if (TREE_CODE (op) == SSA_NAME
1380 && !SSA_NAME_IS_DEFAULT_DEF (op))
1382 gimple *pstmt = SSA_NAME_DEF_STMT (op);
1384 if (gimple_assign_single_p (pstmt))
1385 op = gimple_assign_rhs1 (pstmt);
1386 else if (is_gimple_assign (pstmt)
1387 && gimple_assign_rhs_code (pstmt) == POINTER_PLUS_EXPR)
1389 poly_int64 extra_offset = 0;
1390 if (ptrdiff_tree_p (gimple_assign_rhs2 (pstmt),
1391 &extra_offset))
1392 offset += extra_offset;
1393 else
1394 offset_known = false;
1395 op = gimple_assign_rhs1 (pstmt);
1397 else
1398 break;
1400 else
1401 break;
1403 *ret = op;
1404 *offset_ret = offset;
1405 return offset_known;
1408 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
1409 of an assignment statement STMT, try to determine whether we are actually
1410 handling any of the following cases and construct an appropriate jump
1411 function into JFUNC if so:
1413 1) The passed value is loaded from a formal parameter which is not a gimple
1414 register (most probably because it is addressable, the value has to be
1415 scalar) and we can guarantee the value has not changed. This case can
1416 therefore be described by a simple pass-through jump function. For example:
1418 foo (int a)
1420 int a.0;
1422 a.0_2 = a;
1423 bar (a.0_2);
1425 2) The passed value can be described by a simple arithmetic pass-through
1426 jump function. E.g.
1428 foo (int a)
1430 int D.2064;
1432 D.2064_4 = a.1(D) + 4;
1433 bar (D.2064_4);
1435 This case can also occur in combination of the previous one, e.g.:
1437 foo (int a, int z)
1439 int a.0;
1440 int D.2064;
1442 a.0_3 = a;
1443 D.2064_4 = a.0_3 + 4;
1444 foo (D.2064_4);
1446 3) The passed value is an address of an object within another one (which
1447 also passed by reference). Such situations are described by an ancestor
1448 jump function and describe situations such as:
1450 B::foo() (struct B * const this)
1452 struct A * D.1845;
1454 D.1845_2 = &this_1(D)->D.1748;
1455 A::bar (D.1845_2);
1457 INFO is the structure describing individual parameters, as accessed in
1458 different stages of IPA optimizations. PARMS_AINFO contains the information
1459 that is only needed for intraprocedural analysis. */
1461 static void
1462 compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
1463 class ipa_node_params *info,
1464 struct ipa_jump_func *jfunc,
1465 gcall *call, gimple *stmt, tree name,
1466 tree param_type)
1468 HOST_WIDE_INT offset, size;
1469 tree op1, tc_ssa, base, ssa;
1470 bool reverse;
1471 int index;
1473 op1 = gimple_assign_rhs1 (stmt);
1475 if (TREE_CODE (op1) == SSA_NAME)
1477 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1478 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1479 else
1480 index = load_from_unmodified_param (fbi, info->descriptors,
1481 SSA_NAME_DEF_STMT (op1));
1482 tc_ssa = op1;
1484 else
1486 index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1487 tc_ssa = gimple_assign_lhs (stmt);
1490 if (index >= 0)
1492 switch (gimple_assign_rhs_class (stmt))
1494 case GIMPLE_BINARY_RHS:
1496 tree op2 = gimple_assign_rhs2 (stmt);
1497 if (!is_gimple_ip_invariant (op2)
1498 || ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
1499 != tcc_comparison)
1500 && !useless_type_conversion_p (TREE_TYPE (name),
1501 TREE_TYPE (op1))))
1502 return;
1504 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1505 gimple_assign_rhs_code (stmt));
1506 break;
1508 case GIMPLE_SINGLE_RHS:
1510 bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
1511 tc_ssa);
1512 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1513 break;
1515 case GIMPLE_UNARY_RHS:
1516 if (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
1517 ipa_set_jf_unary_pass_through (jfunc, index,
1518 gimple_assign_rhs_code (stmt));
1519 default:;
1521 return;
1524 if (TREE_CODE (op1) != ADDR_EXPR)
1525 return;
1526 op1 = TREE_OPERAND (op1, 0);
1527 base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
1528 offset_int mem_offset;
1529 if (!base
1530 || TREE_CODE (base) != MEM_REF
1531 || !mem_ref_offset (base).is_constant (&mem_offset))
1532 return;
1533 offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
1534 ssa = TREE_OPERAND (base, 0);
1535 if (TREE_CODE (ssa) != SSA_NAME
1536 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
1537 || offset < 0)
1538 return;
1540 /* Dynamic types are changed in constructors and destructors. */
1541 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
1542 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
1543 ipa_set_ancestor_jf (jfunc, offset, index,
1544 parm_ref_data_pass_through_p (fbi, index, call, ssa),
1545 false);
1548 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1549 it looks like:
1551 iftmp.1_3 = &obj_2(D)->D.1762;
1553 The base of the MEM_REF must be a default definition SSA NAME of a
1554 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
1555 whole MEM_REF expression is returned and the offset calculated from any
1556 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1557 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1559 static tree
1560 get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
1562 HOST_WIDE_INT size;
1563 tree expr, parm, obj;
1564 bool reverse;
1566 if (!gimple_assign_single_p (assign))
1567 return NULL_TREE;
1568 expr = gimple_assign_rhs1 (assign);
1570 if (TREE_CODE (expr) != ADDR_EXPR)
1571 return NULL_TREE;
1572 expr = TREE_OPERAND (expr, 0);
1573 obj = expr;
1574 expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);
1576 offset_int mem_offset;
1577 if (!expr
1578 || TREE_CODE (expr) != MEM_REF
1579 || !mem_ref_offset (expr).is_constant (&mem_offset))
1580 return NULL_TREE;
1581 parm = TREE_OPERAND (expr, 0);
1582 if (TREE_CODE (parm) != SSA_NAME
1583 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1584 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1585 return NULL_TREE;
1587 *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
1588 *obj_p = obj;
1589 return expr;
1593 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1594 statement PHI, try to find out whether NAME is in fact a
1595 multiple-inheritance typecast from a descendant into an ancestor of a formal
1596 parameter and thus can be described by an ancestor jump function and if so,
1597 write the appropriate function into JFUNC.
1599 Essentially we want to match the following pattern:
1601 if (obj_2(D) != 0B)
1602 goto <bb 3>;
1603 else
1604 goto <bb 4>;
1606 <bb 3>:
1607 iftmp.1_3 = &obj_2(D)->D.1762;
1609 <bb 4>:
1610 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1611 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1612 return D.1879_6; */
1614 static void
1615 compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
1616 class ipa_node_params *info,
1617 struct ipa_jump_func *jfunc,
1618 gcall *call, gphi *phi)
1620 HOST_WIDE_INT offset;
1621 gimple *assign;
1622 basic_block phi_bb, assign_bb, cond_bb;
1623 tree tmp, parm, expr, obj;
1624 int index, i;
1626 if (gimple_phi_num_args (phi) != 2)
1627 return;
1629 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1630 tmp = PHI_ARG_DEF (phi, 0);
1631 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1632 tmp = PHI_ARG_DEF (phi, 1);
1633 else
1634 return;
1635 if (TREE_CODE (tmp) != SSA_NAME
1636 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1637 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1638 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1639 return;
1641 assign = SSA_NAME_DEF_STMT (tmp);
1642 assign_bb = gimple_bb (assign);
1643 if (!single_pred_p (assign_bb))
1644 return;
1645 expr = get_ancestor_addr_info (assign, &obj, &offset);
1646 if (!expr)
1647 return;
1648 parm = TREE_OPERAND (expr, 0);
1649 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1650 if (index < 0)
1651 return;
1653 cond_bb = single_pred (assign_bb);
1654 gcond *cond = safe_dyn_cast <gcond *> (*gsi_last_bb (cond_bb));
1655 if (!cond
1656 || gimple_cond_code (cond) != NE_EXPR
1657 || gimple_cond_lhs (cond) != parm
1658 || !integer_zerop (gimple_cond_rhs (cond)))
1659 return;
1661 phi_bb = gimple_bb (phi);
1662 for (i = 0; i < 2; i++)
1664 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1665 if (pred != assign_bb && pred != cond_bb)
1666 return;
1669 ipa_set_ancestor_jf (jfunc, offset, index,
1670 parm_ref_data_pass_through_p (fbi, index, call, parm),
1671 true);
1674 /* Inspect the given TYPE and return true iff it has the same structure (the
1675 same number of fields of the same types) as a C++ member pointer. If
1676 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1677 corresponding fields there. */
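/* An illustrative C view (editor's sketch, roughly the Itanium C++ ABI
   representation of a pointer to member function) of the layout being
   matched:

     struct member_ptr_rep
     {
       void (*method) (struct S *);   // pointee is a METHOD_TYPE
       ptrdiff_t delta;               // this-pointer adjustment
     };

   METHOD_PTR and DELTA receive the two FIELD_DECLs, respectively.  */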
1679 static bool
1680 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1682 tree fld;
1684 if (TREE_CODE (type) != RECORD_TYPE)
1685 return false;
1687 fld = TYPE_FIELDS (type);
1688 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1689 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1690 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1691 return false;
1693 if (method_ptr)
1694 *method_ptr = fld;
1696 fld = DECL_CHAIN (fld);
1697 if (!fld || INTEGRAL_TYPE_P (fld)
1698 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1699 return false;
1700 if (delta)
1701 *delta = fld;
1703 if (DECL_CHAIN (fld))
1704 return false;
1706 return true;
1709 /* If RHS is an SSA_NAME defined by a simple copy assignment statement,
1710 return the rhs of its defining statement, and this statement is stored in
1711 *RHS_STMT. Otherwise return RHS as it is. */
1713 static inline tree
1714 get_ssa_def_if_simple_copy (tree rhs, gimple **rhs_stmt)
1716 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1718 gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
1720 if (gimple_assign_single_p (def_stmt))
1721 rhs = gimple_assign_rhs1 (def_stmt);
1722 else
1723 break;
1724 *rhs_stmt = def_stmt;
1726 return rhs;
1729 /* Simple linked list, describing the contents of an aggregate before a call. */
1731 struct ipa_known_agg_contents_list
1733 /* Offset and size of the described part of the aggregate. */
1734 HOST_WIDE_INT offset, size;
1736 /* Type of the described part of the aggregate. */
1737 tree type;
1739 /* Known constant value or jump function data describing contents. */
1740 struct ipa_load_agg_data value;
1742 /* Pointer to the next structure in the list. */
1743 struct ipa_known_agg_contents_list *next;
1746 /* Add an aggregate content item into a linked list of
1747 ipa_known_agg_contents_list structures, in which all elements
1748 are sorted in ascending order of offset. */
1750 static inline void
1751 add_to_agg_contents_list (struct ipa_known_agg_contents_list **plist,
1752 struct ipa_known_agg_contents_list *item)
1754 struct ipa_known_agg_contents_list *list = *plist;
1756 for (; list; list = list->next)
1758 if (list->offset >= item->offset)
1759 break;
1761 plist = &list->next;
1764 item->next = list;
1765 *plist = item;
1768 /* Check whether a given aggregate content is clobbered by a certain element in
1769 a linked list of ipa_known_agg_contents_list. */
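/* For example (illustrative numbers only): with a list entry covering bits
   [0, 32), an ITEM covering bits [16, 48) overlaps it and is considered
   clobbered, whereas an item covering bits [32, 64) is not.  */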
1771 static inline bool
1772 clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list *list,
1773 struct ipa_known_agg_contents_list *item)
1775 for (; list; list = list->next)
1777 if (list->offset >= item->offset)
1778 return list->offset < item->offset + item->size;
1780 if (list->offset + list->size > item->offset)
1781 return true;
1784 return false;
1787 /* Build aggregate jump function from LIST, assuming there are exactly
1788 VALUE_COUNT entries there and that offset of the passed argument
1789 is ARG_OFFSET and store it into JFUNC. */
1791 static void
1792 build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1793 int value_count, HOST_WIDE_INT arg_offset,
1794 struct ipa_jump_func *jfunc)
1796 vec_safe_reserve (jfunc->agg.items, value_count, true);
1797 for (; list; list = list->next)
1799 struct ipa_agg_jf_item item;
1800 tree operand = list->value.pass_through.operand;
1802 if (list->value.pass_through.formal_id >= 0)
1804 /* Content value is derived from some formal parameter. */
1805 if (list->value.offset >= 0)
1806 item.jftype = IPA_JF_LOAD_AGG;
1807 else
1808 item.jftype = IPA_JF_PASS_THROUGH;
1810 item.value.load_agg = list->value;
1811 if (operand)
1812 item.value.pass_through.operand
1813 = unshare_expr_without_location (operand);
1815 else if (operand)
1817 /* Content value is known constant. */
1818 item.jftype = IPA_JF_CONST;
1819 item.value.constant = unshare_expr_without_location (operand);
1821 else
1822 continue;
1824 item.type = list->type;
1825 gcc_assert (tree_to_shwi (TYPE_SIZE (list->type)) == list->size);
1827 item.offset = list->offset - arg_offset;
1828 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1830 jfunc->agg.items->quick_push (item);
1834 /* Given an assignment statement STMT, try to collect information into
1835 AGG_VALUE that will be used to construct a jump function for the RHS of the
1836 assignment, from which the content value of an aggregate part comes.
1838 Besides constant and simple pass-through jump functions, also try to
1839 identify whether it matches the following pattern that can be described by
1840 a load-value-from-aggregate jump function, which is a derivative of simple
1841 pass-through jump function.
1843 foo (int *p)
1847 *(q_5 + 4) = *(p_3(D) + 28) op 1;
1848 bar (q_5);
1851 Here the IPA_LOAD_AGG_DATA data structure is informative enough to describe
1852 constant, simple pass-through and load-value-from-aggregate. If the value
1853 is constant, it will be kept in field OPERAND, and field FORMAL_ID is
1854 set to -1. For simple pass-through and load-value-from-aggregate, field
1855 FORMAL_ID specifies the related formal parameter index, and field
1856 OFFSET can be used to distinguish them, -1 means simple pass-through,
1857 otherwise means load-value-from-aggregate. */
1859 static void
1860 analyze_agg_content_value (struct ipa_func_body_info *fbi,
1861 struct ipa_load_agg_data *agg_value,
1862 gimple *stmt)
1864 tree lhs = gimple_assign_lhs (stmt);
1865 tree rhs1 = gimple_assign_rhs1 (stmt);
1866 enum tree_code code;
1867 int index = -1;
1869 /* Initialize jump function data for the aggregate part. */
1870 memset (agg_value, 0, sizeof (*agg_value));
1871 agg_value->pass_through.operation = NOP_EXPR;
1872 agg_value->pass_through.formal_id = -1;
1873 agg_value->offset = -1;
1875 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs)) /* TODO: Support aggregate type. */
1876 || TREE_THIS_VOLATILE (lhs)
1877 || TREE_CODE (lhs) == BIT_FIELD_REF
1878 || contains_bitfld_component_ref_p (lhs))
1879 return;
1881 /* Skip SSA copies. */
1882 while (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
1884 if (TREE_CODE (rhs1) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (rhs1))
1885 break;
1887 stmt = SSA_NAME_DEF_STMT (rhs1);
1888 if (!is_gimple_assign (stmt))
1889 break;
1891 rhs1 = gimple_assign_rhs1 (stmt);
1894 if (gphi *phi = dyn_cast<gphi *> (stmt))
1896 /* Also handle a special case like the following (a is a formal parameter):
1898 _12 = *a_11(D).dim[0].stride;
1900 # iftmp.22_9 = PHI <_12(2), 1(3)>
1902 parm.6.dim[0].stride = iftmp.22_9;
1904 __x_MOD_foo (&parm.6, b_31(D));
1906 The aggregate jump function describing parm.6.dim[0].stride is encoded as a
1907 PASS-THROUGH jump function with ASSERT_EXPR operation with operand 1
1908 (the constant from the PHI node). */
1910 if (gimple_phi_num_args (phi) != 2)
1911 return;
1912 tree arg0 = gimple_phi_arg_def (phi, 0);
1913 tree arg1 = gimple_phi_arg_def (phi, 1);
1914 tree operand;
1916 if (is_gimple_ip_invariant (arg1))
1918 operand = arg1;
1919 rhs1 = arg0;
1921 else if (is_gimple_ip_invariant (arg0))
1923 operand = arg0;
1924 rhs1 = arg1;
1926 else
1927 return;
1929 rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
1930 if (!is_gimple_assign (stmt))
1931 return;
1933 code = ASSERT_EXPR;
1934 agg_value->pass_through.operand = operand;
1936 else if (is_gimple_assign (stmt))
1938 code = gimple_assign_rhs_code (stmt);
1939 switch (gimple_assign_rhs_class (stmt))
1941 case GIMPLE_SINGLE_RHS:
1942 if (is_gimple_ip_invariant (rhs1))
1944 agg_value->pass_through.operand = rhs1;
1945 return;
1947 code = NOP_EXPR;
1948 break;
1950 case GIMPLE_UNARY_RHS:
1951 /* NOTE: A GIMPLE_UNARY_RHS operation might not be tcc_unary
1952 (truth_not_expr is an example), and GIMPLE_BINARY_RHS does not imply
1953 tcc_binary; this subtlety is somewhat misleading.
1955 Since tcc_unary is widely used in IPA-CP code to check an operation
1956 with one operand, here we only allow tcc_unary operations to avoid
1957 possible problems. Then we can use (opclass == tcc_unary) or not to
1958 distinguish unary and binary. */
1959 if (TREE_CODE_CLASS (code) != tcc_unary || CONVERT_EXPR_CODE_P (code))
1960 return;
1962 rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
1963 break;
1965 case GIMPLE_BINARY_RHS:
1967 gimple *rhs1_stmt = stmt;
1968 gimple *rhs2_stmt = stmt;
1969 tree rhs2 = gimple_assign_rhs2 (stmt);
1971 rhs1 = get_ssa_def_if_simple_copy (rhs1, &rhs1_stmt);
1972 rhs2 = get_ssa_def_if_simple_copy (rhs2, &rhs2_stmt);
1974 if (is_gimple_ip_invariant (rhs2))
1976 agg_value->pass_through.operand = rhs2;
1977 stmt = rhs1_stmt;
1979 else if (is_gimple_ip_invariant (rhs1))
1981 if (TREE_CODE_CLASS (code) == tcc_comparison)
1982 code = swap_tree_comparison (code);
1983 else if (!commutative_tree_code (code))
1984 return;
1986 agg_value->pass_through.operand = rhs1;
1987 stmt = rhs2_stmt;
1988 rhs1 = rhs2;
1990 else
1991 return;
1993 if (TREE_CODE_CLASS (code) != tcc_comparison
1994 && !useless_type_conversion_p (TREE_TYPE (lhs),
1995 TREE_TYPE (rhs1)))
1996 return;
1998 break;
2000 default:
2001 return;
2004 else
2005 return;
2007 if (TREE_CODE (rhs1) != SSA_NAME)
2008 index = load_from_unmodified_param_or_agg (fbi, fbi->info, stmt,
2009 &agg_value->offset,
2010 &agg_value->by_ref);
2011 else if (SSA_NAME_IS_DEFAULT_DEF (rhs1))
2012 index = ipa_get_param_decl_index (fbi->info, SSA_NAME_VAR (rhs1));
2014 if (index >= 0)
2016 if (agg_value->offset >= 0)
2017 agg_value->type = TREE_TYPE (rhs1);
2018 agg_value->pass_through.formal_id = index;
2019 agg_value->pass_through.operation = code;
2021 else
2022 agg_value->pass_through.operand = NULL_TREE;
2025 /* If STMT is a memory store to the object whose address is BASE, extract
2026 information (offset, size, and value) into CONTENT, and return true;
2027 otherwise conservatively assume the whole object is modified with
2028 unknown content, and return false. CHECK_REF means that the access to
2029 the object is expected to be in the form of a MEM_REF expression. */
2031 static bool
2032 extract_mem_content (struct ipa_func_body_info *fbi,
2033 gimple *stmt, tree base, bool check_ref,
2034 struct ipa_known_agg_contents_list *content)
2036 HOST_WIDE_INT lhs_offset, lhs_size;
2037 bool reverse;
2039 if (!is_gimple_assign (stmt))
2040 return false;
2042 tree lhs = gimple_assign_lhs (stmt);
2043 tree lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset, &lhs_size,
2044 &reverse);
2045 if (!lhs_base)
2046 return false;
2048 if (check_ref)
2050 if (TREE_CODE (lhs_base) != MEM_REF
2051 || TREE_OPERAND (lhs_base, 0) != base
2052 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
2053 return false;
2055 else if (lhs_base != base)
2056 return false;
2058 content->offset = lhs_offset;
2059 content->size = lhs_size;
2060 content->type = TREE_TYPE (lhs);
2061 content->next = NULL;
2063 analyze_agg_content_value (fbi, &content->value, stmt);
2064 return true;
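/* As an illustration of what the analysis above matches, consider a
   hypothetical caller (not taken from GCC itself):

     struct S { int a; int b; };
     extern void callee (struct S *);

     void
     caller (int x)
     {
       struct S s;
       s.a = 3;
       s.b = x + 1;
       callee (&s);
     }

   The store to s.a would be summarized as a constant at offset 0 and the
   store to s.b as an arithmetic pass-through of parameter X at the offset
   of field b, each in its own ipa_known_agg_contents_list entry.  */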
2067 /* Traverse statements from CALL backwards, scanning whether an aggregate given
2068 in ARG is filled with constants or with values derived from the caller's
2069 formal parameters in the way described by some kinds of jump functions. FBI
2070 is the context of the caller function for interprocedural analysis. ARG can
2071 either be an aggregate expression or a pointer to an aggregate. ARG_TYPE is
2072 the type of the aggregate, JFUNC is the jump function for the aggregate. */
2074 static void
2075 determine_known_aggregate_parts (struct ipa_func_body_info *fbi,
2076 gcall *call, tree arg,
2077 tree arg_type,
2078 struct ipa_jump_func *jfunc)
2080 struct ipa_known_agg_contents_list *list = NULL, *all_list = NULL;
2081 bitmap visited = NULL;
2082 int item_count = 0, value_count = 0;
2083 HOST_WIDE_INT arg_offset, arg_size;
2084 tree arg_base;
2085 bool check_ref, by_ref;
2086 ao_ref r;
2087 int max_agg_items = opt_for_fn (fbi->node->decl, param_ipa_max_agg_items);
2089 if (max_agg_items == 0)
2090 return;
2092 /* The function operates in three stages. First, we prepare check_ref, r,
2093 arg_base and arg_offset based on what is actually passed as an actual
2094 argument. */
2096 if (POINTER_TYPE_P (arg_type))
2098 by_ref = true;
2099 if (TREE_CODE (arg) == SSA_NAME)
2101 tree type_size;
2102 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type)))
2103 || !POINTER_TYPE_P (TREE_TYPE (arg)))
2104 return;
2105 check_ref = true;
2106 arg_base = arg;
2107 arg_offset = 0;
2108 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
2109 arg_size = tree_to_uhwi (type_size);
2110 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
2112 else if (TREE_CODE (arg) == ADDR_EXPR)
2114 bool reverse;
2116 arg = TREE_OPERAND (arg, 0);
2117 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
2118 &arg_size, &reverse);
2119 if (!arg_base)
2120 return;
2121 if (DECL_P (arg_base))
2123 check_ref = false;
2124 ao_ref_init (&r, arg_base);
2126 else
2127 return;
2129 else
2130 return;
2132 else
2134 bool reverse;
2136 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
2138 by_ref = false;
2139 check_ref = false;
2140 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
2141 &arg_size, &reverse);
2142 if (!arg_base)
2143 return;
2145 ao_ref_init (&r, arg);
2148 /* The second stage traverses the virtual SSA web backwards starting from the
2149 call statement, looking only at individual dominating virtual operands
2150 (whose definitions dominate the call). As long as it is confident that the
2151 content of the aggregate is affected by the definition of a virtual operand,
2152 it builds a sorted linked list of ipa_known_agg_contents_list describing that. */
2154 for (tree dom_vuse = gimple_vuse (call);
2155 dom_vuse && fbi->aa_walk_budget > 0;)
2157 gimple *stmt = SSA_NAME_DEF_STMT (dom_vuse);
2159 if (gimple_code (stmt) == GIMPLE_PHI)
2161 dom_vuse = get_continuation_for_phi (stmt, &r, true,
2162 fbi->aa_walk_budget,
2163 &visited, false, NULL, NULL);
2164 continue;
2167 fbi->aa_walk_budget--;
2168 if (stmt_may_clobber_ref_p_1 (stmt, &r))
2170 struct ipa_known_agg_contents_list *content
2171 = XALLOCA (struct ipa_known_agg_contents_list);
2173 if (!extract_mem_content (fbi, stmt, arg_base, check_ref, content))
2174 break;
2176 /* Now we have a dominating virtual operand, and need to check
2177 whether its value is clobbered by any other dominating one. */
2178 if ((content->value.pass_through.formal_id >= 0
2179 || content->value.pass_through.operand)
2180 && !clobber_by_agg_contents_list_p (all_list, content)
2181 /* Since IPA-CP stores results with unsigned int offsets, we can
2182 discard those which would not fit now before we stream them to
2183 WPA. */
2184 && (content->offset + content->size - arg_offset
2185 <= (HOST_WIDE_INT) UINT_MAX * BITS_PER_UNIT))
2187 struct ipa_known_agg_contents_list *copy
2188 = XALLOCA (struct ipa_known_agg_contents_list);
2190 /* Add to the list consisting of only dominating virtual
2191 operands, whose definitions can finally reach the call. */
2192 add_to_agg_contents_list (&list, (*copy = *content, copy));
2194 if (++value_count == max_agg_items)
2195 break;
2198 /* Add to the list consisting of all dominating virtual operands. */
2199 add_to_agg_contents_list (&all_list, content);
2201 if (++item_count == 2 * max_agg_items)
2202 break;
2204 dom_vuse = gimple_vuse (stmt);
2207 if (visited)
2208 BITMAP_FREE (visited);
2210 /* The third stage just goes over the list and creates an appropriate vector of
2211 ipa_agg_jf_item structures out of it, of course only if there are
2212 any meaningful items to begin with. */
2214 if (value_count)
2216 jfunc->agg.by_ref = by_ref;
2217 build_agg_jump_func_from_list (list, value_count, arg_offset, jfunc);
2222 /* Return the type of the Ith parameter of the callee associated with call
2223 graph edge E. */
2225 tree
2226 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
2228 int n;
2229 tree type = (e->callee
2230 ? TREE_TYPE (e->callee->decl)
2231 : gimple_call_fntype (e->call_stmt));
2232 tree t = TYPE_ARG_TYPES (type);
2234 for (n = 0; n < i; n++)
2236 if (!t)
2237 break;
2238 t = TREE_CHAIN (t);
2240 if (t && t != void_list_node)
2241 return TREE_VALUE (t);
2242 if (!e->callee)
2243 return NULL;
2244 t = DECL_ARGUMENTS (e->callee->decl);
2245 for (n = 0; n < i; n++)
2247 if (!t)
2248 return NULL;
2249 t = TREE_CHAIN (t);
2251 if (t)
2252 return TREE_TYPE (t);
2253 return NULL;
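/* For example (a hypothetical declaration), given

     int f (int a, double b, ...);

   I == 0 yields the int type and I == 1 the double type from
   TYPE_ARG_TYPES; an I pointing into the variadic part finds neither a
   type list entry nor a DECL_ARGUMENTS entry, so NULL is returned.  */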
2256 /* Return a pointer to an ipa_vr just like TMP, but either found in
2257 ipa_vr_hash_table or newly allocated in GC memory. */
2259 static ipa_vr *
2260 ipa_get_value_range (const vrange &tmp)
2262 inchash::hash hstate;
2263 inchash::add_vrange (tmp, hstate);
2264 hashval_t hash = hstate.end ();
2265 ipa_vr **slot = ipa_vr_hash_table->find_slot_with_hash (&tmp, hash, INSERT);
2266 if (*slot)
2267 return *slot;
2269 ipa_vr *vr = new (ggc_alloc<ipa_vr> ()) ipa_vr (tmp);
2270 *slot = vr;
2271 return vr;
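/* A minimal usage sketch (hypothetical; it assumes the int_range template
   from value-range.h with its tree-based constructor):

     int_range<2> tmp (build_int_cst (integer_type_node, 0),
		       build_int_cst (integer_type_node, 9));
     ipa_vr *vr0 = ipa_get_value_range (tmp);
     ipa_vr *vr1 = ipa_get_value_range (tmp);
     gcc_checking_assert (vr0 == vr1);

   Equal ranges hash to the same slot, so repeated requests return the
   same GC-allocated ipa_vr.  */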
2274 /* Assign to JF a pointer to a range just like TMP, but either fetch a
2275 copy from ipa_vr_hash_table or allocate a new one in GC memory. */
2277 static void
2278 ipa_set_jfunc_vr (ipa_jump_func *jf, const vrange &tmp)
2280 jf->m_vr = ipa_get_value_range (tmp);
2283 static void
2284 ipa_set_jfunc_vr (ipa_jump_func *jf, const ipa_vr &vr)
2286 Value_Range tmp;
2287 vr.get_vrange (tmp);
2288 ipa_set_jfunc_vr (jf, tmp);
2291 /* Compute jump functions for all arguments of callsite CS and insert the
2292 information in the jump_functions array in the ipa_edge_args corresponding
2293 to this callsite. */
2295 static void
2296 ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
2297 struct cgraph_edge *cs)
2299 ipa_node_params *info = ipa_node_params_sum->get (cs->caller);
2300 ipa_edge_args *args = ipa_edge_args_sum->get_create (cs);
2301 gcall *call = cs->call_stmt;
2302 int n, arg_num = gimple_call_num_args (call);
2303 bool useful_context = false;
2305 if (arg_num == 0 || args->jump_functions)
2306 return;
2307 vec_safe_grow_cleared (args->jump_functions, arg_num, true);
2308 if (flag_devirtualize)
2309 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num, true);
2311 if (gimple_call_internal_p (call))
2312 return;
2313 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
2314 return;
2316 for (n = 0; n < arg_num; n++)
2318 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
2319 tree arg = gimple_call_arg (call, n);
2320 tree param_type = ipa_get_callee_param_type (cs, n);
2321 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
2323 tree instance;
2324 class ipa_polymorphic_call_context context (cs->caller->decl,
2325 arg, cs->call_stmt,
2326 &instance);
2327 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt,
2328 &fbi->aa_walk_budget);
2329 *ipa_get_ith_polymorhic_call_context (args, n) = context;
2330 if (!context.useless_p ())
2331 useful_context = true;
2334 Value_Range vr (TREE_TYPE (arg));
2335 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2337 bool addr_nonzero = false;
2338 bool strict_overflow = false;
2340 if (TREE_CODE (arg) == SSA_NAME
2341 && param_type
2342 && get_range_query (cfun)->range_of_expr (vr, arg, cs->call_stmt)
2343 && vr.nonzero_p ())
2344 addr_nonzero = true;
2345 else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
2346 addr_nonzero = true;
2348 if (addr_nonzero)
2349 vr.set_nonzero (TREE_TYPE (arg));
2351 unsigned HOST_WIDE_INT bitpos;
2352 unsigned align, prec = TYPE_PRECISION (TREE_TYPE (arg));
2354 get_pointer_alignment_1 (arg, &align, &bitpos);
2356 if (align > BITS_PER_UNIT
2357 && opt_for_fn (cs->caller->decl, flag_ipa_bit_cp))
2359 wide_int mask
2360 = wi::bit_and_not (wi::mask (prec, false, prec),
2361 wide_int::from (align / BITS_PER_UNIT - 1,
2362 prec, UNSIGNED));
2363 wide_int value = wide_int::from (bitpos / BITS_PER_UNIT, prec,
2364 UNSIGNED);
2365 irange_bitmask bm (value, mask);
2366 if (!addr_nonzero)
2367 vr.set_varying (TREE_TYPE (arg));
2368 irange &r = as_a <irange> (vr);
2369 r.update_bitmask (bm);
2370 ipa_set_jfunc_vr (jfunc, vr);
2372 else if (addr_nonzero)
2373 ipa_set_jfunc_vr (jfunc, vr);
2374 else
2375 gcc_assert (!jfunc->m_vr);
2377 else
2379 if (param_type
2380 && Value_Range::supports_type_p (TREE_TYPE (arg))
2381 && Value_Range::supports_type_p (param_type)
2382 && irange::supports_p (TREE_TYPE (arg))
2383 && irange::supports_p (param_type)
2384 && get_range_query (cfun)->range_of_expr (vr, arg, cs->call_stmt)
2385 && !vr.undefined_p ())
2387 Value_Range resvr (vr);
2388 range_cast (resvr, param_type);
2389 if (!resvr.undefined_p () && !resvr.varying_p ())
2390 ipa_set_jfunc_vr (jfunc, resvr);
2391 else
2392 gcc_assert (!jfunc->m_vr);
2394 else
2395 gcc_assert (!jfunc->m_vr);
2398 if (is_gimple_ip_invariant (arg)
2399 || (VAR_P (arg)
2400 && is_global_var (arg)
2401 && TREE_READONLY (arg)))
2402 ipa_set_jf_constant (jfunc, arg, cs);
2403 else if (!is_gimple_reg_type (TREE_TYPE (arg))
2404 && TREE_CODE (arg) == PARM_DECL)
2406 int index = ipa_get_param_decl_index (info, arg);
2408 gcc_assert (index >= 0);
2409 /* Aggregate passed by value, check for pass-through, otherwise we
2410 will attempt to fill in aggregate contents later in this
2411 for loop. */
2412 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
2414 ipa_set_jf_simple_pass_through (jfunc, index, false);
2415 continue;
2418 else if (TREE_CODE (arg) == SSA_NAME)
2420 if (SSA_NAME_IS_DEFAULT_DEF (arg))
2422 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
2423 if (index >= 0)
2425 bool agg_p;
2426 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
2427 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
2430 else
2432 gimple *stmt = SSA_NAME_DEF_STMT (arg);
2433 if (is_gimple_assign (stmt))
2434 compute_complex_assign_jump_func (fbi, info, jfunc,
2435 call, stmt, arg, param_type);
2436 else if (gimple_code (stmt) == GIMPLE_PHI)
2437 compute_complex_ancestor_jump_func (fbi, info, jfunc,
2438 call,
2439 as_a <gphi *> (stmt));
2443 /* If ARG is a pointer, we cannot use its type to determine the type of the
2444 aggregate passed (because type conversions are ignored in gimple). Usually
2445 we can safely get the type from the function declaration, but in the case
2446 of K&R prototypes or variadic functions we can try our luck with the type
2447 of the pointer passed. TODO: Since we look for actual initialization of
2448 the memory object, we might do better to work out the type from the memory stores we find. */
2449 if (!param_type)
2450 param_type = TREE_TYPE (arg);
2452 if ((jfunc->type != IPA_JF_PASS_THROUGH
2453 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
2454 && (jfunc->type != IPA_JF_ANCESTOR
2455 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
2456 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
2457 || POINTER_TYPE_P (param_type)))
2458 determine_known_aggregate_parts (fbi, call, arg, param_type, jfunc);
2460 if (!useful_context)
2461 vec_free (args->polymorphic_call_contexts);
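/* To illustrate, consider a hypothetical caller (not from GCC itself):

     extern void foo (int, int);

     void
     caller (int x)
     {
       foo (x, 7);
     }

   Here the first argument produces a simple pass-through jump function
   referring to parameter 0 and the second a constant jump function
   holding 7; both may in addition carry value-range information obtained
   from the ranger.  */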
2464 /* Compute jump functions for all edges - both direct and indirect - outgoing
2465 from BB. */
2467 static void
2468 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
2470 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2471 int i;
2472 struct cgraph_edge *cs;
2474 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
2476 struct cgraph_node *callee = cs->callee;
2478 if (callee)
2480 callee = callee->ultimate_alias_target ();
2481 /* We do not need to bother analyzing calls to unknown functions
2482 unless they may become known during lto/whopr. */
2483 if (!callee->definition && !flag_lto
2484 && !gimple_call_fnspec (cs->call_stmt).known_p ())
2485 continue;
2487 ipa_compute_jump_functions_for_edge (fbi, cs);
2491 /* If STMT looks like a statement loading a value from a member pointer formal
2492 parameter, return that parameter and store the offset of the field in
2493 *OFFSET_P if it is non-NULL. Otherwise return NULL_TREE (but *OFFSET_P
2494 still might be clobbered). If USE_DELTA, then we look for a use of the
2495 delta field rather than the pfn. */
2497 static tree
2498 ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
2499 HOST_WIDE_INT *offset_p)
2501 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2503 if (!gimple_assign_single_p (stmt))
2504 return NULL_TREE;
2506 rhs = gimple_assign_rhs1 (stmt);
2507 if (TREE_CODE (rhs) == COMPONENT_REF)
2509 ref_field = TREE_OPERAND (rhs, 1);
2510 rhs = TREE_OPERAND (rhs, 0);
2512 else
2513 ref_field = NULL_TREE;
2514 if (TREE_CODE (rhs) != MEM_REF)
2515 return NULL_TREE;
2516 rec = TREE_OPERAND (rhs, 0);
2517 if (TREE_CODE (rec) != ADDR_EXPR)
2518 return NULL_TREE;
2519 rec = TREE_OPERAND (rec, 0);
2520 if (TREE_CODE (rec) != PARM_DECL
2521 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2522 return NULL_TREE;
2523 ref_offset = TREE_OPERAND (rhs, 1);
2525 if (use_delta)
2526 fld = delta_field;
2527 else
2528 fld = ptr_field;
2529 if (offset_p)
2530 *offset_p = int_bit_position (fld);
2532 if (ref_field)
2534 if (integer_nonzerop (ref_offset))
2535 return NULL_TREE;
2536 return ref_field == fld ? rec : NULL_TREE;
2538 else
2539 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2540 : NULL_TREE;
2543 /* Returns true iff T is an SSA_NAME defined by a statement. */
2545 static bool
2546 ipa_is_ssa_with_stmt_def (tree t)
2548 if (TREE_CODE (t) == SSA_NAME
2549 && !SSA_NAME_IS_DEFAULT_DEF (t))
2550 return true;
2551 else
2552 return false;
2555 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2556 call to the parameter with number PARAM_INDEX. NODE is the caller. Return
2557 the indirect call graph edge.
2558 If POLYMORPHIC is true, record it as the destination of a polymorphic call. */
2560 static struct cgraph_edge *
2561 ipa_note_param_call (struct cgraph_node *node, int param_index,
2562 gcall *stmt, bool polymorphic)
2564 struct cgraph_edge *cs;
2566 cs = node->get_edge (stmt);
2567 cs->indirect_info->param_index = param_index;
2568 cs->indirect_info->agg_contents = 0;
2569 cs->indirect_info->member_ptr = 0;
2570 cs->indirect_info->guaranteed_unmodified = 0;
2571 ipa_node_params *info = ipa_node_params_sum->get (node);
2572 ipa_set_param_used_by_indirect_call (info, param_index, true);
2573 if (cs->indirect_info->polymorphic || polymorphic)
2574 ipa_set_param_used_by_polymorphic_call (info, param_index, true);
2575 return cs;
2578 /* Analyze the CALL and examine uses of formal parameters of the caller
2579 (described by FBI->info), which contains intermediate information about
2580 each formal parameter. Currently it checks
2581 whether the call calls a pointer that is a formal parameter and if so, the
2582 parameter is marked with the called flag and an indirect call graph edge
2583 describing the call is created. This is very simple for ordinary pointers
2584 represented in SSA but not-so-nice when it comes to member pointers. The
2585 ugly part of this function does nothing more than trying to match the
2586 pattern of such a call. An example of such a pattern is the gimple dump
2587 below; the call is on the last line:
2589 <bb 2>:
2590 f$__delta_5 = f.__delta;
2591 f$__pfn_24 = f.__pfn;
2594 <bb 2>:
2595 f$__delta_5 = MEM[(struct *)&f];
2596 f$__pfn_24 = MEM[(struct *)&f + 4B];
2598 and a few lines below:
2600 <bb 5>
2601 D.2496_3 = (int) f$__pfn_24;
2602 D.2497_4 = D.2496_3 & 1;
2603 if (D.2497_4 != 0)
2604 goto <bb 3>;
2605 else
2606 goto <bb 4>;
2608 <bb 6>:
2609 D.2500_7 = (unsigned int) f$__delta_5;
2610 D.2501_8 = &S + D.2500_7;
2611 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2612 D.2503_10 = *D.2502_9;
2613 D.2504_12 = f$__pfn_24 + -1;
2614 D.2505_13 = (unsigned int) D.2504_12;
2615 D.2506_14 = D.2503_10 + D.2505_13;
2616 D.2507_15 = *D.2506_14;
2617 iftmp.11_16 = (String:: *) D.2507_15;
2619 <bb 7>:
2620 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2621 D.2500_19 = (unsigned int) f$__delta_5;
2622 D.2508_20 = &S + D.2500_19;
2623 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2625 Such patterns are the result of simple calls to a member pointer:
2627 int doprinting (int (MyString::* f)(int) const)
2629 MyString S ("somestring");
2631 return (S.*f)(4);
2634 Moreover, the function also looks for called pointers loaded from aggregates
2635 passed by value or reference. */
2637 static void
2638 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
2639 tree target)
2641 class ipa_node_params *info = fbi->info;
2642 HOST_WIDE_INT offset;
2643 bool by_ref;
2645 if (SSA_NAME_IS_DEFAULT_DEF (target))
2647 tree var = SSA_NAME_VAR (target);
2648 int index = ipa_get_param_decl_index (info, var);
2649 if (index >= 0)
2650 ipa_note_param_call (fbi->node, index, call, false);
2651 return;
2654 int index;
2655 gimple *def = SSA_NAME_DEF_STMT (target);
2656 bool guaranteed_unmodified;
2657 if (gimple_assign_single_p (def)
2658 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2659 gimple_assign_rhs1 (def), &index, &offset,
2660 NULL, &by_ref, &guaranteed_unmodified))
2662 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2663 call, false);
2664 cs->indirect_info->offset = offset;
2665 cs->indirect_info->agg_contents = 1;
2666 cs->indirect_info->by_ref = by_ref;
2667 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
2668 return;
2671 /* Now we need to try to match the complex pattern of calling a member
2672 pointer. */
2673 if (gimple_code (def) != GIMPLE_PHI
2674 || gimple_phi_num_args (def) != 2
2675 || !POINTER_TYPE_P (TREE_TYPE (target))
2676 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2677 return;
2679 /* First, we need to check whether one of these is a load from a member
2680 pointer that is a parameter to this function. */
2681 tree n1 = PHI_ARG_DEF (def, 0);
2682 tree n2 = PHI_ARG_DEF (def, 1);
2683 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2684 return;
2685 gimple *d1 = SSA_NAME_DEF_STMT (n1);
2686 gimple *d2 = SSA_NAME_DEF_STMT (n2);
2688 tree rec;
2689 basic_block bb, virt_bb;
2690 basic_block join = gimple_bb (def);
2691 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2693 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2694 return;
2696 bb = EDGE_PRED (join, 0)->src;
2697 virt_bb = gimple_bb (d2);
2699 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2701 bb = EDGE_PRED (join, 1)->src;
2702 virt_bb = gimple_bb (d1);
2704 else
2705 return;
2707 /* Second, we need to check that the basic blocks are laid out in the way
2708 corresponding to the pattern. */
2710 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2711 || single_pred (virt_bb) != bb
2712 || single_succ (virt_bb) != join)
2713 return;
2715 /* Third, let's see that the branching is done depending on the least
2716 significant bit of the pfn. */
2718 gcond *branch = safe_dyn_cast <gcond *> (*gsi_last_bb (bb));
2719 if (!branch)
2720 return;
2722 if ((gimple_cond_code (branch) != NE_EXPR
2723 && gimple_cond_code (branch) != EQ_EXPR)
2724 || !integer_zerop (gimple_cond_rhs (branch)))
2725 return;
2727 tree cond = gimple_cond_lhs (branch);
2728 if (!ipa_is_ssa_with_stmt_def (cond))
2729 return;
2731 def = SSA_NAME_DEF_STMT (cond);
2732 if (!is_gimple_assign (def)
2733 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2734 || !integer_onep (gimple_assign_rhs2 (def)))
2735 return;
2737 cond = gimple_assign_rhs1 (def);
2738 if (!ipa_is_ssa_with_stmt_def (cond))
2739 return;
2741 def = SSA_NAME_DEF_STMT (cond);
2743 if (is_gimple_assign (def)
2744 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2746 cond = gimple_assign_rhs1 (def);
2747 if (!ipa_is_ssa_with_stmt_def (cond))
2748 return;
2749 def = SSA_NAME_DEF_STMT (cond);
2752 tree rec2;
2753 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2754 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2755 == ptrmemfunc_vbit_in_delta),
2756 NULL);
2757 if (rec != rec2)
2758 return;
2760 index = ipa_get_param_decl_index (info, rec);
2761 if (index >= 0
2762 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2764 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2765 call, false);
2766 cs->indirect_info->offset = offset;
2767 cs->indirect_info->agg_contents = 1;
2768 cs->indirect_info->member_ptr = 1;
2769 cs->indirect_info->guaranteed_unmodified = 1;
2772 return;
2775 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2776 object referenced in the expression is a formal parameter of the caller
2777 FBI->node (described by FBI->info), create a call note for the
2778 statement. */
2780 static void
2781 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2782 gcall *call, tree target)
2784 tree obj = OBJ_TYPE_REF_OBJECT (target);
2785 int index;
2786 HOST_WIDE_INT anc_offset;
2788 if (!flag_devirtualize)
2789 return;
2791 if (TREE_CODE (obj) != SSA_NAME)
2792 return;
2794 class ipa_node_params *info = fbi->info;
2795 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2797 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2798 return;
2800 anc_offset = 0;
2801 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2802 gcc_assert (index >= 0);
2803 if (detect_type_change_ssa (fbi, obj, obj_type_ref_class (target),
2804 call))
2805 return;
2807 else
2809 gimple *stmt = SSA_NAME_DEF_STMT (obj);
2810 tree expr;
2812 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2813 if (!expr)
2814 return;
2815 index = ipa_get_param_decl_index (info,
2816 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2817 gcc_assert (index >= 0);
2818 if (detect_type_change (fbi, obj, expr, obj_type_ref_class (target),
2819 call, anc_offset))
2820 return;
2823 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2824 call, true);
2825 class cgraph_indirect_call_info *ii = cs->indirect_info;
2826 ii->offset = anc_offset;
2827 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2828 ii->otr_type = obj_type_ref_class (target);
2829 ii->polymorphic = 1;
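/* For instance, in a hypothetical C++ caller

     int
     caller (Base *b)
     {
       return b->virt ();
     }

   the OBJ_TYPE_REF object is the default definition of parameter B, so
   the call is noted as a polymorphic call on that parameter (provided no
   dynamic type change is detected) with a zero ancestor offset.  */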
2832 /* Analyze a call statement CALL to determine whether and how it utilizes
2833 formal parameters of the caller (described by FBI->info), which holds
2834 intermediate information about each formal parameter. */
2836 static void
2837 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2839 tree target = gimple_call_fn (call);
2841 if (!target
2842 || (TREE_CODE (target) != SSA_NAME
2843 && !virtual_method_call_p (target)))
2844 return;
2846 struct cgraph_edge *cs = fbi->node->get_edge (call);
2847 /* If we previously turned the call into a direct call, there is
2848 no need to analyze it. */
2849 if (cs && !cs->indirect_unknown_callee)
2850 return;
2852 if (cs->indirect_info->polymorphic && flag_devirtualize)
2854 tree instance;
2855 tree target = gimple_call_fn (call);
2856 ipa_polymorphic_call_context context (current_function_decl,
2857 target, call, &instance);
2859 gcc_checking_assert (cs->indirect_info->otr_type
2860 == obj_type_ref_class (target));
2861 gcc_checking_assert (cs->indirect_info->otr_token
2862 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2864 cs->indirect_info->vptr_changed
2865 = !context.get_dynamic_type (instance,
2866 OBJ_TYPE_REF_OBJECT (target),
2867 obj_type_ref_class (target), call,
2868 &fbi->aa_walk_budget);
2869 cs->indirect_info->context = context;
2872 if (TREE_CODE (target) == SSA_NAME)
2873 ipa_analyze_indirect_call_uses (fbi, call, target);
2874 else if (virtual_method_call_p (target))
2875 ipa_analyze_virtual_call_uses (fbi, call, target);
2879 /* Analyze the call statement STMT with respect to formal parameters
2880 (described in FBI->info) of the caller given by FBI->NODE. Currently it
2881 only checks whether formal parameters are called. */
2883 static void
2884 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2886 if (is_gimple_call (stmt))
2887 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2890 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2891 If OP is a parameter declaration, mark it as used in the info structure
2892 passed in DATA. */
2894 static bool
2895 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2897 class ipa_node_params *info = (class ipa_node_params *) data;
2899 op = get_base_address (op);
2900 if (op
2901 && TREE_CODE (op) == PARM_DECL)
2903 int index = ipa_get_param_decl_index (info, op);
2904 gcc_assert (index >= 0);
2905 ipa_set_param_used (info, index, true);
2908 return false;
2911 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2912 the findings in various structures of the associated ipa_node_params
2913 structure, such as parameter flags, notes etc. FBI holds various data about
2914 the function being analyzed. */
2916 static void
2917 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2919 gimple_stmt_iterator gsi;
2920 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2922 gimple *stmt = gsi_stmt (gsi);
2924 if (is_gimple_debug (stmt))
2925 continue;
2927 ipa_analyze_stmt_uses (fbi, stmt);
2928 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2929 visit_ref_for_mod_analysis,
2930 visit_ref_for_mod_analysis,
2931 visit_ref_for_mod_analysis);
2933 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2934 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2935 visit_ref_for_mod_analysis,
2936 visit_ref_for_mod_analysis,
2937 visit_ref_for_mod_analysis);
2940 /* Return true if EXPR is a load from a dereference of SSA_NAME NAME. */
2942 static bool
2943 load_from_dereferenced_name (tree expr, tree name)
2945 tree base = get_base_address (expr);
2946 return (TREE_CODE (base) == MEM_REF
2947 && TREE_OPERAND (base, 0) == name);
2950 /* Calculate controlled uses of parameters of NODE. */
2952 static void
2953 ipa_analyze_controlled_uses (struct cgraph_node *node)
2955 ipa_node_params *info = ipa_node_params_sum->get (node);
2957 for (int i = 0; i < ipa_get_param_count (info); i++)
2959 tree parm = ipa_get_param (info, i);
2960 int call_uses = 0;
2961 bool load_dereferenced = false;
2963 /* For SSA regs, see if the parameter is used. For non-SSA parameters
2964 we compute the flag during modification analysis. */
2965 if (is_gimple_reg (parm))
2967 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2968 parm);
2969 if (ddef && !has_zero_uses (ddef))
2971 imm_use_iterator imm_iter;
2972 gimple *stmt;
2974 ipa_set_param_used (info, i, true);
2975 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, ddef)
2977 if (is_gimple_debug (stmt))
2978 continue;
2980 int all_stmt_uses = 0;
2981 use_operand_p use_p;
2982 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
2983 all_stmt_uses++;
2985 if (is_gimple_call (stmt))
2987 if (gimple_call_internal_p (stmt))
2989 call_uses = IPA_UNDESCRIBED_USE;
2990 break;
2992 int recognized_stmt_uses;
2993 if (gimple_call_fn (stmt) == ddef)
2994 recognized_stmt_uses = 1;
2995 else
2996 recognized_stmt_uses = 0;
2997 unsigned arg_count = gimple_call_num_args (stmt);
2998 for (unsigned i = 0; i < arg_count; i++)
3000 tree arg = gimple_call_arg (stmt, i);
3001 if (arg == ddef)
3002 recognized_stmt_uses++;
3003 else if (load_from_dereferenced_name (arg, ddef))
3005 load_dereferenced = true;
3006 recognized_stmt_uses++;
3010 if (recognized_stmt_uses != all_stmt_uses)
3012 call_uses = IPA_UNDESCRIBED_USE;
3013 break;
3015 if (call_uses >= 0)
3016 call_uses += all_stmt_uses;
3018 else if (gimple_assign_single_p (stmt))
3020 tree rhs = gimple_assign_rhs1 (stmt);
3021 if (all_stmt_uses != 1
3022 || !load_from_dereferenced_name (rhs, ddef))
3024 call_uses = IPA_UNDESCRIBED_USE;
3025 break;
3027 load_dereferenced = true;
3029 else
3031 call_uses = IPA_UNDESCRIBED_USE;
3032 break;
3036 else
3037 call_uses = 0;
3039 else
3040 call_uses = IPA_UNDESCRIBED_USE;
3041 ipa_set_controlled_uses (info, i, call_uses);
3042 ipa_set_param_load_dereferenced (info, i, load_dereferenced);
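/* As an illustration, in a hypothetical caller

     extern void helper (void (*) (void));

     void
     caller (void (*cb) (void))
     {
       cb ();
       helper (cb);
     }

   both uses of CB are recognized call uses (once as the callee, once as
   an argument), so its controlled-uses count is 2; storing CB into a
   global variable would instead force IPA_UNDESCRIBED_USE.  */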
3046 /* Free stuff in BI. */
3048 static void
3049 free_ipa_bb_info (struct ipa_bb_info *bi)
3051 bi->cg_edges.release ();
3052 bi->param_aa_statuses.release ();
3055 /* Dominator walker driving the analysis. */
3057 class analysis_dom_walker : public dom_walker
3059 public:
3060 analysis_dom_walker (struct ipa_func_body_info *fbi)
3061 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
3063 edge before_dom_children (basic_block) final override;
3065 private:
3066 struct ipa_func_body_info *m_fbi;
3069 edge
3070 analysis_dom_walker::before_dom_children (basic_block bb)
3072 ipa_analyze_params_uses_in_bb (m_fbi, bb);
3073 ipa_compute_jump_functions_for_bb (m_fbi, bb);
3074 return NULL;
3077 /* Release body info FBI. */
3079 void
3080 ipa_release_body_info (struct ipa_func_body_info *fbi)
3082 int i;
3083 struct ipa_bb_info *bi;
3085 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
3086 free_ipa_bb_info (bi);
3087 fbi->bb_infos.release ();
3090 /* Initialize the array describing properties of formal parameters
3091 of NODE, analyze their uses and compute jump functions associated
3092 with actual arguments of calls from within NODE. */
3094 void
3095 ipa_analyze_node (struct cgraph_node *node)
3097 struct ipa_func_body_info fbi;
3098 class ipa_node_params *info;
3100 ipa_check_create_node_params ();
3101 ipa_check_create_edge_args ();
3102 info = ipa_node_params_sum->get_create (node);
3104 if (info->analysis_done)
3105 return;
3106 info->analysis_done = 1;
3108 if (ipa_func_spec_opts_forbid_analysis_p (node)
3109 || (count_formal_params (node->decl)
3110 >= (1 << IPA_PROP_ARG_INDEX_LIMIT_BITS)))
3112 gcc_assert (!ipa_get_param_count (info));
3113 return;
3116 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
3117 push_cfun (func);
3118 calculate_dominance_info (CDI_DOMINATORS);
3119 ipa_initialize_node_params (node);
3120 ipa_analyze_controlled_uses (node);
3122 fbi.node = node;
3123 fbi.info = info;
3124 fbi.bb_infos = vNULL;
3125 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
3126 fbi.param_count = ipa_get_param_count (info);
3127 fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);
3129 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
3131 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
3132 bi->cg_edges.safe_push (cs);
3135 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
3137 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
3138 bi->cg_edges.safe_push (cs);
3141 enable_ranger (cfun, false);
3142 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
3143 disable_ranger (cfun);
3145 ipa_release_body_info (&fbi);
3146 free_dominance_info (CDI_DOMINATORS);
3147 pop_cfun ();
3150 /* Update the jump functions associated with call graph edge E when the call
3151 graph edge CS is being inlined, assuming that E->caller is already (possibly
3152 indirectly) inlined into CS->callee and that E has not been inlined. */
3154 static void
3155 update_jump_functions_after_inlining (struct cgraph_edge *cs,
3156 struct cgraph_edge *e)
3158 ipa_edge_args *top = ipa_edge_args_sum->get (cs);
3159 ipa_edge_args *args = ipa_edge_args_sum->get (e);
3160 if (!args)
3161 return;
3162 int count = ipa_get_cs_argument_count (args);
3163 int i;
3165 for (i = 0; i < count; i++)
3167 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
3168 class ipa_polymorphic_call_context *dst_ctx
3169 = ipa_get_ith_polymorhic_call_context (args, i);
3171 if (dst->agg.items)
3173 struct ipa_agg_jf_item *item;
3174 int j;
3176 FOR_EACH_VEC_ELT (*dst->agg.items, j, item)
3178 int dst_fid;
3179 struct ipa_jump_func *src;
3181 if (item->jftype != IPA_JF_PASS_THROUGH
3182 && item->jftype != IPA_JF_LOAD_AGG)
3183 continue;
3185 dst_fid = item->value.pass_through.formal_id;
3186 if (!top || dst_fid >= ipa_get_cs_argument_count (top))
3188 item->jftype = IPA_JF_UNKNOWN;
3189 continue;
3192 item->value.pass_through.formal_id = -1;
3193 src = ipa_get_ith_jump_func (top, dst_fid);
3194 if (src->type == IPA_JF_CONST)
3196 if (item->jftype == IPA_JF_PASS_THROUGH
3197 && item->value.pass_through.operation == NOP_EXPR)
3199 item->jftype = IPA_JF_CONST;
3200 item->value.constant = src->value.constant.value;
3201 continue;
3204 else if (src->type == IPA_JF_PASS_THROUGH
3205 && src->value.pass_through.operation == NOP_EXPR)
3207 if (item->jftype == IPA_JF_PASS_THROUGH
3208 || !item->value.load_agg.by_ref
3209 || src->value.pass_through.agg_preserved)
3210 item->value.pass_through.formal_id
3211 = src->value.pass_through.formal_id;
3213 else if (src->type == IPA_JF_ANCESTOR)
3215 if (item->jftype == IPA_JF_PASS_THROUGH)
3217 if (!src->value.ancestor.offset)
3218 item->value.pass_through.formal_id
3219 = src->value.ancestor.formal_id;
3221 else if (src->value.ancestor.agg_preserved)
3223 gcc_checking_assert (item->value.load_agg.by_ref);
3225 item->value.pass_through.formal_id
3226 = src->value.ancestor.formal_id;
3227 item->value.load_agg.offset
3228 += src->value.ancestor.offset;
3232 if (item->value.pass_through.formal_id < 0)
3233 item->jftype = IPA_JF_UNKNOWN;
3237 if (!top)
3239 ipa_set_jf_unknown (dst);
3240 continue;
3243 if (dst->type == IPA_JF_ANCESTOR)
3245 struct ipa_jump_func *src;
3246 int dst_fid = dst->value.ancestor.formal_id;
3247 class ipa_polymorphic_call_context *src_ctx
3248 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
3250 /* Variable number of arguments can cause havoc if we try to access
3251 one that does not exist in the inlined edge. So make sure we
3252 don't. */
3253 if (dst_fid >= ipa_get_cs_argument_count (top))
3255 ipa_set_jf_unknown (dst);
3256 continue;
3259 src = ipa_get_ith_jump_func (top, dst_fid);
3261 if (src_ctx && !src_ctx->useless_p ())
3263 class ipa_polymorphic_call_context ctx = *src_ctx;
3265 /* TODO: Make type preserved safe WRT contexts. */
3266 if (!ipa_get_jf_ancestor_type_preserved (dst))
3267 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
3268 ctx.offset_by (dst->value.ancestor.offset);
3269 if (!ctx.useless_p ())
3271 if (!dst_ctx)
3273 vec_safe_grow_cleared (args->polymorphic_call_contexts,
3274 count, true);
3275 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3278 dst_ctx->combine_with (ctx);
3282 /* The parameter and argument in an ancestor jump function must be of pointer
3283 type, which means the access to the aggregate must be by reference. */
3284 gcc_assert (!src->agg.items || src->agg.by_ref);
3286 if (src->agg.items && dst->value.ancestor.agg_preserved)
3288 struct ipa_agg_jf_item *item;
3289 int j;
3291 /* Currently we do not produce clobber aggregate jump functions,
3292 replace with merging when we do. */
3293 gcc_assert (!dst->agg.items);
3295 dst->agg.items = vec_safe_copy (src->agg.items);
3296 dst->agg.by_ref = src->agg.by_ref;
3297 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
3298 item->offset -= dst->value.ancestor.offset;
3301 if (src->type == IPA_JF_PASS_THROUGH
3302 && src->value.pass_through.operation == NOP_EXPR)
3304 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
3305 dst->value.ancestor.agg_preserved &=
3306 src->value.pass_through.agg_preserved;
3308 else if (src->type == IPA_JF_ANCESTOR)
3310 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
3311 dst->value.ancestor.offset += src->value.ancestor.offset;
3312 dst->value.ancestor.agg_preserved &=
3313 src->value.ancestor.agg_preserved;
3314 dst->value.ancestor.keep_null |= src->value.ancestor.keep_null;
3316 else
3317 ipa_set_jf_unknown (dst);
3319 else if (dst->type == IPA_JF_PASS_THROUGH)
3321 struct ipa_jump_func *src;
3322 /* We must check the range due to calls with a variable number of arguments,
3323 and we cannot combine jump functions with operations. */
3324 if (dst->value.pass_through.operation == NOP_EXPR
3325 && (top && dst->value.pass_through.formal_id
3326 < ipa_get_cs_argument_count (top)))
3328 int dst_fid = dst->value.pass_through.formal_id;
3329 src = ipa_get_ith_jump_func (top, dst_fid);
3330 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
3331 class ipa_polymorphic_call_context *src_ctx
3332 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
3334 if (src_ctx && !src_ctx->useless_p ())
3336 class ipa_polymorphic_call_context ctx = *src_ctx;
3338 /* TODO: Make type preserved safe WRT contexts. */
3339 if (!ipa_get_jf_pass_through_type_preserved (dst))
3340 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
3341 if (!ctx.useless_p ())
3343 if (!dst_ctx)
3345 vec_safe_grow_cleared (args->polymorphic_call_contexts,
3346 count, true);
3347 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3349 dst_ctx->combine_with (ctx);
3352 switch (src->type)
3354 case IPA_JF_UNKNOWN:
3355 ipa_set_jf_unknown (dst);
3356 break;
3357 case IPA_JF_CONST:
3359 bool rd = ipa_get_jf_pass_through_refdesc_decremented (dst);
3360 ipa_set_jf_cst_copy (dst, src);
3361 if (rd)
3362 ipa_zap_jf_refdesc (dst);
3365 break;
3367 case IPA_JF_PASS_THROUGH:
3369 int formal_id = ipa_get_jf_pass_through_formal_id (src);
3370 enum tree_code operation;
3371 operation = ipa_get_jf_pass_through_operation (src);
3373 if (operation == NOP_EXPR)
3375 bool agg_p;
3376 agg_p = dst_agg_p
3377 && ipa_get_jf_pass_through_agg_preserved (src);
3378 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
3380 else if (TREE_CODE_CLASS (operation) == tcc_unary)
3381 ipa_set_jf_unary_pass_through (dst, formal_id, operation);
3382 else
3384 tree operand = ipa_get_jf_pass_through_operand (src);
3385 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
3386 operation);
3388 break;
3390 case IPA_JF_ANCESTOR:
3392 bool agg_p;
3393 agg_p = dst_agg_p
3394 && ipa_get_jf_ancestor_agg_preserved (src);
3395 ipa_set_ancestor_jf (dst,
3396 ipa_get_jf_ancestor_offset (src),
3397 ipa_get_jf_ancestor_formal_id (src),
3398 agg_p,
3399 ipa_get_jf_ancestor_keep_null (src));
3400 break;
3402 default:
3403 gcc_unreachable ();
3406 if (src->agg.items
3407 && (dst_agg_p || !src->agg.by_ref))
3409 /* Currently we do not produce clobber aggregate jump
3410 functions, replace with merging when we do. */
3411 gcc_assert (!dst->agg.items);
3413 dst->agg.by_ref = src->agg.by_ref;
3414 dst->agg.items = vec_safe_copy (src->agg.items);
3417 else
3418 ipa_set_jf_unknown (dst);
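/* A small worked example (hypothetical): suppose E carries a simple
   pass-through jump function that says its argument 0 is a copy of
   formal 1 of E's caller, and CS, the edge being inlined, passes the
   constant 5 as argument 1.  After the update above, E's argument 0 is
   described directly as the constant 5.  */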
3423 /* If TARGET is an addr_expr of a function declaration, make it the
3424 (speculative, if SPECULATIVE is set) destination of an indirect edge IE and return the edge.
3425 Otherwise, return NULL. */
3427 struct cgraph_edge *
3428 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
3429 bool speculative)
3431 struct cgraph_node *callee;
3432 bool unreachable = false;
3434 if (TREE_CODE (target) == ADDR_EXPR)
3435 target = TREE_OPERAND (target, 0);
3436 if (TREE_CODE (target) != FUNCTION_DECL)
3438 target = canonicalize_constructor_val (target, NULL);
3439 if (!target || TREE_CODE (target) != FUNCTION_DECL)
3441 /* Member pointer call that goes through a VMT lookup. */
3442 if (ie->indirect_info->member_ptr
3443 /* Or if the target is not an invariant expression and we do not
3444 know whether it will evaluate to a function at runtime.
3445 This can happen when folding through &VAR, where &VAR
3446 is IP invariant, but VAR itself is not.
3448 TODO: Revisit this when GCC 5 is branched. It seems that
3449 member_ptr check is not needed and that we may try to fold
3450 the expression and see if VAR is readonly. */
3451 || !is_gimple_ip_invariant (target))
3453 if (dump_enabled_p ())
3455 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3456 "discovered direct call non-invariant %s\n",
3457 ie->caller->dump_name ());
3459 return NULL;
3463 if (dump_enabled_p ())
3465 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3466 "discovered direct call to non-function in %s, "
3467 "making it __builtin_unreachable\n",
3468 ie->caller->dump_name ());
3471 target = builtin_decl_unreachable ();
3472 callee = cgraph_node::get_create (target);
3473 unreachable = true;
3475 else
3476 callee = cgraph_node::get (target);
3478 else
3479 callee = cgraph_node::get (target);
3481 /* Because may-edges are not explicitly represented and the vtable may be
3482 external, we may create the first reference to the object in the unit. */
3483 if (!callee || callee->inlined_to)
3486 /* We had better ensure we can refer to it.
3487 In the case of static functions we are out of luck, since we have already
3488 removed the body. In the case of public functions we may or may
3489 not introduce the reference. */
3490 if (!canonicalize_constructor_val (target, NULL)
3491 || !TREE_PUBLIC (target))
3493 if (dump_file)
3494 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
3495 "(%s -> %s) but cannot refer to it. Giving up.\n",
3496 ie->caller->dump_name (),
3497 ie->callee->dump_name ());
3498 return NULL;
3500 callee = cgraph_node::get_create (target);
3503 /* If the edge is already speculated. */
3504 if (speculative && ie->speculative)
3506 if (dump_file)
3508 cgraph_edge *e2 = ie->speculative_call_for_target (callee);
3509 if (!e2)
3511 if (dump_file)
3512 fprintf (dump_file, "ipa-prop: Discovered call to a "
3513 "speculative target (%s -> %s) but the call is "
3514 "already speculated to different target. "
3515 "Giving up.\n",
3516 ie->caller->dump_name (), callee->dump_name ());
3518 else
3520 if (dump_file)
3521 fprintf (dump_file,
3522 "ipa-prop: Discovered call to a speculative target "
3523 "(%s -> %s) this agree with previous speculation.\n",
3524 ie->caller->dump_name (), callee->dump_name ());
3527 return NULL;
3530 if (!dbg_cnt (devirt))
3531 return NULL;
3533 ipa_check_create_node_params ();
3535 /* We cannot make edges to inline clones. It is a bug if someone removed
3536 the cgraph node too early. */
3537 gcc_assert (!callee->inlined_to);
3539 if (dump_file && !unreachable)
3541 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
3542 "(%s -> %s), for stmt ",
3543 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
3544 speculative ? "speculative" : "known",
3545 ie->caller->dump_name (),
3546 callee->dump_name ());
3547 if (ie->call_stmt)
3548 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
3549 else
3550 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
3552 if (dump_enabled_p ())
3554 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3555 "converting indirect call in %s to direct call to %s\n",
3556 ie->caller->dump_name (), callee->dump_name ());
3558 if (!speculative)
3560 struct cgraph_edge *orig = ie;
3561 ie = cgraph_edge::make_direct (ie, callee);
3562 /* If we resolved a speculative edge, the cost is already up to date
3563 for the direct call (adjusted by inline_edge_duplication_hook). */
3564 if (ie == orig)
3566 ipa_call_summary *es = ipa_call_summaries->get (ie);
3567 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
3568 - eni_size_weights.call_cost);
3569 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
3570 - eni_time_weights.call_cost);
3573 else
3575 if (!callee->can_be_discarded_p ())
3577 cgraph_node *alias;
3578 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
3579 if (alias)
3580 callee = alias;
3582 /* make_speculative will update ie's cost to the direct call cost. */
3583 ie = ie->make_speculative
3584 (callee, ie->count.apply_scale (8, 10));
3587 return ie;
3590 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
3591 CONSTRUCTOR and return it. Return NULL if the search fails for some
3592 reason. */
3594 static tree
3595 find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
3597 tree type = TREE_TYPE (constructor);
3598 if (TREE_CODE (type) != ARRAY_TYPE
3599 && TREE_CODE (type) != RECORD_TYPE)
3600 return NULL;
3602 unsigned ix;
3603 tree index, val;
3604 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
3606 HOST_WIDE_INT elt_offset;
3607 if (TREE_CODE (type) == ARRAY_TYPE)
3609 offset_int off;
3610 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
3611 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3613 if (index)
3615 if (TREE_CODE (index) == RANGE_EXPR)
3616 off = wi::to_offset (TREE_OPERAND (index, 0));
3617 else
3618 off = wi::to_offset (index);
3619 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
3621 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
3622 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3623 off = wi::sext (off - wi::to_offset (low_bound),
3624 TYPE_PRECISION (TREE_TYPE (index)));
3626 off *= wi::to_offset (unit_size);
3627 /* ??? Handle more than just the first index of a
3628 RANGE_EXPR. */
3630 else
3631 off = wi::to_offset (unit_size) * ix;
3633 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
3634 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
3635 continue;
3636 elt_offset = off.to_shwi ();
3638 else if (TREE_CODE (type) == RECORD_TYPE)
3640 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
3641 if (DECL_BIT_FIELD (index))
3642 continue;
3643 elt_offset = int_bit_position (index);
3645 else
3646 gcc_unreachable ();
3648 if (elt_offset > req_offset)
3649 return NULL;
3651 if (TREE_CODE (val) == CONSTRUCTOR)
3652 return find_constructor_constant_at_offset (val,
3653 req_offset - elt_offset);
3655 if (elt_offset == req_offset
3656 && is_gimple_reg_type (TREE_TYPE (val))
3657 && is_gimple_ip_invariant (val))
3658 return val;
3660 return NULL;
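/* For example (hypothetical, assuming a target with 32-bit int), given

     static const struct { int a; int b; } cst = { 11, 12 };

   a lookup with REQ_OFFSET 32 (bits) in DECL_INITIAL of cst descends
   into the CONSTRUCTOR and returns the INTEGER_CST 12 stored for
   field b.  */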
3663 /* Check whether SCALAR could be used to look up an aggregate interprocedural
3664 invariant from a static constructor and if so, return it. Otherwise return
3665 NULL. */
3667 tree
3668 ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
3670 if (by_ref)
3672 if (TREE_CODE (scalar) != ADDR_EXPR)
3673 return NULL;
3674 scalar = TREE_OPERAND (scalar, 0);
3677 if (!VAR_P (scalar)
3678 || !is_global_var (scalar)
3679 || !TREE_READONLY (scalar)
3680 || !DECL_INITIAL (scalar)
3681 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
3682 return NULL;
3684 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
3687 /* Retrieve value from AGG_JFUNC for the given OFFSET or return NULL if there
3688 is none. BY_REF specifies whether the value has to be passed by reference
3689 or by value. */
3691 static tree
3692 ipa_find_agg_cst_from_jfunc_items (struct ipa_agg_jump_function *agg_jfunc,
3693 ipa_node_params *src_info,
3694 cgraph_node *src_node,
3695 HOST_WIDE_INT offset, bool by_ref)
3697 if (by_ref != agg_jfunc->by_ref)
3698 return NULL_TREE;
3700 for (const ipa_agg_jf_item &item : agg_jfunc->items)
3701 if (item.offset == offset)
3702 return ipa_agg_value_from_jfunc (src_info, src_node, &item);
3704 return NULL_TREE;
3707 /* Remove a reference to SYMBOL from the list of references of a node given by
3708 reference description RDESC. Return true if the reference has been
3709 successfully found and removed. */
3711 static bool
3712 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
3714 struct ipa_ref *to_del;
3715 struct cgraph_edge *origin;
3717 origin = rdesc->cs;
3718 if (!origin)
3719 return false;
3720 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3721 origin->lto_stmt_uid, IPA_REF_ADDR);
3722 if (!to_del)
3723 return false;
3725 to_del->remove_reference ();
3726 if (dump_file)
3727 fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
3728 origin->caller->dump_name (), symbol->dump_name ());
3729 return true;
3732 /* If JFUNC has a reference description with refcount different from
3733 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3734 NULL. JFUNC must be a constant jump function. */
3736 static struct ipa_cst_ref_desc *
3737 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3739 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3740 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3741 return rdesc;
3742 else
3743 return NULL;
3746 /* If the value of constant jump function JFUNC is an address of a function
3747 or variable declaration, return the associated symtab node. Otherwise
3748 return NULL. */
3750 static symtab_node *
3751 symtab_node_for_jfunc (struct ipa_jump_func *jfunc)
3753 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3754 tree cst = ipa_get_jf_constant (jfunc);
3755 if (TREE_CODE (cst) != ADDR_EXPR
3756 || (TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL
3757 && TREE_CODE (TREE_OPERAND (cst, 0)) != VAR_DECL))
3758 return NULL;
3760 return symtab_node::get (TREE_OPERAND (cst, 0));
3764 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3765 refcount, and if it hits zero, remove the reference to the symbol it
3766 describes from the caller of the edge specified in the rdesc. Return false
3767 if either the symbol or the reference could not be found, otherwise true. */
3769 static bool
3770 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3772 struct ipa_cst_ref_desc *rdesc;
3773 if (jfunc->type == IPA_JF_CONST
3774 && (rdesc = jfunc_rdesc_usable (jfunc))
3775 && --rdesc->refcount == 0)
3777 symtab_node *symbol = symtab_node_for_jfunc (jfunc);
3778 if (!symbol)
3779 return false;
3781 return remove_described_reference (symbol, rdesc);
3783 return true;
3786 /* Try to find a destination for indirect edge IE that corresponds to a simple
3787 call or a call of a member function pointer and where the destination is a
3788 pointer formal parameter described by jump function JFUNC. TARGET_TYPE is
3789 the type of the parameter to which the result of JFUNC is passed. If it can
3790 be determined, return the newly direct edge, otherwise return NULL.
3791 NEW_ROOT and NEW_ROOT_INFO are the node and its info that JFUNC lattices are
3792 relative to. */
3794 static struct cgraph_edge *
3795 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3796 struct ipa_jump_func *jfunc, tree target_type,
3797 struct cgraph_node *new_root,
3798 class ipa_node_params *new_root_info)
3800 struct cgraph_edge *cs;
3801 tree target = NULL_TREE;
3802 bool agg_contents = ie->indirect_info->agg_contents;
3803 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
3804 if (agg_contents)
3806 if (scalar)
3807 target = ipa_find_agg_cst_from_init (scalar, ie->indirect_info->offset,
3808 ie->indirect_info->by_ref);
3809 if (!target && ie->indirect_info->guaranteed_unmodified)
3810 target = ipa_find_agg_cst_from_jfunc_items (&jfunc->agg, new_root_info,
3811 new_root,
3812 ie->indirect_info->offset,
3813 ie->indirect_info->by_ref);
3815 else
3816 target = scalar;
3817 if (!target)
3818 return NULL;
3819 cs = ipa_make_edge_direct_to_target (ie, target);
3821 if (cs && !agg_contents)
3823 bool ok;
3824 gcc_checking_assert (cs->callee
3825 && (cs != ie
3826 || jfunc->type != IPA_JF_CONST
3827 || !symtab_node_for_jfunc (jfunc)
3828 || cs->callee == symtab_node_for_jfunc (jfunc)));
3829 ok = try_decrement_rdesc_refcount (jfunc);
3830 gcc_checking_assert (ok);
3833 return cs;
3836 /* Return the target to be used in cases of impossible devirtualization. IE
3837 and TARGET (the latter can be NULL) are dumped when dumping is enabled. */
3839 tree
3840 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3842 if (dump_file)
3844 if (target)
3845 fprintf (dump_file,
3846 "Type inconsistent devirtualization: %s->%s\n",
3847 ie->caller->dump_name (),
3848 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3849 else
3850 fprintf (dump_file,
3851 "No devirtualization target in %s\n",
3852 ie->caller->dump_name ());
3854 tree new_target = builtin_decl_unreachable ();
3855 cgraph_node::get_create (new_target);
3856 return new_target;
3859 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3860 call based on a formal parameter which is described by jump function JFUNC
3861 and if it can be determined, make it direct and return the direct edge.
3862 Otherwise, return NULL. CTX describes the polymorphic context that the
3863 parameter the call is based on brings along with it. NEW_ROOT and
3864 NEW_ROOT_INFO are the node and its info that JFUNC lattices are relative
3865 to. */
3867 static struct cgraph_edge *
3868 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3869 struct ipa_jump_func *jfunc,
3870 class ipa_polymorphic_call_context ctx,
3871 struct cgraph_node *new_root,
3872 class ipa_node_params *new_root_info)
3874 tree target = NULL;
3875 bool speculative = false;
3877 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
3878 return NULL;
3880 gcc_assert (!ie->indirect_info->by_ref);
3882 /* Try to do lookup via known virtual table pointer value. */
3883 if (!ie->indirect_info->vptr_changed
3884 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
3886 tree vtable;
3887 unsigned HOST_WIDE_INT offset;
3888 tree t = NULL_TREE;
3889 if (jfunc->type == IPA_JF_CONST)
3890 t = ipa_find_agg_cst_from_init (ipa_get_jf_constant (jfunc),
3891 ie->indirect_info->offset, true);
3892 if (!t)
3893 t = ipa_find_agg_cst_from_jfunc_items (&jfunc->agg, new_root_info,
3894 new_root,
3895 ie->indirect_info->offset, true);
3896 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3898 bool can_refer;
3899 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3900 vtable, offset, &can_refer);
3901 if (can_refer)
3903 if (!t
3904 || fndecl_built_in_p (t, BUILT_IN_UNREACHABLE,
3905 BUILT_IN_UNREACHABLE_TRAP)
3906 || !possible_polymorphic_call_target_p
3907 (ie, cgraph_node::get (t)))
3909 /* Do not speculate builtin_unreachable; it is stupid! */
3910 if (!ie->indirect_info->vptr_changed)
3911 target = ipa_impossible_devirt_target (ie, target);
3912 else
3913 target = NULL;
3915 else
3917 target = t;
3918 speculative = ie->indirect_info->vptr_changed;
3924 ipa_polymorphic_call_context ie_context (ie);
3925 vec <cgraph_node *>targets;
3926 bool final;
3928 ctx.offset_by (ie->indirect_info->offset);
3929 if (ie->indirect_info->vptr_changed)
3930 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3931 ie->indirect_info->otr_type);
3932 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3933 targets = possible_polymorphic_call_targets
3934 (ie->indirect_info->otr_type,
3935 ie->indirect_info->otr_token,
3936 ctx, &final);
3937 if (final && targets.length () <= 1)
3939 speculative = false;
3940 if (targets.length () == 1)
3941 target = targets[0]->decl;
3942 else
3943 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3945 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3946 && !ie->speculative && ie->maybe_hot_p ())
3948 cgraph_node *n;
3949 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3950 ie->indirect_info->otr_token,
3951 ie->indirect_info->context);
3952 if (n)
3954 target = n->decl;
3955 speculative = true;
3959 if (target)
3961 if (!possible_polymorphic_call_target_p
3962 (ie, cgraph_node::get_create (target)))
3964 if (speculative)
3965 return NULL;
3966 target = ipa_impossible_devirt_target (ie, target);
3968 return ipa_make_edge_direct_to_target (ie, target, speculative);
3970 else
3971 return NULL;
3974 /* Update the param called notes associated with NODE when CS is being inlined,
3975 assuming NODE is (potentially indirectly) inlined into CS->callee.
3976 Moreover, if the callee is discovered to be constant, create a new cgraph
3977 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3978 unless NEW_EDGES is NULL. Return true iff any new edges were created. */
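/* A sketch of the non-polymorphic case, with hypothetical user code:

     static void target (int i) { }
     void helper (void (*cb) (int)) { cb (1); }   // indirect edge on param 0
     void entry (void) { helper (target); }

   Inlining HELPER into ENTRY makes the jump function of the indirect edge's
   controlling parameter a known constant (&target), so
   try_make_edge_direct_simple_call below can turn the call into a direct
   one.  */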
3980 static bool
3981 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3982 struct cgraph_node *node,
3983 vec<cgraph_edge *> *new_edges)
3985 class ipa_edge_args *top;
3986 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3987 struct cgraph_node *new_root;
3988 class ipa_node_params *new_root_info, *inlined_node_info;
3989 bool res = false;
3991 ipa_check_create_edge_args ();
3992 top = ipa_edge_args_sum->get (cs);
3993 new_root = cs->caller->inlined_to
3994 ? cs->caller->inlined_to : cs->caller;
3995 new_root_info = ipa_node_params_sum->get (new_root);
3996 inlined_node_info = ipa_node_params_sum->get (cs->callee->function_symbol ());
3998 for (ie = node->indirect_calls; ie; ie = next_ie)
4000 class cgraph_indirect_call_info *ici = ie->indirect_info;
4001 struct ipa_jump_func *jfunc;
4002 int param_index;
4004 next_ie = ie->next_callee;
4006 if (ici->param_index == -1)
4007 continue;
4009 /* We must check the range due to calls with a variable number of arguments: */
4010 if (!top || ici->param_index >= ipa_get_cs_argument_count (top))
4012 ici->param_index = -1;
4013 continue;
4016 param_index = ici->param_index;
4017 jfunc = ipa_get_ith_jump_func (top, param_index);
4019 auto_vec<cgraph_node *, 4> spec_targets;
4020 if (ie->speculative)
4021 for (cgraph_edge *direct = ie->first_speculative_call_target ();
4022 direct;
4023 direct = direct->next_speculative_call_target ())
4024 spec_targets.safe_push (direct->callee);
4026 if (!opt_for_fn (node->decl, flag_indirect_inlining))
4027 new_direct_edge = NULL;
4028 else if (ici->polymorphic)
4030 ipa_polymorphic_call_context ctx;
4031 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
4032 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx,
4033 new_root,
4034 new_root_info);
4036 else
4038 tree target_type = ipa_get_type (inlined_node_info, param_index);
4039 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
4040 target_type,
4041 new_root,
4042 new_root_info);
4045 /* If speculation was removed, then we need to do nothing. */
4046 if (new_direct_edge && new_direct_edge != ie
4047 && spec_targets.contains (new_direct_edge->callee))
4049 new_direct_edge->indirect_inlining_edge = 1;
4050 res = true;
4051 if (!new_direct_edge->speculative)
4052 continue;
4054 else if (new_direct_edge)
4056 new_direct_edge->indirect_inlining_edge = 1;
4057 if (new_edges)
4059 new_edges->safe_push (new_direct_edge);
4060 res = true;
4062 /* If a speculative edge was introduced, we still need to update
4063 the call info of the indirect edge. */
4064 if (!new_direct_edge->speculative)
4065 continue;
4067 if (jfunc->type == IPA_JF_PASS_THROUGH
4068 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
4070 if (ici->agg_contents
4071 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
4072 && !ici->polymorphic)
4073 ici->param_index = -1;
4074 else
4076 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
4077 if (ici->polymorphic
4078 && !ipa_get_jf_pass_through_type_preserved (jfunc))
4079 ici->vptr_changed = true;
4080 ipa_set_param_used_by_indirect_call (new_root_info,
4081 ici->param_index, true);
4082 if (ici->polymorphic)
4083 ipa_set_param_used_by_polymorphic_call (new_root_info,
4084 ici->param_index, true);
4087 else if (jfunc->type == IPA_JF_ANCESTOR)
4089 if (ici->agg_contents
4090 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
4091 && !ici->polymorphic)
4092 ici->param_index = -1;
4093 else
4095 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
4096 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
4097 if (ici->polymorphic
4098 && !ipa_get_jf_ancestor_type_preserved (jfunc))
4099 ici->vptr_changed = true;
4100 ipa_set_param_used_by_indirect_call (new_root_info,
4101 ici->param_index, true);
4102 if (ici->polymorphic)
4103 ipa_set_param_used_by_polymorphic_call (new_root_info,
4104 ici->param_index, true);
4107 else
4108 /* Either we can find a destination for this edge now or never. */
4109 ici->param_index = -1;
4112 return res;
4115 /* Recursively traverse subtree of NODE (including node) made of inlined
4116 cgraph_edges when CS has been inlined and invoke
4117 update_indirect_edges_after_inlining on all nodes and
4118 update_jump_functions_after_inlining on all non-inlined edges that lead out
4119 of this subtree. Newly discovered indirect edges will be added to
4120 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
4121 created. */
4123 static bool
4124 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
4125 struct cgraph_node *node,
4126 vec<cgraph_edge *> *new_edges)
4128 struct cgraph_edge *e;
4129 bool res;
4131 res = update_indirect_edges_after_inlining (cs, node, new_edges);
4133 for (e = node->callees; e; e = e->next_callee)
4134 if (!e->inline_failed)
4135 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
4136 else
4137 update_jump_functions_after_inlining (cs, e);
4138 for (e = node->indirect_calls; e; e = e->next_callee)
4139 update_jump_functions_after_inlining (cs, e);
4141 return res;
4144 /* Combine two controlled uses counts as done during inlining. */
4146 static int
4147 combine_controlled_uses_counters (int c, int d)
4149 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
4150 return IPA_UNDESCRIBED_USE;
4151 else
4152 return c + d - 1;
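/* Worked example: if the caller tracked c = 3 controlled uses of a value and
   the callee's formal parameter had d = 2, the use constituting the
   call-site argument is consumed by inlining, leaving 3 + 2 - 1 = 4.  If
   either count is IPA_UNDESCRIBED_USE, the result stays undescribed.  */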
4155 /* Propagate the number of controlled uses from CS->callee to the new root of the
4156 tree of inlined nodes. */
4158 static void
4159 propagate_controlled_uses (struct cgraph_edge *cs)
4161 ipa_edge_args *args = ipa_edge_args_sum->get (cs);
4162 if (!args)
4163 return;
4164 struct cgraph_node *new_root = cs->caller->inlined_to
4165 ? cs->caller->inlined_to : cs->caller;
4166 ipa_node_params *new_root_info = ipa_node_params_sum->get (new_root);
4167 ipa_node_params *old_root_info = ipa_node_params_sum->get (cs->callee);
4168 int count, i;
4170 if (!old_root_info)
4171 return;
4173 count = MIN (ipa_get_cs_argument_count (args),
4174 ipa_get_param_count (old_root_info));
4175 for (i = 0; i < count; i++)
4177 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
4178 struct ipa_cst_ref_desc *rdesc;
4180 if (jf->type == IPA_JF_PASS_THROUGH
4181 && !ipa_get_jf_pass_through_refdesc_decremented (jf))
4183 int src_idx, c, d;
4184 src_idx = ipa_get_jf_pass_through_formal_id (jf);
4185 c = ipa_get_controlled_uses (new_root_info, src_idx);
4186 d = ipa_get_controlled_uses (old_root_info, i);
4188 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
4189 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
4190 c = combine_controlled_uses_counters (c, d);
4191 ipa_set_controlled_uses (new_root_info, src_idx, c);
4192 bool lderef = true;
4193 if (c != IPA_UNDESCRIBED_USE)
4195 lderef = (ipa_get_param_load_dereferenced (new_root_info, src_idx)
4196 || ipa_get_param_load_dereferenced (old_root_info, i));
4197 ipa_set_param_load_dereferenced (new_root_info, src_idx, lderef);
4200 if (c == 0 && !lderef && new_root_info->ipcp_orig_node)
4202 struct cgraph_node *n;
4203 struct ipa_ref *ref;
4204 tree t = new_root_info->known_csts[src_idx];
4206 if (t && TREE_CODE (t) == ADDR_EXPR
4207 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
4208 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
4209 && (ref = new_root->find_reference (n, NULL, 0,
4210 IPA_REF_ADDR)))
4212 if (dump_file)
4213 fprintf (dump_file, "ipa-prop: Removing cloning-created "
4214 "reference from %s to %s.\n",
4215 new_root->dump_name (),
4216 n->dump_name ());
4217 ref->remove_reference ();
4221 else if (jf->type == IPA_JF_CONST
4222 && (rdesc = jfunc_rdesc_usable (jf)))
4224 int d = ipa_get_controlled_uses (old_root_info, i);
4225 int c = rdesc->refcount;
4226 tree cst = ipa_get_jf_constant (jf);
4227 rdesc->refcount = combine_controlled_uses_counters (c, d);
4228 if (rdesc->refcount != IPA_UNDESCRIBED_USE
4229 && ipa_get_param_load_dereferenced (old_root_info, i)
4230 && TREE_CODE (cst) == ADDR_EXPR
4231 && VAR_P (TREE_OPERAND (cst, 0)))
4233 symtab_node *n = symtab_node::get (TREE_OPERAND (cst, 0));
4234 new_root->create_reference (n, IPA_REF_LOAD, NULL);
4235 if (dump_file)
4236 fprintf (dump_file, "ipa-prop: Address IPA constant will reach "
4237 "a load so adding LOAD reference from %s to %s.\n",
4238 new_root->dump_name (), n->dump_name ());
4240 if (rdesc->refcount == 0)
4242 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
4243 && ((TREE_CODE (TREE_OPERAND (cst, 0))
4244 == FUNCTION_DECL)
4245 || VAR_P (TREE_OPERAND (cst, 0))));
4247 symtab_node *n = symtab_node::get (TREE_OPERAND (cst, 0));
4248 if (n)
4250 remove_described_reference (n, rdesc);
4251 cgraph_node *clone = cs->caller;
4252 while (clone->inlined_to
4253 && clone->ipcp_clone
4254 && clone != rdesc->cs->caller)
4256 struct ipa_ref *ref;
4257 ref = clone->find_reference (n, NULL, 0, IPA_REF_ADDR);
4258 if (ref)
4260 if (dump_file)
4261 fprintf (dump_file, "ipa-prop: Removing "
4262 "cloning-created reference "
4263 "from %s to %s.\n",
4264 clone->dump_name (),
4265 n->dump_name ());
4266 ref->remove_reference ();
4268 clone = clone->callers->caller;
4275 for (i = ipa_get_param_count (old_root_info);
4276 i < ipa_get_cs_argument_count (args);
4277 i++)
4279 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
4281 if (jf->type == IPA_JF_CONST)
4283 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
4284 if (rdesc)
4285 rdesc->refcount = IPA_UNDESCRIBED_USE;
4287 else if (jf->type == IPA_JF_PASS_THROUGH)
4288 ipa_set_controlled_uses (new_root_info,
4289 jf->value.pass_through.formal_id,
4290 IPA_UNDESCRIBED_USE);
4294 /* Update jump functions and call note functions on inlining the call site CS.
4295 CS is expected to lead to a node already cloned by
4296 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
4297 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
4298 created. */
4300 bool
4301 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
4302 vec<cgraph_edge *> *new_edges)
4304 bool changed;
4305 /* Do nothing if the preparation phase has not been carried out yet
4306 (i.e. during early inlining). */
4307 if (!ipa_node_params_sum)
4308 return false;
4309 gcc_assert (ipa_edge_args_sum);
4311 propagate_controlled_uses (cs);
4312 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
4313 ipa_node_params_sum->remove (cs->callee);
4315 ipa_edge_args *args = ipa_edge_args_sum->get (cs);
4316 if (args)
4318 bool ok = true;
4319 if (args->jump_functions)
4321 struct ipa_jump_func *jf;
4322 int i;
4323 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
4324 if (jf->type == IPA_JF_CONST
4325 && ipa_get_jf_constant_rdesc (jf))
4327 ok = false;
4328 break;
4331 if (ok)
4332 ipa_edge_args_sum->remove (cs);
4334 if (ipcp_transformation_sum)
4335 ipcp_transformation_sum->remove (cs->callee);
4337 return changed;
4340 /* Ensure that array of edge arguments infos is big enough to accommodate a
4341 structure for all edges and reallocate it if not. Also, allocate
4342 associated hash tables if they do not already exist. */
4344 void
4345 ipa_check_create_edge_args (void)
4347 if (!ipa_edge_args_sum)
4348 ipa_edge_args_sum
4349 = (new (ggc_alloc_no_dtor<ipa_edge_args_sum_t> ())
4350 ipa_edge_args_sum_t (symtab, true));
4351 if (!ipa_vr_hash_table)
4352 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
4355 /* Free all ipa_edge structures. */
4357 void
4358 ipa_free_all_edge_args (void)
4360 if (!ipa_edge_args_sum)
4361 return;
4363 ggc_delete (ipa_edge_args_sum);
4364 ipa_edge_args_sum = NULL;
4367 /* Free all ipa_node_params structures. */
4369 void
4370 ipa_free_all_node_params (void)
4372 if (ipa_node_params_sum)
4373 ggc_delete (ipa_node_params_sum);
4374 ipa_node_params_sum = NULL;
4377 /* Initialize IPA CP transformation summary and also allocate any necessary hash
4378 tables if they do not already exist. */
4380 void
4381 ipcp_transformation_initialize (void)
4383 if (!ipa_vr_hash_table)
4384 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
4385 if (ipcp_transformation_sum == NULL)
4387 ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
4388 ipcp_transformation_sum->disable_insertion_hook ();
4392 /* Release the IPA CP transformation summary. */
4394 void
4395 ipcp_free_transformation_sum (void)
4397 if (!ipcp_transformation_sum)
4398 return;
4400 ipcp_transformation_sum->~function_summary<ipcp_transformation *> ();
4401 ggc_free (ipcp_transformation_sum);
4402 ipcp_transformation_sum = NULL;
4405 /* Set the aggregate replacements of NODE to be AGGVALS. */
4407 void
4408 ipa_set_node_agg_value_chain (struct cgraph_node *node,
4409 vec<ipa_argagg_value, va_gc> *aggs)
4411 ipcp_transformation_initialize ();
4412 ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
4413 s->m_agg_values = aggs;
4416 /* Hook that is called by cgraph.cc when an edge is removed. Adjust reference
4417 count data structures accordingly. */
4419 void
4420 ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
4422 if (args->jump_functions)
4424 struct ipa_jump_func *jf;
4425 int i;
4426 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
4428 struct ipa_cst_ref_desc *rdesc;
4429 try_decrement_rdesc_refcount (jf);
4430 if (jf->type == IPA_JF_CONST
4431 && (rdesc = ipa_get_jf_constant_rdesc (jf))
4432 && rdesc->cs == cs)
4433 rdesc->cs = NULL;
4438 /* Method invoked when an edge is duplicated. Copy ipa_edge_args and adjust
4439 reference count data structures accordingly. */
4441 void
4442 ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
4443 ipa_edge_args *old_args, ipa_edge_args *new_args)
4445 unsigned int i;
4447 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
4448 if (old_args->polymorphic_call_contexts)
4449 new_args->polymorphic_call_contexts
4450 = vec_safe_copy (old_args->polymorphic_call_contexts);
4452 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
4454 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
4455 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
4457 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
4459 if (src_jf->type == IPA_JF_CONST)
4461 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
4463 if (!src_rdesc)
4464 dst_jf->value.constant.rdesc = NULL;
4465 else if (src->caller == dst->caller)
4467 /* Creation of a speculative edge. If the source edge is the one
4468 grabbing a reference, we must create a new (duplicate)
4469 reference description. Otherwise they refer to the same
4470 description corresponding to a reference taken in a function
4471 src->caller is inlined to. In that case we only need to
4472 increment the refcount. */
4473 if (src_rdesc->cs == src)
4475 symtab_node *n = symtab_node_for_jfunc (src_jf);
4476 gcc_checking_assert (n);
4477 ipa_ref *ref
4478 = src->caller->find_reference (n, src->call_stmt,
4479 src->lto_stmt_uid,
4480 IPA_REF_ADDR);
4481 gcc_checking_assert (ref);
4482 dst->caller->clone_reference (ref, ref->stmt);
4484 ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
4485 dst_rdesc->cs = dst;
4486 dst_rdesc->refcount = src_rdesc->refcount;
4487 dst_rdesc->next_duplicate = NULL;
4488 dst_jf->value.constant.rdesc = dst_rdesc;
4490 else
4492 src_rdesc->refcount++;
4493 dst_jf->value.constant.rdesc = src_rdesc;
4496 else if (src_rdesc->cs == src)
4498 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
4499 dst_rdesc->cs = dst;
4500 dst_rdesc->refcount = src_rdesc->refcount;
4501 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
4502 src_rdesc->next_duplicate = dst_rdesc;
4503 dst_jf->value.constant.rdesc = dst_rdesc;
4505 else
4507 struct ipa_cst_ref_desc *dst_rdesc;
4508 /* This can happen during inlining, when a JFUNC can refer to a
4509 reference taken in a function up in the tree of inline clones.
4510 We need to find the duplicate that refers to our tree of
4511 inline clones. */
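/* A sketch: if function A was inlined into B, and B is now being inlined
   into C, SRC_RDESC may describe a reference taken in the B tree of clones;
   walking the next_duplicate chain finds the description whose edge's
   caller has the same inline root as DST's caller (dst->caller->inlined_to),
   i.e. the one belonging to our own tree of clones.  */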
4513 gcc_assert (dst->caller->inlined_to);
4514 for (dst_rdesc = src_rdesc->next_duplicate;
4515 dst_rdesc;
4516 dst_rdesc = dst_rdesc->next_duplicate)
4518 struct cgraph_node *top;
4519 top = dst_rdesc->cs->caller->inlined_to
4520 ? dst_rdesc->cs->caller->inlined_to
4521 : dst_rdesc->cs->caller;
4522 if (dst->caller->inlined_to == top)
4523 break;
4525 gcc_assert (dst_rdesc);
4526 dst_jf->value.constant.rdesc = dst_rdesc;
4529 else if (dst_jf->type == IPA_JF_PASS_THROUGH
4530 && src->caller == dst->caller)
4532 struct cgraph_node *inline_root = dst->caller->inlined_to
4533 ? dst->caller->inlined_to : dst->caller;
4534 ipa_node_params *root_info = ipa_node_params_sum->get (inline_root);
4535 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
4537 int c = ipa_get_controlled_uses (root_info, idx);
4538 if (c != IPA_UNDESCRIBED_USE)
4540 c++;
4541 ipa_set_controlled_uses (root_info, idx, c);
4547 /* Analyze a function newly added to the callgraph. */
4549 static void
4550 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
4552 if (node->has_gimple_body_p ())
4553 ipa_analyze_node (node);
4556 /* Hook that is called by summary when a node is duplicated. */
4558 void
4559 ipa_node_params_t::duplicate(cgraph_node *, cgraph_node *,
4560 ipa_node_params *old_info,
4561 ipa_node_params *new_info)
4563 new_info->descriptors = vec_safe_copy (old_info->descriptors);
4564 new_info->lattices = NULL;
4565 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
4566 new_info->known_csts = old_info->known_csts.copy ();
4567 new_info->known_contexts = old_info->known_contexts.copy ();
4569 new_info->analysis_done = old_info->analysis_done;
4570 new_info->node_enqueued = old_info->node_enqueued;
4571 new_info->versionable = old_info->versionable;
4574 /* Duplication of ipcp transformation summaries. */
4576 void
4577 ipcp_transformation_t::duplicate(cgraph_node *, cgraph_node *dst,
4578 ipcp_transformation *src_trans,
4579 ipcp_transformation *dst_trans)
4581 /* Avoid redundant work of duplicating vectors we will never use. */
4582 if (dst->inlined_to)
4583 return;
4584 dst_trans->m_agg_values = vec_safe_copy (src_trans->m_agg_values);
4585 dst_trans->m_vr = vec_safe_copy (src_trans->m_vr);
4588 /* Register our cgraph hooks if they are not already there. */
4590 void
4591 ipa_register_cgraph_hooks (void)
4593 ipa_check_create_node_params ();
4594 ipa_check_create_edge_args ();
4596 function_insertion_hook_holder =
4597 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
4600 /* Unregister our cgraph hooks if they are not already there. */
4602 static void
4603 ipa_unregister_cgraph_hooks (void)
4605 if (function_insertion_hook_holder)
4606 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
4607 function_insertion_hook_holder = NULL;
4610 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4611 longer needed after ipa-cp. */
4613 void
4614 ipa_free_all_structures_after_ipa_cp (void)
4616 if (!optimize && !in_lto_p)
4618 ipa_free_all_edge_args ();
4619 ipa_free_all_node_params ();
4620 ipcp_sources_pool.release ();
4621 ipcp_cst_values_pool.release ();
4622 ipcp_poly_ctx_values_pool.release ();
4623 ipcp_agg_lattice_pool.release ();
4624 ipa_unregister_cgraph_hooks ();
4625 ipa_refdesc_pool.release ();
4629 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4630 longer needed after indirect inlining. */
4632 void
4633 ipa_free_all_structures_after_iinln (void)
4635 ipa_free_all_edge_args ();
4636 ipa_free_all_node_params ();
4637 ipa_unregister_cgraph_hooks ();
4638 ipcp_sources_pool.release ();
4639 ipcp_cst_values_pool.release ();
4640 ipcp_poly_ctx_values_pool.release ();
4641 ipcp_agg_lattice_pool.release ();
4642 ipa_refdesc_pool.release ();
4645 /* Print ipa_tree_map data structure (parameter descriptors) of function
4646 NODE to F. */
4648 void
4649 ipa_print_node_params (FILE *f, struct cgraph_node *node)
4651 int i, count;
4652 class ipa_node_params *info;
4654 if (!node->definition)
4655 return;
4656 info = ipa_node_params_sum->get (node);
4657 fprintf (f, " function %s parameter descriptors:\n", node->dump_name ());
4658 if (!info)
4660 fprintf (f, " no params return\n");
4661 return;
4663 count = ipa_get_param_count (info);
4664 for (i = 0; i < count; i++)
4666 int c;
4668 fprintf (f, " ");
4669 ipa_dump_param (f, info, i);
4670 if (ipa_is_param_used (info, i))
4671 fprintf (f, " used");
4672 if (ipa_is_param_used_by_ipa_predicates (info, i))
4673 fprintf (f, " used_by_ipa_predicates");
4674 if (ipa_is_param_used_by_indirect_call (info, i))
4675 fprintf (f, " used_by_indirect_call");
4676 if (ipa_is_param_used_by_polymorphic_call (info, i))
4677 fprintf (f, " used_by_polymorphic_call");
4678 c = ipa_get_controlled_uses (info, i);
4679 if (c == IPA_UNDESCRIBED_USE)
4680 fprintf (f, " undescribed_use");
4681 else
4682 fprintf (f, " controlled_uses=%i %s", c,
4683 ipa_get_param_load_dereferenced (info, i)
4684 ? "(load_dereferenced)" : "");
4685 fprintf (f, "\n");
4689 /* Print ipa_tree_map data structures of all functions in the
4690 callgraph to F. */
4692 void
4693 ipa_print_all_params (FILE * f)
4695 struct cgraph_node *node;
4697 fprintf (f, "\nFunction parameters:\n");
4698 FOR_EACH_FUNCTION (node)
4699 ipa_print_node_params (f, node);
4702 /* Stream out jump function JUMP_FUNC to OB. */
4704 static void
4705 ipa_write_jump_function (struct output_block *ob,
4706 struct ipa_jump_func *jump_func)
4708 struct ipa_agg_jf_item *item;
4709 struct bitpack_d bp;
4710 int i, count;
4711 int flag = 0;
4713 /* ADDR_EXPRs are very common IP invariants; save some streamer data
4714 as well as WPA memory by handling them specially. */
4715 if (jump_func->type == IPA_JF_CONST
4716 && TREE_CODE (jump_func->value.constant.value) == ADDR_EXPR)
4717 flag = 1;
4719 streamer_write_uhwi (ob, jump_func->type * 2 + flag);
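/* Worked example of the encoding above: an IPA_JF_CONST jump function whose
   constant is an ADDR_EXPR is written as type * 2 + 1.  The reader recovers
   the flag as val & 1 and the type as val / 2, and rebuilds the ADDR_EXPR
   around the streamed operand (see ipa_read_jump_function below).  */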
4720 switch (jump_func->type)
4722 case IPA_JF_UNKNOWN:
4723 break;
4724 case IPA_JF_CONST:
4725 gcc_assert (
4726 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4727 stream_write_tree (ob,
4728 flag
4729 ? TREE_OPERAND (jump_func->value.constant.value, 0)
4730 : jump_func->value.constant.value, true);
4731 break;
4732 case IPA_JF_PASS_THROUGH:
4733 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4734 if (jump_func->value.pass_through.operation == NOP_EXPR)
4736 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4737 bp = bitpack_create (ob->main_stream);
4738 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4739 gcc_assert (!jump_func->value.pass_through.refdesc_decremented);
4740 streamer_write_bitpack (&bp);
4742 else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
4743 == tcc_unary)
4744 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4745 else
4747 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4748 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4750 break;
4751 case IPA_JF_ANCESTOR:
4752 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4753 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4754 bp = bitpack_create (ob->main_stream);
4755 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4756 bp_pack_value (&bp, jump_func->value.ancestor.keep_null, 1);
4757 streamer_write_bitpack (&bp);
4758 break;
4759 default:
4760 fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
4763 count = vec_safe_length (jump_func->agg.items);
4764 streamer_write_uhwi (ob, count);
4765 if (count)
4767 bp = bitpack_create (ob->main_stream);
4768 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4769 streamer_write_bitpack (&bp);
4772 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4774 stream_write_tree (ob, item->type, true);
4775 streamer_write_uhwi (ob, item->offset);
4776 streamer_write_uhwi (ob, item->jftype);
4777 switch (item->jftype)
4779 case IPA_JF_UNKNOWN:
4780 break;
4781 case IPA_JF_CONST:
4782 stream_write_tree (ob, item->value.constant, true);
4783 break;
4784 case IPA_JF_PASS_THROUGH:
4785 case IPA_JF_LOAD_AGG:
4786 streamer_write_uhwi (ob, item->value.pass_through.operation);
4787 streamer_write_uhwi (ob, item->value.pass_through.formal_id);
4788 if (TREE_CODE_CLASS (item->value.pass_through.operation)
4789 != tcc_unary)
4790 stream_write_tree (ob, item->value.pass_through.operand, true);
4791 if (item->jftype == IPA_JF_LOAD_AGG)
4793 stream_write_tree (ob, item->value.load_agg.type, true);
4794 streamer_write_uhwi (ob, item->value.load_agg.offset);
4795 bp = bitpack_create (ob->main_stream);
4796 bp_pack_value (&bp, item->value.load_agg.by_ref, 1);
4797 streamer_write_bitpack (&bp);
4799 break;
4800 default:
4801 fatal_error (UNKNOWN_LOCATION,
4802 "invalid jump function in LTO stream");
4806 bp = bitpack_create (ob->main_stream);
4807 if (jump_func->m_vr)
4808 jump_func->m_vr->streamer_write (ob);
4809 else
4811 bp_pack_value (&bp, false, 1);
4812 streamer_write_bitpack (&bp);
4816 /* Read in jump function JUMP_FUNC from IB. */
4818 static void
4819 ipa_read_jump_function (class lto_input_block *ib,
4820 struct ipa_jump_func *jump_func,
4821 struct cgraph_edge *cs,
4822 class data_in *data_in,
4823 bool prevails)
4825 enum jump_func_type jftype;
4826 enum tree_code operation;
4827 int i, count;
4828 int val = streamer_read_uhwi (ib);
4829 bool flag = val & 1;
4831 jftype = (enum jump_func_type) (val / 2);
4832 switch (jftype)
4834 case IPA_JF_UNKNOWN:
4835 ipa_set_jf_unknown (jump_func);
4836 break;
4837 case IPA_JF_CONST:
4839 tree t = stream_read_tree (ib, data_in);
4840 if (flag && prevails)
4841 t = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (t)), t);
4842 ipa_set_jf_constant (jump_func, t, cs);
4844 break;
4845 case IPA_JF_PASS_THROUGH:
4846 operation = (enum tree_code) streamer_read_uhwi (ib);
4847 if (operation == NOP_EXPR)
4849 int formal_id = streamer_read_uhwi (ib);
4850 struct bitpack_d bp = streamer_read_bitpack (ib);
4851 bool agg_preserved = bp_unpack_value (&bp, 1);
4852 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4854 else if (TREE_CODE_CLASS (operation) == tcc_unary)
4856 int formal_id = streamer_read_uhwi (ib);
4857 ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4859 else
4861 tree operand = stream_read_tree (ib, data_in);
4862 int formal_id = streamer_read_uhwi (ib);
4863 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4864 operation);
4866 break;
4867 case IPA_JF_ANCESTOR:
4869 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4870 int formal_id = streamer_read_uhwi (ib);
4871 struct bitpack_d bp = streamer_read_bitpack (ib);
4872 bool agg_preserved = bp_unpack_value (&bp, 1);
4873 bool keep_null = bp_unpack_value (&bp, 1);
4874 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved,
4875 keep_null);
4876 break;
4878 default:
4879 fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
4882 count = streamer_read_uhwi (ib);
4883 if (prevails)
4885 jump_func->agg.items = NULL;
4886 vec_safe_reserve (jump_func->agg.items, count, true);
4888 if (count)
4890 struct bitpack_d bp = streamer_read_bitpack (ib);
4891 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4893 for (i = 0; i < count; i++)
4895 struct ipa_agg_jf_item item;
4896 item.type = stream_read_tree (ib, data_in);
4897 item.offset = streamer_read_uhwi (ib);
4898 item.jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4900 switch (item.jftype)
4902 case IPA_JF_UNKNOWN:
4903 break;
4904 case IPA_JF_CONST:
4905 item.value.constant = stream_read_tree (ib, data_in);
4906 break;
4907 case IPA_JF_PASS_THROUGH:
4908 case IPA_JF_LOAD_AGG:
4909 operation = (enum tree_code) streamer_read_uhwi (ib);
4910 item.value.pass_through.operation = operation;
4911 item.value.pass_through.formal_id = streamer_read_uhwi (ib);
4912 if (TREE_CODE_CLASS (operation) == tcc_unary)
4913 item.value.pass_through.operand = NULL_TREE;
4914 else
4915 item.value.pass_through.operand = stream_read_tree (ib, data_in);
4916 if (item.jftype == IPA_JF_LOAD_AGG)
4918 struct bitpack_d bp;
4919 item.value.load_agg.type = stream_read_tree (ib, data_in);
4920 item.value.load_agg.offset = streamer_read_uhwi (ib);
4921 bp = streamer_read_bitpack (ib);
4922 item.value.load_agg.by_ref = bp_unpack_value (&bp, 1);
4924 break;
4925 default:
4926 fatal_error (UNKNOWN_LOCATION,
4927 "invalid jump function in LTO stream");
4929 if (prevails)
4930 jump_func->agg.items->quick_push (item);
4933 ipa_vr vr;
4934 vr.streamer_read (ib, data_in);
4935 if (vr.known_p ())
4937 if (prevails)
4938 ipa_set_jfunc_vr (jump_func, vr);
4940 else
4941 jump_func->m_vr = NULL;
4944 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4945 relevant to indirect inlining to OB. */
4947 static void
4948 ipa_write_indirect_edge_info (struct output_block *ob,
4949 struct cgraph_edge *cs)
4951 class cgraph_indirect_call_info *ii = cs->indirect_info;
4952 struct bitpack_d bp;
4954 streamer_write_hwi (ob, ii->param_index);
4955 bp = bitpack_create (ob->main_stream);
4956 bp_pack_value (&bp, ii->polymorphic, 1);
4957 bp_pack_value (&bp, ii->agg_contents, 1);
4958 bp_pack_value (&bp, ii->member_ptr, 1);
4959 bp_pack_value (&bp, ii->by_ref, 1);
4960 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
4961 bp_pack_value (&bp, ii->vptr_changed, 1);
4962 streamer_write_bitpack (&bp);
4963 if (ii->agg_contents || ii->polymorphic)
4964 streamer_write_hwi (ob, ii->offset);
4965 else
4966 gcc_assert (ii->offset == 0);
4968 if (ii->polymorphic)
4970 streamer_write_hwi (ob, ii->otr_token);
4971 stream_write_tree (ob, ii->otr_type, true);
4972 ii->context.stream_out (ob);
4976 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4977 relevant to indirect inlining from IB. */
4979 static void
4980 ipa_read_indirect_edge_info (class lto_input_block *ib,
4981 class data_in *data_in,
4982 struct cgraph_edge *cs,
4983 class ipa_node_params *info)
4985 class cgraph_indirect_call_info *ii = cs->indirect_info;
4986 struct bitpack_d bp;
4988 ii->param_index = (int) streamer_read_hwi (ib);
4989 bp = streamer_read_bitpack (ib);
4990 ii->polymorphic = bp_unpack_value (&bp, 1);
4991 ii->agg_contents = bp_unpack_value (&bp, 1);
4992 ii->member_ptr = bp_unpack_value (&bp, 1);
4993 ii->by_ref = bp_unpack_value (&bp, 1);
4994 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
4995 ii->vptr_changed = bp_unpack_value (&bp, 1);
4996 if (ii->agg_contents || ii->polymorphic)
4997 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4998 else
4999 ii->offset = 0;
5000 if (ii->polymorphic)
5002 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
5003 ii->otr_type = stream_read_tree (ib, data_in);
5004 ii->context.stream_in (ib, data_in);
5006 if (info && ii->param_index >= 0)
5008 if (ii->polymorphic)
5009 ipa_set_param_used_by_polymorphic_call (info,
5010 ii->param_index , true);
5011 ipa_set_param_used_by_indirect_call (info,
5012 ii->param_index, true);
5016 /* Stream out NODE info to OB. */
5018 static void
5019 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
5021 int node_ref;
5022 lto_symtab_encoder_t encoder;
5023 ipa_node_params *info = ipa_node_params_sum->get (node);
5024 int j;
5025 struct cgraph_edge *e;
5026 struct bitpack_d bp;
5028 encoder = ob->decl_state->symtab_node_encoder;
5029 node_ref = lto_symtab_encoder_encode (encoder, node);
5030 streamer_write_uhwi (ob, node_ref);
5032 streamer_write_uhwi (ob, ipa_get_param_count (info));
5033 for (j = 0; j < ipa_get_param_count (info); j++)
5034 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
5035 bp = bitpack_create (ob->main_stream);
5036 gcc_assert (info->analysis_done
5037 || ipa_get_param_count (info) == 0);
5038 gcc_assert (!info->node_enqueued);
5039 gcc_assert (!info->ipcp_orig_node);
5040 for (j = 0; j < ipa_get_param_count (info); j++)
5042 /* TODO: We could just not stream the bit in the undescribed case. */
5043 bool d = (ipa_get_controlled_uses (info, j) != IPA_UNDESCRIBED_USE)
5044 ? ipa_get_param_load_dereferenced (info, j) : true;
5045 bp_pack_value (&bp, d, 1);
5046 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
5048 streamer_write_bitpack (&bp);
5049 for (j = 0; j < ipa_get_param_count (info); j++)
5051 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
5052 stream_write_tree (ob, ipa_get_type (info, j), true);
5054 for (e = node->callees; e; e = e->next_callee)
5056 ipa_edge_args *args = ipa_edge_args_sum->get (e);
5058 if (!args)
5060 streamer_write_uhwi (ob, 0);
5061 continue;
5064 streamer_write_uhwi (ob,
5065 ipa_get_cs_argument_count (args) * 2
5066 + (args->polymorphic_call_contexts != NULL));
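/* The same tagging trick as for jump function types: e.g. three arguments
   with polymorphic call contexts are written as 3 * 2 + 1 = 7, and
   ipa_read_edge_info recovers contexts_computed = 7 & 1 and count = 7 / 2.  */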
5067 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
5069 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
5070 if (args->polymorphic_call_contexts != NULL)
5071 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
5074 for (e = node->indirect_calls; e; e = e->next_callee)
5076 ipa_edge_args *args = ipa_edge_args_sum->get (e);
5077 if (!args)
5078 streamer_write_uhwi (ob, 0);
5079 else
5081 streamer_write_uhwi (ob,
5082 ipa_get_cs_argument_count (args) * 2
5083 + (args->polymorphic_call_contexts != NULL));
5084 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
5086 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
5087 if (args->polymorphic_call_contexts != NULL)
5088 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
5091 ipa_write_indirect_edge_info (ob, e);
5095 /* Stream in edge E from IB. */
5097 static void
5098 ipa_read_edge_info (class lto_input_block *ib,
5099 class data_in *data_in,
5100 struct cgraph_edge *e, bool prevails)
5102 int count = streamer_read_uhwi (ib);
5103 bool contexts_computed = count & 1;
5105 count /= 2;
5106 if (!count)
5107 return;
5108 if (prevails
5109 && (e->possibly_call_in_translation_unit_p ()
5110 /* Also stream in jump functions to builtins in the hope that they
5111 will get fnspecs. */
5112 || fndecl_built_in_p (e->callee->decl, BUILT_IN_NORMAL)))
5114 ipa_edge_args *args = ipa_edge_args_sum->get_create (e);
5115 vec_safe_grow_cleared (args->jump_functions, count, true);
5116 if (contexts_computed)
5117 vec_safe_grow_cleared (args->polymorphic_call_contexts, count, true);
5118 for (int k = 0; k < count; k++)
5120 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5121 data_in, prevails);
5122 if (contexts_computed)
5123 ipa_get_ith_polymorhic_call_context (args, k)->stream_in
5124 (ib, data_in);
5127 else
5129 for (int k = 0; k < count; k++)
5131 struct ipa_jump_func dummy;
5132 ipa_read_jump_function (ib, &dummy, e,
5133 data_in, prevails);
5134 if (contexts_computed)
5136 class ipa_polymorphic_call_context ctx;
5137 ctx.stream_in (ib, data_in);
5143 /* Stream in NODE info from IB. */
5145 static void
5146 ipa_read_node_info (class lto_input_block *ib, struct cgraph_node *node,
5147 class data_in *data_in)
5149 int k;
5150 struct cgraph_edge *e;
5151 struct bitpack_d bp;
5152 bool prevails = node->prevailing_p ();
5153 ipa_node_params *info
5154 = prevails ? ipa_node_params_sum->get_create (node) : NULL;
5156 int param_count = streamer_read_uhwi (ib);
5157 if (prevails)
5159 ipa_alloc_node_params (node, param_count);
5160 for (k = 0; k < param_count; k++)
5161 (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
5162 if (ipa_get_param_count (info) != 0)
5163 info->analysis_done = true;
5164 info->node_enqueued = false;
5166 else
5167 for (k = 0; k < param_count; k++)
5168 streamer_read_uhwi (ib);
5170 bp = streamer_read_bitpack (ib);
5171 for (k = 0; k < param_count; k++)
5173 bool load_dereferenced = bp_unpack_value (&bp, 1);
5174 bool used = bp_unpack_value (&bp, 1);
5176 if (prevails)
5178 ipa_set_param_load_dereferenced (info, k, load_dereferenced);
5179 ipa_set_param_used (info, k, used);
5182 for (k = 0; k < param_count; k++)
5184 int nuses = streamer_read_hwi (ib);
5185 tree type = stream_read_tree (ib, data_in);
5187 if (prevails)
5189 ipa_set_controlled_uses (info, k, nuses);
5190 (*info->descriptors)[k].decl_or_type = type;
5193 for (e = node->callees; e; e = e->next_callee)
5194 ipa_read_edge_info (ib, data_in, e, prevails);
5195 for (e = node->indirect_calls; e; e = e->next_callee)
5197 ipa_read_edge_info (ib, data_in, e, prevails);
5198 ipa_read_indirect_edge_info (ib, data_in, e, info);
5202 /* Write jump functions for all functions in the current LTO partition. */
5204 void
5205 ipa_prop_write_jump_functions (void)
5207 struct output_block *ob;
5208 unsigned int count = 0;
5209 lto_symtab_encoder_iterator lsei;
5210 lto_symtab_encoder_t encoder;
5212 if (!ipa_node_params_sum || !ipa_edge_args_sum)
5213 return;
5215 ob = create_output_block (LTO_section_jump_functions);
5216 encoder = ob->decl_state->symtab_node_encoder;
5217 ob->symbol = NULL;
5218 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5219 lsei_next_function_in_partition (&lsei))
5221 cgraph_node *node = lsei_cgraph_node (lsei);
5222 if (node->has_gimple_body_p ()
5223 && ipa_node_params_sum->get (node) != NULL)
5224 count++;
5227 streamer_write_uhwi (ob, count);
5229 /* Process all of the functions. */
5230 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5231 lsei_next_function_in_partition (&lsei))
5233 cgraph_node *node = lsei_cgraph_node (lsei);
5234 if (node->has_gimple_body_p ()
5235 && ipa_node_params_sum->get (node) != NULL)
5236 ipa_write_node_info (ob, node);
5238 streamer_write_char_stream (ob->main_stream, 0);
5239 produce_asm (ob, NULL);
5240 destroy_output_block (ob);
5243 /* Read section in file FILE_DATA of length LEN with data DATA. */
5245 static void
5246 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
5247 size_t len)
5249 const struct lto_function_header *header =
5250 (const struct lto_function_header *) data;
5251 const int cfg_offset = sizeof (struct lto_function_header);
5252 const int main_offset = cfg_offset + header->cfg_size;
5253 const int string_offset = main_offset + header->main_size;
5254 class data_in *data_in;
5255 unsigned int i;
5256 unsigned int count;
5258 lto_input_block ib_main ((const char *) data + main_offset,
5259 header->main_size, file_data);
5261 data_in =
5262 lto_data_in_create (file_data, (const char *) data + string_offset,
5263 header->string_size, vNULL);
5264 count = streamer_read_uhwi (&ib_main);
5266 for (i = 0; i < count; i++)
5268 unsigned int index;
5269 struct cgraph_node *node;
5270 lto_symtab_encoder_t encoder;
5272 index = streamer_read_uhwi (&ib_main);
5273 encoder = file_data->symtab_node_encoder;
5274 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5275 index));
5276 gcc_assert (node->definition);
5277 ipa_read_node_info (&ib_main, node, data_in);
5279 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5280 len);
5281 lto_data_in_delete (data_in);
5284 /* Read ipcp jump functions. */
5286 void
5287 ipa_prop_read_jump_functions (void)
5289 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5290 struct lto_file_decl_data *file_data;
5291 unsigned int j = 0;
5293 ipa_check_create_node_params ();
5294 ipa_check_create_edge_args ();
5295 ipa_register_cgraph_hooks ();
5297 while ((file_data = file_data_vec[j++]))
5299 size_t len;
5300 const char *data
5301 = lto_get_summary_section_data (file_data, LTO_section_jump_functions,
5302 &len);
5303 if (data)
5304 ipa_prop_read_section (file_data, data, len);
5308 /* Return true if the IPA-CP transformation summary TS is non-NULL and contains
5309 useful info. */
5310 static bool
5311 useful_ipcp_transformation_info_p (ipcp_transformation *ts)
5313 if (!ts)
5314 return false;
5315 if (!vec_safe_is_empty (ts->m_agg_values)
5316 || !vec_safe_is_empty (ts->m_vr))
5317 return true;
5318 return false;
5321 /* Write into OB IPA-CP transformation summary TS describing NODE. */
5323 void
5324 write_ipcp_transformation_info (output_block *ob, cgraph_node *node,
5325 ipcp_transformation *ts)
5327 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
5328 int node_ref = lto_symtab_encoder_encode (encoder, node);
5329 streamer_write_uhwi (ob, node_ref);
5331 streamer_write_uhwi (ob, vec_safe_length (ts->m_agg_values));
5332 for (const ipa_argagg_value &av : ts->m_agg_values)
5334 struct bitpack_d bp;
5336 stream_write_tree (ob, av.value, true);
5337 streamer_write_uhwi (ob, av.unit_offset);
5338 streamer_write_uhwi (ob, av.index);
5340 bp = bitpack_create (ob->main_stream);
5341 bp_pack_value (&bp, av.by_ref, 1);
5342 bp_pack_value (&bp, av.killed, 1);
5343 streamer_write_bitpack (&bp);
5346 streamer_write_uhwi (ob, vec_safe_length (ts->m_vr));
5347 for (const ipa_vr &parm_vr : ts->m_vr)
5348 parm_vr.streamer_write (ob);
5351 /* Stream in the IPA-CP transformation summary for NODE from IB. */
5353 static void
5354 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
5355 data_in *data_in)
5357 unsigned int count, i;
5358 ipcp_transformation_initialize ();
5359 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
5361 count = streamer_read_uhwi (ib);
5362 if (count > 0)
5364 vec_safe_grow_cleared (ts->m_agg_values, count, true);
5365 for (i = 0; i < count; i++)
5367 ipa_argagg_value *av = &(*ts->m_agg_values)[i];
5369 av->value = stream_read_tree (ib, data_in);
5370 av->unit_offset = streamer_read_uhwi (ib);
5371 av->index = streamer_read_uhwi (ib);
5373 bitpack_d bp = streamer_read_bitpack (ib);
5374 av->by_ref = bp_unpack_value (&bp, 1);
5375 av->killed = bp_unpack_value (&bp, 1);
5379 count = streamer_read_uhwi (ib);
5380 if (count > 0)
5382 vec_safe_grow_cleared (ts->m_vr, count, true);
5383 for (i = 0; i < count; i++)
5385 ipa_vr *parm_vr;
5386 parm_vr = &(*ts->m_vr)[i];
5387 parm_vr->streamer_read (ib, data_in);
5392 /* Write IPA-CP transformation summaries of all nodes in the set. */
5394 void
5395 ipcp_write_transformation_summaries (void)
5397 struct output_block *ob;
5398 unsigned int count = 0;
5399 lto_symtab_encoder_t encoder;
5401 ob = create_output_block (LTO_section_ipcp_transform);
5402 encoder = ob->decl_state->symtab_node_encoder;
5403 ob->symbol = NULL;
5405 for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
5407 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
5408 cgraph_node *cnode = dyn_cast <cgraph_node *> (snode);
5409 if (!cnode)
5410 continue;
5411 ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
5412 if (useful_ipcp_transformation_info_p (ts)
5413 && lto_symtab_encoder_encode_body_p (encoder, cnode))
5414 count++;
5417 streamer_write_uhwi (ob, count);
5419 for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
5421 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
5422 cgraph_node *cnode = dyn_cast <cgraph_node *> (snode);
5423 if (!cnode)
5424 continue;
5425 ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
5426 if (useful_ipcp_transformation_info_p (ts)
5427 && lto_symtab_encoder_encode_body_p (encoder, cnode))
5428 write_ipcp_transformation_info (ob, cnode, ts);
5430 streamer_write_char_stream (ob->main_stream, 0);
5431 produce_asm (ob, NULL);
5432 destroy_output_block (ob);
5435 /* Read replacements section in file FILE_DATA of length LEN with data
5436 DATA. */
5438 static void
5439 read_replacements_section (struct lto_file_decl_data *file_data,
5440 const char *data,
5441 size_t len)
5443 const struct lto_function_header *header =
5444 (const struct lto_function_header *) data;
5445 const int cfg_offset = sizeof (struct lto_function_header);
5446 const int main_offset = cfg_offset + header->cfg_size;
5447 const int string_offset = main_offset + header->main_size;
5448 class data_in *data_in;
5449 unsigned int i;
5450 unsigned int count;
5452 lto_input_block ib_main ((const char *) data + main_offset,
5453 header->main_size, file_data);
5455 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5456 header->string_size, vNULL);
5457 count = streamer_read_uhwi (&ib_main);
5459 for (i = 0; i < count; i++)
5461 unsigned int index;
5462 struct cgraph_node *node;
5463 lto_symtab_encoder_t encoder;
5465 index = streamer_read_uhwi (&ib_main);
5466 encoder = file_data->symtab_node_encoder;
5467 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5468 index));
5469 read_ipcp_transformation_info (&ib_main, node, data_in);
5471 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
5472 len);
5473 lto_data_in_delete (data_in);
5476 /* Read IPA-CP transformation summaries. */
5478 void
5479 ipcp_read_transformation_summaries (void)
5481 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5482 struct lto_file_decl_data *file_data;
5483 unsigned int j = 0;
5485 while ((file_data = file_data_vec[j++]))
5487 size_t len;
5488 const char *data
5489 = lto_get_summary_section_data (file_data, LTO_section_ipcp_transform,
5490 &len);
5491 if (data)
5492 read_replacements_section (file_data, data, len);
5496 /* Adjust the aggregate replacements in TS to reflect any parameter removals
5497 which might have already taken place. If after adjustments there are no
5498 aggregate replacements left, m_agg_values will be set to NULL. In other
5499 cases, it may be shrunk. */
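/* A worked example of the remapping, with hypothetical indices: given three
   original parameters of which parameter 1 was removed,
   get_updated_indices yields new_indices = {0, -1, 1}.  Replacements for
   parameter 1 are dropped, and those for parameter 2 are re-indexed to 1,
   with the vector compacted and truncated accordingly.  */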
5501 static void
5502 adjust_agg_replacement_values (cgraph_node *node, ipcp_transformation *ts)
5504 clone_info *cinfo = clone_info::get (node);
5505 if (!cinfo || !cinfo->param_adjustments)
5506 return;
5508 auto_vec<int, 16> new_indices;
5509 cinfo->param_adjustments->get_updated_indices (&new_indices);
5510 bool removed_item = false;
5511 unsigned dst_index = 0;
5512 unsigned count = ts->m_agg_values->length ();
5513 for (unsigned i = 0; i < count; i++)
5515 ipa_argagg_value *v = &(*ts->m_agg_values)[i];
5516 gcc_checking_assert (v->index >= 0);
5518 int new_idx = -1;
5519 if ((unsigned) v->index < new_indices.length ())
5520 new_idx = new_indices[v->index];
5522 if (new_idx >= 0)
5524 v->index = new_idx;
5525 if (removed_item)
5526 (*ts->m_agg_values)[dst_index] = *v;
5527 dst_index++;
5529 else
5530 removed_item = true;
5533 if (dst_index == 0)
5535 ggc_free (ts->m_agg_values);
5536 ts->m_agg_values = NULL;
5538 else if (removed_item)
5539 ts->m_agg_values->truncate (dst_index);
5541 return;
5544 /* Dominator walker driving the ipcp modification phase. */
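/* For example (a hypothetical rewrite it performs): if IPA-CP determined
   that in this clone the aggregate passed as PARAM always contains 7 at the
   accessed offset, a load

     x_1 = param->field;

   is replaced in before_dom_children below with

     x_1 = 7;  */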
5546 class ipcp_modif_dom_walker : public dom_walker
5548 public:
5549 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
5550 vec<ipa_param_descriptor, va_gc> *descs,
5551 ipcp_transformation *ts, bool *sc)
5552 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5553 m_ts (ts), m_something_changed (sc) {}
5555 edge before_dom_children (basic_block) final override;
5556 bool cleanup_eh ()
5557 { return gimple_purge_all_dead_eh_edges (m_need_eh_cleanup); }
5559 private:
5560 struct ipa_func_body_info *m_fbi;
5561 vec<ipa_param_descriptor, va_gc> *m_descriptors;
5562 ipcp_transformation *m_ts;
5563 bool *m_something_changed;
5564 auto_bitmap m_need_eh_cleanup;
5567 edge
5568 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5570 gimple_stmt_iterator gsi;
5571 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5573 gimple *stmt = gsi_stmt (gsi);
5574 tree rhs, val, t;
5575 HOST_WIDE_INT bit_offset;
5576 poly_int64 size;
5577 int index;
5578 bool by_ref, vce;
5580 if (!gimple_assign_load_p (stmt))
5581 continue;
5582 rhs = gimple_assign_rhs1 (stmt);
5583 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5584 continue;
5586 vce = false;
5587 t = rhs;
5588 while (handled_component_p (t))
5590 /* V_C_E can do things like convert an array of integers to one
5591 bigger integer and similar things we do not handle below. */
5592 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5594 vce = true;
5595 break;
5597 t = TREE_OPERAND (t, 0);
5599 if (vce)
5600 continue;
5602 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
5603 &bit_offset, &size, &by_ref))
5604 continue;
5605 unsigned unit_offset = bit_offset / BITS_PER_UNIT;
5606 ipa_argagg_value_list avl (m_ts);
5607 tree v = avl.get_value (index, unit_offset, by_ref);
5609 if (!v
5610 || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v))), size))
5611 continue;
5613 gcc_checking_assert (is_gimple_ip_invariant (v));
5614 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v)))
5616 if (fold_convertible_p (TREE_TYPE (rhs), v))
5617 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v);
5618 else if (TYPE_SIZE (TREE_TYPE (rhs))
5619 == TYPE_SIZE (TREE_TYPE (v)))
5620 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v);
5621 else
5623 if (dump_file)
5625 fprintf (dump_file, " const ");
5626 print_generic_expr (dump_file, v);
5627 fprintf (dump_file, " can't be converted to type of ");
5628 print_generic_expr (dump_file, rhs);
5629 fprintf (dump_file, "\n");
5631 continue;
5634 else
5635 val = v;
5637 if (dump_file && (dump_flags & TDF_DETAILS))
5639 fprintf (dump_file, "Modifying stmt:\n ");
5640 print_gimple_stmt (dump_file, stmt, 0);
5642 gimple_assign_set_rhs_from_tree (&gsi, val);
5643 update_stmt (stmt);
5645 if (dump_file && (dump_flags & TDF_DETAILS))
5647 fprintf (dump_file, "into:\n ");
5648 print_gimple_stmt (dump_file, stmt, 0);
5649 fprintf (dump_file, "\n");
5652 *m_something_changed = true;
5653 if (maybe_clean_eh_stmt (stmt))
5654 bitmap_set_bit (m_need_eh_cleanup, bb->index);
5656 return NULL;
5659 /* If IPA-CP discovered a constant in parameter PARM at OFFSET of a given SIZE
5660 - whether passed by reference or not is given by BY_REF - return that
5661 constant. Otherwise return NULL_TREE. This is supposed to be used only
5662 after clone materialization and transformation is done (because it asserts
5663 that killed constants have been pruned). */
5665 tree
5666 ipcp_get_aggregate_const (struct function *func, tree parm, bool by_ref,
5667 HOST_WIDE_INT bit_offset, HOST_WIDE_INT bit_size)
5669 cgraph_node *node = cgraph_node::get (func->decl);
5670 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5672 if (!ts || !ts->m_agg_values)
5673 return NULL_TREE;
5675 int index = ts->get_param_index (func->decl, parm);
5676 if (index < 0)
5677 return NULL_TREE;
5679 ipa_argagg_value_list avl (ts);
5680 unsigned unit_offset = bit_offset / BITS_PER_UNIT;
5681 const ipa_argagg_value *av = avl.get_elt (index, unit_offset);
5682 if (!av || av->by_ref != by_ref)
5683 return NULL_TREE;
5684 gcc_assert (!av->killed);
5685 tree v = av->value;
5686 if (!v
5687 || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v))), bit_size))
5688 return NULL_TREE;
5690 return v;
5693 /* Return true if we have recorded VALUE and MASK about PARM.
5694 Set VALUE and MASK accordingly. */
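/* A sketch of the expected output, assuming irange_bitmask's convention
   that set mask bits mark unknown bits: if IPA-CP proved PARM is always a
   multiple of 4, the two low bits of *MASK come out clear (known) and the
   two low bits of *VALUE are zero, while the higher bits of *MASK are set.  */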
5696 bool
5697 ipcp_get_parm_bits (tree parm, tree *value, widest_int *mask)
5699 cgraph_node *cnode = cgraph_node::get (current_function_decl);
5700 ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
5701 if (!ts
5702 || vec_safe_length (ts->m_vr) == 0
5703 || !irange::supports_p (TREE_TYPE (parm)))
5704 return false;
5706 int i = ts->get_param_index (current_function_decl, parm);
5707 if (i < 0)
5708 return false;
5709 clone_info *cinfo = clone_info::get (cnode);
5710 if (cinfo && cinfo->param_adjustments)
5712 i = cinfo->param_adjustments->get_original_index (i);
5713 if (i < 0)
5714 return false;
5717 vec<ipa_vr, va_gc> &vr = *ts->m_vr;
5718 if (!vr[i].known_p ())
5719 return false;
5720 Value_Range tmp;
5721 vr[i].get_vrange (tmp);
5722 if (tmp.undefined_p () || tmp.varying_p ())
5723 return false;
5724 irange &r = as_a <irange> (tmp);
5725 irange_bitmask bm = r.get_bitmask ();
5726 *mask = widest_int::from (bm.mask (), TYPE_SIGN (TREE_TYPE (parm)));
5727 *value = wide_int_to_tree (TREE_TYPE (parm), bm.value ());
5728 return true;
5731 /* Update value range of formal parameters of NODE as described in TS. */
5733 static void
5734 ipcp_update_vr (struct cgraph_node *node, ipcp_transformation *ts)
5736 if (vec_safe_is_empty (ts->m_vr))
5737 return;
5738 const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
5739 unsigned count = vr.length ();
5740 if (!count)
5741 return;
5743 auto_vec<int, 16> new_indices;
5744 bool need_remapping = false;
5745 clone_info *cinfo = clone_info::get (node);
5746 if (cinfo && cinfo->param_adjustments)
5748 cinfo->param_adjustments->get_updated_indices (&new_indices);
5749 need_remapping = true;
5751 auto_vec <tree, 16> parm_decls;
5752 push_function_arg_decls (&parm_decls, node->decl);
5754 for (unsigned i = 0; i < count; ++i)
5756 tree parm;
5757 int remapped_idx;
5758 if (need_remapping)
5760 if (i >= new_indices.length ())
5761 continue;
5762 remapped_idx = new_indices[i];
5763 if (remapped_idx < 0)
5764 continue;
5766 else
5767 remapped_idx = i;
5769 parm = parm_decls[remapped_idx];
5771 gcc_checking_assert (parm);
5772 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5774 if (!ddef || !is_gimple_reg (parm))
5775 continue;
5777 if (vr[i].known_p ())
5779 Value_Range tmp;
5780 vr[i].get_vrange (tmp);
5782 if (!tmp.undefined_p () && !tmp.varying_p ())
5784 if (dump_file)
5786 fprintf (dump_file, "Setting value range of param %u "
5787 "(now %i) ", i, remapped_idx);
5788 tmp.dump (dump_file);
5789 fprintf (dump_file, "]\n");
5791 set_range_info (ddef, tmp);
5793 if (POINTER_TYPE_P (TREE_TYPE (parm))
5794 && opt_for_fn (node->decl, flag_ipa_bit_cp))
5796 irange &r = as_a<irange> (tmp);
5797 irange_bitmask bm = r.get_bitmask ();
5798 unsigned tem = bm.mask ().to_uhwi ();
5799 unsigned HOST_WIDE_INT bitpos = bm.value ().to_uhwi ();
5800 unsigned align = tem & -tem;
5801 unsigned misalign = bitpos & (align - 1);
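/* Worked example: if bm.mask () has its three low bits clear (those value
   bits are known) and bm.value () ends in ...010, then tem & -tem isolates
   the lowest unknown bit, giving align = 8, and misalign = 0b010 & 7 = 2,
   i.e. the pointer is known to be 2 modulo 8.  */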
5803 if (align > 1)
5805 if (dump_file)
5807 fprintf (dump_file,
5808 "Adjusting mask for param %u to ", i);
5809 print_hex (bm.mask (), dump_file);
5810 fprintf (dump_file, "\n");
5813 if (dump_file)
5814 fprintf (dump_file,
5815 "Adjusting align: %u, misalign: %u\n",
5816 align, misalign);
5818 unsigned old_align, old_misalign;
5819 struct ptr_info_def *pi = get_ptr_info (ddef);
5820 bool old_known = get_ptr_info_alignment (pi, &old_align,
5821 &old_misalign);
5823 if (old_known && old_align > align)
5825 if (dump_file)
5827 fprintf (dump_file,
5828 "But alignment was already %u.\n",
5829 old_align);
5830 if ((old_misalign & (align - 1)) != misalign)
5831 fprintf (dump_file,
5832 "old_misalign (%u) and misalign "
5833 "(%u) mismatch\n",
5834 old_misalign, misalign);
5836 continue;
5839 if (dump_file
5840 && old_known
5841 && ((misalign & (old_align - 1)) != old_misalign))
5842 fprintf (dump_file,
5843 "old_misalign (%u) and misalign (%u) "
5844 "mismatch\n",
5845 old_misalign, misalign);
5847 set_ptr_info_alignment (pi, align, misalign);
5850 else if (dump_file && INTEGRAL_TYPE_P (TREE_TYPE (parm)))
5852 irange &r = as_a<irange> (tmp);
5853 irange_bitmask bm = r.get_bitmask ();
5854 unsigned prec = TYPE_PRECISION (TREE_TYPE (parm));
5855 if (wi::ne_p (bm.mask (), wi::shwi (-1, prec)))
5857 fprintf (dump_file,
5858 "Adjusting mask for param %u to ", i);
5859 print_hex (bm.mask (), dump_file);
5860 fprintf (dump_file, "\n");
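/* Worked example for the alignment derivation above; the numbers are
   illustrative.  If the bitmask of a pointer parameter records that its low
   four bits are known and equal to 0x4, i.e. mask = 0xfffffff0 and
   value = 0x4, then

     tem      = 0xfffffff0
     align    = tem & -tem        == 0x10  (lowest set bit of the mask)
     misalign = 0x4 & (0x10 - 1)  == 0x4

   so the pointer is known to be congruent to 4 modulo 16, which is exactly
   what set_ptr_info_alignment records.  */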
/* IPCP transformation phase doing propagation of aggregate values.  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  struct ipa_func_body_info fbi;
  int param_count;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s\n",
             node->dump_name ());

  ipcp_transformation *ts = ipcp_get_transformation_summary (node);
  if (!ts
      || (vec_safe_is_empty (ts->m_agg_values)
          && vec_safe_is_empty (ts->m_vr)))
    return 0;

  ts->maybe_create_parm_idx_map (cfun->decl);
  ipcp_update_vr (node, ts);
  if (vec_safe_is_empty (ts->m_agg_values))
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;

  adjust_agg_replacement_values (node, ts);
  if (vec_safe_is_empty (ts->m_agg_values))
    {
      if (dump_file)
        fprintf (dump_file, "  All affected aggregate parameters were either "
                 "removed or converted into scalars, phase done.\n");
      return 0;
    }
  if (dump_file)
    {
      fprintf (dump_file, "     Aggregate replacements:");
      ipa_argagg_value_list avs (ts);
      avs.dump (dump_file);
    }

  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
  fbi.param_count = param_count;
  fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);

  vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
  vec_safe_grow_cleared (descriptors, param_count, true);
  ipa_populate_param_decls (node, *descriptors);
  bool modified_mem_access = false;
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker walker (&fbi, descriptors, ts, &modified_mem_access);
  walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  free_dominance_info (CDI_DOMINATORS);
  bool cfg_changed = walker.cleanup_eh ();

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();

  ts->remove_argaggs_if ([](const ipa_argagg_value &v)
    {
      return v.killed;
    });

  vec_free (descriptors);
  if (cfg_changed)
    delete_unreachable_blocks_update_callgraph (node, false);

  return modified_mem_access ? TODO_update_ssa_only_virtuals : 0;
}
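/* A hedged sketch of how the function above is reached: it is not called
   directly from this file but registered as the function_transform callback
   of the IPA-CP pass object (pass_ipa_cp in ipa-cp.cc), so the pass manager
   invokes it once per function body with cfun and current_function_decl set
   up.  Roughly, with the middle summary-streaming arguments elided:

     ipa_opt_pass_d (pass_data_ipa_cp, ctxt,
                     ipcp_generate_summary,    // generate_summary
                     ...                       // summary streaming hooks
                     ipcp_transform_function)  // function_transform
*/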
/* Record that current function return value range is VAL.  */

void
ipa_record_return_value_range (Value_Range val)
{
  cgraph_node *n = cgraph_node::get (current_function_decl);
  if (!ipa_return_value_sum)
    {
      if (!ipa_vr_hash_table)
        ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
      ipa_return_value_sum = new (ggc_alloc_no_dtor <ipa_return_value_sum_t> ())
        ipa_return_value_sum_t (symtab, true);
      ipa_return_value_sum->disable_insertion_hook ();
    }
  ipa_return_value_sum->get_create (n)->vr = ipa_get_value_range (val);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Recording return range ");
      val.dump (dump_file);
      fprintf (dump_file, "\n");
    }
}
/* Return true if value range of DECL is known and if so initialize RANGE.  */

bool
ipa_return_value_range (Value_Range &range, tree decl)
{
  cgraph_node *n = cgraph_node::get (decl);
  if (!n || !ipa_return_value_sum)
    return false;
  enum availability avail;
  n = n->ultimate_alias_target (&avail);
  if (avail < AVAIL_AVAILABLE)
    return false;
  if (n->decl != decl
      && !useless_type_conversion_p (TREE_TYPE (decl), TREE_TYPE (n->decl)))
    return false;
  ipa_return_value_summary *v = ipa_return_value_sum->get (n);
  if (!v)
    return false;
  v->vr->get_vrange (range);
  return true;
}
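/* Illustrative round trip through the two functions above; the surrounding
   code is hypothetical, only the two APIs are real.  A caller-side analysis
   that sees a call to DECL can retrieve whatever range the callee side
   recorded while it was the current function:

     // Callee side, VAL being the union of the ranges of all return
     // statements of the current function:
     ipa_record_return_value_range (val);

     // Caller side, DECL being the called function's declaration:
     Value_Range ret (TREE_TYPE (TREE_TYPE (decl)));
     if (ipa_return_value_range (ret, decl))
       {
         // RET now holds the recorded return value range of DECL.
       }
*/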
/* Reset all state within ipa-prop.cc so that we can rerun the compiler
   within the same process.  For use by toplev::finalize.  */

void
ipa_prop_cc_finalize (void)
{
  if (function_insertion_hook_holder)
    symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  function_insertion_hook_holder = NULL;

  if (ipa_edge_args_sum)
    ggc_delete (ipa_edge_args_sum);
  ipa_edge_args_sum = NULL;

  if (ipa_node_params_sum)
    ggc_delete (ipa_node_params_sum);
  ipa_node_params_sum = NULL;
}

#include "gt-ipa-prop.h"