/* Interprocedural analyses.
   Copyright (C) 2005-2021 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "calls.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-inline.h"
#include "ipa-fnsummary.h"
#include "gimple-pretty-print.h"
#include "ipa-utils.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"
#include "tree-cfgcleanup.h"
#include "options.h"
#include "symtab-clones.h"
#include "attr-fnspec.h"
#include "gimple-range.h"
/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;

function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;

/* Edge summary for IPA-CP edge information.  */
ipa_edge_args_sum_t *ipa_edge_args_sum;

/* Traits for a hash table for reusing already existing ipa_bits.  */

struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
{
  typedef ipa_bits *value_type;
  typedef ipa_bits *compare_type;
  static hashval_t
  hash (const ipa_bits *p)
  {
    hashval_t t = (hashval_t) p->value.to_shwi ();
    return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
  }
  static bool
  equal (const ipa_bits *a, const ipa_bits *b)
  {
    return a->value == b->value && a->mask == b->mask;
  }
  static const bool empty_zero_p = true;
  static void
  mark_empty (ipa_bits *&p)
  {
    p = NULL;
  }
  static bool
  is_empty (const ipa_bits *p)
  {
    return p == NULL;
  }
  static bool
  is_deleted (const ipa_bits *p)
  {
    return p == reinterpret_cast<const ipa_bits *> (1);
  }
  static void
  mark_deleted (ipa_bits *&p)
  {
    p = reinterpret_cast<ipa_bits *> (1);
  }
};
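/* Illustrative sketch (an editorial note, not part of the original sources):
   with the traits above, equal ipa_bits instances can be shared through a
   single find_slot lookup.  Assuming TABLE is an initialized
   hash_table<ipa_bit_ggc_hash_traits> and TMP is a stack ipa_bits holding the
   desired value/mask pair:

     ipa_bits **slot = TABLE->find_slot (&tmp, INSERT);
     if (!*slot)
       *slot = make_permanent_copy (&tmp);  // hypothetical helper
     return *slot;

   so every user of the same value/mask pair ends up pointing to one shared
   object instead of allocating a fresh one.  */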
/* Hash table for avoiding repeated allocations of equal ipa_bits.  */
static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;

/* Traits for a hash table for reusing value_ranges used for IPA.  Note that
   the equiv bitmap is not hashed and is expected to be NULL.  */

struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range *>
{
  typedef value_range *value_type;
  typedef value_range *compare_type;
  static hashval_t
  hash (const value_range *p)
  {
    inchash::hash hstate (p->kind ());
    inchash::add_expr (p->min (), hstate);
    inchash::add_expr (p->max (), hstate);
    return hstate.end ();
  }
  static bool
  equal (const value_range *a, const value_range *b)
  {
    return (a->equal_p (*b)
	    && types_compatible_p (a->type (), b->type ()));
  }
  static const bool empty_zero_p = true;
  static void
  mark_empty (value_range *&p)
  {
    p = NULL;
  }
  static bool
  is_empty (const value_range *p)
  {
    return p == NULL;
  }
  static bool
  is_deleted (const value_range *p)
  {
    return p == reinterpret_cast<const value_range *> (1);
  }
  static void
  mark_deleted (value_range *&p)
  {
    p = reinterpret_cast<value_range *> (1);
  }
};

/* Hash table for avoiding repeated allocations of equal value_ranges.  */
static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
/* Holders of ipa cgraph hooks: */
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");

/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize)
	 || !opt_for_fn (node->decl, flag_ipa_cp);
}
/* Return index of the formal whose tree is PTREE in the function which
   corresponds to DESCRIPTORS.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
			    tree ptree)
{
  int i, count;

  count = vec_safe_length (descriptors);
  for (i = 0; i < count; i++)
    if ((*descriptors)[i].decl_or_type == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in the function which
   corresponds to INFO.  */

int
ipa_get_param_decl_index (class ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}

/* Populate the param_decl fields in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor, va_gc> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl_or_type = parm;
      unsigned int cost = estimate_move_cost (TREE_TYPE (parm), true);
      descriptors[param_num].move_cost = cost;
      /* Watch overflow, move_cost is a bitfield.  */
      gcc_checking_assert (cost == descriptors[param_num].move_cost);
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;

  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Dump the declaration of the Ith formal parameter of the function
   corresponding to INFO to FILE.  Note there is no setter function as the
   descriptor array is built just once using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, class ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if ((*info->descriptors)[i].decl_or_type)
    {
      fprintf (file, " ");
      print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
    }
}
/* If necessary, allocate vector of parameter descriptors in info of NODE.
   Return true if they were allocated, false if not.  */

static bool
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  ipa_node_params *info = ipa_node_params_sum->get_create (node);

  if (!info->descriptors && param_count)
    {
      vec_safe_grow_cleared (info->descriptors, param_count, true);
      return true;
    }
  else
    return false;
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  ipa_node_params *info = ipa_node_params_sum->get_create (node);

  if (!info->descriptors
      && ipa_alloc_node_params (node, count_formal_params (node->decl)))
    ipa_populate_param_decls (node, *info->descriptors);
}
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  ipa_edge_args *args = ipa_edge_args_sum->get (cs);
  int count = ipa_get_cs_argument_count (args);

  for (int i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (args, i);
      type = jump_func->type;

      fprintf (f, " param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f, jump_func->value.pass_through.operand);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, " Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_ELT (*jump_func->agg.items, j, item)
	    {
	      fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      fprintf (f, "type: ");
	      print_generic_expr (f, item->type);
	      fprintf (f, ", ");
	      if (item->jftype == IPA_JF_PASS_THROUGH)
		fprintf (f, "PASS THROUGH: %d,",
			 item->value.pass_through.formal_id);
	      else if (item->jftype == IPA_JF_LOAD_AGG)
		{
		  fprintf (f, "LOAD AGG: %d",
			   item->value.pass_through.formal_id);
		  fprintf (f, " [offset: " HOST_WIDE_INT_PRINT_DEC ", by %s],",
			   item->value.load_agg.offset,
			   item->value.load_agg.by_ref ? "reference"
						       : "value");
		}

	      if (item->jftype == IPA_JF_PASS_THROUGH
		  || item->jftype == IPA_JF_LOAD_AGG)
		{
		  fprintf (f, " op %s",
			   get_tree_code_name (item->value.pass_through.operation));
		  if (item->value.pass_through.operation != NOP_EXPR)
		    {
		      fprintf (f, " ");
		      print_generic_expr (f, item->value.pass_through.operand);
		    }
		}
	      else if (item->jftype == IPA_JF_CONST)
		{
		  fprintf (f, "CONST: ");
		  print_generic_expr (f, item->value.constant);
		}
	      else if (item->jftype == IPA_JF_UNKNOWN)
		fprintf (f, "UNKNOWN: " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->type)));
	      fprintf (f, "\n");
	    }
	}

      class ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (args, i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, " Context: ");
	  ctx->dump (dump_file);
	}

      if (jump_func->bits)
	{
	  fprintf (f, " value: ");
	  print_hex (jump_func->bits->value, f);
	  fprintf (f, ", mask: ");
	  print_hex (jump_func->bits->mask, f);
	  fprintf (f, "\n");
	}
      else
	fprintf (f, " Unknown bits\n");

      if (jump_func->m_vr)
	{
	  fprintf (f, " VR ");
	  fprintf (f, "%s[",
		   (jump_func->m_vr->kind () == VR_ANTI_RANGE) ? "~" : "");
	  print_decs (wi::to_wide (jump_func->m_vr->min ()), f);
	  fprintf (f, ", ");
	  print_decs (wi::to_wide (jump_func->m_vr->max ()), f);
	  fprintf (f, "]\n");
	}
      else
	fprintf (f, " Unknown VR\n");
    }
}
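/* For orientation (an editorial example, not taken from an actual dump), the
   output of the function above for a call passing a constant and a simple
   pass-through argument might look roughly like:

     param 0: CONST: 4
     param 1: PASS THROUGH: 0, op nop_expr, agg_preserved

   with the aggregate items, known bits and value range printed after each
   entry when they are available.  */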
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, " Jump functions of caller %s:\n", node->dump_name ());
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      fprintf (f, " callsite %s -> %s : \n",
	       node->dump_name (),
	       cs->callee->dump_name ());
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	fprintf (f, " no arg info\n");
      else
	ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      class cgraph_indirect_call_info *ii;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, " indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by_value");
      else
	fprintf (f, " indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	fprintf (f, " no arg info\n");
      else
	ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    ipa_print_node_jump_functions (f, node);
}
/* Set JFUNC to be a jump function that carries no knowledge about the actual
   argument at all.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
}

/* Set DST to be a copy of another jump function SRC (to be used by jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && (TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL
	  || (TREE_CODE (TREE_OPERAND (constant, 0)) == VAR_DECL
	      && TREE_STATIC (TREE_OPERAND (constant, 0)))))
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}

/* Set JFUNC to be a simple pass-through jump function.  */
static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}

/* Set JFUNC to be a unary pass through jump function.  */

static void
ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type of the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */
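/* To make the three sections concrete, an illustrative C++ constructor (an
   editorial sketch, not taken from the sources) conceptually expands to:

     Derived::Derived () : Base ()    // 1) ancestor ctors run first
     {
       // 2) stores of the new VMT pointers of Derived happen here
       this->member.init ();          // 3) member ctors and user code follow,
       this->do_virtual_stuff ();     //    possibly using the new dynamic type
     }

   so a backward scan that starts in section 3 meets no ancestor ctor/dtor
   calls before it reaches the VMT stores of section 2.  */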
static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_ref_base_and_extent to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return true if the dynamic type may change
   between the beginning of the function and the point where CALL is invoked.

   Generally functions are not allowed to change the type of such instances,
   but they may call destructors.  We assume that methods cannot destroy the
   THIS pointer.  Also as a special case, constructors and destructors may
   change the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that would require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inline cdtor is actually working on ARG, but we don't have
     an easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and, once we know the type
     of the instance, match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call);
	       block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   so, return true.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (ipa_func_body_info *fbi, tree arg,
				       tree base, tree comp_type, gcall *call,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  if (fbi->aa_walk_budget == 0)
    return false;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  int walked
    = walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
			  &tci, NULL, NULL, fbi->aa_walk_budget);
  if (walked >= 0)
    fbi->aa_walk_budget -= walked;
  else
    fbi->aa_walk_budget = 0;

  if (walked >= 0 && !tci.type_maybe_changed)
    return false;

  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it may have, return true.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (ipa_func_body_info *fbi, tree arg, tree base,
		    tree comp_type, gcall *call,
		    HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (fbi, arg, base, comp_type,
						call, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (ipa_func_body_info *fbi, tree arg, tree comp_type,
			gcall *call)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (fbi, arg, arg, comp_type,
						call, 0);
}
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}

/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count, true);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but which does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  if (TREE_READONLY (base))
    return true;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  if (paa->parm_modified || fbi->aa_walk_budget == 0)
    return false;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget);
  if (walked < 0)
    {
      modified = true;
      fbi->aa_walk_budget = 0;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
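/* Editorial example (not from the sources): in a function like

     void f (struct S s)
     {
       use (s.i);        // load from PARM_DECL s; nothing could have
			 // modified it yet, so it is preserved
       scramble (&s);
       use (s.i);        // not preserved: the call may have written to s
     }

   the predicate above would accept the first load and reject the second,
   because walk_aliased_vdefs finds the intervening potential clobber of s.  */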
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
			    vec<ipa_param_descriptor, va_gc> *descriptors,
			    gimple *stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  if (paa->ref_modified || fbi->aa_walk_budget == 0)
    return false;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget);
  if (walked < 0)
    {
      modified = true;
      fbi->aa_walk_budget = 0;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (modified)
    paa->ref_modified = true;
  return !modified;
}
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm)))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  if (paa->pt_modified || fbi->aa_walk_budget == 0)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget);
  if (walked < 0)
    {
      fbi->aa_walk_budget = 0;
      modified = true;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
/* Return true if we can prove that OP is a memory reference loading
   data from an aggregate passed as a parameter.

   The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it
   returns false if it cannot prove that the value has not been modified before
   the load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will return true
   even if it cannot prove the value has not been modified; in that case it
   will store false to *GUARANTEED_UNMODIFIED, otherwise it will store true
   there.

   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If the function returns
   true, *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index,
   offset within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor, va_gc> *descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, poly_int64 *size_p,
			bool *by_ref_p, bool *guaranteed_unmodified)
{
  int index;
  HOST_WIDE_INT size;
  bool reverse;
  tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);

  if (!base)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  if (guaranteed_unmodified)
	    *guaranteed_unmodified = true;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	 void (*<T2e4>) (struct S *) D.1867;
	 struct S * p.1;

	 <bb 2>:
	 p.1_1 = p;
	 D.1867_2 = p.1_1->f;
	 D.1867_2 ();
	 gdp = &p;
	 */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0)
    {
      bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
      if (!data_preserved && !guaranteed_unmodified)
	return false;

      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      if (guaranteed_unmodified)
	*guaranteed_unmodified = data_preserved;
      return true;
    }
  return false;
}
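/* Editorial example (not from the sources): given

     int f (struct S *p)
     {
       return p->x;
     }

   the load p->x satisfies the predicate above with *BY_REF_P set to true,
   *INDEX_P set to the index of P and *OFFSET_P set to the bit offset of
   field x, provided nothing between the start of f and the load could have
   modified *p.  */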
/* If STMT is an assignment that loads a value from a parameter declaration,
   or from an aggregate passed as the parameter either by value or reference,
   return the index of the parameter in ipa_node_params.  Otherwise return -1.

   FBI holds gathered information about the function.  INFO describes
   parameters of the function, STMT is the assignment statement.  If it is a
   memory load from an aggregate, *OFFSET_P is filled with offset within the
   aggregate, and *BY_REF_P specifies whether the aggregate is passed by
   reference.  */

static int
load_from_unmodified_param_or_agg (struct ipa_func_body_info *fbi,
				   class ipa_node_params *info,
				   gimple *stmt,
				   HOST_WIDE_INT *offset_p,
				   bool *by_ref_p)
{
  int index = load_from_unmodified_param (fbi, info->descriptors, stmt);
  poly_int64 size;

  /* Load value from a parameter declaration.  */
  if (index >= 0)
    {
      *offset_p = -1;
      return index;
    }

  if (!gimple_assign_load_p (stmt))
    return -1;

  tree rhs = gimple_assign_rhs1 (stmt);

  /* Skip memory reference containing VIEW_CONVERT_EXPR.  */
  for (tree t = rhs; handled_component_p (t); t = TREE_OPERAND (t, 0))
    if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
      return -1;

  /* Skip memory reference containing bit-field.  */
  if (TREE_CODE (rhs) == BIT_FIELD_REF
      || contains_bitfld_component_ref_p (rhs))
    return -1;

  if (!ipa_load_from_parm_agg (fbi, info->descriptors, stmt, rhs, &index,
			       offset_p, &size, by_ref_p))
    return -1;

  gcc_assert (!maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (rhs))),
			 size));
  if (!*by_ref_p)
    {
      tree param_type = ipa_get_type (info, index);

      if (!param_type || !AGGREGATE_TYPE_P (param_type))
	return -1;
    }
  else if (TREE_THIS_VOLATILE (rhs))
    return -1;

  return index;
}
/* Walk pointer adjustments from OP (such as POINTER_PLUS and ADDR_EXPR)
   to find the original pointer.  Initialize RET to the pointer which results
   from the walk.
   If the offset is known, return true and initialize OFFSET_RET.  */

bool
unadjusted_ptr_and_unit_offset (tree op, tree *ret, poly_int64 *offset_ret)
{
  poly_int64 offset = 0;
  bool offset_known = true;
  int i;

  for (i = 0; i < param_ipa_jump_function_lookups; i++)
    {
      if (TREE_CODE (op) == ADDR_EXPR)
	{
	  poly_int64 extra_offset = 0;
	  tree base = get_addr_base_and_unit_offset (TREE_OPERAND (op, 0),
						     &offset);
	  if (!base)
	    {
	      base = get_base_address (TREE_OPERAND (op, 0));
	      if (TREE_CODE (base) != MEM_REF)
		break;
	      offset_known = false;
	    }
	  else
	    {
	      if (TREE_CODE (base) != MEM_REF)
		break;
	      offset += extra_offset;
	    }
	  op = TREE_OPERAND (base, 0);
	  if (mem_ref_offset (base).to_shwi (&extra_offset))
	    offset += extra_offset;
	  else
	    offset_known = false;
	}
      else if (TREE_CODE (op) == SSA_NAME
	       && !SSA_NAME_IS_DEFAULT_DEF (op))
	{
	  gimple *pstmt = SSA_NAME_DEF_STMT (op);

	  if (gimple_assign_single_p (pstmt))
	    op = gimple_assign_rhs1 (pstmt);
	  else if (is_gimple_assign (pstmt)
		   && gimple_assign_rhs_code (pstmt) == POINTER_PLUS_EXPR)
	    {
	      poly_int64 extra_offset = 0;
	      if (ptrdiff_tree_p (gimple_assign_rhs2 (pstmt),
				  &extra_offset))
		offset += extra_offset;
	      else
		offset_known = false;
	      op = gimple_assign_rhs1 (pstmt);
	    }
	  else
	    break;
	}
      else
	break;
    }
  *ret = op;
  *offset_ret = offset;
  return offset_known;
}
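/* Editorial example (not from the sources): for a GIMPLE sequence such as

     q_2 = p_1(D) + 4;
     _3 = &MEM[q_2 + 8];

   starting the walk above from _3 would return p_1(D) in *RET and 12 in
   *OFFSET_RET, having followed one ADDR_EXPR of a MEM_REF and one
   POINTER_PLUS_EXPR.  If any step had a non-constant offset, the function
   would still return the unadjusted pointer but report the offset as
   unknown by returning false.  */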
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
	int a.0;

	a.0_2 = a;
	bar (a.0_2);
      }

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
	int D.2064;

	D.2064_4 = a.1(D) + 4;
	bar (D.2064_4);
      }

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
	int a.0;
	int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);
      }

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);
     }

   INFO is the structure describing individual parameters as they are accessed
   in different stages of IPA optimizations.  PARMS_AINFO contains the
   information that is only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  class ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      switch (gimple_assign_rhs_class (stmt))
	{
	case GIMPLE_BINARY_RHS:
	  {
	    tree op2 = gimple_assign_rhs2 (stmt);
	    if (!is_gimple_ip_invariant (op2)
		|| ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
		     != tcc_comparison)
		    && !useless_type_conversion_p (TREE_TYPE (name),
						   TREE_TYPE (op1))))
	      return;

	    ipa_set_jf_arith_pass_through (jfunc, index, op2,
					   gimple_assign_rhs_code (stmt));
	    break;
	  }
	case GIMPLE_SINGLE_RHS:
	  {
	    bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
						       tc_ssa);
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	    break;
	  }
	case GIMPLE_UNARY_RHS:
	  if (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
	    ipa_set_jf_unary_pass_through (jfunc, index,
					   gimple_assign_rhs_code (stmt));
	default:;
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
  offset_int mem_offset;
  if (!base
      || TREE_CODE (base) != MEM_REF
      || !mem_ref_offset (base).is_constant (&mem_offset))
    return;
  offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);

  offset_int mem_offset;
  if (!expr
      || TREE_CODE (expr) != MEM_REF
      || !mem_ref_offset (expr).is_constant (&mem_offset))
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    class ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
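/* For reference (an editorial sketch, not from the sources), a C++ pointer to
   member function is typically lowered to a two-field record of roughly this
   shape:

     struct {
       void (T::*__pfn) ();   // pointer to method (or vtable offset)
       ptrdiff_t __delta;     // adjustment to apply to the this pointer
     };

   which is the two-field pattern the predicate above matches, with METHOD_PTR
   receiving the first field and DELTA the second.  */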
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement, and this statement is stored in
   *RHS_STMT.  Otherwise return RHS as it is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs, gimple **rhs_stmt)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
      *rhs_stmt = def_stmt;
    }
  return rhs;
}
/* Simple linked list, describing contents of an aggregate before call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;

  /* Type of the described part of the aggregate.  */
  tree type;

  /* Known constant value or jump function data describing contents.  */
  struct ipa_load_agg_data value;

  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};

/* Add an aggregate content item into a linked list of
   ipa_known_agg_contents_list structures, in which all elements
   are sorted in ascending order by offset.  */

static inline void
add_to_agg_contents_list (struct ipa_known_agg_contents_list **plist,
			  struct ipa_known_agg_contents_list *item)
{
  struct ipa_known_agg_contents_list *list = *plist;

  for (; list; list = list->next)
    {
      if (list->offset >= item->offset)
	break;

      plist = &list->next;
    }

  item->next = list;
  *plist = item;
}
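/* Editorial note: the insertion above keeps the list sorted by offset.  For
   instance, inserting an item at offset 32 into a list with items at offsets
   0 and 64 leaves the chain as 0 -> 32 -> 64, which lets the overlap check in
   clobber_by_agg_contents_list_p below stop at the first element whose offset
   is not below the new item's.  */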
/* Check whether a given aggregate content is clobbered by a certain element in
   a linked list of ipa_known_agg_contents_list.  */

static inline bool
clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list *list,
				struct ipa_known_agg_contents_list *item)
{
  for (; list; list = list->next)
    {
      if (list->offset >= item->offset)
	return list->offset < item->offset + item->size;

      if (list->offset + list->size > item->offset)
	return true;
    }

  return false;
}

/* Build aggregate jump function from LIST, assuming there are exactly
   VALUE_COUNT entries there and that the offset of the passed argument
   is ARG_OFFSET, and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int value_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_safe_reserve (jfunc->agg.items, value_count, true);
  for (; list; list = list->next)
    {
      struct ipa_agg_jf_item item;
      tree operand = list->value.pass_through.operand;

      if (list->value.pass_through.formal_id >= 0)
	{
	  /* Content value is derived from some formal parameter.  */
	  if (list->value.offset >= 0)
	    item.jftype = IPA_JF_LOAD_AGG;
	  else
	    item.jftype = IPA_JF_PASS_THROUGH;

	  item.value.load_agg = list->value;
	  if (operand)
	    item.value.pass_through.operand
	      = unshare_expr_without_location (operand);
	}
      else if (operand)
	{
	  /* Content value is known constant.  */
	  item.jftype = IPA_JF_CONST;
	  item.value.constant = unshare_expr_without_location (operand);
	}
      else
	continue;

      item.type = list->type;
      gcc_assert (tree_to_shwi (TYPE_SIZE (list->type)) == list->size);

      item.offset = list->offset - arg_offset;
      gcc_assert ((item.offset % BITS_PER_UNIT) == 0);

      jfunc->agg.items->quick_push (item);
    }
}
/* Given an assignment statement STMT, try to collect information into
   AGG_VALUE that will be used to construct a jump function for the RHS of the
   assignment, from which the content value of an aggregate part comes.

   Besides constant and simple pass-through jump functions, also try to
   identify whether it matches the following pattern that can be described by
   a load-value-from-aggregate jump function, which is a derivative of simple
   pass-through jump function.

     foo (int *p)
     {
       ...

       *(q_5 + 4) = *(p_3(D) + 28) op 1;
       bar (q_5);
     }

   Here the IPA_LOAD_AGG_DATA data structure is informative enough to describe
   constant, simple pass-through and load-value-from-aggregate.  If the value
   is constant, it will be kept in field OPERAND, and field FORMAL_ID is
   set to -1.  For simple pass-through and load-value-from-aggregate, field
   FORMAL_ID specifies the related formal parameter index, and field
   OFFSET can be used to distinguish them: -1 means simple pass-through,
   otherwise it means load-value-from-aggregate.  */

static void
analyze_agg_content_value (struct ipa_func_body_info *fbi,
			   struct ipa_load_agg_data *agg_value,
			   gimple *stmt)
{
  tree lhs = gimple_assign_lhs (stmt);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  enum tree_code code;
  int index = -1;

  /* Initialize jump function data for the aggregate part.  */
  memset (agg_value, 0, sizeof (*agg_value));
  agg_value->pass_through.operation = NOP_EXPR;
  agg_value->pass_through.formal_id = -1;
  agg_value->offset = -1;

  if (AGGREGATE_TYPE_P (TREE_TYPE (lhs)) /* TODO: Support aggregate type.  */
      || TREE_THIS_VOLATILE (lhs)
      || TREE_CODE (lhs) == BIT_FIELD_REF
      || contains_bitfld_component_ref_p (lhs))
    return;

  /* Skip SSA copies.  */
  while (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
    {
      if (TREE_CODE (rhs1) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (rhs1))
	break;

      stmt = SSA_NAME_DEF_STMT (rhs1);
      if (!is_gimple_assign (stmt))
	break;

      rhs1 = gimple_assign_rhs1 (stmt);
    }

  if (gphi *phi = dyn_cast<gphi *> (stmt))
    {
      /* Also special case like the following (a is a formal parameter):

	   _12 = *a_11(D).dim[0].stride;
	   ...
	   # iftmp.22_9 = PHI <_12(2), 1(3)>
	   ...
	   parm.6.dim[0].stride = iftmp.22_9;
	   ...
	   __x_MOD_foo (&parm.6, b_31(D));

	 The aggregate function describing parm.6.dim[0].stride is encoded as a
	 PASS-THROUGH jump function with ASSERT_EXPR operation with operand 1
	 (the constant from the PHI node).  */

      if (gimple_phi_num_args (phi) != 2)
	return;
      tree arg0 = gimple_phi_arg_def (phi, 0);
      tree arg1 = gimple_phi_arg_def (phi, 1);
      tree operand;

      if (is_gimple_ip_invariant (arg1))
	{
	  operand = arg1;
	  rhs1 = arg0;
	}
      else if (is_gimple_ip_invariant (arg0))
	{
	  operand = arg0;
	  rhs1 = arg1;
	}
      else
	return;

      rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
      if (!is_gimple_assign (stmt))
	return;

      code = ASSERT_EXPR;
      agg_value->pass_through.operand = operand;
    }
  else if (is_gimple_assign (stmt))
    {
      code = gimple_assign_rhs_code (stmt);
      switch (gimple_assign_rhs_class (stmt))
	{
	case GIMPLE_SINGLE_RHS:
	  if (is_gimple_ip_invariant (rhs1))
	    {
	      agg_value->pass_through.operand = rhs1;
	      return;
	    }
	  code = NOP_EXPR;
	  break;

	case GIMPLE_UNARY_RHS:
	  /* NOTE: A GIMPLE_UNARY_RHS operation might not be tcc_unary
	     (truth_not_expr is an example), and GIMPLE_BINARY_RHS does not
	     imply tcc_binary; this subtlety is somewhat misleading.

	     Since tcc_unary is widely used in IPA-CP code to check an
	     operation with one operand, here we only allow tcc_unary
	     operations to avoid possible problems.  Then we can use
	     (opclass == tcc_unary) or not to distinguish unary and binary.  */
	  if (TREE_CODE_CLASS (code) != tcc_unary || CONVERT_EXPR_CODE_P (code))
	    return;

	  rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
	  break;

	case GIMPLE_BINARY_RHS:
	  {
	    gimple *rhs1_stmt = stmt;
	    gimple *rhs2_stmt = stmt;
	    tree rhs2 = gimple_assign_rhs2 (stmt);

	    rhs1 = get_ssa_def_if_simple_copy (rhs1, &rhs1_stmt);
	    rhs2 = get_ssa_def_if_simple_copy (rhs2, &rhs2_stmt);

	    if (is_gimple_ip_invariant (rhs2))
	      {
		agg_value->pass_through.operand = rhs2;
		stmt = rhs1_stmt;
	      }
	    else if (is_gimple_ip_invariant (rhs1))
	      {
		if (TREE_CODE_CLASS (code) == tcc_comparison)
		  code = swap_tree_comparison (code);
		else if (!commutative_tree_code (code))
		  return;

		agg_value->pass_through.operand = rhs1;
		stmt = rhs2_stmt;
		rhs1 = rhs2;
	      }
	    else
	      return;

	    if (TREE_CODE_CLASS (code) != tcc_comparison
		&& !useless_type_conversion_p (TREE_TYPE (lhs),
					       TREE_TYPE (rhs1)))
	      return;
	  }
	  break;

	default:
	  return;
	}
    }
  else
    return;

  if (TREE_CODE (rhs1) != SSA_NAME)
    index = load_from_unmodified_param_or_agg (fbi, fbi->info, stmt,
					       &agg_value->offset,
					       &agg_value->by_ref);
  else if (SSA_NAME_IS_DEFAULT_DEF (rhs1))
    index = ipa_get_param_decl_index (fbi->info, SSA_NAME_VAR (rhs1));

  if (index >= 0)
    {
      if (agg_value->offset >= 0)
	agg_value->type = TREE_TYPE (rhs1);
      agg_value->pass_through.formal_id = index;
      agg_value->pass_through.operation = code;
    }
  else
    agg_value->pass_through.operand = NULL_TREE;
}
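/* Editorial example (not from the sources): for a caller fragment

     _1 = param_2(D) + 5;
     s.f = _1;
     callee (&s);

   the analysis above records, for the part of S corresponding to s.f, a
   simple pass-through with FORMAL_ID referring to param_2, operation
   PLUS_EXPR and operand 5 (OFFSET stays -1 because the source value is not
   itself loaded from an aggregate).  */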
1921 /* If STMT is a memory store to the object whose address is BASE, extract
1922 information (offset, size, and value) into CONTENT, and return true,
1923 otherwise we conservatively assume the whole object is modified with
1924 unknown content, and return false. CHECK_REF means that access to object
1925 is expected to be in form of MEM_REF expression. */
1927 static bool
1928 extract_mem_content (struct ipa_func_body_info *fbi,
1929 gimple *stmt, tree base, bool check_ref,
1930 struct ipa_known_agg_contents_list *content)
1932 HOST_WIDE_INT lhs_offset, lhs_size;
1933 bool reverse;
1935 if (!is_gimple_assign (stmt))
1936 return false;
1938 tree lhs = gimple_assign_lhs (stmt);
1939 tree lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset, &lhs_size,
1940 &reverse);
1941 if (!lhs_base)
1942 return false;
1944 if (check_ref)
1946 if (TREE_CODE (lhs_base) != MEM_REF
1947 || TREE_OPERAND (lhs_base, 0) != base
1948 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1949 return false;
1951 else if (lhs_base != base)
1952 return false;
1954 content->offset = lhs_offset;
1955 content->size = lhs_size;
1956 content->type = TREE_TYPE (lhs);
1957 content->next = NULL;
1959 analyze_agg_content_value (fbi, &content->value, stmt);
1960 return true;
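/* Hedged examples of the forms recognized above (names are
   hypothetical): with CHECK_REF set, stores through the pointer BASE
   such as

     MEM[(struct S *)base_1].f = t_2;

   qualify; with CHECK_REF clear, stores into the declaration BASE
   itself qualify, e.g. s.f = t_2.  */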
1963 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1964 in ARG is filled with constants or values that are derived from the caller's
1965 formal parameter in the way described by some kinds of jump functions. FBI
1966 is the context of the caller function for interprocedural analysis. ARG can
1967 either be an aggregate expression or a pointer to an aggregate. ARG_TYPE is
1968 the type of the aggregate, JFUNC is the jump function for the aggregate. */
1970 static void
1971 determine_known_aggregate_parts (struct ipa_func_body_info *fbi,
1972 gcall *call, tree arg,
1973 tree arg_type,
1974 struct ipa_jump_func *jfunc)
1976 struct ipa_known_agg_contents_list *list = NULL, *all_list = NULL;
1977 bitmap visited = NULL;
1978 int item_count = 0, value_count = 0;
1979 HOST_WIDE_INT arg_offset, arg_size;
1980 tree arg_base;
1981 bool check_ref, by_ref;
1982 ao_ref r;
1983 int max_agg_items = opt_for_fn (fbi->node->decl, param_ipa_max_agg_items);
1985 if (max_agg_items == 0)
1986 return;
1988 /* The function operates in three stages. First, we prepare check_ref, r,
1989 arg_base and arg_offset based on what is actually passed as the
1990 argument. */
1992 if (POINTER_TYPE_P (arg_type))
1994 by_ref = true;
1995 if (TREE_CODE (arg) == SSA_NAME)
1997 tree type_size;
1998 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type)))
1999 || !POINTER_TYPE_P (TREE_TYPE (arg)))
2000 return;
2001 check_ref = true;
2002 arg_base = arg;
2003 arg_offset = 0;
2004 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
2005 arg_size = tree_to_uhwi (type_size);
2006 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
2008 else if (TREE_CODE (arg) == ADDR_EXPR)
2010 bool reverse;
2012 arg = TREE_OPERAND (arg, 0);
2013 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
2014 &arg_size, &reverse);
2015 if (!arg_base)
2016 return;
2017 if (DECL_P (arg_base))
2019 check_ref = false;
2020 ao_ref_init (&r, arg_base);
2022 else
2023 return;
2025 else
2026 return;
2028 else
2030 bool reverse;
2032 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
2034 by_ref = false;
2035 check_ref = false;
2036 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
2037 &arg_size, &reverse);
2038 if (!arg_base)
2039 return;
2041 ao_ref_init (&r, arg);
2044 /* The second stage traverses the virtual SSA web backwards starting from the
2045 call statement, looking only at individual dominating virtual operands
2046 (whose definitions dominate the call).  As long as it is confident that the
2047 content of the aggregate is affected by such a definition, it builds a
2048 sorted linked list of ipa_known_agg_contents_list structures describing that. */
2050 for (tree dom_vuse = gimple_vuse (call);
2051 dom_vuse && fbi->aa_walk_budget > 0;)
2053 gimple *stmt = SSA_NAME_DEF_STMT (dom_vuse);
2055 if (gimple_code (stmt) == GIMPLE_PHI)
2057 dom_vuse = get_continuation_for_phi (stmt, &r, true,
2058 fbi->aa_walk_budget,
2059 &visited, false, NULL, NULL);
2060 continue;
2063 fbi->aa_walk_budget--;
2064 if (stmt_may_clobber_ref_p_1 (stmt, &r))
2066 struct ipa_known_agg_contents_list *content
2067 = XALLOCA (struct ipa_known_agg_contents_list);
2069 if (!extract_mem_content (fbi, stmt, arg_base, check_ref, content))
2070 break;
2072 /* Now we get a dominating virtual operand, and need to check
2073 whether its value is clobbered by any other dominating one. */
2074 if ((content->value.pass_through.formal_id >= 0
2075 || content->value.pass_through.operand)
2076 && !clobber_by_agg_contents_list_p (all_list, content))
2078 struct ipa_known_agg_contents_list *copy
2079 = XALLOCA (struct ipa_known_agg_contents_list);
2081 /* Add to the list consisting of only dominating virtual
2082 operands, whose definitions can finally reach the call. */
2083 add_to_agg_contents_list (&list, (*copy = *content, copy));
2085 if (++value_count == max_agg_items)
2086 break;
2089 /* Add to the list consisting of all dominating virtual operands. */
2090 add_to_agg_contents_list (&all_list, content);
2092 if (++item_count == 2 * max_agg_items)
2093 break;
2095 dom_vuse = gimple_vuse (stmt);
2098 if (visited)
2099 BITMAP_FREE (visited);
2101 /* The third stage just goes over the list and creates an appropriate vector of
2102 ipa_agg_jf_item structures out of it, of course only if there are
2103 any meaningful items to begin with. */
2105 if (value_count)
2107 jfunc->agg.by_ref = by_ref;
2108 build_agg_jump_func_from_list (list, value_count, arg_offset, jfunc);
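/* A sketch of the overall effect, assuming a hypothetical caller

     s.a = 1;
     s.b = param_3(D);
     bar (&s);

   budget permitting, the jump function for &s receives two aggregate
   items: a constant 1 at the offset of s.a and a simple pass-through
   of param_3 at the offset of s.b.  */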
2113 /* Return the Ith parameter type of the callee associated with the call graph
2114 edge E. */
2116 tree
2117 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
2119 int n;
2120 tree type = (e->callee
2121 ? TREE_TYPE (e->callee->decl)
2122 : gimple_call_fntype (e->call_stmt));
2123 tree t = TYPE_ARG_TYPES (type);
2125 for (n = 0; n < i; n++)
2127 if (!t)
2128 break;
2129 t = TREE_CHAIN (t);
2131 if (t)
2132 return TREE_VALUE (t);
2133 if (!e->callee)
2134 return NULL;
2135 t = DECL_ARGUMENTS (e->callee->decl);
2136 for (n = 0; n < i; n++)
2138 if (!t)
2139 return NULL;
2140 t = TREE_CHAIN (t);
2142 if (t)
2143 return TREE_TYPE (t);
2144 return NULL;
2147 /* Return ipa_bits with VALUE and MASK values, which can be either a newly
2148 allocated structure or a previously existing one shared with other jump
2149 functions and/or transformation summaries. */
2151 ipa_bits *
2152 ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
2154 ipa_bits tmp;
2155 tmp.value = value;
2156 tmp.mask = mask;
2158 ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
2159 if (*slot)
2160 return *slot;
2162 ipa_bits *res = ggc_alloc<ipa_bits> ();
2163 res->value = value;
2164 res->mask = mask;
2165 *slot = res;
2167 return res;
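/* Note on the hash-consing above: two requests with equal VALUE and
   MASK yield the same GC-allocated ipa_bits, so identical known-bits
   information can be compared by pointer within this table.  */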
2170 /* Assign to JF a pointer to an ipa_bits structure with VALUE and MASK.  Use a
2171 hash table in order to avoid creating multiple identical ipa_bits structures. */
2173 static void
2174 ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
2175 const widest_int &mask)
2177 jf->bits = ipa_get_ipa_bits_for_value (value, mask);
2180 /* Return a pointer to a value_range just like *TMP, but either find it in
2181 ipa_vr_hash_table or allocate it in GC memory. TMP->equiv must be NULL. */
2183 static value_range *
2184 ipa_get_value_range (value_range *tmp)
2186 value_range **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
2187 if (*slot)
2188 return *slot;
2190 value_range *vr = new (ggc_alloc<value_range> ()) value_range;
2191 *vr = *tmp;
2192 *slot = vr;
2194 return vr;
2197 /* Return a pointer to a value range consisting of KIND, MIN, MAX and an empty
2198 equiv set.  Use a hash table in order to avoid creating multiple identical
2199 copies of value_ranges. */
2201 static value_range *
2202 ipa_get_value_range (enum value_range_kind kind, tree min, tree max)
2204 value_range tmp (min, max, kind);
2205 return ipa_get_value_range (&tmp);
2208 /* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
2209 a NULL equiv bitmap.  Use a hash table in order to avoid creating multiple
2210 identical value_range structures. */
2212 static void
2213 ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_kind type,
2214 tree min, tree max)
2216 jf->m_vr = ipa_get_value_range (type, min, max);
2219 /* Assign to JF a pointer to a value_range just like TMP but either fetch a
2220 copy from ipa_vr_hash_table or allocate a new one in GC memory. */
2222 static void
2223 ipa_set_jfunc_vr (ipa_jump_func *jf, value_range *tmp)
2225 jf->m_vr = ipa_get_value_range (tmp);
2228 /* Compute jump function for all arguments of callsite CS and insert the
2229 information in the jump_functions array in the ipa_edge_args corresponding
2230 to this callsite. */
2232 static void
2233 ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
2234 struct cgraph_edge *cs)
2236 ipa_node_params *info = ipa_node_params_sum->get (cs->caller);
2237 ipa_edge_args *args = ipa_edge_args_sum->get_create (cs);
2238 gcall *call = cs->call_stmt;
2239 int n, arg_num = gimple_call_num_args (call);
2240 bool useful_context = false;
2241 value_range vr;
2243 if (arg_num == 0 || args->jump_functions)
2244 return;
2245 vec_safe_grow_cleared (args->jump_functions, arg_num, true);
2246 if (flag_devirtualize)
2247 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num, true);
2249 if (gimple_call_internal_p (call))
2250 return;
2251 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
2252 return;
2254 for (n = 0; n < arg_num; n++)
2256 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
2257 tree arg = gimple_call_arg (call, n);
2258 tree param_type = ipa_get_callee_param_type (cs, n);
2259 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
2261 tree instance;
2262 class ipa_polymorphic_call_context context (cs->caller->decl,
2263 arg, cs->call_stmt,
2264 &instance);
2265 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt,
2266 &fbi->aa_walk_budget);
2267 *ipa_get_ith_polymorhic_call_context (args, n) = context;
2268 if (!context.useless_p ())
2269 useful_context = true;
2272 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2274 bool addr_nonzero = false;
2275 bool strict_overflow = false;
2277 if (TREE_CODE (arg) == SSA_NAME
2278 && param_type
2279 && get_range_query (cfun)->range_of_expr (vr, arg)
2280 && vr.nonzero_p ())
2281 addr_nonzero = true;
2282 else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
2283 addr_nonzero = true;
2285 if (addr_nonzero)
2287 tree z = build_int_cst (TREE_TYPE (arg), 0);
2288 ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
2290 else
2291 gcc_assert (!jfunc->m_vr);
2293 else
2295 if (TREE_CODE (arg) == SSA_NAME
2296 && param_type
2297 && get_range_query (cfun)->range_of_expr (vr, arg)
2298 && !vr.undefined_p ())
2300 value_range resvr;
2301 range_fold_unary_expr (&resvr, NOP_EXPR, param_type,
2302 &vr, TREE_TYPE (arg));
2303 if (!resvr.undefined_p () && !resvr.varying_p ())
2304 ipa_set_jfunc_vr (jfunc, &resvr);
2305 else
2306 gcc_assert (!jfunc->m_vr);
2308 else
2309 gcc_assert (!jfunc->m_vr);
2312 if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
2313 && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
2315 if (TREE_CODE (arg) == SSA_NAME)
2316 ipa_set_jfunc_bits (jfunc, 0,
2317 widest_int::from (get_nonzero_bits (arg),
2318 TYPE_SIGN (TREE_TYPE (arg))));
2319 else
2320 ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
2322 else if (POINTER_TYPE_P (TREE_TYPE (arg)))
2324 unsigned HOST_WIDE_INT bitpos;
2325 unsigned align;
2327 get_pointer_alignment_1 (arg, &align, &bitpos);
2328 widest_int mask = wi::bit_and_not
2329 (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
2330 align / BITS_PER_UNIT - 1);
2331 widest_int value = bitpos / BITS_PER_UNIT;
2332 ipa_set_jfunc_bits (jfunc, value, mask);
2334 else
2335 gcc_assert (!jfunc->bits);
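/* Illustrative numbers for the pointer-alignment case above
   (hypothetical): if get_pointer_alignment_1 reports a 64-bit
   alignment with bit position 32, the recorded mask has its low three
   bits cleared (those bits are known) and the value records the known
   byte offset 4 within the 8-byte alignment.  */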
2337 if (is_gimple_ip_invariant (arg)
2338 || (VAR_P (arg)
2339 && is_global_var (arg)
2340 && TREE_READONLY (arg)))
2341 ipa_set_jf_constant (jfunc, arg, cs);
2342 else if (!is_gimple_reg_type (TREE_TYPE (arg))
2343 && TREE_CODE (arg) == PARM_DECL)
2345 int index = ipa_get_param_decl_index (info, arg);
2347 gcc_assert (index >= 0);
2348 /* Aggregate passed by value, check for pass-through, otherwise we
2349 will attempt to fill in aggregate contents later in this
2350 loop. */
2351 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
2353 ipa_set_jf_simple_pass_through (jfunc, index, false);
2354 continue;
2357 else if (TREE_CODE (arg) == SSA_NAME)
2359 if (SSA_NAME_IS_DEFAULT_DEF (arg))
2361 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
2362 if (index >= 0)
2364 bool agg_p;
2365 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
2366 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
2369 else
2371 gimple *stmt = SSA_NAME_DEF_STMT (arg);
2372 if (is_gimple_assign (stmt))
2373 compute_complex_assign_jump_func (fbi, info, jfunc,
2374 call, stmt, arg, param_type);
2375 else if (gimple_code (stmt) == GIMPLE_PHI)
2376 compute_complex_ancestor_jump_func (fbi, info, jfunc,
2377 call,
2378 as_a <gphi *> (stmt));
2382 /* If ARG is a pointer, we cannot use its type to determine the type of the
2383 aggregate passed (because type conversions are ignored in gimple).  Usually
2384 we can safely get the type from the function declaration, but in the case of
2385 K&R prototypes or variadic functions we can try our luck with the type of
2386 the pointer passed.  TODO: Since we look for the actual initialization of the
2387 memory object, we might do better to work out the type from the memory stores we find. */
2388 if (!param_type)
2389 param_type = TREE_TYPE (arg);
2391 if ((jfunc->type != IPA_JF_PASS_THROUGH
2392 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
2393 && (jfunc->type != IPA_JF_ANCESTOR
2394 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
2395 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
2396 || POINTER_TYPE_P (param_type)))
2397 determine_known_aggregate_parts (fbi, call, arg, param_type, jfunc);
2399 if (!useful_context)
2400 vec_free (args->polymorphic_call_contexts);
2403 /* Compute jump functions for all edges - both direct and indirect - outgoing
2404 from BB. */
2406 static void
2407 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
2409 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2410 int i;
2411 struct cgraph_edge *cs;
2413 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
2415 struct cgraph_node *callee = cs->callee;
2417 if (callee)
2419 callee = callee->ultimate_alias_target ();
2420 /* We do not need to bother analyzing calls to unknown functions
2421 unless they may become known during lto/whopr. */
2422 if (!callee->definition && !flag_lto
2423 && !gimple_call_fnspec (cs->call_stmt).known_p ())
2424 continue;
2426 ipa_compute_jump_functions_for_edge (fbi, cs);
2430 /* If STMT looks like a statement loading a value from a member pointer formal
2431 parameter, return that parameter and store the offset of the field to
2432 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2433 might be clobbered). If USE_DELTA, then we look for a use of the delta
2434 field rather than the pfn. */
2436 static tree
2437 ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
2438 HOST_WIDE_INT *offset_p)
2440 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2442 if (!gimple_assign_single_p (stmt))
2443 return NULL_TREE;
2445 rhs = gimple_assign_rhs1 (stmt);
2446 if (TREE_CODE (rhs) == COMPONENT_REF)
2448 ref_field = TREE_OPERAND (rhs, 1);
2449 rhs = TREE_OPERAND (rhs, 0);
2451 else
2452 ref_field = NULL_TREE;
2453 if (TREE_CODE (rhs) != MEM_REF)
2454 return NULL_TREE;
2455 rec = TREE_OPERAND (rhs, 0);
2456 if (TREE_CODE (rec) != ADDR_EXPR)
2457 return NULL_TREE;
2458 rec = TREE_OPERAND (rec, 0);
2459 if (TREE_CODE (rec) != PARM_DECL
2460 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2461 return NULL_TREE;
2462 ref_offset = TREE_OPERAND (rhs, 1);
2464 if (use_delta)
2465 fld = delta_field;
2466 else
2467 fld = ptr_field;
2468 if (offset_p)
2469 *offset_p = int_bit_position (fld);
2471 if (ref_field)
2473 if (integer_nonzerop (ref_offset))
2474 return NULL_TREE;
2475 return ref_field == fld ? rec : NULL_TREE;
2477 else
2478 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2479 : NULL_TREE;
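/* Illustrative match (hypothetical SSA names): for a member pointer
   parameter f, a load such as

     f$__pfn_24 = MEM[(struct *)&f + 4B];

   returns the PARM_DECL f with *OFFSET_P set to the bit position of
   the __pfn field, or of __delta when USE_DELTA is set.  */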
2482 /* Returns true iff T is an SSA_NAME defined by a statement. */
2484 static bool
2485 ipa_is_ssa_with_stmt_def (tree t)
2487 if (TREE_CODE (t) == SSA_NAME
2488 && !SSA_NAME_IS_DEFAULT_DEF (t))
2489 return true;
2490 else
2491 return false;
2494 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2495 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2496 indirect call graph edge.
2497 If POLYMORPHIC is true, record it as a destination of a polymorphic call. */
2499 static struct cgraph_edge *
2500 ipa_note_param_call (struct cgraph_node *node, int param_index,
2501 gcall *stmt, bool polymorphic)
2503 struct cgraph_edge *cs;
2505 cs = node->get_edge (stmt);
2506 cs->indirect_info->param_index = param_index;
2507 cs->indirect_info->agg_contents = 0;
2508 cs->indirect_info->member_ptr = 0;
2509 cs->indirect_info->guaranteed_unmodified = 0;
2510 ipa_node_params *info = ipa_node_params_sum->get (node);
2511 ipa_set_param_used_by_indirect_call (info, param_index, true);
2512 if (cs->indirect_info->polymorphic || polymorphic)
2513 ipa_set_param_used_by_polymorphic_call (info, param_index, true);
2514 return cs;
2517 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
2518 (described by INFO). PARMS_AINFO is a pointer to a vector containing
2519 intermediate information about each formal parameter. Currently it checks
2520 whether the call calls a pointer that is a formal parameter and if so, the
2521 parameter is marked with the called flag and an indirect call graph edge
2522 describing the call is created. This is very simple for ordinary pointers
2523 represented in SSA but not-so-nice when it comes to member pointers. The
2524 ugly part of this function does nothing more than trying to match the
2525 pattern of such a call. An example of such a pattern is the gimple dump
2526 below, the call is on the last line:
2528 <bb 2>:
2529 f$__delta_5 = f.__delta;
2530 f$__pfn_24 = f.__pfn;
2533 <bb 2>:
2534 f$__delta_5 = MEM[(struct *)&f];
2535 f$__pfn_24 = MEM[(struct *)&f + 4B];
2537 and a few lines below:
2539 <bb 5>
2540 D.2496_3 = (int) f$__pfn_24;
2541 D.2497_4 = D.2496_3 & 1;
2542 if (D.2497_4 != 0)
2543 goto <bb 3>;
2544 else
2545 goto <bb 4>;
2547 <bb 6>:
2548 D.2500_7 = (unsigned int) f$__delta_5;
2549 D.2501_8 = &S + D.2500_7;
2550 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2551 D.2503_10 = *D.2502_9;
2552 D.2504_12 = f$__pfn_24 + -1;
2553 D.2505_13 = (unsigned int) D.2504_12;
2554 D.2506_14 = D.2503_10 + D.2505_13;
2555 D.2507_15 = *D.2506_14;
2556 iftmp.11_16 = (String:: *) D.2507_15;
2558 <bb 7>:
2559 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2560 D.2500_19 = (unsigned int) f$__delta_5;
2561 D.2508_20 = &S + D.2500_19;
2562 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2564 Such patterns are results of simple calls to a member pointer:
2566 int doprinting (int (MyString::* f)(int) const)
2568 MyString S ("somestring");
2570 return (S.*f)(4);
2573 Moreover, the function also looks for called pointers loaded from aggregates
2574 passed by value or reference. */
2576 static void
2577 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
2578 tree target)
2580 class ipa_node_params *info = fbi->info;
2581 HOST_WIDE_INT offset;
2582 bool by_ref;
2584 if (SSA_NAME_IS_DEFAULT_DEF (target))
2586 tree var = SSA_NAME_VAR (target);
2587 int index = ipa_get_param_decl_index (info, var);
2588 if (index >= 0)
2589 ipa_note_param_call (fbi->node, index, call, false);
2590 return;
2593 int index;
2594 gimple *def = SSA_NAME_DEF_STMT (target);
2595 bool guaranteed_unmodified;
2596 if (gimple_assign_single_p (def)
2597 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2598 gimple_assign_rhs1 (def), &index, &offset,
2599 NULL, &by_ref, &guaranteed_unmodified))
2601 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2602 call, false);
2603 cs->indirect_info->offset = offset;
2604 cs->indirect_info->agg_contents = 1;
2605 cs->indirect_info->by_ref = by_ref;
2606 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
2607 return;
2610 /* Now we need to try to match the complex pattern of calling a member
2611 pointer. */
2612 if (gimple_code (def) != GIMPLE_PHI
2613 || gimple_phi_num_args (def) != 2
2614 || !POINTER_TYPE_P (TREE_TYPE (target))
2615 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2616 return;
2618 /* First, we need to check whether one of these is a load from a member
2619 pointer that is a parameter to this function. */
2620 tree n1 = PHI_ARG_DEF (def, 0);
2621 tree n2 = PHI_ARG_DEF (def, 1);
2622 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2623 return;
2624 gimple *d1 = SSA_NAME_DEF_STMT (n1);
2625 gimple *d2 = SSA_NAME_DEF_STMT (n2);
2627 tree rec;
2628 basic_block bb, virt_bb;
2629 basic_block join = gimple_bb (def);
2630 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2632 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2633 return;
2635 bb = EDGE_PRED (join, 0)->src;
2636 virt_bb = gimple_bb (d2);
2638 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2640 bb = EDGE_PRED (join, 1)->src;
2641 virt_bb = gimple_bb (d1);
2643 else
2644 return;
2646 /* Second, we need to check that the basic blocks are laid out in the way
2647 corresponding to the pattern. */
2649 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2650 || single_pred (virt_bb) != bb
2651 || single_succ (virt_bb) != join)
2652 return;
2654 /* Third, let's see that the branching is done depending on the least
2655 significant bit of the pfn. */
2657 gimple *branch = last_stmt (bb);
2658 if (!branch || gimple_code (branch) != GIMPLE_COND)
2659 return;
2661 if ((gimple_cond_code (branch) != NE_EXPR
2662 && gimple_cond_code (branch) != EQ_EXPR)
2663 || !integer_zerop (gimple_cond_rhs (branch)))
2664 return;
2666 tree cond = gimple_cond_lhs (branch);
2667 if (!ipa_is_ssa_with_stmt_def (cond))
2668 return;
2670 def = SSA_NAME_DEF_STMT (cond);
2671 if (!is_gimple_assign (def)
2672 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2673 || !integer_onep (gimple_assign_rhs2 (def)))
2674 return;
2676 cond = gimple_assign_rhs1 (def);
2677 if (!ipa_is_ssa_with_stmt_def (cond))
2678 return;
2680 def = SSA_NAME_DEF_STMT (cond);
2682 if (is_gimple_assign (def)
2683 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2685 cond = gimple_assign_rhs1 (def);
2686 if (!ipa_is_ssa_with_stmt_def (cond))
2687 return;
2688 def = SSA_NAME_DEF_STMT (cond);
2691 tree rec2;
2692 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2693 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2694 == ptrmemfunc_vbit_in_delta),
2695 NULL);
2696 if (rec != rec2)
2697 return;
2699 index = ipa_get_param_decl_index (info, rec);
2700 if (index >= 0
2701 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2703 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2704 call, false);
2705 cs->indirect_info->offset = offset;
2706 cs->indirect_info->agg_contents = 1;
2707 cs->indirect_info->member_ptr = 1;
2708 cs->indirect_info->guaranteed_unmodified = 1;
2711 return;
2714 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2715 object referenced in the expression is a formal parameter of the caller
2716 FBI->node (described by FBI->info), create a call note for the
2717 statement. */
2719 static void
2720 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2721 gcall *call, tree target)
2723 tree obj = OBJ_TYPE_REF_OBJECT (target);
2724 int index;
2725 HOST_WIDE_INT anc_offset;
2727 if (!flag_devirtualize)
2728 return;
2730 if (TREE_CODE (obj) != SSA_NAME)
2731 return;
2733 class ipa_node_params *info = fbi->info;
2734 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2736 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2737 return;
2739 anc_offset = 0;
2740 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2741 gcc_assert (index >= 0);
2742 if (detect_type_change_ssa (fbi, obj, obj_type_ref_class (target),
2743 call))
2744 return;
2746 else
2748 gimple *stmt = SSA_NAME_DEF_STMT (obj);
2749 tree expr;
2751 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2752 if (!expr)
2753 return;
2754 index = ipa_get_param_decl_index (info,
2755 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2756 gcc_assert (index >= 0);
2757 if (detect_type_change (fbi, obj, expr, obj_type_ref_class (target),
2758 call, anc_offset))
2759 return;
2762 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2763 call, true);
2764 class cgraph_indirect_call_info *ii = cs->indirect_info;
2765 ii->offset = anc_offset;
2766 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2767 ii->otr_type = obj_type_ref_class (target);
2768 ii->polymorphic = 1;
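/* Sketch of a matched case (hypothetical names): for a virtual call
   OBJ_TYPE_REF(_3;obj_2(D)->1) (obj_2(D)) where obj_2(D) is the
   default definition of a formal parameter, the note created above
   carries the parameter index, the OTR token and the class type of
   the OBJ_TYPE_REF.  */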
2771 /* Analyze a call statement CALL whether and how it utilizes formal parameters
2772 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
2773 containing intermediate information about each formal parameter. */
2775 static void
2776 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2778 tree target = gimple_call_fn (call);
2780 if (!target
2781 || (TREE_CODE (target) != SSA_NAME
2782 && !virtual_method_call_p (target)))
2783 return;
2785 struct cgraph_edge *cs = fbi->node->get_edge (call);
2786 /* If we previously turned the call into a direct call, there is
2787 no need to analyze. */
2788 if (cs && !cs->indirect_unknown_callee)
2789 return;
2791 if (cs->indirect_info->polymorphic && flag_devirtualize)
2793 tree instance;
2794 tree target = gimple_call_fn (call);
2795 ipa_polymorphic_call_context context (current_function_decl,
2796 target, call, &instance);
2798 gcc_checking_assert (cs->indirect_info->otr_type
2799 == obj_type_ref_class (target));
2800 gcc_checking_assert (cs->indirect_info->otr_token
2801 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2803 cs->indirect_info->vptr_changed
2804 = !context.get_dynamic_type (instance,
2805 OBJ_TYPE_REF_OBJECT (target),
2806 obj_type_ref_class (target), call,
2807 &fbi->aa_walk_budget);
2808 cs->indirect_info->context = context;
2811 if (TREE_CODE (target) == SSA_NAME)
2812 ipa_analyze_indirect_call_uses (fbi, call, target);
2813 else if (virtual_method_call_p (target))
2814 ipa_analyze_virtual_call_uses (fbi, call, target);
2818 /* Analyze the call statement STMT with respect to formal parameters (described
2819 in INFO) of the caller given by FBI->NODE.  Currently it only checks whether
2820 formal parameters are called. */
2822 static void
2823 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2825 if (is_gimple_call (stmt))
2826 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2829 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2830 If OP is a parameter declaration, mark it as used in the info structure
2831 passed in DATA. */
2833 static bool
2834 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2836 class ipa_node_params *info = (class ipa_node_params *) data;
2838 op = get_base_address (op);
2839 if (op
2840 && TREE_CODE (op) == PARM_DECL)
2842 int index = ipa_get_param_decl_index (info, op);
2843 gcc_assert (index >= 0);
2844 ipa_set_param_used (info, index, true);
2847 return false;
2850 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2851 the findings in various structures of the associated ipa_node_params
2852 structure, such as parameter flags, notes etc. FBI holds various data about
2853 the function being analyzed. */
2855 static void
2856 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2858 gimple_stmt_iterator gsi;
2859 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2861 gimple *stmt = gsi_stmt (gsi);
2863 if (is_gimple_debug (stmt))
2864 continue;
2866 ipa_analyze_stmt_uses (fbi, stmt);
2867 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2868 visit_ref_for_mod_analysis,
2869 visit_ref_for_mod_analysis,
2870 visit_ref_for_mod_analysis);
2872 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2873 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2874 visit_ref_for_mod_analysis,
2875 visit_ref_for_mod_analysis,
2876 visit_ref_for_mod_analysis);
2879 /* Return true if EXPR is a load from a dereference of SSA_NAME NAME. */
2881 static bool
2882 load_from_dereferenced_name (tree expr, tree name)
2884 tree base = get_base_address (expr);
2885 return (TREE_CODE (base) == MEM_REF
2886 && TREE_OPERAND (base, 0) == name);
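/* E.g. (illustrative) both *name_1 and MEM[(const char *)name_1 + 4B]
   are loads from a dereference of name_1.  */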
2889 /* Calculate controlled uses of parameters of NODE. */
2891 static void
2892 ipa_analyze_controlled_uses (struct cgraph_node *node)
2894 ipa_node_params *info = ipa_node_params_sum->get (node);
2896 for (int i = 0; i < ipa_get_param_count (info); i++)
2898 tree parm = ipa_get_param (info, i);
2899 int call_uses = 0;
2900 bool load_dereferenced = false;
2902 /* For SSA regs see if parameter is used. For non-SSA we compute
2903 the flag during modification analysis. */
2904 if (is_gimple_reg (parm))
2906 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2907 parm);
2908 if (ddef && !has_zero_uses (ddef))
2910 imm_use_iterator imm_iter;
2911 gimple *stmt;
2913 ipa_set_param_used (info, i, true);
2914 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, ddef)
2916 if (is_gimple_debug (stmt))
2917 continue;
2919 int all_stmt_uses = 0;
2920 use_operand_p use_p;
2921 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
2922 all_stmt_uses++;
2924 if (is_gimple_call (stmt))
2926 if (gimple_call_internal_p (stmt))
2928 call_uses = IPA_UNDESCRIBED_USE;
2929 break;
2931 int recognized_stmt_uses;
2932 if (gimple_call_fn (stmt) == ddef)
2933 recognized_stmt_uses = 1;
2934 else
2935 recognized_stmt_uses = 0;
2936 unsigned arg_count = gimple_call_num_args (stmt);
2937 for (unsigned i = 0; i < arg_count; i++)
2939 tree arg = gimple_call_arg (stmt, i);
2940 if (arg == ddef)
2941 recognized_stmt_uses++;
2942 else if (load_from_dereferenced_name (arg, ddef))
2944 load_dereferenced = true;
2945 recognized_stmt_uses++;
2949 if (recognized_stmt_uses != all_stmt_uses)
2951 call_uses = IPA_UNDESCRIBED_USE;
2952 break;
2954 if (call_uses >= 0)
2955 call_uses += all_stmt_uses;
2957 else if (gimple_assign_single_p (stmt))
2959 tree rhs = gimple_assign_rhs1 (stmt);
2960 if (all_stmt_uses != 1
2961 || !load_from_dereferenced_name (rhs, ddef))
2963 call_uses = IPA_UNDESCRIBED_USE;
2964 break;
2966 load_dereferenced = true;
2968 else
2970 call_uses = IPA_UNDESCRIBED_USE;
2971 break;
2975 else
2976 call_uses = 0;
2978 else
2979 call_uses = IPA_UNDESCRIBED_USE;
2980 ipa_set_controlled_uses (info, i, call_uses);
2981 ipa_set_param_load_dereferenced (info, i, load_dereferenced);
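/* Worked example (hypothetical): in

     static void foo (void (*cb) (int)) { cb (5); bar (cb); }

   both uses of cb appear directly in calls, either as the callee or as
   an argument, so cb ends up with two controlled uses; any use outside
   such positions degrades the count to IPA_UNDESCRIBED_USE.  */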
2985 /* Free stuff in BI. */
2987 static void
2988 free_ipa_bb_info (struct ipa_bb_info *bi)
2990 bi->cg_edges.release ();
2991 bi->param_aa_statuses.release ();
2994 /* Dominator walker driving the analysis. */
2996 class analysis_dom_walker : public dom_walker
2998 public:
2999 analysis_dom_walker (struct ipa_func_body_info *fbi)
3000 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
3002 virtual edge before_dom_children (basic_block);
3004 private:
3005 struct ipa_func_body_info *m_fbi;
3008 edge
3009 analysis_dom_walker::before_dom_children (basic_block bb)
3011 ipa_analyze_params_uses_in_bb (m_fbi, bb);
3012 ipa_compute_jump_functions_for_bb (m_fbi, bb);
3013 return NULL;
3016 /* Release body info FBI. */
3018 void
3019 ipa_release_body_info (struct ipa_func_body_info *fbi)
3021 int i;
3022 struct ipa_bb_info *bi;
3024 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
3025 free_ipa_bb_info (bi);
3026 fbi->bb_infos.release ();
3029 /* Initialize the array describing properties of formal parameters
3030 of NODE, analyze their uses and compute jump functions associated
3031 with actual arguments of calls from within NODE. */
3033 void
3034 ipa_analyze_node (struct cgraph_node *node)
3036 struct ipa_func_body_info fbi;
3037 class ipa_node_params *info;
3039 ipa_check_create_node_params ();
3040 ipa_check_create_edge_args ();
3041 info = ipa_node_params_sum->get_create (node);
3043 if (info->analysis_done)
3044 return;
3045 info->analysis_done = 1;
3047 if (ipa_func_spec_opts_forbid_analysis_p (node))
3049 for (int i = 0; i < ipa_get_param_count (info); i++)
3051 ipa_set_param_used (info, i, true);
3052 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
3054 return;
3057 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
3058 push_cfun (func);
3059 calculate_dominance_info (CDI_DOMINATORS);
3060 ipa_initialize_node_params (node);
3061 ipa_analyze_controlled_uses (node);
3063 fbi.node = node;
3064 fbi.info = info;
3065 fbi.bb_infos = vNULL;
3066 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
3067 fbi.param_count = ipa_get_param_count (info);
3068 fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);
3070 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
3072 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
3073 bi->cg_edges.safe_push (cs);
3076 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
3078 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
3079 bi->cg_edges.safe_push (cs);
3082 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
3084 ipa_release_body_info (&fbi);
3085 free_dominance_info (CDI_DOMINATORS);
3086 pop_cfun ();
3089 /* Update the jump functions associated with call graph edge E when the call
3090 graph edge CS is being inlined, assuming that E->caller is already (possibly
3091 indirectly) inlined into CS->callee and that E has not been inlined. */
3093 static void
3094 update_jump_functions_after_inlining (struct cgraph_edge *cs,
3095 struct cgraph_edge *e)
3097 ipa_edge_args *top = ipa_edge_args_sum->get (cs);
3098 ipa_edge_args *args = ipa_edge_args_sum->get (e);
3099 if (!args)
3100 return;
3101 int count = ipa_get_cs_argument_count (args);
3102 int i;
3104 for (i = 0; i < count; i++)
3106 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
3107 class ipa_polymorphic_call_context *dst_ctx
3108 = ipa_get_ith_polymorhic_call_context (args, i);
3110 if (dst->agg.items)
3112 struct ipa_agg_jf_item *item;
3113 int j;
3115 FOR_EACH_VEC_ELT (*dst->agg.items, j, item)
3117 int dst_fid;
3118 struct ipa_jump_func *src;
3120 if (item->jftype != IPA_JF_PASS_THROUGH
3121 && item->jftype != IPA_JF_LOAD_AGG)
3122 continue;
3124 dst_fid = item->value.pass_through.formal_id;
3125 if (!top || dst_fid >= ipa_get_cs_argument_count (top))
3127 item->jftype = IPA_JF_UNKNOWN;
3128 continue;
3131 item->value.pass_through.formal_id = -1;
3132 src = ipa_get_ith_jump_func (top, dst_fid);
3133 if (src->type == IPA_JF_CONST)
3135 if (item->jftype == IPA_JF_PASS_THROUGH
3136 && item->value.pass_through.operation == NOP_EXPR)
3138 item->jftype = IPA_JF_CONST;
3139 item->value.constant = src->value.constant.value;
3140 continue;
3143 else if (src->type == IPA_JF_PASS_THROUGH
3144 && src->value.pass_through.operation == NOP_EXPR)
3146 if (item->jftype == IPA_JF_PASS_THROUGH
3147 || !item->value.load_agg.by_ref
3148 || src->value.pass_through.agg_preserved)
3149 item->value.pass_through.formal_id
3150 = src->value.pass_through.formal_id;
3152 else if (src->type == IPA_JF_ANCESTOR)
3154 if (item->jftype == IPA_JF_PASS_THROUGH)
3156 if (!src->value.ancestor.offset)
3157 item->value.pass_through.formal_id
3158 = src->value.ancestor.formal_id;
3160 else if (src->value.ancestor.agg_preserved)
3162 gcc_checking_assert (item->value.load_agg.by_ref);
3164 item->value.pass_through.formal_id
3165 = src->value.ancestor.formal_id;
3166 item->value.load_agg.offset
3167 += src->value.ancestor.offset;
3171 if (item->value.pass_through.formal_id < 0)
3172 item->jftype = IPA_JF_UNKNOWN;
3176 if (!top)
3178 ipa_set_jf_unknown (dst);
3179 continue;
3182 if (dst->type == IPA_JF_ANCESTOR)
3184 struct ipa_jump_func *src;
3185 int dst_fid = dst->value.ancestor.formal_id;
3186 class ipa_polymorphic_call_context *src_ctx
3187 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
3189 /* Variable number of arguments can cause havoc if we try to access
3190 one that does not exist in the inlined edge. So make sure we
3191 don't. */
3192 if (dst_fid >= ipa_get_cs_argument_count (top))
3194 ipa_set_jf_unknown (dst);
3195 continue;
3198 src = ipa_get_ith_jump_func (top, dst_fid);
3200 if (src_ctx && !src_ctx->useless_p ())
3202 class ipa_polymorphic_call_context ctx = *src_ctx;
3204 /* TODO: Make type preserved safe WRT contexts. */
3205 if (!ipa_get_jf_ancestor_type_preserved (dst))
3206 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
3207 ctx.offset_by (dst->value.ancestor.offset);
3208 if (!ctx.useless_p ())
3210 if (!dst_ctx)
3212 vec_safe_grow_cleared (args->polymorphic_call_contexts,
3213 count, true);
3214 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3217 dst_ctx->combine_with (ctx);
3221 /* The parameter and argument in an ancestor jump function must be of pointer
3222 type, which means access to the aggregate must be by reference. */
3223 gcc_assert (!src->agg.items || src->agg.by_ref);
3225 if (src->agg.items && dst->value.ancestor.agg_preserved)
3227 struct ipa_agg_jf_item *item;
3228 int j;
3230 /* Currently we do not produce clobber aggregate jump functions,
3231 replace with merging when we do. */
3232 gcc_assert (!dst->agg.items);
3234 dst->agg.items = vec_safe_copy (src->agg.items);
3235 dst->agg.by_ref = src->agg.by_ref;
3236 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
3237 item->offset -= dst->value.ancestor.offset;
3240 if (src->type == IPA_JF_PASS_THROUGH
3241 && src->value.pass_through.operation == NOP_EXPR)
3243 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
3244 dst->value.ancestor.agg_preserved &=
3245 src->value.pass_through.agg_preserved;
3247 else if (src->type == IPA_JF_ANCESTOR)
3249 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
3250 dst->value.ancestor.offset += src->value.ancestor.offset;
3251 dst->value.ancestor.agg_preserved &=
3252 src->value.ancestor.agg_preserved;
3254 else
3255 ipa_set_jf_unknown (dst);
3257 else if (dst->type == IPA_JF_PASS_THROUGH)
3259 struct ipa_jump_func *src;
3260 /* We must check range due to calls with variable number of arguments
3261 and we cannot combine jump functions with operations. */
3262 if (dst->value.pass_through.operation == NOP_EXPR
3263 && (top && dst->value.pass_through.formal_id
3264 < ipa_get_cs_argument_count (top)))
3266 int dst_fid = dst->value.pass_through.formal_id;
3267 src = ipa_get_ith_jump_func (top, dst_fid);
3268 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
3269 class ipa_polymorphic_call_context *src_ctx
3270 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
3272 if (src_ctx && !src_ctx->useless_p ())
3274 class ipa_polymorphic_call_context ctx = *src_ctx;
3276 /* TODO: Make type preserved safe WRT contexts. */
3277 if (!ipa_get_jf_pass_through_type_preserved (dst))
3278 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
3279 if (!ctx.useless_p ())
3281 if (!dst_ctx)
3283 vec_safe_grow_cleared (args->polymorphic_call_contexts,
3284 count, true);
3285 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3287 dst_ctx->combine_with (ctx);
3290 switch (src->type)
3292 case IPA_JF_UNKNOWN:
3293 ipa_set_jf_unknown (dst);
3294 break;
3295 case IPA_JF_CONST:
3296 ipa_set_jf_cst_copy (dst, src);
3297 break;
3299 case IPA_JF_PASS_THROUGH:
3301 int formal_id = ipa_get_jf_pass_through_formal_id (src);
3302 enum tree_code operation;
3303 operation = ipa_get_jf_pass_through_operation (src);
3305 if (operation == NOP_EXPR)
3307 bool agg_p;
3308 agg_p = dst_agg_p
3309 && ipa_get_jf_pass_through_agg_preserved (src);
3310 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
3312 else if (TREE_CODE_CLASS (operation) == tcc_unary)
3313 ipa_set_jf_unary_pass_through (dst, formal_id, operation);
3314 else
3316 tree operand = ipa_get_jf_pass_through_operand (src);
3317 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
3318 operation);
3320 break;
3322 case IPA_JF_ANCESTOR:
3324 bool agg_p;
3325 agg_p = dst_agg_p
3326 && ipa_get_jf_ancestor_agg_preserved (src);
3327 ipa_set_ancestor_jf (dst,
3328 ipa_get_jf_ancestor_offset (src),
3329 ipa_get_jf_ancestor_formal_id (src),
3330 agg_p);
3331 break;
3333 default:
3334 gcc_unreachable ();
3337 if (src->agg.items
3338 && (dst_agg_p || !src->agg.by_ref))
3340 /* Currently we do not produce clobber aggregate jump
3341 functions, replace with merging when we do. */
3342 gcc_assert (!dst->agg.items);
3344 dst->agg.by_ref = src->agg.by_ref;
3345 dst->agg.items = vec_safe_copy (src->agg.items);
3348 else
3349 ipa_set_jf_unknown (dst);
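/* Composition sketch: when the jump function in TOP for the relevant
   position is a constant, a simple (NOP_EXPR) pass-through DST becomes
   a copy of that constant; when DST and the TOP function are both
   ancestor functions, DST takes the TOP function's formal_id and the
   sum of the two offsets.  */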
3354 /* If TARGET is an addr_expr of a function declaration, make it the
3355 (SPECULATIVE) destination of an indirect edge IE and return the edge.
3356 Otherwise, return NULL. */
3358 struct cgraph_edge *
3359 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
3360 bool speculative)
3362 struct cgraph_node *callee;
3363 bool unreachable = false;
3365 if (TREE_CODE (target) == ADDR_EXPR)
3366 target = TREE_OPERAND (target, 0);
3367 if (TREE_CODE (target) != FUNCTION_DECL)
3369 target = canonicalize_constructor_val (target, NULL);
3370 if (!target || TREE_CODE (target) != FUNCTION_DECL)
3372 /* Member pointer call that goes through a VMT lookup. */
3373 if (ie->indirect_info->member_ptr
3374 /* Or if target is not an invariant expression and we do not
3375 know whether it will evaluate to a function at runtime.
3376 This can happen when folding through &VAR, where &VAR
3377 is IP invariant, but VAR itself is not.
3379 TODO: Revisit this when GCC 5 is branched. It seems that
3380 member_ptr check is not needed and that we may try to fold
3381 the expression and see if VAR is readonly. */
3382 || !is_gimple_ip_invariant (target))
3384 if (dump_enabled_p ())
3386 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3387 "discovered direct call non-invariant %s\n",
3388 ie->caller->dump_name ());
3390 return NULL;
3394 if (dump_enabled_p ())
3396 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3397 "discovered direct call to non-function in %s, "
3398 "making it __builtin_unreachable\n",
3399 ie->caller->dump_name ());
3402 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3403 callee = cgraph_node::get_create (target);
3404 unreachable = true;
3406 else
3407 callee = cgraph_node::get (target);
3409 else
3410 callee = cgraph_node::get (target);
3412 /* Because may-edges are not explicitly represented and the vtable may be
3413 external, we may create the first reference to the object in the unit. */
3414 if (!callee || callee->inlined_to)
3417 /* We had better ensure we can refer to it.
3418 In the case of static functions we are out of luck, since we have
3419 already removed the body.  In the case of public functions we may
3420 or may not introduce the reference. */
3421 if (!canonicalize_constructor_val (target, NULL)
3422 || !TREE_PUBLIC (target))
3424 if (dump_file)
3425 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
3426 "(%s -> %s) but cannot refer to it. Giving up.\n",
3427 ie->caller->dump_name (),
3428 ie->callee->dump_name ());
3429 return NULL;
3431 callee = cgraph_node::get_create (target);
3434 /* If the edge is already speculative, verify that it agrees with this target. */
3435 if (speculative && ie->speculative)
3437 if (dump_file)
3439 cgraph_edge *e2 = ie->speculative_call_for_target (callee);
3440 if (!e2)
3442 if (dump_file)
3443 fprintf (dump_file, "ipa-prop: Discovered call to a "
3444 "speculative target (%s -> %s) but the call is "
3445 "already speculated to different target. "
3446 "Giving up.\n",
3447 ie->caller->dump_name (), callee->dump_name ());
3449 else
3451 if (dump_file)
3452 fprintf (dump_file,
3453 "ipa-prop: Discovered call to a speculative target "
3454 "(%s -> %s) this agree with previous speculation.\n",
3455 ie->caller->dump_name (), callee->dump_name ());
3458 return NULL;
3461 if (!dbg_cnt (devirt))
3462 return NULL;
3464 ipa_check_create_node_params ();
3466 /* We cannot make edges to inline clones.  It is a bug that someone removed
3467 the cgraph node too early. */
3468 gcc_assert (!callee->inlined_to);
3470 if (dump_file && !unreachable)
3472 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
3473 "(%s -> %s), for stmt ",
3474 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
3475 speculative ? "speculative" : "known",
3476 ie->caller->dump_name (),
3477 callee->dump_name ());
3478 if (ie->call_stmt)
3479 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
3480 else
3481 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
3483 if (dump_enabled_p ())
3485 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3486 "converting indirect call in %s to direct call to %s\n",
3487 ie->caller->dump_name (), callee->dump_name ());
3489 if (!speculative)
3491 struct cgraph_edge *orig = ie;
3492 ie = cgraph_edge::make_direct (ie, callee);
3493 /* If we resolved speculative edge the cost is already up to date
3494 for direct call (adjusted by inline_edge_duplication_hook). */
3495 if (ie == orig)
3497 ipa_call_summary *es = ipa_call_summaries->get (ie);
3498 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
3499 - eni_size_weights.call_cost);
3500 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
3501 - eni_time_weights.call_cost);
3504 else
3506 if (!callee->can_be_discarded_p ())
3508 cgraph_node *alias;
3509 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
3510 if (alias)
3511 callee = alias;
3513 /* make_speculative will update ie's cost to direct call cost. */
3514 ie = ie->make_speculative
3515 (callee, ie->count.apply_scale (8, 10));
3518 return ie;
3521 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
3522 CONSTRUCTOR and return it. Return NULL if the search fails for some
3523 reason. */
3525 static tree
3526 find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
3528 tree type = TREE_TYPE (constructor);
3529 if (TREE_CODE (type) != ARRAY_TYPE
3530 && TREE_CODE (type) != RECORD_TYPE)
3531 return NULL;
3533 unsigned ix;
3534 tree index, val;
3535 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
3537 HOST_WIDE_INT elt_offset;
3538 if (TREE_CODE (type) == ARRAY_TYPE)
3540 offset_int off;
3541 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
3542 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3544 if (index)
3546 if (TREE_CODE (index) == RANGE_EXPR)
3547 off = wi::to_offset (TREE_OPERAND (index, 0));
3548 else
3549 off = wi::to_offset (index);
3550 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
3552 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
3553 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3554 off = wi::sext (off - wi::to_offset (low_bound),
3555 TYPE_PRECISION (TREE_TYPE (index)));
3557 off *= wi::to_offset (unit_size);
3558 /* ??? Handle more than just the first index of a
3559 RANGE_EXPR. */
3561 else
3562 off = wi::to_offset (unit_size) * ix;
3564 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
3565 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
3566 continue;
3567 elt_offset = off.to_shwi ();
3569 else if (TREE_CODE (type) == RECORD_TYPE)
3571 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
3572 if (DECL_BIT_FIELD (index))
3573 continue;
3574 elt_offset = int_bit_position (index);
3576 else
3577 gcc_unreachable ();
3579 if (elt_offset > req_offset)
3580 return NULL;
3582 if (TREE_CODE (val) == CONSTRUCTOR)
3583 return find_constructor_constant_at_offset (val,
3584 req_offset - elt_offset);
3586 if (elt_offset == req_offset
3587 && is_gimple_reg_type (TREE_TYPE (val))
3588 && is_gimple_ip_invariant (val))
3589 return val;
3591 return NULL;
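/* Illustrative lookup (hypothetical): for

     static const struct { int a; int b; } s = { 1, 2 };

   a REQ_OFFSET equal to the bit position of b yields the INTEGER_CST 2,
   recursing into nested CONSTRUCTORs where necessary.  */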
3594 /* Check whether SCALAR could be used to look up an aggregate interprocedural
3595 invariant from a static constructor and if so, return it. Otherwise return
3596 NULL. */
3598 static tree
3599 ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
3601 if (by_ref)
3603 if (TREE_CODE (scalar) != ADDR_EXPR)
3604 return NULL;
3605 scalar = TREE_OPERAND (scalar, 0);
3608 if (!VAR_P (scalar)
3609 || !is_global_var (scalar)
3610 || !TREE_READONLY (scalar)
3611 || !DECL_INITIAL (scalar)
3612 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
3613 return NULL;
3615 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
3618 /* Retrieve a value from AGG, a set of known offsets/values for an aggregate,
3619 or from the static initializer of SCALAR (which can be NULL) at the given
3620 OFFSET; return NULL if there is none.  BY_REF specifies whether the value has to be
3621 passed by reference or by value. If FROM_GLOBAL_CONSTANT is non-NULL, then
3622 the boolean it points to is set to true if the value comes from an
3623 initializer of a constant. */
3625 tree
3626 ipa_find_agg_cst_for_param (const ipa_agg_value_set *agg, tree scalar,
3627 HOST_WIDE_INT offset, bool by_ref,
3628 bool *from_global_constant)
3630 struct ipa_agg_value *item;
3631 int i;
3633 if (scalar)
3635 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
3636 if (res)
3638 if (from_global_constant)
3639 *from_global_constant = true;
3640 return res;
3644 if (!agg
3645 || by_ref != agg->by_ref)
3646 return NULL;
3648 FOR_EACH_VEC_ELT (agg->items, i, item)
3649 if (item->offset == offset)
3651 /* Currently we do not have clobber values, return NULL for them once
3652 we do. */
3653 gcc_checking_assert (is_gimple_ip_invariant (item->value));
3654 if (from_global_constant)
3655 *from_global_constant = false;
3656 return item->value;
3658 return NULL;
3661 /* Remove a reference to SYMBOL from the list of references of a node given by
3662 reference description RDESC. Return true if the reference has been
3663 successfully found and removed. */
3665 static bool
3666 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
3668 struct ipa_ref *to_del;
3669 struct cgraph_edge *origin;
3671 origin = rdesc->cs;
3672 if (!origin)
3673 return false;
3674 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3675 origin->lto_stmt_uid);
3676 if (!to_del)
3677 return false;
3679 to_del->remove_reference ();
3680 if (dump_file)
3681 fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
3682 origin->caller->dump_name (), symbol->dump_name ());
3683 return true;
3686 /* If JFUNC has a reference description with refcount different from
3687 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3688 NULL. JFUNC must be a constant jump function. */
3690 static struct ipa_cst_ref_desc *
3691 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3693 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3694 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3695 return rdesc;
3696 else
3697 return NULL;
3700 /* If the value of constant jump function JFUNC is an address of a function
3701 or variable declaration, return the associated symtab node.  Otherwise
3702 return NULL. */
3704 static symtab_node *
3705 symtab_node_for_jfunc (struct ipa_jump_func *jfunc)
3707 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3708 tree cst = ipa_get_jf_constant (jfunc);
3709 if (TREE_CODE (cst) != ADDR_EXPR
3710 || (TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL
3711 && TREE_CODE (TREE_OPERAND (cst, 0)) != VAR_DECL))
3712 return NULL;
3714 return symtab_node::get (TREE_OPERAND (cst, 0));
3718 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3719 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3720 the edge specified in the rdesc. Return false if either the symbol or the
3721 reference could not be found, otherwise return true. */
3723 static bool
3724 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3726 struct ipa_cst_ref_desc *rdesc;
3727 if (jfunc->type == IPA_JF_CONST
3728 && (rdesc = jfunc_rdesc_usable (jfunc))
3729 && --rdesc->refcount == 0)
3731 symtab_node *symbol = symtab_node_for_jfunc (jfunc);
3732 if (!symbol)
3733 return false;
3735 return remove_described_reference (symbol, rdesc);
3737 return true;
3740 /* Try to find a destination for indirect edge IE that corresponds to a simple
3741 call or a call of a member function pointer and where the destination is a
3742 pointer formal parameter described by jump function JFUNC. TARGET_TYPE is
3743 the type of the parameter to which the result of JFUNC is passed. If it can
3744 be determined, return the newly direct edge, otherwise return NULL.
3745 NEW_ROOT and NEW_ROOT_INFO are the node and its info that JFUNC lattices are
3746 relative to. */
3748 static struct cgraph_edge *
3749 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3750 struct ipa_jump_func *jfunc, tree target_type,
3751 struct cgraph_node *new_root,
3752 class ipa_node_params *new_root_info)
3754 struct cgraph_edge *cs;
3755 tree target;
3756 bool agg_contents = ie->indirect_info->agg_contents;
3757 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
3758 if (agg_contents)
3760 bool from_global_constant;
3761 ipa_agg_value_set agg = ipa_agg_value_set_from_jfunc (new_root_info,
3762 new_root,
3763 &jfunc->agg);
3764 target = ipa_find_agg_cst_for_param (&agg, scalar,
3765 ie->indirect_info->offset,
3766 ie->indirect_info->by_ref,
3767 &from_global_constant);
3768 agg.release ();
3769 if (target
3770 && !from_global_constant
3771 && !ie->indirect_info->guaranteed_unmodified)
3772 return NULL;
3774 else
3775 target = scalar;
3776 if (!target)
3777 return NULL;
3778 cs = ipa_make_edge_direct_to_target (ie, target);
3780 if (cs && !agg_contents)
3782 bool ok;
3783 gcc_checking_assert (cs->callee
3784 && (cs != ie
3785 || jfunc->type != IPA_JF_CONST
3786 || !symtab_node_for_jfunc (jfunc)
3787 || cs->callee == symtab_node_for_jfunc (jfunc)));
3788 ok = try_decrement_rdesc_refcount (jfunc);
3789 gcc_checking_assert (ok);
3792 return cs;
3795 /* Return the target to be used in cases of impossible devirtualization. IE
3796 and target (the latter can be NULL) are dumped when dumping is enabled. */
3798 tree
3799 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3801 if (dump_file)
3803 if (target)
3804 fprintf (dump_file,
3805 "Type inconsistent devirtualization: %s->%s\n",
3806 ie->caller->dump_name (),
3807 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3808 else
3809 fprintf (dump_file,
3810 "No devirtualization target in %s\n",
3811 ie->caller->dump_name ());
3813 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3814 cgraph_node::get_create (new_target);
3815 return new_target;
3818 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3819 call based on a formal parameter which is described by jump function JFUNC
3820 and if it can be determined, make it direct and return the direct edge.
3821 Otherwise, return NULL. CTX describes the polymorphic context that the
3822 parameter the call is based on brings along with it. NEW_ROOT and
3823 NEW_ROOT_INFO are the node and its info that JFUNC lattices are relative
3824 to. */
3826 static struct cgraph_edge *
3827 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3828 struct ipa_jump_func *jfunc,
3829 class ipa_polymorphic_call_context ctx,
3830 struct cgraph_node *new_root,
3831 class ipa_node_params *new_root_info)
3833 tree target = NULL;
3834 bool speculative = false;
3836 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
3837 return NULL;
3839 gcc_assert (!ie->indirect_info->by_ref);
3841 /* Try to do lookup via known virtual table pointer value. */
3842 if (!ie->indirect_info->vptr_changed
3843 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
3845 tree vtable;
3846 unsigned HOST_WIDE_INT offset;
3847 tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3848 : NULL;
3849 ipa_agg_value_set agg = ipa_agg_value_set_from_jfunc (new_root_info,
3850 new_root,
3851 &jfunc->agg);
3852 tree t = ipa_find_agg_cst_for_param (&agg, scalar,
3853 ie->indirect_info->offset,
3854 true);
3855 agg.release ();
3856 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3858 bool can_refer;
3859 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3860 vtable, offset, &can_refer);
3861 if (can_refer)
3863 if (!t
3864 || fndecl_built_in_p (t, BUILT_IN_UNREACHABLE)
3865 || !possible_polymorphic_call_target_p
3866 (ie, cgraph_node::get (t)))
3868 /* Do not speculate builtin_unreachable, it is stupid! */
3869 if (!ie->indirect_info->vptr_changed)
3870 target = ipa_impossible_devirt_target (ie, target);
3871 else
3872 target = NULL;
3874 else
3876 target = t;
3877 speculative = ie->indirect_info->vptr_changed;
3883 ipa_polymorphic_call_context ie_context (ie);
3884 vec <cgraph_node *>targets;
3885 bool final;
3887 ctx.offset_by (ie->indirect_info->offset);
3888 if (ie->indirect_info->vptr_changed)
3889 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3890 ie->indirect_info->otr_type);
3891 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3892 targets = possible_polymorphic_call_targets
3893 (ie->indirect_info->otr_type,
3894 ie->indirect_info->otr_token,
3895 ctx, &final);
3896 if (final && targets.length () <= 1)
3898 speculative = false;
3899 if (targets.length () == 1)
3900 target = targets[0]->decl;
3901 else
3902 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3904 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3905 && !ie->speculative && ie->maybe_hot_p ())
3907 cgraph_node *n;
3908 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3909 ie->indirect_info->otr_token,
3910 ie->indirect_info->context);
3911 if (n)
3913 target = n->decl;
3914 speculative = true;
3918 if (target)
3920 if (!possible_polymorphic_call_target_p
3921 (ie, cgraph_node::get_create (target)))
3923 if (speculative)
3924 return NULL;
3925 target = ipa_impossible_devirt_target (ie, target);
3927 return ipa_make_edge_direct_to_target (ie, target, speculative);
3929 else
3930 return NULL;
3933 /* Update the indirect call notes associated with NODE when CS is being inlined,
3934 assuming NODE is (potentially indirectly) inlined into CS->callee.
3935 Moreover, if the callee is discovered to be constant, create a new cgraph
3936 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3937 unless NEW_EDGES is NULL. Return true iff new edges were created. */
3939 static bool
3940 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3941 struct cgraph_node *node,
3942 vec<cgraph_edge *> *new_edges)
3944 class ipa_edge_args *top;
3945 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3946 struct cgraph_node *new_root;
3947 class ipa_node_params *new_root_info, *inlined_node_info;
3948 bool res = false;
3950 ipa_check_create_edge_args ();
3951 top = ipa_edge_args_sum->get (cs);
3952 new_root = cs->caller->inlined_to
3953 ? cs->caller->inlined_to : cs->caller;
3954 new_root_info = ipa_node_params_sum->get (new_root);
3955 inlined_node_info = ipa_node_params_sum->get (cs->callee->function_symbol ());
3957 for (ie = node->indirect_calls; ie; ie = next_ie)
3959 class cgraph_indirect_call_info *ici = ie->indirect_info;
3960 struct ipa_jump_func *jfunc;
3961 int param_index;
3963 next_ie = ie->next_callee;
3965 if (ici->param_index == -1)
3966 continue;
3968 /* We must check range due to calls with variable number of arguments: */
3969 if (!top || ici->param_index >= ipa_get_cs_argument_count (top))
3971 ici->param_index = -1;
3972 continue;
3975 param_index = ici->param_index;
3976 jfunc = ipa_get_ith_jump_func (top, param_index);
3978 auto_vec<cgraph_node *, 4> spec_targets;
3979 if (ie->speculative)
3980 for (cgraph_edge *direct = ie->first_speculative_call_target ();
3981 direct;
3982 direct = direct->next_speculative_call_target ())
3983 spec_targets.safe_push (direct->callee);
3985 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3986 new_direct_edge = NULL;
3987 else if (ici->polymorphic)
3989 ipa_polymorphic_call_context ctx;
3990 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3991 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx,
3992 new_root,
3993 new_root_info);
3995 else
3997 tree target_type = ipa_get_type (inlined_node_info, param_index);
3998 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3999 target_type,
4000 new_root,
4001 new_root_info);
4004 /* If speculation was removed, then we need to do nothing. */
4005 if (new_direct_edge && new_direct_edge != ie
4006 && spec_targets.contains (new_direct_edge->callee))
4008 new_direct_edge->indirect_inlining_edge = 1;
4009 res = true;
4010 if (!new_direct_edge->speculative)
4011 continue;
4013 else if (new_direct_edge)
4015 new_direct_edge->indirect_inlining_edge = 1;
4016 if (new_edges)
4018 new_edges->safe_push (new_direct_edge);
4019 res = true;
4021 /* If speculative edge was introduced we still need to update
4022 call info of the indirect edge. */
4023 if (!new_direct_edge->speculative)
4024 continue;
4026 if (jfunc->type == IPA_JF_PASS_THROUGH
4027 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
4029 if (ici->agg_contents
4030 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
4031 && !ici->polymorphic)
4032 ici->param_index = -1;
4033 else
4035 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
4036 if (ici->polymorphic
4037 && !ipa_get_jf_pass_through_type_preserved (jfunc))
4038 ici->vptr_changed = true;
4039 ipa_set_param_used_by_indirect_call (new_root_info,
4040 ici->param_index, true);
4041 if (ici->polymorphic)
4042 ipa_set_param_used_by_polymorphic_call (new_root_info,
4043 ici->param_index, true);
4046 else if (jfunc->type == IPA_JF_ANCESTOR)
4048 if (ici->agg_contents
4049 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
4050 && !ici->polymorphic)
4051 ici->param_index = -1;
4052 else
4054 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
4055 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
4056 if (ici->polymorphic
4057 && !ipa_get_jf_ancestor_type_preserved (jfunc))
4058 ici->vptr_changed = true;
4059 ipa_set_param_used_by_indirect_call (new_root_info,
4060 ici->param_index, true);
4061 if (ici->polymorphic)
4062 ipa_set_param_used_by_polymorphic_call (new_root_info,
4063 ici->param_index, true);
4066 else
4067 /* Either we can find a destination for this edge now or never. */
4068 ici->param_index = -1;
4071 return res;
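/* Worked example (added for illustration): suppose NODE contains an
   indirect call through its formal parameter #1, and the jump function
   describing argument #1 of CS is a NOP_EXPR pass-through of the caller's
   formal parameter #0.  The loop above then rewrites ici->param_index
   from 1 to 0 so the note refers to a parameter of the new root; for
   ancestor jump functions, ici->offset is additionally shifted by the
   ancestor's offset.  */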
4074 /* Recursively traverse subtree of NODE (including node) made of inlined
4075 cgraph_edges when CS has been inlined and invoke
4076 update_indirect_edges_after_inlining on all nodes and
4077 update_jump_functions_after_inlining on all non-inlined edges that lead out
4078 of this subtree. Newly discovered indirect edges will be added to
4079 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
4080 created. */
4082 static bool
4083 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
4084 struct cgraph_node *node,
4085 vec<cgraph_edge *> *new_edges)
4087 struct cgraph_edge *e;
4088 bool res;
4090 res = update_indirect_edges_after_inlining (cs, node, new_edges);
4092 for (e = node->callees; e; e = e->next_callee)
4093 if (!e->inline_failed)
4094 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
4095 else
4096 update_jump_functions_after_inlining (cs, e);
4097 for (e = node->indirect_calls; e; e = e->next_callee)
4098 update_jump_functions_after_inlining (cs, e);
4100 return res;
4103 /* Combine two controlled uses counts as done during inlining. */
4105 static int
4106 combine_controlled_uses_counters (int c, int d)
4108 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
4109 return IPA_UNDESCRIBED_USE;
4110 else
4111 return c + d - 1;
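/* Worked example (illustration only):

     combine_controlled_uses_counters (2, 3) == 4

   rather than 5, because the call site being inlined is itself one of the
   counted uses and disappears with the inlining, hence C + D - 1.
   IPA_UNDESCRIBED_USE means "unknown" and absorbs any other count.  */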
4114 /* Propagate the number of controlled uses from CS->callee to the new root of the
4115 tree of inlined nodes. */
4117 static void
4118 propagate_controlled_uses (struct cgraph_edge *cs)
4120 ipa_edge_args *args = ipa_edge_args_sum->get (cs);
4121 if (!args)
4122 return;
4123 struct cgraph_node *new_root = cs->caller->inlined_to
4124 ? cs->caller->inlined_to : cs->caller;
4125 ipa_node_params *new_root_info = ipa_node_params_sum->get (new_root);
4126 ipa_node_params *old_root_info = ipa_node_params_sum->get (cs->callee);
4127 int count, i;
4129 if (!old_root_info)
4130 return;
4132 count = MIN (ipa_get_cs_argument_count (args),
4133 ipa_get_param_count (old_root_info));
4134 for (i = 0; i < count; i++)
4136 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
4137 struct ipa_cst_ref_desc *rdesc;
4139 if (jf->type == IPA_JF_PASS_THROUGH)
4141 int src_idx, c, d;
4142 src_idx = ipa_get_jf_pass_through_formal_id (jf);
4143 c = ipa_get_controlled_uses (new_root_info, src_idx);
4144 d = ipa_get_controlled_uses (old_root_info, i);
4146 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
4147 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
4148 c = combine_controlled_uses_counters (c, d);
4149 ipa_set_controlled_uses (new_root_info, src_idx, c);
4150 bool lderef = true;
4151 if (c != IPA_UNDESCRIBED_USE)
4153 lderef = (ipa_get_param_load_dereferenced (new_root_info, src_idx)
4154 || ipa_get_param_load_dereferenced (old_root_info, i));
4155 ipa_set_param_load_dereferenced (new_root_info, src_idx, lderef);
4158 if (c == 0 && !lderef && new_root_info->ipcp_orig_node)
4160 struct cgraph_node *n;
4161 struct ipa_ref *ref;
4162 tree t = new_root_info->known_csts[src_idx];
4164 if (t && TREE_CODE (t) == ADDR_EXPR
4165 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
4166 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
4167 && (ref = new_root->find_reference (n, NULL, 0)))
4169 if (dump_file)
4170 fprintf (dump_file, "ipa-prop: Removing cloning-created "
4171 "reference from %s to %s.\n",
4172 new_root->dump_name (),
4173 n->dump_name ());
4174 ref->remove_reference ();
4178 else if (jf->type == IPA_JF_CONST
4179 && (rdesc = jfunc_rdesc_usable (jf)))
4181 int d = ipa_get_controlled_uses (old_root_info, i);
4182 int c = rdesc->refcount;
4183 rdesc->refcount = combine_controlled_uses_counters (c, d);
4184 if (rdesc->refcount == 0)
4186 tree cst = ipa_get_jf_constant (jf);
4187 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
4188 && ((TREE_CODE (TREE_OPERAND (cst, 0))
4189 == FUNCTION_DECL)
4190 || (TREE_CODE (TREE_OPERAND (cst, 0))
4191 == VAR_DECL)));
4193 symtab_node *n = symtab_node::get (TREE_OPERAND (cst, 0));
4194 if (n)
4196 struct cgraph_node *clone;
4197 bool removed = remove_described_reference (n, rdesc);
4198 /* The reference might have been removed by IPA-CP. */
4199 if (removed
4200 && ipa_get_param_load_dereferenced (old_root_info, i))
4202 new_root->create_reference (n, IPA_REF_LOAD, NULL);
4203 if (dump_file)
4204 fprintf (dump_file, "ipa-prop: ...replaced it with "
4205 "LOAD one from %s to %s.\n",
4206 new_root->dump_name (), n->dump_name ());
4209 clone = cs->caller;
4210 while (clone->inlined_to
4211 && clone->ipcp_clone
4212 && clone != rdesc->cs->caller)
4214 struct ipa_ref *ref;
4215 ref = clone->find_reference (n, NULL, 0);
4216 if (ref)
4218 if (dump_file)
4219 fprintf (dump_file, "ipa-prop: Removing "
4220 "cloning-created reference "
4221 "from %s to %s.\n",
4222 clone->dump_name (),
4223 n->dump_name ());
4224 ref->remove_reference ();
4226 clone = clone->callers->caller;
4233 for (i = ipa_get_param_count (old_root_info);
4234 i < ipa_get_cs_argument_count (args);
4235 i++)
4237 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
4239 if (jf->type == IPA_JF_CONST)
4241 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
4242 if (rdesc)
4243 rdesc->refcount = IPA_UNDESCRIBED_USE;
4245 else if (jf->type == IPA_JF_PASS_THROUGH)
4246 ipa_set_controlled_uses (new_root_info,
4247 jf->value.pass_through.formal_id,
4248 IPA_UNDESCRIBED_USE);
4252 /* Update jump functions and call note functions on inlining the call site CS.
4253 CS is expected to lead to a node already cloned by
4254 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
4255 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
4256 created. */
4258 bool
4259 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
4260 vec<cgraph_edge *> *new_edges)
4262 bool changed;
4263 /* Do nothing if the preparation phase has not been carried out yet
4264 (i.e. during early inlining). */
4265 if (!ipa_node_params_sum)
4266 return false;
4267 gcc_assert (ipa_edge_args_sum);
4269 propagate_controlled_uses (cs);
4270 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
4271 ipa_node_params_sum->remove (cs->callee);
4273 ipa_edge_args *args = ipa_edge_args_sum->get (cs);
4274 if (args)
4276 bool ok = true;
4277 if (args->jump_functions)
4279 struct ipa_jump_func *jf;
4280 int i;
4281 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
4282 if (jf->type == IPA_JF_CONST
4283 && ipa_get_jf_constant_rdesc (jf))
4285 ok = false;
4286 break;
4289 if (ok)
4290 ipa_edge_args_sum->remove (cs);
4292 if (ipcp_transformation_sum)
4293 ipcp_transformation_sum->remove (cs->callee);
4295 return changed;
4298 /* Ensure that the array of edge argument infos is big enough to accommodate a
4299 structure for all edges, reallocating it if not. Also, allocate the
4300 associated hash tables if they do not already exist. */
4302 void
4303 ipa_check_create_edge_args (void)
4305 if (!ipa_edge_args_sum)
4306 ipa_edge_args_sum
4307 = (new (ggc_alloc_no_dtor<ipa_edge_args_sum_t> ())
4308 ipa_edge_args_sum_t (symtab, true));
4309 if (!ipa_bits_hash_table)
4310 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
4311 if (!ipa_vr_hash_table)
4312 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
4315 /* Free all ipa_edge_args structures. */
4317 void
4318 ipa_free_all_edge_args (void)
4320 if (!ipa_edge_args_sum)
4321 return;
4323 ggc_delete (ipa_edge_args_sum);
4324 ipa_edge_args_sum = NULL;
4327 /* Free all ipa_node_params structures. */
4329 void
4330 ipa_free_all_node_params (void)
4332 if (ipa_node_params_sum)
4333 ggc_delete (ipa_node_params_sum);
4334 ipa_node_params_sum = NULL;
4337 /* Initialize IPA CP transformation summary and also allocate any necessary hash
4338 tables if they do not already exist. */
4340 void
4341 ipcp_transformation_initialize (void)
4343 if (!ipa_bits_hash_table)
4344 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
4345 if (!ipa_vr_hash_table)
4346 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
4347 if (ipcp_transformation_sum == NULL)
4349 ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
4350 ipcp_transformation_sum->disable_insertion_hook ();
4354 /* Release the IPA CP transformation summary. */
4356 void
4357 ipcp_free_transformation_sum (void)
4359 if (!ipcp_transformation_sum)
4360 return;
4362 ipcp_transformation_sum->~function_summary<ipcp_transformation *> ();
4363 ggc_free (ipcp_transformation_sum);
4364 ipcp_transformation_sum = NULL;
4367 /* Set the aggregate replacements of NODE to be AGGVALS. */
4369 void
4370 ipa_set_node_agg_value_chain (struct cgraph_node *node,
4371 struct ipa_agg_replacement_value *aggvals)
4373 ipcp_transformation_initialize ();
4374 ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
4375 s->agg_values = aggvals;
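/* Usage sketch (hypothetical values, for illustration only):

     ipa_agg_replacement_value *v = ggc_alloc<ipa_agg_replacement_value> ();
     v->index = 0;      // replaces part of the first formal parameter
     v->offset = 32;    // bit offset within the aggregate
     v->value = build_int_cst (integer_type_node, 42);
     v->by_ref = false;
     v->next = NULL;
     ipa_set_node_agg_value_chain (node, v);  */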
4378 /* Hook that is called by cgraph.c when an edge is removed. Adjust reference
4379 count data structures accordingly. */
4381 void
4382 ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
4384 if (args->jump_functions)
4386 struct ipa_jump_func *jf;
4387 int i;
4388 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
4390 struct ipa_cst_ref_desc *rdesc;
4391 try_decrement_rdesc_refcount (jf);
4392 if (jf->type == IPA_JF_CONST
4393 && (rdesc = ipa_get_jf_constant_rdesc (jf))
4394 && rdesc->cs == cs)
4395 rdesc->cs = NULL;
4400 /* Method invoked when an edge is duplicated. Copy ipa_edge_args and adjust
4401 reference count data structures accordingly. */
4403 void
4404 ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
4405 ipa_edge_args *old_args, ipa_edge_args *new_args)
4407 unsigned int i;
4409 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
4410 if (old_args->polymorphic_call_contexts)
4411 new_args->polymorphic_call_contexts
4412 = vec_safe_copy (old_args->polymorphic_call_contexts);
4414 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
4416 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
4417 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
4419 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
4421 if (src_jf->type == IPA_JF_CONST)
4423 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
4425 if (!src_rdesc)
4426 dst_jf->value.constant.rdesc = NULL;
4427 else if (src->caller == dst->caller)
4429 /* Creation of a speculative edge. If the source edge is the one
4430 grabbing a reference, we must create a new (duplicate)
4431 reference description. Otherwise they refer to the same
4432 description corresponding to a reference taken in a function
4433 src->caller is inlined to. In that case we just need to
4434 increment the refcount. */
4435 if (src_rdesc->cs == src)
4437 symtab_node *n = symtab_node_for_jfunc (src_jf);
4438 gcc_checking_assert (n);
4439 ipa_ref *ref
4440 = src->caller->find_reference (n, src->call_stmt,
4441 src->lto_stmt_uid);
4442 gcc_checking_assert (ref);
4443 dst->caller->clone_reference (ref, ref->stmt);
4445 ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
4446 dst_rdesc->cs = dst;
4447 dst_rdesc->refcount = src_rdesc->refcount;
4448 dst_rdesc->next_duplicate = NULL;
4449 dst_jf->value.constant.rdesc = dst_rdesc;
4451 else
4453 src_rdesc->refcount++;
4454 dst_jf->value.constant.rdesc = src_rdesc;
4457 else if (src_rdesc->cs == src)
4459 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
4460 dst_rdesc->cs = dst;
4461 dst_rdesc->refcount = src_rdesc->refcount;
4462 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
4463 src_rdesc->next_duplicate = dst_rdesc;
4464 dst_jf->value.constant.rdesc = dst_rdesc;
4466 else
4468 struct ipa_cst_ref_desc *dst_rdesc;
4469 /* This can happen during inlining, when a JFUNC can refer to a
4470 reference taken in a function up in the tree of inline clones.
4471 We need to find the duplicate that refers to our tree of
4472 inline clones. */
4474 gcc_assert (dst->caller->inlined_to);
4475 for (dst_rdesc = src_rdesc->next_duplicate;
4476 dst_rdesc;
4477 dst_rdesc = dst_rdesc->next_duplicate)
4479 struct cgraph_node *top;
4480 top = dst_rdesc->cs->caller->inlined_to
4481 ? dst_rdesc->cs->caller->inlined_to
4482 : dst_rdesc->cs->caller;
4483 if (dst->caller->inlined_to == top)
4484 break;
4486 gcc_assert (dst_rdesc);
4487 dst_jf->value.constant.rdesc = dst_rdesc;
4490 else if (dst_jf->type == IPA_JF_PASS_THROUGH
4491 && src->caller == dst->caller)
4493 struct cgraph_node *inline_root = dst->caller->inlined_to
4494 ? dst->caller->inlined_to : dst->caller;
4495 ipa_node_params *root_info = ipa_node_params_sum->get (inline_root);
4496 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
4498 int c = ipa_get_controlled_uses (root_info, idx);
4499 if (c != IPA_UNDESCRIBED_USE)
4501 c++;
4502 ipa_set_controlled_uses (root_info, idx, c);
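/* Summary of the rdesc cases above (added for clarity): (1) the same
   caller means a speculative edge is being created, and the duplicate
   either gets a fresh rdesc plus a cloned reference (when SRC owned the
   reference) or simply shares SRC's rdesc with a bumped refcount; (2) SRC
   owning the rdesc but having a different caller is plain edge
   duplication, and the copy is chained through next_duplicate; (3)
   otherwise we search the next_duplicate list for the rdesc that belongs
   to our tree of inline clones.  */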
4508 /* Analyze newly added function into callgraph. */
4510 static void
4511 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
4513 if (node->has_gimple_body_p ())
4514 ipa_analyze_node (node);
4517 /* Hook that is called by summary when a node is duplicated. */
4519 void
4520 ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
4521 ipa_node_params *old_info,
4522 ipa_node_params *new_info)
4524 ipa_agg_replacement_value *old_av, *new_av;
4526 new_info->descriptors = vec_safe_copy (old_info->descriptors);
4527 new_info->lattices = NULL;
4528 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
4529 new_info->known_csts = old_info->known_csts.copy ();
4530 new_info->known_contexts = old_info->known_contexts.copy ();
4532 new_info->analysis_done = old_info->analysis_done;
4533 new_info->node_enqueued = old_info->node_enqueued;
4534 new_info->versionable = old_info->versionable;
4536 old_av = ipa_get_agg_replacements_for_node (src);
4537 if (old_av)
4539 new_av = NULL;
4540 while (old_av)
4542 struct ipa_agg_replacement_value *v;
4544 v = ggc_alloc<ipa_agg_replacement_value> ();
4545 memcpy (v, old_av, sizeof (*v));
4546 v->next = new_av;
4547 new_av = v;
4548 old_av = old_av->next;
4550 ipa_set_node_agg_value_chain (dst, new_av);
4554 /* Duplication of ipcp transformation summaries. */
4556 void
4557 ipcp_transformation_t::duplicate(cgraph_node *, cgraph_node *dst,
4558 ipcp_transformation *src_trans,
4559 ipcp_transformation *dst_trans)
4561 /* Avoid redundant work of duplicating vectors we will never use. */
4562 if (dst->inlined_to)
4563 return;
4564 dst_trans->bits = vec_safe_copy (src_trans->bits);
4565 dst_trans->m_vr = vec_safe_copy (src_trans->m_vr);
4566 ipa_agg_replacement_value *agg = src_trans->agg_values,
4567 **aggptr = &dst_trans->agg_values;
4568 while (agg)
4570 *aggptr = ggc_alloc<ipa_agg_replacement_value> ();
4571 **aggptr = *agg;
4572 agg = agg->next;
4573 aggptr = &(*aggptr)->next;
4577 /* Register our cgraph hooks if they are not already there. */
4579 void
4580 ipa_register_cgraph_hooks (void)
4582 ipa_check_create_node_params ();
4583 ipa_check_create_edge_args ();
4585 function_insertion_hook_holder =
4586 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
4589 /* Unregister our cgraph hooks if they are not already there. */
4591 static void
4592 ipa_unregister_cgraph_hooks (void)
4594 if (function_insertion_hook_holder)
4595 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
4596 function_insertion_hook_holder = NULL;
4599 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4600 longer needed after ipa-cp. */
4602 void
4603 ipa_free_all_structures_after_ipa_cp (void)
4605 if (!optimize && !in_lto_p)
4607 ipa_free_all_edge_args ();
4608 ipa_free_all_node_params ();
4609 ipcp_sources_pool.release ();
4610 ipcp_cst_values_pool.release ();
4611 ipcp_poly_ctx_values_pool.release ();
4612 ipcp_agg_lattice_pool.release ();
4613 ipa_unregister_cgraph_hooks ();
4614 ipa_refdesc_pool.release ();
4618 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4619 longer needed after indirect inlining. */
4621 void
4622 ipa_free_all_structures_after_iinln (void)
4624 ipa_free_all_edge_args ();
4625 ipa_free_all_node_params ();
4626 ipa_unregister_cgraph_hooks ();
4627 ipcp_sources_pool.release ();
4628 ipcp_cst_values_pool.release ();
4629 ipcp_poly_ctx_values_pool.release ();
4630 ipcp_agg_lattice_pool.release ();
4631 ipa_refdesc_pool.release ();
4634 /* Print the ipa_tree_map data structures of function NODE to F. */
4637 void
4638 ipa_print_node_params (FILE *f, struct cgraph_node *node)
4640 int i, count;
4641 class ipa_node_params *info;
4643 if (!node->definition)
4644 return;
4645 info = ipa_node_params_sum->get (node);
4646 fprintf (f, " function %s parameter descriptors:\n", node->dump_name ());
4647 if (!info)
4649 fprintf (f, " no params return\n");
4650 return;
4652 count = ipa_get_param_count (info);
4653 for (i = 0; i < count; i++)
4655 int c;
4657 fprintf (f, " ");
4658 ipa_dump_param (f, info, i);
4659 if (ipa_is_param_used (info, i))
4660 fprintf (f, " used");
4661 if (ipa_is_param_used_by_ipa_predicates (info, i))
4662 fprintf (f, " used_by_ipa_predicates");
4663 if (ipa_is_param_used_by_indirect_call (info, i))
4664 fprintf (f, " used_by_indirect_call");
4665 if (ipa_is_param_used_by_polymorphic_call (info, i))
4666 fprintf (f, " used_by_polymorphic_call");
4667 c = ipa_get_controlled_uses (info, i);
4668 if (c == IPA_UNDESCRIBED_USE)
4669 fprintf (f, " undescribed_use");
4670 else
4671 fprintf (f, " controlled_uses=%i %s", c,
4672 ipa_get_param_load_dereferenced (info, i)
4673 ? "(load_dereferenced)" : "");
4674 fprintf (f, "\n");
4678 /* Print ipa_tree_map data structures of all functions in the
4679 callgraph to F. */
4681 void
4682 ipa_print_all_params (FILE * f)
4684 struct cgraph_node *node;
4686 fprintf (f, "\nFunction parameters:\n");
4687 FOR_EACH_FUNCTION (node)
4688 ipa_print_node_params (f, node);
4691 /* Dump the AV linked list. */
4693 void
4694 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4696 bool comma = false;
4697 fprintf (f, " Aggregate replacements:");
4698 for (; av; av = av->next)
4700 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4701 av->index, av->offset);
4702 print_generic_expr (f, av->value);
4703 comma = true;
4705 fprintf (f, "\n");
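/* The resulting dump looks like (values made up):

     Aggregate replacements: 0[32]=42, 1[0]=&some_var

   i.e. comma-separated "index[offset]=value" entries.  */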
4708 /* Stream out jump function JUMP_FUNC to OB. */
4710 static void
4711 ipa_write_jump_function (struct output_block *ob,
4712 struct ipa_jump_func *jump_func)
4714 struct ipa_agg_jf_item *item;
4715 struct bitpack_d bp;
4716 int i, count;
4717 int flag = 0;
4719 /* ADDR_EXPRs are very common IP invariants; save some streamer data
4720 as well as WPA memory by handling them specially. */
4721 if (jump_func->type == IPA_JF_CONST
4722 && TREE_CODE (jump_func->value.constant.value) == ADDR_EXPR)
4723 flag = 1;
4725 streamer_write_uhwi (ob, jump_func->type * 2 + flag);
4726 switch (jump_func->type)
4728 case IPA_JF_UNKNOWN:
4729 break;
4730 case IPA_JF_CONST:
4731 gcc_assert (
4732 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4733 stream_write_tree (ob,
4734 flag
4735 ? TREE_OPERAND (jump_func->value.constant.value, 0)
4736 : jump_func->value.constant.value, true);
4737 break;
4738 case IPA_JF_PASS_THROUGH:
4739 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4740 if (jump_func->value.pass_through.operation == NOP_EXPR)
4742 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4743 bp = bitpack_create (ob->main_stream);
4744 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4745 streamer_write_bitpack (&bp);
4747 else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
4748 == tcc_unary)
4749 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4750 else
4752 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4753 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4755 break;
4756 case IPA_JF_ANCESTOR:
4757 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4758 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4759 bp = bitpack_create (ob->main_stream);
4760 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4761 streamer_write_bitpack (&bp);
4762 break;
4763 default:
4764 fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
4767 count = vec_safe_length (jump_func->agg.items);
4768 streamer_write_uhwi (ob, count);
4769 if (count)
4771 bp = bitpack_create (ob->main_stream);
4772 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4773 streamer_write_bitpack (&bp);
4776 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4778 stream_write_tree (ob, item->type, true);
4779 streamer_write_uhwi (ob, item->offset);
4780 streamer_write_uhwi (ob, item->jftype);
4781 switch (item->jftype)
4783 case IPA_JF_UNKNOWN:
4784 break;
4785 case IPA_JF_CONST:
4786 stream_write_tree (ob, item->value.constant, true);
4787 break;
4788 case IPA_JF_PASS_THROUGH:
4789 case IPA_JF_LOAD_AGG:
4790 streamer_write_uhwi (ob, item->value.pass_through.operation);
4791 streamer_write_uhwi (ob, item->value.pass_through.formal_id);
4792 if (TREE_CODE_CLASS (item->value.pass_through.operation)
4793 != tcc_unary)
4794 stream_write_tree (ob, item->value.pass_through.operand, true);
4795 if (item->jftype == IPA_JF_LOAD_AGG)
4797 stream_write_tree (ob, item->value.load_agg.type, true);
4798 streamer_write_uhwi (ob, item->value.load_agg.offset);
4799 bp = bitpack_create (ob->main_stream);
4800 bp_pack_value (&bp, item->value.load_agg.by_ref, 1);
4801 streamer_write_bitpack (&bp);
4803 break;
4804 default:
4805 fatal_error (UNKNOWN_LOCATION,
4806 "invalid jump function in LTO stream");
4810 bp = bitpack_create (ob->main_stream);
4811 bp_pack_value (&bp, !!jump_func->bits, 1);
4812 streamer_write_bitpack (&bp);
4813 if (jump_func->bits)
4815 streamer_write_widest_int (ob, jump_func->bits->value);
4816 streamer_write_widest_int (ob, jump_func->bits->mask);
4818 bp_pack_value (&bp, !!jump_func->m_vr, 1);
4819 streamer_write_bitpack (&bp);
4820 if (jump_func->m_vr)
4822 streamer_write_enum (ob->main_stream, value_rang_type,
4823 VR_LAST, jump_func->m_vr->kind ());
4824 stream_write_tree (ob, jump_func->m_vr->min (), true);
4825 stream_write_tree (ob, jump_func->m_vr->max (), true);
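/* Note on the streamed tag (added for clarity): the jump function type and
   the ADDR_EXPR flag share a single uhwi written as type * 2 + flag above.
   ipa_read_jump_function below inverts this with "flag = val & 1" and
   "jftype = val / 2".  For a flagged constant only the operand of the
   ADDR_EXPR is streamed and the reader rebuilds the ADDR_EXPR around it,
   so the ADDR_EXPR wrapper itself never needs to be streamed.  */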
4829 /* Read in jump function JUMP_FUNC from IB. */
4831 static void
4832 ipa_read_jump_function (class lto_input_block *ib,
4833 struct ipa_jump_func *jump_func,
4834 struct cgraph_edge *cs,
4835 class data_in *data_in,
4836 bool prevails)
4838 enum jump_func_type jftype;
4839 enum tree_code operation;
4840 int i, count;
4841 int val = streamer_read_uhwi (ib);
4842 bool flag = val & 1;
4844 jftype = (enum jump_func_type) (val / 2);
4845 switch (jftype)
4847 case IPA_JF_UNKNOWN:
4848 ipa_set_jf_unknown (jump_func);
4849 break;
4850 case IPA_JF_CONST:
4852 tree t = stream_read_tree (ib, data_in);
4853 if (flag && prevails)
4854 t = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (t)), t);
4855 ipa_set_jf_constant (jump_func, t, cs);
4857 break;
4858 case IPA_JF_PASS_THROUGH:
4859 operation = (enum tree_code) streamer_read_uhwi (ib);
4860 if (operation == NOP_EXPR)
4862 int formal_id = streamer_read_uhwi (ib);
4863 struct bitpack_d bp = streamer_read_bitpack (ib);
4864 bool agg_preserved = bp_unpack_value (&bp, 1);
4865 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4867 else if (TREE_CODE_CLASS (operation) == tcc_unary)
4869 int formal_id = streamer_read_uhwi (ib);
4870 ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4872 else
4874 tree operand = stream_read_tree (ib, data_in);
4875 int formal_id = streamer_read_uhwi (ib);
4876 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4877 operation);
4879 break;
4880 case IPA_JF_ANCESTOR:
4882 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4883 int formal_id = streamer_read_uhwi (ib);
4884 struct bitpack_d bp = streamer_read_bitpack (ib);
4885 bool agg_preserved = bp_unpack_value (&bp, 1);
4886 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4887 break;
4889 default:
4890 fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
4893 count = streamer_read_uhwi (ib);
4894 if (prevails)
4896 jump_func->agg.items = NULL;
4897 vec_safe_reserve (jump_func->agg.items, count, true);
4899 if (count)
4901 struct bitpack_d bp = streamer_read_bitpack (ib);
4902 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4904 for (i = 0; i < count; i++)
4906 struct ipa_agg_jf_item item;
4907 item.type = stream_read_tree (ib, data_in);
4908 item.offset = streamer_read_uhwi (ib);
4909 item.jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4911 switch (item.jftype)
4913 case IPA_JF_UNKNOWN:
4914 break;
4915 case IPA_JF_CONST:
4916 item.value.constant = stream_read_tree (ib, data_in);
4917 break;
4918 case IPA_JF_PASS_THROUGH:
4919 case IPA_JF_LOAD_AGG:
4920 operation = (enum tree_code) streamer_read_uhwi (ib);
4921 item.value.pass_through.operation = operation;
4922 item.value.pass_through.formal_id = streamer_read_uhwi (ib);
4923 if (TREE_CODE_CLASS (operation) == tcc_unary)
4924 item.value.pass_through.operand = NULL_TREE;
4925 else
4926 item.value.pass_through.operand = stream_read_tree (ib, data_in);
4927 if (item.jftype == IPA_JF_LOAD_AGG)
4929 struct bitpack_d bp;
4930 item.value.load_agg.type = stream_read_tree (ib, data_in);
4931 item.value.load_agg.offset = streamer_read_uhwi (ib);
4932 bp = streamer_read_bitpack (ib);
4933 item.value.load_agg.by_ref = bp_unpack_value (&bp, 1);
4935 break;
4936 default:
4937 fatal_error (UNKNOWN_LOCATION,
4938 "invalid jump function in LTO stream");
4940 if (prevails)
4941 jump_func->agg.items->quick_push (item);
4944 struct bitpack_d bp = streamer_read_bitpack (ib);
4945 bool bits_known = bp_unpack_value (&bp, 1);
4946 if (bits_known)
4948 widest_int value = streamer_read_widest_int (ib);
4949 widest_int mask = streamer_read_widest_int (ib);
4950 if (prevails)
4951 ipa_set_jfunc_bits (jump_func, value, mask);
4953 else
4954 jump_func->bits = NULL;
4956 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4957 bool vr_known = bp_unpack_value (&vr_bp, 1);
4958 if (vr_known)
4960 enum value_range_kind type = streamer_read_enum (ib, value_range_kind,
4961 VR_LAST);
4962 tree min = stream_read_tree (ib, data_in);
4963 tree max = stream_read_tree (ib, data_in);
4964 if (prevails)
4965 ipa_set_jfunc_vr (jump_func, type, min, max);
4967 else
4968 jump_func->m_vr = NULL;
4971 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4972 relevant to indirect inlining to OB. */
4974 static void
4975 ipa_write_indirect_edge_info (struct output_block *ob,
4976 struct cgraph_edge *cs)
4978 class cgraph_indirect_call_info *ii = cs->indirect_info;
4979 struct bitpack_d bp;
4981 streamer_write_hwi (ob, ii->param_index);
4982 bp = bitpack_create (ob->main_stream);
4983 bp_pack_value (&bp, ii->polymorphic, 1);
4984 bp_pack_value (&bp, ii->agg_contents, 1);
4985 bp_pack_value (&bp, ii->member_ptr, 1);
4986 bp_pack_value (&bp, ii->by_ref, 1);
4987 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
4988 bp_pack_value (&bp, ii->vptr_changed, 1);
4989 streamer_write_bitpack (&bp);
4990 if (ii->agg_contents || ii->polymorphic)
4991 streamer_write_hwi (ob, ii->offset);
4992 else
4993 gcc_assert (ii->offset == 0);
4995 if (ii->polymorphic)
4997 streamer_write_hwi (ob, ii->otr_token);
4998 stream_write_tree (ob, ii->otr_type, true);
4999 ii->context.stream_out (ob);
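/* Consistency note (added): the six flags above are bit-packed in a fixed
   order (polymorphic, agg_contents, member_ptr, by_ref,
   guaranteed_unmodified, vptr_changed); ipa_read_indirect_edge_info below
   must unpack them in exactly the same order because bitpacks carry no
   self-description.  */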
5003 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
5004 relevant to indirect inlining from IB. */
5006 static void
5007 ipa_read_indirect_edge_info (class lto_input_block *ib,
5008 class data_in *data_in,
5009 struct cgraph_edge *cs,
5010 class ipa_node_params *info)
5012 class cgraph_indirect_call_info *ii = cs->indirect_info;
5013 struct bitpack_d bp;
5015 ii->param_index = (int) streamer_read_hwi (ib);
5016 bp = streamer_read_bitpack (ib);
5017 ii->polymorphic = bp_unpack_value (&bp, 1);
5018 ii->agg_contents = bp_unpack_value (&bp, 1);
5019 ii->member_ptr = bp_unpack_value (&bp, 1);
5020 ii->by_ref = bp_unpack_value (&bp, 1);
5021 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
5022 ii->vptr_changed = bp_unpack_value (&bp, 1);
5023 if (ii->agg_contents || ii->polymorphic)
5024 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
5025 else
5026 ii->offset = 0;
5027 if (ii->polymorphic)
5029 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
5030 ii->otr_type = stream_read_tree (ib, data_in);
5031 ii->context.stream_in (ib, data_in);
5033 if (info && ii->param_index >= 0)
5035 if (ii->polymorphic)
5036 ipa_set_param_used_by_polymorphic_call (info,
5037 ii->param_index , true);
5038 ipa_set_param_used_by_indirect_call (info,
5039 ii->param_index, true);
5043 /* Stream out NODE info to OB. */
5045 static void
5046 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
5048 int node_ref;
5049 lto_symtab_encoder_t encoder;
5050 ipa_node_params *info = ipa_node_params_sum->get (node);
5051 int j;
5052 struct cgraph_edge *e;
5053 struct bitpack_d bp;
5055 encoder = ob->decl_state->symtab_node_encoder;
5056 node_ref = lto_symtab_encoder_encode (encoder, node);
5057 streamer_write_uhwi (ob, node_ref);
5059 streamer_write_uhwi (ob, ipa_get_param_count (info));
5060 for (j = 0; j < ipa_get_param_count (info); j++)
5061 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
5062 bp = bitpack_create (ob->main_stream);
5063 gcc_assert (info->analysis_done
5064 || ipa_get_param_count (info) == 0);
5065 gcc_assert (!info->node_enqueued);
5066 gcc_assert (!info->ipcp_orig_node);
5067 for (j = 0; j < ipa_get_param_count (info); j++)
5069 /* TODO: We could just not stream the bit in the undescribed case. */
5070 bool d = (ipa_get_controlled_uses (info, j) != IPA_UNDESCRIBED_USE)
5071 ? ipa_get_param_load_dereferenced (info, j) : true;
5072 bp_pack_value (&bp, d, 1);
5073 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
5075 streamer_write_bitpack (&bp);
5076 for (j = 0; j < ipa_get_param_count (info); j++)
5078 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
5079 stream_write_tree (ob, ipa_get_type (info, j), true);
5081 for (e = node->callees; e; e = e->next_callee)
5083 ipa_edge_args *args = ipa_edge_args_sum->get (e);
5085 if (!args)
5087 streamer_write_uhwi (ob, 0);
5088 continue;
5091 streamer_write_uhwi (ob,
5092 ipa_get_cs_argument_count (args) * 2
5093 + (args->polymorphic_call_contexts != NULL));
5094 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
5096 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
5097 if (args->polymorphic_call_contexts != NULL)
5098 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
5101 for (e = node->indirect_calls; e; e = e->next_callee)
5103 ipa_edge_args *args = ipa_edge_args_sum->get (e);
5104 if (!args)
5105 streamer_write_uhwi (ob, 0);
5106 else
5108 streamer_write_uhwi (ob,
5109 ipa_get_cs_argument_count (args) * 2
5110 + (args->polymorphic_call_contexts != NULL));
5111 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
5113 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
5114 if (args->polymorphic_call_contexts != NULL)
5115 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
5118 ipa_write_indirect_edge_info (ob, e);
5122 /* Stream in edge E from IB. */
5124 static void
5125 ipa_read_edge_info (class lto_input_block *ib,
5126 class data_in *data_in,
5127 struct cgraph_edge *e, bool prevails)
5129 int count = streamer_read_uhwi (ib);
5130 bool contexts_computed = count & 1;
5132 count /= 2;
5133 if (!count)
5134 return;
5135 if (prevails
5136 && (e->possibly_call_in_translation_unit_p ()
5137 /* Also stream in jump functions to builtins in the hope that they
5138 will get fnspecs. */
5139 || fndecl_built_in_p (e->callee->decl, BUILT_IN_NORMAL)))
5141 ipa_edge_args *args = ipa_edge_args_sum->get_create (e);
5142 vec_safe_grow_cleared (args->jump_functions, count, true);
5143 if (contexts_computed)
5144 vec_safe_grow_cleared (args->polymorphic_call_contexts, count, true);
5145 for (int k = 0; k < count; k++)
5147 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5148 data_in, prevails);
5149 if (contexts_computed)
5150 ipa_get_ith_polymorhic_call_context (args, k)->stream_in
5151 (ib, data_in);
5154 else
5156 for (int k = 0; k < count; k++)
5158 struct ipa_jump_func dummy;
5159 ipa_read_jump_function (ib, &dummy, e,
5160 data_in, prevails);
5161 if (contexts_computed)
5163 class ipa_polymorphic_call_context ctx;
5164 ctx.stream_in (ib, data_in);
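/* Decoding note (added): the uhwi read at the top of this function
   multiplexes the argument count with a flag in its lowest bit, mirroring
   ipa_write_node_info which streams

     ipa_get_cs_argument_count (args) * 2
       + (args->polymorphic_call_contexts != NULL)

   hence "contexts_computed = count & 1; count /= 2" above.  */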
5170 /* Stream in NODE info from IB. */
5172 static void
5173 ipa_read_node_info (class lto_input_block *ib, struct cgraph_node *node,
5174 class data_in *data_in)
5176 int k;
5177 struct cgraph_edge *e;
5178 struct bitpack_d bp;
5179 bool prevails = node->prevailing_p ();
5180 ipa_node_params *info
5181 = prevails ? ipa_node_params_sum->get_create (node) : NULL;
5183 int param_count = streamer_read_uhwi (ib);
5184 if (prevails)
5186 ipa_alloc_node_params (node, param_count);
5187 for (k = 0; k < param_count; k++)
5188 (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
5189 if (ipa_get_param_count (info) != 0)
5190 info->analysis_done = true;
5191 info->node_enqueued = false;
5193 else
5194 for (k = 0; k < param_count; k++)
5195 streamer_read_uhwi (ib);
5197 bp = streamer_read_bitpack (ib);
5198 for (k = 0; k < param_count; k++)
5200 bool load_dereferenced = bp_unpack_value (&bp, 1);
5201 bool used = bp_unpack_value (&bp, 1);
5203 if (prevails)
5205 ipa_set_param_load_dereferenced (info, k, load_dereferenced);
5206 ipa_set_param_used (info, k, used);
5209 for (k = 0; k < param_count; k++)
5211 int nuses = streamer_read_hwi (ib);
5212 tree type = stream_read_tree (ib, data_in);
5214 if (prevails)
5216 ipa_set_controlled_uses (info, k, nuses);
5217 (*info->descriptors)[k].decl_or_type = type;
5220 for (e = node->callees; e; e = e->next_callee)
5221 ipa_read_edge_info (ib, data_in, e, prevails);
5222 for (e = node->indirect_calls; e; e = e->next_callee)
5224 ipa_read_edge_info (ib, data_in, e, prevails);
5225 ipa_read_indirect_edge_info (ib, data_in, e, info);
5229 /* Write jump functions for all functions in the current LTO partition. */
5231 void
5232 ipa_prop_write_jump_functions (void)
5234 struct output_block *ob;
5235 unsigned int count = 0;
5236 lto_symtab_encoder_iterator lsei;
5237 lto_symtab_encoder_t encoder;
5239 if (!ipa_node_params_sum || !ipa_edge_args_sum)
5240 return;
5242 ob = create_output_block (LTO_section_jump_functions);
5243 encoder = ob->decl_state->symtab_node_encoder;
5244 ob->symbol = NULL;
5245 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5246 lsei_next_function_in_partition (&lsei))
5248 cgraph_node *node = lsei_cgraph_node (lsei);
5249 if (node->has_gimple_body_p ()
5250 && ipa_node_params_sum->get (node) != NULL)
5251 count++;
5254 streamer_write_uhwi (ob, count);
5256 /* Process all of the functions. */
5257 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5258 lsei_next_function_in_partition (&lsei))
5260 cgraph_node *node = lsei_cgraph_node (lsei);
5261 if (node->has_gimple_body_p ()
5262 && ipa_node_params_sum->get (node) != NULL)
5263 ipa_write_node_info (ob, node);
5265 streamer_write_char_stream (ob->main_stream, 0);
5266 produce_asm (ob, NULL);
5267 destroy_output_block (ob);
5270 /* Read section in file FILE_DATA of length LEN with data DATA. */
5272 static void
5273 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
5274 size_t len)
5276 const struct lto_function_header *header =
5277 (const struct lto_function_header *) data;
5278 const int cfg_offset = sizeof (struct lto_function_header);
5279 const int main_offset = cfg_offset + header->cfg_size;
5280 const int string_offset = main_offset + header->main_size;
5281 class data_in *data_in;
5282 unsigned int i;
5283 unsigned int count;
5285 lto_input_block ib_main ((const char *) data + main_offset,
5286 header->main_size, file_data->mode_table);
5288 data_in =
5289 lto_data_in_create (file_data, (const char *) data + string_offset,
5290 header->string_size, vNULL);
5291 count = streamer_read_uhwi (&ib_main);
5293 for (i = 0; i < count; i++)
5295 unsigned int index;
5296 struct cgraph_node *node;
5297 lto_symtab_encoder_t encoder;
5299 index = streamer_read_uhwi (&ib_main);
5300 encoder = file_data->symtab_node_encoder;
5301 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5302 index));
5303 gcc_assert (node->definition);
5304 ipa_read_node_info (&ib_main, node, data_in);
5306 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5307 len);
5308 lto_data_in_delete (data_in);
5311 /* Read ipcp jump functions. */
5313 void
5314 ipa_prop_read_jump_functions (void)
5316 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5317 struct lto_file_decl_data *file_data;
5318 unsigned int j = 0;
5320 ipa_check_create_node_params ();
5321 ipa_check_create_edge_args ();
5322 ipa_register_cgraph_hooks ();
5324 while ((file_data = file_data_vec[j++]))
5326 size_t len;
5327 const char *data
5328 = lto_get_summary_section_data (file_data, LTO_section_jump_functions,
5329 &len);
5330 if (data)
5331 ipa_prop_read_section (file_data, data, len);
5335 void
5336 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
5338 int node_ref;
5339 unsigned int count = 0;
5340 lto_symtab_encoder_t encoder;
5341 struct ipa_agg_replacement_value *aggvals, *av;
5343 aggvals = ipa_get_agg_replacements_for_node (node);
5344 encoder = ob->decl_state->symtab_node_encoder;
5345 node_ref = lto_symtab_encoder_encode (encoder, node);
5346 streamer_write_uhwi (ob, node_ref);
5348 for (av = aggvals; av; av = av->next)
5349 count++;
5350 streamer_write_uhwi (ob, count);
5352 for (av = aggvals; av; av = av->next)
5354 struct bitpack_d bp;
5356 streamer_write_uhwi (ob, av->offset);
5357 streamer_write_uhwi (ob, av->index);
5358 stream_write_tree (ob, av->value, true);
5360 bp = bitpack_create (ob->main_stream);
5361 bp_pack_value (&bp, av->by_ref, 1);
5362 streamer_write_bitpack (&bp);
5365 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5366 if (ts && vec_safe_length (ts->m_vr) > 0)
5368 count = ts->m_vr->length ();
5369 streamer_write_uhwi (ob, count);
5370 for (unsigned i = 0; i < count; ++i)
5372 struct bitpack_d bp;
5373 ipa_vr *parm_vr = &(*ts->m_vr)[i];
5374 bp = bitpack_create (ob->main_stream);
5375 bp_pack_value (&bp, parm_vr->known, 1);
5376 streamer_write_bitpack (&bp);
5377 if (parm_vr->known)
5379 streamer_write_enum (ob->main_stream, value_rang_type,
5380 VR_LAST, parm_vr->type);
5381 streamer_write_wide_int (ob, parm_vr->min);
5382 streamer_write_wide_int (ob, parm_vr->max);
5386 else
5387 streamer_write_uhwi (ob, 0);
5389 if (ts && vec_safe_length (ts->bits) > 0)
5391 count = ts->bits->length ();
5392 streamer_write_uhwi (ob, count);
5394 for (unsigned i = 0; i < count; ++i)
5396 const ipa_bits *bits_jfunc = (*ts->bits)[i];
5397 struct bitpack_d bp = bitpack_create (ob->main_stream);
5398 bp_pack_value (&bp, !!bits_jfunc, 1);
5399 streamer_write_bitpack (&bp);
5400 if (bits_jfunc)
5402 streamer_write_widest_int (ob, bits_jfunc->value);
5403 streamer_write_widest_int (ob, bits_jfunc->mask);
5407 else
5408 streamer_write_uhwi (ob, 0);
5411 /* Stream in the aggregate value replacement chain for NODE from IB. */
5413 static void
5414 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
5415 data_in *data_in)
5417 struct ipa_agg_replacement_value *aggvals = NULL;
5418 unsigned int count, i;
5420 count = streamer_read_uhwi (ib);
5421 for (i = 0; i < count; i++)
5423 struct ipa_agg_replacement_value *av;
5424 struct bitpack_d bp;
5426 av = ggc_alloc<ipa_agg_replacement_value> ();
5427 av->offset = streamer_read_uhwi (ib);
5428 av->index = streamer_read_uhwi (ib);
5429 av->value = stream_read_tree (ib, data_in);
5430 bp = streamer_read_bitpack (ib);
5431 av->by_ref = bp_unpack_value (&bp, 1);
5432 av->next = aggvals;
5433 aggvals = av;
5435 ipa_set_node_agg_value_chain (node, aggvals);
5437 count = streamer_read_uhwi (ib);
5438 if (count > 0)
5440 ipcp_transformation_initialize ();
5441 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
5442 vec_safe_grow_cleared (ts->m_vr, count, true);
5443 for (i = 0; i < count; i++)
5445 ipa_vr *parm_vr;
5446 parm_vr = &(*ts->m_vr)[i];
5447 struct bitpack_d bp;
5448 bp = streamer_read_bitpack (ib);
5449 parm_vr->known = bp_unpack_value (&bp, 1);
5450 if (parm_vr->known)
5452 parm_vr->type = streamer_read_enum (ib, value_range_kind,
5453 VR_LAST);
5454 parm_vr->min = streamer_read_wide_int (ib);
5455 parm_vr->max = streamer_read_wide_int (ib);
5459 count = streamer_read_uhwi (ib);
5460 if (count > 0)
5462 ipcp_transformation_initialize ();
5463 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
5464 vec_safe_grow_cleared (ts->bits, count, true);
5466 for (i = 0; i < count; i++)
5468 struct bitpack_d bp = streamer_read_bitpack (ib);
5469 bool known = bp_unpack_value (&bp, 1);
5470 if (known)
5472 const widest_int value = streamer_read_widest_int (ib);
5473 const widest_int mask = streamer_read_widest_int (ib);
5474 ipa_bits *bits
5475 = ipa_get_ipa_bits_for_value (value, mask);
5476 (*ts->bits)[i] = bits;
5482 /* Write IPA-CP transformation summaries (aggregate replacements, value
5483 ranges and known bits) for all functions in the current partition. */
5484 void
5485 ipcp_write_transformation_summaries (void)
5487 struct cgraph_node *node;
5488 struct output_block *ob;
5489 unsigned int count = 0;
5490 lto_symtab_encoder_iterator lsei;
5491 lto_symtab_encoder_t encoder;
5493 ob = create_output_block (LTO_section_ipcp_transform);
5494 encoder = ob->decl_state->symtab_node_encoder;
5495 ob->symbol = NULL;
5496 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5497 lsei_next_function_in_partition (&lsei))
5499 node = lsei_cgraph_node (lsei);
5500 if (node->has_gimple_body_p ())
5501 count++;
5504 streamer_write_uhwi (ob, count);
5506 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5507 lsei_next_function_in_partition (&lsei))
5509 node = lsei_cgraph_node (lsei);
5510 if (node->has_gimple_body_p ())
5511 write_ipcp_transformation_info (ob, node);
5513 streamer_write_char_stream (ob->main_stream, 0);
5514 produce_asm (ob, NULL);
5515 destroy_output_block (ob);
5518 /* Read replacements section in file FILE_DATA of length LEN with data
5519 DATA. */
5521 static void
5522 read_replacements_section (struct lto_file_decl_data *file_data,
5523 const char *data,
5524 size_t len)
5526 const struct lto_function_header *header =
5527 (const struct lto_function_header *) data;
5528 const int cfg_offset = sizeof (struct lto_function_header);
5529 const int main_offset = cfg_offset + header->cfg_size;
5530 const int string_offset = main_offset + header->main_size;
5531 class data_in *data_in;
5532 unsigned int i;
5533 unsigned int count;
5535 lto_input_block ib_main ((const char *) data + main_offset,
5536 header->main_size, file_data->mode_table);
5538 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5539 header->string_size, vNULL);
5540 count = streamer_read_uhwi (&ib_main);
5542 for (i = 0; i < count; i++)
5544 unsigned int index;
5545 struct cgraph_node *node;
5546 lto_symtab_encoder_t encoder;
5548 index = streamer_read_uhwi (&ib_main);
5549 encoder = file_data->symtab_node_encoder;
5550 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5551 index));
5552 gcc_assert (node->definition);
5553 read_ipcp_transformation_info (&ib_main, node, data_in);
5555 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
5556 len);
5557 lto_data_in_delete (data_in);
5560 /* Read IPA-CP transformation summaries. */
5562 void
5563 ipcp_read_transformation_summaries (void)
5565 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5566 struct lto_file_decl_data *file_data;
5567 unsigned int j = 0;
5569 while ((file_data = file_data_vec[j++]))
5571 size_t len;
5572 const char *data
5573 = lto_get_summary_section_data (file_data, LTO_section_ipcp_transform,
5574 &len);
5575 if (data)
5576 read_replacements_section (file_data, data, len);
5580 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5581 NODE; moreover, if any parameter was IPA-SRAed into a scalar, substitute
5582 the default definition of that new parameter with the appropriate
5583 constant.
5585 Return two bools. The first is true if at least one item in AGGVAL still
5586 exists and the function body walk should go ahead. The second is true if any
5587 values were already substituted for scalarized parameters and update_cfg
5588 should be run after replace_uses_by. */
5590 static std::pair<bool, bool>
5591 adjust_agg_replacement_values (cgraph_node *node,
5592 ipa_agg_replacement_value *aggval,
5593 const vec<ipa_param_descriptor, va_gc>
5594 &descriptors)
5596 struct ipa_agg_replacement_value *v;
5597 clone_info *cinfo = clone_info::get (node);
5598 if (!cinfo || !cinfo->param_adjustments)
5599 return std::pair<bool, bool> (true, false);
5601 bool anything_left = false;
5602 bool done_replacement = false;
5603 for (v = aggval; v; v = v->next)
5605 gcc_checking_assert (v->index >= 0);
5607 unsigned unit_offset = v->offset / BITS_PER_UNIT;
5608 tree cst_type = TREE_TYPE (v->value);
5609 int split_idx;
5610 int new_idx
5611 = cinfo->param_adjustments->get_updated_index_or_split (v->index,
5612 unit_offset,
5613 cst_type,
5614 &split_idx);
5615 v->index = new_idx;
5616 if (new_idx >= 0)
5617 anything_left = true;
5618 else if (split_idx >= 0)
5620 tree parm = ipa_get_param (descriptors, split_idx);
5621 tree ddef = ssa_default_def (cfun, parm);
5622 if (ddef)
5624 replace_uses_by (ddef, v->value);
5625 done_replacement = true;
5629 return std::pair<bool, bool> (anything_left, done_replacement);
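/* Usage sketch (simplified; the actual caller is the IPA-CP transformation
   phase later in this file):

     std::pair<bool, bool> rr
       = adjust_agg_replacement_values (node, aggval, *descriptors);
     bool cfg_changed = rr.second;  // replace_uses_by already ran
     if (!rr.first)
       return;                      // no aggregate item survived  */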
5632 /* Dominator walker driving the ipcp modification phase. */
5634 class ipcp_modif_dom_walker : public dom_walker
5636 public:
5637 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
5638 vec<ipa_param_descriptor, va_gc> *descs,
5639 struct ipa_agg_replacement_value *av,
5640 bool *sc)
5641 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5642 m_aggval (av), m_something_changed (sc) {}
5644 virtual edge before_dom_children (basic_block);
5645 bool cleanup_eh ()
5646 { return gimple_purge_all_dead_eh_edges (m_need_eh_cleanup); }
5648 private:
5649 struct ipa_func_body_info *m_fbi;
5650 vec<ipa_param_descriptor, va_gc> *m_descriptors;
5651 struct ipa_agg_replacement_value *m_aggval;
5652 bool *m_something_changed;
5653 auto_bitmap m_need_eh_cleanup;
5656 edge
5657 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5659 gimple_stmt_iterator gsi;
5660 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5662 struct ipa_agg_replacement_value *v;
5663 gimple *stmt = gsi_stmt (gsi);
5664 tree rhs, val, t;
5665 HOST_WIDE_INT offset;
5666 poly_int64 size;
5667 int index;
5668 bool by_ref, vce;
5670 if (!gimple_assign_load_p (stmt))
5671 continue;
5672 rhs = gimple_assign_rhs1 (stmt);
5673 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5674 continue;
5676 vce = false;
5677 t = rhs;
5678 while (handled_component_p (t))
5680 /* V_C_E can do things like convert an array of integers to one
5681 bigger integer and similar things we do not handle below. */
5682 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5684 vce = true;
5685 break;
5687 t = TREE_OPERAND (t, 0);
5689 if (vce)
5690 continue;
5692 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
5693 &offset, &size, &by_ref))
5694 continue;
5695 for (v = m_aggval; v; v = v->next)
5696 if (v->index == index
5697 && v->offset == offset)
5698 break;
5699 if (!v
5700 || v->by_ref != by_ref
5701 || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v->value))),
5702 size))
5703 continue;
5705 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5706 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5708 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5709 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5710 else if (TYPE_SIZE (TREE_TYPE (rhs))
5711 == TYPE_SIZE (TREE_TYPE (v->value)))
5712 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5713 else
5715 if (dump_file)
5717 fprintf (dump_file, " const ");
5718 print_generic_expr (dump_file, v->value);
5719 fprintf (dump_file, " can't be converted to type of ");
5720 print_generic_expr (dump_file, rhs);
5721 fprintf (dump_file, "\n");
5723 continue;
5726 else
5727 val = v->value;
5729 if (dump_file && (dump_flags & TDF_DETAILS))
5731 fprintf (dump_file, "Modifying stmt:\n ");
5732 print_gimple_stmt (dump_file, stmt, 0);
5734 gimple_assign_set_rhs_from_tree (&gsi, val);
5735 update_stmt (stmt);
5737 if (dump_file && (dump_flags & TDF_DETAILS))
5739 fprintf (dump_file, "into:\n ");
5740 print_gimple_stmt (dump_file, stmt, 0);
5741 fprintf (dump_file, "\n");
5744 *m_something_changed = true;
5745 if (maybe_clean_eh_stmt (stmt))
5746 bitmap_set_bit (m_need_eh_cleanup, bb->index);
5748 return NULL;
5751 /* Return true if we have recorded VALUE and MASK about PARM.
5752 Set VALUE and MASK accordingly. */
5754 bool
5755 ipcp_get_parm_bits (tree parm, tree *value, widest_int *mask)
5757 cgraph_node *cnode = cgraph_node::get (current_function_decl);
5758 ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
5759 if (!ts || vec_safe_length (ts->bits) == 0)
5760 return false;
5762 int i = 0;
5763 for (tree p = DECL_ARGUMENTS (current_function_decl);
5764 p != parm; p = DECL_CHAIN (p))
5766 i++;
5767 /* Ignore static chain. */
5768 if (!p)
5769 return false;
5772 clone_info *cinfo = clone_info::get (cnode);
5773 if (cinfo && cinfo->param_adjustments)
5775 i = cinfo->param_adjustments->get_original_index (i);
5776 if (i < 0)
5777 return false;
5780 vec<ipa_bits *, va_gc> &bits = *ts->bits;
5781 if (!bits[i])
5782 return false;
5783 *mask = bits[i]->mask;
5784 *value = wide_int_to_tree (TREE_TYPE (parm), bits[i]->value);
5785 return true;
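/* Usage sketch (assumed consumer, e.g. a bit-level CCP-style pass):

     tree value;
     widest_int mask;
     if (ipcp_get_parm_bits (parm, &value, &mask))
       {
         // Bits set in MASK are unknown; the remaining bits of PARM are
         // known to match VALUE on entry to the function.
       }  */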
5789 /* Update bits info of formal parameters as described in
5790 ipcp_transformation. */
static void
ipcp_update_bits (struct cgraph_node *node)
{
  ipcp_transformation *ts = ipcp_get_transformation_summary (node);

  if (!ts || vec_safe_length (ts->bits) == 0)
    return;
  vec<ipa_bits *, va_gc> &bits = *ts->bits;
  unsigned count = bits.length ();
  if (!count)
    return;

  auto_vec<int, 16> new_indices;
  bool need_remapping = false;
  clone_info *cinfo = clone_info::get (node);
  if (cinfo && cinfo->param_adjustments)
    {
      cinfo->param_adjustments->get_updated_indices (&new_indices);
      need_remapping = true;
    }
  auto_vec <tree, 16> parm_decls;
  push_function_arg_decls (&parm_decls, node->decl);

  for (unsigned i = 0; i < count; ++i)
    {
      tree parm;
      if (need_remapping)
	{
	  if (i >= new_indices.length ())
	    continue;
	  int idx = new_indices[i];
	  if (idx < 0)
	    continue;
	  parm = parm_decls[idx];
	}
      else
	parm = parm_decls[i];
      gcc_checking_assert (parm);

      if (!bits[i]
	  || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
	       || POINTER_TYPE_P (TREE_TYPE (parm)))
	  || !is_gimple_reg (parm))
	continue;

      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
      if (!ddef)
	continue;

      if (dump_file)
	{
	  fprintf (dump_file, "Adjusting mask for param %u to ", i);
	  print_hex (bits[i]->mask, dump_file);
	  fprintf (dump_file, "\n");
	}

      if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	{
	  unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
	  signop sgn = TYPE_SIGN (TREE_TYPE (ddef));

	  wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
				  | wide_int::from (bits[i]->value, prec, sgn);
	  set_nonzero_bits (ddef, nonzero_bits);
	}
      else
	{
	  unsigned tem = bits[i]->mask.to_uhwi ();
	  unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
	  unsigned align = tem & -tem;
	  unsigned misalign = bitpos & (align - 1);

	  if (align > 1)
	    {
	      if (dump_file)
		fprintf (dump_file, "Adjusting align: %u, misalign: %u\n",
			 align, misalign);

	      unsigned old_align, old_misalign;
	      struct ptr_info_def *pi = get_ptr_info (ddef);
	      bool old_known = get_ptr_info_alignment (pi, &old_align,
						       &old_misalign);

	      if (old_known
		  && old_align > align)
		{
		  if (dump_file)
		    {
		      fprintf (dump_file, "But alignment was already %u.\n",
			       old_align);
		      if ((old_misalign & (align - 1)) != misalign)
			fprintf (dump_file,
				 "old_misalign (%u) and misalign (%u) "
				 "mismatch\n", old_misalign, misalign);
		    }
		  continue;
		}

	      if (old_known
		  && ((misalign & (old_align - 1)) != old_misalign)
		  && dump_file)
		fprintf (dump_file,
			 "old_misalign (%u) and misalign (%u) mismatch\n",
			 old_misalign, misalign);

	      set_ptr_info_alignment (pi, align, misalign);
	    }
	}
    }
}

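/* Return true if the recorded range, interpreted in EXPR_TYPE, guarantees a
   nonzero value: either an anti-range excluding exactly [0, 0], or, for an
   unsigned type, the range [1, maximum value].  */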
bool
ipa_vr::nonzero_p (tree expr_type) const
{
  if (type == VR_ANTI_RANGE && wi::eq_p (min, 0) && wi::eq_p (max, 0))
    return true;

  unsigned prec = TYPE_PRECISION (expr_type);
  return (type == VR_RANGE
	  && TYPE_UNSIGNED (expr_type)
	  && wi::eq_p (min, wi::one (prec))
	  && wi::eq_p (max, wi::max_value (prec, TYPE_SIGN (expr_type))));
}

/* Update value range of formal parameters as described in
   ipcp_transformation.  */
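/* As in ipcp_update_bits above, the indices recorded in the summary refer to
   the parameter list of the original function, so for clones they are first
   remapped through param_adjustments before the range is applied to the
   parameter's default SSA definition.  */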
static void
ipcp_update_vr (struct cgraph_node *node)
{
  ipcp_transformation *ts = ipcp_get_transformation_summary (node);
  if (!ts || vec_safe_length (ts->m_vr) == 0)
    return;
  const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
  unsigned count = vr.length ();
  if (!count)
    return;

  auto_vec<int, 16> new_indices;
  bool need_remapping = false;
  clone_info *cinfo = clone_info::get (node);
  if (cinfo && cinfo->param_adjustments)
    {
      cinfo->param_adjustments->get_updated_indices (&new_indices);
      need_remapping = true;
    }
  auto_vec <tree, 16> parm_decls;
  push_function_arg_decls (&parm_decls, node->decl);

  for (unsigned i = 0; i < count; ++i)
    {
      tree parm;
      int remapped_idx;
      if (need_remapping)
	{
	  if (i >= new_indices.length ())
	    continue;
	  remapped_idx = new_indices[i];
	  if (remapped_idx < 0)
	    continue;
	}
      else
	remapped_idx = i;

      parm = parm_decls[remapped_idx];

      gcc_checking_assert (parm);
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);

      if (!ddef || !is_gimple_reg (parm))
	continue;

      if (vr[i].known
	  && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
	{
	  tree type = TREE_TYPE (ddef);
	  unsigned prec = TYPE_PRECISION (type);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "Setting value range of param %u "
			   "(now %i) ", i, remapped_idx);
		  fprintf (dump_file, "%s[",
			   (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
		  print_decs (vr[i].min, dump_file);
		  fprintf (dump_file, ", ");
		  print_decs (vr[i].max, dump_file);
		  fprintf (dump_file, "]\n");
		}
	      set_range_info (ddef, vr[i].type,
			      wide_int_storage::from (vr[i].min, prec,
						      TYPE_SIGN (type)),
			      wide_int_storage::from (vr[i].max, prec,
						      TYPE_SIGN (type)));
	    }
	  else if (POINTER_TYPE_P (TREE_TYPE (ddef))
		   && vr[i].nonzero_p (TREE_TYPE (ddef)))
	    {
	      if (dump_file)
		fprintf (dump_file, "Setting nonnull for %u\n", i);
	      set_ptr_nonnull (ddef);
	    }
	}
    }
}

/* IPCP transformation phase doing propagation of aggregate values.  */
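/* This entry point first moves the bits and value-range information recorded
   for NODE onto the default definitions of its parameters (ipcp_update_bits
   and ipcp_update_vr above) and then, if aggregate replacement values exist,
   walks the body replacing loads from aggregate parameters with the recorded
   constants.  */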
unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
  struct ipa_func_body_info fbi;
  struct ipa_agg_replacement_value *aggval;
  int param_count;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s\n",
	     node->dump_name ());

  ipcp_update_bits (node);
  ipcp_update_vr (node);
  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  vec_safe_grow_cleared (descriptors, param_count, true);
  ipa_populate_param_decls (node, *descriptors);
  std::pair<bool, bool> rr
    = adjust_agg_replacement_values (node, aggval, *descriptors);
  bool cfg_changed = rr.second;
  if (!rr.first)
    {
      vec_free (descriptors);
      if (dump_file)
	fprintf (dump_file, "  All affected aggregate parameters were either "
		 "removed or converted into scalars, phase done.\n");
      if (cfg_changed)
	delete_unreachable_blocks_update_callgraph (node, false);
      return 0;
    }
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);

  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
  fbi.param_count = param_count;
  fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);

  bool modified_mem_access = false;
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker walker (&fbi, descriptors, aggval,
				&modified_mem_access);
  walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  free_dominance_info (CDI_DOMINATORS);
  cfg_changed |= walker.cleanup_eh ();

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();

  ipcp_transformation *s = ipcp_transformation_sum->get (node);
  s->agg_values = NULL;
  s->bits = NULL;
  s->m_vr = NULL;

  vec_free (descriptors);
  if (cfg_changed)
    delete_unreachable_blocks_update_callgraph (node, false);

  return modified_mem_access ? TODO_update_ssa_only_virtuals : 0;
}

/* Return true if OTHER describes the same agg value.  */
bool
ipa_agg_value::equal_to (const ipa_agg_value &other)
{
  return offset == other.offset
	 && operand_equal_p (value, other.value, 0);
}

/* Destructor also removing individual aggregate values.  */

ipa_auto_call_arg_values::~ipa_auto_call_arg_values ()
{
  ipa_release_agg_values (m_known_aggs, false);
}


#include "gt-ipa-prop.h"