gcc/ipa-prop.c
/* Interprocedural analyses.
   Copyright (C) 2005-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "calls.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "gimple-pretty-print.h"
#include "params.h"
#include "ipa-utils.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"
/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;

/* Vector of IPA-CP transformation data for each clone.  */
vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;

/* Vector where the parameter infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;
/* Traits for a hash table for reusing already existing ipa_bits.  */

struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
{
  typedef ipa_bits *value_type;
  typedef ipa_bits *compare_type;
  static hashval_t
  hash (const ipa_bits *p)
  {
    hashval_t t = (hashval_t) p->value.to_shwi ();
    return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
  }
  static bool
  equal (const ipa_bits *a, const ipa_bits *b)
  {
    return a->value == b->value && a->mask == b->mask;
  }
  static void
  mark_empty (ipa_bits *&p)
  {
    p = NULL;
  }
  static bool
  is_empty (const ipa_bits *p)
  {
    return p == NULL;
  }
  static bool
  is_deleted (const ipa_bits *p)
  {
    return p == reinterpret_cast<const ipa_bits *> (1);
  }
  static void
  mark_deleted (ipa_bits *&p)
  {
    p = reinterpret_cast<ipa_bits *> (1);
  }
};
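/* For illustration: because equal () above compares the value/mask pairs,
   interning through the table below means that two different jump functions
   whose arguments are known to carry, say, the same value 0x4 under the same
   mask end up pointing at one shared GC-allocated ipa_bits record rather
   than each holding its own copy.  */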
/* Hash table for avoiding repeated allocations of equal ipa_bits.  */
static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
/* Traits for a hash table for reusing value_ranges used for IPA.  Note that
   the equiv bitmap is not hashed and is expected to be NULL.  */

struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range *>
{
  typedef value_range *value_type;
  typedef value_range *compare_type;
  static hashval_t
  hash (const value_range *p)
  {
    gcc_checking_assert (!p->equiv);
    hashval_t t = (hashval_t) p->type;
    t = iterative_hash_expr (p->min, t);
    return iterative_hash_expr (p->max, t);
  }
  static bool
  equal (const value_range *a, const value_range *b)
  {
    return a->type == b->type && a->min == b->min && a->max == b->max;
  }
  static void
  mark_empty (value_range *&p)
  {
    p = NULL;
  }
  static bool
  is_empty (const value_range *p)
  {
    return p == NULL;
  }
  static bool
  is_deleted (const value_range *p)
  {
    return p == reinterpret_cast<const value_range *> (1);
  }
  static void
  mark_deleted (value_range *&p)
  {
    p = reinterpret_cast<value_range *> (1);
  }
};
/* Hash table for avoiding repeated allocations of equal value_ranges.  */
static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize)
	 || !opt_for_fn (node->decl, flag_ipa_cp);
}
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
			    tree ptree)
{
  int i, count;

  count = vec_safe_length (descriptors);
  for (i = 0; i < count; i++)
    if ((*descriptors)[i].decl_or_type == ptree)
      return i;

  return -1;
}
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor, va_gc> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl_or_type = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
							     true);
      param_num++;
    }
}
/* Return how many formal parameters FNDECL has.  */

static int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}
/* Dump the Ith formal parameter of the function corresponding to INFO to
   FILE.  Note there is no setter function as the descriptor array is built
   just once using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if ((*info->descriptors)[i].decl_or_type)
    {
      fprintf (file, " ");
      print_generic_expr (file, (*info->descriptors)[i].decl_or_type, 0);
    }
}
/* If necessary, allocate vector of parameter descriptors in info of NODE.
   Return true if they were allocated, false if not.  */

static bool
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors && param_count)
    {
      vec_safe_grow_cleared (info->descriptors, param_count);
      return true;
    }
  else
    return false;
}
/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors
      && ipa_alloc_node_params (node, count_formal_params (node->decl)))
    ipa_populate_param_decls (node, *info->descriptors);
}
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, " param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val, 0);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)), 0);
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name(jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f,
				  jump_func->value.pass_through.operand, 0);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, " Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value, 0);
		}
	      fprintf (f, "\n");
	    }
	}

      struct ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, " Context: ");
	  ctx->dump (dump_file);
	}

      if (jump_func->bits)
	{
	  fprintf (f, " value: ");
	  print_hex (jump_func->bits->value, f);
	  fprintf (f, ", mask: ");
	  print_hex (jump_func->bits->mask, f);
	  fprintf (f, "\n");
	}
      else
	fprintf (f, " Unknown bits\n");

      if (jump_func->m_vr)
	{
	  fprintf (f, " VR ");
	  fprintf (f, "%s[",
		   (jump_func->m_vr->type == VR_ANTI_RANGE) ? "~" : "");
	  print_decs (jump_func->m_vr->min, f);
	  fprintf (f, ", ");
	  print_decs (jump_func->m_vr->max, f);
	  fprintf (f, "]\n");
	}
      else
	fprintf (f, " Unknown VR\n");
    }
}
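/* For illustration, the function above prints one entry per argument,
   roughly of the form

     param 0: CONST: 4
     param 1: PASS THROUGH: 0, op nop_expr, agg_preserved

   followed, where known, by aggregate contents, known bits and a value
   range for that argument.  */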
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
	   node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, " callsite %s/%i -> %s/%i : \n",
	       xstrdup_for_dump (node->name ()), node->order,
	       xstrdup_for_dump (cs->callee->name ()),
	       cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, " indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by_value");
      else
	fprintf (f, " indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}
/* Set JFUNC to be a know-nothing (IPA_JF_UNKNOWN) jump function.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
  jfunc->bits = NULL;
  jfunc->m_vr = NULL;
}
/* Set DST to be a copy of another jump function SRC (to be used by jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}
/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
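/* For instance, when the address of a function is passed as an argument, as
   in bar (&foo), the reference description allocated above records which
   edge took the reference, so that later IPA passes can keep track of the
   copies of the reference that exist in IPA structures.  */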
/* Set JFUNC to be a simple pass-through jump function.  */
static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}
/* Set JFUNC to be a unary pass through jump function.  */

static void
ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}
/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}
/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */
static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
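/* For illustration, a typical statement this predicate must conservatively
   answer true for is the store a constructor makes to the vtable pointer,
   which in gimple dumps looks roughly like

     this_1->_vptr.A = &_ZTV1A + 16;

   whereas an ordinary scalar store to a non-virtual COMPONENT_REF is
   rejected by the DECL_VIRTUAL_P check above.  */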
/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type may change
   between the beginning of the function and the point where CALL is invoked.

   Generally functions are not allowed to change the type of such instances,
   but they can call destructors.  We assume that methods cannot destroy the
   THIS pointer.  Also, as a special case, constructors and destructors may
   change the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     such changes require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inlined cdtor is actually working on ARG, but we do not have
     an easy way to do that, so punt on all non-pure cdtors.
     We may also record the types of cdtors and, once we know the type
     of the instance, match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call);
	       block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gcall *call, struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
						call, jfunc, offset);
}
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
						call, jfunc, 0);
}
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}
/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct ipa_func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
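/* The dominator-based initialization above is what makes the per-BB caching
   effective: if a parameter was already proved modified in a block that
   dominates BB, the queries in BB start from that verdict instead of
   repeating the alias-oracle walk from scratch.  */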
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but which does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  if (TREE_READONLY (base))
    return true;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
			    vec<ipa_param_descriptor, va_gc> *descriptors,
			    gimple *stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
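/* For instance, given a non-register parameter 'a', the statement

     a.0_2 = a;

   is such a load: its single RHS is the PARM_DECL itself, so if the alias
   walk confirms 'a' has not been written to before this point, the index
   of 'a' is returned.  */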
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
/* Return true if we can prove that OP is a memory reference loading
   data from an aggregate passed as a parameter.

   The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it
   returns false if it cannot prove that the value has not been modified
   before the load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will
   return true even if it cannot prove the value has not been modified, in
   that case it will store false to *GUARANTEED_UNMODIFIED, otherwise it will
   store true there.

   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor, va_gc> *descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
			bool *by_ref_p, bool *guaranteed_unmodified)
{
  int index;
  HOST_WIDE_INT size, max_size;
  bool reverse;
  tree base
    = get_ref_base_and_extent (op, offset_p, &size, &max_size, &reverse);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  if (guaranteed_unmodified)
	    *guaranteed_unmodified = true;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	   void (*<T2e4>) (struct S *) D.1867;
	   struct S * p.1;

	   <bb 2>:
	   p.1_1 = p;
	   D.1867_2 = p.1_1->f;
	   D.1867_2 ();
	   gdp = &p;
	 */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0)
    {
      bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
      if (!data_preserved && !guaranteed_unmodified)
	return false;

      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      if (guaranteed_unmodified)
	*guaranteed_unmodified = data_preserved;
      return true;
    }
  return false;
}
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
        int a.0;

        a.0_2 = a;
        bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function. E.g.

      foo (int a)
      {
        int D.2064;

        D.2064_4 = a.1(D) + 4;
        bar (D.2064_4);

   This case can also occur in combination of the previous one, e.g.:

      foo (int a, int z)
      {
        int a.0;
        int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters in different stages
   of IPA optimizations.  PARMS_AINFO contains the information that is only
   needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      switch (gimple_assign_rhs_class (stmt))
	{
	case GIMPLE_BINARY_RHS:
	  {
	    tree op2 = gimple_assign_rhs2 (stmt);
	    if (!is_gimple_ip_invariant (op2)
		|| ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
		     != tcc_comparison)
		    && !useless_type_conversion_p (TREE_TYPE (name),
						   TREE_TYPE (op1))))
	      return;

	    ipa_set_jf_arith_pass_through (jfunc, index, op2,
					   gimple_assign_rhs_code (stmt));
	    break;
	  }
	case GIMPLE_SINGLE_RHS:
	  {
	    bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
						       tc_ssa);
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	    break;
	  }
	case GIMPLE_UNARY_RHS:
	  if (is_gimple_assign (stmt)
	      && gimple_assign_rhs_class (stmt) == GIMPLE_UNARY_RHS
	      && ! CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
	    ipa_set_jf_unary_pass_through (jfunc, index,
					   gimple_assign_rhs_code (stmt));
	default:;
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size, &reverse);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size, &reverse);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

   if (obj_2(D) != 0B)
     goto <bb 3>;
   else
     goto <bb 4>;

   <bb 3>:
   iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
   # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
   D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
   return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}
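/* For example, given the chain

     tmp_2 = 16;
     x_3 = tmp_2;

   passing x_3 to this function yields the constant 16, because each
   single-rhs copy is followed until a non-copy definition is reached.  */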
/* Simple linked list, describing known contents of an aggregate before a
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or if such an
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
				HOST_WIDE_INT lhs_offset,
				HOST_WIDE_INT lhs_size,
				bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
	return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	/* We already know this value is subsequently overwritten with
	   something else.  */
	*already_there = true;
      else
	/* Otherwise this is a partial overlap which we cannot
	   represent.  */
	return NULL;
    }
  return p;
}
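/* The list is kept sorted by offset.  For instance, with existing entries at
   offsets 0 and 64 (each of size 32), a new entry at offset 32 is placed
   between them, a new entry at offset 16 makes the function return NULL (a
   partial overlap with the entry at offset 0), and a new entry at offset 0
   with size 32 sets *ALREADY_THERE instead.  */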
/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int const_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
	{
	  struct ipa_agg_jf_item item;
	  item.offset = list->offset - arg_offset;
	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	  item.value = unshare_expr_without_location (list->constant);
	  jfunc->agg.items->quick_push (item);
	}
      list = list->next;
    }
}
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */

static void
determine_locally_known_aggregate_parts (gcall *call, tree arg,
					 tree arg_type,
					 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  if (PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS) == 0)
    return;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  HOST_WIDE_INT arg_max_size;
	  bool reverse;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					      &arg_max_size, &reverse);
	  if (arg_max_size == -1
	      || arg_max_size != arg_size
	      || arg_offset < 0)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;
      bool reverse;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					  &arg_max_size, &reverse);
      if (arg_max_size == -1
	  || arg_max_size != arg_size
	  || arg_offset < 0)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
     structures describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple *stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;
      bool reverse;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
	continue;
      if (!gimple_assign_single_p (stmt))
	break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
	  || TREE_CODE (lhs) == BIT_FIELD_REF
	  || contains_bitfld_component_ref_p (lhs))
	break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
					  &lhs_max_size, &reverse);
      if (lhs_max_size == -1
	  || lhs_max_size != lhs_size)
	break;

      if (check_ref)
	{
	  if (TREE_CODE (lhs_base) != MEM_REF
	      || TREE_OPERAND (lhs_base, 0) != arg_base
	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	    break;
	}
      else if (lhs_base != arg_base)
	{
	  if (DECL_P (lhs_base))
	    continue;
	  else
	    break;
	}

      bool already_there = false;
      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
					  &already_there);
      if (!p)
	break;
      if (already_there)
	continue;

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
	{
	  n->constant = rhs;
	  const_count++;
	}
      else
	n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}
/* Return the Ith param type of callee associated with call graph
   edge E.  */

tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
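/* Note the two-step lookup above: TYPE_ARG_TYPES of the callee's type is
   consulted first, and DECL_ARGUMENTS of the callee decl serves as a
   fallback for cases where the prototype list is missing or too short,
   such as K&R-style definitions.  */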
/* Return ipa_bits with VALUE and MASK values, which can be either a newly
   allocated structure or a previously existing one shared with other jump
   functions and/or transformation summaries.  */

ipa_bits *
ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
{
  ipa_bits tmp;
  tmp.value = value;
  tmp.mask = mask;

  ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
  if (*slot)
    return *slot;

  ipa_bits *res = ggc_alloc<ipa_bits> ();
  res->value = value;
  res->mask = mask;
  *slot = res;

  return res;
}
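/* A typical use is interning the bits implied by pointer alignment; a caller
   knowing a pointer to be 8-byte aligned could do roughly

     ipa_bits *b = ipa_get_ipa_bits_for_value (0, ~(widest_int) 7);

   and repeated requests with the same value/mask pair return the same
   GC-allocated record.  (Sketch only; real callers below compute the mask
   from get_pointer_alignment_1.)  */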
/* Assign to JF a pointer to ipa_bits structure with VALUE and MASK.  Use hash
   table in order to avoid creating multiple same ipa_bits structures.  */

static void
ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
		    const widest_int &mask)
{
  jf->bits = ipa_get_ipa_bits_for_value (value, mask);
}
/* Return a pointer to a value_range just like *TMP, but either find it in
   ipa_vr_hash_table or allocate it in GC memory.  TMP->equiv must be NULL.  */

static value_range *
ipa_get_value_range (value_range *tmp)
{
  value_range **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
  if (*slot)
    return *slot;

  value_range *vr = ggc_alloc<value_range> ();
  *vr = *tmp;
  *slot = vr;

  return vr;
}
/* Return a pointer to a value range consisting of TYPE, MIN, MAX and an empty
   equiv set.  Use hash table in order to avoid creating multiple same copies of
   value_ranges.  */

static value_range *
ipa_get_value_range (enum value_range_type type, tree min, tree max)
{
  value_range tmp;
  tmp.type = type;
  tmp.min = min;
  tmp.max = max;
  tmp.equiv = NULL;
  return ipa_get_value_range (&tmp);
}
/* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
   a NULL equiv bitmap.  Use hash table in order to avoid creating multiple
   same value_range structures.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_type type,
		  tree min, tree max)
{
  jf->m_vr = ipa_get_value_range (type, min, max);
}
/* Assign to JF a pointer to a value_range just like TMP but either fetch a
   copy from ipa_vr_hash_table or allocate a new one in GC memory.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, value_range *tmp)
{
  jf->m_vr = ipa_get_value_range (tmp);
}
1835 /* Compute jump function for all arguments of callsite CS and insert the
1836 information in the jump_functions array in the ipa_edge_args corresponding
1837 to this callsite. */
1839 static void
1840 ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
1841 struct cgraph_edge *cs)
1843 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1844 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1845 gcall *call = cs->call_stmt;
1846 int n, arg_num = gimple_call_num_args (call);
1847 bool useful_context = false;
1849 if (arg_num == 0 || args->jump_functions)
1850 return;
1851 vec_safe_grow_cleared (args->jump_functions, arg_num);
1852 if (flag_devirtualize)
1853 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
1855 if (gimple_call_internal_p (call))
1856 return;
1857 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1858 return;
1860 for (n = 0; n < arg_num; n++)
1862 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1863 tree arg = gimple_call_arg (call, n);
1864 tree param_type = ipa_get_callee_param_type (cs, n);
1865 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
1867 tree instance;
1868 struct ipa_polymorphic_call_context context (cs->caller->decl,
1869 arg, cs->call_stmt,
1870 &instance);
1871 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
1872 *ipa_get_ith_polymorhic_call_context (args, n) = context;
1873 if (!context.useless_p ())
1874 useful_context = true;
1877 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1879 bool addr_nonzero = false;
1880 bool strict_overflow = false;
1882 if (TREE_CODE (arg) == SSA_NAME
1883 && param_type
1884 && get_ptr_nonnull (arg))
1885 addr_nonzero = true;
1886 else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
1887 addr_nonzero = true;
1889 if (addr_nonzero)
1891 tree z = build_int_cst (TREE_TYPE (arg), 0);
1892 ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
1894 else
1895 gcc_assert (!jfunc->m_vr);
1897 else
1899 wide_int min, max;
1900 value_range_type type;
1901 if (TREE_CODE (arg) == SSA_NAME
1902 && param_type
1903 && (type = get_range_info (arg, &min, &max))
1904 && (type == VR_RANGE || type == VR_ANTI_RANGE))
1906 value_range tmpvr,resvr;
1908 tmpvr.type = type;
1909 tmpvr.min = wide_int_to_tree (TREE_TYPE (arg), min);
1910 tmpvr.max = wide_int_to_tree (TREE_TYPE (arg), max);
1911 tmpvr.equiv = NULL;
1912 memset (&resvr, 0, sizeof (resvr));
1913 extract_range_from_unary_expr (&resvr, NOP_EXPR, param_type,
1914 &tmpvr, TREE_TYPE (arg));
1915 if (resvr.type == VR_RANGE || resvr.type == VR_ANTI_RANGE)
1916 ipa_set_jfunc_vr (jfunc, &resvr);
1917 else
1918 gcc_assert (!jfunc->m_vr);
1920 else
1921 gcc_assert (!jfunc->m_vr);
1924 if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
1925 && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
1927 if (TREE_CODE (arg) == SSA_NAME)
1928 ipa_set_jfunc_bits (jfunc, 0,
1929 widest_int::from (get_nonzero_bits (arg),
1930 TYPE_SIGN (TREE_TYPE (arg))));
1931 else
1932 ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
1934 else if (POINTER_TYPE_P (TREE_TYPE (arg)))
1936 unsigned HOST_WIDE_INT bitpos;
1937 unsigned align;
1939 get_pointer_alignment_1 (arg, &align, &bitpos);
1940 widest_int mask
1941 = wi::mask<widest_int>(TYPE_PRECISION (TREE_TYPE (arg)), false)
1942 .and_not (align / BITS_PER_UNIT - 1);
1943 widest_int value = bitpos / BITS_PER_UNIT;
1944 ipa_set_jfunc_bits (jfunc, value, mask);
1946 else
1947 gcc_assert (!jfunc->bits);
1949 if (is_gimple_ip_invariant (arg)
1950 || (VAR_P (arg)
1951 && is_global_var (arg)
1952 && TREE_READONLY (arg)))
1953 ipa_set_jf_constant (jfunc, arg, cs);
1954 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1955 && TREE_CODE (arg) == PARM_DECL)
1957 int index = ipa_get_param_decl_index (info, arg);
1959 gcc_assert (index >= 0);
1960 /* Aggregate passed by value, check for pass-through, otherwise we
1961 will attempt to fill in aggregate contents later in this
1962 loop. */
1963 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1965 ipa_set_jf_simple_pass_through (jfunc, index, false);
1966 continue;
1969 else if (TREE_CODE (arg) == SSA_NAME)
1971 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1973 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1974 if (index >= 0)
1976 bool agg_p;
1977 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1978 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1981 else
1983 gimple *stmt = SSA_NAME_DEF_STMT (arg);
1984 if (is_gimple_assign (stmt))
1985 compute_complex_assign_jump_func (fbi, info, jfunc,
1986 call, stmt, arg, param_type);
1987 else if (gimple_code (stmt) == GIMPLE_PHI)
1988 compute_complex_ancestor_jump_func (fbi, info, jfunc,
1989 call,
1990 as_a <gphi *> (stmt));
1994 /* If ARG is a pointer, we cannot use its type to determine the type of the
1995 aggregate passed (because type conversions are ignored in gimple). Usually
1996 we can safely get the type from the function declaration, but in the case of
1997 K&R prototypes or variadic functions we can try our luck with the type of
1998 the pointer passed. TODO: Since we look for actual initialization of the
1999 memory object, we may do better to work out the type based on the memory stores we find. */
2000 if (!param_type)
2001 param_type = TREE_TYPE (arg);
2003 if ((jfunc->type != IPA_JF_PASS_THROUGH
2004 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
2005 && (jfunc->type != IPA_JF_ANCESTOR
2006 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
2007 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
2008 || POINTER_TYPE_P (param_type)))
2009 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
2011 if (!useful_context)
2012 vec_free (args->polymorphic_call_contexts);
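/* A worked example for the pointer-alignment case above, with hypothetical
   numbers: on a target with 32-bit pointers, if get_pointer_alignment_1
   reports align == 64 (bits) and bitpos == 16, the argument is known to be
   8-byte aligned plus 2 bytes, so

     mask  = 0xffffffff & ~(64 / 8 - 1)  ==  0xfffffff8
     value = 16 / 8                      ==  2

   i.e. the three low bits are known (clear in MASK) and hold 0b010, which
   is exactly what ipa_set_jfunc_bits records for the argument.  */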
2015 /* Compute jump functions for all edges - both direct and indirect - outgoing
2016 from BB. */
2018 static void
2019 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
2021 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2022 int i;
2023 struct cgraph_edge *cs;
2025 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
2027 struct cgraph_node *callee = cs->callee;
2029 if (callee)
2031 callee->ultimate_alias_target ();
2032 /* We do not need to bother analyzing calls to unknown functions
2033 unless they may become known during lto/whopr. */
2034 if (!callee->definition && !flag_lto)
2035 continue;
2037 ipa_compute_jump_functions_for_edge (fbi, cs);
2041 /* If STMT looks like a statement loading a value from a member pointer formal
2042 parameter, return that parameter and store the offset of the field to
2043 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2044 might be clobbered). If USE_DELTA, then we look for a use of the delta
2045 field rather than the pfn. */
2047 static tree
2048 ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
2049 HOST_WIDE_INT *offset_p)
2051 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2053 if (!gimple_assign_single_p (stmt))
2054 return NULL_TREE;
2056 rhs = gimple_assign_rhs1 (stmt);
2057 if (TREE_CODE (rhs) == COMPONENT_REF)
2059 ref_field = TREE_OPERAND (rhs, 1);
2060 rhs = TREE_OPERAND (rhs, 0);
2062 else
2063 ref_field = NULL_TREE;
2064 if (TREE_CODE (rhs) != MEM_REF)
2065 return NULL_TREE;
2066 rec = TREE_OPERAND (rhs, 0);
2067 if (TREE_CODE (rec) != ADDR_EXPR)
2068 return NULL_TREE;
2069 rec = TREE_OPERAND (rec, 0);
2070 if (TREE_CODE (rec) != PARM_DECL
2071 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2072 return NULL_TREE;
2073 ref_offset = TREE_OPERAND (rhs, 1);
2075 if (use_delta)
2076 fld = delta_field;
2077 else
2078 fld = ptr_field;
2079 if (offset_p)
2080 *offset_p = int_bit_position (fld);
2082 if (ref_field)
2084 if (integer_nonzerop (ref_offset))
2085 return NULL_TREE;
2086 return ref_field == fld ? rec : NULL_TREE;
2088 else
2089 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2090 : NULL_TREE;
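/* Illustrative sketch of the two load shapes matched above, for a member
   pointer parameter F with fields __pfn and __delta:

     tmp = MEM[(struct s *)&f].__pfn;    <- COMPONENT_REF over a MEM_REF,
                                            matched when REF_FIELD == FLD
                                            and the MEM_REF offset is zero
     tmp = MEM[(struct s *)&f + 4B];     <- bare MEM_REF, matched when its
                                            offset equals byte_position (FLD)

   With USE_DELTA set, the same matching is done against the __delta field
   instead of __pfn.  */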
2093 /* Returns true iff T is an SSA_NAME defined by a statement. */
2095 static bool
2096 ipa_is_ssa_with_stmt_def (tree t)
2098 if (TREE_CODE (t) == SSA_NAME
2099 && !SSA_NAME_IS_DEFAULT_DEF (t))
2100 return true;
2101 else
2102 return false;
2105 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2106 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2107 indirect call graph edge. */
2109 static struct cgraph_edge *
2110 ipa_note_param_call (struct cgraph_node *node, int param_index,
2111 gcall *stmt)
2113 struct cgraph_edge *cs;
2115 cs = node->get_edge (stmt);
2116 cs->indirect_info->param_index = param_index;
2117 cs->indirect_info->agg_contents = 0;
2118 cs->indirect_info->member_ptr = 0;
2119 cs->indirect_info->guaranteed_unmodified = 0;
2120 return cs;
2123 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
2124 (described by INFO). PARMS_AINFO is a pointer to a vector containing
2125 intermediate information about each formal parameter. Currently it checks
2126 whether the call is made through a pointer that is a formal parameter and if so, the
2127 parameter is marked with the called flag and an indirect call graph edge
2128 describing the call is created. This is very simple for ordinary pointers
2129 represented in SSA but not-so-nice when it comes to member pointers. The
2130 ugly part of this function does nothing more than trying to match the
2131 pattern of such a call. An example of such a pattern is the gimple dump
2132 below, the call is on the last line:
2134 <bb 2>:
2135 f$__delta_5 = f.__delta;
2136 f$__pfn_24 = f.__pfn;
2138 or
2139 <bb 2>:
2140 f$__delta_5 = MEM[(struct *)&f];
2141 f$__pfn_24 = MEM[(struct *)&f + 4B];
2143 and a few lines below:
2145 <bb 5>
2146 D.2496_3 = (int) f$__pfn_24;
2147 D.2497_4 = D.2496_3 & 1;
2148 if (D.2497_4 != 0)
2149 goto <bb 3>;
2150 else
2151 goto <bb 4>;
2153 <bb 6>:
2154 D.2500_7 = (unsigned int) f$__delta_5;
2155 D.2501_8 = &S + D.2500_7;
2156 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2157 D.2503_10 = *D.2502_9;
2158 D.2504_12 = f$__pfn_24 + -1;
2159 D.2505_13 = (unsigned int) D.2504_12;
2160 D.2506_14 = D.2503_10 + D.2505_13;
2161 D.2507_15 = *D.2506_14;
2162 iftmp.11_16 = (String:: *) D.2507_15;
2164 <bb 7>:
2165 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2166 D.2500_19 = (unsigned int) f$__delta_5;
2167 D.2508_20 = &S + D.2500_19;
2168 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2170 Such patterns are results of simple calls to a member pointer:
2172 int doprinting (int (MyString::* f)(int) const)
2174 MyString S ("somestring");
2176 return (S.*f)(4);
2179 The function also looks for called pointers loaded from aggregates
2180 passed by value or reference. */
2182 static void
2183 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
2184 tree target)
2186 struct ipa_node_params *info = fbi->info;
2187 HOST_WIDE_INT offset;
2188 bool by_ref;
2190 if (SSA_NAME_IS_DEFAULT_DEF (target))
2192 tree var = SSA_NAME_VAR (target);
2193 int index = ipa_get_param_decl_index (info, var);
2194 if (index >= 0)
2195 ipa_note_param_call (fbi->node, index, call);
2196 return;
2199 int index;
2200 gimple *def = SSA_NAME_DEF_STMT (target);
2201 bool guaranteed_unmodified;
2202 if (gimple_assign_single_p (def)
2203 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2204 gimple_assign_rhs1 (def), &index, &offset,
2205 NULL, &by_ref, &guaranteed_unmodified))
2207 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2208 cs->indirect_info->offset = offset;
2209 cs->indirect_info->agg_contents = 1;
2210 cs->indirect_info->by_ref = by_ref;
2211 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
2212 return;
2215 /* Now we need to try to match the complex pattern of calling a member
2216 pointer. */
2217 if (gimple_code (def) != GIMPLE_PHI
2218 || gimple_phi_num_args (def) != 2
2219 || !POINTER_TYPE_P (TREE_TYPE (target))
2220 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2221 return;
2223 /* First, we need to check whether one of these is a load from a member
2224 pointer that is a parameter to this function. */
2225 tree n1 = PHI_ARG_DEF (def, 0);
2226 tree n2 = PHI_ARG_DEF (def, 1);
2227 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2228 return;
2229 gimple *d1 = SSA_NAME_DEF_STMT (n1);
2230 gimple *d2 = SSA_NAME_DEF_STMT (n2);
2232 tree rec;
2233 basic_block bb, virt_bb;
2234 basic_block join = gimple_bb (def);
2235 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2237 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2238 return;
2240 bb = EDGE_PRED (join, 0)->src;
2241 virt_bb = gimple_bb (d2);
2243 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2245 bb = EDGE_PRED (join, 1)->src;
2246 virt_bb = gimple_bb (d1);
2248 else
2249 return;
2251 /* Second, we need to check that the basic blocks are laid out in the way
2252 corresponding to the pattern. */
2254 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2255 || single_pred (virt_bb) != bb
2256 || single_succ (virt_bb) != join)
2257 return;
2259 /* Third, let's see that the branching is done depending on the least
2260 significant bit of the pfn. */
2262 gimple *branch = last_stmt (bb);
2263 if (!branch || gimple_code (branch) != GIMPLE_COND)
2264 return;
2266 if ((gimple_cond_code (branch) != NE_EXPR
2267 && gimple_cond_code (branch) != EQ_EXPR)
2268 || !integer_zerop (gimple_cond_rhs (branch)))
2269 return;
2271 tree cond = gimple_cond_lhs (branch);
2272 if (!ipa_is_ssa_with_stmt_def (cond))
2273 return;
2275 def = SSA_NAME_DEF_STMT (cond);
2276 if (!is_gimple_assign (def)
2277 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2278 || !integer_onep (gimple_assign_rhs2 (def)))
2279 return;
2281 cond = gimple_assign_rhs1 (def);
2282 if (!ipa_is_ssa_with_stmt_def (cond))
2283 return;
2285 def = SSA_NAME_DEF_STMT (cond);
2287 if (is_gimple_assign (def)
2288 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2290 cond = gimple_assign_rhs1 (def);
2291 if (!ipa_is_ssa_with_stmt_def (cond))
2292 return;
2293 def = SSA_NAME_DEF_STMT (cond);
2296 tree rec2;
2297 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2298 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2299 == ptrmemfunc_vbit_in_delta),
2300 NULL);
2301 if (rec != rec2)
2302 return;
2304 index = ipa_get_param_decl_index (info, rec);
2305 if (index >= 0
2306 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2308 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2309 cs->indirect_info->offset = offset;
2310 cs->indirect_info->agg_contents = 1;
2311 cs->indirect_info->member_ptr = 1;
2312 cs->indirect_info->guaranteed_unmodified = 1;
2315 return;
2318 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2319 object referenced in the expression is a formal parameter of the caller
2320 FBI->node (described by FBI->info), create a call note for the
2321 statement. */
2323 static void
2324 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2325 gcall *call, tree target)
2327 tree obj = OBJ_TYPE_REF_OBJECT (target);
2328 int index;
2329 HOST_WIDE_INT anc_offset;
2331 if (!flag_devirtualize)
2332 return;
2334 if (TREE_CODE (obj) != SSA_NAME)
2335 return;
2337 struct ipa_node_params *info = fbi->info;
2338 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2340 struct ipa_jump_func jfunc;
2341 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2342 return;
2344 anc_offset = 0;
2345 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2346 gcc_assert (index >= 0);
2347 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2348 call, &jfunc))
2349 return;
2351 else
2353 struct ipa_jump_func jfunc;
2354 gimple *stmt = SSA_NAME_DEF_STMT (obj);
2355 tree expr;
2357 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2358 if (!expr)
2359 return;
2360 index = ipa_get_param_decl_index (info,
2361 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2362 gcc_assert (index >= 0);
2363 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2364 call, &jfunc, anc_offset))
2365 return;
2368 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2369 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2370 ii->offset = anc_offset;
2371 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2372 ii->otr_type = obj_type_ref_class (target);
2373 ii->polymorphic = 1;
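/* Illustrative example (a sketch, not an actual GCC dump): for a caller
   such as

     int f (S *s) { return s->foo (); }      <- foo is a virtual method

   the call target is an OBJ_TYPE_REF whose object is the default
   definition of parameter S, so the code above records an indirect call
   note with offset 0, the OTR token of foo and the polymorphic flag set,
   provided no intervening dynamic type change is detected.  */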
2376 /* Analyze call statement CALL to determine whether and how it utilizes formal
2377 parameters of the caller (described by INFO). PARMS_AINFO is a pointer to a
2378 vector containing intermediate information about each formal parameter. */
2380 static void
2381 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2383 tree target = gimple_call_fn (call);
2385 if (!target
2386 || (TREE_CODE (target) != SSA_NAME
2387 && !virtual_method_call_p (target)))
2388 return;
2390 struct cgraph_edge *cs = fbi->node->get_edge (call);
2391 /* If we previously turned the call into a direct call, there is
2392 no need to analyze. */
2393 if (cs && !cs->indirect_unknown_callee)
2394 return;
2396 if (cs->indirect_info->polymorphic && flag_devirtualize)
2398 tree instance;
2399 tree target = gimple_call_fn (call);
2400 ipa_polymorphic_call_context context (current_function_decl,
2401 target, call, &instance);
2403 gcc_checking_assert (cs->indirect_info->otr_type
2404 == obj_type_ref_class (target));
2405 gcc_checking_assert (cs->indirect_info->otr_token
2406 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2408 cs->indirect_info->vptr_changed
2409 = !context.get_dynamic_type (instance,
2410 OBJ_TYPE_REF_OBJECT (target),
2411 obj_type_ref_class (target), call);
2412 cs->indirect_info->context = context;
2415 if (TREE_CODE (target) == SSA_NAME)
2416 ipa_analyze_indirect_call_uses (fbi, call, target);
2417 else if (virtual_method_call_p (target))
2418 ipa_analyze_virtual_call_uses (fbi, call, target);
2422 /* Analyze the call statement STMT with respect to formal parameters (described
2423 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2424 formal parameters are called. */
2426 static void
2427 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2429 if (is_gimple_call (stmt))
2430 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2433 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2434 If OP is a parameter declaration, mark it as used in the info structure
2435 passed in DATA. */
2437 static bool
2438 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2440 struct ipa_node_params *info = (struct ipa_node_params *) data;
2442 op = get_base_address (op);
2443 if (op
2444 && TREE_CODE (op) == PARM_DECL)
2446 int index = ipa_get_param_decl_index (info, op);
2447 gcc_assert (index >= 0);
2448 ipa_set_param_used (info, index, true);
2451 return false;
2454 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2455 the findings in various structures of the associated ipa_node_params
2456 structure, such as parameter flags, notes etc. FBI holds various data about
2457 the function being analyzed. */
2459 static void
2460 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2462 gimple_stmt_iterator gsi;
2463 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2465 gimple *stmt = gsi_stmt (gsi);
2467 if (is_gimple_debug (stmt))
2468 continue;
2470 ipa_analyze_stmt_uses (fbi, stmt);
2471 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2472 visit_ref_for_mod_analysis,
2473 visit_ref_for_mod_analysis,
2474 visit_ref_for_mod_analysis);
2476 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2477 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2478 visit_ref_for_mod_analysis,
2479 visit_ref_for_mod_analysis,
2480 visit_ref_for_mod_analysis);
2483 /* Calculate controlled uses of parameters of NODE. */
2485 static void
2486 ipa_analyze_controlled_uses (struct cgraph_node *node)
2488 struct ipa_node_params *info = IPA_NODE_REF (node);
2490 for (int i = 0; i < ipa_get_param_count (info); i++)
2492 tree parm = ipa_get_param (info, i);
2493 int controlled_uses = 0;
2495 /* For SSA regs see if parameter is used. For non-SSA we compute
2496 the flag during modification analysis. */
2497 if (is_gimple_reg (parm))
2499 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2500 parm);
2501 if (ddef && !has_zero_uses (ddef))
2503 imm_use_iterator imm_iter;
2504 use_operand_p use_p;
2506 ipa_set_param_used (info, i, true);
2507 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2508 if (!is_gimple_call (USE_STMT (use_p)))
2510 if (!is_gimple_debug (USE_STMT (use_p)))
2512 controlled_uses = IPA_UNDESCRIBED_USE;
2513 break;
2516 else
2517 controlled_uses++;
2519 else
2520 controlled_uses = 0;
2522 else
2523 controlled_uses = IPA_UNDESCRIBED_USE;
2524 ipa_set_controlled_uses (info, i, controlled_uses);
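/* Illustrative examples of the resulting counts (a sketch):

     void f (int *p) { g (p); h (p); }    <- controlled uses of P == 2,
                                             every use is a call argument
     void f (int *p) { g (p); *p = 0; }   <- IPA_UNDESCRIBED_USE, because
                                             the store is not a call

   Parameters that are not gimple registers always get
   IPA_UNDESCRIBED_USE here; their used flag comes from modification
   analysis instead.  */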
2528 /* Free stuff in BI. */
2530 static void
2531 free_ipa_bb_info (struct ipa_bb_info *bi)
2533 bi->cg_edges.release ();
2534 bi->param_aa_statuses.release ();
2537 /* Dominator walker driving the analysis. */
2539 class analysis_dom_walker : public dom_walker
2541 public:
2542 analysis_dom_walker (struct ipa_func_body_info *fbi)
2543 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2545 virtual edge before_dom_children (basic_block);
2547 private:
2548 struct ipa_func_body_info *m_fbi;
2551 edge
2552 analysis_dom_walker::before_dom_children (basic_block bb)
2554 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2555 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2556 return NULL;
2559 /* Release body info FBI. */
2561 void
2562 ipa_release_body_info (struct ipa_func_body_info *fbi)
2564 int i;
2565 struct ipa_bb_info *bi;
2567 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2568 free_ipa_bb_info (bi);
2569 fbi->bb_infos.release ();
2572 /* Initialize the array describing properties of formal parameters
2573 of NODE, analyze their uses and compute jump functions associated
2574 with actual arguments of calls from within NODE. */
2576 void
2577 ipa_analyze_node (struct cgraph_node *node)
2579 struct ipa_func_body_info fbi;
2580 struct ipa_node_params *info;
2582 ipa_check_create_node_params ();
2583 ipa_check_create_edge_args ();
2584 info = IPA_NODE_REF (node);
2586 if (info->analysis_done)
2587 return;
2588 info->analysis_done = 1;
2590 if (ipa_func_spec_opts_forbid_analysis_p (node))
2592 for (int i = 0; i < ipa_get_param_count (info); i++)
2594 ipa_set_param_used (info, i, true);
2595 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2597 return;
2600 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2601 push_cfun (func);
2602 calculate_dominance_info (CDI_DOMINATORS);
2603 ipa_initialize_node_params (node);
2604 ipa_analyze_controlled_uses (node);
2606 fbi.node = node;
2607 fbi.info = IPA_NODE_REF (node);
2608 fbi.bb_infos = vNULL;
2609 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2610 fbi.param_count = ipa_get_param_count (info);
2611 fbi.aa_walked = 0;
2613 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2615 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2616 bi->cg_edges.safe_push (cs);
2619 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2621 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2622 bi->cg_edges.safe_push (cs);
2625 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2627 ipa_release_body_info (&fbi);
2628 free_dominance_info (CDI_DOMINATORS);
2629 pop_cfun ();
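/* Illustrative usage sketch: the entry point above is meant to be run once
   per function with a gimple body, e.g. from a summary-generation hook:

     cgraph_node *node;
     FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
       ipa_analyze_node (node);

   Calling it again is cheap thanks to the analysis_done early return.  */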
2632 /* Update the jump functions associated with call graph edge E when the call
2633 graph edge CS is being inlined, assuming that E->caller is already (possibly
2634 indirectly) inlined into CS->callee and that E has not been inlined. */
2636 static void
2637 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2638 struct cgraph_edge *e)
2640 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2641 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2642 int count = ipa_get_cs_argument_count (args);
2643 int i;
2645 for (i = 0; i < count; i++)
2647 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2648 struct ipa_polymorphic_call_context *dst_ctx
2649 = ipa_get_ith_polymorhic_call_context (args, i);
2651 if (dst->type == IPA_JF_ANCESTOR)
2653 struct ipa_jump_func *src;
2654 int dst_fid = dst->value.ancestor.formal_id;
2655 struct ipa_polymorphic_call_context *src_ctx
2656 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2658 /* Variable number of arguments can cause havoc if we try to access
2659 one that does not exist in the inlined edge. So make sure we
2660 don't. */
2661 if (dst_fid >= ipa_get_cs_argument_count (top))
2663 ipa_set_jf_unknown (dst);
2664 continue;
2667 src = ipa_get_ith_jump_func (top, dst_fid);
2669 if (src_ctx && !src_ctx->useless_p ())
2671 struct ipa_polymorphic_call_context ctx = *src_ctx;
2673 /* TODO: Make type preserved safe WRT contexts. */
2674 if (!ipa_get_jf_ancestor_type_preserved (dst))
2675 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2676 ctx.offset_by (dst->value.ancestor.offset);
2677 if (!ctx.useless_p ())
2679 if (!dst_ctx)
2681 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2682 count);
2683 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2686 dst_ctx->combine_with (ctx);
2690 if (src->agg.items
2691 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2693 struct ipa_agg_jf_item *item;
2694 int j;
2696 /* Currently we do not produce clobber aggregate jump functions,
2697 replace with merging when we do. */
2698 gcc_assert (!dst->agg.items);
2700 dst->agg.items = vec_safe_copy (src->agg.items);
2701 dst->agg.by_ref = src->agg.by_ref;
2702 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2703 item->offset -= dst->value.ancestor.offset;
2706 if (src->type == IPA_JF_PASS_THROUGH
2707 && src->value.pass_through.operation == NOP_EXPR)
2709 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2710 dst->value.ancestor.agg_preserved &=
2711 src->value.pass_through.agg_preserved;
2713 else if (src->type == IPA_JF_PASS_THROUGH
2714 && TREE_CODE_CLASS (src->value.pass_through.operation) == tcc_unary)
2716 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2717 dst->value.ancestor.agg_preserved = false;
2719 else if (src->type == IPA_JF_ANCESTOR)
2721 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2722 dst->value.ancestor.offset += src->value.ancestor.offset;
2723 dst->value.ancestor.agg_preserved &=
2724 src->value.ancestor.agg_preserved;
2726 else
2727 ipa_set_jf_unknown (dst);
2729 else if (dst->type == IPA_JF_PASS_THROUGH)
2731 struct ipa_jump_func *src;
2732 /* We must check range due to calls with variable number of arguments
2733 and we cannot combine jump functions with operations. */
2734 if (dst->value.pass_through.operation == NOP_EXPR
2735 && (dst->value.pass_through.formal_id
2736 < ipa_get_cs_argument_count (top)))
2738 int dst_fid = dst->value.pass_through.formal_id;
2739 src = ipa_get_ith_jump_func (top, dst_fid);
2740 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2741 struct ipa_polymorphic_call_context *src_ctx
2742 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2744 if (src_ctx && !src_ctx->useless_p ())
2746 struct ipa_polymorphic_call_context ctx = *src_ctx;
2748 /* TODO: Make type preserved safe WRT contexts. */
2749 if (!ipa_get_jf_pass_through_type_preserved (dst))
2750 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2751 if (!ctx.useless_p ())
2753 if (!dst_ctx)
2755 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2756 count);
2757 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2759 dst_ctx->combine_with (ctx);
2762 switch (src->type)
2764 case IPA_JF_UNKNOWN:
2765 ipa_set_jf_unknown (dst);
2766 break;
2767 case IPA_JF_CONST:
2768 ipa_set_jf_cst_copy (dst, src);
2769 break;
2771 case IPA_JF_PASS_THROUGH:
2773 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2774 enum tree_code operation;
2775 operation = ipa_get_jf_pass_through_operation (src);
2777 if (operation == NOP_EXPR)
2779 bool agg_p;
2780 agg_p = dst_agg_p
2781 && ipa_get_jf_pass_through_agg_preserved (src);
2782 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2784 else if (TREE_CODE_CLASS (operation) == tcc_unary)
2785 ipa_set_jf_unary_pass_through (dst, formal_id, operation);
2786 else
2788 tree operand = ipa_get_jf_pass_through_operand (src);
2789 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2790 operation);
2792 break;
2794 case IPA_JF_ANCESTOR:
2796 bool agg_p;
2797 agg_p = dst_agg_p
2798 && ipa_get_jf_ancestor_agg_preserved (src);
2799 ipa_set_ancestor_jf (dst,
2800 ipa_get_jf_ancestor_offset (src),
2801 ipa_get_jf_ancestor_formal_id (src),
2802 agg_p);
2803 break;
2805 default:
2806 gcc_unreachable ();
2809 if (src->agg.items
2810 && (dst_agg_p || !src->agg.by_ref))
2812 /* Currently we do not produce clobber aggregate jump
2813 functions, replace with merging when we do. */
2814 gcc_assert (!dst->agg.items);
2816 dst->agg.by_ref = src->agg.by_ref;
2817 dst->agg.items = vec_safe_copy (src->agg.items);
2820 else
2821 ipa_set_jf_unknown (dst);
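/* Illustrative sketch of the composition performed above.  Suppose B has
   been inlined into A, B called C, and for some argument of the B->C edge
   the jump function DST was a simple NOP_EXPR pass-through of B's
   parameter 2.  Then:

     - if A passed the constant 7 as argument 2 (SRC is IPA_JF_CONST),
       DST becomes the constant 7;
     - if argument 2 was itself a simple pass-through of A's parameter 0,
       DST becomes a pass-through of A's parameter 0;
     - if nothing is known (SRC is IPA_JF_UNKNOWN), DST becomes unknown.

   Ancestor jump functions compose analogously, with offsets added up.  */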
2826 /* If TARGET is an addr_expr of a function declaration, make it the
2827 (SPECULATIVE) destination of an indirect edge IE and return the edge.
2828 Otherwise, return NULL. */
2830 struct cgraph_edge *
2831 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2832 bool speculative)
2834 struct cgraph_node *callee;
2835 struct inline_edge_summary *es = inline_edge_summary (ie);
2836 bool unreachable = false;
2838 if (TREE_CODE (target) == ADDR_EXPR)
2839 target = TREE_OPERAND (target, 0);
2840 if (TREE_CODE (target) != FUNCTION_DECL)
2842 target = canonicalize_constructor_val (target, NULL);
2843 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2845 /* Member pointer call that goes through a VMT lookup. */
2846 if (ie->indirect_info->member_ptr
2847 /* Or if target is not an invariant expression and we do not
2848 know if it will evaluate to a function at runtime.
2849 This can happen when folding through &VAR, where &VAR
2850 is IP invariant, but VAR itself is not.
2852 TODO: Revisit this when GCC 5 is branched. It seems that
2853 member_ptr check is not needed and that we may try to fold
2854 the expression and see if VAR is readonly. */
2855 || !is_gimple_ip_invariant (target))
2857 if (dump_enabled_p ())
2859 location_t loc = gimple_location_safe (ie->call_stmt);
2860 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2861 "discovered direct call non-invariant "
2862 "%s/%i\n",
2863 ie->caller->name (), ie->caller->order);
2865 return NULL;
2869 if (dump_enabled_p ())
2871 location_t loc = gimple_location_safe (ie->call_stmt);
2872 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2873 "discovered direct call to non-function in %s/%i, "
2874 "making it __builtin_unreachable\n",
2875 ie->caller->name (), ie->caller->order);
2878 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2879 callee = cgraph_node::get_create (target);
2880 unreachable = true;
2882 else
2883 callee = cgraph_node::get (target);
2885 else
2886 callee = cgraph_node::get (target);
2888 /* Because may-edges are not explicitly represented and the vtable may be
2889 external, we may create the first reference to the object in the unit. */
2890 if (!callee || callee->global.inlined_to)
2893 /* We had better ensure we can refer to it.
2894 In the case of static functions we are out of luck, since we have
2895 already removed their bodies. In the case of public functions we may
2896 or may not introduce the reference. */
2897 if (!canonicalize_constructor_val (target, NULL)
2898 || !TREE_PUBLIC (target))
2900 if (dump_file)
2901 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2902 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2903 xstrdup_for_dump (ie->caller->name ()),
2904 ie->caller->order,
2905 xstrdup_for_dump (ie->callee->name ()),
2906 ie->callee->order);
2907 return NULL;
2909 callee = cgraph_node::get_create (target);
2912 /* If the edge is already speculated. */
2913 if (speculative && ie->speculative)
2915 struct cgraph_edge *e2;
2916 struct ipa_ref *ref;
2917 ie->speculative_call_info (e2, ie, ref);
2918 if (e2->callee->ultimate_alias_target ()
2919 != callee->ultimate_alias_target ())
2921 if (dump_file)
2922 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2923 "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
2924 xstrdup_for_dump (ie->caller->name ()),
2925 ie->caller->order,
2926 xstrdup_for_dump (callee->name ()),
2927 callee->order,
2928 xstrdup_for_dump (e2->callee->name ()),
2929 e2->callee->order);
2931 else
2933 if (dump_file)
2934 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2935 "(%s/%i -> %s/%i) this agree with previous speculation.\n",
2936 xstrdup_for_dump (ie->caller->name ()),
2937 ie->caller->order,
2938 xstrdup_for_dump (callee->name ()),
2939 callee->order);
2941 return NULL;
2944 if (!dbg_cnt (devirt))
2945 return NULL;
2947 ipa_check_create_node_params ();
2949 /* We cannot make edges to inline clones. It is a bug if someone removed
2950 the cgraph node too early. */
2951 gcc_assert (!callee->global.inlined_to);
2953 if (dump_file && !unreachable)
2955 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2956 "(%s/%i -> %s/%i), for stmt ",
2957 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2958 speculative ? "speculative" : "known",
2959 xstrdup_for_dump (ie->caller->name ()),
2960 ie->caller->order,
2961 xstrdup_for_dump (callee->name ()),
2962 callee->order);
2963 if (ie->call_stmt)
2964 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2965 else
2966 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2968 if (dump_enabled_p ())
2970 location_t loc = gimple_location_safe (ie->call_stmt);
2972 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2973 "converting indirect call in %s to direct call to %s\n",
2974 ie->caller->name (), callee->name ());
2976 if (!speculative)
2978 struct cgraph_edge *orig = ie;
2979 ie = ie->make_direct (callee);
2980 /* If we resolved a speculative edge, the cost is already up to date
2981 for the direct call (adjusted by inline_edge_duplication_hook). */
2982 if (ie == orig)
2984 es = inline_edge_summary (ie);
2985 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2986 - eni_size_weights.call_cost);
2987 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2988 - eni_time_weights.call_cost);
2991 else
2993 if (!callee->can_be_discarded_p ())
2995 cgraph_node *alias;
2996 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2997 if (alias)
2998 callee = alias;
3000 /* make_speculative will update ie's cost to direct call cost. */
3001 ie = ie->make_speculative
3002 (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
3005 return ie;
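/* Illustrative usage sketch: when propagation determines that the callee
   value of an indirect edge IE is the address of a function declaration,
   the edge can be redirected, e.g.

     tree target = ipa_value_from_jfunc (info, jfunc);   <- say &foo
     if (target)
       ipa_make_edge_direct_to_target (ie, target);

   which is essentially what try_make_edge_direct_simple_call below does,
   minus the aggregate-contents handling.  */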
3008 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
3009 CONSTRUCTOR and return it. Return NULL if the search fails for some
3010 reason. */
3012 static tree
3013 find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
3015 tree type = TREE_TYPE (constructor);
3016 if (TREE_CODE (type) != ARRAY_TYPE
3017 && TREE_CODE (type) != RECORD_TYPE)
3018 return NULL;
3020 unsigned ix;
3021 tree index, val;
3022 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
3024 HOST_WIDE_INT elt_offset;
3025 if (TREE_CODE (type) == ARRAY_TYPE)
3027 offset_int off;
3028 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
3029 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3031 if (index)
3033 off = wi::to_offset (index);
3034 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
3036 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
3037 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3038 off = wi::sext (off - wi::to_offset (low_bound),
3039 TYPE_PRECISION (TREE_TYPE (index)));
3041 off *= wi::to_offset (unit_size);
3043 else
3044 off = wi::to_offset (unit_size) * ix;
3046 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
3047 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
3048 continue;
3049 elt_offset = off.to_shwi ();
3051 else if (TREE_CODE (type) == RECORD_TYPE)
3053 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
3054 if (DECL_BIT_FIELD (index))
3055 continue;
3056 elt_offset = int_bit_position (index);
3058 else
3059 gcc_unreachable ();
3061 if (elt_offset > req_offset)
3062 return NULL;
3064 if (TREE_CODE (val) == CONSTRUCTOR)
3065 return find_constructor_constant_at_offset (val,
3066 req_offset - elt_offset);
3068 if (elt_offset == req_offset
3069 && is_gimple_reg_type (TREE_TYPE (val))
3070 && is_gimple_ip_invariant (val))
3071 return val;
3073 return NULL;
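/* Illustrative example of the offset arithmetic above (a sketch): for

     static const int tbl[4] = { 10, 20, 30, 40 };

   a query with REQ_OFFSET == 64 visits the element with index 2, computes
   off == 2 * 4 bytes == 8, shifts it by LOG2_BITS_PER_UNIT to get 64 bits,
   finds that equal to REQ_OFFSET and returns the invariant 30.  */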
3076 /* Check whether SCALAR could be used to look up an aggregate interprocedural
3077 invariant from a static constructor and if so, return it. Otherwise return
3078 NULL. */
3080 static tree
3081 ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
3083 if (by_ref)
3085 if (TREE_CODE (scalar) != ADDR_EXPR)
3086 return NULL;
3087 scalar = TREE_OPERAND (scalar, 0);
3090 if (!VAR_P (scalar)
3091 || !is_global_var (scalar)
3092 || !TREE_READONLY (scalar)
3093 || !DECL_INITIAL (scalar)
3094 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
3095 return NULL;
3097 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
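/* Illustrative example (a sketch, assuming 32-bit int): given

     static const struct { int a; int b; } k = { 1, 2 };

   a by-reference lookup with SCALAR == &k and OFFSET == 32 strips the
   ADDR_EXPR, checks that K is a read-only global with a CONSTRUCTOR
   initializer, and hands the initializer to
   find_constructor_constant_at_offset, which finds field B at bit
   position 32 and returns the constant 2.  */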
3100 /* Retrieve value from aggregate jump function AGG or static initializer of
3101 SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
3102 none. BY_REF specifies whether the value has to be passed by reference or
3103 by value. If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
3104 to is set to true if the value comes from an initializer of a constant. */
3106 tree
3107 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar,
3108 HOST_WIDE_INT offset, bool by_ref,
3109 bool *from_global_constant)
3111 struct ipa_agg_jf_item *item;
3112 int i;
3114 if (scalar)
3116 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
3117 if (res)
3119 if (from_global_constant)
3120 *from_global_constant = true;
3121 return res;
3125 if (!agg
3126 || by_ref != agg->by_ref)
3127 return NULL;
3129 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
3130 if (item->offset == offset)
3132 /* Currently we do not have clobber values, return NULL for them once
3133 we do. */
3134 gcc_checking_assert (is_gimple_ip_invariant (item->value));
3135 if (from_global_constant)
3136 *from_global_constant = false;
3137 return item->value;
3139 return NULL;
3142 /* Remove a reference to SYMBOL from the list of references of a node given by
3143 reference description RDESC. Return true if the reference has been
3144 successfully found and removed. */
3146 static bool
3147 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
3149 struct ipa_ref *to_del;
3150 struct cgraph_edge *origin;
3152 origin = rdesc->cs;
3153 if (!origin)
3154 return false;
3155 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3156 origin->lto_stmt_uid);
3157 if (!to_del)
3158 return false;
3160 to_del->remove_reference ();
3161 if (dump_file)
3162 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
3163 xstrdup_for_dump (origin->caller->name ()),
3164 origin->caller->order, xstrdup_for_dump (symbol->name ()));
3165 return true;
3168 /* If JFUNC has a reference description with refcount different from
3169 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3170 NULL. JFUNC must be a constant jump function. */
3172 static struct ipa_cst_ref_desc *
3173 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3175 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3176 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3177 return rdesc;
3178 else
3179 return NULL;
3182 /* If the value of constant jump function JFUNC is an address of a function
3183 declaration, return the associated call graph node. Otherwise return
3184 NULL. */
3186 static cgraph_node *
3187 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3189 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3190 tree cst = ipa_get_jf_constant (jfunc);
3191 if (TREE_CODE (cst) != ADDR_EXPR
3192 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3193 return NULL;
3195 return cgraph_node::get (TREE_OPERAND (cst, 0));
3199 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3200 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3201 the edge specified in the rdesc. Return false if either the symbol or the
3202 reference could not be found, otherwise return true. */
3204 static bool
3205 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3207 struct ipa_cst_ref_desc *rdesc;
3208 if (jfunc->type == IPA_JF_CONST
3209 && (rdesc = jfunc_rdesc_usable (jfunc))
3210 && --rdesc->refcount == 0)
3212 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3213 if (!symbol)
3214 return false;
3216 return remove_described_reference (symbol, rdesc);
3218 return true;
3221 /* Try to find a destination for indirect edge IE that corresponds to a simple
3222 call or a call of a member function pointer and where the destination is a
3223 pointer formal parameter described by jump function JFUNC. If it can be
3224 determined, return the newly direct edge, otherwise return NULL.
3225 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
3227 static struct cgraph_edge *
3228 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3229 struct ipa_jump_func *jfunc,
3230 struct ipa_node_params *new_root_info)
3232 struct cgraph_edge *cs;
3233 tree target;
3234 bool agg_contents = ie->indirect_info->agg_contents;
3235 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc);
3236 if (agg_contents)
3238 bool from_global_constant;
3239 target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3240 ie->indirect_info->offset,
3241 ie->indirect_info->by_ref,
3242 &from_global_constant);
3243 if (target
3244 && !from_global_constant
3245 && !ie->indirect_info->guaranteed_unmodified)
3246 return NULL;
3248 else
3249 target = scalar;
3250 if (!target)
3251 return NULL;
3252 cs = ipa_make_edge_direct_to_target (ie, target);
3254 if (cs && !agg_contents)
3256 bool ok;
3257 gcc_checking_assert (cs->callee
3258 && (cs != ie
3259 || jfunc->type != IPA_JF_CONST
3260 || !cgraph_node_for_jfunc (jfunc)
3261 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3262 ok = try_decrement_rdesc_refcount (jfunc);
3263 gcc_checking_assert (ok);
3266 return cs;
3269 /* Return the target to be used in cases of impossible devirtualization. IE
3270 and target (the latter can be NULL) are dumped when dumping is enabled. */
3272 tree
3273 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3275 if (dump_file)
3277 if (target)
3278 fprintf (dump_file,
3279 "Type inconsistent devirtualization: %s/%i->%s\n",
3280 ie->caller->name (), ie->caller->order,
3281 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3282 else
3283 fprintf (dump_file,
3284 "No devirtualization target in %s/%i\n",
3285 ie->caller->name (), ie->caller->order);
3287 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3288 cgraph_node::get_create (new_target);
3289 return new_target;
3292 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3293 call based on a formal parameter which is described by jump function JFUNC
3294 and if it can be determined, make it direct and return the direct edge.
3295 Otherwise, return NULL. CTX describes the polymorphic context that the
3296 parameter the call is based on brings along with it. */
3298 static struct cgraph_edge *
3299 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3300 struct ipa_jump_func *jfunc,
3301 struct ipa_polymorphic_call_context ctx)
3303 tree target = NULL;
3304 bool speculative = false;
3306 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
3307 return NULL;
3309 gcc_assert (!ie->indirect_info->by_ref);
3311 /* Try to do lookup via known virtual table pointer value. */
3312 if (!ie->indirect_info->vptr_changed
3313 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
3315 tree vtable;
3316 unsigned HOST_WIDE_INT offset;
3317 tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3318 : NULL;
3319 tree t = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3320 ie->indirect_info->offset,
3321 true);
3322 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3324 bool can_refer;
3325 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3326 vtable, offset, &can_refer);
3327 if (can_refer)
3329 if (!t
3330 || (TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
3331 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
3332 || !possible_polymorphic_call_target_p
3333 (ie, cgraph_node::get (t)))
3335 /* Do not speculate builtin_unreachable, it is stupid! */
3336 if (!ie->indirect_info->vptr_changed)
3337 target = ipa_impossible_devirt_target (ie, target);
3338 else
3339 target = NULL;
3341 else
3343 target = t;
3344 speculative = ie->indirect_info->vptr_changed;
3350 ipa_polymorphic_call_context ie_context (ie);
3351 vec <cgraph_node *>targets;
3352 bool final;
3354 ctx.offset_by (ie->indirect_info->offset);
3355 if (ie->indirect_info->vptr_changed)
3356 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3357 ie->indirect_info->otr_type);
3358 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3359 targets = possible_polymorphic_call_targets
3360 (ie->indirect_info->otr_type,
3361 ie->indirect_info->otr_token,
3362 ctx, &final);
3363 if (final && targets.length () <= 1)
3365 speculative = false;
3366 if (targets.length () == 1)
3367 target = targets[0]->decl;
3368 else
3369 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3371 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3372 && !ie->speculative && ie->maybe_hot_p ())
3374 cgraph_node *n;
3375 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3376 ie->indirect_info->otr_token,
3377 ie->indirect_info->context);
3378 if (n)
3380 target = n->decl;
3381 speculative = true;
3385 if (target)
3387 if (!possible_polymorphic_call_target_p
3388 (ie, cgraph_node::get_create (target)))
3390 if (speculative)
3391 return NULL;
3392 target = ipa_impossible_devirt_target (ie, target);
3394 return ipa_make_edge_direct_to_target (ie, target, speculative);
3396 else
3397 return NULL;
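/* Illustrative sketch of the first strategy above: if the aggregate jump
   function says the memory at the call's offset holds a known vtable
   pointer, say &_ZTV1S + 16 (a hypothetical mangled vtable),
   vtable_pointer_value_to_vtable recovers the vtable and the offset
   within it, and gimple_get_virt_method_for_vtable then yields the
   method for this call's OTR token; the result is used directly, or only
   speculatively if the vptr may have changed since function entry.  */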
3400 /* Update the param called notes associated with NODE when CS is being inlined,
3401 assuming NODE is (potentially indirectly) inlined into CS->callee.
3402 Moreover, if the callee is discovered to be constant, create a new cgraph
3403 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3404 unless NEW_EDGES is NULL. Return true iff new edges were created. */
3406 static bool
3407 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3408 struct cgraph_node *node,
3409 vec<cgraph_edge *> *new_edges)
3411 struct ipa_edge_args *top;
3412 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3413 struct ipa_node_params *new_root_info;
3414 bool res = false;
3416 ipa_check_create_edge_args ();
3417 top = IPA_EDGE_REF (cs);
3418 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3419 ? cs->caller->global.inlined_to
3420 : cs->caller);
3422 for (ie = node->indirect_calls; ie; ie = next_ie)
3424 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3425 struct ipa_jump_func *jfunc;
3426 int param_index;
3427 cgraph_node *spec_target = NULL;
3429 next_ie = ie->next_callee;
3431 if (ici->param_index == -1)
3432 continue;
3434 /* We must check range due to calls with variable number of arguments: */
3435 if (ici->param_index >= ipa_get_cs_argument_count (top))
3437 ici->param_index = -1;
3438 continue;
3441 param_index = ici->param_index;
3442 jfunc = ipa_get_ith_jump_func (top, param_index);
3444 if (ie->speculative)
3446 struct cgraph_edge *de;
3447 struct ipa_ref *ref;
3448 ie->speculative_call_info (de, ie, ref);
3449 spec_target = de->callee;
3452 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3453 new_direct_edge = NULL;
3454 else if (ici->polymorphic)
3456 ipa_polymorphic_call_context ctx;
3457 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3458 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3460 else
3461 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3462 new_root_info);
3463 /* If speculation was removed, then we need to do nothing. */
3464 if (new_direct_edge && new_direct_edge != ie
3465 && new_direct_edge->callee == spec_target)
3467 new_direct_edge->indirect_inlining_edge = 1;
3468 top = IPA_EDGE_REF (cs);
3469 res = true;
3470 if (!new_direct_edge->speculative)
3471 continue;
3473 else if (new_direct_edge)
3475 new_direct_edge->indirect_inlining_edge = 1;
3476 if (new_direct_edge->call_stmt)
3477 new_direct_edge->call_stmt_cannot_inline_p
3478 = !gimple_check_call_matching_types (
3479 new_direct_edge->call_stmt,
3480 new_direct_edge->callee->decl, false);
3481 if (new_edges)
3483 new_edges->safe_push (new_direct_edge);
3484 res = true;
3486 top = IPA_EDGE_REF (cs);
3487 /* If a speculative edge was introduced, we still need to update the
3488 call info of the indirect edge. */
3489 if (!new_direct_edge->speculative)
3490 continue;
3492 if (jfunc->type == IPA_JF_PASS_THROUGH
3493 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3495 if (ici->agg_contents
3496 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3497 && !ici->polymorphic)
3498 ici->param_index = -1;
3499 else
3501 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3502 if (ici->polymorphic
3503 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3504 ici->vptr_changed = true;
3507 else if (jfunc->type == IPA_JF_ANCESTOR)
3509 if (ici->agg_contents
3510 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3511 && !ici->polymorphic)
3512 ici->param_index = -1;
3513 else
3515 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3516 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3517 if (ici->polymorphic
3518 && !ipa_get_jf_ancestor_type_preserved (jfunc))
3519 ici->vptr_changed = true;
3522 else
3523 /* Either we can find a destination for this edge now or never. */
3524 ici->param_index = -1;
3527 return res;
3530 /* Recursively traverse subtree of NODE (including node) made of inlined
3531 cgraph_edges when CS has been inlined and invoke
3532 update_indirect_edges_after_inlining on all nodes and
3533 update_jump_functions_after_inlining on all non-inlined edges that lead out
3534 of this subtree. Newly discovered indirect edges will be added to
3535 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3536 created. */
3538 static bool
3539 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3540 struct cgraph_node *node,
3541 vec<cgraph_edge *> *new_edges)
3543 struct cgraph_edge *e;
3544 bool res;
3546 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3548 for (e = node->callees; e; e = e->next_callee)
3549 if (!e->inline_failed)
3550 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3551 else
3552 update_jump_functions_after_inlining (cs, e);
3553 for (e = node->indirect_calls; e; e = e->next_callee)
3554 update_jump_functions_after_inlining (cs, e);
3556 return res;
3559 /* Combine two controlled uses counts as done during inlining. */
3561 static int
3562 combine_controlled_uses_counters (int c, int d)
3564 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3565 return IPA_UNDESCRIBED_USE;
3566 else
3567 return c + d - 1;
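/* Worked example: if the caller had C == 3 described uses of a value and
   the callee had D == 2 described uses of the corresponding parameter,
   inlining removes one use (the call argument itself) while the callee's
   uses become the caller's, so the combined count is 3 + 2 - 1 == 4.
   IPA_UNDESCRIBED_USE on either side keeps the result undescribed.  */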
3570 /* Propagate the number of controlled users from CS->callee to the new root
3571 of the tree of inlined nodes. */
3573 static void
3574 propagate_controlled_uses (struct cgraph_edge *cs)
3576 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3577 struct cgraph_node *new_root = cs->caller->global.inlined_to
3578 ? cs->caller->global.inlined_to : cs->caller;
3579 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3580 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3581 int count, i;
3583 count = MIN (ipa_get_cs_argument_count (args),
3584 ipa_get_param_count (old_root_info));
3585 for (i = 0; i < count; i++)
3587 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3588 struct ipa_cst_ref_desc *rdesc;
3590 if (jf->type == IPA_JF_PASS_THROUGH)
3592 int src_idx, c, d;
3593 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3594 c = ipa_get_controlled_uses (new_root_info, src_idx);
3595 d = ipa_get_controlled_uses (old_root_info, i);
3597 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3598 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3599 c = combine_controlled_uses_counters (c, d);
3600 ipa_set_controlled_uses (new_root_info, src_idx, c);
3601 if (c == 0 && new_root_info->ipcp_orig_node)
3603 struct cgraph_node *n;
3604 struct ipa_ref *ref;
3605 tree t = new_root_info->known_csts[src_idx];
3607 if (t && TREE_CODE (t) == ADDR_EXPR
3608 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3609 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3610 && (ref = new_root->find_reference (n, NULL, 0)))
3612 if (dump_file)
3613 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3614 "reference from %s/%i to %s/%i.\n",
3615 xstrdup_for_dump (new_root->name ()),
3616 new_root->order,
3617 xstrdup_for_dump (n->name ()), n->order);
3618 ref->remove_reference ();
3622 else if (jf->type == IPA_JF_CONST
3623 && (rdesc = jfunc_rdesc_usable (jf)))
3625 int d = ipa_get_controlled_uses (old_root_info, i);
3626 int c = rdesc->refcount;
3627 rdesc->refcount = combine_controlled_uses_counters (c, d);
3628 if (rdesc->refcount == 0)
3630 tree cst = ipa_get_jf_constant (jf);
3631 struct cgraph_node *n;
3632 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3633 && TREE_CODE (TREE_OPERAND (cst, 0))
3634 == FUNCTION_DECL);
3635 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3636 if (n)
3638 struct cgraph_node *clone;
3639 bool ok;
3640 ok = remove_described_reference (n, rdesc);
3641 gcc_checking_assert (ok);
3643 clone = cs->caller;
3644 while (clone->global.inlined_to
3645 && clone != rdesc->cs->caller
3646 && IPA_NODE_REF (clone)->ipcp_orig_node)
3648 struct ipa_ref *ref;
3649 ref = clone->find_reference (n, NULL, 0);
3650 if (ref)
3652 if (dump_file)
3653 fprintf (dump_file, "ipa-prop: Removing "
3654 "cloning-created reference "
3655 "from %s/%i to %s/%i.\n",
3656 xstrdup_for_dump (clone->name ()),
3657 clone->order,
3658 xstrdup_for_dump (n->name ()),
3659 n->order);
3660 ref->remove_reference ();
3662 clone = clone->callers->caller;
3669 for (i = ipa_get_param_count (old_root_info);
3670 i < ipa_get_cs_argument_count (args);
3671 i++)
3673 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3675 if (jf->type == IPA_JF_CONST)
3677 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3678 if (rdesc)
3679 rdesc->refcount = IPA_UNDESCRIBED_USE;
3681 else if (jf->type == IPA_JF_PASS_THROUGH)
3682 ipa_set_controlled_uses (new_root_info,
3683 jf->value.pass_through.formal_id,
3684 IPA_UNDESCRIBED_USE);
3688 /* Update jump functions and call note functions on inlining the call site CS.
3689 CS is expected to lead to a node already cloned by
3690 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3691 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3692 created. */
3694 bool
3695 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3696 vec<cgraph_edge *> *new_edges)
3698 bool changed;
3699 /* Do nothing if the preparation phase has not been carried out yet
3700 (i.e. during early inlining). */
3701 if (!ipa_node_params_sum)
3702 return false;
3703 gcc_assert (ipa_edge_args_vector);
3705 propagate_controlled_uses (cs);
3706 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3708 return changed;
3711 /* Ensure that the array of edge argument infos is big enough to accommodate
3712 a structure for all edges, reallocating it if necessary. Also allocate the
3713 associated hash tables if they do not already exist. */
3715 void
3716 ipa_check_create_edge_args (void)
3718 if (vec_safe_length (ipa_edge_args_vector)
3719 <= (unsigned) symtab->edges_max_uid)
3720 vec_safe_grow_cleared (ipa_edge_args_vector, symtab->edges_max_uid + 1);
3721 if (!ipa_bits_hash_table)
3722 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3723 if (!ipa_vr_hash_table)
3724 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
3727 /* Frees all dynamically allocated structures that the argument info points
3728 to. */
3730 void
3731 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3733 vec_free (args->jump_functions);
3734 memset (args, 0, sizeof (*args));
3737 /* Free all ipa_edge structures. */
3739 void
3740 ipa_free_all_edge_args (void)
3742 int i;
3743 struct ipa_edge_args *args;
3745 if (!ipa_edge_args_vector)
3746 return;
3748 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3749 ipa_free_edge_args_substructures (args);
3751 vec_free (ipa_edge_args_vector);
3754 /* Free all ipa_node_params structures. */
3756 void
3757 ipa_free_all_node_params (void)
3759 ipa_node_params_sum->release ();
3760 ipa_node_params_sum = NULL;
3763 /* Grow ipcp_transformations if necessary. Also allocate the associated hash
3764 tables if they do not already exist. */
3766 void
3767 ipcp_grow_transformations_if_necessary (void)
3769 if (vec_safe_length (ipcp_transformations)
3770 <= (unsigned) symtab->cgraph_max_uid)
3771 vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
3772 if (!ipa_bits_hash_table)
3773 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3774 if (!ipa_vr_hash_table)
3775 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
3778 /* Set the aggregate replacements of NODE to be AGGVALS. */
3780 void
3781 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3782 struct ipa_agg_replacement_value *aggvals)
3784 ipcp_grow_transformations_if_necessary ();
3785 (*ipcp_transformations)[node->uid].agg_values = aggvals;
3788 /* Hook that is called by cgraph.c when an edge is removed. */
3790 static void
3791 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3793 struct ipa_edge_args *args;
3795 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3796 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3797 return;
3799 args = IPA_EDGE_REF (cs);
3800 if (args->jump_functions)
3802 struct ipa_jump_func *jf;
3803 int i;
3804 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3806 struct ipa_cst_ref_desc *rdesc;
3807 try_decrement_rdesc_refcount (jf);
3808 if (jf->type == IPA_JF_CONST
3809 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3810 && rdesc->cs == cs)
3811 rdesc->cs = NULL;
3815 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3818 /* Hook that is called by cgraph.c when an edge is duplicated. */
3820 static void
3821 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3822 void *)
3824 struct ipa_edge_args *old_args, *new_args;
3825 unsigned int i;
3827 ipa_check_create_edge_args ();
3829 old_args = IPA_EDGE_REF (src);
3830 new_args = IPA_EDGE_REF (dst);
3832 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3833 if (old_args->polymorphic_call_contexts)
3834 new_args->polymorphic_call_contexts
3835 = vec_safe_copy (old_args->polymorphic_call_contexts);
3837 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3839 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3840 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3842 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3844 if (src_jf->type == IPA_JF_CONST)
3846 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3848 if (!src_rdesc)
3849 dst_jf->value.constant.rdesc = NULL;
3850 else if (src->caller == dst->caller)
3852 struct ipa_ref *ref;
3853 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3854 gcc_checking_assert (n);
3855 ref = src->caller->find_reference (n, src->call_stmt,
3856 src->lto_stmt_uid);
3857 gcc_checking_assert (ref);
3858 dst->caller->clone_reference (ref, ref->stmt);
3860 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3861 dst_rdesc->cs = dst;
3862 dst_rdesc->refcount = src_rdesc->refcount;
3863 dst_rdesc->next_duplicate = NULL;
3864 dst_jf->value.constant.rdesc = dst_rdesc;
3866 else if (src_rdesc->cs == src)
3868 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3869 dst_rdesc->cs = dst;
3870 dst_rdesc->refcount = src_rdesc->refcount;
3871 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3872 src_rdesc->next_duplicate = dst_rdesc;
3873 dst_jf->value.constant.rdesc = dst_rdesc;
3875 else
3877 struct ipa_cst_ref_desc *dst_rdesc;
3878 /* This can happen during inlining, when a JFUNC can refer to a
3879 reference taken in a function up in the tree of inline clones.
3880 We need to find the duplicate that refers to our tree of
3881 inline clones. */
3883 gcc_assert (dst->caller->global.inlined_to);
3884 for (dst_rdesc = src_rdesc->next_duplicate;
3885 dst_rdesc;
3886 dst_rdesc = dst_rdesc->next_duplicate)
3888 struct cgraph_node *top;
3889 top = dst_rdesc->cs->caller->global.inlined_to
3890 ? dst_rdesc->cs->caller->global.inlined_to
3891 : dst_rdesc->cs->caller;
3892 if (dst->caller->global.inlined_to == top)
3893 break;
3895 gcc_assert (dst_rdesc);
3896 dst_jf->value.constant.rdesc = dst_rdesc;
3899 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3900 && src->caller == dst->caller)
3902 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3903 ? dst->caller->global.inlined_to : dst->caller;
3904 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3905 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3907 int c = ipa_get_controlled_uses (root_info, idx);
3908 if (c != IPA_UNDESCRIBED_USE)
3910 c++;
3911 ipa_set_controlled_uses (root_info, idx, c);
3917 /* Analyze a function newly added to the callgraph. */
3919 static void
3920 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3922 if (node->has_gimple_body_p ())
3923 ipa_analyze_node (node);
3926 /* Hook that is called by the function summary mechanism when a node is duplicated. */
3928 void
3929 ipa_node_params_t::duplicate (cgraph_node *src, cgraph_node *dst,
3930 ipa_node_params *old_info,
3931 ipa_node_params *new_info)
3933 ipa_agg_replacement_value *old_av, *new_av;
3935 new_info->descriptors = vec_safe_copy (old_info->descriptors);
3936 new_info->lattices = NULL;
3937 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3938 new_info->known_csts = old_info->known_csts.copy ();
3939 new_info->known_contexts = old_info->known_contexts.copy ();
3941 new_info->analysis_done = old_info->analysis_done;
3942 new_info->node_enqueued = old_info->node_enqueued;
3943 new_info->versionable = old_info->versionable;
3945 old_av = ipa_get_agg_replacements_for_node (src);
3946 if (old_av)
3948 new_av = NULL;
3949 while (old_av)
3951 struct ipa_agg_replacement_value *v;
3953 v = ggc_alloc<ipa_agg_replacement_value> ();
3954 memcpy (v, old_av, sizeof (*v));
3955 v->next = new_av;
3956 new_av = v;
3957 old_av = old_av->next;
3959 ipa_set_node_agg_value_chain (dst, new_av);
3962 ipcp_transformation_summary *src_trans
3963 = ipcp_get_transformation_summary (src);
3965 if (src_trans)
3967 ipcp_grow_transformations_if_necessary ();
3968 src_trans = ipcp_get_transformation_summary (src);
3969 ipcp_transformation_summary *dst_trans
3970 = ipcp_get_transformation_summary (dst);
3972 dst_trans->bits = vec_safe_copy (src_trans->bits);
3974 const vec<ipa_vr, va_gc> *src_vr = src_trans->m_vr;
3975 vec<ipa_vr, va_gc> *&dst_vr
3976 = ipcp_get_transformation_summary (dst)->m_vr;
3977 if (vec_safe_length (src_trans->m_vr) > 0)
3979 vec_safe_reserve_exact (dst_vr, src_vr->length ());
3980 for (unsigned i = 0; i < src_vr->length (); ++i)
3981 dst_vr->quick_push ((*src_vr)[i]);
3986 /* Register our cgraph hooks if they are not already there. */
3988 void
3989 ipa_register_cgraph_hooks (void)
3991 ipa_check_create_node_params ();
3993 if (!edge_removal_hook_holder)
3994 edge_removal_hook_holder =
3995 symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3996 if (!edge_duplication_hook_holder)
3997 edge_duplication_hook_holder =
3998 symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3999 function_insertion_hook_holder =
4000 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
4003 /* Unregister our cgraph hooks if they are registered. */
4005 static void
4006 ipa_unregister_cgraph_hooks (void)
4008 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
4009 edge_removal_hook_holder = NULL;
4010 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
4011 edge_duplication_hook_holder = NULL;
4012 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
4013 function_insertion_hook_holder = NULL;
4016 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4017 longer needed after ipa-cp. */
4019 void
4020 ipa_free_all_structures_after_ipa_cp (void)
4022 if (!optimize && !in_lto_p)
4024 ipa_free_all_edge_args ();
4025 ipa_free_all_node_params ();
4026 ipcp_sources_pool.release ();
4027 ipcp_cst_values_pool.release ();
4028 ipcp_poly_ctx_values_pool.release ();
4029 ipcp_agg_lattice_pool.release ();
4030 ipa_unregister_cgraph_hooks ();
4031 ipa_refdesc_pool.release ();
4035 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4036 longer needed after indirect inlining. */
4038 void
4039 ipa_free_all_structures_after_iinln (void)
4041 ipa_free_all_edge_args ();
4042 ipa_free_all_node_params ();
4043 ipa_unregister_cgraph_hooks ();
4044 ipcp_sources_pool.release ();
4045 ipcp_cst_values_pool.release ();
4046 ipcp_poly_ctx_values_pool.release ();
4047 ipcp_agg_lattice_pool.release ();
4048 ipa_refdesc_pool.release ();
4051 /* Print ipa_tree_map data structures of function NODE to F. */
4054 void
4055 ipa_print_node_params (FILE *f, struct cgraph_node *node)
4057 int i, count;
4058 struct ipa_node_params *info;
4060 if (!node->definition)
4061 return;
4062 info = IPA_NODE_REF (node);
4063 fprintf (f, " function %s/%i parameter descriptors:\n",
4064 node->name (), node->order);
4065 count = ipa_get_param_count (info);
4066 for (i = 0; i < count; i++)
4068 int c;
4070 fprintf (f, " ");
4071 ipa_dump_param (f, info, i);
4072 if (ipa_is_param_used (info, i))
4073 fprintf (f, " used");
4074 c = ipa_get_controlled_uses (info, i);
4075 if (c == IPA_UNDESCRIBED_USE)
4076 fprintf (f, " undescribed_use");
4077 else
4078 fprintf (f, " controlled_uses=%i", c);
4079 fprintf (f, "\n");
4083 /* Print ipa_tree_map data structures of all functions in the
4084 callgraph to F. */
4086 void
4087 ipa_print_all_params (FILE * f)
4089 struct cgraph_node *node;
4091 fprintf (f, "\nFunction parameters:\n");
4092 FOR_EACH_FUNCTION (node)
4093 ipa_print_node_params (f, node);
4096 /* Return a heap-allocated vector containing the formal parameters of FNDECL. */
4098 vec<tree>
4099 ipa_get_vector_of_formal_parms (tree fndecl)
4101 vec<tree> args;
4102 int count;
4103 tree parm;
4105 gcc_assert (!flag_wpa);
4106 count = count_formal_params (fndecl);
4107 args.create (count);
4108 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
4109 args.quick_push (parm);
4111 return args;
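/* The returned vector is owned by the caller and must be released, as in
   this sketch:

     vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
     ...use parms...
     parms.release ();

   ipa_dump_param_adjustments below follows the same pattern.  */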
4114 /* Return a heap-allocated vector containing the types of formal parameters of
4115 function type FNTYPE. */
4117 vec<tree>
4118 ipa_get_vector_of_formal_parm_types (tree fntype)
4120 vec<tree> types;
4121 int count = 0;
4122 tree t;
4124 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
4125 count++;
4127 types.create (count);
4128 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
4129 types.quick_push (TREE_VALUE (t));
4131 return types;
4134 /* Modify the function declaration FNDECL and its type according to the plan in
4135 ADJUSTMENTS. It also sets base fields of individual adjustment structures
4136 to reflect the actual parameters being modified, which are determined by
4137 the base_index field. */
4139 void
4140 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
4142 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
4143 tree orig_type = TREE_TYPE (fndecl);
4144 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
4146 /* The following test is an ugly hack; some functions simply don't have any
4147 arguments in their type. This is probably a bug but well... */
4148 bool care_for_types = (old_arg_types != NULL_TREE);
4149 bool last_parm_void;
4150 vec<tree> otypes;
4151 if (care_for_types)
4153 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
4154 == void_type_node);
4155 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
4156 if (last_parm_void)
4157 gcc_assert (oparms.length () + 1 == otypes.length ());
4158 else
4159 gcc_assert (oparms.length () == otypes.length ());
4161 else
4163 last_parm_void = false;
4164 otypes.create (0);
4167 int len = adjustments.length ();
4168 tree *link = &DECL_ARGUMENTS (fndecl);
4169 tree new_arg_types = NULL;
4170 for (int i = 0; i < len; i++)
4172 struct ipa_parm_adjustment *adj;
4173 gcc_assert (link);
4175 adj = &adjustments[i];
4176 tree parm;
4177 if (adj->op == IPA_PARM_OP_NEW)
4178 parm = NULL;
4179 else
4180 parm = oparms[adj->base_index];
4181 adj->base = parm;
4183 if (adj->op == IPA_PARM_OP_COPY)
4185 if (care_for_types)
4186 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
4187 new_arg_types);
4188 *link = parm;
4189 link = &DECL_CHAIN (parm);
4191 else if (adj->op != IPA_PARM_OP_REMOVE)
4193 tree new_parm;
4194 tree ptype;
4196 if (adj->by_ref)
4197 ptype = build_pointer_type (adj->type);
4198 else
4200 ptype = adj->type;
4201 if (is_gimple_reg_type (ptype)
4202 && TYPE_MODE (ptype) != BLKmode)
4204 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
4205 if (TYPE_ALIGN (ptype) != malign)
4206 ptype = build_aligned_type (ptype, malign);
4210 if (care_for_types)
4211 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
4213 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
4214 ptype);
4215 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
4216 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
4217 DECL_ARTIFICIAL (new_parm) = 1;
4218 DECL_ARG_TYPE (new_parm) = ptype;
4219 DECL_CONTEXT (new_parm) = fndecl;
4220 TREE_USED (new_parm) = 1;
4221 DECL_IGNORED_P (new_parm) = 1;
4222 layout_decl (new_parm, 0);
4224 if (adj->op == IPA_PARM_OP_NEW)
4225 adj->base = NULL;
4226 else
4227 adj->base = parm;
4228 adj->new_decl = new_parm;
4230 *link = new_parm;
4231 link = &DECL_CHAIN (new_parm);
4235 *link = NULL_TREE;
4237 tree new_reversed = NULL;
4238 if (care_for_types)
4240 new_reversed = nreverse (new_arg_types);
4241 if (last_parm_void)
4243 if (new_reversed)
4244 TREE_CHAIN (new_arg_types) = void_list_node;
4245 else
4246 new_reversed = void_list_node;
4250 /* Use copy_node to preserve as much as possible from the original type
4251 (debug info, attribute lists etc.).
4252 The exception is METHOD_TYPEs, which must have the THIS argument;
4253 when we are asked to remove it, we need to build a new FUNCTION_TYPE
4254 instead. */
4255 tree new_type = NULL;
4256 if (TREE_CODE (orig_type) != METHOD_TYPE
4257 || (adjustments[0].op == IPA_PARM_OP_COPY
4258 && adjustments[0].base_index == 0))
4260 new_type = build_distinct_type_copy (orig_type);
4261 TYPE_ARG_TYPES (new_type) = new_reversed;
4263 else
4265 new_type
4266 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
4267 new_reversed));
4268 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
4269 DECL_VINDEX (fndecl) = NULL_TREE;
4272 /* When signature changes, we need to clear builtin info. */
4273 if (DECL_BUILT_IN (fndecl))
4275 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
4276 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
4279 TREE_TYPE (fndecl) = new_type;
4280 DECL_VIRTUAL_P (fndecl) = 0;
4281 DECL_LANG_SPECIFIC (fndecl) = NULL;
4282 otypes.release ();
4283 oparms.release ();
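/* Illustration: a minimal adjustment vector (a sketch; real users such as
   IPA-SRA also fill in types, offsets and the by_ref flag as needed) that
   keeps the first formal parameter of FNDECL and drops the second:

     ipa_parm_adjustment_vec adjustments;
     adjustments.create (2);
     struct ipa_parm_adjustment keep, drop;
     memset (&keep, 0, sizeof (keep));
     keep.op = IPA_PARM_OP_COPY;
     keep.base_index = 0;
     adjustments.quick_push (keep);
     memset (&drop, 0, sizeof (drop));
     drop.op = IPA_PARM_OP_REMOVE;
     drop.base_index = 1;
     adjustments.quick_push (drop);
     ipa_modify_formal_parameters (fndecl, adjustments);
     adjustments.release ();  */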
4286 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
4287 If this is a directly recursive call, CS must be NULL. Otherwise it must
4288 contain the corresponding call graph edge. */
4290 void
4291 ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
4292 ipa_parm_adjustment_vec adjustments)
4294 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
4295 vec<tree> vargs;
4296 vec<tree, va_gc> **debug_args = NULL;
4297 gcall *new_stmt;
4298 gimple_stmt_iterator gsi, prev_gsi;
4299 tree callee_decl;
4300 int i, len;
4302 len = adjustments.length ();
4303 vargs.create (len);
4304 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
4305 current_node->remove_stmt_references (stmt);
4307 gsi = gsi_for_stmt (stmt);
4308 prev_gsi = gsi;
4309 gsi_prev (&prev_gsi);
4310 for (i = 0; i < len; i++)
4312 struct ipa_parm_adjustment *adj;
4314 adj = &adjustments[i];
4316 if (adj->op == IPA_PARM_OP_COPY)
4318 tree arg = gimple_call_arg (stmt, adj->base_index);
4320 vargs.quick_push (arg);
4322 else if (adj->op != IPA_PARM_OP_REMOVE)
4324 tree expr, base, off;
4325 location_t loc;
4326 unsigned int deref_align = 0;
4327 bool deref_base = false;
4329 /* When we create a new parameter out of the value of the old one, we can
4330 do the following kinds of transformations:
4332 - A scalar passed by reference is converted to a scalar passed by
4333 value. (adj->by_ref is false and the type of the original
4334 actual argument is a pointer to a scalar).
4336 - A part of an aggregate is passed instead of the whole aggregate.
4337 The part can be passed either by value or by reference, this is
4338 determined by value of adj->by_ref. Moreover, the code below
4339 handles both situations when the original aggregate is passed by
4340 value (its type is not a pointer) and when it is passed by
4341 reference (it is a pointer to an aggregate).
4343 When the new argument is passed by reference (adj->by_ref is true)
4344 it must be a part of an aggregate and therefore we form it by
4345 simply taking the address of a reference inside the original
4346 aggregate. */
4348 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
4349 base = gimple_call_arg (stmt, adj->base_index);
4350 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
4351 : EXPR_LOCATION (base);
4353 if (TREE_CODE (base) != ADDR_EXPR
4354 && POINTER_TYPE_P (TREE_TYPE (base)))
4355 off = build_int_cst (adj->alias_ptr_type,
4356 adj->offset / BITS_PER_UNIT);
4357 else
4359 HOST_WIDE_INT base_offset;
4360 tree prev_base;
4361 bool addrof;
4363 if (TREE_CODE (base) == ADDR_EXPR)
4365 base = TREE_OPERAND (base, 0);
4366 addrof = true;
4368 else
4369 addrof = false;
4370 prev_base = base;
4371 base = get_addr_base_and_unit_offset (base, &base_offset);
4372 /* Aggregate arguments can have non-invariant addresses. */
4373 if (!base)
4375 base = build_fold_addr_expr (prev_base);
4376 off = build_int_cst (adj->alias_ptr_type,
4377 adj->offset / BITS_PER_UNIT);
4379 else if (TREE_CODE (base) == MEM_REF)
4381 if (!addrof)
4383 deref_base = true;
4384 deref_align = TYPE_ALIGN (TREE_TYPE (base));
4386 off = build_int_cst (adj->alias_ptr_type,
4387 base_offset
4388 + adj->offset / BITS_PER_UNIT);
4389 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
4390 off);
4391 base = TREE_OPERAND (base, 0);
4393 else
4395 off = build_int_cst (adj->alias_ptr_type,
4396 base_offset
4397 + adj->offset / BITS_PER_UNIT);
4398 base = build_fold_addr_expr (base);
4402 if (!adj->by_ref)
4404 tree type = adj->type;
4405 unsigned int align;
4406 unsigned HOST_WIDE_INT misalign;
4408 if (deref_base)
4410 align = deref_align;
4411 misalign = 0;
4413 else
4415 get_pointer_alignment_1 (base, &align, &misalign);
4416 if (TYPE_ALIGN (type) > align)
4417 align = TYPE_ALIGN (type);
4419 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
4420 * BITS_PER_UNIT);
4421 misalign = misalign & (align - 1);
4422 if (misalign != 0)
4423 align = least_bit_hwi (misalign);
4424 if (align < TYPE_ALIGN (type))
4425 type = build_aligned_type (type, align);
4426 base = force_gimple_operand_gsi (&gsi, base,
4427 true, NULL, true, GSI_SAME_STMT);
4428 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
4429 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
4430 /* If expr is not a valid gimple call argument, emit
4431 a load into a temporary. */
4432 if (is_gimple_reg_type (TREE_TYPE (expr)))
4434 gimple *tem = gimple_build_assign (NULL_TREE, expr);
4435 if (gimple_in_ssa_p (cfun))
4437 gimple_set_vuse (tem, gimple_vuse (stmt));
4438 expr = make_ssa_name (TREE_TYPE (expr), tem);
4440 else
4441 expr = create_tmp_reg (TREE_TYPE (expr));
4442 gimple_assign_set_lhs (tem, expr);
4443 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4446 else
4448 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4449 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
4450 expr = build_fold_addr_expr (expr);
4451 expr = force_gimple_operand_gsi (&gsi, expr,
4452 true, NULL, true, GSI_SAME_STMT);
4454 vargs.quick_push (expr);
4456 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4458 unsigned int ix;
4459 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4460 gimple *def_temp;
4462 arg = gimple_call_arg (stmt, adj->base_index);
4463 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4465 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4466 continue;
4467 arg = fold_convert_loc (gimple_location (stmt),
4468 TREE_TYPE (origin), arg);
4470 if (debug_args == NULL)
4471 debug_args = decl_debug_args_insert (callee_decl);
4472 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4473 if (ddecl == origin)
4475 ddecl = (**debug_args)[ix + 1];
4476 break;
4478 if (ddecl == NULL)
4480 ddecl = make_node (DEBUG_EXPR_DECL);
4481 DECL_ARTIFICIAL (ddecl) = 1;
4482 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4483 SET_DECL_MODE (ddecl, DECL_MODE (origin));
4485 vec_safe_push (*debug_args, origin);
4486 vec_safe_push (*debug_args, ddecl);
4488 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4489 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4493 if (dump_file && (dump_flags & TDF_DETAILS))
4495 fprintf (dump_file, "replacing stmt:");
4496 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4499 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4500 vargs.release ();
4501 if (gimple_call_lhs (stmt))
4502 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4504 gimple_set_block (new_stmt, gimple_block (stmt));
4505 if (gimple_has_location (stmt))
4506 gimple_set_location (new_stmt, gimple_location (stmt));
4507 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4508 gimple_call_copy_flags (new_stmt, stmt);
4509 if (gimple_in_ssa_p (cfun))
4511 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4512 if (gimple_vdef (stmt))
4514 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4515 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4519 if (dump_file && (dump_flags & TDF_DETAILS))
4521 fprintf (dump_file, "with stmt:");
4522 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4523 fprintf (dump_file, "\n");
4525 gsi_replace (&gsi, new_stmt, true);
4526 if (cs)
4527 cs->set_call_stmt (new_stmt);
4530 current_node->record_stmt_references (gsi_stmt (gsi));
4531 gsi_prev (&gsi);
4533 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4536 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4537 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4538 specifies whether the function should care about type incompatibility
4539 between the current and new expressions. If it is false, it will leave
4540 incompatibility issues to the caller. Return true iff the expression
4541 was modified. */
4543 bool
4544 ipa_modify_expr (tree *expr, bool convert,
4545 ipa_parm_adjustment_vec adjustments)
4547 struct ipa_parm_adjustment *cand
4548 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4549 if (!cand)
4550 return false;
4552 tree src;
4553 if (cand->by_ref)
4555 src = build_simple_mem_ref (cand->new_decl);
4556 REF_REVERSE_STORAGE_ORDER (src) = cand->reverse;
4558 else
4559 src = cand->new_decl;
4561 if (dump_file && (dump_flags & TDF_DETAILS))
4563 fprintf (dump_file, "About to replace expr ");
4564 print_generic_expr (dump_file, *expr, 0);
4565 fprintf (dump_file, " with ");
4566 print_generic_expr (dump_file, src, 0);
4567 fprintf (dump_file, "\n");
4570 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4572 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4573 *expr = vce;
4575 else
4576 *expr = src;
4577 return true;
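/* For example (a sketch): if parameter P was reduced to a new by-reference
   replacement NEW_P, the reduced piece of P appearing in *EXPR is rewritten
   to *NEW_P, wrapped in a VIEW_CONVERT_EXPR when CONVERT is set and the
   types are not compatible.  */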
4580 /* If T is an SSA_NAME, return NULL if it is not a default def or
4581 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4582 the base variable is always returned, regardless of whether it is a
4583 default def. Return T if it is not an SSA_NAME. */
4585 static tree
4586 get_ssa_base_param (tree t, bool ignore_default_def)
4588 if (TREE_CODE (t) == SSA_NAME)
4590 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4591 return SSA_NAME_VAR (t);
4592 else
4593 return NULL_TREE;
4595 return t;
4598 /* Given an expression, return an adjustment entry specifying the
4599 transformation to be done on EXPR. If no suitable adjustment entry
4600 was found, returns NULL.
4602 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4603 default def; otherwise bail on them.
4605 If CONVERT is non-NULL, this function will set *CONVERT if the
4606 expression provided is a component reference. ADJUSTMENTS is the
4607 adjustments vector. */
4609 ipa_parm_adjustment *
4610 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4611 ipa_parm_adjustment_vec adjustments,
4612 bool ignore_default_def)
4614 if (TREE_CODE (**expr) == BIT_FIELD_REF
4615 || TREE_CODE (**expr) == IMAGPART_EXPR
4616 || TREE_CODE (**expr) == REALPART_EXPR)
4618 *expr = &TREE_OPERAND (**expr, 0);
4619 if (convert)
4620 *convert = true;
4623 HOST_WIDE_INT offset, size, max_size;
4624 bool reverse;
4625 tree base
4626 = get_ref_base_and_extent (**expr, &offset, &size, &max_size, &reverse);
4627 if (!base || size == -1 || max_size == -1)
4628 return NULL;
4630 if (TREE_CODE (base) == MEM_REF)
4632 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4633 base = TREE_OPERAND (base, 0);
4636 base = get_ssa_base_param (base, ignore_default_def);
4637 if (!base || TREE_CODE (base) != PARM_DECL)
4638 return NULL;
4640 struct ipa_parm_adjustment *cand = NULL;
4641 unsigned int len = adjustments.length ();
4642 for (unsigned i = 0; i < len; i++)
4644 struct ipa_parm_adjustment *adj = &adjustments[i];
4646 if (adj->base == base
4647 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4649 cand = adj;
4650 break;
4654 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4655 return NULL;
4656 return cand;
4659 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4661 static bool
4662 index_in_adjustments_multiple_times_p (int base_index,
4663 ipa_parm_adjustment_vec adjustments)
4665 int i, len = adjustments.length ();
4666 bool one = false;
4668 for (i = 0; i < len; i++)
4670 struct ipa_parm_adjustment *adj;
4671 adj = &adjustments[i];
4673 if (adj->base_index == base_index)
4675 if (one)
4676 return true;
4677 else
4678 one = true;
4681 return false;
4685 /* Return adjustments that should have the same effect on function parameters
4686 and call arguments as if they were first changed according to adjustments in
4687 INNER and then by adjustments in OUTER. */
4689 ipa_parm_adjustment_vec
4690 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4691 ipa_parm_adjustment_vec outer)
4693 int i, outlen = outer.length ();
4694 int inlen = inner.length ();
4695 int removals = 0;
4696 ipa_parm_adjustment_vec adjustments, tmp;
4698 tmp.create (inlen);
4699 for (i = 0; i < inlen; i++)
4701 struct ipa_parm_adjustment *n;
4702 n = &inner[i];
4704 if (n->op == IPA_PARM_OP_REMOVE)
4705 removals++;
4706 else
4708 /* FIXME: Handling of new arguments is not implemented yet. */
4709 gcc_assert (n->op != IPA_PARM_OP_NEW);
4710 tmp.quick_push (*n);
4714 adjustments.create (outlen + removals);
4715 for (i = 0; i < outlen; i++)
4717 struct ipa_parm_adjustment r;
4718 struct ipa_parm_adjustment *out = &outer[i];
4719 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4721 memset (&r, 0, sizeof (r));
4722 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4723 if (out->op == IPA_PARM_OP_REMOVE)
4725 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4727 r.op = IPA_PARM_OP_REMOVE;
4728 adjustments.quick_push (r);
4730 continue;
4732 else
4734 /* FIXME: Handling of new arguments is not implemented yet. */
4735 gcc_assert (out->op != IPA_PARM_OP_NEW);
4738 r.base_index = in->base_index;
4739 r.type = out->type;
4741 /* FIXME: Create nonlocal value too. */
4743 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4744 r.op = IPA_PARM_OP_COPY;
4745 else if (in->op == IPA_PARM_OP_COPY)
4746 r.offset = out->offset;
4747 else if (out->op == IPA_PARM_OP_COPY)
4748 r.offset = in->offset;
4749 else
4750 r.offset = in->offset + out->offset;
4751 adjustments.quick_push (r);
4754 for (i = 0; i < inlen; i++)
4756 struct ipa_parm_adjustment *n = &inner[i];
4758 if (n->op == IPA_PARM_OP_REMOVE)
4759 adjustments.quick_push (*n);
4762 tmp.release ();
4763 return adjustments;
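/* Worked example: if INNER rewrites parameters (a, b, c) into (a, c) via
   {COPY 0, REMOVE 1, COPY 2} and OUTER then keeps only the second of the
   remaining parameters via {COPY 1}, the combined vector is
   {COPY 2, REMOVE 1}: a single copy selecting c directly from the original
   parameter list, plus the carried-over removal of b.  */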
4766 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a
4767 human-friendly way, assuming they are meant to be applied to FNDECL. */
4769 void
4770 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4771 tree fndecl)
4773 int i, len = adjustments.length ();
4774 bool first = true;
4775 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4777 fprintf (file, "IPA param adjustments: ");
4778 for (i = 0; i < len; i++)
4780 struct ipa_parm_adjustment *adj;
4781 adj = &adjustments[i];
4783 if (!first)
4784 fprintf (file, " ");
4785 else
4786 first = false;
4788 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4789 print_generic_expr (file, parms[adj->base_index], 0);
4790 if (adj->base)
4792 fprintf (file, ", base: ");
4793 print_generic_expr (file, adj->base, 0);
4795 if (adj->new_decl)
4797 fprintf (file, ", new_decl: ");
4798 print_generic_expr (file, adj->new_decl, 0);
4800 if (adj->new_ssa_base)
4802 fprintf (file, ", new_ssa_base: ");
4803 print_generic_expr (file, adj->new_ssa_base, 0);
4806 if (adj->op == IPA_PARM_OP_COPY)
4807 fprintf (file, ", copy_param");
4808 else if (adj->op == IPA_PARM_OP_REMOVE)
4809 fprintf (file, ", remove_param");
4810 else
4811 fprintf (file, ", offset %li", (long) adj->offset);
4812 if (adj->by_ref)
4813 fprintf (file, ", by_ref");
4814 print_node_brief (file, ", type: ", adj->type, 0);
4815 fprintf (file, "\n");
4817 parms.release ();
4820 /* Dump the linked list of aggregate replacement values AV to F. */
4822 void
4823 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4825 bool comma = false;
4826 fprintf (f, " Aggregate replacements:");
4827 for (; av; av = av->next)
4829 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4830 av->index, av->offset);
4831 print_generic_expr (f, av->value, 0);
4832 comma = true;
4834 fprintf (f, "\n");
4837 /* Stream out jump function JUMP_FUNC to OB. */
4839 static void
4840 ipa_write_jump_function (struct output_block *ob,
4841 struct ipa_jump_func *jump_func)
4843 struct ipa_agg_jf_item *item;
4844 struct bitpack_d bp;
4845 int i, count;
4847 streamer_write_uhwi (ob, jump_func->type);
4848 switch (jump_func->type)
4850 case IPA_JF_UNKNOWN:
4851 break;
4852 case IPA_JF_CONST:
4853 gcc_assert (
4854 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4855 stream_write_tree (ob, jump_func->value.constant.value, true);
4856 break;
4857 case IPA_JF_PASS_THROUGH:
4858 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4859 if (jump_func->value.pass_through.operation == NOP_EXPR)
4861 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4862 bp = bitpack_create (ob->main_stream);
4863 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4864 streamer_write_bitpack (&bp);
4866 else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
4867 == tcc_unary)
4868 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4869 else
4871 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4872 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4874 break;
4875 case IPA_JF_ANCESTOR:
4876 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4877 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4878 bp = bitpack_create (ob->main_stream);
4879 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4880 streamer_write_bitpack (&bp);
4881 break;
4884 count = vec_safe_length (jump_func->agg.items);
4885 streamer_write_uhwi (ob, count);
4886 if (count)
4888 bp = bitpack_create (ob->main_stream);
4889 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4890 streamer_write_bitpack (&bp);
4893 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4895 streamer_write_uhwi (ob, item->offset);
4896 stream_write_tree (ob, item->value, true);
4899 bp = bitpack_create (ob->main_stream);
4900 bp_pack_value (&bp, !!jump_func->bits, 1);
4901 streamer_write_bitpack (&bp);
4902 if (jump_func->bits)
4904 streamer_write_widest_int (ob, jump_func->bits->value);
4905 streamer_write_widest_int (ob, jump_func->bits->mask);
4907 bp_pack_value (&bp, !!jump_func->m_vr, 1);
4908 streamer_write_bitpack (&bp);
4909 if (jump_func->m_vr)
4911 streamer_write_enum (ob->main_stream, value_range_type,
4912 VR_LAST, jump_func->m_vr->type);
4913 stream_write_tree (ob, jump_func->m_vr->min, true);
4914 stream_write_tree (ob, jump_func->m_vr->max, true);
4918 /* Read in jump function JUMP_FUNC from IB. */
4920 static void
4921 ipa_read_jump_function (struct lto_input_block *ib,
4922 struct ipa_jump_func *jump_func,
4923 struct cgraph_edge *cs,
4924 struct data_in *data_in)
4926 enum jump_func_type jftype;
4927 enum tree_code operation;
4928 int i, count;
4930 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4931 switch (jftype)
4933 case IPA_JF_UNKNOWN:
4934 ipa_set_jf_unknown (jump_func);
4935 break;
4936 case IPA_JF_CONST:
4937 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4938 break;
4939 case IPA_JF_PASS_THROUGH:
4940 operation = (enum tree_code) streamer_read_uhwi (ib);
4941 if (operation == NOP_EXPR)
4943 int formal_id = streamer_read_uhwi (ib);
4944 struct bitpack_d bp = streamer_read_bitpack (ib);
4945 bool agg_preserved = bp_unpack_value (&bp, 1);
4946 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4948 else if (TREE_CODE_CLASS (operation) == tcc_unary)
4950 int formal_id = streamer_read_uhwi (ib);
4951 ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4953 else
4955 tree operand = stream_read_tree (ib, data_in);
4956 int formal_id = streamer_read_uhwi (ib);
4957 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4958 operation);
4960 break;
4961 case IPA_JF_ANCESTOR:
4963 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4964 int formal_id = streamer_read_uhwi (ib);
4965 struct bitpack_d bp = streamer_read_bitpack (ib);
4966 bool agg_preserved = bp_unpack_value (&bp, 1);
4967 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4968 break;
4972 count = streamer_read_uhwi (ib);
4973 vec_alloc (jump_func->agg.items, count);
4974 if (count)
4976 struct bitpack_d bp = streamer_read_bitpack (ib);
4977 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4979 for (i = 0; i < count; i++)
4981 struct ipa_agg_jf_item item;
4982 item.offset = streamer_read_uhwi (ib);
4983 item.value = stream_read_tree (ib, data_in);
4984 jump_func->agg.items->quick_push (item);
4987 struct bitpack_d bp = streamer_read_bitpack (ib);
4988 bool bits_known = bp_unpack_value (&bp, 1);
4989 if (bits_known)
4991 widest_int value = streamer_read_widest_int (ib);
4992 widest_int mask = streamer_read_widest_int (ib);
4993 ipa_set_jfunc_bits (jump_func, value, mask);
4995 else
4996 jump_func->bits = NULL;
4998 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4999 bool vr_known = bp_unpack_value (&vr_bp, 1);
5000 if (vr_known)
5002 enum value_range_type type = streamer_read_enum (ib, value_range_type,
5003 VR_LAST);
5004 tree min = stream_read_tree (ib, data_in);
5005 tree max = stream_read_tree (ib, data_in);
5006 ipa_set_jfunc_vr (jump_func, type, min, max);
5008 else
5009 jump_func->m_vr = NULL;
5012 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
5013 relevant to indirect inlining to OB. */
5015 static void
5016 ipa_write_indirect_edge_info (struct output_block *ob,
5017 struct cgraph_edge *cs)
5019 struct cgraph_indirect_call_info *ii = cs->indirect_info;
5020 struct bitpack_d bp;
5022 streamer_write_hwi (ob, ii->param_index);
5023 bp = bitpack_create (ob->main_stream);
5024 bp_pack_value (&bp, ii->polymorphic, 1);
5025 bp_pack_value (&bp, ii->agg_contents, 1);
5026 bp_pack_value (&bp, ii->member_ptr, 1);
5027 bp_pack_value (&bp, ii->by_ref, 1);
5028 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
5029 bp_pack_value (&bp, ii->vptr_changed, 1);
5030 streamer_write_bitpack (&bp);
5031 if (ii->agg_contents || ii->polymorphic)
5032 streamer_write_hwi (ob, ii->offset);
5033 else
5034 gcc_assert (ii->offset == 0);
5036 if (ii->polymorphic)
5038 streamer_write_hwi (ob, ii->otr_token);
5039 stream_write_tree (ob, ii->otr_type, true);
5040 ii->context.stream_out (ob);
5044 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
5045 relevant to indirect inlining from IB. */
5047 static void
5048 ipa_read_indirect_edge_info (struct lto_input_block *ib,
5049 struct data_in *data_in,
5050 struct cgraph_edge *cs)
5052 struct cgraph_indirect_call_info *ii = cs->indirect_info;
5053 struct bitpack_d bp;
5055 ii->param_index = (int) streamer_read_hwi (ib);
5056 bp = streamer_read_bitpack (ib);
5057 ii->polymorphic = bp_unpack_value (&bp, 1);
5058 ii->agg_contents = bp_unpack_value (&bp, 1);
5059 ii->member_ptr = bp_unpack_value (&bp, 1);
5060 ii->by_ref = bp_unpack_value (&bp, 1);
5061 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
5062 ii->vptr_changed = bp_unpack_value (&bp, 1);
5063 if (ii->agg_contents || ii->polymorphic)
5064 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
5065 else
5066 ii->offset = 0;
5067 if (ii->polymorphic)
5069 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
5070 ii->otr_type = stream_read_tree (ib, data_in);
5071 ii->context.stream_in (ib, data_in);
5075 /* Stream out NODE info to OB. */
5077 static void
5078 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
5080 int node_ref;
5081 lto_symtab_encoder_t encoder;
5082 struct ipa_node_params *info = IPA_NODE_REF (node);
5083 int j;
5084 struct cgraph_edge *e;
5085 struct bitpack_d bp;
5087 encoder = ob->decl_state->symtab_node_encoder;
5088 node_ref = lto_symtab_encoder_encode (encoder, node);
5089 streamer_write_uhwi (ob, node_ref);
5091 streamer_write_uhwi (ob, ipa_get_param_count (info));
5092 for (j = 0; j < ipa_get_param_count (info); j++)
5093 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
5094 bp = bitpack_create (ob->main_stream);
5095 gcc_assert (info->analysis_done
5096 || ipa_get_param_count (info) == 0);
5097 gcc_assert (!info->node_enqueued);
5098 gcc_assert (!info->ipcp_orig_node);
5099 for (j = 0; j < ipa_get_param_count (info); j++)
5100 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
5101 streamer_write_bitpack (&bp);
5102 for (j = 0; j < ipa_get_param_count (info); j++)
5104 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
5105 stream_write_tree (ob, ipa_get_type (info, j), true);
5107 for (e = node->callees; e; e = e->next_callee)
5109 struct ipa_edge_args *args = IPA_EDGE_REF (e);
5111 streamer_write_uhwi (ob,
5112 ipa_get_cs_argument_count (args) * 2
5113 + (args->polymorphic_call_contexts != NULL));
5114 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
5116 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
5117 if (args->polymorphic_call_contexts != NULL)
5118 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
5121 for (e = node->indirect_calls; e; e = e->next_callee)
5123 struct ipa_edge_args *args = IPA_EDGE_REF (e);
5125 streamer_write_uhwi (ob,
5126 ipa_get_cs_argument_count (args) * 2
5127 + (args->polymorphic_call_contexts != NULL));
5128 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
5130 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
5131 if (args->polymorphic_call_contexts != NULL)
5132 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
5134 ipa_write_indirect_edge_info (ob, e);
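/* Note the encoding used above: the streamed argument count is doubled and
   its low bit flags the presence of polymorphic call contexts.  For
   example, three arguments with contexts stream as 3 * 2 + 1 == 7, which
   the reader below decodes as contexts_computed = 7 & 1 and
   count = 7 / 2.  */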
5138 /* Stream in NODE info from IB. */
5140 static void
5141 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
5142 struct data_in *data_in)
5144 struct ipa_node_params *info = IPA_NODE_REF (node);
5145 int k;
5146 struct cgraph_edge *e;
5147 struct bitpack_d bp;
5149 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
5151 for (k = 0; k < ipa_get_param_count (info); k++)
5152 (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
5154 bp = streamer_read_bitpack (ib);
5155 if (ipa_get_param_count (info) != 0)
5156 info->analysis_done = true;
5157 info->node_enqueued = false;
5158 for (k = 0; k < ipa_get_param_count (info); k++)
5159 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
5160 for (k = 0; k < ipa_get_param_count (info); k++)
5162 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
5163 (*info->descriptors)[k].decl_or_type = stream_read_tree (ib, data_in);
5165 for (e = node->callees; e; e = e->next_callee)
5167 struct ipa_edge_args *args = IPA_EDGE_REF (e);
5168 int count = streamer_read_uhwi (ib);
5169 bool contexts_computed = count & 1;
5170 count /= 2;
5172 if (!count)
5173 continue;
5174 vec_safe_grow_cleared (args->jump_functions, count);
5175 if (contexts_computed)
5176 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
5178 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
5180 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5181 data_in);
5182 if (contexts_computed)
5183 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
5186 for (e = node->indirect_calls; e; e = e->next_callee)
5188 struct ipa_edge_args *args = IPA_EDGE_REF (e);
5189 int count = streamer_read_uhwi (ib);
5190 bool contexts_computed = count & 1;
5191 count /= 2;
5193 if (count)
5195 vec_safe_grow_cleared (args->jump_functions, count);
5196 if (contexts_computed)
5197 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
5198 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
5200 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5201 data_in);
5202 if (contexts_computed)
5203 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
5206 ipa_read_indirect_edge_info (ib, data_in, e);
5210 /* Write jump functions for nodes in the current LTO partition. */
5212 void
5213 ipa_prop_write_jump_functions (void)
5215 struct cgraph_node *node;
5216 struct output_block *ob;
5217 unsigned int count = 0;
5218 lto_symtab_encoder_iterator lsei;
5219 lto_symtab_encoder_t encoder;
5221 if (!ipa_node_params_sum || !ipa_edge_args_vector)
5222 return;
5224 ob = create_output_block (LTO_section_jump_functions);
5225 encoder = ob->decl_state->symtab_node_encoder;
5226 ob->symbol = NULL;
5227 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5228 lsei_next_function_in_partition (&lsei))
5230 node = lsei_cgraph_node (lsei);
5231 if (node->has_gimple_body_p ()
5232 && IPA_NODE_REF (node) != NULL)
5233 count++;
5236 streamer_write_uhwi (ob, count);
5238 /* Process all of the functions. */
5239 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5240 lsei_next_function_in_partition (&lsei))
5242 node = lsei_cgraph_node (lsei);
5243 if (node->has_gimple_body_p ()
5244 && IPA_NODE_REF (node) != NULL)
5245 ipa_write_node_info (ob, node);
5247 streamer_write_char_stream (ob->main_stream, 0);
5248 produce_asm (ob, NULL);
5249 destroy_output_block (ob);
5252 /* Read section in file FILE_DATA of length LEN with data DATA. */
5254 static void
5255 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
5256 size_t len)
5258 const struct lto_function_header *header =
5259 (const struct lto_function_header *) data;
5260 const int cfg_offset = sizeof (struct lto_function_header);
5261 const int main_offset = cfg_offset + header->cfg_size;
5262 const int string_offset = main_offset + header->main_size;
5263 struct data_in *data_in;
5264 unsigned int i;
5265 unsigned int count;
5267 lto_input_block ib_main ((const char *) data + main_offset,
5268 header->main_size, file_data->mode_table);
5270 data_in =
5271 lto_data_in_create (file_data, (const char *) data + string_offset,
5272 header->string_size, vNULL);
5273 count = streamer_read_uhwi (&ib_main);
5275 for (i = 0; i < count; i++)
5277 unsigned int index;
5278 struct cgraph_node *node;
5279 lto_symtab_encoder_t encoder;
5281 index = streamer_read_uhwi (&ib_main);
5282 encoder = file_data->symtab_node_encoder;
5283 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5284 index));
5285 gcc_assert (node->definition);
5286 ipa_read_node_info (&ib_main, node, data_in);
5288 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5289 len);
5290 lto_data_in_delete (data_in);
5293 /* Read ipcp jump functions. */
5295 void
5296 ipa_prop_read_jump_functions (void)
5298 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5299 struct lto_file_decl_data *file_data;
5300 unsigned int j = 0;
5302 ipa_check_create_node_params ();
5303 ipa_check_create_edge_args ();
5304 ipa_register_cgraph_hooks ();
5306 while ((file_data = file_data_vec[j++]))
5308 size_t len;
5309 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
5311 if (data)
5312 ipa_prop_read_section (file_data, data, len);
5316 /* After merging units, we can get mismatches in argument counts, and decl
5317 merging might've rendered parameter lists obsolete. Also compute
5318 called_with_variable_arg info. */
5320 void
5321 ipa_update_after_lto_read (void)
5323 ipa_check_create_node_params ();
5324 ipa_check_create_edge_args ();
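/* Stream out the IPA-CP transformation summary of NODE (aggregate
   replacements, value ranges and known bits) to OB.  */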
5327 void
5328 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
5330 int node_ref;
5331 unsigned int count = 0;
5332 lto_symtab_encoder_t encoder;
5333 struct ipa_agg_replacement_value *aggvals, *av;
5335 aggvals = ipa_get_agg_replacements_for_node (node);
5336 encoder = ob->decl_state->symtab_node_encoder;
5337 node_ref = lto_symtab_encoder_encode (encoder, node);
5338 streamer_write_uhwi (ob, node_ref);
5340 for (av = aggvals; av; av = av->next)
5341 count++;
5342 streamer_write_uhwi (ob, count);
5344 for (av = aggvals; av; av = av->next)
5346 struct bitpack_d bp;
5348 streamer_write_uhwi (ob, av->offset);
5349 streamer_write_uhwi (ob, av->index);
5350 stream_write_tree (ob, av->value, true);
5352 bp = bitpack_create (ob->main_stream);
5353 bp_pack_value (&bp, av->by_ref, 1);
5354 streamer_write_bitpack (&bp);
5357 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5358 if (ts && vec_safe_length (ts->m_vr) > 0)
5360 count = ts->m_vr->length ();
5361 streamer_write_uhwi (ob, count);
5362 for (unsigned i = 0; i < count; ++i)
5364 struct bitpack_d bp;
5365 ipa_vr *parm_vr = &(*ts->m_vr)[i];
5366 bp = bitpack_create (ob->main_stream);
5367 bp_pack_value (&bp, parm_vr->known, 1);
5368 streamer_write_bitpack (&bp);
5369 if (parm_vr->known)
5371 streamer_write_enum (ob->main_stream, value_range_type,
5372 VR_LAST, parm_vr->type);
5373 streamer_write_wide_int (ob, parm_vr->min);
5374 streamer_write_wide_int (ob, parm_vr->max);
5378 else
5379 streamer_write_uhwi (ob, 0);
5381 if (ts && vec_safe_length (ts->bits) > 0)
5383 count = ts->bits->length ();
5384 streamer_write_uhwi (ob, count);
5386 for (unsigned i = 0; i < count; ++i)
5388 const ipa_bits *bits_jfunc = (*ts->bits)[i];
5389 struct bitpack_d bp = bitpack_create (ob->main_stream);
5390 bp_pack_value (&bp, !!bits_jfunc, 1);
5391 streamer_write_bitpack (&bp);
5392 if (bits_jfunc)
5394 streamer_write_widest_int (ob, bits_jfunc->value);
5395 streamer_write_widest_int (ob, bits_jfunc->mask);
5399 else
5400 streamer_write_uhwi (ob, 0);
5403 /* Stream in the aggregate value replacement chain, value ranges and known
5404 bits for NODE from IB. */
5405 static void
5406 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
5407 data_in *data_in)
5409 struct ipa_agg_replacement_value *aggvals = NULL;
5410 unsigned int count, i;
5412 count = streamer_read_uhwi (ib);
5413 for (i = 0; i < count; i++)
5415 struct ipa_agg_replacement_value *av;
5416 struct bitpack_d bp;
5418 av = ggc_alloc<ipa_agg_replacement_value> ();
5419 av->offset = streamer_read_uhwi (ib);
5420 av->index = streamer_read_uhwi (ib);
5421 av->value = stream_read_tree (ib, data_in);
5422 bp = streamer_read_bitpack (ib);
5423 av->by_ref = bp_unpack_value (&bp, 1);
5424 av->next = aggvals;
5425 aggvals = av;
5427 ipa_set_node_agg_value_chain (node, aggvals);
5429 count = streamer_read_uhwi (ib);
5430 if (count > 0)
5432 ipcp_grow_transformations_if_necessary ();
5434 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5435 vec_safe_grow_cleared (ts->m_vr, count);
5436 for (i = 0; i < count; i++)
5438 ipa_vr *parm_vr;
5439 parm_vr = &(*ts->m_vr)[i];
5440 struct bitpack_d bp;
5441 bp = streamer_read_bitpack (ib);
5442 parm_vr->known = bp_unpack_value (&bp, 1);
5443 if (parm_vr->known)
5445 parm_vr->type = streamer_read_enum (ib, value_range_type,
5446 VR_LAST);
5447 parm_vr->min = streamer_read_wide_int (ib);
5448 parm_vr->max = streamer_read_wide_int (ib);
5452 count = streamer_read_uhwi (ib);
5453 if (count > 0)
5455 ipcp_grow_transformations_if_necessary ();
5457 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5458 vec_safe_grow_cleared (ts->bits, count);
5460 for (i = 0; i < count; i++)
5462 struct bitpack_d bp = streamer_read_bitpack (ib);
5463 bool known = bp_unpack_value (&bp, 1);
5464 if (known)
5466 ipa_bits *bits
5467 = ipa_get_ipa_bits_for_value (streamer_read_widest_int (ib),
5468 streamer_read_widest_int (ib));
5469 (*ts->bits)[i] = bits;
5475 /* Write all aggregate replacements for nodes in the current partition. */
5477 void
5478 ipcp_write_transformation_summaries (void)
5480 struct cgraph_node *node;
5481 struct output_block *ob;
5482 unsigned int count = 0;
5483 lto_symtab_encoder_iterator lsei;
5484 lto_symtab_encoder_t encoder;
5486 ob = create_output_block (LTO_section_ipcp_transform);
5487 encoder = ob->decl_state->symtab_node_encoder;
5488 ob->symbol = NULL;
5489 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5490 lsei_next_function_in_partition (&lsei))
5492 node = lsei_cgraph_node (lsei);
5493 if (node->has_gimple_body_p ())
5494 count++;
5497 streamer_write_uhwi (ob, count);
5499 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5500 lsei_next_function_in_partition (&lsei))
5502 node = lsei_cgraph_node (lsei);
5503 if (node->has_gimple_body_p ())
5504 write_ipcp_transformation_info (ob, node);
5506 streamer_write_char_stream (ob->main_stream, 0);
5507 produce_asm (ob, NULL);
5508 destroy_output_block (ob);
5511 /* Read replacements section in file FILE_DATA of length LEN with data
5512 DATA. */
5514 static void
5515 read_replacements_section (struct lto_file_decl_data *file_data,
5516 const char *data,
5517 size_t len)
5519 const struct lto_function_header *header =
5520 (const struct lto_function_header *) data;
5521 const int cfg_offset = sizeof (struct lto_function_header);
5522 const int main_offset = cfg_offset + header->cfg_size;
5523 const int string_offset = main_offset + header->main_size;
5524 struct data_in *data_in;
5525 unsigned int i;
5526 unsigned int count;
5528 lto_input_block ib_main ((const char *) data + main_offset,
5529 header->main_size, file_data->mode_table);
5531 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5532 header->string_size, vNULL);
5533 count = streamer_read_uhwi (&ib_main);
5535 for (i = 0; i < count; i++)
5537 unsigned int index;
5538 struct cgraph_node *node;
5539 lto_symtab_encoder_t encoder;
5541 index = streamer_read_uhwi (&ib_main);
5542 encoder = file_data->symtab_node_encoder;
5543 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5544 index));
5545 gcc_assert (node->definition);
5546 read_ipcp_transformation_info (&ib_main, node, data_in);
5548 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
5549 len);
5550 lto_data_in_delete (data_in);
5553 /* Read IPA-CP transformation summaries. */
5555 void
5556 ipcp_read_transformation_summaries (void)
5558 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5559 struct lto_file_decl_data *file_data;
5560 unsigned int j = 0;
5562 while ((file_data = file_data_vec[j++]))
5564 size_t len;
5565 const char *data = lto_get_section_data (file_data,
5566 LTO_section_ipcp_transform,
5567 NULL, &len);
5568 if (data)
5569 read_replacements_section (file_data, data, len);
5573 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5574 NODE. */
5576 static void
5577 adjust_agg_replacement_values (struct cgraph_node *node,
5578 struct ipa_agg_replacement_value *aggval)
5580 struct ipa_agg_replacement_value *v;
5581 int i, c = 0, d = 0, *adj;
5583 if (!node->clone.combined_args_to_skip)
5584 return;
5586 for (v = aggval; v; v = v->next)
5588 gcc_assert (v->index >= 0);
5589 if (c < v->index)
5590 c = v->index;
5592 c++;
5594 adj = XALLOCAVEC (int, c);
5595 for (i = 0; i < c; i++)
5596 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5598 adj[i] = -1;
5599 d++;
5601 else
5602 adj[i] = i - d;
5604 for (v = aggval; v; v = v->next)
5605 v->index = adj[v->index];
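/* Example: with combined_args_to_skip = {1} and replacements on original
   indices 0 and 2, the remap array becomes {0, -1, 1}, so the replacement
   for index 2 is renumbered to 1 while index 0 is unchanged.  */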
5608 /* Dominator walker driving the ipcp modification phase. */
5610 class ipcp_modif_dom_walker : public dom_walker
5612 public:
5613 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
5614 vec<ipa_param_descriptor, va_gc> *descs,
5615 struct ipa_agg_replacement_value *av,
5616 bool *sc, bool *cc)
5617 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5618 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5620 virtual edge before_dom_children (basic_block);
5622 private:
5623 struct ipa_func_body_info *m_fbi;
5624 vec<ipa_param_descriptor, va_gc> *m_descriptors;
5625 struct ipa_agg_replacement_value *m_aggval;
5626 bool *m_something_changed, *m_cfg_changed;
5629 edge
5630 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5632 gimple_stmt_iterator gsi;
5633 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5635 struct ipa_agg_replacement_value *v;
5636 gimple *stmt = gsi_stmt (gsi);
5637 tree rhs, val, t;
5638 HOST_WIDE_INT offset, size;
5639 int index;
5640 bool by_ref, vce;
5642 if (!gimple_assign_load_p (stmt))
5643 continue;
5644 rhs = gimple_assign_rhs1 (stmt);
5645 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5646 continue;
5648 vce = false;
5649 t = rhs;
5650 while (handled_component_p (t))
5652 /* V_C_E can do things like convert an array of integers to one
5653 bigger integer and similar things we do not handle below. */
5654 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5656 vce = true;
5657 break;
5659 t = TREE_OPERAND (t, 0);
5661 if (vce)
5662 continue;
5664 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
5665 &offset, &size, &by_ref))
5666 continue;
5667 for (v = m_aggval; v; v = v->next)
5668 if (v->index == index
5669 && v->offset == offset)
5670 break;
5671 if (!v
5672 || v->by_ref != by_ref
5673 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5674 continue;
5676 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5677 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5679 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5680 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5681 else if (TYPE_SIZE (TREE_TYPE (rhs))
5682 == TYPE_SIZE (TREE_TYPE (v->value)))
5683 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5684 else
5686 if (dump_file)
5688 fprintf (dump_file, " const ");
5689 print_generic_expr (dump_file, v->value, 0);
5690 fprintf (dump_file, " can't be converted to type of ");
5691 print_generic_expr (dump_file, rhs, 0);
5692 fprintf (dump_file, "\n");
5694 continue;
5697 else
5698 val = v->value;
5700 if (dump_file && (dump_flags & TDF_DETAILS))
5702 fprintf (dump_file, "Modifying stmt:\n ");
5703 print_gimple_stmt (dump_file, stmt, 0, 0);
5705 gimple_assign_set_rhs_from_tree (&gsi, val);
5706 update_stmt (stmt);
5708 if (dump_file && (dump_flags & TDF_DETAILS))
5710 fprintf (dump_file, "into:\n ");
5711 print_gimple_stmt (dump_file, stmt, 0, 0);
5712 fprintf (dump_file, "\n");
5715 *m_something_changed = true;
5716 if (maybe_clean_eh_stmt (stmt)
5717 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5718 *m_cfg_changed = true;
5720 return NULL;
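/* For instance (a sketch), given an aggregate replacement 0[32]=4 for an
   aggregate parameter S, a load such as

     x_1 = s.f;   where f lives at bit offset 32 within S

   is rewritten above to

     x_1 = 4;

   provided the by_ref flags, sizes and types match as checked.  */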
5723 /* Update bits info of formal parameters as described in
5724 ipcp_transformation_summary. */
5726 static void
5727 ipcp_update_bits (struct cgraph_node *node)
5729 tree parm = DECL_ARGUMENTS (node->decl);
5730 tree next_parm = parm;
5731 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5733 if (!ts || vec_safe_length (ts->bits) == 0)
5734 return;
5736 vec<ipa_bits *, va_gc> &bits = *ts->bits;
5737 unsigned count = bits.length ();
5739 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5741 if (node->clone.combined_args_to_skip
5742 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5743 continue;
5745 gcc_checking_assert (parm);
5746 next_parm = DECL_CHAIN (parm);
5748 if (!bits[i]
5749 || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
5750 || POINTER_TYPE_P (TREE_TYPE (parm)))
5751 || !is_gimple_reg (parm))
5752 continue;
5754 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5755 if (!ddef)
5756 continue;
5758 if (dump_file)
5760 fprintf (dump_file, "Adjusting mask for param %u to ", i);
5761 print_hex (bits[i]->mask, dump_file);
5762 fprintf (dump_file, "\n");
5765 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5767 unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
5768 signop sgn = TYPE_SIGN (TREE_TYPE (ddef));
5770 wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
5771 | wide_int::from (bits[i]->value, prec, sgn);
5772 set_nonzero_bits (ddef, nonzero_bits);
5774 else
5776 unsigned tem = bits[i]->mask.to_uhwi ();
5777 unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
5778 unsigned align = tem & -tem;
5779 unsigned misalign = bitpos & (align - 1);
5781 if (align > 1)
5783 if (dump_file)
5784 fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);
5786 unsigned old_align, old_misalign;
5787 struct ptr_info_def *pi = get_ptr_info (ddef);
5788 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5790 if (old_known
5791 && old_align > align)
5793 if (dump_file)
5795 fprintf (dump_file, "But alignment was already %u.\n", old_align);
5796 if ((old_misalign & (align - 1)) != misalign)
5797 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5798 old_misalign, misalign);
5800 continue;
5803 if (old_known
5804 && ((misalign & (old_align - 1)) != old_misalign)
5805 && dump_file)
5806 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5807 old_misalign, misalign);
5809 set_ptr_info_alignment (pi, align, misalign);
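/* Arithmetic example: a mask of 0xfffffff8 with value 0x4 gives
   tem & -tem == 8, i.e. the three low bits are known, so the pointer is
   recorded as having alignment 8 and misalignment 4.  */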

/* Update value range of formal parameters as described in
   ipcp_transformation_summary.  */
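
/* Integral ranges are attached as range info to the SSA default
   definition of the parameter; for pointers, the anti-range ~[0, 0],
   which excludes only the null pointer, is recorded by marking the
   default definition nonnull.  */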
static void
ipcp_update_vr (struct cgraph_node *node)
{
  tree fndecl = node->decl;
  tree parm = DECL_ARGUMENTS (fndecl);
  tree next_parm = parm;
  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
  if (!ts || vec_safe_length (ts->m_vr) == 0)
    return;
  const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
  unsigned count = vr.length ();

  for (unsigned i = 0; i < count; ++i, parm = next_parm)
    {
      if (node->clone.combined_args_to_skip
          && bitmap_bit_p (node->clone.combined_args_to_skip, i))
        continue;
      gcc_checking_assert (parm);
      next_parm = DECL_CHAIN (parm);
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);

      if (!ddef || !is_gimple_reg (parm))
        continue;

      if (vr[i].known
          && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
        {
          tree type = TREE_TYPE (ddef);
          unsigned prec = TYPE_PRECISION (type);
          if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
            {
              if (dump_file)
                {
                  fprintf (dump_file, "Setting value range of param %u ", i);
                  fprintf (dump_file, "%s[",
                           (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
                  print_decs (vr[i].min, dump_file);
                  fprintf (dump_file, ", ");
                  print_decs (vr[i].max, dump_file);
                  fprintf (dump_file, "]\n");
                }
              set_range_info (ddef, vr[i].type,
                              wide_int_storage::from (vr[i].min, prec,
                                                      TYPE_SIGN (type)),
                              wide_int_storage::from (vr[i].max, prec,
                                                      TYPE_SIGN (type)));
            }
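          /* For a pointer, the anti-range ~[0, 0] excludes exactly the
             null pointer, so the parameter is known to be nonnull.  */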
          else if (POINTER_TYPE_P (TREE_TYPE (ddef))
                   && vr[i].type == VR_ANTI_RANGE
                   && wi::eq_p (vr[i].min, 0)
                   && wi::eq_p (vr[i].max, 0))
            {
              if (dump_file)
                fprintf (dump_file, "Setting nonnull for %u\n", i);
              set_ptr_nonnull (ddef);
            }
        }
    }
}

/* IPCP transformation phase doing propagation of aggregate values.  */
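
/* This applies the propagated bits and value ranges through the
   helpers above and then walks the function body, replacing loads
   from aggregates with the constants known to be stored there.  */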
unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
  struct ipa_func_body_info fbi;
  struct ipa_agg_replacement_value *aggval;
  int param_count;
  bool cfg_changed = false, something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s/%i\n",
             node->name (), node->order);

  ipcp_update_bits (node);
  ipcp_update_vr (node);
  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);

  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = param_count;
  fbi.aa_walked = 0;

  vec_safe_grow_cleared (descriptors, param_count);
  ipa_populate_param_decls (node, *descriptors);
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
                         &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
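
  /* Free the walker data and drop this node's transformation
     summaries, which have now been fully applied.  */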
  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);
  (*ipcp_transformations)[node->uid].agg_values = NULL;
  (*ipcp_transformations)[node->uid].bits = NULL;
  (*ipcp_transformations)[node->uid].m_vr = NULL;

  vec_free (descriptors);

  if (!something_changed)
    return 0;
  else if (cfg_changed)
    return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
  else
    return TODO_update_ssa_only_virtuals;
}

#include "gt-ipa-prop.h"