backport: unnecessary duplication and repeating bugs like PR78439 due to changes...
[official-gcc.git] / gcc / ipa-prop.c
/* Interprocedural analyses.
   Copyright (C) 2005-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "alloc-pool.h"
28 #include "tree-pass.h"
29 #include "ssa.h"
30 #include "tree-streamer.h"
31 #include "cgraph.h"
32 #include "diagnostic.h"
33 #include "fold-const.h"
34 #include "gimple-fold.h"
35 #include "tree-eh.h"
36 #include "calls.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "gimplify-me.h"
42 #include "gimple-walk.h"
43 #include "symbol-summary.h"
44 #include "ipa-prop.h"
45 #include "tree-cfg.h"
46 #include "tree-dfa.h"
47 #include "tree-inline.h"
48 #include "ipa-fnsummary.h"
49 #include "gimple-pretty-print.h"
50 #include "params.h"
51 #include "ipa-utils.h"
52 #include "dbgcnt.h"
53 #include "domwalk.h"
54 #include "builtins.h"
/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;
/* Vector of IPA-CP transformation data for each clone.  */
vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
/* Edge summary for IPA-CP edge information.  */
ipa_edge_args_sum_t *ipa_edge_args_sum;
/* Traits for a hash table for reusing already existing ipa_bits.  */

struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
{
  typedef ipa_bits *value_type;
  typedef ipa_bits *compare_type;
  static hashval_t
  hash (const ipa_bits *p)
  {
    hashval_t t = (hashval_t) p->value.to_shwi ();
    return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
  }
  static bool
  equal (const ipa_bits *a, const ipa_bits *b)
  {
    return a->value == b->value && a->mask == b->mask;
  }
  static void
  mark_empty (ipa_bits *&p)
  {
    p = NULL;
  }
  static bool
  is_empty (const ipa_bits *p)
  {
    return p == NULL;
  }
  static bool
  is_deleted (const ipa_bits *p)
  {
    return p == reinterpret_cast<const ipa_bits *> (1);
  }
  static void
  mark_deleted (ipa_bits *&p)
  {
    p = reinterpret_cast<ipa_bits *> (1);
  }
};
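/* For example, two pointer arguments that are both known to be 8-byte
   aligned with no known offset give rise to equal ipa_bits (value 0, mask
   with the low three bits clear); the hash table below lets them share a
   single GC-allocated instance.  */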
/* Hash table for avoiding repeated allocations of equal ipa_bits.  */
static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
/* Traits for a hash table for reusing value_ranges used for IPA.  Note that
   the equiv bitmap is not hashed and is expected to be NULL.  */

struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range *>
{
  typedef value_range *value_type;
  typedef value_range *compare_type;
  static hashval_t
  hash (const value_range *p)
  {
    gcc_checking_assert (!p->equiv);
    hashval_t t = (hashval_t) p->type;
    t = iterative_hash_expr (p->min, t);
    return iterative_hash_expr (p->max, t);
  }
  static bool
  equal (const value_range *a, const value_range *b)
  {
    return a->type == b->type && a->min == b->min && a->max == b->max;
  }
  static void
  mark_empty (value_range *&p)
  {
    p = NULL;
  }
  static bool
  is_empty (const value_range *p)
  {
    return p == NULL;
  }
  static bool
  is_deleted (const value_range *p)
  {
    return p == reinterpret_cast<const value_range *> (1);
  }
  static void
  mark_deleted (value_range *&p)
  {
    p = reinterpret_cast<value_range *> (1);
  }
};
/* Hash table for avoiding repeated allocations of equal value_ranges.  */
static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
/* Holders of ipa cgraph hooks: */
static struct cgraph_node_hook_list *function_insertion_hook_holder;
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};
/* Allocation pool for reference descriptions.  */

static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize)
	 || !opt_for_fn (node->decl, flag_ipa_cp);
}
/* Return index of the formal parameter whose tree is PTREE among
   DESCRIPTORS, or -1 if it is not found.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
			    tree ptree)
{
  int i, count;

  count = vec_safe_length (descriptors);
  for (i = 0; i < count; i++)
    if ((*descriptors)[i].decl_or_type == ptree)
      return i;

  return -1;
}
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor, va_gc> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl_or_type = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
							     true);
      param_num++;
    }
}
/* Return how many formal parameters FNDECL has.  */

static int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}
/* Dump the Ith formal parameter of the function corresponding to INFO into
   FILE.  Note there is no setter function as the descriptor array is built
   just once using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if ((*info->descriptors)[i].decl_or_type)
    {
      fprintf (file, " ");
      print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
    }
}
/* If necessary, allocate vector of parameter descriptors in info of NODE.
   Return true if they were allocated, false if not.  */

static bool
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors && param_count)
    {
      vec_safe_grow_cleared (info->descriptors, param_count);
      return true;
    }
  else
    return false;
}
/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors
      && ipa_alloc_node_params (node, count_formal_params (node->decl)))
    ipa_populate_param_decls (node, *info->descriptors);
}
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f, jump_func->value.pass_through.operand);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value);
		}
	      fprintf (f, "\n");
	    }
	}

      struct ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, "         Context: ");
	  ctx->dump (dump_file);
	}

      if (jump_func->bits)
	{
	  fprintf (f, "         value: ");
	  print_hex (jump_func->bits->value, f);
	  fprintf (f, ", mask: ");
	  print_hex (jump_func->bits->mask, f);
	  fprintf (f, "\n");
	}
      else
	fprintf (f, "         Unknown bits\n");

      if (jump_func->m_vr)
	{
	  fprintf (f, "         VR  ");
	  fprintf (f, "%s[",
		   (jump_func->m_vr->type == VR_ANTI_RANGE) ? "~" : "");
	  print_decs (wi::to_wide (jump_func->m_vr->min), f);
	  fprintf (f, ", ");
	  print_decs (wi::to_wide (jump_func->m_vr->max), f);
	  fprintf (f, "]\n");
	}
      else
	fprintf (f, "         Unknown VR\n");
    }
}
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s:\n", node->dump_name ());
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, "    callsite  %s -> %s : \n",
	       node->dump_name (),
	       cs->callee->dump_name ());
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by_value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}
/* Set JFUNC to be a know-really-nothing jump function, i.e. one that conveys
   no information about the argument.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
  jfunc->bits = NULL;
  jfunc->m_vr = NULL;
}
/* Set DST to be a copy of another jump function SRC (to be used by the jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}
/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
/* Set JFUNC to be a simple pass-through jump function.  */
static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}
/* Set JFUNC to be a unary pass through jump function.  */

static void
ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}
/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}
/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}
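/* An illustrative sketch of when each setter above applies: for a call
   bar (a, a + 4, &a->field), where A is the caller's first formal parameter,
   the analysis below would typically build a simple pass-through of formal 0,
   an arithmetic pass-through (PLUS_EXPR with operand 4) of formal 0, and an
   ancestor jump function carrying the bit offset of FIELD, respectively.
   The actual decisions are made in compute_complex_assign_jump_func and
   related functions further down.  */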
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type of the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */
static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
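/* A hypothetical C++ illustration of the constructor layout assumed in the
   comment above:

     struct B : A
     {
       B () : A ()		// 1) ancestor constructors run first
       {			// 2) B's VMT pointer(s) are stored here
	 m.init ();		// 3) member ctors and user code run last
       }
     };
*/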
/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return true if the dynamic type may change
   between the beginning of the function and the point where CALL is invoked.

   Generally functions are not allowed to change the type of such instances,
   but they may call destructors.  We assume that methods cannot destroy the
   THIS pointer.  Also, as a special case, constructors and destructors may
   change the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that would require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inline cdtor is actually working on ARG, but we don't have
     an easy tie on this, so punt on all non-pure cdtors.
     We could also record the types of cdtors and, once we know the type
     of the instance, match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gcall *call, struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
						call, jfunc, offset);
}
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
						call, jfunc, 0);
}
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}
/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct ipa_func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
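/* Note that a "modified" flag established in a dominating block remains a
   sound (conservative) starting point in all dominated blocks, which is why
   the function above can seed a fresh status from the nearest dominator
   instead of re-walking the virtual definition chains for every BB.  */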
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but that does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  if (TREE_READONLY (base))
    return true;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
			    vec<ipa_param_descriptor, va_gc> *descriptors,
			    gimple *stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
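/* For illustration: given a first parameter A that is not a gimple register,
   the statement "a.0_2 = a;" is such a load, and the function above would
   return 0 for it, provided A cannot have been modified before the statement
   (see also the examples in the comment before
   compute_complex_assign_jump_func below).  */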
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
/* Return true if we can prove that OP is a memory reference loading
   data from an aggregate passed as a parameter.

   The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it
   returns false if it cannot prove that the value has not been modified
   before the load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will
   return true even if it cannot prove the value has not been modified; in
   that case it will store false to *GUARANTEED_UNMODIFIED, otherwise it will
   store true there.

   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor, va_gc> *descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
			bool *by_ref_p, bool *guaranteed_unmodified)
{
  int index;
  HOST_WIDE_INT size, max_size;
  bool reverse;
  tree base
    = get_ref_base_and_extent (op, offset_p, &size, &max_size, &reverse);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  if (guaranteed_unmodified)
	    *guaranteed_unmodified = true;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	 void (*<T2e4>) (struct S *) D.1867;
	 struct S * p.1;

	 <bb 2>:
	 p.1_1 = p;
	 D.1867_2 = p.1_1->f;
	 D.1867_2 ();
	 gdp = &p;
      */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0)
    {
      bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
      if (!data_preserved && !guaranteed_unmodified)
	return false;

      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      if (guaranteed_unmodified)
	*guaranteed_unmodified = data_preserved;
      return true;
    }
  return false;
}
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
	int a.0;

	a.0_2 = a;
	bar (a.0_2);
      }

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
	int D.2064;

	D.2064_4 = a.1(D) + 4;
	bar (D.2064_4);
      }

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
	int a.0;
	int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);
      }

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);
     }

   INFO is the structure describing the individual parameters across the
   different stages of IPA optimizations.  PARMS_AINFO contains the
   information that is only needed for intraprocedural analysis.  */
static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      switch (gimple_assign_rhs_class (stmt))
	{
	case GIMPLE_BINARY_RHS:
	  {
	    tree op2 = gimple_assign_rhs2 (stmt);
	    if (!is_gimple_ip_invariant (op2)
		|| ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
		     != tcc_comparison)
		    && !useless_type_conversion_p (TREE_TYPE (name),
						   TREE_TYPE (op1))))
	      return;

	    ipa_set_jf_arith_pass_through (jfunc, index, op2,
					   gimple_assign_rhs_code (stmt));
	    break;
	  }
	case GIMPLE_SINGLE_RHS:
	  {
	    bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
						       tc_ssa);
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	    break;
	  }
	case GIMPLE_UNARY_RHS:
	  if (is_gimple_assign (stmt)
	      && gimple_assign_rhs_class (stmt) == GIMPLE_UNARY_RHS
	      && ! CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
	    ipa_set_jf_unary_pass_through (jfunc, index,
					   gimple_assign_rhs_code (stmt));
	default:;
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size, &reverse);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size, &reverse);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

   if (obj_2(D) != 0B)
     goto <bb 3>;
   else
     goto <bb 4>;

   <bb 3>:
   iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
   # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
   D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
   return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
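/* For illustration, the Itanium C++ ABI represents a pointer-to-member
   function roughly as a record with two fields:

     struct
     {
       void (T::*method) ();	// method pointer or adjusted vtable offset
       ptrdiff_t delta;		// this-pointer adjustment
     };

   which is the two-field shape the predicate above checks for.  */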
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}
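/* For example, given "b_2 = a_1; c_3 = b_2;", calling the function above on
   c_3 follows the copies and returns a_1, stopping there because a_1 is a
   default definition.  */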
/* Simple linked list, describing known contents of an aggregate before the
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
				HOST_WIDE_INT lhs_offset,
				HOST_WIDE_INT lhs_size,
				bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
	return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	/* We already know this value is subsequently overwritten with
	   something else.  */
	*already_there = true;
      else
	/* Otherwise this is a partial overlap which we cannot
	   represent.  */
	return NULL;
    }
  return p;
}
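/* For illustration: with an existing entry covering bits [0, 32), a new
   entry at [0, 32) sets *ALREADY_THERE (the entry recorded earlier describes
   a later store, as the caller walks statements backwards), a new entry at
   [16, 48) returns NULL (partial overlap), and a new entry at [32, 64) is
   simply linked in after the existing one.  */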
/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET, and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int const_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
	{
	  struct ipa_agg_jf_item item;
	  item.offset = list->offset - arg_offset;
	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	  item.value = unshare_expr_without_location (list->constant);
	  jfunc->agg.items->quick_push (item);
	}
      list = list->next;
    }
}
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */

static void
determine_locally_known_aggregate_parts (gcall *call, tree arg,
					 tree arg_type,
					 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  if (PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS) == 0)
    return;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  HOST_WIDE_INT arg_max_size;
	  bool reverse;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					      &arg_max_size, &reverse);
	  if (arg_max_size == -1
	      || arg_max_size != arg_size
	      || arg_offset < 0)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;
      bool reverse;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					  &arg_max_size, &reverse);
      if (arg_max_size == -1
	  || arg_max_size != arg_size
	  || arg_offset < 0)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
     describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple *stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;
      bool reverse;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
	continue;
      if (!gimple_assign_single_p (stmt))
	break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
	  || TREE_CODE (lhs) == BIT_FIELD_REF
	  || contains_bitfld_component_ref_p (lhs))
	break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
					  &lhs_max_size, &reverse);
      if (lhs_max_size == -1
	  || lhs_max_size != lhs_size)
	break;

      if (check_ref)
	{
	  if (TREE_CODE (lhs_base) != MEM_REF
	      || TREE_OPERAND (lhs_base, 0) != arg_base
	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	    break;
	}
      else if (lhs_base != arg_base)
	{
	  if (DECL_P (lhs_base))
	    continue;
	  else
	    break;
	}

      bool already_there = false;
      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
					  &already_there);
      if (!p)
	break;
      if (already_there)
	continue;

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
	{
	  n->constant = rhs;
	  const_count++;
	}
      else
	n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}
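/* For illustration, for a call sequence such as

     struct S s;
     s.a = 1;
     s.b = 2;
     foo (&s);

   the backward walk above would (absent intervening clobbers) record an
   aggregate jump function passed by reference with constant items at the bit
   offsets of A and B holding the values 1 and 2.  */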
/* Return the Ith param type of callee associated with call graph
   edge E.  */

tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
/* Return ipa_bits with VALUE and MASK values, which can be either a newly
   allocated structure or a previously existing one shared with other jump
   functions and/or transformation summaries.  */

ipa_bits *
ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
{
  ipa_bits tmp;
  tmp.value = value;
  tmp.mask = mask;

  ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
  if (*slot)
    return *slot;

  ipa_bits *res = ggc_alloc<ipa_bits> ();
  res->value = value;
  res->mask = mask;
  *slot = res;

  return res;
}
/* Assign to JF a pointer to ipa_bits structure with VALUE and MASK.  Use hash
   table in order to avoid creating multiple same ipa_bits structures.  */

static void
ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
		    const widest_int &mask)
{
  jf->bits = ipa_get_ipa_bits_for_value (value, mask);
}
/* Return a pointer to a value_range just like *TMP, but either find it in
   ipa_vr_hash_table or allocate it in GC memory.  TMP->equiv must be NULL.  */

static value_range *
ipa_get_value_range (value_range *tmp)
{
  value_range **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
  if (*slot)
    return *slot;

  value_range *vr = ggc_alloc<value_range> ();
  *vr = *tmp;
  *slot = vr;

  return vr;
}
/* Return a pointer to a value range consisting of TYPE, MIN, MAX and an empty
   equiv set.  Use hash table in order to avoid creating multiple same copies of
   value_ranges.  */

static value_range *
ipa_get_value_range (enum value_range_type type, tree min, tree max)
{
  value_range tmp;
  tmp.type = type;
  tmp.min = min;
  tmp.max = max;
  tmp.equiv = NULL;
  return ipa_get_value_range (&tmp);
}
/* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
   a NULL equiv bitmap.  Use hash table in order to avoid creating multiple
   same value_range structures.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_type type,
		  tree min, tree max)
{
  jf->m_vr = ipa_get_value_range (type, min, max);
}
/* Assign to JF a pointer to a value_range just like TMP but either fetch a
   copy from ipa_vr_hash_table or allocate a new one in GC memory.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, value_range *tmp)
{
  jf->m_vr = ipa_get_value_range (tmp);
}
1829 /* Compute jump function for all arguments of callsite CS and insert the
1830 information in the jump_functions array in the ipa_edge_args corresponding
1831 to this callsite. */
1833 static void
1834 ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
1835 struct cgraph_edge *cs)
1837 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1838 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1839 gcall *call = cs->call_stmt;
1840 int n, arg_num = gimple_call_num_args (call);
1841 bool useful_context = false;
1843 if (arg_num == 0 || args->jump_functions)
1844 return;
1845 vec_safe_grow_cleared (args->jump_functions, arg_num);
1846 if (flag_devirtualize)
1847 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
1849 if (gimple_call_internal_p (call))
1850 return;
1851 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1852 return;
1854 for (n = 0; n < arg_num; n++)
1856 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1857 tree arg = gimple_call_arg (call, n);
1858 tree param_type = ipa_get_callee_param_type (cs, n);
1859 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
1861 tree instance;
1862 struct ipa_polymorphic_call_context context (cs->caller->decl,
1863 arg, cs->call_stmt,
1864 &instance);
1865 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
1866 *ipa_get_ith_polymorhic_call_context (args, n) = context;
1867 if (!context.useless_p ())
1868 useful_context = true;
1871 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1873 bool addr_nonzero = false;
1874 bool strict_overflow = false;
1876 if (TREE_CODE (arg) == SSA_NAME
1877 && param_type
1878 && get_ptr_nonnull (arg))
1879 addr_nonzero = true;
1880 else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
1881 addr_nonzero = true;
1883 if (addr_nonzero)
1885 tree z = build_int_cst (TREE_TYPE (arg), 0);
1886 ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
1888 else
1889 gcc_assert (!jfunc->m_vr);
1891 else
1893 wide_int min, max;
1894 value_range_type type;
1895 if (TREE_CODE (arg) == SSA_NAME
1896 && param_type
1897 && (type = get_range_info (arg, &min, &max))
1898 && (type == VR_RANGE || type == VR_ANTI_RANGE))
1900 value_range tmpvr,resvr;
1902 tmpvr.type = type;
1903 tmpvr.min = wide_int_to_tree (TREE_TYPE (arg), min);
1904 tmpvr.max = wide_int_to_tree (TREE_TYPE (arg), max);
1905 tmpvr.equiv = NULL;
1906 memset (&resvr, 0, sizeof (resvr));
1907 extract_range_from_unary_expr (&resvr, NOP_EXPR, param_type,
1908 &tmpvr, TREE_TYPE (arg));
1909 if (resvr.type == VR_RANGE || resvr.type == VR_ANTI_RANGE)
1910 ipa_set_jfunc_vr (jfunc, &resvr);
1911 else
1912 gcc_assert (!jfunc->m_vr);
1914 else
1915 gcc_assert (!jfunc->m_vr);
1918 if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
1919 && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
1921 if (TREE_CODE (arg) == SSA_NAME)
1922 ipa_set_jfunc_bits (jfunc, 0,
1923 widest_int::from (get_nonzero_bits (arg),
1924 TYPE_SIGN (TREE_TYPE (arg))));
1925 else
1926 ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
1928 else if (POINTER_TYPE_P (TREE_TYPE (arg)))
1930 unsigned HOST_WIDE_INT bitpos;
1931 unsigned align;
1933 get_pointer_alignment_1 (arg, &align, &bitpos);
1934 widest_int mask = wi::bit_and_not
1935 (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
1936 align / BITS_PER_UNIT - 1);
1937 widest_int value = bitpos / BITS_PER_UNIT;
1938 ipa_set_jfunc_bits (jfunc, value, mask);
1940 else
1941 gcc_assert (!jfunc->bits);
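/* Editorial worked example (not in the original source): with the usual
   BITS_PER_UNIT of 8, a pointer known to be 8-byte aligned makes
   get_pointer_alignment_1 report align == 64 and bitpos == 0, so the code
   above computes a mask with the low three bits cleared (64 / 8 - 1 == 7 is
   removed from the full-precision mask) and value == 0: bits that are zero
   in MASK are known, and VALUE supplies their contents.  For align == 64
   and bitpos == 16 the encoding would instead be value == 2, i.e. the low
   three bits are known to be 0b010.  */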
1943 if (is_gimple_ip_invariant (arg)
1944 || (VAR_P (arg)
1945 && is_global_var (arg)
1946 && TREE_READONLY (arg)))
1947 ipa_set_jf_constant (jfunc, arg, cs);
1948 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1949 && TREE_CODE (arg) == PARM_DECL)
1951 int index = ipa_get_param_decl_index (info, arg);
1953 gcc_assert (index >= 0);
1954 /* Aggregate passed by value, check for pass-through, otherwise we
1955 will attempt to fill in aggregate contents later in this
1956 loop. */
1957 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1959 ipa_set_jf_simple_pass_through (jfunc, index, false);
1960 continue;
1963 else if (TREE_CODE (arg) == SSA_NAME)
1965 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1967 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1968 if (index >= 0)
1970 bool agg_p;
1971 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1972 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1975 else
1977 gimple *stmt = SSA_NAME_DEF_STMT (arg);
1978 if (is_gimple_assign (stmt))
1979 compute_complex_assign_jump_func (fbi, info, jfunc,
1980 call, stmt, arg, param_type);
1981 else if (gimple_code (stmt) == GIMPLE_PHI)
1982 compute_complex_ancestor_jump_func (fbi, info, jfunc,
1983 call,
1984 as_a <gphi *> (stmt));
1988 /* If ARG is a pointer, we cannot use its type to determine the type of aggregate
1989 passed (because type conversions are ignored in gimple). Usually we can
1990 safely get the type from the function declaration, but in case of K&R prototypes
1991 or variadic functions we can try our luck with the type of the pointer passed.
1992 TODO: Since we look for actual initialization of the memory object, we may do
1993 better to work out the type based on the memory stores we find. */
1994 if (!param_type)
1995 param_type = TREE_TYPE (arg);
1997 if ((jfunc->type != IPA_JF_PASS_THROUGH
1998 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1999 && (jfunc->type != IPA_JF_ANCESTOR
2000 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
2001 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
2002 || POINTER_TYPE_P (param_type)))
2003 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
2005 if (!useful_context)
2006 vec_free (args->polymorphic_call_contexts);
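/* Editorial example (illustrative only; foo, bar and global are
   hypothetical names, not from this file): for

     extern int bar (int x, int *p);
     int global;
     int foo (int a)
     {
       return bar (a + 2, &global);
     }

   the loop above would typically record an arithmetic pass-through jump
   function for the first argument (formal 0, PLUS_EXPR, operand 2, built by
   compute_complex_assign_jump_func) and a constant jump function &global
   for the second, together with whatever known-bits and value-range
   information the SSA web provides.  */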
2009 /* Compute jump functions for all edges - both direct and indirect - outgoing
2010 from BB. */
2012 static void
2013 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
2015 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2016 int i;
2017 struct cgraph_edge *cs;
2019 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
2021 struct cgraph_node *callee = cs->callee;
2023 if (callee)
2025 callee->ultimate_alias_target ();
2026 /* We do not need to bother analyzing calls to unknown functions
2027 unless they may become known during lto/whopr. */
2028 if (!callee->definition && !flag_lto)
2029 continue;
2031 ipa_compute_jump_functions_for_edge (fbi, cs);
2035 /* If STMT looks like a statement loading a value from a member pointer formal
2036 parameter, return that parameter and store the offset of the field to
2037 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2038 might be clobbered). If USE_DELTA, then we look for a use of the delta
2039 field rather than the pfn. */
2041 static tree
2042 ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
2043 HOST_WIDE_INT *offset_p)
2045 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2047 if (!gimple_assign_single_p (stmt))
2048 return NULL_TREE;
2050 rhs = gimple_assign_rhs1 (stmt);
2051 if (TREE_CODE (rhs) == COMPONENT_REF)
2053 ref_field = TREE_OPERAND (rhs, 1);
2054 rhs = TREE_OPERAND (rhs, 0);
2056 else
2057 ref_field = NULL_TREE;
2058 if (TREE_CODE (rhs) != MEM_REF)
2059 return NULL_TREE;
2060 rec = TREE_OPERAND (rhs, 0);
2061 if (TREE_CODE (rec) != ADDR_EXPR)
2062 return NULL_TREE;
2063 rec = TREE_OPERAND (rec, 0);
2064 if (TREE_CODE (rec) != PARM_DECL
2065 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2066 return NULL_TREE;
2067 ref_offset = TREE_OPERAND (rhs, 1);
2069 if (use_delta)
2070 fld = delta_field;
2071 else
2072 fld = ptr_field;
2073 if (offset_p)
2074 *offset_p = int_bit_position (fld);
2076 if (ref_field)
2078 if (integer_nonzerop (ref_offset))
2079 return NULL_TREE;
2080 return ref_field == fld ? rec : NULL_TREE;
2082 else
2083 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2084 : NULL_TREE;
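/* Editorial note (illustrative, not part of GCC): the matched PARM_DECL has
   the classic member-function-pointer layout, conceptually

     struct ptrmemfunc
     {
       void *__pfn;	/* Function address, or vtable offset with vbit.  */
       long __delta;	/* Adjustment to apply to the this pointer.  */
     };

   so the load recognized above is either a COMPONENT_REF of __pfn or
   __delta, or the equivalent MEM_REF with the field's byte offset into
   REC.  */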
2087 /* Returns true iff T is an SSA_NAME defined by a statement. */
2089 static bool
2090 ipa_is_ssa_with_stmt_def (tree t)
2092 if (TREE_CODE (t) == SSA_NAME
2093 && !SSA_NAME_IS_DEFAULT_DEF (t))
2094 return true;
2095 else
2096 return false;
2099 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2100 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2101 indirect call graph edge. */
2103 static struct cgraph_edge *
2104 ipa_note_param_call (struct cgraph_node *node, int param_index,
2105 gcall *stmt)
2107 struct cgraph_edge *cs;
2109 cs = node->get_edge (stmt);
2110 cs->indirect_info->param_index = param_index;
2111 cs->indirect_info->agg_contents = 0;
2112 cs->indirect_info->member_ptr = 0;
2113 cs->indirect_info->guaranteed_unmodified = 0;
2114 return cs;
2117 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
2118 (described by INFO). PARMS_AINFO is a pointer to a vector containing
2119 intermediate information about each formal parameter. Currently it checks
2120 whether the call calls a pointer that is a formal parameter and if so, the
2121 parameter is marked with the called flag and an indirect call graph edge
2122 describing the call is created. This is very simple for ordinary pointers
2123 represented in SSA but not-so-nice when it comes to member pointers. The
2124 ugly part of this function does nothing more than trying to match the
2125 pattern of such a call. An example of such a pattern is the gimple dump
2126 below, the call is on the last line:
2128 <bb 2>:
2129 f$__delta_5 = f.__delta;
2130 f$__pfn_24 = f.__pfn;
2133 <bb 2>:
2134 f$__delta_5 = MEM[(struct *)&f];
2135 f$__pfn_24 = MEM[(struct *)&f + 4B];
2137 and a few lines below:
2139 <bb 5>
2140 D.2496_3 = (int) f$__pfn_24;
2141 D.2497_4 = D.2496_3 & 1;
2142 if (D.2497_4 != 0)
2143 goto <bb 3>;
2144 else
2145 goto <bb 4>;
2147 <bb 6>:
2148 D.2500_7 = (unsigned int) f$__delta_5;
2149 D.2501_8 = &S + D.2500_7;
2150 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2151 D.2503_10 = *D.2502_9;
2152 D.2504_12 = f$__pfn_24 + -1;
2153 D.2505_13 = (unsigned int) D.2504_12;
2154 D.2506_14 = D.2503_10 + D.2505_13;
2155 D.2507_15 = *D.2506_14;
2156 iftmp.11_16 = (String:: *) D.2507_15;
2158 <bb 7>:
2159 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2160 D.2500_19 = (unsigned int) f$__delta_5;
2161 D.2508_20 = &S + D.2500_19;
2162 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2164 Such patterns are the result of simple calls through a member pointer:
2166 int doprinting (int (MyString::* f)(int) const)
2168 MyString S ("somestring");
2170 return (S.*f)(4);
2173 Moreover, the function also looks for called pointers loaded from aggregates
2174 passed by value or reference. */
2176 static void
2177 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
2178 tree target)
2180 struct ipa_node_params *info = fbi->info;
2181 HOST_WIDE_INT offset;
2182 bool by_ref;
2184 if (SSA_NAME_IS_DEFAULT_DEF (target))
2186 tree var = SSA_NAME_VAR (target);
2187 int index = ipa_get_param_decl_index (info, var);
2188 if (index >= 0)
2189 ipa_note_param_call (fbi->node, index, call);
2190 return;
2193 int index;
2194 gimple *def = SSA_NAME_DEF_STMT (target);
2195 bool guaranteed_unmodified;
2196 if (gimple_assign_single_p (def)
2197 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2198 gimple_assign_rhs1 (def), &index, &offset,
2199 NULL, &by_ref, &guaranteed_unmodified))
2201 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2202 cs->indirect_info->offset = offset;
2203 cs->indirect_info->agg_contents = 1;
2204 cs->indirect_info->by_ref = by_ref;
2205 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
2206 return;
2209 /* Now we need to try to match the complex pattern of calling a member
2210 pointer. */
2211 if (gimple_code (def) != GIMPLE_PHI
2212 || gimple_phi_num_args (def) != 2
2213 || !POINTER_TYPE_P (TREE_TYPE (target))
2214 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2215 return;
2217 /* First, we need to check whether one of these is a load from a member
2218 pointer that is a parameter to this function. */
2219 tree n1 = PHI_ARG_DEF (def, 0);
2220 tree n2 = PHI_ARG_DEF (def, 1);
2221 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2222 return;
2223 gimple *d1 = SSA_NAME_DEF_STMT (n1);
2224 gimple *d2 = SSA_NAME_DEF_STMT (n2);
2226 tree rec;
2227 basic_block bb, virt_bb;
2228 basic_block join = gimple_bb (def);
2229 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2231 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2232 return;
2234 bb = EDGE_PRED (join, 0)->src;
2235 virt_bb = gimple_bb (d2);
2237 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2239 bb = EDGE_PRED (join, 1)->src;
2240 virt_bb = gimple_bb (d1);
2242 else
2243 return;
2245 /* Second, we need to check that the basic blocks are laid out in the way
2246 corresponding to the pattern. */
2248 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2249 || single_pred (virt_bb) != bb
2250 || single_succ (virt_bb) != join)
2251 return;
2253 /* Third, let's see that the branching is done depending on the least
2254 significant bit of the pfn. */
2256 gimple *branch = last_stmt (bb);
2257 if (!branch || gimple_code (branch) != GIMPLE_COND)
2258 return;
2260 if ((gimple_cond_code (branch) != NE_EXPR
2261 && gimple_cond_code (branch) != EQ_EXPR)
2262 || !integer_zerop (gimple_cond_rhs (branch)))
2263 return;
2265 tree cond = gimple_cond_lhs (branch);
2266 if (!ipa_is_ssa_with_stmt_def (cond))
2267 return;
2269 def = SSA_NAME_DEF_STMT (cond);
2270 if (!is_gimple_assign (def)
2271 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2272 || !integer_onep (gimple_assign_rhs2 (def)))
2273 return;
2275 cond = gimple_assign_rhs1 (def);
2276 if (!ipa_is_ssa_with_stmt_def (cond))
2277 return;
2279 def = SSA_NAME_DEF_STMT (cond);
2281 if (is_gimple_assign (def)
2282 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2284 cond = gimple_assign_rhs1 (def);
2285 if (!ipa_is_ssa_with_stmt_def (cond))
2286 return;
2287 def = SSA_NAME_DEF_STMT (cond);
2290 tree rec2;
2291 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2292 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2293 == ptrmemfunc_vbit_in_delta),
2294 NULL);
2295 if (rec != rec2)
2296 return;
2298 index = ipa_get_param_decl_index (info, rec);
2299 if (index >= 0
2300 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2302 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2303 cs->indirect_info->offset = offset;
2304 cs->indirect_info->agg_contents = 1;
2305 cs->indirect_info->member_ptr = 1;
2306 cs->indirect_info->guaranteed_unmodified = 1;
2309 return;
2312 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2313 object referenced in the expression is a formal parameter of the caller
2314 FBI->node (described by FBI->info), create a call note for the
2315 statement. */
2317 static void
2318 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2319 gcall *call, tree target)
2321 tree obj = OBJ_TYPE_REF_OBJECT (target);
2322 int index;
2323 HOST_WIDE_INT anc_offset;
2325 if (!flag_devirtualize)
2326 return;
2328 if (TREE_CODE (obj) != SSA_NAME)
2329 return;
2331 struct ipa_node_params *info = fbi->info;
2332 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2334 struct ipa_jump_func jfunc;
2335 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2336 return;
2338 anc_offset = 0;
2339 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2340 gcc_assert (index >= 0);
2341 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2342 call, &jfunc))
2343 return;
2345 else
2347 struct ipa_jump_func jfunc;
2348 gimple *stmt = SSA_NAME_DEF_STMT (obj);
2349 tree expr;
2351 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2352 if (!expr)
2353 return;
2354 index = ipa_get_param_decl_index (info,
2355 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2356 gcc_assert (index >= 0);
2357 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2358 call, &jfunc, anc_offset))
2359 return;
2362 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2363 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2364 ii->offset = anc_offset;
2365 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2366 ii->otr_type = obj_type_ref_class (target);
2367 ii->polymorphic = 1;
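/* Editorial example (illustrative only): for

     struct A { virtual int f (); };
     int wrap (A *a) { return a->f (); }

   the OBJ_TYPE_REF object is the default definition of the parameter a, so
   the branch above creates a call note with param_index 0, offset 0,
   otr_type A and polymorphic set; IPA-CP can later devirtualize the call in
   contexts where the dynamic type of *a is known.  */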
2370 /* Analyze a call statement CALL whether and how it utilizes formal parameters
2371 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
2372 containing intermediate information about each formal parameter. */
2374 static void
2375 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2377 tree target = gimple_call_fn (call);
2379 if (!target
2380 || (TREE_CODE (target) != SSA_NAME
2381 && !virtual_method_call_p (target)))
2382 return;
2384 struct cgraph_edge *cs = fbi->node->get_edge (call);
2385 /* If we previously turned the call into a direct call, there is
2386 no need to analyze. */
2387 if (cs && !cs->indirect_unknown_callee)
2388 return;
2390 if (cs->indirect_info->polymorphic && flag_devirtualize)
2392 tree instance;
2393 tree target = gimple_call_fn (call);
2394 ipa_polymorphic_call_context context (current_function_decl,
2395 target, call, &instance);
2397 gcc_checking_assert (cs->indirect_info->otr_type
2398 == obj_type_ref_class (target));
2399 gcc_checking_assert (cs->indirect_info->otr_token
2400 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2402 cs->indirect_info->vptr_changed
2403 = !context.get_dynamic_type (instance,
2404 OBJ_TYPE_REF_OBJECT (target),
2405 obj_type_ref_class (target), call);
2406 cs->indirect_info->context = context;
2409 if (TREE_CODE (target) == SSA_NAME)
2410 ipa_analyze_indirect_call_uses (fbi, call, target);
2411 else if (virtual_method_call_p (target))
2412 ipa_analyze_virtual_call_uses (fbi, call, target);
2416 /* Analyze the call statement STMT with respect to formal parameters (described
2417 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2418 formal parameters are called. */
2420 static void
2421 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2423 if (is_gimple_call (stmt))
2424 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2427 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2428 If OP is a parameter declaration, mark it as used in the info structure
2429 passed in DATA. */
2431 static bool
2432 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2434 struct ipa_node_params *info = (struct ipa_node_params *) data;
2436 op = get_base_address (op);
2437 if (op
2438 && TREE_CODE (op) == PARM_DECL)
2440 int index = ipa_get_param_decl_index (info, op);
2441 gcc_assert (index >= 0);
2442 ipa_set_param_used (info, index, true);
2445 return false;
2448 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2449 the findings in various structures of the associated ipa_node_params
2450 structure, such as parameter flags, notes etc. FBI holds various data about
2451 the function being analyzed. */
2453 static void
2454 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2456 gimple_stmt_iterator gsi;
2457 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2459 gimple *stmt = gsi_stmt (gsi);
2461 if (is_gimple_debug (stmt))
2462 continue;
2464 ipa_analyze_stmt_uses (fbi, stmt);
2465 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2466 visit_ref_for_mod_analysis,
2467 visit_ref_for_mod_analysis,
2468 visit_ref_for_mod_analysis);
2470 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2471 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2472 visit_ref_for_mod_analysis,
2473 visit_ref_for_mod_analysis,
2474 visit_ref_for_mod_analysis);
2477 /* Calculate controlled uses of parameters of NODE. */
2479 static void
2480 ipa_analyze_controlled_uses (struct cgraph_node *node)
2482 struct ipa_node_params *info = IPA_NODE_REF (node);
2484 for (int i = 0; i < ipa_get_param_count (info); i++)
2486 tree parm = ipa_get_param (info, i);
2487 int controlled_uses = 0;
2489 /* For SSA regs see if parameter is used. For non-SSA we compute
2490 the flag during modification analysis. */
2491 if (is_gimple_reg (parm))
2493 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2494 parm);
2495 if (ddef && !has_zero_uses (ddef))
2497 imm_use_iterator imm_iter;
2498 use_operand_p use_p;
2500 ipa_set_param_used (info, i, true);
2501 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2502 if (!is_gimple_call (USE_STMT (use_p)))
2504 if (!is_gimple_debug (USE_STMT (use_p)))
2506 controlled_uses = IPA_UNDESCRIBED_USE;
2507 break;
2510 else
2511 controlled_uses++;
2513 else
2514 controlled_uses = 0;
2516 else
2517 controlled_uses = IPA_UNDESCRIBED_USE;
2518 ipa_set_controlled_uses (info, i, controlled_uses);
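/* Editorial example (illustrative; apply is a hypothetical callee): in

     static int use_cb (int (*cb) (int))
     {
       return apply (cb) + apply (cb);
     }

   every use of CB's default definition is an operand of a call statement,
   so the loop above records two controlled uses; if CB were also stored to
   memory or compared, the count would degrade to IPA_UNDESCRIBED_USE.  */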
2522 /* Free stuff in BI. */
2524 static void
2525 free_ipa_bb_info (struct ipa_bb_info *bi)
2527 bi->cg_edges.release ();
2528 bi->param_aa_statuses.release ();
2531 /* Dominator walker driving the analysis. */
2533 class analysis_dom_walker : public dom_walker
2535 public:
2536 analysis_dom_walker (struct ipa_func_body_info *fbi)
2537 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2539 virtual edge before_dom_children (basic_block);
2541 private:
2542 struct ipa_func_body_info *m_fbi;
2545 edge
2546 analysis_dom_walker::before_dom_children (basic_block bb)
2548 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2549 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2550 return NULL;
2553 /* Release body info FBI. */
2555 void
2556 ipa_release_body_info (struct ipa_func_body_info *fbi)
2558 int i;
2559 struct ipa_bb_info *bi;
2561 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2562 free_ipa_bb_info (bi);
2563 fbi->bb_infos.release ();
2566 /* Initialize the array describing properties of formal parameters
2567 of NODE, analyze their uses and compute jump functions associated
2568 with actual arguments of calls from within NODE. */
2570 void
2571 ipa_analyze_node (struct cgraph_node *node)
2573 struct ipa_func_body_info fbi;
2574 struct ipa_node_params *info;
2576 ipa_check_create_node_params ();
2577 ipa_check_create_edge_args ();
2578 info = IPA_NODE_REF (node);
2580 if (info->analysis_done)
2581 return;
2582 info->analysis_done = 1;
2584 if (ipa_func_spec_opts_forbid_analysis_p (node))
2586 for (int i = 0; i < ipa_get_param_count (info); i++)
2588 ipa_set_param_used (info, i, true);
2589 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2591 return;
2594 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2595 push_cfun (func);
2596 calculate_dominance_info (CDI_DOMINATORS);
2597 ipa_initialize_node_params (node);
2598 ipa_analyze_controlled_uses (node);
2600 fbi.node = node;
2601 fbi.info = IPA_NODE_REF (node);
2602 fbi.bb_infos = vNULL;
2603 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2604 fbi.param_count = ipa_get_param_count (info);
2605 fbi.aa_walked = 0;
2607 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2609 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2610 bi->cg_edges.safe_push (cs);
2613 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2615 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2616 bi->cg_edges.safe_push (cs);
2619 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2621 ipa_release_body_info (&fbi);
2622 free_dominance_info (CDI_DOMINATORS);
2623 pop_cfun ();
2626 /* Update the jump functions associated with call graph edge E when the call
2627 graph edge CS is being inlined, assuming that E->caller is already (possibly
2628 indirectly) inlined into CS->callee and that E has not been inlined. */
2630 static void
2631 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2632 struct cgraph_edge *e)
2634 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2635 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2636 int count = ipa_get_cs_argument_count (args);
2637 int i;
2639 for (i = 0; i < count; i++)
2641 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2642 struct ipa_polymorphic_call_context *dst_ctx
2643 = ipa_get_ith_polymorhic_call_context (args, i);
2645 if (dst->type == IPA_JF_ANCESTOR)
2647 struct ipa_jump_func *src;
2648 int dst_fid = dst->value.ancestor.formal_id;
2649 struct ipa_polymorphic_call_context *src_ctx
2650 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2652 /* Variable number of arguments can cause havoc if we try to access
2653 one that does not exist in the inlined edge. So make sure we
2654 don't. */
2655 if (dst_fid >= ipa_get_cs_argument_count (top))
2657 ipa_set_jf_unknown (dst);
2658 continue;
2661 src = ipa_get_ith_jump_func (top, dst_fid);
2663 if (src_ctx && !src_ctx->useless_p ())
2665 struct ipa_polymorphic_call_context ctx = *src_ctx;
2667 /* TODO: Make type preserved safe WRT contexts. */
2668 if (!ipa_get_jf_ancestor_type_preserved (dst))
2669 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2670 ctx.offset_by (dst->value.ancestor.offset);
2671 if (!ctx.useless_p ())
2673 if (!dst_ctx)
2675 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2676 count);
2677 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2680 dst_ctx->combine_with (ctx);
2684 if (src->agg.items
2685 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2687 struct ipa_agg_jf_item *item;
2688 int j;
2690 /* Currently we do not produce clobber aggregate jump functions,
2691 replace with merging when we do. */
2692 gcc_assert (!dst->agg.items);
2694 dst->agg.items = vec_safe_copy (src->agg.items);
2695 dst->agg.by_ref = src->agg.by_ref;
2696 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2697 item->offset -= dst->value.ancestor.offset;
2700 if (src->type == IPA_JF_PASS_THROUGH
2701 && src->value.pass_through.operation == NOP_EXPR)
2703 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2704 dst->value.ancestor.agg_preserved &=
2705 src->value.pass_through.agg_preserved;
2707 else if (src->type == IPA_JF_PASS_THROUGH
2708 && TREE_CODE_CLASS (src->value.pass_through.operation) == tcc_unary)
2710 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2711 dst->value.ancestor.agg_preserved = false;
2713 else if (src->type == IPA_JF_ANCESTOR)
2715 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2716 dst->value.ancestor.offset += src->value.ancestor.offset;
2717 dst->value.ancestor.agg_preserved &=
2718 src->value.ancestor.agg_preserved;
2720 else
2721 ipa_set_jf_unknown (dst);
2723 else if (dst->type == IPA_JF_PASS_THROUGH)
2725 struct ipa_jump_func *src;
2726 /* We must check range due to calls with variable number of arguments
2727 and we cannot combine jump functions with operations. */
2728 if (dst->value.pass_through.operation == NOP_EXPR
2729 && (dst->value.pass_through.formal_id
2730 < ipa_get_cs_argument_count (top)))
2732 int dst_fid = dst->value.pass_through.formal_id;
2733 src = ipa_get_ith_jump_func (top, dst_fid);
2734 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2735 struct ipa_polymorphic_call_context *src_ctx
2736 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2738 if (src_ctx && !src_ctx->useless_p ())
2740 struct ipa_polymorphic_call_context ctx = *src_ctx;
2742 /* TODO: Make type preserved safe WRT contexts. */
2743 if (!ipa_get_jf_pass_through_type_preserved (dst))
2744 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2745 if (!ctx.useless_p ())
2747 if (!dst_ctx)
2749 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2750 count);
2751 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2753 dst_ctx->combine_with (ctx);
2756 switch (src->type)
2758 case IPA_JF_UNKNOWN:
2759 ipa_set_jf_unknown (dst);
2760 break;
2761 case IPA_JF_CONST:
2762 ipa_set_jf_cst_copy (dst, src);
2763 break;
2765 case IPA_JF_PASS_THROUGH:
2767 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2768 enum tree_code operation;
2769 operation = ipa_get_jf_pass_through_operation (src);
2771 if (operation == NOP_EXPR)
2773 bool agg_p;
2774 agg_p = dst_agg_p
2775 && ipa_get_jf_pass_through_agg_preserved (src);
2776 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2778 else if (TREE_CODE_CLASS (operation) == tcc_unary)
2779 ipa_set_jf_unary_pass_through (dst, formal_id, operation);
2780 else
2782 tree operand = ipa_get_jf_pass_through_operand (src);
2783 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2784 operation);
2786 break;
2788 case IPA_JF_ANCESTOR:
2790 bool agg_p;
2791 agg_p = dst_agg_p
2792 && ipa_get_jf_ancestor_agg_preserved (src);
2793 ipa_set_ancestor_jf (dst,
2794 ipa_get_jf_ancestor_offset (src),
2795 ipa_get_jf_ancestor_formal_id (src),
2796 agg_p);
2797 break;
2799 default:
2800 gcc_unreachable ();
2803 if (src->agg.items
2804 && (dst_agg_p || !src->agg.by_ref))
2806 /* Currently we do not produce clobber aggregate jump
2807 functions, replace with merging when we do. */
2808 gcc_assert (!dst->agg.items);
2810 dst->agg.by_ref = src->agg.by_ref;
2811 dst->agg.items = vec_safe_copy (src->agg.items);
2814 else
2815 ipa_set_jf_unknown (dst);
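/* Editorial example (illustrative only): suppose B has been inlined into A
   and B called C.  If the A->B edge carried a constant jump function 5 for
   an argument that B merely forwarded to C as a simple pass-through, the
   code above rewrites the surviving A->C jump function to the constant 5.
   An ancestor jump function composed with a pass-through instead keeps its
   offset and is re-pointed at the caller's formal parameter, as the
   IPA_JF_ANCESTOR branch spells out.  */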
2820 /* If TARGET is an addr_expr of a function declaration, make it the
2821 (SPECULATIVE) destination of an indirect edge IE and return the edge.
2822 Otherwise, return NULL. */
2824 struct cgraph_edge *
2825 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2826 bool speculative)
2828 struct cgraph_node *callee;
2829 struct ipa_call_summary *es = ipa_call_summaries->get (ie);
2830 bool unreachable = false;
2832 if (TREE_CODE (target) == ADDR_EXPR)
2833 target = TREE_OPERAND (target, 0);
2834 if (TREE_CODE (target) != FUNCTION_DECL)
2836 target = canonicalize_constructor_val (target, NULL);
2837 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2839 /* Member pointer call that goes through a VMT lookup. */
2840 if (ie->indirect_info->member_ptr
2841 /* Or if target is not an invariant expression and we do not
2842 know if it will evaluate to a function at runtime.
2843 This can happen when folding through &VAR, where &VAR
2844 is IP invariant, but VAR itself is not.
2846 TODO: Revisit this when GCC 5 is branched. It seems that
2847 member_ptr check is not needed and that we may try to fold
2848 the expression and see if VAR is readonly. */
2849 || !is_gimple_ip_invariant (target))
2851 if (dump_enabled_p ())
2853 location_t loc = gimple_location_safe (ie->call_stmt);
2854 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2855 "discovered direct call non-invariant %s\n",
2856 ie->caller->dump_name ());
2858 return NULL;
2862 if (dump_enabled_p ())
2864 location_t loc = gimple_location_safe (ie->call_stmt);
2865 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2866 "discovered direct call to non-function in %s, "
2867 "making it __builtin_unreachable\n",
2868 ie->caller->dump_name ());
2871 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2872 callee = cgraph_node::get_create (target);
2873 unreachable = true;
2875 else
2876 callee = cgraph_node::get (target);
2878 else
2879 callee = cgraph_node::get (target);
2881 /* Because may-edges are not explicitly represented and the vtable may be external,
2882 we may create the first reference to the object in the unit. */
2883 if (!callee || callee->global.inlined_to)
2886 /* We had better ensure we can refer to it.
2887 In the case of static functions we are out of luck, since we have already
2888 removed their bodies. In the case of public functions we may or may
2889 not introduce the reference. */
2890 if (!canonicalize_constructor_val (target, NULL)
2891 || !TREE_PUBLIC (target))
2893 if (dump_file)
2894 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2895 "(%s -> %s) but can not refer to it. Giving up.\n",
2896 ie->caller->dump_name (),
2897 ie->callee->dump_name ());
2898 return NULL;
2900 callee = cgraph_node::get_create (target);
2903 /* If the edge is already speculated. */
2904 if (speculative && ie->speculative)
2906 struct cgraph_edge *e2;
2907 struct ipa_ref *ref;
2908 ie->speculative_call_info (e2, ie, ref);
2909 if (e2->callee->ultimate_alias_target ()
2910 != callee->ultimate_alias_target ())
2912 if (dump_file)
2913 fprintf (dump_file, "ipa-prop: Discovered call to a speculative "
2914 "target (%s -> %s) but the call is already "
2915 "speculated to %s. Giving up.\n",
2916 ie->caller->dump_name (), callee->dump_name (),
2917 e2->callee->dump_name ());
2919 else
2921 if (dump_file)
2922 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2923 "(%s -> %s) this agree with previous speculation.\n",
2924 ie->caller->dump_name (), callee->dump_name ());
2926 return NULL;
2929 if (!dbg_cnt (devirt))
2930 return NULL;
2932 ipa_check_create_node_params ();
2934 /* We cannot make edges to inline clones. It is a bug that someone removed
2935 the cgraph node too early. */
2936 gcc_assert (!callee->global.inlined_to);
2938 if (dump_file && !unreachable)
2940 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2941 "(%s -> %s), for stmt ",
2942 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2943 speculative ? "speculative" : "known",
2944 ie->caller->dump_name (),
2945 callee->dump_name ());
2946 if (ie->call_stmt)
2947 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2948 else
2949 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2951 if (dump_enabled_p ())
2953 location_t loc = gimple_location_safe (ie->call_stmt);
2955 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2956 "converting indirect call in %s to direct call to %s\n",
2957 ie->caller->name (), callee->name ());
2959 if (!speculative)
2961 struct cgraph_edge *orig = ie;
2962 ie = ie->make_direct (callee);
2963 /* If we resolved a speculative edge, the cost is already up to date
2964 for the direct call (adjusted by inline_edge_duplication_hook). */
2965 if (ie == orig)
2967 es = ipa_call_summaries->get (ie);
2968 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2969 - eni_size_weights.call_cost);
2970 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2971 - eni_time_weights.call_cost);
2974 else
2976 if (!callee->can_be_discarded_p ())
2978 cgraph_node *alias;
2979 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2980 if (alias)
2981 callee = alias;
2983 /* make_speculative will update ie's cost to direct call cost. */
2984 ie = ie->make_speculative
2985 (callee, ie->count.apply_scale (8, 10), ie->frequency * 8 / 10);
2988 return ie;
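/* Editorial note: the apply_scale (8, 10) above encodes the heuristic
   assumption that a speculatively introduced direct call is taken roughly
   80% of the time, leaving the remaining 20% of the profile on the indirect
   fallback edge (an interpretation of the constants, not a statement from
   the source).  */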
2991 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
2992 CONSTRUCTOR and return it. Return NULL if the search fails for some
2993 reason. */
2995 static tree
2996 find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
2998 tree type = TREE_TYPE (constructor);
2999 if (TREE_CODE (type) != ARRAY_TYPE
3000 && TREE_CODE (type) != RECORD_TYPE)
3001 return NULL;
3003 unsigned ix;
3004 tree index, val;
3005 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
3007 HOST_WIDE_INT elt_offset;
3008 if (TREE_CODE (type) == ARRAY_TYPE)
3010 offset_int off;
3011 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
3012 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3014 if (index)
3016 if (TREE_CODE (index) == RANGE_EXPR)
3017 off = wi::to_offset (TREE_OPERAND (index, 0));
3018 else
3019 off = wi::to_offset (index);
3020 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
3022 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
3023 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3024 off = wi::sext (off - wi::to_offset (low_bound),
3025 TYPE_PRECISION (TREE_TYPE (index)));
3027 off *= wi::to_offset (unit_size);
3028 /* ??? Handle more than just the first index of a
3029 RANGE_EXPR. */
3031 else
3032 off = wi::to_offset (unit_size) * ix;
3034 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
3035 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
3036 continue;
3037 elt_offset = off.to_shwi ();
3039 else if (TREE_CODE (type) == RECORD_TYPE)
3041 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
3042 if (DECL_BIT_FIELD (index))
3043 continue;
3044 elt_offset = int_bit_position (index);
3046 else
3047 gcc_unreachable ();
3049 if (elt_offset > req_offset)
3050 return NULL;
3052 if (TREE_CODE (val) == CONSTRUCTOR)
3053 return find_constructor_constant_at_offset (val,
3054 req_offset - elt_offset);
3056 if (elt_offset == req_offset
3057 && is_gimple_reg_type (TREE_TYPE (val))
3058 && is_gimple_ip_invariant (val))
3059 return val;
3061 return NULL;
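/* Editorial example (illustrative only): with 32-bit int, looking up
   req_offset == 64 in the initializer of

     static const int tbl[4] = { 10, 20, 30, 40 };

   walks the ARRAY_TYPE branch above, computing element bit offsets 0, 32,
   64, ... until it returns the INTEGER_CST 30; a nested CONSTRUCTOR would
   instead recurse with the residual offset.  */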
3064 /* Check whether SCALAR could be used to look up an aggregate interprocedural
3065 invariant from a static constructor and if so, return it. Otherwise return
3066 NULL. */
3068 static tree
3069 ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
3071 if (by_ref)
3073 if (TREE_CODE (scalar) != ADDR_EXPR)
3074 return NULL;
3075 scalar = TREE_OPERAND (scalar, 0);
3078 if (!VAR_P (scalar)
3079 || !is_global_var (scalar)
3080 || !TREE_READONLY (scalar)
3081 || !DECL_INITIAL (scalar)
3082 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
3083 return NULL;
3085 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
3088 /* Retrieve value from aggregate jump function AGG or static initializer of
3089 SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
3090 none. BY_REF specifies whether the value has to be passed by reference or
3091 by value. If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
3092 to is set to true if the value comes from an initializer of a constant. */
3094 tree
3095 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar,
3096 HOST_WIDE_INT offset, bool by_ref,
3097 bool *from_global_constant)
3099 struct ipa_agg_jf_item *item;
3100 int i;
3102 if (scalar)
3104 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
3105 if (res)
3107 if (from_global_constant)
3108 *from_global_constant = true;
3109 return res;
3113 if (!agg
3114 || by_ref != agg->by_ref)
3115 return NULL;
3117 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
3118 if (item->offset == offset)
3120 /* Currently we do not have clobber values, return NULL for them once
3121 we do. */
3122 gcc_checking_assert (is_gimple_ip_invariant (item->value));
3123 if (from_global_constant)
3124 *from_global_constant = false;
3125 return item->value;
3127 return NULL;
3130 /* Remove a reference to SYMBOL from the list of references of a node given by
3131 reference description RDESC. Return true if the reference has been
3132 successfully found and removed. */
3134 static bool
3135 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
3137 struct ipa_ref *to_del;
3138 struct cgraph_edge *origin;
3140 origin = rdesc->cs;
3141 if (!origin)
3142 return false;
3143 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3144 origin->lto_stmt_uid);
3145 if (!to_del)
3146 return false;
3148 to_del->remove_reference ();
3149 if (dump_file)
3150 fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
3151 origin->caller->dump_name (), xstrdup_for_dump (symbol->name ()));
3152 return true;
3155 /* If JFUNC has a reference description with refcount different from
3156 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3157 NULL. JFUNC must be a constant jump function. */
3159 static struct ipa_cst_ref_desc *
3160 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3162 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3163 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3164 return rdesc;
3165 else
3166 return NULL;
3169 /* If the value of constant jump function JFUNC is an address of a function
3170 declaration, return the associated call graph node. Otherwise return
3171 NULL. */
3173 static cgraph_node *
3174 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3176 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3177 tree cst = ipa_get_jf_constant (jfunc);
3178 if (TREE_CODE (cst) != ADDR_EXPR
3179 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3180 return NULL;
3182 return cgraph_node::get (TREE_OPERAND (cst, 0));
3186 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3187 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3188 the edge specified in the rdesc. Return false if either the symbol or the
3189 reference could not be found, otherwise return true. */
3191 static bool
3192 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3194 struct ipa_cst_ref_desc *rdesc;
3195 if (jfunc->type == IPA_JF_CONST
3196 && (rdesc = jfunc_rdesc_usable (jfunc))
3197 && --rdesc->refcount == 0)
3199 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3200 if (!symbol)
3201 return false;
3203 return remove_described_reference (symbol, rdesc);
3205 return true;
3208 /* Try to find a destination for indirect edge IE that corresponds to a simple
3209 call or a call of a member function pointer and where the destination is a
3210 pointer formal parameter described by jump function JFUNC. If it can be
3211 determined, return the newly direct edge, otherwise return NULL.
3212 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
3214 static struct cgraph_edge *
3215 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3216 struct ipa_jump_func *jfunc,
3217 struct ipa_node_params *new_root_info)
3219 struct cgraph_edge *cs;
3220 tree target;
3221 bool agg_contents = ie->indirect_info->agg_contents;
3222 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc);
3223 if (agg_contents)
3225 bool from_global_constant;
3226 target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3227 ie->indirect_info->offset,
3228 ie->indirect_info->by_ref,
3229 &from_global_constant);
3230 if (target
3231 && !from_global_constant
3232 && !ie->indirect_info->guaranteed_unmodified)
3233 return NULL;
3235 else
3236 target = scalar;
3237 if (!target)
3238 return NULL;
3239 cs = ipa_make_edge_direct_to_target (ie, target);
3241 if (cs && !agg_contents)
3243 bool ok;
3244 gcc_checking_assert (cs->callee
3245 && (cs != ie
3246 || jfunc->type != IPA_JF_CONST
3247 || !cgraph_node_for_jfunc (jfunc)
3248 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3249 ok = try_decrement_rdesc_refcount (jfunc);
3250 gcc_checking_assert (ok);
3253 return cs;
3256 /* Return the target to be used in cases of impossible devirtualization. IE
3257 and target (the latter can be NULL) are dumped when dumping is enabled. */
3259 tree
3260 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3262 if (dump_file)
3264 if (target)
3265 fprintf (dump_file,
3266 "Type inconsistent devirtualization: %s->%s\n",
3267 ie->caller->dump_name (),
3268 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3269 else
3270 fprintf (dump_file,
3271 "No devirtualization target in %s\n",
3272 ie->caller->dump_name ());
3274 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3275 cgraph_node::get_create (new_target);
3276 return new_target;
3279 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3280 call based on a formal parameter which is described by jump function JFUNC
3281 and if it can be determined, make it direct and return the direct edge.
3282 Otherwise, return NULL. CTX describes the polymorphic context that the
3283 parameter the call is based on brings along with it. */
3285 static struct cgraph_edge *
3286 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3287 struct ipa_jump_func *jfunc,
3288 struct ipa_polymorphic_call_context ctx)
3290 tree target = NULL;
3291 bool speculative = false;
3293 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
3294 return NULL;
3296 gcc_assert (!ie->indirect_info->by_ref);
3298 /* Try to do lookup via known virtual table pointer value. */
3299 if (!ie->indirect_info->vptr_changed
3300 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
3302 tree vtable;
3303 unsigned HOST_WIDE_INT offset;
3304 tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3305 : NULL;
3306 tree t = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3307 ie->indirect_info->offset,
3308 true);
3309 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3311 bool can_refer;
3312 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3313 vtable, offset, &can_refer);
3314 if (can_refer)
3316 if (!t
3317 || (TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
3318 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
3319 || !possible_polymorphic_call_target_p
3320 (ie, cgraph_node::get (t)))
3322 /* Do not speculate builtin_unreachable, it is stupid! */
3323 if (!ie->indirect_info->vptr_changed)
3324 target = ipa_impossible_devirt_target (ie, target);
3325 else
3326 target = NULL;
3328 else
3330 target = t;
3331 speculative = ie->indirect_info->vptr_changed;
3337 ipa_polymorphic_call_context ie_context (ie);
3338 vec <cgraph_node *>targets;
3339 bool final;
3341 ctx.offset_by (ie->indirect_info->offset);
3342 if (ie->indirect_info->vptr_changed)
3343 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3344 ie->indirect_info->otr_type);
3345 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3346 targets = possible_polymorphic_call_targets
3347 (ie->indirect_info->otr_type,
3348 ie->indirect_info->otr_token,
3349 ctx, &final);
3350 if (final && targets.length () <= 1)
3352 speculative = false;
3353 if (targets.length () == 1)
3354 target = targets[0]->decl;
3355 else
3356 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3358 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3359 && !ie->speculative && ie->maybe_hot_p ())
3361 cgraph_node *n;
3362 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3363 ie->indirect_info->otr_token,
3364 ie->indirect_info->context);
3365 if (n)
3367 target = n->decl;
3368 speculative = true;
3372 if (target)
3374 if (!possible_polymorphic_call_target_p
3375 (ie, cgraph_node::get_create (target)))
3377 if (speculative)
3378 return NULL;
3379 target = ipa_impossible_devirt_target (ie, target);
3381 return ipa_make_edge_direct_to_target (ie, target, speculative);
3383 else
3384 return NULL;
3387 /* Update the param called notes associated with NODE when CS is being inlined,
3388 assuming NODE is (potentially indirectly) inlined into CS->callee.
3389 Moreover, if the callee is discovered to be constant, create a new cgraph
3390 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3391 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
3393 static bool
3394 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3395 struct cgraph_node *node,
3396 vec<cgraph_edge *> *new_edges)
3398 struct ipa_edge_args *top;
3399 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3400 struct ipa_node_params *new_root_info;
3401 bool res = false;
3403 ipa_check_create_edge_args ();
3404 top = IPA_EDGE_REF (cs);
3405 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3406 ? cs->caller->global.inlined_to
3407 : cs->caller);
3409 for (ie = node->indirect_calls; ie; ie = next_ie)
3411 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3412 struct ipa_jump_func *jfunc;
3413 int param_index;
3414 cgraph_node *spec_target = NULL;
3416 next_ie = ie->next_callee;
3418 if (ici->param_index == -1)
3419 continue;
3421 /* We must check range due to calls with variable number of arguments: */
3422 if (ici->param_index >= ipa_get_cs_argument_count (top))
3424 ici->param_index = -1;
3425 continue;
3428 param_index = ici->param_index;
3429 jfunc = ipa_get_ith_jump_func (top, param_index);
3431 if (ie->speculative)
3433 struct cgraph_edge *de;
3434 struct ipa_ref *ref;
3435 ie->speculative_call_info (de, ie, ref);
3436 spec_target = de->callee;
3439 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3440 new_direct_edge = NULL;
3441 else if (ici->polymorphic)
3443 ipa_polymorphic_call_context ctx;
3444 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3445 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3447 else
3448 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3449 new_root_info);
3450 /* If speculation was removed, then we need to do nothing. */
3451 if (new_direct_edge && new_direct_edge != ie
3452 && new_direct_edge->callee == spec_target)
3454 new_direct_edge->indirect_inlining_edge = 1;
3455 top = IPA_EDGE_REF (cs);
3456 res = true;
3457 if (!new_direct_edge->speculative)
3458 continue;
3460 else if (new_direct_edge)
3462 new_direct_edge->indirect_inlining_edge = 1;
3463 if (new_direct_edge->call_stmt)
3464 new_direct_edge->call_stmt_cannot_inline_p
3465 = !gimple_check_call_matching_types (
3466 new_direct_edge->call_stmt,
3467 new_direct_edge->callee->decl, false);
3468 if (new_edges)
3470 new_edges->safe_push (new_direct_edge);
3471 res = true;
3473 top = IPA_EDGE_REF (cs);
3474 /* If speculative edge was introduced we still need to update
3475 call info of the indirect edge. */
3476 if (!new_direct_edge->speculative)
3477 continue;
3479 if (jfunc->type == IPA_JF_PASS_THROUGH
3480 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3482 if (ici->agg_contents
3483 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3484 && !ici->polymorphic)
3485 ici->param_index = -1;
3486 else
3488 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3489 if (ici->polymorphic
3490 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3491 ici->vptr_changed = true;
3494 else if (jfunc->type == IPA_JF_ANCESTOR)
3496 if (ici->agg_contents
3497 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3498 && !ici->polymorphic)
3499 ici->param_index = -1;
3500 else
3502 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3503 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3504 if (ici->polymorphic
3505 && !ipa_get_jf_ancestor_type_preserved (jfunc))
3506 ici->vptr_changed = true;
3509 else
3510 /* Either we can find a destination for this edge now or never. */
3511 ici->param_index = -1;
3514 return res;
3517 /* Recursively traverse subtree of NODE (including node) made of inlined
3518 cgraph_edges when CS has been inlined and invoke
3519 update_indirect_edges_after_inlining on all nodes and
3520 update_jump_functions_after_inlining on all non-inlined edges that lead out
3521 of this subtree. Newly discovered indirect edges will be added to
3522 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
3523 created. */
3525 static bool
3526 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3527 struct cgraph_node *node,
3528 vec<cgraph_edge *> *new_edges)
3530 struct cgraph_edge *e;
3531 bool res;
3533 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3535 for (e = node->callees; e; e = e->next_callee)
3536 if (!e->inline_failed)
3537 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3538 else
3539 update_jump_functions_after_inlining (cs, e);
3540 for (e = node->indirect_calls; e; e = e->next_callee)
3541 update_jump_functions_after_inlining (cs, e);
3543 return res;
3546 /* Combine two controlled uses counts as done during inlining. */
3548 static int
3549 combine_controlled_uses_counters (int c, int d)
3551 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3552 return IPA_UNDESCRIBED_USE;
3553 else
3554 return c + d - 1;
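/* Editorial worked example: combining c == 3 controlled uses in the new
   root with d == 2 in the inlined callee yields 3 + 2 - 1 == 4, because the
   call statement that has just been inlined away was counted once on each
   side and no longer exists.  An IPA_UNDESCRIBED_USE on either side is
   contagious, as the check above shows.  */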
3557 /* Propagate the number of controlled users from CS->callee to the new root of the
3558 tree of inlined nodes. */
3560 static void
3561 propagate_controlled_uses (struct cgraph_edge *cs)
3563 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3564 struct cgraph_node *new_root = cs->caller->global.inlined_to
3565 ? cs->caller->global.inlined_to : cs->caller;
3566 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3567 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3568 int count, i;
3570 count = MIN (ipa_get_cs_argument_count (args),
3571 ipa_get_param_count (old_root_info));
3572 for (i = 0; i < count; i++)
3574 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3575 struct ipa_cst_ref_desc *rdesc;
3577 if (jf->type == IPA_JF_PASS_THROUGH)
3579 int src_idx, c, d;
3580 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3581 c = ipa_get_controlled_uses (new_root_info, src_idx);
3582 d = ipa_get_controlled_uses (old_root_info, i);
3584 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3585 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3586 c = combine_controlled_uses_counters (c, d);
3587 ipa_set_controlled_uses (new_root_info, src_idx, c);
3588 if (c == 0 && new_root_info->ipcp_orig_node)
3590 struct cgraph_node *n;
3591 struct ipa_ref *ref;
3592 tree t = new_root_info->known_csts[src_idx];
3594 if (t && TREE_CODE (t) == ADDR_EXPR
3595 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3596 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3597 && (ref = new_root->find_reference (n, NULL, 0)))
3599 if (dump_file)
3600 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3601 "reference from %s to %s.\n",
3602 new_root->dump_name (),
3603 n->dump_name ());
3604 ref->remove_reference ();
3608 else if (jf->type == IPA_JF_CONST
3609 && (rdesc = jfunc_rdesc_usable (jf)))
3611 int d = ipa_get_controlled_uses (old_root_info, i);
3612 int c = rdesc->refcount;
3613 rdesc->refcount = combine_controlled_uses_counters (c, d);
3614 if (rdesc->refcount == 0)
3616 tree cst = ipa_get_jf_constant (jf);
3617 struct cgraph_node *n;
3618 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3619 && TREE_CODE (TREE_OPERAND (cst, 0))
3620 == FUNCTION_DECL);
3621 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3622 if (n)
3624 struct cgraph_node *clone;
3625 bool ok;
3626 ok = remove_described_reference (n, rdesc);
3627 gcc_checking_assert (ok);
3629 clone = cs->caller;
3630 while (clone->global.inlined_to
3631 && clone != rdesc->cs->caller
3632 && IPA_NODE_REF (clone)->ipcp_orig_node)
3634 struct ipa_ref *ref;
3635 ref = clone->find_reference (n, NULL, 0);
3636 if (ref)
3638 if (dump_file)
3639 fprintf (dump_file, "ipa-prop: Removing "
3640 "cloning-created reference "
3641 "from %s to %s.\n",
3642 clone->dump_name (),
3643 n->dump_name ());
3644 ref->remove_reference ();
3646 clone = clone->callers->caller;
3653 for (i = ipa_get_param_count (old_root_info);
3654 i < ipa_get_cs_argument_count (args);
3655 i++)
3657 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3659 if (jf->type == IPA_JF_CONST)
3661 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3662 if (rdesc)
3663 rdesc->refcount = IPA_UNDESCRIBED_USE;
3665 else if (jf->type == IPA_JF_PASS_THROUGH)
3666 ipa_set_controlled_uses (new_root_info,
3667 jf->value.pass_through.formal_id,
3668 IPA_UNDESCRIBED_USE);
3672 /* Update jump functions and call note functions on inlining the call site CS.
3673 CS is expected to lead to a node already cloned by
3674 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3675 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edge(s) were
3676 created. */
3678 bool
3679 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3680 vec<cgraph_edge *> *new_edges)
3682 bool changed;
3683 /* Do nothing if the preparation phase has not been carried out yet
3684 (i.e. during early inlining). */
3685 if (!ipa_node_params_sum)
3686 return false;
3687 gcc_assert (ipa_edge_args_sum);
3689 propagate_controlled_uses (cs);
3690 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3692 return changed;
3695 /* Ensure that the array of edge argument infos is big enough to accommodate a
3696 structure for all edges and reallocate it if not. Also, allocate the
3697 associated hash tables if they do not already exist. */
3699 void
3700 ipa_check_create_edge_args (void)
3702 if (!ipa_edge_args_sum)
3703 ipa_edge_args_sum
3704 = (new (ggc_cleared_alloc <ipa_edge_args_sum_t> ())
3705 ipa_edge_args_sum_t (symtab, true));
3706 if (!ipa_bits_hash_table)
3707 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3708 if (!ipa_vr_hash_table)
3709 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
3712 /* Frees all dynamically allocated structures that the argument info points
3713 to. */
3715 void
3716 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3718 vec_free (args->jump_functions);
3719 *args = ipa_edge_args ();
3722 /* Free all ipa_edge structures. */
3724 void
3725 ipa_free_all_edge_args (void)
3727 if (!ipa_edge_args_sum)
3728 return;
3730 ipa_edge_args_sum->release ();
3731 ipa_edge_args_sum = NULL;
3734 /* Free all ipa_node_params structures. */
3736 void
3737 ipa_free_all_node_params (void)
3739 ipa_node_params_sum->release ();
3740 ipa_node_params_sum = NULL;
3743 /* Grow ipcp_transformations if necessary. Also allocate any necessary hash
3744 tables if they do not already exist. */
3746 void
3747 ipcp_grow_transformations_if_necessary (void)
3749 if (vec_safe_length (ipcp_transformations)
3750 <= (unsigned) symtab->cgraph_max_uid)
3751 vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
3752 if (!ipa_bits_hash_table)
3753 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3754 if (!ipa_vr_hash_table)
3755 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
3758 /* Set the aggregate replacements of NODE to be AGGVALS. */
3760 void
3761 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3762 struct ipa_agg_replacement_value *aggvals)
3764 ipcp_grow_transformations_if_necessary ();
3765 (*ipcp_transformations)[node->uid].agg_values = aggvals;
3768 /* Hook that is called by cgraph.c when an edge is removed. Adjust reference
3769 count data structures accordingly. */
3771 void
3772 ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
3774 if (args->jump_functions)
3776 struct ipa_jump_func *jf;
3777 int i;
3778 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3780 struct ipa_cst_ref_desc *rdesc;
3781 try_decrement_rdesc_refcount (jf);
3782 if (jf->type == IPA_JF_CONST
3783 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3784 && rdesc->cs == cs)
3785 rdesc->cs = NULL;
3790 /* Method invoked when an edge is duplicated. Copy ipa_edge_args and adjust
3791 reference count data structures accordingly. */
3793 void
3794 ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
3795 ipa_edge_args *old_args, ipa_edge_args *new_args)
3797 unsigned int i;
3799 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3800 if (old_args->polymorphic_call_contexts)
3801 new_args->polymorphic_call_contexts
3802 = vec_safe_copy (old_args->polymorphic_call_contexts);
3804 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3806 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3807 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3809 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3811 if (src_jf->type == IPA_JF_CONST)
3813 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3815 if (!src_rdesc)
3816 dst_jf->value.constant.rdesc = NULL;
3817 else if (src->caller == dst->caller)
3819 struct ipa_ref *ref;
3820 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3821 gcc_checking_assert (n);
3822 ref = src->caller->find_reference (n, src->call_stmt,
3823 src->lto_stmt_uid);
3824 gcc_checking_assert (ref);
3825 dst->caller->clone_reference (ref, ref->stmt);
3827 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3828 dst_rdesc->cs = dst;
3829 dst_rdesc->refcount = src_rdesc->refcount;
3830 dst_rdesc->next_duplicate = NULL;
3831 dst_jf->value.constant.rdesc = dst_rdesc;
3833 else if (src_rdesc->cs == src)
3835 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3836 dst_rdesc->cs = dst;
3837 dst_rdesc->refcount = src_rdesc->refcount;
3838 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3839 src_rdesc->next_duplicate = dst_rdesc;
3840 dst_jf->value.constant.rdesc = dst_rdesc;
3842 else
3844 struct ipa_cst_ref_desc *dst_rdesc;
3845 /* This can happen during inlining, when a JFUNC can refer to a
3846 reference taken in a function up in the tree of inline clones.
3847 We need to find the duplicate that refers to our tree of
3848 inline clones. */
3850 gcc_assert (dst->caller->global.inlined_to);
3851 for (dst_rdesc = src_rdesc->next_duplicate;
3852 dst_rdesc;
3853 dst_rdesc = dst_rdesc->next_duplicate)
3855 struct cgraph_node *top;
3856 top = dst_rdesc->cs->caller->global.inlined_to
3857 ? dst_rdesc->cs->caller->global.inlined_to
3858 : dst_rdesc->cs->caller;
3859 if (dst->caller->global.inlined_to == top)
3860 break;
3862 gcc_assert (dst_rdesc);
3863 dst_jf->value.constant.rdesc = dst_rdesc;
3866 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3867 && src->caller == dst->caller)
3869 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3870 ? dst->caller->global.inlined_to : dst->caller;
3871 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3872 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3874 int c = ipa_get_controlled_uses (root_info, idx);
3875 if (c != IPA_UNDESCRIBED_USE)
3877 c++;
3878 ipa_set_controlled_uses (root_info, idx, c);
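/* In short, the constant cases above are: when DST shares SRC's caller, the
   underlying ipa_ref is cloned and a fresh rdesc pointing at DST is
   allocated; when the source rdesc belongs to SRC itself, the fresh rdesc is
   chained into SRC's next_duplicate list; otherwise SRC was itself created
   by duplication, and the existing duplicate matching our tree of inline
   clones is found and reused.  */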
3884 /* Analyze a function newly added to the callgraph. */
3886 static void
3887 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3889 if (node->has_gimple_body_p ())
3890 ipa_analyze_node (node);
3893 /* Hook that is called by the summary code when a node is duplicated. */
3895 void
3896 ipa_node_params_t::duplicate (cgraph_node *src, cgraph_node *dst,
3897 ipa_node_params *old_info,
3898 ipa_node_params *new_info)
3900 ipa_agg_replacement_value *old_av, *new_av;
3902 new_info->descriptors = vec_safe_copy (old_info->descriptors);
3903 new_info->lattices = NULL;
3904 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3905 new_info->known_csts = old_info->known_csts.copy ();
3906 new_info->known_contexts = old_info->known_contexts.copy ();
3908 new_info->analysis_done = old_info->analysis_done;
3909 new_info->node_enqueued = old_info->node_enqueued;
3910 new_info->versionable = old_info->versionable;
3912 old_av = ipa_get_agg_replacements_for_node (src);
3913 if (old_av)
3915 new_av = NULL;
3916 while (old_av)
3918 struct ipa_agg_replacement_value *v;
3920 v = ggc_alloc<ipa_agg_replacement_value> ();
3921 memcpy (v, old_av, sizeof (*v));
3922 v->next = new_av;
3923 new_av = v;
3924 old_av = old_av->next;
3926 ipa_set_node_agg_value_chain (dst, new_av);
3929 ipcp_transformation_summary *src_trans
3930 = ipcp_get_transformation_summary (src);
3932 if (src_trans)
3934 ipcp_grow_transformations_if_necessary ();
3935 src_trans = ipcp_get_transformation_summary (src);
3936 ipcp_transformation_summary *dst_trans
3937 = ipcp_get_transformation_summary (dst);
3939 dst_trans->bits = vec_safe_copy (src_trans->bits);
3941 const vec<ipa_vr, va_gc> *src_vr = src_trans->m_vr;
3942 vec<ipa_vr, va_gc> *&dst_vr
3943 = ipcp_get_transformation_summary (dst)->m_vr;
3944 if (vec_safe_length (src_trans->m_vr) > 0)
3946 vec_safe_reserve_exact (dst_vr, src_vr->length ());
3947 for (unsigned i = 0; i < src_vr->length (); ++i)
3948 dst_vr->quick_push ((*src_vr)[i]);
3953 /* Register our cgraph hooks if they are not already there. */
3955 void
3956 ipa_register_cgraph_hooks (void)
3958 ipa_check_create_node_params ();
3959 ipa_check_create_edge_args ();
3961 function_insertion_hook_holder =
3962 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3965 /* Unregister the cgraph hooks registered above. */
3967 static void
3968 ipa_unregister_cgraph_hooks (void)
3970 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3971 function_insertion_hook_holder = NULL;
3974 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3975 longer needed after ipa-cp. */
3977 void
3978 ipa_free_all_structures_after_ipa_cp (void)
3980 if (!optimize && !in_lto_p)
3982 ipa_free_all_edge_args ();
3983 ipa_free_all_node_params ();
3984 ipcp_sources_pool.release ();
3985 ipcp_cst_values_pool.release ();
3986 ipcp_poly_ctx_values_pool.release ();
3987 ipcp_agg_lattice_pool.release ();
3988 ipa_unregister_cgraph_hooks ();
3989 ipa_refdesc_pool.release ();
3993 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3994 longer needed after indirect inlining. */
3996 void
3997 ipa_free_all_structures_after_iinln (void)
3999 ipa_free_all_edge_args ();
4000 ipa_free_all_node_params ();
4001 ipa_unregister_cgraph_hooks ();
4002 ipcp_sources_pool.release ();
4003 ipcp_cst_values_pool.release ();
4004 ipcp_poly_ctx_values_pool.release ();
4005 ipcp_agg_lattice_pool.release ();
4006 ipa_refdesc_pool.release ();
4009 /* Print ipa_tree_map data structures of function NODE to F. */
4012 void
4013 ipa_print_node_params (FILE *f, struct cgraph_node *node)
4015 int i, count;
4016 struct ipa_node_params *info;
4018 if (!node->definition)
4019 return;
4020 info = IPA_NODE_REF (node);
4021 fprintf (f, " function %s parameter descriptors:\n", node->dump_name ());
4022 count = ipa_get_param_count (info);
4023 for (i = 0; i < count; i++)
4025 int c;
4027 fprintf (f, " ");
4028 ipa_dump_param (f, info, i);
4029 if (ipa_is_param_used (info, i))
4030 fprintf (f, " used");
4031 c = ipa_get_controlled_uses (info, i);
4032 if (c == IPA_UNDESCRIBED_USE)
4033 fprintf (f, " undescribed_use");
4034 else
4035 fprintf (f, " controlled_uses=%i", c);
4036 fprintf (f, "\n");
4040 /* Print ipa_tree_map data structures of all functions in the
4041 callgraph to F. */
4043 void
4044 ipa_print_all_params (FILE * f)
4046 struct cgraph_node *node;
4048 fprintf (f, "\nFunction parameters:\n");
4049 FOR_EACH_FUNCTION (node)
4050 ipa_print_node_params (f, node);
4053 /* Return a heap-allocated vector containing the formal parameters of FNDECL. */
4055 vec<tree>
4056 ipa_get_vector_of_formal_parms (tree fndecl)
4058 vec<tree> args;
4059 int count;
4060 tree parm;
4062 gcc_assert (!flag_wpa);
4063 count = count_formal_params (fndecl);
4064 args.create (count);
4065 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
4066 args.quick_push (parm);
4068 return args;
4071 /* Return a heap-allocated vector containing the types of the formal
4072 parameters of function type FNTYPE. */
4074 vec<tree>
4075 ipa_get_vector_of_formal_parm_types (tree fntype)
4077 vec<tree> types;
4078 int count = 0;
4079 tree t;
4081 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
4082 count++;
4084 types.create (count);
4085 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
4086 types.quick_push (TREE_VALUE (t));
4088 return types;
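/* An illustrative example of the two helpers above: for a declaration

     int f (int a, char *b);

   ipa_get_vector_of_formal_parms yields the PARM_DECLs of A and B, while
   ipa_get_vector_of_formal_parm_types applied to the FUNCTION_TYPE yields
   {int, char *, void} -- the trailing void comes from the terminator of the
   TYPE_ARG_TYPES chain, which is why ipa_modify_formal_parameters below
   asserts oparms.length () + 1 == otypes.length () for prototyped
   functions.  Both vectors must be released by the caller.  */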
4091 /* Modify the function declaration FNDECL and its type according to the plan in
4092 ADJUSTMENTS. It also sets base fields of individual adjustments structures
4093 to reflect the actual parameters being modified which are determined by the
4094 base_index field. */
4096 void
4097 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
4099 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
4100 tree orig_type = TREE_TYPE (fndecl);
4101 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
4103 /* The following test is an ugly hack; some functions simply don't have any
4104 arguments in their type. This is probably a bug but well... */
4105 bool care_for_types = (old_arg_types != NULL_TREE);
4106 bool last_parm_void;
4107 vec<tree> otypes;
4108 if (care_for_types)
4110 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
4111 == void_type_node);
4112 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
4113 if (last_parm_void)
4114 gcc_assert (oparms.length () + 1 == otypes.length ());
4115 else
4116 gcc_assert (oparms.length () == otypes.length ());
4118 else
4120 last_parm_void = false;
4121 otypes.create (0);
4124 int len = adjustments.length ();
4125 tree *link = &DECL_ARGUMENTS (fndecl);
4126 tree new_arg_types = NULL;
4127 for (int i = 0; i < len; i++)
4129 struct ipa_parm_adjustment *adj;
4130 gcc_assert (link);
4132 adj = &adjustments[i];
4133 tree parm;
4134 if (adj->op == IPA_PARM_OP_NEW)
4135 parm = NULL;
4136 else
4137 parm = oparms[adj->base_index];
4138 adj->base = parm;
4140 if (adj->op == IPA_PARM_OP_COPY)
4142 if (care_for_types)
4143 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
4144 new_arg_types);
4145 *link = parm;
4146 link = &DECL_CHAIN (parm);
4148 else if (adj->op != IPA_PARM_OP_REMOVE)
4150 tree new_parm;
4151 tree ptype;
4153 if (adj->by_ref)
4154 ptype = build_pointer_type (adj->type);
4155 else
4157 ptype = adj->type;
4158 if (is_gimple_reg_type (ptype)
4159 && TYPE_MODE (ptype) != BLKmode)
4161 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
4162 if (TYPE_ALIGN (ptype) != malign)
4163 ptype = build_aligned_type (ptype, malign);
4167 if (care_for_types)
4168 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
4170 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
4171 ptype);
4172 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
4173 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
4174 DECL_ARTIFICIAL (new_parm) = 1;
4175 DECL_ARG_TYPE (new_parm) = ptype;
4176 DECL_CONTEXT (new_parm) = fndecl;
4177 TREE_USED (new_parm) = 1;
4178 DECL_IGNORED_P (new_parm) = 1;
4179 layout_decl (new_parm, 0);
4181 if (adj->op == IPA_PARM_OP_NEW)
4182 adj->base = NULL;
4183 else
4184 adj->base = parm;
4185 adj->new_decl = new_parm;
4187 *link = new_parm;
4188 link = &DECL_CHAIN (new_parm);
4192 *link = NULL_TREE;
4194 tree new_reversed = NULL;
4195 if (care_for_types)
4197 new_reversed = nreverse (new_arg_types);
4198 if (last_parm_void)
4200 if (new_reversed)
4201 TREE_CHAIN (new_arg_types) = void_list_node;
4202 else
4203 new_reversed = void_list_node;
4207 /* Use build_distinct_type_copy to preserve as much as possible from the
4208 original type (debug info, attribute lists etc.).
4209 The exception is METHOD_TYPEs, which must have a THIS argument.
4210 When we are asked to remove it, we need to build a new FUNCTION_TYPE
4211 instead. */
4212 tree new_type = NULL;
4213 if (TREE_CODE (orig_type) != METHOD_TYPE
4214 || (adjustments[0].op == IPA_PARM_OP_COPY
4215 && adjustments[0].base_index == 0))
4217 new_type = build_distinct_type_copy (orig_type);
4218 TYPE_ARG_TYPES (new_type) = new_reversed;
4220 else
4222 new_type
4223 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
4224 new_reversed));
4225 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
4226 DECL_VINDEX (fndecl) = NULL_TREE;
4229 /* When signature changes, we need to clear builtin info. */
4230 if (DECL_BUILT_IN (fndecl))
4232 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
4233 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
4236 TREE_TYPE (fndecl) = new_type;
4237 DECL_VIRTUAL_P (fndecl) = 0;
4238 DECL_LANG_SPECIFIC (fndecl) = NULL;
4239 otypes.release ();
4240 oparms.release ();
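/* A hypothetical example of the rewrite above: for

     void f (int a, struct S *s);

   an adjustment vector whose first entry copies parameter 0 and whose
   second entry (neither a copy nor a removal) reduces *S to its integer
   field at bit offset 32 turns the declaration into something like

     void f (int a, int ISRA.1);

   where ISRA.1 is a synthesized PARM_DECL whose name prefix comes from
   adj->arg_prefix ("SYNTH" being the fallback used above).  */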
4243 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
4244 If this is a directly recursive call, CS must be NULL. Otherwise it must
4245 contain the corresponding call graph edge. */
4247 void
4248 ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
4249 ipa_parm_adjustment_vec adjustments)
4251 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
4252 vec<tree> vargs;
4253 vec<tree, va_gc> **debug_args = NULL;
4254 gcall *new_stmt;
4255 gimple_stmt_iterator gsi, prev_gsi;
4256 tree callee_decl;
4257 int i, len;
4259 len = adjustments.length ();
4260 vargs.create (len);
4261 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
4262 current_node->remove_stmt_references (stmt);
4264 gsi = gsi_for_stmt (stmt);
4265 prev_gsi = gsi;
4266 gsi_prev (&prev_gsi);
4267 for (i = 0; i < len; i++)
4269 struct ipa_parm_adjustment *adj;
4271 adj = &adjustments[i];
4273 if (adj->op == IPA_PARM_OP_COPY)
4275 tree arg = gimple_call_arg (stmt, adj->base_index);
4277 vargs.quick_push (arg);
4279 else if (adj->op != IPA_PARM_OP_REMOVE)
4281 tree expr, base, off;
4282 location_t loc;
4283 unsigned int deref_align = 0;
4284 bool deref_base = false;
4286 /* When we create a new parameter out of the value of the old one, we can
4287 do the following kinds of transformations:
4289 - A scalar passed by reference is converted to a scalar passed by
4290 value. (adj->by_ref is false and the type of the original
4291 actual argument is a pointer to a scalar).
4293 - A part of an aggregate is passed instead of the whole aggregate.
4294 The part can be passed either by value or by reference, this is
4295 determined by value of adj->by_ref. Moreover, the code below
4296 handles both situations when the original aggregate is passed by
4297 value (its type is not a pointer) and when it is passed by
4298 reference (it is a pointer to an aggregate).
4300 When the new argument is passed by reference (adj->by_ref is true)
4301 it must be a part of an aggregate and therefore we form it by
4302 simply taking the address of a reference inside the original
4303 aggregate. */
4305 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
4306 base = gimple_call_arg (stmt, adj->base_index);
4307 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
4308 : EXPR_LOCATION (base);
4310 if (TREE_CODE (base) != ADDR_EXPR
4311 && POINTER_TYPE_P (TREE_TYPE (base)))
4312 off = build_int_cst (adj->alias_ptr_type,
4313 adj->offset / BITS_PER_UNIT);
4314 else
4316 HOST_WIDE_INT base_offset;
4317 tree prev_base;
4318 bool addrof;
4320 if (TREE_CODE (base) == ADDR_EXPR)
4322 base = TREE_OPERAND (base, 0);
4323 addrof = true;
4325 else
4326 addrof = false;
4327 prev_base = base;
4328 base = get_addr_base_and_unit_offset (base, &base_offset);
4329 /* Aggregate arguments can have non-invariant addresses. */
4330 if (!base)
4332 base = build_fold_addr_expr (prev_base);
4333 off = build_int_cst (adj->alias_ptr_type,
4334 adj->offset / BITS_PER_UNIT);
4336 else if (TREE_CODE (base) == MEM_REF)
4338 if (!addrof)
4340 deref_base = true;
4341 deref_align = TYPE_ALIGN (TREE_TYPE (base));
4343 off = build_int_cst (adj->alias_ptr_type,
4344 base_offset
4345 + adj->offset / BITS_PER_UNIT);
4346 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
4347 off);
4348 base = TREE_OPERAND (base, 0);
4350 else
4352 off = build_int_cst (adj->alias_ptr_type,
4353 base_offset
4354 + adj->offset / BITS_PER_UNIT);
4355 base = build_fold_addr_expr (base);
4359 if (!adj->by_ref)
4361 tree type = adj->type;
4362 unsigned int align;
4363 unsigned HOST_WIDE_INT misalign;
4365 if (deref_base)
4367 align = deref_align;
4368 misalign = 0;
4370 else
4372 get_pointer_alignment_1 (base, &align, &misalign);
4373 if (TYPE_ALIGN (type) > align)
4374 align = TYPE_ALIGN (type);
4376 misalign += (offset_int::from (wi::to_wide (off),
4377 SIGNED).to_short_addr ()
4378 * BITS_PER_UNIT);
4379 misalign = misalign & (align - 1);
4380 if (misalign != 0)
4381 align = least_bit_hwi (misalign);
4382 if (align < TYPE_ALIGN (type))
4383 type = build_aligned_type (type, align);
4384 base = force_gimple_operand_gsi (&gsi, base,
4385 true, NULL, true, GSI_SAME_STMT);
4386 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
4387 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
4388 /* If expr is not a valid gimple call argument, emit
4389 a load into a temporary. */
4390 if (is_gimple_reg_type (TREE_TYPE (expr)))
4392 gimple *tem = gimple_build_assign (NULL_TREE, expr);
4393 if (gimple_in_ssa_p (cfun))
4395 gimple_set_vuse (tem, gimple_vuse (stmt));
4396 expr = make_ssa_name (TREE_TYPE (expr), tem);
4398 else
4399 expr = create_tmp_reg (TREE_TYPE (expr));
4400 gimple_assign_set_lhs (tem, expr);
4401 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4404 else
4406 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4407 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
4408 expr = build_fold_addr_expr (expr);
4409 expr = force_gimple_operand_gsi (&gsi, expr,
4410 true, NULL, true, GSI_SAME_STMT);
4412 vargs.quick_push (expr);
4414 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4416 unsigned int ix;
4417 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4418 gimple *def_temp;
4420 arg = gimple_call_arg (stmt, adj->base_index);
4421 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4423 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4424 continue;
4425 arg = fold_convert_loc (gimple_location (stmt),
4426 TREE_TYPE (origin), arg);
4428 if (debug_args == NULL)
4429 debug_args = decl_debug_args_insert (callee_decl);
4430 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4431 if (ddecl == origin)
4433 ddecl = (**debug_args)[ix + 1];
4434 break;
4436 if (ddecl == NULL)
4438 ddecl = make_node (DEBUG_EXPR_DECL);
4439 DECL_ARTIFICIAL (ddecl) = 1;
4440 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4441 SET_DECL_MODE (ddecl, DECL_MODE (origin));
4443 vec_safe_push (*debug_args, origin);
4444 vec_safe_push (*debug_args, ddecl);
4446 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4447 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4451 if (dump_file && (dump_flags & TDF_DETAILS))
4453 fprintf (dump_file, "replacing stmt:");
4454 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0);
4457 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4458 vargs.release ();
4459 if (gimple_call_lhs (stmt))
4460 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4462 gimple_set_block (new_stmt, gimple_block (stmt));
4463 if (gimple_has_location (stmt))
4464 gimple_set_location (new_stmt, gimple_location (stmt));
4465 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4466 gimple_call_copy_flags (new_stmt, stmt);
4467 if (gimple_in_ssa_p (cfun))
4469 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4470 if (gimple_vdef (stmt))
4472 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4473 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4477 if (dump_file && (dump_flags & TDF_DETAILS))
4479 fprintf (dump_file, "with stmt:");
4480 print_gimple_stmt (dump_file, new_stmt, 0);
4481 fprintf (dump_file, "\n");
4483 gsi_replace (&gsi, new_stmt, true);
4484 if (cs)
4485 cs->set_call_stmt (new_stmt);
4488 current_node->record_stmt_references (gsi_stmt (gsi));
4489 gsi_prev (&gsi);
4491 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4494 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4495 so. ADJUSTMENTS is the vector of adjustments. CONVERT
4496 specifies whether the function should care about type incompatibility between
4497 the current and new expressions. If it is false, the function will leave
4498 incompatibility issues to the caller. Return true iff the expression
4499 was modified. */
4501 bool
4502 ipa_modify_expr (tree *expr, bool convert,
4503 ipa_parm_adjustment_vec adjustments)
4505 struct ipa_parm_adjustment *cand
4506 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4507 if (!cand)
4508 return false;
4510 tree src;
4511 if (cand->by_ref)
4513 src = build_simple_mem_ref (cand->new_decl);
4514 REF_REVERSE_STORAGE_ORDER (src) = cand->reverse;
4516 else
4517 src = cand->new_decl;
4519 if (dump_file && (dump_flags & TDF_DETAILS))
4521 fprintf (dump_file, "About to replace expr ");
4522 print_generic_expr (dump_file, *expr);
4523 fprintf (dump_file, " with ");
4524 print_generic_expr (dump_file, src);
4525 fprintf (dump_file, "\n");
4528 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4530 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4531 *expr = vce;
4533 else
4534 *expr = src;
4535 return true;
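/* A hypothetical example: if parameter *P was reduced to a new scalar
   parameter ISRA.3 holding the value of P->F, then a use such as

     tmp_1 = p_2(D)->f;

   has its right-hand side replaced by ISRA.3, wrapped in a
   VIEW_CONVERT_EXPR when CONVERT is set and the types fail
   useless_type_conversion_p.  */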
4538 /* If T is an SSA_NAME, return NULL if it is not a default def or
4539 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4540 the base variable is always returned, regardless if it is a default
4541 def. Return T if it is not an SSA_NAME. */
4543 static tree
4544 get_ssa_base_param (tree t, bool ignore_default_def)
4546 if (TREE_CODE (t) == SSA_NAME)
4548 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4549 return SSA_NAME_VAR (t);
4550 else
4551 return NULL_TREE;
4553 return t;
4556 /* Given an expression, return an adjustment entry specifying the
4557 transformation to be done on EXPR. If no suitable adjustment entry
4558 was found, returns NULL.
4560 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4561 default def, otherwise bail on them.
4563 If CONVERT is non-NULL, this function will set *CONVERT if the
4564 expression provided is a component reference. ADJUSTMENTS is the
4565 adjustments vector. */
4567 ipa_parm_adjustment *
4568 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4569 ipa_parm_adjustment_vec adjustments,
4570 bool ignore_default_def)
4572 if (TREE_CODE (**expr) == BIT_FIELD_REF
4573 || TREE_CODE (**expr) == IMAGPART_EXPR
4574 || TREE_CODE (**expr) == REALPART_EXPR)
4576 *expr = &TREE_OPERAND (**expr, 0);
4577 if (convert)
4578 *convert = true;
4581 HOST_WIDE_INT offset, size, max_size;
4582 bool reverse;
4583 tree base
4584 = get_ref_base_and_extent (**expr, &offset, &size, &max_size, &reverse);
4585 if (!base || size == -1 || max_size == -1)
4586 return NULL;
4588 if (TREE_CODE (base) == MEM_REF)
4590 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4591 base = TREE_OPERAND (base, 0);
4594 base = get_ssa_base_param (base, ignore_default_def);
4595 if (!base || TREE_CODE (base) != PARM_DECL)
4596 return NULL;
4598 struct ipa_parm_adjustment *cand = NULL;
4599 unsigned int len = adjustments.length ();
4600 for (unsigned i = 0; i < len; i++)
4602 struct ipa_parm_adjustment *adj = &adjustments[i];
4604 if (adj->base == base
4605 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4607 cand = adj;
4608 break;
4612 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4613 return NULL;
4614 return cand;
4617 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4619 static bool
4620 index_in_adjustments_multiple_times_p (int base_index,
4621 ipa_parm_adjustment_vec adjustments)
4623 int i, len = adjustments.length ();
4624 bool one = false;
4626 for (i = 0; i < len; i++)
4628 struct ipa_parm_adjustment *adj;
4629 adj = &adjustments[i];
4631 if (adj->base_index == base_index)
4633 if (one)
4634 return true;
4635 else
4636 one = true;
4639 return false;
4643 /* Return adjustments that should have the same effect on function parameters
4644 and call arguments as if they were first changed according to adjustments in
4645 INNER and then by adjustments in OUTER. */
4647 ipa_parm_adjustment_vec
4648 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4649 ipa_parm_adjustment_vec outer)
4651 int i, outlen = outer.length ();
4652 int inlen = inner.length ();
4653 int removals = 0;
4654 ipa_parm_adjustment_vec adjustments, tmp;
4656 tmp.create (inlen);
4657 for (i = 0; i < inlen; i++)
4659 struct ipa_parm_adjustment *n;
4660 n = &inner[i];
4662 if (n->op == IPA_PARM_OP_REMOVE)
4663 removals++;
4664 else
4666 /* FIXME: Handling of new arguments is not implemented yet. */
4667 gcc_assert (n->op != IPA_PARM_OP_NEW);
4668 tmp.quick_push (*n);
4672 adjustments.create (outlen + removals);
4673 for (i = 0; i < outlen; i++)
4675 struct ipa_parm_adjustment r;
4676 struct ipa_parm_adjustment *out = &outer[i];
4677 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4679 memset (&r, 0, sizeof (r));
4680 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4681 if (out->op == IPA_PARM_OP_REMOVE)
4683 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4685 r.op = IPA_PARM_OP_REMOVE;
4686 adjustments.quick_push (r);
4688 continue;
4690 else
4692 /* FIXME: Handling of new arguments is not implemented yet. */
4693 gcc_assert (out->op != IPA_PARM_OP_NEW);
4696 r.base_index = in->base_index;
4697 r.type = out->type;
4699 /* FIXME: Create nonlocal value too. */
4701 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4702 r.op = IPA_PARM_OP_COPY;
4703 else if (in->op == IPA_PARM_OP_COPY)
4704 r.offset = out->offset;
4705 else if (out->op == IPA_PARM_OP_COPY)
4706 r.offset = in->offset;
4707 else
4708 r.offset = in->offset + out->offset;
4709 adjustments.quick_push (r);
4712 for (i = 0; i < inlen; i++)
4714 struct ipa_parm_adjustment *n = &inner[i];
4716 if (n->op == IPA_PARM_OP_REMOVE)
4717 adjustments.quick_push (*n);
4720 tmp.release ();
4721 return adjustments;
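/* A worked example with hypothetical indices: let INNER turn (a, b, c)
   into (a, c) via {COPY 0, REMOVE 1, COPY 2}, and let OUTER then drop the
   first intermediate parameter via {REMOVE 0, COPY 1}.  TMP becomes
   {COPY 0, COPY 2}, so the loop above emits a removal for OUTER's REMOVE
   and {COPY, base_index 2} for its copy, and the trailing loop re-appends
   INNER's {REMOVE 1}; the combined vector thus removes A and B and keeps
   C, as expected.  */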
4724 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a
4725 human-friendly way, assuming they are meant to be applied to FNDECL. */
4727 void
4728 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4729 tree fndecl)
4731 int i, len = adjustments.length ();
4732 bool first = true;
4733 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4735 fprintf (file, "IPA param adjustments: ");
4736 for (i = 0; i < len; i++)
4738 struct ipa_parm_adjustment *adj;
4739 adj = &adjustments[i];
4741 if (!first)
4742 fprintf (file, " ");
4743 else
4744 first = false;
4746 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4747 print_generic_expr (file, parms[adj->base_index]);
4748 if (adj->base)
4750 fprintf (file, ", base: ");
4751 print_generic_expr (file, adj->base);
4753 if (adj->new_decl)
4755 fprintf (file, ", new_decl: ");
4756 print_generic_expr (file, adj->new_decl);
4758 if (adj->new_ssa_base)
4760 fprintf (file, ", new_ssa_base: ");
4761 print_generic_expr (file, adj->new_ssa_base);
4764 if (adj->op == IPA_PARM_OP_COPY)
4765 fprintf (file, ", copy_param");
4766 else if (adj->op == IPA_PARM_OP_REMOVE)
4767 fprintf (file, ", remove_param");
4768 else
4769 fprintf (file, ", offset %li", (long) adj->offset);
4770 if (adj->by_ref)
4771 fprintf (file, ", by_ref");
4772 print_node_brief (file, ", type: ", adj->type, 0);
4773 fprintf (file, "\n");
4775 parms.release ();
4778 /* Dump the AV linked list. */
4780 void
4781 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4783 bool comma = false;
4784 fprintf (f, " Aggregate replacements:");
4785 for (; av; av = av->next)
4787 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4788 av->index, av->offset);
4789 print_generic_expr (f, av->value);
4790 comma = true;
4792 fprintf (f, "\n");
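/* A sample of the format produced above, with made-up values:

     Aggregate replacements: 0[0]=4, 1[32]=7

   i.e. parameter 0 holds the constant 4 at bit offset 0 and parameter 1
   holds 7 at bit offset 32.  */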
4795 /* Stream out jump function JUMP_FUNC to OB. */
4797 static void
4798 ipa_write_jump_function (struct output_block *ob,
4799 struct ipa_jump_func *jump_func)
4801 struct ipa_agg_jf_item *item;
4802 struct bitpack_d bp;
4803 int i, count;
4805 streamer_write_uhwi (ob, jump_func->type);
4806 switch (jump_func->type)
4808 case IPA_JF_UNKNOWN:
4809 break;
4810 case IPA_JF_CONST:
4811 gcc_assert (
4812 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4813 stream_write_tree (ob, jump_func->value.constant.value, true);
4814 break;
4815 case IPA_JF_PASS_THROUGH:
4816 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4817 if (jump_func->value.pass_through.operation == NOP_EXPR)
4819 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4820 bp = bitpack_create (ob->main_stream);
4821 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4822 streamer_write_bitpack (&bp);
4824 else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
4825 == tcc_unary)
4826 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4827 else
4829 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4830 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4832 break;
4833 case IPA_JF_ANCESTOR:
4834 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4835 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4836 bp = bitpack_create (ob->main_stream);
4837 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4838 streamer_write_bitpack (&bp);
4839 break;
4842 count = vec_safe_length (jump_func->agg.items);
4843 streamer_write_uhwi (ob, count);
4844 if (count)
4846 bp = bitpack_create (ob->main_stream);
4847 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4848 streamer_write_bitpack (&bp);
4851 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4853 streamer_write_uhwi (ob, item->offset);
4854 stream_write_tree (ob, item->value, true);
4857 bp = bitpack_create (ob->main_stream);
4858 bp_pack_value (&bp, !!jump_func->bits, 1);
4859 streamer_write_bitpack (&bp);
4860 if (jump_func->bits)
4862 streamer_write_widest_int (ob, jump_func->bits->value);
4863 streamer_write_widest_int (ob, jump_func->bits->mask);
4865 bp_pack_value (&bp, !!jump_func->m_vr, 1);
4866 streamer_write_bitpack (&bp);
4867 if (jump_func->m_vr)
4869 streamer_write_enum (ob->main_stream, value_range_type,
4870 VR_LAST, jump_func->m_vr->type);
4871 stream_write_tree (ob, jump_func->m_vr->min, true);
4872 stream_write_tree (ob, jump_func->m_vr->max, true);
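/* The record emitted above is thus: the jump function type, its
   type-specific payload, the count of aggregate items followed by the
   by_ref bitpack and one (offset, value) pair per item, a presence bit
   plus (value, mask) for the known-bits info, and a presence bit plus
   (type, min, max) for the value range.  ipa_read_jump_function below
   must consume exactly this layout.  */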
4876 /* Read in jump function JUMP_FUNC from IB. */
4878 static void
4879 ipa_read_jump_function (struct lto_input_block *ib,
4880 struct ipa_jump_func *jump_func,
4881 struct cgraph_edge *cs,
4882 struct data_in *data_in)
4884 enum jump_func_type jftype;
4885 enum tree_code operation;
4886 int i, count;
4888 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4889 switch (jftype)
4891 case IPA_JF_UNKNOWN:
4892 ipa_set_jf_unknown (jump_func);
4893 break;
4894 case IPA_JF_CONST:
4895 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4896 break;
4897 case IPA_JF_PASS_THROUGH:
4898 operation = (enum tree_code) streamer_read_uhwi (ib);
4899 if (operation == NOP_EXPR)
4901 int formal_id = streamer_read_uhwi (ib);
4902 struct bitpack_d bp = streamer_read_bitpack (ib);
4903 bool agg_preserved = bp_unpack_value (&bp, 1);
4904 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4906 else if (TREE_CODE_CLASS (operation) == tcc_unary)
4908 int formal_id = streamer_read_uhwi (ib);
4909 ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4911 else
4913 tree operand = stream_read_tree (ib, data_in);
4914 int formal_id = streamer_read_uhwi (ib);
4915 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4916 operation);
4918 break;
4919 case IPA_JF_ANCESTOR:
4921 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4922 int formal_id = streamer_read_uhwi (ib);
4923 struct bitpack_d bp = streamer_read_bitpack (ib);
4924 bool agg_preserved = bp_unpack_value (&bp, 1);
4925 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4926 break;
4930 count = streamer_read_uhwi (ib);
4931 vec_alloc (jump_func->agg.items, count);
4932 if (count)
4934 struct bitpack_d bp = streamer_read_bitpack (ib);
4935 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4937 for (i = 0; i < count; i++)
4939 struct ipa_agg_jf_item item;
4940 item.offset = streamer_read_uhwi (ib);
4941 item.value = stream_read_tree (ib, data_in);
4942 jump_func->agg.items->quick_push (item);
4945 struct bitpack_d bp = streamer_read_bitpack (ib);
4946 bool bits_known = bp_unpack_value (&bp, 1);
4947 if (bits_known)
4949 widest_int value = streamer_read_widest_int (ib);
4950 widest_int mask = streamer_read_widest_int (ib);
4951 ipa_set_jfunc_bits (jump_func, value, mask);
4953 else
4954 jump_func->bits = NULL;
4956 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4957 bool vr_known = bp_unpack_value (&vr_bp, 1);
4958 if (vr_known)
4960 enum value_range_type type = streamer_read_enum (ib, value_range_type,
4961 VR_LAST);
4962 tree min = stream_read_tree (ib, data_in);
4963 tree max = stream_read_tree (ib, data_in);
4964 ipa_set_jfunc_vr (jump_func, type, min, max);
4966 else
4967 jump_func->m_vr = NULL;
4970 /* Stream out to OB the parts of cgraph_indirect_call_info corresponding to CS
4971 that are relevant to indirect inlining. */
4973 static void
4974 ipa_write_indirect_edge_info (struct output_block *ob,
4975 struct cgraph_edge *cs)
4977 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4978 struct bitpack_d bp;
4980 streamer_write_hwi (ob, ii->param_index);
4981 bp = bitpack_create (ob->main_stream);
4982 bp_pack_value (&bp, ii->polymorphic, 1);
4983 bp_pack_value (&bp, ii->agg_contents, 1);
4984 bp_pack_value (&bp, ii->member_ptr, 1);
4985 bp_pack_value (&bp, ii->by_ref, 1);
4986 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
4987 bp_pack_value (&bp, ii->vptr_changed, 1);
4988 streamer_write_bitpack (&bp);
4989 if (ii->agg_contents || ii->polymorphic)
4990 streamer_write_hwi (ob, ii->offset);
4991 else
4992 gcc_assert (ii->offset == 0);
4994 if (ii->polymorphic)
4996 streamer_write_hwi (ob, ii->otr_token);
4997 stream_write_tree (ob, ii->otr_type, true);
4998 ii->context.stream_out (ob);
5002 /* Read in from IB the parts of cgraph_indirect_call_info corresponding to CS
5003 that are relevant to indirect inlining. */
5005 static void
5006 ipa_read_indirect_edge_info (struct lto_input_block *ib,
5007 struct data_in *data_in,
5008 struct cgraph_edge *cs)
5010 struct cgraph_indirect_call_info *ii = cs->indirect_info;
5011 struct bitpack_d bp;
5013 ii->param_index = (int) streamer_read_hwi (ib);
5014 bp = streamer_read_bitpack (ib);
5015 ii->polymorphic = bp_unpack_value (&bp, 1);
5016 ii->agg_contents = bp_unpack_value (&bp, 1);
5017 ii->member_ptr = bp_unpack_value (&bp, 1);
5018 ii->by_ref = bp_unpack_value (&bp, 1);
5019 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
5020 ii->vptr_changed = bp_unpack_value (&bp, 1);
5021 if (ii->agg_contents || ii->polymorphic)
5022 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
5023 else
5024 ii->offset = 0;
5025 if (ii->polymorphic)
5027 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
5028 ii->otr_type = stream_read_tree (ib, data_in);
5029 ii->context.stream_in (ib, data_in);
5033 /* Stream out NODE info to OB. */
5035 static void
5036 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
5038 int node_ref;
5039 lto_symtab_encoder_t encoder;
5040 struct ipa_node_params *info = IPA_NODE_REF (node);
5041 int j;
5042 struct cgraph_edge *e;
5043 struct bitpack_d bp;
5045 encoder = ob->decl_state->symtab_node_encoder;
5046 node_ref = lto_symtab_encoder_encode (encoder, node);
5047 streamer_write_uhwi (ob, node_ref);
5049 streamer_write_uhwi (ob, ipa_get_param_count (info));
5050 for (j = 0; j < ipa_get_param_count (info); j++)
5051 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
5052 bp = bitpack_create (ob->main_stream);
5053 gcc_assert (info->analysis_done
5054 || ipa_get_param_count (info) == 0);
5055 gcc_assert (!info->node_enqueued);
5056 gcc_assert (!info->ipcp_orig_node);
5057 for (j = 0; j < ipa_get_param_count (info); j++)
5058 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
5059 streamer_write_bitpack (&bp);
5060 for (j = 0; j < ipa_get_param_count (info); j++)
5062 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
5063 stream_write_tree (ob, ipa_get_type (info, j), true);
5065 for (e = node->callees; e; e = e->next_callee)
5067 struct ipa_edge_args *args = IPA_EDGE_REF (e);
5069 streamer_write_uhwi (ob,
5070 ipa_get_cs_argument_count (args) * 2
5071 + (args->polymorphic_call_contexts != NULL));
5072 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
5074 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
5075 if (args->polymorphic_call_contexts != NULL)
5076 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
5079 for (e = node->indirect_calls; e; e = e->next_callee)
5081 struct ipa_edge_args *args = IPA_EDGE_REF (e);
5083 streamer_write_uhwi (ob,
5084 ipa_get_cs_argument_count (args) * 2
5085 + (args->polymorphic_call_contexts != NULL));
5086 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
5088 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
5089 if (args->polymorphic_call_contexts != NULL)
5090 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
5092 ipa_write_indirect_edge_info (ob, e);
5096 /* Stream in NODE info from IB. */
5098 static void
5099 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
5100 struct data_in *data_in)
5102 struct ipa_node_params *info = IPA_NODE_REF (node);
5103 int k;
5104 struct cgraph_edge *e;
5105 struct bitpack_d bp;
5107 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
5109 for (k = 0; k < ipa_get_param_count (info); k++)
5110 (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
5112 bp = streamer_read_bitpack (ib);
5113 if (ipa_get_param_count (info) != 0)
5114 info->analysis_done = true;
5115 info->node_enqueued = false;
5116 for (k = 0; k < ipa_get_param_count (info); k++)
5117 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
5118 for (k = 0; k < ipa_get_param_count (info); k++)
5120 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
5121 (*info->descriptors)[k].decl_or_type = stream_read_tree (ib, data_in);
5123 for (e = node->callees; e; e = e->next_callee)
5125 struct ipa_edge_args *args = IPA_EDGE_REF (e);
5126 int count = streamer_read_uhwi (ib);
5127 bool contexts_computed = count & 1;
5128 count /= 2;
5130 if (!count)
5131 continue;
5132 vec_safe_grow_cleared (args->jump_functions, count);
5133 if (contexts_computed)
5134 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
5136 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
5138 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5139 data_in);
5140 if (contexts_computed)
5141 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
5144 for (e = node->indirect_calls; e; e = e->next_callee)
5146 struct ipa_edge_args *args = IPA_EDGE_REF (e);
5147 int count = streamer_read_uhwi (ib);
5148 bool contexts_computed = count & 1;
5149 count /= 2;
5151 if (count)
5153 vec_safe_grow_cleared (args->jump_functions, count);
5154 if (contexts_computed)
5155 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
5156 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
5158 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5159 data_in);
5160 if (contexts_computed)
5161 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
5164 ipa_read_indirect_edge_info (ib, data_in, e);
5168 /* Write jump functions for all functions in the current partition. */
5170 void
5171 ipa_prop_write_jump_functions (void)
5173 struct cgraph_node *node;
5174 struct output_block *ob;
5175 unsigned int count = 0;
5176 lto_symtab_encoder_iterator lsei;
5177 lto_symtab_encoder_t encoder;
5179 if (!ipa_node_params_sum || !ipa_edge_args_sum)
5180 return;
5182 ob = create_output_block (LTO_section_jump_functions);
5183 encoder = ob->decl_state->symtab_node_encoder;
5184 ob->symbol = NULL;
5185 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5186 lsei_next_function_in_partition (&lsei))
5188 node = lsei_cgraph_node (lsei);
5189 if (node->has_gimple_body_p ()
5190 && IPA_NODE_REF (node) != NULL)
5191 count++;
5194 streamer_write_uhwi (ob, count);
5196 /* Process all of the functions. */
5197 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5198 lsei_next_function_in_partition (&lsei))
5200 node = lsei_cgraph_node (lsei);
5201 if (node->has_gimple_body_p ()
5202 && IPA_NODE_REF (node) != NULL)
5203 ipa_write_node_info (ob, node);
5205 streamer_write_char_stream (ob->main_stream, 0);
5206 produce_asm (ob, NULL);
5207 destroy_output_block (ob);
5210 /* Read section in file FILE_DATA of length LEN with data DATA. */
5212 static void
5213 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
5214 size_t len)
5216 const struct lto_function_header *header =
5217 (const struct lto_function_header *) data;
5218 const int cfg_offset = sizeof (struct lto_function_header);
5219 const int main_offset = cfg_offset + header->cfg_size;
5220 const int string_offset = main_offset + header->main_size;
5221 struct data_in *data_in;
5222 unsigned int i;
5223 unsigned int count;
5225 lto_input_block ib_main ((const char *) data + main_offset,
5226 header->main_size, file_data->mode_table);
5228 data_in =
5229 lto_data_in_create (file_data, (const char *) data + string_offset,
5230 header->string_size, vNULL);
5231 count = streamer_read_uhwi (&ib_main);
5233 for (i = 0; i < count; i++)
5235 unsigned int index;
5236 struct cgraph_node *node;
5237 lto_symtab_encoder_t encoder;
5239 index = streamer_read_uhwi (&ib_main);
5240 encoder = file_data->symtab_node_encoder;
5241 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5242 index));
5243 gcc_assert (node->definition);
5244 ipa_read_node_info (&ib_main, node, data_in);
5246 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5247 len);
5248 lto_data_in_delete (data_in);
5251 /* Read IPA-CP jump functions. */
5253 void
5254 ipa_prop_read_jump_functions (void)
5256 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5257 struct lto_file_decl_data *file_data;
5258 unsigned int j = 0;
5260 ipa_check_create_node_params ();
5261 ipa_check_create_edge_args ();
5262 ipa_register_cgraph_hooks ();
5264 while ((file_data = file_data_vec[j++]))
5266 size_t len;
5267 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
5269 if (data)
5270 ipa_prop_read_section (file_data, data, len);
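/* Write the IPA-CP transformation summary for NODE to OB: the chain of
   aggregate replacement values followed by the value range and known-bits
   vectors.  */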
5274 void
5275 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
5277 int node_ref;
5278 unsigned int count = 0;
5279 lto_symtab_encoder_t encoder;
5280 struct ipa_agg_replacement_value *aggvals, *av;
5282 aggvals = ipa_get_agg_replacements_for_node (node);
5283 encoder = ob->decl_state->symtab_node_encoder;
5284 node_ref = lto_symtab_encoder_encode (encoder, node);
5285 streamer_write_uhwi (ob, node_ref);
5287 for (av = aggvals; av; av = av->next)
5288 count++;
5289 streamer_write_uhwi (ob, count);
5291 for (av = aggvals; av; av = av->next)
5293 struct bitpack_d bp;
5295 streamer_write_uhwi (ob, av->offset);
5296 streamer_write_uhwi (ob, av->index);
5297 stream_write_tree (ob, av->value, true);
5299 bp = bitpack_create (ob->main_stream);
5300 bp_pack_value (&bp, av->by_ref, 1);
5301 streamer_write_bitpack (&bp);
5304 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5305 if (ts && vec_safe_length (ts->m_vr) > 0)
5307 count = ts->m_vr->length ();
5308 streamer_write_uhwi (ob, count);
5309 for (unsigned i = 0; i < count; ++i)
5311 struct bitpack_d bp;
5312 ipa_vr *parm_vr = &(*ts->m_vr)[i];
5313 bp = bitpack_create (ob->main_stream);
5314 bp_pack_value (&bp, parm_vr->known, 1);
5315 streamer_write_bitpack (&bp);
5316 if (parm_vr->known)
5318 streamer_write_enum (ob->main_stream, value_range_type,
5319 VR_LAST, parm_vr->type);
5320 streamer_write_wide_int (ob, parm_vr->min);
5321 streamer_write_wide_int (ob, parm_vr->max);
5325 else
5326 streamer_write_uhwi (ob, 0);
5328 if (ts && vec_safe_length (ts->bits) > 0)
5330 count = ts->bits->length ();
5331 streamer_write_uhwi (ob, count);
5333 for (unsigned i = 0; i < count; ++i)
5335 const ipa_bits *bits_jfunc = (*ts->bits)[i];
5336 struct bitpack_d bp = bitpack_create (ob->main_stream);
5337 bp_pack_value (&bp, !!bits_jfunc, 1);
5338 streamer_write_bitpack (&bp);
5339 if (bits_jfunc)
5341 streamer_write_widest_int (ob, bits_jfunc->value);
5342 streamer_write_widest_int (ob, bits_jfunc->mask);
5346 else
5347 streamer_write_uhwi (ob, 0);
5350 /* Stream in the IPA-CP transformation summaries for NODE from IB. */
5352 static void
5353 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
5354 data_in *data_in)
5356 struct ipa_agg_replacement_value *aggvals = NULL;
5357 unsigned int count, i;
5359 count = streamer_read_uhwi (ib);
5360 for (i = 0; i < count; i++)
5362 struct ipa_agg_replacement_value *av;
5363 struct bitpack_d bp;
5365 av = ggc_alloc<ipa_agg_replacement_value> ();
5366 av->offset = streamer_read_uhwi (ib);
5367 av->index = streamer_read_uhwi (ib);
5368 av->value = stream_read_tree (ib, data_in);
5369 bp = streamer_read_bitpack (ib);
5370 av->by_ref = bp_unpack_value (&bp, 1);
5371 av->next = aggvals;
5372 aggvals = av;
5374 ipa_set_node_agg_value_chain (node, aggvals);
5376 count = streamer_read_uhwi (ib);
5377 if (count > 0)
5379 ipcp_grow_transformations_if_necessary ();
5381 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5382 vec_safe_grow_cleared (ts->m_vr, count);
5383 for (i = 0; i < count; i++)
5385 ipa_vr *parm_vr;
5386 parm_vr = &(*ts->m_vr)[i];
5387 struct bitpack_d bp;
5388 bp = streamer_read_bitpack (ib);
5389 parm_vr->known = bp_unpack_value (&bp, 1);
5390 if (parm_vr->known)
5392 parm_vr->type = streamer_read_enum (ib, value_range_type,
5393 VR_LAST);
5394 parm_vr->min = streamer_read_wide_int (ib);
5395 parm_vr->max = streamer_read_wide_int (ib);
5399 count = streamer_read_uhwi (ib);
5400 if (count > 0)
5402 ipcp_grow_transformations_if_necessary ();
5404 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5405 vec_safe_grow_cleared (ts->bits, count);
5407 for (i = 0; i < count; i++)
5409 struct bitpack_d bp = streamer_read_bitpack (ib);
5410 bool known = bp_unpack_value (&bp, 1);
5411 if (known)
5413 ipa_bits *bits
5414 = ipa_get_ipa_bits_for_value (streamer_read_widest_int (ib),
5415 streamer_read_widest_int (ib));
5416 (*ts->bits)[i] = bits;
5422 /* Write the IPA-CP transformation summaries of all functions in the current partition. */
5424 void
5425 ipcp_write_transformation_summaries (void)
5427 struct cgraph_node *node;
5428 struct output_block *ob;
5429 unsigned int count = 0;
5430 lto_symtab_encoder_iterator lsei;
5431 lto_symtab_encoder_t encoder;
5433 ob = create_output_block (LTO_section_ipcp_transform);
5434 encoder = ob->decl_state->symtab_node_encoder;
5435 ob->symbol = NULL;
5436 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5437 lsei_next_function_in_partition (&lsei))
5439 node = lsei_cgraph_node (lsei);
5440 if (node->has_gimple_body_p ())
5441 count++;
5444 streamer_write_uhwi (ob, count);
5446 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5447 lsei_next_function_in_partition (&lsei))
5449 node = lsei_cgraph_node (lsei);
5450 if (node->has_gimple_body_p ())
5451 write_ipcp_transformation_info (ob, node);
5453 streamer_write_char_stream (ob->main_stream, 0);
5454 produce_asm (ob, NULL);
5455 destroy_output_block (ob);
5458 /* Read replacements section in file FILE_DATA of length LEN with data
5459 DATA. */
5461 static void
5462 read_replacements_section (struct lto_file_decl_data *file_data,
5463 const char *data,
5464 size_t len)
5466 const struct lto_function_header *header =
5467 (const struct lto_function_header *) data;
5468 const int cfg_offset = sizeof (struct lto_function_header);
5469 const int main_offset = cfg_offset + header->cfg_size;
5470 const int string_offset = main_offset + header->main_size;
5471 struct data_in *data_in;
5472 unsigned int i;
5473 unsigned int count;
5475 lto_input_block ib_main ((const char *) data + main_offset,
5476 header->main_size, file_data->mode_table);
5478 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5479 header->string_size, vNULL);
5480 count = streamer_read_uhwi (&ib_main);
5482 for (i = 0; i < count; i++)
5484 unsigned int index;
5485 struct cgraph_node *node;
5486 lto_symtab_encoder_t encoder;
5488 index = streamer_read_uhwi (&ib_main);
5489 encoder = file_data->symtab_node_encoder;
5490 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5491 index));
5492 gcc_assert (node->definition);
5493 read_ipcp_transformation_info (&ib_main, node, data_in);
5495 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
5496 len);
5497 lto_data_in_delete (data_in);
5500 /* Read IPA-CP aggregate replacements. */
5502 void
5503 ipcp_read_transformation_summaries (void)
5505 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5506 struct lto_file_decl_data *file_data;
5507 unsigned int j = 0;
5509 while ((file_data = file_data_vec[j++]))
5511 size_t len;
5512 const char *data = lto_get_section_data (file_data,
5513 LTO_section_ipcp_transform,
5514 NULL, &len);
5515 if (data)
5516 read_replacements_section (file_data, data, len);
5520 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5521 NODE. */
5523 static void
5524 adjust_agg_replacement_values (struct cgraph_node *node,
5525 struct ipa_agg_replacement_value *aggval)
5527 struct ipa_agg_replacement_value *v;
5528 int i, c = 0, d = 0, *adj;
5530 if (!node->clone.combined_args_to_skip)
5531 return;
5533 for (v = aggval; v; v = v->next)
5535 gcc_assert (v->index >= 0);
5536 if (c < v->index)
5537 c = v->index;
5539 c++;
5541 adj = XALLOCAVEC (int, c);
5542 for (i = 0; i < c; i++)
5543 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5545 adj[i] = -1;
5546 d++;
5548 else
5549 adj[i] = i - d;
5551 for (v = aggval; v; v = v->next)
5552 v->index = adj[v->index];
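/* For instance, if NODE skips original argument 1 of three, the mapping
   computed above is adj = {0, -1, 1}: a replacement recorded for original
   parameter 2 is retargeted to index 1 in the clone, and nothing may
   refer to the skipped slot.  */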
5555 /* Dominator walker driving the ipcp modification phase. */
5557 class ipcp_modif_dom_walker : public dom_walker
5559 public:
5560 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
5561 vec<ipa_param_descriptor, va_gc> *descs,
5562 struct ipa_agg_replacement_value *av,
5563 bool *sc, bool *cc)
5564 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5565 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5567 virtual edge before_dom_children (basic_block);
5569 private:
5570 struct ipa_func_body_info *m_fbi;
5571 vec<ipa_param_descriptor, va_gc> *m_descriptors;
5572 struct ipa_agg_replacement_value *m_aggval;
5573 bool *m_something_changed, *m_cfg_changed;
5576 edge
5577 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5579 gimple_stmt_iterator gsi;
5580 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5582 struct ipa_agg_replacement_value *v;
5583 gimple *stmt = gsi_stmt (gsi);
5584 tree rhs, val, t;
5585 HOST_WIDE_INT offset, size;
5586 int index;
5587 bool by_ref, vce;
5589 if (!gimple_assign_load_p (stmt))
5590 continue;
5591 rhs = gimple_assign_rhs1 (stmt);
5592 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5593 continue;
5595 vce = false;
5596 t = rhs;
5597 while (handled_component_p (t))
5599 /* V_C_E can do things like convert an array of integers into one
5600 bigger integer, and similar things that we do not handle below. */
5601 if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
5603 vce = true;
5604 break;
5606 t = TREE_OPERAND (t, 0);
5608 if (vce)
5609 continue;
5611 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
5612 &offset, &size, &by_ref))
5613 continue;
5614 for (v = m_aggval; v; v = v->next)
5615 if (v->index == index
5616 && v->offset == offset)
5617 break;
5618 if (!v
5619 || v->by_ref != by_ref
5620 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5621 continue;
5623 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5624 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5626 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5627 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5628 else if (TYPE_SIZE (TREE_TYPE (rhs))
5629 == TYPE_SIZE (TREE_TYPE (v->value)))
5630 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5631 else
5633 if (dump_file)
5635 fprintf (dump_file, " const ");
5636 print_generic_expr (dump_file, v->value);
5637 fprintf (dump_file, " can't be converted to type of ");
5638 print_generic_expr (dump_file, rhs);
5639 fprintf (dump_file, "\n");
5641 continue;
5644 else
5645 val = v->value;
5647 if (dump_file && (dump_flags & TDF_DETAILS))
5649 fprintf (dump_file, "Modifying stmt:\n ");
5650 print_gimple_stmt (dump_file, stmt, 0);
5652 gimple_assign_set_rhs_from_tree (&gsi, val);
5653 update_stmt (stmt);
5655 if (dump_file && (dump_flags & TDF_DETAILS))
5657 fprintf (dump_file, "into:\n ");
5658 print_gimple_stmt (dump_file, stmt, 0);
5659 fprintf (dump_file, "\n");
5662 *m_something_changed = true;
5663 if (maybe_clean_eh_stmt (stmt)
5664 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5665 *m_cfg_changed = true;
5667 return NULL;
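/* A sketch of the rewrite performed by this walker, with made-up values:
   given the aggregate replacement "1[32]=7" for by-reference parameter P,
   a dominated load

     x_1 = MEM[(struct S *)p_2(D) + 4B];

   becomes

     x_1 = 7;

   provided the index, offset, size and by_ref flag of the access all
   match the recorded replacement.  */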
5670 /* Update bits info of formal parameters as described in
5671 ipcp_transformation_summary. */
5673 static void
5674 ipcp_update_bits (struct cgraph_node *node)
5676 tree parm = DECL_ARGUMENTS (node->decl);
5677 tree next_parm = parm;
5678 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5680 if (!ts || vec_safe_length (ts->bits) == 0)
5681 return;
5683 vec<ipa_bits *, va_gc> &bits = *ts->bits;
5684 unsigned count = bits.length ();
5686 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5688 if (node->clone.combined_args_to_skip
5689 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5690 continue;
5692 gcc_checking_assert (parm);
5693 next_parm = DECL_CHAIN (parm);
5695 if (!bits[i]
5696 || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
5697 || POINTER_TYPE_P (TREE_TYPE (parm)))
5698 || !is_gimple_reg (parm))
5699 continue;
5701 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5702 if (!ddef)
5703 continue;
5705 if (dump_file)
5707 fprintf (dump_file, "Adjusting mask for param %u to ", i);
5708 print_hex (bits[i]->mask, dump_file);
5709 fprintf (dump_file, "\n");
5712 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5714 unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
5715 signop sgn = TYPE_SIGN (TREE_TYPE (ddef));
5717 wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
5718 | wide_int::from (bits[i]->value, prec, sgn);
5719 set_nonzero_bits (ddef, nonzero_bits);
5721 else
5723 unsigned tem = bits[i]->mask.to_uhwi ();
5724 unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
5725 unsigned align = tem & -tem;
5726 unsigned misalign = bitpos & (align - 1);
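/* The mask has a bit set for each bit whose value is unknown, so its
   lowest set bit bounds the provable pointer alignment.  E.g. (made-up
   values) mask 0xf0 and value 0x8 yield align 0x10 and misalign 8.  */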
5728 if (align > 1)
5730 if (dump_file)
5731 fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);
5733 unsigned old_align, old_misalign;
5734 struct ptr_info_def *pi = get_ptr_info (ddef);
5735 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5737 if (old_known
5738 && old_align > align)
5740 if (dump_file)
5742 fprintf (dump_file, "But alignment was already %u.\n", old_align);
5743 if ((old_misalign & (align - 1)) != misalign)
5744 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5745 old_misalign, misalign);
5747 continue;
5750 if (old_known
5751 && ((misalign & (old_align - 1)) != old_misalign)
5752 && dump_file)
5753 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5754 old_misalign, misalign);
5756 set_ptr_info_alignment (pi, align, misalign);
5762 /* Update value range of formal parameters as described in
5763 ipcp_transformation_summary. */
5765 static void
5766 ipcp_update_vr (struct cgraph_node *node)
5768 tree fndecl = node->decl;
5769 tree parm = DECL_ARGUMENTS (fndecl);
5770 tree next_parm = parm;
5771 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5772 if (!ts || vec_safe_length (ts->m_vr) == 0)
5773 return;
5774 const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
5775 unsigned count = vr.length ();
5777 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5779 if (node->clone.combined_args_to_skip
5780 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5781 continue;
5782 gcc_checking_assert (parm);
5783 next_parm = DECL_CHAIN (parm);
5784 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5786 if (!ddef || !is_gimple_reg (parm))
5787 continue;
5789 if (vr[i].known
5790 && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
5792 tree type = TREE_TYPE (ddef);
5793 unsigned prec = TYPE_PRECISION (type);
5794 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5796 if (dump_file)
5798 fprintf (dump_file, "Setting value range of param %u ", i);
5799 fprintf (dump_file, "%s[",
5800 (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
5801 print_decs (vr[i].min, dump_file);
5802 fprintf (dump_file, ", ");
5803 print_decs (vr[i].max, dump_file);
5804 fprintf (dump_file, "]\n");
5806 set_range_info (ddef, vr[i].type,
5807 wide_int_storage::from (vr[i].min, prec,
5808 TYPE_SIGN (type)),
5809 wide_int_storage::from (vr[i].max, prec,
5810 TYPE_SIGN (type)));
5812 else if (POINTER_TYPE_P (TREE_TYPE (ddef))
5813 && vr[i].type == VR_ANTI_RANGE
5814 && wi::eq_p (vr[i].min, 0)
5815 && wi::eq_p (vr[i].max, 0))
5817 if (dump_file)
5818 fprintf (dump_file, "Setting nonnull for %u\n", i);
5819 set_ptr_nonnull (ddef);
5825 /* IPCP transformation phase applying the propagated values to NODE's body. */
5827 unsigned int
5828 ipcp_transform_function (struct cgraph_node *node)
5830 vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
5831 struct ipa_func_body_info fbi;
5832 struct ipa_agg_replacement_value *aggval;
5833 int param_count;
5834 bool cfg_changed = false, something_changed = false;
5836 gcc_checking_assert (cfun);
5837 gcc_checking_assert (current_function_decl);
5839 if (dump_file)
5840 fprintf (dump_file, "Modification phase of node %s\n",
5841 node->dump_name ());
5843 ipcp_update_bits (node);
5844 ipcp_update_vr (node);
5845 aggval = ipa_get_agg_replacements_for_node (node);
5846 if (!aggval)
5847 return 0;
5848 param_count = count_formal_params (node->decl);
5849 if (param_count == 0)
5850 return 0;
5851 adjust_agg_replacement_values (node, aggval);
5852 if (dump_file)
5853 ipa_dump_agg_replacement_values (dump_file, aggval);
5855 fbi.node = node;
5856 fbi.info = NULL;
5857 fbi.bb_infos = vNULL;
5858 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5859 fbi.param_count = param_count;
5860 fbi.aa_walked = 0;
5862 vec_safe_grow_cleared (descriptors, param_count);
5863 ipa_populate_param_decls (node, *descriptors);
5864 calculate_dominance_info (CDI_DOMINATORS);
5865 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5866 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5868 int i;
5869 struct ipa_bb_info *bi;
5870 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5871 free_ipa_bb_info (bi);
5872 fbi.bb_infos.release ();
5873 free_dominance_info (CDI_DOMINATORS);
5874 (*ipcp_transformations)[node->uid].agg_values = NULL;
5875 (*ipcp_transformations)[node->uid].bits = NULL;
5876 (*ipcp_transformations)[node->uid].m_vr = NULL;
5878 vec_free (descriptors);
5880 if (!something_changed)
5881 return 0;
5882 else if (cfg_changed)
5883 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5884 else
5885 return TODO_update_ssa_only_virtuals;
5888 #include "gt-ipa-prop.h"