/* Interprocedural analyses.
   Copyright (C) 2005-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "calls.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "gimple-pretty-print.h"
#include "params.h"
#include "ipa-utils.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"

/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;
/* Vector of IPA-CP transformation data for each clone.  */
vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
/* Edge summary for IPA-CP edge information.  */
ipa_edge_args_sum_t *ipa_edge_args_sum;
/* Traits for a hash table for reusing already existing ipa_bits.  */

struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
{
  typedef ipa_bits *value_type;
  typedef ipa_bits *compare_type;
  static hashval_t
  hash (const ipa_bits *p)
  {
    hashval_t t = (hashval_t) p->value.to_shwi ();
    return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
  }
  static bool
  equal (const ipa_bits *a, const ipa_bits *b)
  {
    return a->value == b->value && a->mask == b->mask;
  }
  static void
  mark_empty (ipa_bits *&p)
  {
    p = NULL;
  }
  static bool
  is_empty (const ipa_bits *p)
  {
    return p == NULL;
  }
  static bool
  is_deleted (const ipa_bits *p)
  {
    return p == reinterpret_cast<const ipa_bits *> (1);
  }
  static void
  mark_deleted (ipa_bits *&p)
  {
    p = reinterpret_cast<ipa_bits *> (1);
  }
};

/* Hash table for avoiding repeated allocations of equal ipa_bits.  */
static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;

/* Traits for a hash table for reusing value_ranges used for IPA.  Note that
   the equiv bitmap is not hashed and is expected to be NULL.  */

struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range *>
{
  typedef value_range *value_type;
  typedef value_range *compare_type;
  static hashval_t
  hash (const value_range *p)
  {
    gcc_checking_assert (!p->equiv);
    hashval_t t = (hashval_t) p->type;
    t = iterative_hash_expr (p->min, t);
    return iterative_hash_expr (p->max, t);
  }
  static bool
  equal (const value_range *a, const value_range *b)
  {
    return a->type == b->type && a->min == b->min && a->max == b->max;
  }
  static void
  mark_empty (value_range *&p)
  {
    p = NULL;
  }
  static bool
  is_empty (const value_range *p)
  {
    return p == NULL;
  }
  static bool
  is_deleted (const value_range *p)
  {
    return p == reinterpret_cast<const value_range *> (1);
  }
  static void
  mark_deleted (value_range *&p)
  {
    p = reinterpret_cast<value_range *> (1);
  }
};

/* Hash table for avoiding repeated allocations of equal value_ranges.  */
static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;

/* Holders of ipa cgraph hooks: */
static struct cgraph_node_hook_list *function_insertion_hook_holder;
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");

/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize)
	 || !opt_for_fn (node->decl, flag_ipa_cp);
}

/* Return index of the formal whose tree is PTREE in the function whose formal
   parameters are described by DESCRIPTORS.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
			    tree ptree)
{
  int i, count;

  count = vec_safe_length (descriptors);
  for (i = 0; i < count; i++)
    if ((*descriptors)[i].decl_or_type == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
/* Populate the param_decl fields in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor, va_gc> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl_or_type = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
							     true);
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Dump the declaration of the Ith formal parameter of the function
   corresponding to INFO to FILE.  Note there is no setter function as the
   descriptor array is built just once using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if ((*info->descriptors)[i].decl_or_type)
    {
      fprintf (file, " ");
      print_generic_expr (file, (*info->descriptors)[i].decl_or_type, 0);
    }
}

/* If necessary, allocate vector of parameter descriptors in info of NODE.
   Return true if they were allocated, false if not.  */

static bool
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors && param_count)
    {
      vec_safe_grow_cleared (info->descriptors, param_count);
      return true;
    }
  else
    return false;
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors
      && ipa_alloc_node_params (node, count_formal_params (node->decl)))
    ipa_populate_param_decls (node, *info->descriptors);
}
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val, 0);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
				  0);
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f,
				  jump_func->value.pass_through.operand, 0);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value, 0);
		}
	      fprintf (f, "\n");
	    }
	}

      struct ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, "         Context: ");
	  ctx->dump (f);
	}

      if (jump_func->bits)
	{
	  fprintf (f, "         value: ");
	  print_hex (jump_func->bits->value, f);
	  fprintf (f, ", mask: ");
	  print_hex (jump_func->bits->mask, f);
	  fprintf (f, "\n");
	}
      else
	fprintf (f, "         Unknown bits\n");

      if (jump_func->m_vr)
	{
	  fprintf (f, "         VR  ");
	  fprintf (f, "%s[",
		   (jump_func->m_vr->type == VR_ANTI_RANGE) ? "~" : "");
	  print_decs (jump_func->m_vr->min, f);
	  fprintf (f, ", ");
	  print_decs (jump_func->m_vr->max, f);
	  fprintf (f, "]\n");
	}
      else
	fprintf (f, "         Unknown VR\n");
    }
}
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s/%i:\n", node->name (),
	   node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
	       xstrdup_for_dump (node->name ()), node->order,
	       xstrdup_for_dump (cs->callee->name ()),
	       cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}
/* Set JFUNC to be an unknown jump function that carries no information.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
  jfunc->bits = NULL;
  jfunc->m_vr = NULL;
}

/* Set DST to be a copy of another jump function SRC (to be used by the jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}

/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}

/* Set JFUNC to be a unary pass-through jump function.  */

static void
ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an arithmetic pass-through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type of the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can
   try to derive the new type.  That is enough and we can stop, we will never
   see the calls into constructors of sub-objects in this code.  Therefore we
   can safely ignore all call statements that we traverse.  */
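
/* For illustration, a hypothetical C++ constructor (not part of GCC itself)
   showing the three sections described above:

     struct A { virtual void f (); };
     struct B : A { virtual void f (); void user_code (); };

     B::B ()
     {
       A::A ();                     // 1) ancestor constructors run first
       this->_vptr = &vtable_of_B;  // 2) VMT pointers of this and all its
				    //    ancestor sub-objects are rewritten
       user_code ();                // 3) only now may user code run and call
				    //    virtual functions
     }

   Walking backwards from a statement in section 3, the first relevant store
   we can meet is the VMT assignment in section 2; calls seen on the way
   cannot be ancestor constructors or destructors.  */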
static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}

/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return true if the dynamic type may change
   between the beginning of the function and the point where CALL is invoked.

   Generally functions are not allowed to change the dynamic type of such
   instances, but they may call destructors.  We assume that methods cannot
   destroy the THIS pointer.  Also, as a special case, constructors and
   destructors may change the type of the THIS pointer.  */
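
/* A hypothetical example (not from this file) of a case in which this
   function must return true:

     void foo (A *a)
     {
       a->~A ();        // an inlined destructor may change the type of *a
       new (a) B ();    // placement new definitely changes it
       a->f ();         // CALL; *a need not be an A any more
     }

   Conversely, for a plain non-THIS pointer argument of a non-cdtor with no
   inlined cdtor working on it, false can be returned.  */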
static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     such changes require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inline cdtor is actually working on ARG, but we don't have
     an easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and, once we know the type
     of the instance, match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call);
	       block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it has, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gcall *call, struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it has, return true and fill in the jump function JFUNC with relevant
   type information or set it to unknown.  ARG is the object itself (not a
   pointer to it, unless dereferenced).  BASE is the base of the memory access
   as returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
						call, jfunc, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
						call, jfunc, 0);
}

/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct ipa_func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}

/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but that does not survive the summary building stage.  */
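
/* A minimal hypothetical example: in

     void foo (int a)
     {
       int *p = &a;
       *p = 5;        // modifies the PARM_DECL a
       use (a);       // STMT; the load from a is NOT preserved here
     }

   the store through *p clobbers a, so the function would return false for
   the load in use (a); without that store it would return true.  */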
static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  if (TREE_READONLY (base))
    return true;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params whose value has not
   been modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
			    vec<ipa_param_descriptor, va_gc> *descriptors,
			    gimple *stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}

/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
/* Return true if we can prove that OP is a memory reference loading
   data from an aggregate passed as a parameter.

   The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it
   returns false if it cannot prove that the value has not been modified
   before the load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will
   return true even if it cannot prove the value has not been modified; in
   that case it will store false to *GUARANTEED_UNMODIFIED, otherwise it will
   store true there.

   FBI and DESCRIPTORS describe parameters of the current function (but FBI
   can be NULL), STMT is the load statement.  If the function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index,
   offset within the aggregate and whether it is a load from a value passed
   by reference respectively.  */
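
/* A hypothetical example: for the load

     tmp_1 = p_2(D)->fld;

   where p is the parameter with index 1 passed by reference, the function
   would return true and set *INDEX_P to 1, *OFFSET_P to the bit offset of
   fld within *p and *BY_REF_P to true, provided the pointed-to data could
   not have been modified before the load.  */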
bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor, va_gc> *descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
			bool *by_ref_p, bool *guaranteed_unmodified)
{
  int index;
  HOST_WIDE_INT size, max_size;
  bool reverse;
  tree base
    = get_ref_base_and_extent (op, offset_p, &size, &max_size, &reverse);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  if (guaranteed_unmodified)
	    *guaranteed_unmodified = true;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	   void (*<T2e4>) (struct S *) D.1867;
	   struct S * p.1;

	   <bb 2>:
	   p.1_1 = p;
	   D.1867_2 = p.1_1->f;
	   D.1867_2 ();
	   gdp = &p;
      */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0)
    {
      bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
      if (!data_preserved && !guaranteed_unmodified)
	return false;

      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      if (guaranteed_unmodified)
	*guaranteed_unmodified = data_preserved;
      return true;
    }
  return false;
}
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
	int a.0;

	a.0_2 = a;
	bar (a.0_2);
      }

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
	int D.2064;

	D.2064_4 = a.1(D) + 4;
	bar (D.2064_4);
      }

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
	int a.0;
	int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);
      }

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);
     }

   INFO is the structure describing individual parameters as accessed in
   different stages of IPA optimizations.  PARMS_AINFO contains the
   information that is only needed for intraprocedural analysis.  */
static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      switch (gimple_assign_rhs_class (stmt))
	{
	case GIMPLE_BINARY_RHS:
	  {
	    tree op2 = gimple_assign_rhs2 (stmt);
	    if (!is_gimple_ip_invariant (op2)
		|| ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
		     != tcc_comparison)
		    && !useless_type_conversion_p (TREE_TYPE (name),
						   TREE_TYPE (op1))))
	      return;

	    ipa_set_jf_arith_pass_through (jfunc, index, op2,
					   gimple_assign_rhs_code (stmt));
	    break;
	  }
	case GIMPLE_SINGLE_RHS:
	  {
	    bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
						       tc_ssa);
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	    break;
	  }
	case GIMPLE_UNARY_RHS:
	  if (is_gimple_assign (stmt)
	      && gimple_assign_rhs_class (stmt) == GIMPLE_UNARY_RHS
	      && ! CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
	    ipa_set_jf_unary_pass_through (jfunc, index,
					   gimple_assign_rhs_code (stmt));
	default:;
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size, &reverse);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size, &reverse);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */
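
/* For reference, the C++ ABI lowers a pointer to member function to a record
   essentially like the following (hypothetical illustration):

     struct
     {
       void (*__pfn) ();   // method pointer or vtable offset
       long __delta;       // adjustment to the THIS pointer
     };

   which is the shape this predicate checks for: a method pointer field
   followed by exactly one integral delta field.  */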
static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}
/* Simple linked list, describing known contents of an aggregate before
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents are known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
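
/* For example (hypothetical), having scanned backwards from a call the two
   stores

     s.a = 1;   // offset 0, size 32
     s.c = 3;   // offset 64, size 32

   the list would hold two entries sorted by offset, both with a non-NULL
   constant; a store of an unknown value would instead produce an entry with
   constant set to NULL.  */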
/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
				HOST_WIDE_INT lhs_offset,
				HOST_WIDE_INT lhs_size,
				bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
	return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	/* We already know this value is subsequently overwritten with
	   something else.  */
	*already_there = true;
      else
	/* Otherwise this is a partial overlap which we cannot
	   represent.  */
	return NULL;
    }
  return p;
}

/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET, and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int const_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
	{
	  struct ipa_agg_jf_item item;
	  item.offset = list->offset - arg_offset;
	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	  item.value = unshare_expr_without_location (list->constant);
	  jfunc->agg.items->quick_push (item);
	}
      list = list->next;
    }
}
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */
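
/* A hypothetical example of what this detects:

     struct S s;
     s.a = 1;
     s.b = 2;
     foo (&s);

   results in an aggregate jump function for the argument recording the
   constants 1 and 2 at the corresponding offsets within S.  */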
static void
determine_locally_known_aggregate_parts (gcall *call, tree arg,
					 tree arg_type,
					 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  if (PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS) == 0)
    return;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  HOST_WIDE_INT arg_max_size;
	  bool reverse;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					      &arg_max_size, &reverse);
	  if (arg_max_size == -1
	      || arg_max_size != arg_size
	      || arg_offset < 0)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;
      bool reverse;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					  &arg_max_size, &reverse);
      if (arg_max_size == -1
	  || arg_max_size != arg_size
	  || arg_offset < 0)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
     structures describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple *stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;
      bool reverse;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
	continue;
      if (!gimple_assign_single_p (stmt))
	break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
	  || TREE_CODE (lhs) == BIT_FIELD_REF
	  || contains_bitfld_component_ref_p (lhs))
	break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
					  &lhs_max_size, &reverse);
      if (lhs_max_size == -1
	  || lhs_max_size != lhs_size)
	break;

      if (check_ref)
	{
	  if (TREE_CODE (lhs_base) != MEM_REF
	      || TREE_OPERAND (lhs_base, 0) != arg_base
	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	    break;
	}
      else if (lhs_base != arg_base)
	{
	  if (DECL_P (lhs_base))
	    continue;
	  else
	    break;
	}

      bool already_there = false;
      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
					  &already_there);
      if (!p)
	break;
      if (already_there)
	continue;

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
	{
	  n->constant = rhs;
	  const_count++;
	}
      else
	n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}
/* Return the Ith param type of callee associated with call graph
   edge E.  */

tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}

/* Return ipa_bits with VALUE and MASK values, which can be either a newly
   allocated structure or a previously existing one shared with other jump
   functions and/or transformation summaries.  */

ipa_bits *
ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
{
  ipa_bits tmp;
  tmp.value = value;
  tmp.mask = mask;

  ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
  if (*slot)
    return *slot;

  ipa_bits *res = ggc_alloc<ipa_bits> ();
  res->value = value;
  res->mask = mask;
  *slot = res;

  return res;
}
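
/* Thanks to the hash-consing above, e.g. two different call sites passing
   arguments known to be congruent to 0 modulo 4 would share a single
   ipa_bits instance (a hypothetical usage sketch):

     ipa_bits *b1 = ipa_get_ipa_bits_for_value (0, ~(widest_int) 3);
     ipa_bits *b2 = ipa_get_ipa_bits_for_value (0, ~(widest_int) 3);
     gcc_checking_assert (b1 == b2);  */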
/* Assign to JF a pointer to an ipa_bits structure with VALUE and MASK.  Use
   the hash table in order to avoid creating multiple same ipa_bits
   structures.  */

static void
ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
		    const widest_int &mask)
{
  jf->bits = ipa_get_ipa_bits_for_value (value, mask);
}

/* Return a pointer to a value_range just like *TMP, but either find it in
   ipa_vr_hash_table or allocate it in GC memory.  TMP->equiv must be NULL.  */

static value_range *
ipa_get_value_range (value_range *tmp)
{
  value_range **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
  if (*slot)
    return *slot;

  value_range *vr = ggc_alloc<value_range> ();
  *vr = *tmp;
  *slot = vr;

  return vr;
}

/* Return a pointer to a value range consisting of TYPE, MIN, MAX and an empty
   equiv set.  Use the hash table in order to avoid creating multiple same
   copies of value_ranges.  */

static value_range *
ipa_get_value_range (enum value_range_type type, tree min, tree max)
{
  value_range tmp;
  tmp.type = type;
  tmp.min = min;
  tmp.max = max;
  tmp.equiv = NULL;
  return ipa_get_value_range (&tmp);
}

/* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
   a NULL equiv bitmap.  Use the hash table in order to avoid creating multiple
   same value_range structures.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_type type,
		  tree min, tree max)
{
  jf->m_vr = ipa_get_value_range (type, min, max);
}

/* Assign to JF a pointer to a value_range just like TMP but either fetch a
   copy from ipa_vr_hash_table or allocate a new one in GC memory.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, value_range *tmp)
{
  jf->m_vr = ipa_get_value_range (tmp);
}
1833 /* Compute jump function for all arguments of callsite CS and insert the
1834 information in the jump_functions array in the ipa_edge_args corresponding
1835 to this callsite. */
1837 static void
1838 ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
1839 struct cgraph_edge *cs)
1841 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1842 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1843 gcall *call = cs->call_stmt;
1844 int n, arg_num = gimple_call_num_args (call);
1845 bool useful_context = false;
1847 if (arg_num == 0 || args->jump_functions)
1848 return;
1849 vec_safe_grow_cleared (args->jump_functions, arg_num);
1850 if (flag_devirtualize)
1851 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
1853 if (gimple_call_internal_p (call))
1854 return;
1855 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1856 return;
1858 for (n = 0; n < arg_num; n++)
1860 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1861 tree arg = gimple_call_arg (call, n);
1862 tree param_type = ipa_get_callee_param_type (cs, n);
1863 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
1865 tree instance;
1866 struct ipa_polymorphic_call_context context (cs->caller->decl,
1867 arg, cs->call_stmt,
1868 &instance);
1869 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
1870 *ipa_get_ith_polymorhic_call_context (args, n) = context;
1871 if (!context.useless_p ())
1872 useful_context = true;
1875 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1877 bool addr_nonzero = false;
1878 bool strict_overflow = false;
1880 if (TREE_CODE (arg) == SSA_NAME
1881 && param_type
1882 && get_ptr_nonnull (arg))
1883 addr_nonzero = true;
1884 else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
1885 addr_nonzero = true;
1887 if (addr_nonzero)
1889 tree z = build_int_cst (TREE_TYPE (arg), 0);
1890 ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
1892 else
1893 gcc_assert (!jfunc->m_vr);
1895 else
1897 wide_int min, max;
1898 value_range_type type;
1899 if (TREE_CODE (arg) == SSA_NAME
1900 && param_type
1901 && (type = get_range_info (arg, &min, &max))
1902 && (type == VR_RANGE || type == VR_ANTI_RANGE))
1904 value_range tmpvr,resvr;
1906 tmpvr.type = type;
1907 tmpvr.min = wide_int_to_tree (TREE_TYPE (arg), min);
1908 tmpvr.max = wide_int_to_tree (TREE_TYPE (arg), max);
1909 tmpvr.equiv = NULL;
1910 memset (&resvr, 0, sizeof (resvr));
1911 extract_range_from_unary_expr (&resvr, NOP_EXPR, param_type,
1912 &tmpvr, TREE_TYPE (arg));
1913 if (resvr.type == VR_RANGE || resvr.type == VR_ANTI_RANGE)
1914 ipa_set_jfunc_vr (jfunc, &resvr);
1915 else
1916 gcc_assert (!jfunc->m_vr);
1918 else
1919 gcc_assert (!jfunc->m_vr);
1922 if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
1923 && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
1925 if (TREE_CODE (arg) == SSA_NAME)
1926 ipa_set_jfunc_bits (jfunc, 0,
1927 widest_int::from (get_nonzero_bits (arg),
1928 TYPE_SIGN (TREE_TYPE (arg))));
1929 else
1930 ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
1932 else if (POINTER_TYPE_P (TREE_TYPE (arg)))
1934 unsigned HOST_WIDE_INT bitpos;
1935 unsigned align;
1937 get_pointer_alignment_1 (arg, &align, &bitpos);
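/* Illustrative arithmetic: with ALIGN of 64 bits and BITPOS of 32, i.e. a
   pointer known to point 4 bytes past an 8-byte boundary, the mask below
   gets its low three bits cleared (those bits are known) and VALUE records
   the remainder 4, so the low bits of the pointer are known to be 100.  */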
1938 widest_int mask
1939 = wi::mask<widest_int>(TYPE_PRECISION (TREE_TYPE (arg)), false)
1940 .and_not (align / BITS_PER_UNIT - 1);
1941 widest_int value = bitpos / BITS_PER_UNIT;
1942 ipa_set_jfunc_bits (jfunc, value, mask);
1944 else
1945 gcc_assert (!jfunc->bits);
1947 if (is_gimple_ip_invariant (arg)
1948 || (VAR_P (arg)
1949 && is_global_var (arg)
1950 && TREE_READONLY (arg)))
1951 ipa_set_jf_constant (jfunc, arg, cs);
1952 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1953 && TREE_CODE (arg) == PARM_DECL)
1955 int index = ipa_get_param_decl_index (info, arg);
1957 gcc_assert (index >= 0);
1958 /* Aggregate passed by value, check for pass-through, otherwise we
1959 will attempt to fill in aggregate contents later in this
1960 loop. */
1961 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1963 ipa_set_jf_simple_pass_through (jfunc, index, false);
1964 continue;
1967 else if (TREE_CODE (arg) == SSA_NAME)
1969 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1971 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1972 if (index >= 0)
1974 bool agg_p;
1975 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1976 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1979 else
1981 gimple *stmt = SSA_NAME_DEF_STMT (arg);
1982 if (is_gimple_assign (stmt))
1983 compute_complex_assign_jump_func (fbi, info, jfunc,
1984 call, stmt, arg, param_type);
1985 else if (gimple_code (stmt) == GIMPLE_PHI)
1986 compute_complex_ancestor_jump_func (fbi, info, jfunc,
1987 call,
1988 as_a <gphi *> (stmt));
1992 /* If ARG is a pointer, we cannot use its type to determine the type of aggregate
1993 passed (because type conversions are ignored in gimple). Usually we can
1994 safely get the type from the function declaration, but in the case of K&R
1995 prototypes or variadic functions we can try our luck with the type of the
1996 pointer passed. TODO: Since we look for actual initialization of the memory
1997 object, we might do better to work out the type from the memory stores we find. */
1998 if (!param_type)
1999 param_type = TREE_TYPE (arg);
2001 if ((jfunc->type != IPA_JF_PASS_THROUGH
2002 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
2003 && (jfunc->type != IPA_JF_ANCESTOR
2004 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
2005 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
2006 || POINTER_TYPE_P (param_type)))
2007 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
2009 if (!useful_context)
2010 vec_free (args->polymorphic_call_contexts);
2013 /* Compute jump functions for all edges - both direct and indirect - outgoing
2014 from BB. */
2016 static void
2017 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
2019 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2020 int i;
2021 struct cgraph_edge *cs;
2023 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
2025 struct cgraph_node *callee = cs->callee;
2027 if (callee)
2029 callee->ultimate_alias_target ();
2030 /* We do not need to bother analyzing calls to unknown functions
2031 unless they may become known during lto/whopr. */
2032 if (!callee->definition && !flag_lto)
2033 continue;
2035 ipa_compute_jump_functions_for_edge (fbi, cs);
2039 /* If STMT looks like a statement loading a value from a member pointer formal
2040 parameter, return that parameter and store the offset of the field to
2041 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2042 might be clobbered). If USE_DELTA, then we look for a use of the delta
2043 field rather than the pfn. */
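/* For illustration (taken from the gimple dump in the comment before
   ipa_analyze_indirect_call_uses below), matching statements look like

     f$__pfn_24 = f.__pfn;
   or
     f$__pfn_24 = MEM[(struct *)&f + 4B];

   where F is a PARM_DECL with a member-pointer-like type.  */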
2045 static tree
2046 ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
2047 HOST_WIDE_INT *offset_p)
2049 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2051 if (!gimple_assign_single_p (stmt))
2052 return NULL_TREE;
2054 rhs = gimple_assign_rhs1 (stmt);
2055 if (TREE_CODE (rhs) == COMPONENT_REF)
2057 ref_field = TREE_OPERAND (rhs, 1);
2058 rhs = TREE_OPERAND (rhs, 0);
2060 else
2061 ref_field = NULL_TREE;
2062 if (TREE_CODE (rhs) != MEM_REF)
2063 return NULL_TREE;
2064 rec = TREE_OPERAND (rhs, 0);
2065 if (TREE_CODE (rec) != ADDR_EXPR)
2066 return NULL_TREE;
2067 rec = TREE_OPERAND (rec, 0);
2068 if (TREE_CODE (rec) != PARM_DECL
2069 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2070 return NULL_TREE;
2071 ref_offset = TREE_OPERAND (rhs, 1);
2073 if (use_delta)
2074 fld = delta_field;
2075 else
2076 fld = ptr_field;
2077 if (offset_p)
2078 *offset_p = int_bit_position (fld);
2080 if (ref_field)
2082 if (integer_nonzerop (ref_offset))
2083 return NULL_TREE;
2084 return ref_field == fld ? rec : NULL_TREE;
2086 else
2087 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2088 : NULL_TREE;
2091 /* Returns true iff T is an SSA_NAME defined by a statement. */
2093 static bool
2094 ipa_is_ssa_with_stmt_def (tree t)
2096 if (TREE_CODE (t) == SSA_NAME
2097 && !SSA_NAME_IS_DEFAULT_DEF (t))
2098 return true;
2099 else
2100 return false;
2103 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2104 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2105 indirect call graph edge. */
2107 static struct cgraph_edge *
2108 ipa_note_param_call (struct cgraph_node *node, int param_index,
2109 gcall *stmt)
2111 struct cgraph_edge *cs;
2113 cs = node->get_edge (stmt);
2114 cs->indirect_info->param_index = param_index;
2115 cs->indirect_info->agg_contents = 0;
2116 cs->indirect_info->member_ptr = 0;
2117 cs->indirect_info->guaranteed_unmodified = 0;
2118 return cs;
2121 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
2122 (described by INFO). PARMS_AINFO is a pointer to a vector containing
2123 intermediate information about each formal parameter. Currently it checks
2124 whether the call calls a pointer that is a formal parameter and if so, the
2125 parameter is marked with the called flag and an indirect call graph edge
2126 describing the call is created. This is very simple for ordinary pointers
2127 represented in SSA but not-so-nice when it comes to member pointers. The
2128 ugly part of this function does nothing more than trying to match the
2129 pattern of such a call. An example of such a pattern is the gimple dump
2130 below, the call is on the last line:
2132 <bb 2>:
2133 f$__delta_5 = f.__delta;
2134 f$__pfn_24 = f.__pfn;
2136 or
2137 <bb 2>:
2138 f$__delta_5 = MEM[(struct *)&f];
2139 f$__pfn_24 = MEM[(struct *)&f + 4B];
2141 and a few lines below:
2143 <bb 5>
2144 D.2496_3 = (int) f$__pfn_24;
2145 D.2497_4 = D.2496_3 & 1;
2146 if (D.2497_4 != 0)
2147 goto <bb 3>;
2148 else
2149 goto <bb 4>;
2151 <bb 6>:
2152 D.2500_7 = (unsigned int) f$__delta_5;
2153 D.2501_8 = &S + D.2500_7;
2154 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2155 D.2503_10 = *D.2502_9;
2156 D.2504_12 = f$__pfn_24 + -1;
2157 D.2505_13 = (unsigned int) D.2504_12;
2158 D.2506_14 = D.2503_10 + D.2505_13;
2159 D.2507_15 = *D.2506_14;
2160 iftmp.11_16 = (String:: *) D.2507_15;
2162 <bb 7>:
2163 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2164 D.2500_19 = (unsigned int) f$__delta_5;
2165 D.2508_20 = &S + D.2500_19;
2166 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2168 Such patterns are results of simple calls to a member pointer:
2170 int doprinting (int (MyString::* f)(int) const)
2172 MyString S ("somestring");
2174 return (S.*f)(4);
2177 Moreover, the function also looks for called pointers loaded from aggregates
2178 passed by value or reference. */
2180 static void
2181 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
2182 tree target)
2184 struct ipa_node_params *info = fbi->info;
2185 HOST_WIDE_INT offset;
2186 bool by_ref;
2188 if (SSA_NAME_IS_DEFAULT_DEF (target))
2190 tree var = SSA_NAME_VAR (target);
2191 int index = ipa_get_param_decl_index (info, var);
2192 if (index >= 0)
2193 ipa_note_param_call (fbi->node, index, call);
2194 return;
2197 int index;
2198 gimple *def = SSA_NAME_DEF_STMT (target);
2199 bool guaranteed_unmodified;
2200 if (gimple_assign_single_p (def)
2201 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2202 gimple_assign_rhs1 (def), &index, &offset,
2203 NULL, &by_ref, &guaranteed_unmodified))
2205 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2206 cs->indirect_info->offset = offset;
2207 cs->indirect_info->agg_contents = 1;
2208 cs->indirect_info->by_ref = by_ref;
2209 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
2210 return;
2213 /* Now we need to try to match the complex pattern of calling a member
2214 pointer. */
2215 if (gimple_code (def) != GIMPLE_PHI
2216 || gimple_phi_num_args (def) != 2
2217 || !POINTER_TYPE_P (TREE_TYPE (target))
2218 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2219 return;
2221 /* First, we need to check whether one of these is a load from a member
2222 pointer that is a parameter to this function. */
2223 tree n1 = PHI_ARG_DEF (def, 0);
2224 tree n2 = PHI_ARG_DEF (def, 1);
2225 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2226 return;
2227 gimple *d1 = SSA_NAME_DEF_STMT (n1);
2228 gimple *d2 = SSA_NAME_DEF_STMT (n2);
2230 tree rec;
2231 basic_block bb, virt_bb;
2232 basic_block join = gimple_bb (def);
2233 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2235 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2236 return;
2238 bb = EDGE_PRED (join, 0)->src;
2239 virt_bb = gimple_bb (d2);
2241 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2243 bb = EDGE_PRED (join, 1)->src;
2244 virt_bb = gimple_bb (d1);
2246 else
2247 return;
2249 /* Second, we need to check that the basic blocks are laid out in the way
2250 corresponding to the pattern. */
2252 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2253 || single_pred (virt_bb) != bb
2254 || single_succ (virt_bb) != join)
2255 return;
2257 /* Third, let's see that the branching is done depending on the least
2258 significant bit of the pfn. */
2260 gimple *branch = last_stmt (bb);
2261 if (!branch || gimple_code (branch) != GIMPLE_COND)
2262 return;
2264 if ((gimple_cond_code (branch) != NE_EXPR
2265 && gimple_cond_code (branch) != EQ_EXPR)
2266 || !integer_zerop (gimple_cond_rhs (branch)))
2267 return;
2269 tree cond = gimple_cond_lhs (branch);
2270 if (!ipa_is_ssa_with_stmt_def (cond))
2271 return;
2273 def = SSA_NAME_DEF_STMT (cond);
2274 if (!is_gimple_assign (def)
2275 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2276 || !integer_onep (gimple_assign_rhs2 (def)))
2277 return;
2279 cond = gimple_assign_rhs1 (def);
2280 if (!ipa_is_ssa_with_stmt_def (cond))
2281 return;
2283 def = SSA_NAME_DEF_STMT (cond);
2285 if (is_gimple_assign (def)
2286 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2288 cond = gimple_assign_rhs1 (def);
2289 if (!ipa_is_ssa_with_stmt_def (cond))
2290 return;
2291 def = SSA_NAME_DEF_STMT (cond);
2294 tree rec2;
2295 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2296 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2297 == ptrmemfunc_vbit_in_delta),
2298 NULL);
2299 if (rec != rec2)
2300 return;
2302 index = ipa_get_param_decl_index (info, rec);
2303 if (index >= 0
2304 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2306 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2307 cs->indirect_info->offset = offset;
2308 cs->indirect_info->agg_contents = 1;
2309 cs->indirect_info->member_ptr = 1;
2310 cs->indirect_info->guaranteed_unmodified = 1;
2313 return;
2316 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2317 object referenced in the expression is a formal parameter of the caller
2318 FBI->node (described by FBI->info), create a call note for the
2319 statement. */
2321 static void
2322 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2323 gcall *call, tree target)
2325 tree obj = OBJ_TYPE_REF_OBJECT (target);
2326 int index;
2327 HOST_WIDE_INT anc_offset;
2329 if (!flag_devirtualize)
2330 return;
2332 if (TREE_CODE (obj) != SSA_NAME)
2333 return;
2335 struct ipa_node_params *info = fbi->info;
2336 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2338 struct ipa_jump_func jfunc;
2339 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2340 return;
2342 anc_offset = 0;
2343 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2344 gcc_assert (index >= 0);
2345 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2346 call, &jfunc))
2347 return;
2349 else
2351 struct ipa_jump_func jfunc;
2352 gimple *stmt = SSA_NAME_DEF_STMT (obj);
2353 tree expr;
2355 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2356 if (!expr)
2357 return;
2358 index = ipa_get_param_decl_index (info,
2359 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2360 gcc_assert (index >= 0);
2361 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2362 call, &jfunc, anc_offset))
2363 return;
2366 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2367 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2368 ii->offset = anc_offset;
2369 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2370 ii->otr_type = obj_type_ref_class (target);
2371 ii->polymorphic = 1;
2374 /* Analyze a call statement CALL whether and how it utilizes formal parameters
2375 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
2376 containing intermediate information about each formal parameter. */
2378 static void
2379 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2381 tree target = gimple_call_fn (call);
2383 if (!target
2384 || (TREE_CODE (target) != SSA_NAME
2385 && !virtual_method_call_p (target)))
2386 return;
2388 struct cgraph_edge *cs = fbi->node->get_edge (call);
2389 /* If we previously turned the call into a direct call, there is
2390 no need to analyze. */
2391 if (cs && !cs->indirect_unknown_callee)
2392 return;
2394 if (cs->indirect_info->polymorphic && flag_devirtualize)
2396 tree instance;
2397 tree target = gimple_call_fn (call);
2398 ipa_polymorphic_call_context context (current_function_decl,
2399 target, call, &instance);
2401 gcc_checking_assert (cs->indirect_info->otr_type
2402 == obj_type_ref_class (target));
2403 gcc_checking_assert (cs->indirect_info->otr_token
2404 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2406 cs->indirect_info->vptr_changed
2407 = !context.get_dynamic_type (instance,
2408 OBJ_TYPE_REF_OBJECT (target),
2409 obj_type_ref_class (target), call);
2410 cs->indirect_info->context = context;
2413 if (TREE_CODE (target) == SSA_NAME)
2414 ipa_analyze_indirect_call_uses (fbi, call, target);
2415 else if (virtual_method_call_p (target))
2416 ipa_analyze_virtual_call_uses (fbi, call, target);
2420 /* Analyze the call statement STMT with respect to formal parameters (described
2421 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2422 formal parameters are called. */
2424 static void
2425 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2427 if (is_gimple_call (stmt))
2428 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2431 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2432 If OP is a parameter declaration, mark it as used in the info structure
2433 passed in DATA. */
2435 static bool
2436 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2438 struct ipa_node_params *info = (struct ipa_node_params *) data;
2440 op = get_base_address (op);
2441 if (op
2442 && TREE_CODE (op) == PARM_DECL)
2444 int index = ipa_get_param_decl_index (info, op);
2445 gcc_assert (index >= 0);
2446 ipa_set_param_used (info, index, true);
2449 return false;
2452 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2453 the findings in various structures of the associated ipa_node_params
2454 structure, such as parameter flags, notes etc. FBI holds various data about
2455 the function being analyzed. */
2457 static void
2458 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2460 gimple_stmt_iterator gsi;
2461 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2463 gimple *stmt = gsi_stmt (gsi);
2465 if (is_gimple_debug (stmt))
2466 continue;
2468 ipa_analyze_stmt_uses (fbi, stmt);
2469 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2470 visit_ref_for_mod_analysis,
2471 visit_ref_for_mod_analysis,
2472 visit_ref_for_mod_analysis);
2474 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2475 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2476 visit_ref_for_mod_analysis,
2477 visit_ref_for_mod_analysis,
2478 visit_ref_for_mod_analysis);
2481 /* Calculate controlled uses of parameters of NODE. */
2483 static void
2484 ipa_analyze_controlled_uses (struct cgraph_node *node)
2486 struct ipa_node_params *info = IPA_NODE_REF (node);
2488 for (int i = 0; i < ipa_get_param_count (info); i++)
2490 tree parm = ipa_get_param (info, i);
2491 int controlled_uses = 0;
2493 /* For SSA regs see if parameter is used. For non-SSA we compute
2494 the flag during modification analysis. */
2495 if (is_gimple_reg (parm))
2497 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2498 parm);
2499 if (ddef && !has_zero_uses (ddef))
2501 imm_use_iterator imm_iter;
2502 use_operand_p use_p;
2504 ipa_set_param_used (info, i, true);
2505 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2506 if (!is_gimple_call (USE_STMT (use_p)))
2508 if (!is_gimple_debug (USE_STMT (use_p)))
2510 controlled_uses = IPA_UNDESCRIBED_USE;
2511 break;
2514 else
2515 controlled_uses++;
2517 else
2518 controlled_uses = 0;
2520 else
2521 controlled_uses = IPA_UNDESCRIBED_USE;
2522 ipa_set_controlled_uses (info, i, controlled_uses);
2526 /* Free stuff in BI. */
2528 static void
2529 free_ipa_bb_info (struct ipa_bb_info *bi)
2531 bi->cg_edges.release ();
2532 bi->param_aa_statuses.release ();
2535 /* Dominator walker driving the analysis. */
2537 class analysis_dom_walker : public dom_walker
2539 public:
2540 analysis_dom_walker (struct ipa_func_body_info *fbi)
2541 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2543 virtual edge before_dom_children (basic_block);
2545 private:
2546 struct ipa_func_body_info *m_fbi;
2549 edge
2550 analysis_dom_walker::before_dom_children (basic_block bb)
2552 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2553 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2554 return NULL;
2557 /* Release body info FBI. */
2559 void
2560 ipa_release_body_info (struct ipa_func_body_info *fbi)
2562 int i;
2563 struct ipa_bb_info *bi;
2565 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2566 free_ipa_bb_info (bi);
2567 fbi->bb_infos.release ();
2570 /* Initialize the array describing properties of formal parameters
2571 of NODE, analyze their uses and compute jump functions associated
2572 with actual arguments of calls from within NODE. */
2574 void
2575 ipa_analyze_node (struct cgraph_node *node)
2577 struct ipa_func_body_info fbi;
2578 struct ipa_node_params *info;
2580 ipa_check_create_node_params ();
2581 ipa_check_create_edge_args ();
2582 info = IPA_NODE_REF (node);
2584 if (info->analysis_done)
2585 return;
2586 info->analysis_done = 1;
2588 if (ipa_func_spec_opts_forbid_analysis_p (node))
2590 for (int i = 0; i < ipa_get_param_count (info); i++)
2592 ipa_set_param_used (info, i, true);
2593 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2595 return;
2598 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2599 push_cfun (func);
2600 calculate_dominance_info (CDI_DOMINATORS);
2601 ipa_initialize_node_params (node);
2602 ipa_analyze_controlled_uses (node);
2604 fbi.node = node;
2605 fbi.info = IPA_NODE_REF (node);
2606 fbi.bb_infos = vNULL;
2607 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2608 fbi.param_count = ipa_get_param_count (info);
2609 fbi.aa_walked = 0;
2611 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2613 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2614 bi->cg_edges.safe_push (cs);
2617 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2619 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2620 bi->cg_edges.safe_push (cs);
2623 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2625 ipa_release_body_info (&fbi);
2626 free_dominance_info (CDI_DOMINATORS);
2627 pop_cfun ();
2630 /* Update the jump functions associated with call graph edge E when the call
2631 graph edge CS is being inlined, assuming that E->caller is already (possibly
2632 indirectly) inlined into CS->callee and that E has not been inlined. */
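/* An illustrative composition (indices made up): if E's jump function is
   "pass-through of callee parameter 1" and CS passed its caller's
   parameter 3 unchanged in that position, the combined jump function
   becomes "pass-through of caller parameter 3".  Similarly, composing two
   ancestor jump functions below simply adds their offsets.  */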
2634 static void
2635 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2636 struct cgraph_edge *e)
2638 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2639 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2640 int count = ipa_get_cs_argument_count (args);
2641 int i;
2643 for (i = 0; i < count; i++)
2645 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2646 struct ipa_polymorphic_call_context *dst_ctx
2647 = ipa_get_ith_polymorhic_call_context (args, i);
2649 if (dst->type == IPA_JF_ANCESTOR)
2651 struct ipa_jump_func *src;
2652 int dst_fid = dst->value.ancestor.formal_id;
2653 struct ipa_polymorphic_call_context *src_ctx
2654 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2656 /* Variable number of arguments can cause havoc if we try to access
2657 one that does not exist in the inlined edge. So make sure we
2658 don't. */
2659 if (dst_fid >= ipa_get_cs_argument_count (top))
2661 ipa_set_jf_unknown (dst);
2662 continue;
2665 src = ipa_get_ith_jump_func (top, dst_fid);
2667 if (src_ctx && !src_ctx->useless_p ())
2669 struct ipa_polymorphic_call_context ctx = *src_ctx;
2671 /* TODO: Make type preserved safe WRT contexts. */
2672 if (!ipa_get_jf_ancestor_type_preserved (dst))
2673 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2674 ctx.offset_by (dst->value.ancestor.offset);
2675 if (!ctx.useless_p ())
2677 if (!dst_ctx)
2679 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2680 count);
2681 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2684 dst_ctx->combine_with (ctx);
2688 if (src->agg.items
2689 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2691 struct ipa_agg_jf_item *item;
2692 int j;
2694 /* Currently we do not produce clobber aggregate jump functions,
2695 replace with merging when we do. */
2696 gcc_assert (!dst->agg.items);
2698 dst->agg.items = vec_safe_copy (src->agg.items);
2699 dst->agg.by_ref = src->agg.by_ref;
2700 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2701 item->offset -= dst->value.ancestor.offset;
2704 if (src->type == IPA_JF_PASS_THROUGH
2705 && src->value.pass_through.operation == NOP_EXPR)
2707 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2708 dst->value.ancestor.agg_preserved &=
2709 src->value.pass_through.agg_preserved;
2711 else if (src->type == IPA_JF_PASS_THROUGH
2712 && TREE_CODE_CLASS (src->value.pass_through.operation) == tcc_unary)
2714 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2715 dst->value.ancestor.agg_preserved = false;
2717 else if (src->type == IPA_JF_ANCESTOR)
2719 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2720 dst->value.ancestor.offset += src->value.ancestor.offset;
2721 dst->value.ancestor.agg_preserved &=
2722 src->value.ancestor.agg_preserved;
2724 else
2725 ipa_set_jf_unknown (dst);
2727 else if (dst->type == IPA_JF_PASS_THROUGH)
2729 struct ipa_jump_func *src;
2730 /* We must check range due to calls with variable number of arguments
2731 and we cannot combine jump functions with operations. */
2732 if (dst->value.pass_through.operation == NOP_EXPR
2733 && (dst->value.pass_through.formal_id
2734 < ipa_get_cs_argument_count (top)))
2736 int dst_fid = dst->value.pass_through.formal_id;
2737 src = ipa_get_ith_jump_func (top, dst_fid);
2738 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2739 struct ipa_polymorphic_call_context *src_ctx
2740 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2742 if (src_ctx && !src_ctx->useless_p ())
2744 struct ipa_polymorphic_call_context ctx = *src_ctx;
2746 /* TODO: Make type preserved safe WRT contexts. */
2747 if (!ipa_get_jf_pass_through_type_preserved (dst))
2748 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2749 if (!ctx.useless_p ())
2751 if (!dst_ctx)
2753 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2754 count);
2755 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2757 dst_ctx->combine_with (ctx);
2760 switch (src->type)
2762 case IPA_JF_UNKNOWN:
2763 ipa_set_jf_unknown (dst);
2764 break;
2765 case IPA_JF_CONST:
2766 ipa_set_jf_cst_copy (dst, src);
2767 break;
2769 case IPA_JF_PASS_THROUGH:
2771 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2772 enum tree_code operation;
2773 operation = ipa_get_jf_pass_through_operation (src);
2775 if (operation == NOP_EXPR)
2777 bool agg_p;
2778 agg_p = dst_agg_p
2779 && ipa_get_jf_pass_through_agg_preserved (src);
2780 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2782 else if (TREE_CODE_CLASS (operation) == tcc_unary)
2783 ipa_set_jf_unary_pass_through (dst, formal_id, operation);
2784 else
2786 tree operand = ipa_get_jf_pass_through_operand (src);
2787 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2788 operation);
2790 break;
2792 case IPA_JF_ANCESTOR:
2794 bool agg_p;
2795 agg_p = dst_agg_p
2796 && ipa_get_jf_ancestor_agg_preserved (src);
2797 ipa_set_ancestor_jf (dst,
2798 ipa_get_jf_ancestor_offset (src),
2799 ipa_get_jf_ancestor_formal_id (src),
2800 agg_p);
2801 break;
2803 default:
2804 gcc_unreachable ();
2807 if (src->agg.items
2808 && (dst_agg_p || !src->agg.by_ref))
2810 /* Currently we do not produce clobber aggregate jump
2811 functions, replace with merging when we do. */
2812 gcc_assert (!dst->agg.items);
2814 dst->agg.by_ref = src->agg.by_ref;
2815 dst->agg.items = vec_safe_copy (src->agg.items);
2818 else
2819 ipa_set_jf_unknown (dst);
2824 /* If TARGET is an addr_expr of a function declaration, make it the
2825 (SPECULATIVE) destination of an indirect edge IE and return the edge.
2826 Otherwise, return NULL. */
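/* Illustrative sketch (function name made up): if propagation determines
   that an indirect callee is the invariant &do_thing, passing that
   ADDR_EXPR here yields a direct (or, with SPECULATIVE set, speculative)
   edge to do_thing; a target that provably is not a function is instead
   redirected to __builtin_unreachable.  */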
2828 struct cgraph_edge *
2829 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2830 bool speculative)
2832 struct cgraph_node *callee;
2833 struct inline_edge_summary *es = inline_edge_summary (ie);
2834 bool unreachable = false;
2836 if (TREE_CODE (target) == ADDR_EXPR)
2837 target = TREE_OPERAND (target, 0);
2838 if (TREE_CODE (target) != FUNCTION_DECL)
2840 target = canonicalize_constructor_val (target, NULL);
2841 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2843 /* Member pointer call that goes through a VMT lookup. */
2844 if (ie->indirect_info->member_ptr
2845 /* Or if target is not an invariant expression and we do not
2846 know whether it will evaluate to a function at runtime.
2847 This can happen when folding through &VAR, where &VAR
2848 is IP invariant, but VAR itself is not.
2850 TODO: Revisit this when GCC 5 is branched. It seems that
2851 the member_ptr check is not needed and that we may try to fold
2852 the expression and see if VAR is readonly. */
2853 || !is_gimple_ip_invariant (target))
2855 if (dump_enabled_p ())
2857 location_t loc = gimple_location_safe (ie->call_stmt);
2858 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2859 "discovered direct call non-invariant "
2860 "%s/%i\n",
2861 ie->caller->name (), ie->caller->order);
2863 return NULL;
2867 if (dump_enabled_p ())
2869 location_t loc = gimple_location_safe (ie->call_stmt);
2870 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2871 "discovered direct call to non-function in %s/%i, "
2872 "making it __builtin_unreachable\n",
2873 ie->caller->name (), ie->caller->order);
2876 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2877 callee = cgraph_node::get_create (target);
2878 unreachable = true;
2880 else
2881 callee = cgraph_node::get (target);
2883 else
2884 callee = cgraph_node::get (target);
2886 /* Because may-edges are not explicitly represented and the vtable may be
2887 external, we may create the first reference to the object in the unit. */
2888 if (!callee || callee->global.inlined_to)
2891 /* We had better ensure we can refer to it.
2892 In the case of static functions we are out of luck, since we have
2893 already removed their bodies. In the case of public functions we may
2894 or may not introduce the reference. */
2895 if (!canonicalize_constructor_val (target, NULL)
2896 || !TREE_PUBLIC (target))
2898 if (dump_file)
2899 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2900 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2901 xstrdup_for_dump (ie->caller->name ()),
2902 ie->caller->order,
2903 xstrdup_for_dump (ie->callee->name ()),
2904 ie->callee->order);
2905 return NULL;
2907 callee = cgraph_node::get_create (target);
2910 /* If the edge is already speculated, check that it agrees with the new target. */
2911 if (speculative && ie->speculative)
2913 struct cgraph_edge *e2;
2914 struct ipa_ref *ref;
2915 ie->speculative_call_info (e2, ie, ref);
2916 if (e2->callee->ultimate_alias_target ()
2917 != callee->ultimate_alias_target ())
2919 if (dump_file)
2920 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2921 "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
2922 xstrdup_for_dump (ie->caller->name ()),
2923 ie->caller->order,
2924 xstrdup_for_dump (callee->name ()),
2925 callee->order,
2926 xstrdup_for_dump (e2->callee->name ()),
2927 e2->callee->order);
2929 else
2931 if (dump_file)
2932 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2933 "(%s/%i -> %s/%i) this agree with previous speculation.\n",
2934 xstrdup_for_dump (ie->caller->name ()),
2935 ie->caller->order,
2936 xstrdup_for_dump (callee->name ()),
2937 callee->order);
2939 return NULL;
2942 if (!dbg_cnt (devirt))
2943 return NULL;
2945 ipa_check_create_node_params ();
2947 /* We cannot make edges to inline clones. It is a bug if someone removed
2948 the cgraph node too early. */
2949 gcc_assert (!callee->global.inlined_to);
2951 if (dump_file && !unreachable)
2953 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2954 "(%s/%i -> %s/%i), for stmt ",
2955 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2956 speculative ? "speculative" : "known",
2957 xstrdup_for_dump (ie->caller->name ()),
2958 ie->caller->order,
2959 xstrdup_for_dump (callee->name ()),
2960 callee->order);
2961 if (ie->call_stmt)
2962 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2963 else
2964 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2966 if (dump_enabled_p ())
2968 location_t loc = gimple_location_safe (ie->call_stmt);
2970 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2971 "converting indirect call in %s to direct call to %s\n",
2972 ie->caller->name (), callee->name ());
2974 if (!speculative)
2976 struct cgraph_edge *orig = ie;
2977 ie = ie->make_direct (callee);
2978 /* If we resolved speculative edge the cost is already up to date
2979 for direct call (adjusted by inline_edge_duplication_hook). */
2980 if (ie == orig)
2982 es = inline_edge_summary (ie);
2983 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2984 - eni_size_weights.call_cost);
2985 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2986 - eni_time_weights.call_cost);
2989 else
2991 if (!callee->can_be_discarded_p ())
2993 cgraph_node *alias;
2994 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2995 if (alias)
2996 callee = alias;
2998 /* make_speculative will update ie's cost to the direct call cost. */
2999 ie = ie->make_speculative
3000 (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
3003 return ie;
3006 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
3007 CONSTRUCTOR and return it. Return NULL if the search fails for some
3008 reason. */
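/* Illustrative example: for the initializer of

     static const struct { int a; int b; } s = { 1, 2 };

   a REQ_OFFSET of 32 bits (assuming 32-bit int) locates and returns the
   constant 2; nested constructors are handled by the recursion below.  */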
3010 static tree
3011 find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
3013 tree type = TREE_TYPE (constructor);
3014 if (TREE_CODE (type) != ARRAY_TYPE
3015 && TREE_CODE (type) != RECORD_TYPE)
3016 return NULL;
3018 unsigned ix;
3019 tree index, val;
3020 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
3022 HOST_WIDE_INT elt_offset;
3023 if (TREE_CODE (type) == ARRAY_TYPE)
3025 offset_int off;
3026 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
3027 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3029 if (index)
3031 off = wi::to_offset (index);
3032 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
3034 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
3035 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3036 off = wi::sext (off - wi::to_offset (low_bound),
3037 TYPE_PRECISION (TREE_TYPE (index)));
3039 off *= wi::to_offset (unit_size);
3041 else
3042 off = wi::to_offset (unit_size) * ix;
3044 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
3045 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
3046 continue;
3047 elt_offset = off.to_shwi ();
3049 else if (TREE_CODE (type) == RECORD_TYPE)
3051 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
3052 if (DECL_BIT_FIELD (index))
3053 continue;
3054 elt_offset = int_bit_position (index);
3056 else
3057 gcc_unreachable ();
3059 if (elt_offset > req_offset)
3060 return NULL;
3062 if (TREE_CODE (val) == CONSTRUCTOR)
3063 return find_constructor_constant_at_offset (val,
3064 req_offset - elt_offset);
3066 if (elt_offset == req_offset
3067 && is_gimple_reg_type (TREE_TYPE (val))
3068 && is_gimple_ip_invariant (val))
3069 return val;
3071 return NULL;
3074 /* Check whether SCALAR could be used to look up an aggregate interprocedural
3075 invariant from a static constructor and if so, return it. Otherwise return
3076 NULL. */
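/* For instance (illustrative, names made up), given

     static const struct { void (*cb) (void); } handlers = { do_thing };
     ...
     process (&handlers);

   a lookup of OFFSET 0 with BY_REF true on the SCALAR &handlers yields
   &do_thing, which the indirect-call resolution code below can exploit.  */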
3078 static tree
3079 ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
3081 if (by_ref)
3083 if (TREE_CODE (scalar) != ADDR_EXPR)
3084 return NULL;
3085 scalar = TREE_OPERAND (scalar, 0);
3088 if (!VAR_P (scalar)
3089 || !is_global_var (scalar)
3090 || !TREE_READONLY (scalar)
3091 || !DECL_INITIAL (scalar)
3092 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
3093 return NULL;
3095 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
3098 /* Retrieve value from aggregate jump function AGG or static initializer of
3099 SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
3100 none. BY_REF specifies whether the value has to be passed by reference or
3101 by value. If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
3102 to is set to true if the value comes from an initializer of a constant. */
3104 tree
3105 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar,
3106 HOST_WIDE_INT offset, bool by_ref,
3107 bool *from_global_constant)
3109 struct ipa_agg_jf_item *item;
3110 int i;
3112 if (scalar)
3114 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
3115 if (res)
3117 if (from_global_constant)
3118 *from_global_constant = true;
3119 return res;
3123 if (!agg
3124 || by_ref != agg->by_ref)
3125 return NULL;
3127 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
3128 if (item->offset == offset)
3130 /* Currently we do not have clobber values, return NULL for them once
3131 we do. */
3132 gcc_checking_assert (is_gimple_ip_invariant (item->value));
3133 if (from_global_constant)
3134 *from_global_constant = false;
3135 return item->value;
3137 return NULL;
3140 /* Remove a reference to SYMBOL from the list of references of a node given by
3141 reference description RDESC. Return true if the reference has been
3142 successfully found and removed. */
3144 static bool
3145 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
3147 struct ipa_ref *to_del;
3148 struct cgraph_edge *origin;
3150 origin = rdesc->cs;
3151 if (!origin)
3152 return false;
3153 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3154 origin->lto_stmt_uid);
3155 if (!to_del)
3156 return false;
3158 to_del->remove_reference ();
3159 if (dump_file)
3160 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
3161 xstrdup_for_dump (origin->caller->name ()),
3162 origin->caller->order, xstrdup_for_dump (symbol->name ()));
3163 return true;
3166 /* If JFUNC has a reference description with refcount different from
3167 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3168 NULL. JFUNC must be a constant jump function. */
3170 static struct ipa_cst_ref_desc *
3171 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3173 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3174 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3175 return rdesc;
3176 else
3177 return NULL;
3180 /* If the value of constant jump function JFUNC is an address of a function
3181 declaration, return the associated call graph node. Otherwise return
3182 NULL. */
3184 static cgraph_node *
3185 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3187 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3188 tree cst = ipa_get_jf_constant (jfunc);
3189 if (TREE_CODE (cst) != ADDR_EXPR
3190 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3191 return NULL;
3193 return cgraph_node::get (TREE_OPERAND (cst, 0));
3197 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3198 refcount and if it hits zero, remove the reference to the symbol JFUNC
3199 refers to from the caller of the edge specified in the rdesc. Return false
3200 if either the symbol or the reference could not be found, otherwise return true. */
3202 static bool
3203 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3205 struct ipa_cst_ref_desc *rdesc;
3206 if (jfunc->type == IPA_JF_CONST
3207 && (rdesc = jfunc_rdesc_usable (jfunc))
3208 && --rdesc->refcount == 0)
3210 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3211 if (!symbol)
3212 return false;
3214 return remove_described_reference (symbol, rdesc);
3216 return true;
3219 /* Try to find a destination for indirect edge IE that corresponds to a simple
3220 call or a call of a member function pointer and where the destination is a
3221 pointer formal parameter described by jump function JFUNC. If it can be
3222 determined, return the newly direct edge, otherwise return NULL.
3223 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
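/* Illustrative sketch (names made up): if JFUNC says the called pointer is
   the constant &do_thing, or that the aggregate passed by the caller holds
   &do_thing at the offset recorded in IE->indirect_info, the lookup below
   retrieves that ADDR_EXPR and the edge is redirected to do_thing.  */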
3225 static struct cgraph_edge *
3226 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3227 struct ipa_jump_func *jfunc,
3228 struct ipa_node_params *new_root_info)
3230 struct cgraph_edge *cs;
3231 tree target;
3232 bool agg_contents = ie->indirect_info->agg_contents;
3233 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc);
3234 if (agg_contents)
3236 bool from_global_constant;
3237 target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3238 ie->indirect_info->offset,
3239 ie->indirect_info->by_ref,
3240 &from_global_constant);
3241 if (target
3242 && !from_global_constant
3243 && !ie->indirect_info->guaranteed_unmodified)
3244 return NULL;
3246 else
3247 target = scalar;
3248 if (!target)
3249 return NULL;
3250 cs = ipa_make_edge_direct_to_target (ie, target);
3252 if (cs && !agg_contents)
3254 bool ok;
3255 gcc_checking_assert (cs->callee
3256 && (cs != ie
3257 || jfunc->type != IPA_JF_CONST
3258 || !cgraph_node_for_jfunc (jfunc)
3259 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3260 ok = try_decrement_rdesc_refcount (jfunc);
3261 gcc_checking_assert (ok);
3264 return cs;
3267 /* Return the target to be used in cases of impossible devirtualization. IE
3268 and target (the latter can be NULL) are dumped when dumping is enabled. */
3270 tree
3271 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3273 if (dump_file)
3275 if (target)
3276 fprintf (dump_file,
3277 "Type inconsistent devirtualization: %s/%i->%s\n",
3278 ie->caller->name (), ie->caller->order,
3279 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3280 else
3281 fprintf (dump_file,
3282 "No devirtualization target in %s/%i\n",
3283 ie->caller->name (), ie->caller->order);
3285 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3286 cgraph_node::get_create (new_target);
3287 return new_target;
3290 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3291 call based on a formal parameter which is described by jump function JFUNC
3292 and if it can be determined, make it direct and return the direct edge.
3293 Otherwise, return NULL. CTX describes the polymorphic context that the
3294 parameter the call is based on brings along with it. */
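/* A hedged illustration: when the vtable pointer value of the object is
   known (via the aggregate jump function), or when CTX narrows the set of
   possible polymorphic call targets to a single method, the call below is
   resolved to that method, speculatively so if the virtual table pointer
   may have changed since the object was passed.  */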
3296 static struct cgraph_edge *
3297 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3298 struct ipa_jump_func *jfunc,
3299 struct ipa_polymorphic_call_context ctx)
3301 tree target = NULL;
3302 bool speculative = false;
3304 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
3305 return NULL;
3307 gcc_assert (!ie->indirect_info->by_ref);
3309 /* Try to do lookup via known virtual table pointer value. */
3310 if (!ie->indirect_info->vptr_changed
3311 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
3313 tree vtable;
3314 unsigned HOST_WIDE_INT offset;
3315 tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3316 : NULL;
3317 tree t = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3318 ie->indirect_info->offset,
3319 true);
3320 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3322 bool can_refer;
3323 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3324 vtable, offset, &can_refer);
3325 if (can_refer)
3327 if (!t
3328 || (TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
3329 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
3330 || !possible_polymorphic_call_target_p
3331 (ie, cgraph_node::get (t)))
3333 /* Do not speculate builtin_unreachable, it is stupid! */
3334 if (!ie->indirect_info->vptr_changed)
3335 target = ipa_impossible_devirt_target (ie, target);
3336 else
3337 target = NULL;
3339 else
3341 target = t;
3342 speculative = ie->indirect_info->vptr_changed;
3348 ipa_polymorphic_call_context ie_context (ie);
3349 vec <cgraph_node *>targets;
3350 bool final;
3352 ctx.offset_by (ie->indirect_info->offset);
3353 if (ie->indirect_info->vptr_changed)
3354 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3355 ie->indirect_info->otr_type);
3356 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3357 targets = possible_polymorphic_call_targets
3358 (ie->indirect_info->otr_type,
3359 ie->indirect_info->otr_token,
3360 ctx, &final);
3361 if (final && targets.length () <= 1)
3363 speculative = false;
3364 if (targets.length () == 1)
3365 target = targets[0]->decl;
3366 else
3367 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3369 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3370 && !ie->speculative && ie->maybe_hot_p ())
3372 cgraph_node *n;
3373 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3374 ie->indirect_info->otr_token,
3375 ie->indirect_info->context);
3376 if (n)
3378 target = n->decl;
3379 speculative = true;
3383 if (target)
3385 if (!possible_polymorphic_call_target_p
3386 (ie, cgraph_node::get_create (target)))
3388 if (speculative)
3389 return NULL;
3390 target = ipa_impossible_devirt_target (ie, target);
3392 return ipa_make_edge_direct_to_target (ie, target, speculative);
3394 else
3395 return NULL;
3398 /* Update the param called notes associated with NODE when CS is being inlined,
3399 assuming NODE is (potentially indirectly) inlined into CS->callee.
3400 Moreover, if the callee is discovered to be constant, create a new cgraph
3401 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3402 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
3404 static bool
3405 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3406 struct cgraph_node *node,
3407 vec<cgraph_edge *> *new_edges)
3409 struct ipa_edge_args *top;
3410 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3411 struct ipa_node_params *new_root_info;
3412 bool res = false;
3414 ipa_check_create_edge_args ();
3415 top = IPA_EDGE_REF (cs);
3416 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3417 ? cs->caller->global.inlined_to
3418 : cs->caller);
3420 for (ie = node->indirect_calls; ie; ie = next_ie)
3422 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3423 struct ipa_jump_func *jfunc;
3424 int param_index;
3425 cgraph_node *spec_target = NULL;
3427 next_ie = ie->next_callee;
3429 if (ici->param_index == -1)
3430 continue;
3432 /* We must check range due to calls with variable number of arguments: */
3433 if (ici->param_index >= ipa_get_cs_argument_count (top))
3435 ici->param_index = -1;
3436 continue;
3439 param_index = ici->param_index;
3440 jfunc = ipa_get_ith_jump_func (top, param_index);
3442 if (ie->speculative)
3444 struct cgraph_edge *de;
3445 struct ipa_ref *ref;
3446 ie->speculative_call_info (de, ie, ref);
3447 spec_target = de->callee;
3450 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3451 new_direct_edge = NULL;
3452 else if (ici->polymorphic)
3454 ipa_polymorphic_call_context ctx;
3455 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3456 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3458 else
3459 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3460 new_root_info);
3461 /* If speculation was removed, then we need to do nothing. */
3462 if (new_direct_edge && new_direct_edge != ie
3463 && new_direct_edge->callee == spec_target)
3465 new_direct_edge->indirect_inlining_edge = 1;
3466 top = IPA_EDGE_REF (cs);
3467 res = true;
3468 if (!new_direct_edge->speculative)
3469 continue;
3471 else if (new_direct_edge)
3473 new_direct_edge->indirect_inlining_edge = 1;
3474 if (new_direct_edge->call_stmt)
3475 new_direct_edge->call_stmt_cannot_inline_p
3476 = !gimple_check_call_matching_types (
3477 new_direct_edge->call_stmt,
3478 new_direct_edge->callee->decl, false);
3479 if (new_edges)
3481 new_edges->safe_push (new_direct_edge);
3482 res = true;
3484 top = IPA_EDGE_REF (cs);
3485 /* If a speculative edge was introduced, we still need to update
3486 the call info of the indirect edge. */
3487 if (!new_direct_edge->speculative)
3488 continue;
3490 if (jfunc->type == IPA_JF_PASS_THROUGH
3491 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3493 if (ici->agg_contents
3494 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3495 && !ici->polymorphic)
3496 ici->param_index = -1;
3497 else
3499 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3500 if (ici->polymorphic
3501 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3502 ici->vptr_changed = true;
3505 else if (jfunc->type == IPA_JF_ANCESTOR)
3507 if (ici->agg_contents
3508 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3509 && !ici->polymorphic)
3510 ici->param_index = -1;
3511 else
3513 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3514 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3515 if (ici->polymorphic
3516 && !ipa_get_jf_ancestor_type_preserved (jfunc))
3517 ici->vptr_changed = true;
3520 else
3521 /* Either we can find a destination for this edge now or never. */
3522 ici->param_index = -1;
3525 return res;
3528 /* Recursively traverse subtree of NODE (including node) made of inlined
3529 cgraph_edges when CS has been inlined and invoke
3530 update_indirect_edges_after_inlining on all nodes and
3531 update_jump_functions_after_inlining on all non-inlined edges that lead out
3532 of this subtree. Newly discovered indirect edges will be added to
3533 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
3534 created. */
3536 static bool
3537 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3538 struct cgraph_node *node,
3539 vec<cgraph_edge *> *new_edges)
3541 struct cgraph_edge *e;
3542 bool res;
3544 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3546 for (e = node->callees; e; e = e->next_callee)
3547 if (!e->inline_failed)
3548 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3549 else
3550 update_jump_functions_after_inlining (cs, e);
3551 for (e = node->indirect_calls; e; e = e->next_callee)
3552 update_jump_functions_after_inlining (cs, e);
3554 return res;
3557 /* Combine two controlled uses counts as done during inlining. */
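/* Worked example: if the caller has C = 3 described uses of a value and the
   callee has D = 2 described uses of the parameter receiving it, then after
   inlining the use constituted by the call argument itself disappears while
   the callee's uses become the caller's, giving 3 + 2 - 1 = 4.  */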
3559 static int
3560 combine_controlled_uses_counters (int c, int d)
3562 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3563 return IPA_UNDESCRIBED_USE;
3564 else
3565 return c + d - 1;
3568 /* Propagate the number of controlled users from CS->callee to the new root
3569 of the tree of inlined nodes. */
3571 static void
3572 propagate_controlled_uses (struct cgraph_edge *cs)
3574 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3575 struct cgraph_node *new_root = cs->caller->global.inlined_to
3576 ? cs->caller->global.inlined_to : cs->caller;
3577 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3578 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3579 int count, i;
3581 count = MIN (ipa_get_cs_argument_count (args),
3582 ipa_get_param_count (old_root_info));
3583 for (i = 0; i < count; i++)
3585 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3586 struct ipa_cst_ref_desc *rdesc;
3588 if (jf->type == IPA_JF_PASS_THROUGH)
3590 int src_idx, c, d;
3591 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3592 c = ipa_get_controlled_uses (new_root_info, src_idx);
3593 d = ipa_get_controlled_uses (old_root_info, i);
3595 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3596 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3597 c = combine_controlled_uses_counters (c, d);
3598 ipa_set_controlled_uses (new_root_info, src_idx, c);
3599 if (c == 0 && new_root_info->ipcp_orig_node)
3601 struct cgraph_node *n;
3602 struct ipa_ref *ref;
3603 tree t = new_root_info->known_csts[src_idx];
3605 if (t && TREE_CODE (t) == ADDR_EXPR
3606 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3607 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3608 && (ref = new_root->find_reference (n, NULL, 0)))
3610 if (dump_file)
3611 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3612 "reference from %s/%i to %s/%i.\n",
3613 xstrdup_for_dump (new_root->name ()),
3614 new_root->order,
3615 xstrdup_for_dump (n->name ()), n->order);
3616 ref->remove_reference ();
3620 else if (jf->type == IPA_JF_CONST
3621 && (rdesc = jfunc_rdesc_usable (jf)))
3623 int d = ipa_get_controlled_uses (old_root_info, i);
3624 int c = rdesc->refcount;
3625 rdesc->refcount = combine_controlled_uses_counters (c, d);
3626 if (rdesc->refcount == 0)
3628 tree cst = ipa_get_jf_constant (jf);
3629 struct cgraph_node *n;
3630 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3631 && TREE_CODE (TREE_OPERAND (cst, 0))
3632 == FUNCTION_DECL);
3633 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3634 if (n)
3636 struct cgraph_node *clone;
3637 bool ok;
3638 ok = remove_described_reference (n, rdesc);
3639 gcc_checking_assert (ok);
3641 clone = cs->caller;
3642 while (clone->global.inlined_to
3643 && clone != rdesc->cs->caller
3644 && IPA_NODE_REF (clone)->ipcp_orig_node)
3646 struct ipa_ref *ref;
3647 ref = clone->find_reference (n, NULL, 0);
3648 if (ref)
3650 if (dump_file)
3651 fprintf (dump_file, "ipa-prop: Removing "
3652 "cloning-created reference "
3653 "from %s/%i to %s/%i.\n",
3654 xstrdup_for_dump (clone->name ()),
3655 clone->order,
3656 xstrdup_for_dump (n->name ()),
3657 n->order);
3658 ref->remove_reference ();
3660 clone = clone->callers->caller;
3667 for (i = ipa_get_param_count (old_root_info);
3668 i < ipa_get_cs_argument_count (args);
3669 i++)
3671 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3673 if (jf->type == IPA_JF_CONST)
3675 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3676 if (rdesc)
3677 rdesc->refcount = IPA_UNDESCRIBED_USE;
3679 else if (jf->type == IPA_JF_PASS_THROUGH)
3680 ipa_set_controlled_uses (new_root_info,
3681 jf->value.pass_through.formal_id,
3682 IPA_UNDESCRIBED_USE);
3686 /* Update jump functions and call note functions on inlining the call site CS.
3687 CS is expected to lead to a node already cloned by
3688 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3689 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3690 created. */
3692 bool
3693 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3694 vec<cgraph_edge *> *new_edges)
3696 bool changed;
3697 /* Do nothing if the preparation phase has not been carried out yet
3698 (i.e. during early inlining). */
3699 if (!ipa_node_params_sum)
3700 return false;
3701 gcc_assert (ipa_edge_args_sum);
3703 propagate_controlled_uses (cs);
3704 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3706 return changed;
3709 /* Ensure that the array of edge argument infos is big enough to accommodate
3710 a structure for all edges, reallocating it if not. Also allocate the
3711 associated hash tables if they do not already exist. */
3713 void
3714 ipa_check_create_edge_args (void)
3716 if (!ipa_edge_args_sum)
3717 ipa_edge_args_sum
3718 = (new (ggc_cleared_alloc <ipa_edge_args_sum_t> ())
3719 ipa_edge_args_sum_t (symtab, true));
3720 if (!ipa_bits_hash_table)
3721 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3722 if (!ipa_vr_hash_table)
3723 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
3726 /* Frees all dynamically allocated structures that the argument info points
3727 to. */
3729 void
3730 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3732 vec_free (args->jump_functions);
3733 memset (args, 0, sizeof (*args));
3736 /* Free all ipa_edge structures. */
3738 void
3739 ipa_free_all_edge_args (void)
3741 if (!ipa_edge_args_sum)
3742 return;
3744 ipa_edge_args_sum->release ();
3745 ipa_edge_args_sum = NULL;
3748 /* Free all ipa_node_params structures. */
3750 void
3751 ipa_free_all_node_params (void)
3753 ipa_node_params_sum->release ();
3754 ipa_node_params_sum = NULL;
3757 /* Grow ipcp_transformations if necessary. Also allocate any necessary hash
3758 tables if they do not already exist. */
3760 void
3761 ipcp_grow_transformations_if_necessary (void)
3763 if (vec_safe_length (ipcp_transformations)
3764 <= (unsigned) symtab->cgraph_max_uid)
3765 vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
3766 if (!ipa_bits_hash_table)
3767 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3768 if (!ipa_vr_hash_table)
3769 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
3772 /* Set the aggregate replacements of NODE to be AGGVALS. */
3774 void
3775 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3776 struct ipa_agg_replacement_value *aggvals)
3778 ipcp_grow_transformations_if_necessary ();
3779 (*ipcp_transformations)[node->uid].agg_values = aggvals;
3782 /* Hook that is called by cgraph.c when an edge is removed. Adjust reference
3783 count data structures accordingly. */
3785 void
3786 ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
3788 if (args->jump_functions)
3790 struct ipa_jump_func *jf;
3791 int i;
3792 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3794 struct ipa_cst_ref_desc *rdesc;
3795 try_decrement_rdesc_refcount (jf);
3796 if (jf->type == IPA_JF_CONST
3797 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3798 && rdesc->cs == cs)
3799 rdesc->cs = NULL;
3804 /* Method invoked when an edge is duplicated. Copy ipa_edge_args and adjust
3805 reference count data structures accordingly. */
3807 void
3808 ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
3809 ipa_edge_args *old_args, ipa_edge_args *new_args)
3811 unsigned int i;
3813 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3814 if (old_args->polymorphic_call_contexts)
3815 new_args->polymorphic_call_contexts
3816 = vec_safe_copy (old_args->polymorphic_call_contexts);
3818 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3820 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3821 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3823 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3825 if (src_jf->type == IPA_JF_CONST)
3827 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3829 if (!src_rdesc)
3830 dst_jf->value.constant.rdesc = NULL;
3831 else if (src->caller == dst->caller)
3833 struct ipa_ref *ref;
3834 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3835 gcc_checking_assert (n);
3836 ref = src->caller->find_reference (n, src->call_stmt,
3837 src->lto_stmt_uid);
3838 gcc_checking_assert (ref);
3839 dst->caller->clone_reference (ref, ref->stmt);
3841 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3842 dst_rdesc->cs = dst;
3843 dst_rdesc->refcount = src_rdesc->refcount;
3844 dst_rdesc->next_duplicate = NULL;
3845 dst_jf->value.constant.rdesc = dst_rdesc;
3847 else if (src_rdesc->cs == src)
3849 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3850 dst_rdesc->cs = dst;
3851 dst_rdesc->refcount = src_rdesc->refcount;
3852 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3853 src_rdesc->next_duplicate = dst_rdesc;
3854 dst_jf->value.constant.rdesc = dst_rdesc;
3856 else
3858 struct ipa_cst_ref_desc *dst_rdesc;
3859 /* This can happen during inlining, when a JFUNC can refer to a
3860 reference taken in a function up in the tree of inline clones.
3861 We need to find the duplicate that refers to our tree of
3862 inline clones. */
3864 gcc_assert (dst->caller->global.inlined_to);
3865 for (dst_rdesc = src_rdesc->next_duplicate;
3866 dst_rdesc;
3867 dst_rdesc = dst_rdesc->next_duplicate)
3869 struct cgraph_node *top;
3870 top = dst_rdesc->cs->caller->global.inlined_to
3871 ? dst_rdesc->cs->caller->global.inlined_to
3872 : dst_rdesc->cs->caller;
3873 if (dst->caller->global.inlined_to == top)
3874 break;
3876 gcc_assert (dst_rdesc);
3877 dst_jf->value.constant.rdesc = dst_rdesc;
3880 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3881 && src->caller == dst->caller)
3883 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3884 ? dst->caller->global.inlined_to : dst->caller;
3885 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3886 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3888 int c = ipa_get_controlled_uses (root_info, idx);
3889 if (c != IPA_UNDESCRIBED_USE)
3891 c++;
3892 ipa_set_controlled_uses (root_info, idx, c);
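/* For intuition (illustrative, not from the sources): when an edge E1
   carrying a constant jump function with descriptor R1 is duplicated twice,
   the descriptors form a chain linked through next_duplicate:

     R1 (cs = E1) -> R2 (cs = E2) -> R3 (cs = E3)

   The final branch above walks such a chain to find the duplicate whose
   caller belongs to the same tree of inline clones as DST.  */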
3898 /* Analyze a function newly added to the callgraph.  */
3900 static void
3901 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3903 if (node->has_gimple_body_p ())
3904 ipa_analyze_node (node);
3907 /* Hook that is called by summary when a node is duplicated. */
3909 void
3910 ipa_node_params_t::duplicate (cgraph_node *src, cgraph_node *dst,
3911 ipa_node_params *old_info,
3912 ipa_node_params *new_info)
3914 ipa_agg_replacement_value *old_av, *new_av;
3916 new_info->descriptors = vec_safe_copy (old_info->descriptors);
3917 new_info->lattices = NULL;
3918 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3919 new_info->known_csts = old_info->known_csts.copy ();
3920 new_info->known_contexts = old_info->known_contexts.copy ();
3922 new_info->analysis_done = old_info->analysis_done;
3923 new_info->node_enqueued = old_info->node_enqueued;
3924 new_info->versionable = old_info->versionable;
3926 old_av = ipa_get_agg_replacements_for_node (src);
3927 if (old_av)
3929 new_av = NULL;
3930 while (old_av)
3932 struct ipa_agg_replacement_value *v;
3934 v = ggc_alloc<ipa_agg_replacement_value> ();
3935 memcpy (v, old_av, sizeof (*v));
3936 v->next = new_av;
3937 new_av = v;
3938 old_av = old_av->next;
3940 ipa_set_node_agg_value_chain (dst, new_av);
3943 ipcp_transformation_summary *src_trans
3944 = ipcp_get_transformation_summary (src);
3946 if (src_trans)
3948 ipcp_grow_transformations_if_necessary ();
3949 src_trans = ipcp_get_transformation_summary (src);
3950 ipcp_transformation_summary *dst_trans
3951 = ipcp_get_transformation_summary (dst);
3953 dst_trans->bits = vec_safe_copy (src_trans->bits);
3955 const vec<ipa_vr, va_gc> *src_vr = src_trans->m_vr;
3956 vec<ipa_vr, va_gc> *&dst_vr
3957 = ipcp_get_transformation_summary (dst)->m_vr;
3958 if (vec_safe_length (src_trans->m_vr) > 0)
3960 vec_safe_reserve_exact (dst_vr, src_vr->length ());
3961 for (unsigned i = 0; i < src_vr->length (); ++i)
3962 dst_vr->quick_push ((*src_vr)[i]);
3967 /* Register our cgraph hooks if they are not already there. */
3969 void
3970 ipa_register_cgraph_hooks (void)
3972 ipa_check_create_node_params ();
3973 ipa_check_create_edge_args ();
3975 function_insertion_hook_holder =
3976 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3979 /* Unregister our cgraph hooks if they have been registered.  */
3981 static void
3982 ipa_unregister_cgraph_hooks (void)
3984 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3985 function_insertion_hook_holder = NULL;
3988 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3989 longer needed after ipa-cp. */
3991 void
3992 ipa_free_all_structures_after_ipa_cp (void)
3994 if (!optimize && !in_lto_p)
3996 ipa_free_all_edge_args ();
3997 ipa_free_all_node_params ();
3998 ipcp_sources_pool.release ();
3999 ipcp_cst_values_pool.release ();
4000 ipcp_poly_ctx_values_pool.release ();
4001 ipcp_agg_lattice_pool.release ();
4002 ipa_unregister_cgraph_hooks ();
4003 ipa_refdesc_pool.release ();
4007 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4008 longer needed after indirect inlining. */
4010 void
4011 ipa_free_all_structures_after_iinln (void)
4013 ipa_free_all_edge_args ();
4014 ipa_free_all_node_params ();
4015 ipa_unregister_cgraph_hooks ();
4016 ipcp_sources_pool.release ();
4017 ipcp_cst_values_pool.release ();
4018 ipcp_poly_ctx_values_pool.release ();
4019 ipcp_agg_lattice_pool.release ();
4020 ipa_refdesc_pool.release ();
4023 /* Print ipa_tree_map data structures of function NODE to F.  */
4026 void
4027 ipa_print_node_params (FILE *f, struct cgraph_node *node)
4029 int i, count;
4030 struct ipa_node_params *info;
4032 if (!node->definition)
4033 return;
4034 info = IPA_NODE_REF (node);
4035 fprintf (f, " function %s/%i parameter descriptors:\n",
4036 node->name (), node->order);
4037 count = ipa_get_param_count (info);
4038 for (i = 0; i < count; i++)
4040 int c;
4042 fprintf (f, " ");
4043 ipa_dump_param (f, info, i);
4044 if (ipa_is_param_used (info, i))
4045 fprintf (f, " used");
4046 c = ipa_get_controlled_uses (info, i);
4047 if (c == IPA_UNDESCRIBED_USE)
4048 fprintf (f, " undescribed_use");
4049 else
4050 fprintf (f, " controlled_uses=%i", c);
4051 fprintf (f, "\n");
4055 /* Print ipa_tree_map data structures of all functions in the
4056 callgraph to F. */
4058 void
4059 ipa_print_all_params (FILE * f)
4061 struct cgraph_node *node;
4063 fprintf (f, "\nFunction parameters:\n");
4064 FOR_EACH_FUNCTION (node)
4065 ipa_print_node_params (f, node);
4068 /* Return a heap-allocated vector containing the formal parameters of FNDECL.  */
4070 vec<tree>
4071 ipa_get_vector_of_formal_parms (tree fndecl)
4073 vec<tree> args;
4074 int count;
4075 tree parm;
4077 gcc_assert (!flag_wpa);
4078 count = count_formal_params (fndecl);
4079 args.create (count);
4080 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
4081 args.quick_push (parm);
4083 return args;
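/* Usage sketch (illustrative only): the returned vector must be released by
   the caller once it is no longer needed, e.g.

     vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
     for (unsigned i = 0; i < parms.length (); i++)
       print_generic_expr (stderr, parms[i], 0);
     parms.release ();
*/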
4086 /* Return a heap-allocated vector containing the types of formal parameters
4087    of function type FNTYPE.  */
4089 vec<tree>
4090 ipa_get_vector_of_formal_parm_types (tree fntype)
4092 vec<tree> types;
4093 int count = 0;
4094 tree t;
4096 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
4097 count++;
4099 types.create (count);
4100 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
4101 types.quick_push (TREE_VALUE (t));
4103 return types;
4106 /* Modify the function declaration FNDECL and its type according to the plan in
4107    ADJUSTMENTS.  It also sets the base fields of the individual adjustment
4108    structures to reflect the actual parameters being modified, which are
4109    determined by the base_index field.  */
4111 void
4112 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
4114 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
4115 tree orig_type = TREE_TYPE (fndecl);
4116 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
4118   /* The following test is an ugly hack; some functions simply don't have any
4119      arguments in their type.  This is probably a bug but well...  */
4120 bool care_for_types = (old_arg_types != NULL_TREE);
4121 bool last_parm_void;
4122 vec<tree> otypes;
4123 if (care_for_types)
4125 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
4126 == void_type_node);
4127 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
4128 if (last_parm_void)
4129 gcc_assert (oparms.length () + 1 == otypes.length ());
4130 else
4131 gcc_assert (oparms.length () == otypes.length ());
4133 else
4135 last_parm_void = false;
4136 otypes.create (0);
4139 int len = adjustments.length ();
4140 tree *link = &DECL_ARGUMENTS (fndecl);
4141 tree new_arg_types = NULL;
4142 for (int i = 0; i < len; i++)
4144 struct ipa_parm_adjustment *adj;
4145 gcc_assert (link);
4147 adj = &adjustments[i];
4148 tree parm;
4149 if (adj->op == IPA_PARM_OP_NEW)
4150 parm = NULL;
4151 else
4152 parm = oparms[adj->base_index];
4153 adj->base = parm;
4155 if (adj->op == IPA_PARM_OP_COPY)
4157 if (care_for_types)
4158 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
4159 new_arg_types);
4160 *link = parm;
4161 link = &DECL_CHAIN (parm);
4163 else if (adj->op != IPA_PARM_OP_REMOVE)
4165 tree new_parm;
4166 tree ptype;
4168 if (adj->by_ref)
4169 ptype = build_pointer_type (adj->type);
4170 else
4172 ptype = adj->type;
4173 if (is_gimple_reg_type (ptype)
4174 && TYPE_MODE (ptype) != BLKmode)
4176 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
4177 if (TYPE_ALIGN (ptype) != malign)
4178 ptype = build_aligned_type (ptype, malign);
4182 if (care_for_types)
4183 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
4185 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
4186 ptype);
4187 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
4188 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
4189 DECL_ARTIFICIAL (new_parm) = 1;
4190 DECL_ARG_TYPE (new_parm) = ptype;
4191 DECL_CONTEXT (new_parm) = fndecl;
4192 TREE_USED (new_parm) = 1;
4193 DECL_IGNORED_P (new_parm) = 1;
4194 layout_decl (new_parm, 0);
4196 if (adj->op == IPA_PARM_OP_NEW)
4197 adj->base = NULL;
4198 else
4199 adj->base = parm;
4200 adj->new_decl = new_parm;
4202 *link = new_parm;
4203 link = &DECL_CHAIN (new_parm);
4207 *link = NULL_TREE;
4209 tree new_reversed = NULL;
4210 if (care_for_types)
4212 new_reversed = nreverse (new_arg_types);
4213 if (last_parm_void)
4215 if (new_reversed)
4216 TREE_CHAIN (new_arg_types) = void_list_node;
4217 else
4218 new_reversed = void_list_node;
4222   /* Use copy_node to preserve as much as possible from the original type
4223      (debug info, attribute lists etc.).
4224      The exception is METHOD_TYPEs, which must have a THIS argument.
4225      When we are asked to remove it, we need to build a new FUNCTION_TYPE
4226      instead.  */
4227 tree new_type = NULL;
4228 if (TREE_CODE (orig_type) != METHOD_TYPE
4229 || (adjustments[0].op == IPA_PARM_OP_COPY
4230 && adjustments[0].base_index == 0))
4232 new_type = build_distinct_type_copy (orig_type);
4233 TYPE_ARG_TYPES (new_type) = new_reversed;
4235 else
4237 new_type
4238 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
4239 new_reversed));
4240 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
4241 DECL_VINDEX (fndecl) = NULL_TREE;
4244 /* When signature changes, we need to clear builtin info. */
4245 if (DECL_BUILT_IN (fndecl))
4247 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
4248 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
4251 TREE_TYPE (fndecl) = new_type;
4252 DECL_VIRTUAL_P (fndecl) = 0;
4253 DECL_LANG_SPECIFIC (fndecl) = NULL;
4254 otypes.release ();
4255 oparms.release ();
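/* Illustrative sketch (assumed, not taken from a real pass): to drop the
   second of three parameters while keeping the others, a caller would build
   the adjustment vector like this and then apply it:

     ipa_parm_adjustment_vec adjustments;
     adjustments.create (3);
     for (int i = 0; i < 3; i++)
       {
	 struct ipa_parm_adjustment adj;
	 memset (&adj, 0, sizeof (adj));
	 adj.base_index = i;
	 adj.op = (i == 1) ? IPA_PARM_OP_REMOVE : IPA_PARM_OP_COPY;
	 adjustments.quick_push (adj);
       }
     ipa_modify_formal_parameters (fndecl, adjustments);
     adjustments.release ();

   Passes such as tree-sra.c build these vectors in a similar fashion.  */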
4258 /* Modify actual arguments of the function call STMT as indicated in
4259    ADJUSTMENTS.  If this is a directly recursive call, CS must be NULL.
4260    Otherwise it must contain the corresponding call graph edge.  */
4262 void
4263 ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
4264 ipa_parm_adjustment_vec adjustments)
4266 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
4267 vec<tree> vargs;
4268 vec<tree, va_gc> **debug_args = NULL;
4269 gcall *new_stmt;
4270 gimple_stmt_iterator gsi, prev_gsi;
4271 tree callee_decl;
4272 int i, len;
4274 len = adjustments.length ();
4275 vargs.create (len);
4276 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
4277 current_node->remove_stmt_references (stmt);
4279 gsi = gsi_for_stmt (stmt);
4280 prev_gsi = gsi;
4281 gsi_prev (&prev_gsi);
4282 for (i = 0; i < len; i++)
4284 struct ipa_parm_adjustment *adj;
4286 adj = &adjustments[i];
4288 if (adj->op == IPA_PARM_OP_COPY)
4290 tree arg = gimple_call_arg (stmt, adj->base_index);
4292 vargs.quick_push (arg);
4294 else if (adj->op != IPA_PARM_OP_REMOVE)
4296 tree expr, base, off;
4297 location_t loc;
4298 unsigned int deref_align = 0;
4299 bool deref_base = false;
4301 	  /* We create a new parameter out of the value of the old one; we can
4302 	     do the following kinds of transformations:
4304 - A scalar passed by reference is converted to a scalar passed by
4305 value. (adj->by_ref is false and the type of the original
4306 actual argument is a pointer to a scalar).
4308 - A part of an aggregate is passed instead of the whole aggregate.
4309 The part can be passed either by value or by reference, this is
4310 determined by value of adj->by_ref. Moreover, the code below
4311 handles both situations when the original aggregate is passed by
4312 value (its type is not a pointer) and when it is passed by
4313 reference (it is a pointer to an aggregate).
4315 When the new argument is passed by reference (adj->by_ref is true)
4316 it must be a part of an aggregate and therefore we form it by
4317 simply taking the address of a reference inside the original
4318 aggregate. */
4320 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
4321 base = gimple_call_arg (stmt, adj->base_index);
4322 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
4323 : EXPR_LOCATION (base);
4325 if (TREE_CODE (base) != ADDR_EXPR
4326 && POINTER_TYPE_P (TREE_TYPE (base)))
4327 off = build_int_cst (adj->alias_ptr_type,
4328 adj->offset / BITS_PER_UNIT);
4329 else
4331 HOST_WIDE_INT base_offset;
4332 tree prev_base;
4333 bool addrof;
4335 if (TREE_CODE (base) == ADDR_EXPR)
4337 base = TREE_OPERAND (base, 0);
4338 addrof = true;
4340 else
4341 addrof = false;
4342 prev_base = base;
4343 base = get_addr_base_and_unit_offset (base, &base_offset);
4344 /* Aggregate arguments can have non-invariant addresses. */
4345 if (!base)
4347 base = build_fold_addr_expr (prev_base);
4348 off = build_int_cst (adj->alias_ptr_type,
4349 adj->offset / BITS_PER_UNIT);
4351 else if (TREE_CODE (base) == MEM_REF)
4353 if (!addrof)
4355 deref_base = true;
4356 deref_align = TYPE_ALIGN (TREE_TYPE (base));
4358 off = build_int_cst (adj->alias_ptr_type,
4359 base_offset
4360 + adj->offset / BITS_PER_UNIT);
4361 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
4362 off);
4363 base = TREE_OPERAND (base, 0);
4365 else
4367 off = build_int_cst (adj->alias_ptr_type,
4368 base_offset
4369 + adj->offset / BITS_PER_UNIT);
4370 base = build_fold_addr_expr (base);
4374 if (!adj->by_ref)
4376 tree type = adj->type;
4377 unsigned int align;
4378 unsigned HOST_WIDE_INT misalign;
4380 if (deref_base)
4382 align = deref_align;
4383 misalign = 0;
4385 else
4387 get_pointer_alignment_1 (base, &align, &misalign);
4388 if (TYPE_ALIGN (type) > align)
4389 align = TYPE_ALIGN (type);
4391 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
4392 * BITS_PER_UNIT);
4393 misalign = misalign & (align - 1);
4394 if (misalign != 0)
4395 align = least_bit_hwi (misalign);
4396 if (align < TYPE_ALIGN (type))
4397 type = build_aligned_type (type, align);
4398 base = force_gimple_operand_gsi (&gsi, base,
4399 true, NULL, true, GSI_SAME_STMT);
4400 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
4401 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
4402 	      /* If expr is not a valid gimple call argument, emit
4403 	         a load into a temporary.  */
4404 if (is_gimple_reg_type (TREE_TYPE (expr)))
4406 gimple *tem = gimple_build_assign (NULL_TREE, expr);
4407 if (gimple_in_ssa_p (cfun))
4409 gimple_set_vuse (tem, gimple_vuse (stmt));
4410 expr = make_ssa_name (TREE_TYPE (expr), tem);
4412 else
4413 expr = create_tmp_reg (TREE_TYPE (expr));
4414 gimple_assign_set_lhs (tem, expr);
4415 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4418 else
4420 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4421 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
4422 expr = build_fold_addr_expr (expr);
4423 expr = force_gimple_operand_gsi (&gsi, expr,
4424 true, NULL, true, GSI_SAME_STMT);
4426 vargs.quick_push (expr);
4428 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4430 unsigned int ix;
4431 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4432 gimple *def_temp;
4434 arg = gimple_call_arg (stmt, adj->base_index);
4435 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4437 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4438 continue;
4439 arg = fold_convert_loc (gimple_location (stmt),
4440 TREE_TYPE (origin), arg);
4442 if (debug_args == NULL)
4443 debug_args = decl_debug_args_insert (callee_decl);
4444 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4445 if (ddecl == origin)
4447 ddecl = (**debug_args)[ix + 1];
4448 break;
4450 if (ddecl == NULL)
4452 ddecl = make_node (DEBUG_EXPR_DECL);
4453 DECL_ARTIFICIAL (ddecl) = 1;
4454 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4455 SET_DECL_MODE (ddecl, DECL_MODE (origin));
4457 vec_safe_push (*debug_args, origin);
4458 vec_safe_push (*debug_args, ddecl);
4460 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4461 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4465 if (dump_file && (dump_flags & TDF_DETAILS))
4467 fprintf (dump_file, "replacing stmt:");
4468 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4471 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4472 vargs.release ();
4473 if (gimple_call_lhs (stmt))
4474 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4476 gimple_set_block (new_stmt, gimple_block (stmt));
4477 if (gimple_has_location (stmt))
4478 gimple_set_location (new_stmt, gimple_location (stmt));
4479 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4480 gimple_call_copy_flags (new_stmt, stmt);
4481 if (gimple_in_ssa_p (cfun))
4483 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4484 if (gimple_vdef (stmt))
4486 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4487 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4491 if (dump_file && (dump_flags & TDF_DETAILS))
4493 fprintf (dump_file, "with stmt:");
4494 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4495 fprintf (dump_file, "\n");
4497 gsi_replace (&gsi, new_stmt, true);
4498 if (cs)
4499 cs->set_call_stmt (new_stmt);
4500   do
4502       current_node->record_stmt_references (gsi_stmt (gsi));
4503       gsi_prev (&gsi);
4505   while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
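/* Illustrative pairing (assumed usage): a pass that changes a signature
   rewrites the declaration once and then every call site with the same
   adjustment vector, roughly

     ipa_modify_formal_parameters (node->decl, adjustments);
     for (cgraph_edge *cs = node->callers; cs; cs = cs->next_caller)
       ipa_modify_call_arguments (cs, cs->call_stmt, adjustments);

   eliding the switching into each caller's function context that a real
   pass has to perform around the call-site rewrite.  */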
4508 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4509 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4510    specifies whether the function should care about type incompatibility
4511    between the current and new expressions.  If it is false, the function
4512    will leave incompatibility issues to the caller.  Return true iff the
4513    expression was modified.  */
4515 bool
4516 ipa_modify_expr (tree *expr, bool convert,
4517 ipa_parm_adjustment_vec adjustments)
4519 struct ipa_parm_adjustment *cand
4520 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4521 if (!cand)
4522 return false;
4524 tree src;
4525 if (cand->by_ref)
4527 src = build_simple_mem_ref (cand->new_decl);
4528 REF_REVERSE_STORAGE_ORDER (src) = cand->reverse;
4530 else
4531 src = cand->new_decl;
4533 if (dump_file && (dump_flags & TDF_DETAILS))
4535 fprintf (dump_file, "About to replace expr ");
4536 print_generic_expr (dump_file, *expr, 0);
4537 fprintf (dump_file, " with ");
4538 print_generic_expr (dump_file, src, 0);
4539 fprintf (dump_file, "\n");
4542 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4544 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4545 *expr = vce;
4547 else
4548 *expr = src;
4549 return true;
4552 /* If T is an SSA_NAME, return NULL if it is not a default def or
4553 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4554 the base variable is always returned, regardless if it is a default
4555 def. Return T if it is not an SSA_NAME. */
4557 static tree
4558 get_ssa_base_param (tree t, bool ignore_default_def)
4560 if (TREE_CODE (t) == SSA_NAME)
4562 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4563 return SSA_NAME_VAR (t);
4564 else
4565 return NULL_TREE;
4567 return t;
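/* Behavior sketch (illustrative): given a parameter p with default
   definition p_1(D) and a later SSA name p_2 defined by a statement,

     get_ssa_base_param (p_1, false)  returns the PARM_DECL p,
     get_ssa_base_param (p_2, false)  returns NULL_TREE,
     get_ssa_base_param (p_2, true)   returns p as well,

   while any operand that is not an SSA_NAME is returned unchanged.  */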
4570 /* Given an expression, return an adjustment entry specifying the
4571 transformation to be done on EXPR. If no suitable adjustment entry
4572 was found, returns NULL.
4574 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4575 default def, otherwise bail on them.
4577 If CONVERT is non-NULL, this function will set *CONVERT if the
4578 expression provided is a component reference. ADJUSTMENTS is the
4579 adjustments vector. */
4581 ipa_parm_adjustment *
4582 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4583 ipa_parm_adjustment_vec adjustments,
4584 bool ignore_default_def)
4586 if (TREE_CODE (**expr) == BIT_FIELD_REF
4587 || TREE_CODE (**expr) == IMAGPART_EXPR
4588 || TREE_CODE (**expr) == REALPART_EXPR)
4590 *expr = &TREE_OPERAND (**expr, 0);
4591 if (convert)
4592 *convert = true;
4595 HOST_WIDE_INT offset, size, max_size;
4596 bool reverse;
4597 tree base
4598 = get_ref_base_and_extent (**expr, &offset, &size, &max_size, &reverse);
4599 if (!base || size == -1 || max_size == -1)
4600 return NULL;
4602 if (TREE_CODE (base) == MEM_REF)
4604 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4605 base = TREE_OPERAND (base, 0);
4608 base = get_ssa_base_param (base, ignore_default_def);
4609 if (!base || TREE_CODE (base) != PARM_DECL)
4610 return NULL;
4612 struct ipa_parm_adjustment *cand = NULL;
4613 unsigned int len = adjustments.length ();
4614 for (unsigned i = 0; i < len; i++)
4616 struct ipa_parm_adjustment *adj = &adjustments[i];
4618 if (adj->base == base
4619 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4621 cand = adj;
4622 break;
4626 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4627 return NULL;
4628 return cand;
4631 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4633 static bool
4634 index_in_adjustments_multiple_times_p (int base_index,
4635 ipa_parm_adjustment_vec adjustments)
4637 int i, len = adjustments.length ();
4638 bool one = false;
4640 for (i = 0; i < len; i++)
4642 struct ipa_parm_adjustment *adj;
4643 adj = &adjustments[i];
4645 if (adj->base_index == base_index)
4647 if (one)
4648 return true;
4649 else
4650 one = true;
4653 return false;
4657 /* Return adjustments that should have the same effect on function parameters
4658 and call arguments as if they were first changed according to adjustments in
4659 INNER and then by adjustments in OUTER. */
4661 ipa_parm_adjustment_vec
4662 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4663 ipa_parm_adjustment_vec outer)
4665 int i, outlen = outer.length ();
4666 int inlen = inner.length ();
4667 int removals = 0;
4668 ipa_parm_adjustment_vec adjustments, tmp;
4670 tmp.create (inlen);
4671 for (i = 0; i < inlen; i++)
4673 struct ipa_parm_adjustment *n;
4674 n = &inner[i];
4676 if (n->op == IPA_PARM_OP_REMOVE)
4677 removals++;
4678 else
4680 	  /* FIXME: Handling of new arguments is not implemented yet.  */
4681 gcc_assert (n->op != IPA_PARM_OP_NEW);
4682 tmp.quick_push (*n);
4686 adjustments.create (outlen + removals);
4687 for (i = 0; i < outlen; i++)
4689 struct ipa_parm_adjustment r;
4690 struct ipa_parm_adjustment *out = &outer[i];
4691 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4693 memset (&r, 0, sizeof (r));
4694 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4695 if (out->op == IPA_PARM_OP_REMOVE)
4697 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4699 r.op = IPA_PARM_OP_REMOVE;
4700 adjustments.quick_push (r);
4702 continue;
4704 else
4706 	/* FIXME: Handling of new arguments is not implemented yet.  */
4707 gcc_assert (out->op != IPA_PARM_OP_NEW);
4710 r.base_index = in->base_index;
4711 r.type = out->type;
4713 /* FIXME: Create nonlocal value too. */
4715 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4716 r.op = IPA_PARM_OP_COPY;
4717 else if (in->op == IPA_PARM_OP_COPY)
4718 r.offset = out->offset;
4719 else if (out->op == IPA_PARM_OP_COPY)
4720 r.offset = in->offset;
4721 else
4722 r.offset = in->offset + out->offset;
4723 adjustments.quick_push (r);
4726 for (i = 0; i < inlen; i++)
4728 struct ipa_parm_adjustment *n = &inner[i];
4730 if (n->op == IPA_PARM_OP_REMOVE)
4731 adjustments.quick_push (*n);
4734 tmp.release ();
4735 return adjustments;
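/* Worked example (illustrative): let INNER copy parameter 0 and remove
   parameter 1, and let OUTER remove its single remaining parameter 0.
   The combination removes both original parameters:

     inner:    { op=COPY,   base_index=0 }, { op=REMOVE, base_index=1 }
     outer:    { op=REMOVE, base_index=0 }
     combined: { op=REMOVE, base_index=0 }, { op=REMOVE, base_index=1 }
*/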
4738 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a
4739    human-friendly way, assuming they are meant to be applied to FNDECL.  */
4741 void
4742 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4743 tree fndecl)
4745 int i, len = adjustments.length ();
4746 bool first = true;
4747 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4749 fprintf (file, "IPA param adjustments: ");
4750 for (i = 0; i < len; i++)
4752 struct ipa_parm_adjustment *adj;
4753 adj = &adjustments[i];
4755 if (!first)
4756 fprintf (file, " ");
4757 else
4758 first = false;
4760 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4761 print_generic_expr (file, parms[adj->base_index], 0);
4762 if (adj->base)
4764 fprintf (file, ", base: ");
4765 print_generic_expr (file, adj->base, 0);
4767 if (adj->new_decl)
4769 fprintf (file, ", new_decl: ");
4770 print_generic_expr (file, adj->new_decl, 0);
4772 if (adj->new_ssa_base)
4774 fprintf (file, ", new_ssa_base: ");
4775 print_generic_expr (file, adj->new_ssa_base, 0);
4778 if (adj->op == IPA_PARM_OP_COPY)
4779 fprintf (file, ", copy_param");
4780 else if (adj->op == IPA_PARM_OP_REMOVE)
4781 fprintf (file, ", remove_param");
4782 else
4783 fprintf (file, ", offset %li", (long) adj->offset);
4784 if (adj->by_ref)
4785 fprintf (file, ", by_ref");
4786 print_node_brief (file, ", type: ", adj->type, 0);
4787 fprintf (file, "\n");
4789 parms.release ();
4792 /* Dump the AV linked list. */
4794 void
4795 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4797 bool comma = false;
4798 fprintf (f, " Aggregate replacements:");
4799 for (; av; av = av->next)
4801 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4802 av->index, av->offset);
4803 print_generic_expr (f, av->value, 0);
4804 comma = true;
4806 fprintf (f, "\n");
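/* The resulting dump looks like this (values illustrative):

     Aggregate replacements: 0[0]=4, 1[32]=&some_var

   i.e. parameter 0 at bit offset 0 is known to be 4 and parameter 1 at bit
   offset 32 is known to be &some_var.  */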
4809 /* Stream out jump function JUMP_FUNC to OB. */
4811 static void
4812 ipa_write_jump_function (struct output_block *ob,
4813 struct ipa_jump_func *jump_func)
4815 struct ipa_agg_jf_item *item;
4816 struct bitpack_d bp;
4817 int i, count;
4819 streamer_write_uhwi (ob, jump_func->type);
4820 switch (jump_func->type)
4822 case IPA_JF_UNKNOWN:
4823 break;
4824 case IPA_JF_CONST:
4825 gcc_assert (
4826 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4827 stream_write_tree (ob, jump_func->value.constant.value, true);
4828 break;
4829 case IPA_JF_PASS_THROUGH:
4830 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4831 if (jump_func->value.pass_through.operation == NOP_EXPR)
4833 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4834 bp = bitpack_create (ob->main_stream);
4835 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4836 streamer_write_bitpack (&bp);
4838 else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
4839 == tcc_unary)
4840 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4841 else
4843 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4844 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4846 break;
4847 case IPA_JF_ANCESTOR:
4848 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4849 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4850 bp = bitpack_create (ob->main_stream);
4851 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4852 streamer_write_bitpack (&bp);
4853 break;
4856 count = vec_safe_length (jump_func->agg.items);
4857 streamer_write_uhwi (ob, count);
4858 if (count)
4860 bp = bitpack_create (ob->main_stream);
4861 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4862 streamer_write_bitpack (&bp);
4865 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4867 streamer_write_uhwi (ob, item->offset);
4868 stream_write_tree (ob, item->value, true);
4871 bp = bitpack_create (ob->main_stream);
4872 bp_pack_value (&bp, !!jump_func->bits, 1);
4873 streamer_write_bitpack (&bp);
4874 if (jump_func->bits)
4876 streamer_write_widest_int (ob, jump_func->bits->value);
4877 streamer_write_widest_int (ob, jump_func->bits->mask);
4879 bp_pack_value (&bp, !!jump_func->m_vr, 1);
4880 streamer_write_bitpack (&bp);
4881 if (jump_func->m_vr)
4883       streamer_write_enum (ob->main_stream, value_range_type,
4884 VR_LAST, jump_func->m_vr->type);
4885 stream_write_tree (ob, jump_func->m_vr->min, true);
4886 stream_write_tree (ob, jump_func->m_vr->max, true);
4890 /* Read in jump function JUMP_FUNC from IB. */
4892 static void
4893 ipa_read_jump_function (struct lto_input_block *ib,
4894 struct ipa_jump_func *jump_func,
4895 struct cgraph_edge *cs,
4896 struct data_in *data_in)
4898 enum jump_func_type jftype;
4899 enum tree_code operation;
4900 int i, count;
4902 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4903 switch (jftype)
4905 case IPA_JF_UNKNOWN:
4906 ipa_set_jf_unknown (jump_func);
4907 break;
4908 case IPA_JF_CONST:
4909 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4910 break;
4911 case IPA_JF_PASS_THROUGH:
4912 operation = (enum tree_code) streamer_read_uhwi (ib);
4913 if (operation == NOP_EXPR)
4915 int formal_id = streamer_read_uhwi (ib);
4916 struct bitpack_d bp = streamer_read_bitpack (ib);
4917 bool agg_preserved = bp_unpack_value (&bp, 1);
4918 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4920 else if (TREE_CODE_CLASS (operation) == tcc_unary)
4922 int formal_id = streamer_read_uhwi (ib);
4923 ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4925 else
4927 tree operand = stream_read_tree (ib, data_in);
4928 int formal_id = streamer_read_uhwi (ib);
4929 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4930 operation);
4932 break;
4933 case IPA_JF_ANCESTOR:
4935 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4936 int formal_id = streamer_read_uhwi (ib);
4937 struct bitpack_d bp = streamer_read_bitpack (ib);
4938 bool agg_preserved = bp_unpack_value (&bp, 1);
4939 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4940 break;
4944 count = streamer_read_uhwi (ib);
4945 vec_alloc (jump_func->agg.items, count);
4946 if (count)
4948 struct bitpack_d bp = streamer_read_bitpack (ib);
4949 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4951 for (i = 0; i < count; i++)
4953 struct ipa_agg_jf_item item;
4954 item.offset = streamer_read_uhwi (ib);
4955 item.value = stream_read_tree (ib, data_in);
4956 jump_func->agg.items->quick_push (item);
4959 struct bitpack_d bp = streamer_read_bitpack (ib);
4960 bool bits_known = bp_unpack_value (&bp, 1);
4961 if (bits_known)
4963 widest_int value = streamer_read_widest_int (ib);
4964 widest_int mask = streamer_read_widest_int (ib);
4965 ipa_set_jfunc_bits (jump_func, value, mask);
4967 else
4968 jump_func->bits = NULL;
4970 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4971 bool vr_known = bp_unpack_value (&vr_bp, 1);
4972 if (vr_known)
4974 enum value_range_type type = streamer_read_enum (ib, value_range_type,
4975 VR_LAST);
4976 tree min = stream_read_tree (ib, data_in);
4977 tree max = stream_read_tree (ib, data_in);
4978 ipa_set_jfunc_vr (jump_func, type, min, max);
4980 else
4981 jump_func->m_vr = NULL;
4984 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4985 relevant to indirect inlining to OB. */
4987 static void
4988 ipa_write_indirect_edge_info (struct output_block *ob,
4989 struct cgraph_edge *cs)
4991 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4992 struct bitpack_d bp;
4994 streamer_write_hwi (ob, ii->param_index);
4995 bp = bitpack_create (ob->main_stream);
4996 bp_pack_value (&bp, ii->polymorphic, 1);
4997 bp_pack_value (&bp, ii->agg_contents, 1);
4998 bp_pack_value (&bp, ii->member_ptr, 1);
4999 bp_pack_value (&bp, ii->by_ref, 1);
5000 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
5001 bp_pack_value (&bp, ii->vptr_changed, 1);
5002 streamer_write_bitpack (&bp);
5003 if (ii->agg_contents || ii->polymorphic)
5004 streamer_write_hwi (ob, ii->offset);
5005 else
5006 gcc_assert (ii->offset == 0);
5008 if (ii->polymorphic)
5010 streamer_write_hwi (ob, ii->otr_token);
5011 stream_write_tree (ob, ii->otr_type, true);
5012 ii->context.stream_out (ob);
5016 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
5017 relevant to indirect inlining from IB. */
5019 static void
5020 ipa_read_indirect_edge_info (struct lto_input_block *ib,
5021 struct data_in *data_in,
5022 struct cgraph_edge *cs)
5024 struct cgraph_indirect_call_info *ii = cs->indirect_info;
5025 struct bitpack_d bp;
5027 ii->param_index = (int) streamer_read_hwi (ib);
5028 bp = streamer_read_bitpack (ib);
5029 ii->polymorphic = bp_unpack_value (&bp, 1);
5030 ii->agg_contents = bp_unpack_value (&bp, 1);
5031 ii->member_ptr = bp_unpack_value (&bp, 1);
5032 ii->by_ref = bp_unpack_value (&bp, 1);
5033 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
5034 ii->vptr_changed = bp_unpack_value (&bp, 1);
5035 if (ii->agg_contents || ii->polymorphic)
5036 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
5037 else
5038 ii->offset = 0;
5039 if (ii->polymorphic)
5041 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
5042 ii->otr_type = stream_read_tree (ib, data_in);
5043 ii->context.stream_in (ib, data_in);
5047 /* Stream out NODE info to OB. */
5049 static void
5050 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
5052 int node_ref;
5053 lto_symtab_encoder_t encoder;
5054 struct ipa_node_params *info = IPA_NODE_REF (node);
5055 int j;
5056 struct cgraph_edge *e;
5057 struct bitpack_d bp;
5059 encoder = ob->decl_state->symtab_node_encoder;
5060 node_ref = lto_symtab_encoder_encode (encoder, node);
5061 streamer_write_uhwi (ob, node_ref);
5063 streamer_write_uhwi (ob, ipa_get_param_count (info));
5064 for (j = 0; j < ipa_get_param_count (info); j++)
5065 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
5066 bp = bitpack_create (ob->main_stream);
5067 gcc_assert (info->analysis_done
5068 || ipa_get_param_count (info) == 0);
5069 gcc_assert (!info->node_enqueued);
5070 gcc_assert (!info->ipcp_orig_node);
5071 for (j = 0; j < ipa_get_param_count (info); j++)
5072 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
5073 streamer_write_bitpack (&bp);
5074 for (j = 0; j < ipa_get_param_count (info); j++)
5076 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
5077 stream_write_tree (ob, ipa_get_type (info, j), true);
5079 for (e = node->callees; e; e = e->next_callee)
5081 struct ipa_edge_args *args = IPA_EDGE_REF (e);
5083 streamer_write_uhwi (ob,
5084 ipa_get_cs_argument_count (args) * 2
5085 + (args->polymorphic_call_contexts != NULL));
5086 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
5088 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
5089 if (args->polymorphic_call_contexts != NULL)
5090 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
5093 for (e = node->indirect_calls; e; e = e->next_callee)
5095 struct ipa_edge_args *args = IPA_EDGE_REF (e);
5097 streamer_write_uhwi (ob,
5098 ipa_get_cs_argument_count (args) * 2
5099 + (args->polymorphic_call_contexts != NULL));
5100 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
5102 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
5103 if (args->polymorphic_call_contexts != NULL)
5104 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
5106 ipa_write_indirect_edge_info (ob, e);
5110 /* Stream in NODE info from IB. */
5112 static void
5113 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
5114 struct data_in *data_in)
5116 struct ipa_node_params *info = IPA_NODE_REF (node);
5117 int k;
5118 struct cgraph_edge *e;
5119 struct bitpack_d bp;
5121 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
5123 for (k = 0; k < ipa_get_param_count (info); k++)
5124 (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
5126 bp = streamer_read_bitpack (ib);
5127 if (ipa_get_param_count (info) != 0)
5128 info->analysis_done = true;
5129 info->node_enqueued = false;
5130 for (k = 0; k < ipa_get_param_count (info); k++)
5131 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
5132 for (k = 0; k < ipa_get_param_count (info); k++)
5134 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
5135 (*info->descriptors)[k].decl_or_type = stream_read_tree (ib, data_in);
5137 for (e = node->callees; e; e = e->next_callee)
5139 struct ipa_edge_args *args = IPA_EDGE_REF (e);
5140 int count = streamer_read_uhwi (ib);
5141 bool contexts_computed = count & 1;
5142 count /= 2;
5144 if (!count)
5145 continue;
5146 vec_safe_grow_cleared (args->jump_functions, count);
5147 if (contexts_computed)
5148 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
5150 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
5152 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5153 data_in);
5154 if (contexts_computed)
5155 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
5158 for (e = node->indirect_calls; e; e = e->next_callee)
5160 struct ipa_edge_args *args = IPA_EDGE_REF (e);
5161 int count = streamer_read_uhwi (ib);
5162 bool contexts_computed = count & 1;
5163 count /= 2;
5165 if (count)
5167 vec_safe_grow_cleared (args->jump_functions, count);
5168 if (contexts_computed)
5169 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
5170 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
5172 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5173 data_in);
5174 if (contexts_computed)
5175 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
5178 ipa_read_indirect_edge_info (ib, data_in, e);
5182 /* Write jump functions for all functions in the current LTO partition.  */
5184 void
5185 ipa_prop_write_jump_functions (void)
5187 struct cgraph_node *node;
5188 struct output_block *ob;
5189 unsigned int count = 0;
5190 lto_symtab_encoder_iterator lsei;
5191 lto_symtab_encoder_t encoder;
5193 if (!ipa_node_params_sum || !ipa_edge_args_sum)
5194 return;
5196 ob = create_output_block (LTO_section_jump_functions);
5197 encoder = ob->decl_state->symtab_node_encoder;
5198 ob->symbol = NULL;
5199 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5200 lsei_next_function_in_partition (&lsei))
5202 node = lsei_cgraph_node (lsei);
5203 if (node->has_gimple_body_p ()
5204 && IPA_NODE_REF (node) != NULL)
5205 count++;
5208 streamer_write_uhwi (ob, count);
5210 /* Process all of the functions. */
5211 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5212 lsei_next_function_in_partition (&lsei))
5214 node = lsei_cgraph_node (lsei);
5215 if (node->has_gimple_body_p ()
5216 && IPA_NODE_REF (node) != NULL)
5217 ipa_write_node_info (ob, node);
5219 streamer_write_char_stream (ob->main_stream, 0);
5220 produce_asm (ob, NULL);
5221 destroy_output_block (ob);
5224 /* Read section in file FILE_DATA of length LEN with data DATA. */
5226 static void
5227 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
5228 size_t len)
5230 const struct lto_function_header *header =
5231 (const struct lto_function_header *) data;
5232 const int cfg_offset = sizeof (struct lto_function_header);
5233 const int main_offset = cfg_offset + header->cfg_size;
5234 const int string_offset = main_offset + header->main_size;
5235 struct data_in *data_in;
5236 unsigned int i;
5237 unsigned int count;
5239 lto_input_block ib_main ((const char *) data + main_offset,
5240 header->main_size, file_data->mode_table);
5242 data_in =
5243 lto_data_in_create (file_data, (const char *) data + string_offset,
5244 header->string_size, vNULL);
5245 count = streamer_read_uhwi (&ib_main);
5247 for (i = 0; i < count; i++)
5249 unsigned int index;
5250 struct cgraph_node *node;
5251 lto_symtab_encoder_t encoder;
5253 index = streamer_read_uhwi (&ib_main);
5254 encoder = file_data->symtab_node_encoder;
5255 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5256 index));
5257 gcc_assert (node->definition);
5258 ipa_read_node_info (&ib_main, node, data_in);
5260 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5261 len);
5262 lto_data_in_delete (data_in);
5265 /* Read IPA-CP jump functions.  */
5267 void
5268 ipa_prop_read_jump_functions (void)
5270 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5271 struct lto_file_decl_data *file_data;
5272 unsigned int j = 0;
5274 ipa_check_create_node_params ();
5275 ipa_check_create_edge_args ();
5276 ipa_register_cgraph_hooks ();
5278 while ((file_data = file_data_vec[j++]))
5280 size_t len;
5281 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
5283 if (data)
5284 ipa_prop_read_section (file_data, data, len);
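/* Stream out the IPA-CP transformation summary of NODE -- its aggregate
   value replacements, known value ranges and known bits -- to OB.  */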
5288 void
5289 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
5291 int node_ref;
5292 unsigned int count = 0;
5293 lto_symtab_encoder_t encoder;
5294 struct ipa_agg_replacement_value *aggvals, *av;
5296 aggvals = ipa_get_agg_replacements_for_node (node);
5297 encoder = ob->decl_state->symtab_node_encoder;
5298 node_ref = lto_symtab_encoder_encode (encoder, node);
5299 streamer_write_uhwi (ob, node_ref);
5301 for (av = aggvals; av; av = av->next)
5302 count++;
5303 streamer_write_uhwi (ob, count);
5305 for (av = aggvals; av; av = av->next)
5307 struct bitpack_d bp;
5309 streamer_write_uhwi (ob, av->offset);
5310 streamer_write_uhwi (ob, av->index);
5311 stream_write_tree (ob, av->value, true);
5313 bp = bitpack_create (ob->main_stream);
5314 bp_pack_value (&bp, av->by_ref, 1);
5315 streamer_write_bitpack (&bp);
5318 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5319 if (ts && vec_safe_length (ts->m_vr) > 0)
5321 count = ts->m_vr->length ();
5322 streamer_write_uhwi (ob, count);
5323 for (unsigned i = 0; i < count; ++i)
5325 struct bitpack_d bp;
5326 ipa_vr *parm_vr = &(*ts->m_vr)[i];
5327 bp = bitpack_create (ob->main_stream);
5328 bp_pack_value (&bp, parm_vr->known, 1);
5329 streamer_write_bitpack (&bp);
5330 if (parm_vr->known)
5332 	  streamer_write_enum (ob->main_stream, value_range_type,
5333 VR_LAST, parm_vr->type);
5334 streamer_write_wide_int (ob, parm_vr->min);
5335 streamer_write_wide_int (ob, parm_vr->max);
5339 else
5340 streamer_write_uhwi (ob, 0);
5342 if (ts && vec_safe_length (ts->bits) > 0)
5344 count = ts->bits->length ();
5345 streamer_write_uhwi (ob, count);
5347 for (unsigned i = 0; i < count; ++i)
5349 const ipa_bits *bits_jfunc = (*ts->bits)[i];
5350 struct bitpack_d bp = bitpack_create (ob->main_stream);
5351 bp_pack_value (&bp, !!bits_jfunc, 1);
5352 streamer_write_bitpack (&bp);
5353 if (bits_jfunc)
5355 streamer_write_widest_int (ob, bits_jfunc->value);
5356 streamer_write_widest_int (ob, bits_jfunc->mask);
5360 else
5361 streamer_write_uhwi (ob, 0);
5364 /* Stream in the IPA-CP transformation summary (aggregate value replacement chain, value ranges and known bits) for NODE from IB.  */
5366 static void
5367 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
5368 data_in *data_in)
5370 struct ipa_agg_replacement_value *aggvals = NULL;
5371 unsigned int count, i;
5373 count = streamer_read_uhwi (ib);
5374   for (i = 0; i < count; i++)
5376 struct ipa_agg_replacement_value *av;
5377 struct bitpack_d bp;
5379 av = ggc_alloc<ipa_agg_replacement_value> ();
5380 av->offset = streamer_read_uhwi (ib);
5381 av->index = streamer_read_uhwi (ib);
5382 av->value = stream_read_tree (ib, data_in);
5383 bp = streamer_read_bitpack (ib);
5384 av->by_ref = bp_unpack_value (&bp, 1);
5385 av->next = aggvals;
5386 aggvals = av;
5388 ipa_set_node_agg_value_chain (node, aggvals);
5390 count = streamer_read_uhwi (ib);
5391 if (count > 0)
5393 ipcp_grow_transformations_if_necessary ();
5395 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5396 vec_safe_grow_cleared (ts->m_vr, count);
5397 for (i = 0; i < count; i++)
5399 ipa_vr *parm_vr;
5400 parm_vr = &(*ts->m_vr)[i];
5401 struct bitpack_d bp;
5402 bp = streamer_read_bitpack (ib);
5403 parm_vr->known = bp_unpack_value (&bp, 1);
5404 if (parm_vr->known)
5406 parm_vr->type = streamer_read_enum (ib, value_range_type,
5407 VR_LAST);
5408 parm_vr->min = streamer_read_wide_int (ib);
5409 parm_vr->max = streamer_read_wide_int (ib);
5413 count = streamer_read_uhwi (ib);
5414 if (count > 0)
5416 ipcp_grow_transformations_if_necessary ();
5418 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5419 vec_safe_grow_cleared (ts->bits, count);
5421 for (i = 0; i < count; i++)
5423 struct bitpack_d bp = streamer_read_bitpack (ib);
5424 bool known = bp_unpack_value (&bp, 1);
5425 if (known)
5427 ipa_bits *bits
5428 = ipa_get_ipa_bits_for_value (streamer_read_widest_int (ib),
5429 streamer_read_widest_int (ib));
5430 (*ts->bits)[i] = bits;
5436 /* Write IPA-CP transformation summaries for all functions in the current partition.  */
5438 void
5439 ipcp_write_transformation_summaries (void)
5441 struct cgraph_node *node;
5442 struct output_block *ob;
5443 unsigned int count = 0;
5444 lto_symtab_encoder_iterator lsei;
5445 lto_symtab_encoder_t encoder;
5447 ob = create_output_block (LTO_section_ipcp_transform);
5448 encoder = ob->decl_state->symtab_node_encoder;
5449 ob->symbol = NULL;
5450 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5451 lsei_next_function_in_partition (&lsei))
5453 node = lsei_cgraph_node (lsei);
5454 if (node->has_gimple_body_p ())
5455 count++;
5458 streamer_write_uhwi (ob, count);
5460 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5461 lsei_next_function_in_partition (&lsei))
5463 node = lsei_cgraph_node (lsei);
5464 if (node->has_gimple_body_p ())
5465 write_ipcp_transformation_info (ob, node);
5467 streamer_write_char_stream (ob->main_stream, 0);
5468 produce_asm (ob, NULL);
5469 destroy_output_block (ob);
5472 /* Read replacements section in file FILE_DATA of length LEN with data
5473 DATA. */
5475 static void
5476 read_replacements_section (struct lto_file_decl_data *file_data,
5477 const char *data,
5478 size_t len)
5480 const struct lto_function_header *header =
5481 (const struct lto_function_header *) data;
5482 const int cfg_offset = sizeof (struct lto_function_header);
5483 const int main_offset = cfg_offset + header->cfg_size;
5484 const int string_offset = main_offset + header->main_size;
5485 struct data_in *data_in;
5486 unsigned int i;
5487 unsigned int count;
5489 lto_input_block ib_main ((const char *) data + main_offset,
5490 header->main_size, file_data->mode_table);
5492 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5493 header->string_size, vNULL);
5494 count = streamer_read_uhwi (&ib_main);
5496 for (i = 0; i < count; i++)
5498 unsigned int index;
5499 struct cgraph_node *node;
5500 lto_symtab_encoder_t encoder;
5502 index = streamer_read_uhwi (&ib_main);
5503 encoder = file_data->symtab_node_encoder;
5504 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5505 index));
5506 gcc_assert (node->definition);
5507 read_ipcp_transformation_info (&ib_main, node, data_in);
5509   lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
5510 len);
5511 lto_data_in_delete (data_in);
5514 /* Read IPA-CP transformation summaries.  */
5516 void
5517 ipcp_read_transformation_summaries (void)
5519 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5520 struct lto_file_decl_data *file_data;
5521 unsigned int j = 0;
5523 while ((file_data = file_data_vec[j++]))
5525 size_t len;
5526 const char *data = lto_get_section_data (file_data,
5527 LTO_section_ipcp_transform,
5528 NULL, &len);
5529 if (data)
5530 read_replacements_section (file_data, data, len);
5534 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5535 NODE. */
5537 static void
5538 adjust_agg_replacement_values (struct cgraph_node *node,
5539 struct ipa_agg_replacement_value *aggval)
5541 struct ipa_agg_replacement_value *v;
5542 int i, c = 0, d = 0, *adj;
5544 if (!node->clone.combined_args_to_skip)
5545 return;
5547 for (v = aggval; v; v = v->next)
5549 gcc_assert (v->index >= 0);
5550 if (c < v->index)
5551 c = v->index;
5553 c++;
5555 adj = XALLOCAVEC (int, c);
5556 for (i = 0; i < c; i++)
5557 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5559 adj[i] = -1;
5560 d++;
5562 else
5563 adj[i] = i - d;
5565 for (v = aggval; v; v = v->next)
5566 v->index = adj[v->index];
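/* Worked example (illustrative): with combined_args_to_skip = { 1 } and
   aggregate values recorded for indices 0, 2 and 3, the function builds

     adj = { 0, -1, 1, 2 }

   so the values formerly at indices 2 and 3 are remapped to the new
   positions 1 and 2, matching the clone's shortened parameter list.  */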
5569 /* Dominator walker driving the ipcp modification phase. */
5571 class ipcp_modif_dom_walker : public dom_walker
5573 public:
5574 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
5575 vec<ipa_param_descriptor, va_gc> *descs,
5576 struct ipa_agg_replacement_value *av,
5577 bool *sc, bool *cc)
5578 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5579 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5581 virtual edge before_dom_children (basic_block);
5583 private:
5584 struct ipa_func_body_info *m_fbi;
5585 vec<ipa_param_descriptor, va_gc> *m_descriptors;
5586 struct ipa_agg_replacement_value *m_aggval;
5587 bool *m_something_changed, *m_cfg_changed;
5590 edge
5591 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5593 gimple_stmt_iterator gsi;
5594 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5596 struct ipa_agg_replacement_value *v;
5597 gimple *stmt = gsi_stmt (gsi);
5598 tree rhs, val, t;
5599 HOST_WIDE_INT offset, size;
5600 int index;
5601 bool by_ref, vce;
5603 if (!gimple_assign_load_p (stmt))
5604 continue;
5605 rhs = gimple_assign_rhs1 (stmt);
5606 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5607 continue;
5609 vce = false;
5610 t = rhs;
5611 while (handled_component_p (t))
5613 /* V_C_E can do things like convert an array of integers to one
5614 bigger integer and similar things we do not handle below. */
5615 if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
5617 vce = true;
5618 break;
5620 t = TREE_OPERAND (t, 0);
5622 if (vce)
5623 continue;
5625 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
5626 &offset, &size, &by_ref))
5627 continue;
5628 for (v = m_aggval; v; v = v->next)
5629 if (v->index == index
5630 && v->offset == offset)
5631 break;
5632 if (!v
5633 || v->by_ref != by_ref
5634 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5635 continue;
5637 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5638 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5640 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5641 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5642 else if (TYPE_SIZE (TREE_TYPE (rhs))
5643 == TYPE_SIZE (TREE_TYPE (v->value)))
5644 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5645 else
5647 if (dump_file)
5649 fprintf (dump_file, " const ");
5650 print_generic_expr (dump_file, v->value, 0);
5651 fprintf (dump_file, " can't be converted to type of ");
5652 print_generic_expr (dump_file, rhs, 0);
5653 fprintf (dump_file, "\n");
5655 continue;
5658 else
5659 val = v->value;
5661 if (dump_file && (dump_flags & TDF_DETAILS))
5663 fprintf (dump_file, "Modifying stmt:\n ");
5664 print_gimple_stmt (dump_file, stmt, 0, 0);
5666 gimple_assign_set_rhs_from_tree (&gsi, val);
5667 update_stmt (stmt);
5669 if (dump_file && (dump_flags & TDF_DETAILS))
5671 fprintf (dump_file, "into:\n ");
5672 print_gimple_stmt (dump_file, stmt, 0, 0);
5673 fprintf (dump_file, "\n");
5676 *m_something_changed = true;
5677 if (maybe_clean_eh_stmt (stmt)
5678 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5679 *m_cfg_changed = true;
5681 return NULL;
5684 /* Update bits info of formal parameters as described in
5685 ipcp_transformation_summary. */
5687 static void
5688 ipcp_update_bits (struct cgraph_node *node)
5690 tree parm = DECL_ARGUMENTS (node->decl);
5691 tree next_parm = parm;
5692 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5694 if (!ts || vec_safe_length (ts->bits) == 0)
5695 return;
5697 vec<ipa_bits *, va_gc> &bits = *ts->bits;
5698 unsigned count = bits.length ();
5700 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5702 if (node->clone.combined_args_to_skip
5703 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5704 continue;
5706 gcc_checking_assert (parm);
5707 next_parm = DECL_CHAIN (parm);
5709 if (!bits[i]
5710 || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
5711 || POINTER_TYPE_P (TREE_TYPE (parm)))
5712 || !is_gimple_reg (parm))
5713 continue;
5715 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5716 if (!ddef)
5717 continue;
5719 if (dump_file)
5721 fprintf (dump_file, "Adjusting mask for param %u to ", i);
5722 print_hex (bits[i]->mask, dump_file);
5723 fprintf (dump_file, "\n");
5726 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5728 unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
5729 signop sgn = TYPE_SIGN (TREE_TYPE (ddef));
5731 wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
5732 | wide_int::from (bits[i]->value, prec, sgn);
5733 set_nonzero_bits (ddef, nonzero_bits);
5735 else
5737 unsigned tem = bits[i]->mask.to_uhwi ();
5738 unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
5739 unsigned align = tem & -tem;
5740 unsigned misalign = bitpos & (align - 1);
5742 if (align > 1)
5744 if (dump_file)
5745 fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);
5747 unsigned old_align, old_misalign;
5748 struct ptr_info_def *pi = get_ptr_info (ddef);
5749 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5751 if (old_known
5752 && old_align > align)
5754 if (dump_file)
5756 fprintf (dump_file, "But alignment was already %u.\n", old_align);
5757 if ((old_misalign & (align - 1)) != misalign)
5758 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5759 old_misalign, misalign);
5761 continue;
5764 if (old_known
5765 && ((misalign & (old_align - 1)) != old_misalign)
5766 && dump_file)
5767 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5768 old_misalign, misalign);
5770 set_ptr_info_alignment (pi, align, misalign);
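/* Numeric example (illustrative): for a pointer parameter with
   mask = 0xfffffff8 and value = 0x4, the low three bits are known, so
   tem & -tem yields align = 8 and bitpos & (align - 1) yields misalign = 4;
   the pointer is known to be congruent to 4 modulo 8.  */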
5776 /* Update value range of formal parameters as described in
5777 ipcp_transformation_summary. */
5779 static void
5780 ipcp_update_vr (struct cgraph_node *node)
5782 tree fndecl = node->decl;
5783 tree parm = DECL_ARGUMENTS (fndecl);
5784 tree next_parm = parm;
5785 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5786 if (!ts || vec_safe_length (ts->m_vr) == 0)
5787 return;
5788 const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
5789 unsigned count = vr.length ();
5791 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5793 if (node->clone.combined_args_to_skip
5794 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5795 continue;
5796 gcc_checking_assert (parm);
5797 next_parm = DECL_CHAIN (parm);
5798 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5800 if (!ddef || !is_gimple_reg (parm))
5801 continue;
5803 if (vr[i].known
5804 && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
5806 tree type = TREE_TYPE (ddef);
5807 unsigned prec = TYPE_PRECISION (type);
5808 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5810 if (dump_file)
5812 fprintf (dump_file, "Setting value range of param %u ", i);
5813 fprintf (dump_file, "%s[",
5814 (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
5815 print_decs (vr[i].min, dump_file);
5816 fprintf (dump_file, ", ");
5817 print_decs (vr[i].max, dump_file);
5818 fprintf (dump_file, "]\n");
5820 set_range_info (ddef, vr[i].type,
5821 wide_int_storage::from (vr[i].min, prec,
5822 TYPE_SIGN (type)),
5823 wide_int_storage::from (vr[i].max, prec,
5824 TYPE_SIGN (type)));
5826 else if (POINTER_TYPE_P (TREE_TYPE (ddef))
5827 && vr[i].type == VR_ANTI_RANGE
5828 && wi::eq_p (vr[i].min, 0)
5829 && wi::eq_p (vr[i].max, 0))
5831 if (dump_file)
5832 fprintf (dump_file, "Setting nonnull for %u\n", i);
5833 set_ptr_nonnull (ddef);
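/* Numeric examples (illustrative): an integral parameter recorded with
   VR_RANGE [0, 255] gets that range attached to its default-def SSA name
   via set_range_info, while a pointer recorded with the anti-range ~[0, 0]
   is marked non-null via set_ptr_nonnull.  */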
5839 /* IPCP transformation phase doing propagation of aggregate values. */
5841 unsigned int
5842 ipcp_transform_function (struct cgraph_node *node)
5844 vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
5845 struct ipa_func_body_info fbi;
5846 struct ipa_agg_replacement_value *aggval;
5847 int param_count;
5848 bool cfg_changed = false, something_changed = false;
5850 gcc_checking_assert (cfun);
5851 gcc_checking_assert (current_function_decl);
5853 if (dump_file)
5854 fprintf (dump_file, "Modification phase of node %s/%i\n",
5855 node->name (), node->order);
5857 ipcp_update_bits (node);
5858 ipcp_update_vr (node);
5859 aggval = ipa_get_agg_replacements_for_node (node);
5860 if (!aggval)
5861 return 0;
5862 param_count = count_formal_params (node->decl);
5863 if (param_count == 0)
5864 return 0;
5865 adjust_agg_replacement_values (node, aggval);
5866 if (dump_file)
5867 ipa_dump_agg_replacement_values (dump_file, aggval);
5869 fbi.node = node;
5870 fbi.info = NULL;
5871 fbi.bb_infos = vNULL;
5872 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5873 fbi.param_count = param_count;
5874 fbi.aa_walked = 0;
5876 vec_safe_grow_cleared (descriptors, param_count);
5877 ipa_populate_param_decls (node, *descriptors);
5878 calculate_dominance_info (CDI_DOMINATORS);
5879 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5880 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5882 int i;
5883 struct ipa_bb_info *bi;
5884 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5885 free_ipa_bb_info (bi);
5886 fbi.bb_infos.release ();
5887 free_dominance_info (CDI_DOMINATORS);
5888 (*ipcp_transformations)[node->uid].agg_values = NULL;
5889 (*ipcp_transformations)[node->uid].bits = NULL;
5890 (*ipcp_transformations)[node->uid].m_vr = NULL;
5892 vec_free (descriptors);
5894 if (!something_changed)
5895 return 0;
5896 else if (cfg_changed)
5897 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5898 else
5899 return TODO_update_ssa_only_virtuals;
5902 #include "gt-ipa-prop.h"