gcc/ipa-prop.c
1 /* Interprocedural analyses.
2 Copyright (C) 2005-2017 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "alloc-pool.h"
28 #include "tree-pass.h"
29 #include "ssa.h"
30 #include "tree-streamer.h"
31 #include "cgraph.h"
32 #include "diagnostic.h"
33 #include "fold-const.h"
34 #include "gimple-fold.h"
35 #include "tree-eh.h"
36 #include "calls.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "gimplify-me.h"
42 #include "gimple-walk.h"
43 #include "symbol-summary.h"
44 #include "ipa-prop.h"
45 #include "tree-cfg.h"
46 #include "tree-dfa.h"
47 #include "tree-inline.h"
48 #include "ipa-fnsummary.h"
49 #include "gimple-pretty-print.h"
50 #include "params.h"
51 #include "ipa-utils.h"
52 #include "dbgcnt.h"
53 #include "domwalk.h"
54 #include "builtins.h"
56 /* Function summary where the parameter infos are actually stored. */
57 ipa_node_params_t *ipa_node_params_sum = NULL;
58 /* Vector of IPA-CP transformation data for each clone. */
59 vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
60 /* Edge summary for IPA-CP edge information. */
61 ipa_edge_args_sum_t *ipa_edge_args_sum;
63 /* Traits for a hash table for reusing already existing ipa_bits. */
65 struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
67 typedef ipa_bits *value_type;
68 typedef ipa_bits *compare_type;
69 static hashval_t
70 hash (const ipa_bits *p)
72 hashval_t t = (hashval_t) p->value.to_shwi ();
73 return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
75 static bool
76 equal (const ipa_bits *a, const ipa_bits *b)
78 return a->value == b->value && a->mask == b->mask;
80 static void
81 mark_empty (ipa_bits *&p)
83 p = NULL;
85 static bool
86 is_empty (const ipa_bits *p)
88 return p == NULL;
90 static bool
91 is_deleted (const ipa_bits *p)
93 return p == reinterpret_cast<const ipa_bits *> (1);
95 static void
96 mark_deleted (ipa_bits *&p)
98 p = reinterpret_cast<ipa_bits *> (1);
102 /* Hash table to avoid repeated allocations of equal ipa_bits. */
103 static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
105 /* Traits for a hash table for reusing value_ranges used for IPA. Note that
106 the equiv bitmap is not hashed and is expected to be NULL. */
108 struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range *>
110 typedef value_range *value_type;
111 typedef value_range *compare_type;
112 static hashval_t
113 hash (const value_range *p)
115 gcc_checking_assert (!p->equiv);
116 hashval_t t = (hashval_t) p->type;
117 t = iterative_hash_expr (p->min, t);
118 return iterative_hash_expr (p->max, t);
120 static bool
121 equal (const value_range *a, const value_range *b)
123 return a->type == b->type && a->min == b->min && a->max == b->max;
125 static void
126 mark_empty (value_range *&p)
128 p = NULL;
130 static bool
131 is_empty (const value_range *p)
133 return p == NULL;
135 static bool
136 is_deleted (const value_range *p)
138 return p == reinterpret_cast<const value_range *> (1);
140 static void
141 mark_deleted (value_range *&p)
143 p = reinterpret_cast<value_range *> (1);
148 /* Hash table to avoid repeated allocations of equal value_ranges. */
148 static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
150 /* Holders of ipa cgraph hooks: */
151 static struct cgraph_node_hook_list *function_insertion_hook_holder;
153 /* Description of a reference to an IPA constant. */
154 struct ipa_cst_ref_desc
156 /* Edge that corresponds to the statement which took the reference. */
157 struct cgraph_edge *cs;
158 /* Linked list of duplicates created when call graph edges are cloned. */
159 struct ipa_cst_ref_desc *next_duplicate;
160 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
161 is out of control. */
162 int refcount;
165 /* Allocation pool for reference descriptions. */
167 static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
168 ("IPA-PROP ref descriptions");
170 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
171 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
173 static bool
174 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
176 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
178 if (!fs_opts)
179 return false;
180 return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
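/* For illustration (a hypothetical function, not from any testcase), a node
   whose decl carries

     __attribute__ ((optimize ("O0")))
     int widget_size (int n) { return n * 4; }

   has opt_for_fn (decl, optimize) equal to zero, so the predicate above
   returns true and the function body is not analyzed for IPA-CP.  */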
183 /* Return the index of the formal parameter whose tree is PTREE, looking it up
184 in the vector of parameter DESCRIPTORS. */
186 static int
187 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
188 tree ptree)
190 int i, count;
192 count = vec_safe_length (descriptors);
193 for (i = 0; i < count; i++)
194 if ((*descriptors)[i].decl_or_type == ptree)
195 return i;
197 return -1;
200 /* Return index of the formal whose tree is PTREE in function which corresponds
201 to INFO. */
204 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
206 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
209 /* Populate the decl_or_type fields in parameter DESCRIPTORS that correspond to
210 NODE. */
212 static void
213 ipa_populate_param_decls (struct cgraph_node *node,
214 vec<ipa_param_descriptor, va_gc> &descriptors)
216 tree fndecl;
217 tree fnargs;
218 tree parm;
219 int param_num;
221 fndecl = node->decl;
222 gcc_assert (gimple_has_body_p (fndecl));
223 fnargs = DECL_ARGUMENTS (fndecl);
224 param_num = 0;
225 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
227 descriptors[param_num].decl_or_type = parm;
228 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
229 true);
230 param_num++;
234 /* Return how many formal parameters FNDECL has. */
237 count_formal_params (tree fndecl)
239 tree parm;
240 int count = 0;
241 gcc_assert (gimple_has_body_p (fndecl));
243 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
244 count++;
246 return count;
249 /* Dump a printable reference to the Ith formal parameter of the function
250 corresponding to INFO into FILE.  Note there is no setter function as the
251 descriptor array is built just once using ipa_initialize_node_params. */
253 void
254 ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
256 fprintf (file, "param #%i", i);
257 if ((*info->descriptors)[i].decl_or_type)
259 fprintf (file, " ");
260 print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
264 /* If necessary, allocate vector of parameter descriptors in info of NODE.
265 Return true if they were allocated, false if not. */
267 static bool
268 ipa_alloc_node_params (struct cgraph_node *node, int param_count)
270 struct ipa_node_params *info = IPA_NODE_REF (node);
272 if (!info->descriptors && param_count)
274 vec_safe_grow_cleared (info->descriptors, param_count);
275 return true;
277 else
278 return false;
281 /* Initialize the ipa_node_params structure associated with NODE by counting
282 the function parameters, creating the descriptors and populating their
283 param_decls. */
285 void
286 ipa_initialize_node_params (struct cgraph_node *node)
288 struct ipa_node_params *info = IPA_NODE_REF (node);
290 if (!info->descriptors
291 && ipa_alloc_node_params (node, count_formal_params (node->decl)))
292 ipa_populate_param_decls (node, *info->descriptors);
295 /* Print the jump functions associated with call graph edge CS to file F. */
297 static void
298 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
300 int i, count;
302 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
303 for (i = 0; i < count; i++)
305 struct ipa_jump_func *jump_func;
306 enum jump_func_type type;
308 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
309 type = jump_func->type;
311 fprintf (f, " param %d: ", i);
312 if (type == IPA_JF_UNKNOWN)
313 fprintf (f, "UNKNOWN\n");
314 else if (type == IPA_JF_CONST)
316 tree val = jump_func->value.constant.value;
317 fprintf (f, "CONST: ");
318 print_generic_expr (f, val);
319 if (TREE_CODE (val) == ADDR_EXPR
320 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
322 fprintf (f, " -> ");
323 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
325 fprintf (f, "\n");
327 else if (type == IPA_JF_PASS_THROUGH)
329 fprintf (f, "PASS THROUGH: ");
330 fprintf (f, "%d, op %s",
331 jump_func->value.pass_through.formal_id,
332 get_tree_code_name(jump_func->value.pass_through.operation));
333 if (jump_func->value.pass_through.operation != NOP_EXPR)
335 fprintf (f, " ");
336 print_generic_expr (f, jump_func->value.pass_through.operand);
338 if (jump_func->value.pass_through.agg_preserved)
339 fprintf (f, ", agg_preserved");
340 fprintf (f, "\n");
342 else if (type == IPA_JF_ANCESTOR)
344 fprintf (f, "ANCESTOR: ");
345 fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
346 jump_func->value.ancestor.formal_id,
347 jump_func->value.ancestor.offset);
348 if (jump_func->value.ancestor.agg_preserved)
349 fprintf (f, ", agg_preserved");
350 fprintf (f, "\n");
353 if (jump_func->agg.items)
355 struct ipa_agg_jf_item *item;
356 int j;
358 fprintf (f, " Aggregate passed by %s:\n",
359 jump_func->agg.by_ref ? "reference" : "value");
360 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
362 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
363 item->offset);
364 if (TYPE_P (item->value))
365 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
366 tree_to_uhwi (TYPE_SIZE (item->value)));
367 else
369 fprintf (f, "cst: ");
370 print_generic_expr (f, item->value);
372 fprintf (f, "\n");
376 struct ipa_polymorphic_call_context *ctx
377 = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
378 if (ctx && !ctx->useless_p ())
380 fprintf (f, " Context: ");
381 ctx->dump (f);
384 if (jump_func->bits)
386 fprintf (f, " value: ");
387 print_hex (jump_func->bits->value, f);
388 fprintf (f, ", mask: ");
389 print_hex (jump_func->bits->mask, f);
390 fprintf (f, "\n");
392 else
393 fprintf (f, " Unknown bits\n");
395 if (jump_func->m_vr)
397 fprintf (f, " VR ");
398 fprintf (f, "%s[",
399 (jump_func->m_vr->type == VR_ANTI_RANGE) ? "~" : "");
400 print_decs (jump_func->m_vr->min, f);
401 fprintf (f, ", ");
402 print_decs (jump_func->m_vr->max, f);
403 fprintf (f, "]\n");
405 else
406 fprintf (f, " Unknown VR\n");
411 /* Print the jump functions of all arguments on all call graph edges going from
412 NODE to file F. */
414 void
415 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
417 struct cgraph_edge *cs;
419 fprintf (f, " Jump functions of caller %s:\n", node->dump_name ());
420 for (cs = node->callees; cs; cs = cs->next_callee)
422 if (!ipa_edge_args_info_available_for_edge_p (cs))
423 continue;
425 fprintf (f, " callsite %s -> %s : \n",
426 node->dump_name (),
427 cs->callee->dump_name ());
428 ipa_print_node_jump_functions_for_edge (f, cs);
431 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
433 struct cgraph_indirect_call_info *ii;
434 if (!ipa_edge_args_info_available_for_edge_p (cs))
435 continue;
437 ii = cs->indirect_info;
438 if (ii->agg_contents)
439 fprintf (f, " indirect %s callsite, calling param %i, "
440 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
441 ii->member_ptr ? "member ptr" : "aggregate",
442 ii->param_index, ii->offset,
443 ii->by_ref ? "by reference" : "by value");
444 else
445 fprintf (f, " indirect %s callsite, calling param %i, "
446 "offset " HOST_WIDE_INT_PRINT_DEC,
447 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
448 ii->offset);
450 if (cs->call_stmt)
452 fprintf (f, ", for stmt ");
453 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
455 else
456 fprintf (f, "\n");
457 if (ii->polymorphic)
458 ii->context.dump (f);
459 ipa_print_node_jump_functions_for_edge (f, cs);
463 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
465 void
466 ipa_print_all_jump_functions (FILE *f)
468 struct cgraph_node *node;
470 fprintf (f, "\nJump functions:\n");
471 FOR_EACH_FUNCTION (node)
473 ipa_print_node_jump_functions (f, node);
477 /* Set JFUNC to be a "we really know nothing" jump function. */
479 static void
480 ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
482 jfunc->type = IPA_JF_UNKNOWN;
483 jfunc->bits = NULL;
484 jfunc->m_vr = NULL;
487 /* Set DST to be a copy of another jump function SRC (to be used by jump function
488 combination code).  The two functions will share their rdesc. */
490 static void
491 ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
492 struct ipa_jump_func *src)
495 gcc_checking_assert (src->type == IPA_JF_CONST);
496 dst->type = IPA_JF_CONST;
497 dst->value.constant = src->value.constant;
500 /* Set JFUNC to be a constant jump function. */
502 static void
503 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
504 struct cgraph_edge *cs)
506 jfunc->type = IPA_JF_CONST;
507 jfunc->value.constant.value = unshare_expr_without_location (constant);
509 if (TREE_CODE (constant) == ADDR_EXPR
510 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
512 struct ipa_cst_ref_desc *rdesc;
514 rdesc = ipa_refdesc_pool.allocate ();
515 rdesc->cs = cs;
516 rdesc->next_duplicate = NULL;
517 rdesc->refcount = 1;
518 jfunc->value.constant.rdesc = rdesc;
520 else
521 jfunc->value.constant.rdesc = NULL;
524 /* Set JFUNC to be a simple pass-through jump function. */
525 static void
526 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
527 bool agg_preserved)
529 jfunc->type = IPA_JF_PASS_THROUGH;
530 jfunc->value.pass_through.operand = NULL_TREE;
531 jfunc->value.pass_through.formal_id = formal_id;
532 jfunc->value.pass_through.operation = NOP_EXPR;
533 jfunc->value.pass_through.agg_preserved = agg_preserved;
536 /* Set JFUNC to be a unary pass-through jump function. */
538 static void
539 ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
540 enum tree_code operation)
542 jfunc->type = IPA_JF_PASS_THROUGH;
543 jfunc->value.pass_through.operand = NULL_TREE;
544 jfunc->value.pass_through.formal_id = formal_id;
545 jfunc->value.pass_through.operation = operation;
546 jfunc->value.pass_through.agg_preserved = false;
548 /* Set JFUNC to be an arithmetic pass-through jump function. */
550 static void
551 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
552 tree operand, enum tree_code operation)
554 jfunc->type = IPA_JF_PASS_THROUGH;
555 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
556 jfunc->value.pass_through.formal_id = formal_id;
557 jfunc->value.pass_through.operation = operation;
558 jfunc->value.pass_through.agg_preserved = false;
561 /* Set JFUNC to be an ancestor jump function. */
563 static void
564 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
565 int formal_id, bool agg_preserved)
567 jfunc->type = IPA_JF_ANCESTOR;
568 jfunc->value.ancestor.formal_id = formal_id;
569 jfunc->value.ancestor.offset = offset;
570 jfunc->value.ancestor.agg_preserved = agg_preserved;
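/* As a rough illustration (hypothetical caller and types), the setters above
   correspond to argument shapes such as:

     void caller (struct B *b, int i)
     {
       callee (b, i, i + 4, &b->base);
     }

   The first two arguments become simple pass-through jump functions, the
   third an arithmetic pass-through (PLUS_EXPR with operand 4), and the last
   an ancestor jump function carrying the offset of the "base" field.  */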
573 /* Get IPA BB information about the given BB.  FBI is the context of analysis
574 of this function body. */
576 static struct ipa_bb_info *
577 ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
579 gcc_checking_assert (fbi);
580 return &fbi->bb_infos[bb->index];
583 /* Structure to be passed in between detect_type_change and
584 check_stmt_for_type_change. */
586 struct prop_type_change_info
588 /* Offset into the object where there is the virtual method pointer we are
589 looking for. */
590 HOST_WIDE_INT offset;
591 /* The declaration or SSA_NAME pointer of the base that we are checking for
592 type change. */
593 tree object;
594 /* Set to true if dynamic type change has been detected. */
595 bool type_maybe_changed;
598 /* Return true if STMT can modify a virtual method table pointer.
600 This function makes special assumptions about both constructors and
601 destructors which are all the functions that are allowed to alter the VMT
602 pointers. It assumes that destructors begin with assignment into all VMT
603 pointers and that constructors essentially behave in the following way:
605 1) The very first thing they do is that they call constructors of ancestor
606 sub-objects that have them.
608 2) Then VMT pointers of this and all its ancestors are set to new values
609 corresponding to the type associated with the constructor.
611 3) Only afterwards, other stuff such as constructor of member sub-objects
612 and the code written by the user is run. Only this may include calling
613 virtual functions, directly or indirectly.
615 There is no way to call a constructor of an ancestor sub-object in any
616 other way.
618 This means that we do not have to care whether constructors get the correct
619 type information because they will always change it (in fact, if we define
620 the type to be given by the VMT pointer, it is undefined).
622 The most important fact to derive from the above is that if, for some
623 statement in the section 3, we try to detect whether the dynamic type has
624 changed, we can safely ignore all calls as we examine the function body
625 backwards until we reach statements in section 2 because these calls cannot
626 be ancestor constructors or destructors (if the input is not bogus) and so
627 do not change the dynamic type (this holds true only for automatically
628 allocated objects but at the moment we devirtualize only these). We then
629 must detect that statements in section 2 change the dynamic type and can try
630 to derive the new type. That is enough and we can stop, we will never see
631 the calls into constructors of sub-objects in this code. Therefore we can
632 safely ignore all call statements that we traverse.
635 static bool
636 stmt_may_be_vtbl_ptr_store (gimple *stmt)
638 if (is_gimple_call (stmt))
639 return false;
640 if (gimple_clobber_p (stmt))
641 return false;
642 else if (is_gimple_assign (stmt))
644 tree lhs = gimple_assign_lhs (stmt);
646 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
648 if (flag_strict_aliasing
649 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
650 return false;
652 if (TREE_CODE (lhs) == COMPONENT_REF
653 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
654 return false;
655 /* In the future we might want to use get_base_ref_and_offset to find
656 if there is a field corresponding to the offset and if so, proceed
657 almost like if it was a component ref. */
660 return true;
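/* To make the constructor assumptions above concrete, a minimal C++ sketch
   (types invented for illustration):

     struct A { virtual void f (); };
     struct B : A { B (); virtual void f (); };

     B::B ()
     {
       // 1) the ancestor sub-object constructor A::A () runs first
       // 2) the VMT pointers of *this are then set to B's tables
       // 3) only afterwards does user-written code run and possibly call f ()
     }

   A backward walk that starts at a call made in phase 3 therefore only needs
   to recognize the VMT pointer stores of phase 2.  */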
663 /* Callback of walk_aliased_vdefs and a helper function for detect_type_change
664 to check whether a particular statement may modify the virtual table
665 pointer.  It stores its result into DATA, which points to a
666 prop_type_change_info structure. */
668 static bool
669 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
671 gimple *stmt = SSA_NAME_DEF_STMT (vdef);
672 struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
674 if (stmt_may_be_vtbl_ptr_store (stmt))
676 tci->type_maybe_changed = true;
677 return true;
679 else
680 return false;
683 /* See if ARG is a PARM_DECL describing an instance passed by pointer
684 or reference in FUNCTION.  Return true if the dynamic type may change
685 between the beginning of the function and the invocation of CALL.
687 Generally functions are not allowed to change the type of such instances,
688 but they may call destructors.  We assume that methods cannot destroy the THIS
689 pointer.  Also, as a special case, constructors and destructors may change
690 the type of the THIS pointer. */
692 static bool
693 param_type_may_change_p (tree function, tree arg, gimple *call)
695 /* Pure functions cannot change the dynamic type;
696 that requires writing to memory. */
697 if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
698 return false;
699 /* We need to check if we are within an inlined constructor
700 or destructor (ideally we would have a way to check that the
701 inlined cdtor is actually working on ARG, but we don't have
702 an easy tie on this, so punt on all non-pure cdtors).
703 We may also record the types of cdtors and, once we know the type
704 of the instance, match them.
706 Also code unification optimizations may merge calls from
707 different blocks making return values unreliable.  So
708 do nothing during late optimization. */
709 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
710 return true;
711 if (TREE_CODE (arg) == SSA_NAME
712 && SSA_NAME_IS_DEFAULT_DEF (arg)
713 && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
715 /* Normal (non-THIS) argument. */
716 if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
717 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
718 /* THIS pointer of an method - here we want to watch constructors
719 and destructors as those definitely may change the dynamic
720 type. */
721 || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
722 && !DECL_CXX_CONSTRUCTOR_P (function)
723 && !DECL_CXX_DESTRUCTOR_P (function)
724 && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
726 /* Walk the inline stack and watch out for ctors/dtors. */
727 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
728 block = BLOCK_SUPERCONTEXT (block))
729 if (inlined_polymorphic_ctor_dtor_block_p (block, false))
730 return true;
731 return false;
734 return true;
737 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
738 callsite CALL) by looking for assignments to its virtual table pointer.  If
739 so, return true and fill in the jump function JFUNC with relevant type
740 information or set it to unknown. ARG is the object itself (not a pointer
741 to it, unless dereferenced). BASE is the base of the memory access as
742 returned by get_ref_base_and_extent, as is the offset.
744 This is a helper function for detect_type_change and detect_type_change_ssa
745 that does the heavy work which is usually unnecessary. */
747 static bool
748 detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
749 gcall *call, struct ipa_jump_func *jfunc,
750 HOST_WIDE_INT offset)
752 struct prop_type_change_info tci;
753 ao_ref ao;
754 bool entry_reached = false;
756 gcc_checking_assert (DECL_P (arg)
757 || TREE_CODE (arg) == MEM_REF
758 || handled_component_p (arg));
760 comp_type = TYPE_MAIN_VARIANT (comp_type);
762 /* Const calls cannot call virtual methods through VMT and so type changes do
763 not matter. */
764 if (!flag_devirtualize || !gimple_vuse (call)
765 /* Be sure expected_type is polymorphic. */
766 || !comp_type
767 || TREE_CODE (comp_type) != RECORD_TYPE
768 || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
769 || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
770 return true;
772 ao_ref_init (&ao, arg);
773 ao.base = base;
774 ao.offset = offset;
775 ao.size = POINTER_SIZE;
776 ao.max_size = ao.size;
778 tci.offset = offset;
779 tci.object = get_base_address (arg);
780 tci.type_maybe_changed = false;
782 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
783 &tci, NULL, &entry_reached);
784 if (!tci.type_maybe_changed)
785 return false;
787 ipa_set_jf_unknown (jfunc);
788 return true;
791 /* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
792 If so, return true and fill in the jump function JFUNC with relevant type
793 information or set it to unknown. ARG is the object itself (not a pointer
794 to it, unless dereferenced). BASE is the base of the memory access as
795 returned by get_ref_base_and_extent, as is the offset. */
797 static bool
798 detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
799 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
801 if (!flag_devirtualize)
802 return false;
804 if (TREE_CODE (base) == MEM_REF
805 && !param_type_may_change_p (current_function_decl,
806 TREE_OPERAND (base, 0),
807 call))
808 return false;
809 return detect_type_change_from_memory_writes (arg, base, comp_type,
810 call, jfunc, offset);
813 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
814 SSA name (its dereference will become the base and the offset is assumed to
815 be zero). */
817 static bool
818 detect_type_change_ssa (tree arg, tree comp_type,
819 gcall *call, struct ipa_jump_func *jfunc)
821 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
822 if (!flag_devirtualize
823 || !POINTER_TYPE_P (TREE_TYPE (arg)))
824 return false;
826 if (!param_type_may_change_p (current_function_decl, arg, call))
827 return false;
829 arg = build2 (MEM_REF, ptr_type_node, arg,
830 build_int_cst (ptr_type_node, 0));
832 return detect_type_change_from_memory_writes (arg, arg, comp_type,
833 call, jfunc, 0);
836 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
837 boolean variable pointed to by DATA. */
839 static bool
840 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
841 void *data)
843 bool *b = (bool *) data;
844 *b = true;
845 return true;
848 /* Return true if we have already walked so many statements in AA that we
849 should really just start giving up. */
851 static bool
852 aa_overwalked (struct ipa_func_body_info *fbi)
854 gcc_checking_assert (fbi);
855 return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
858 /* Find the nearest valid aa status for parameter specified by INDEX that
859 dominates BB. */
861 static struct ipa_param_aa_status *
862 find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
863 int index)
865 while (true)
867 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
868 if (!bb)
869 return NULL;
870 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
871 if (!bi->param_aa_statuses.is_empty ()
872 && bi->param_aa_statuses[index].valid)
873 return &bi->param_aa_statuses[index];
877 /* Get AA status structure for the given BB and parameter with INDEX. Allocate
878 structures and/or initialize the result with a dominating description as
879 necessary. */
881 static struct ipa_param_aa_status *
882 parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
883 int index)
885 gcc_checking_assert (fbi);
886 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
887 if (bi->param_aa_statuses.is_empty ())
888 bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
889 struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
890 if (!paa->valid)
892 gcc_checking_assert (!paa->parm_modified
893 && !paa->ref_modified
894 && !paa->pt_modified);
895 struct ipa_param_aa_status *dom_paa;
896 dom_paa = find_dominating_aa_status (fbi, bb, index);
897 if (dom_paa)
898 *paa = *dom_paa;
899 else
900 paa->valid = true;
903 return paa;
906 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
907 a value known not to be modified in this function before reaching the
908 statement STMT.  FBI holds the information about the function gathered so far
909 that does not survive the summary building stage. */
911 static bool
912 parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
913 gimple *stmt, tree parm_load)
915 struct ipa_param_aa_status *paa;
916 bool modified = false;
917 ao_ref refd;
919 tree base = get_base_address (parm_load);
920 gcc_assert (TREE_CODE (base) == PARM_DECL);
921 if (TREE_READONLY (base))
922 return true;
924 /* FIXME: FBI can be NULL if we are being called from outside
925 ipa_node_analysis or ipcp_transform_function, which currently happens
926 during inlining analysis. It would be great to extend fbi's lifetime and
927 always have it. Currently, we are just not afraid of too much walking in
928 that case. */
929 if (fbi)
931 if (aa_overwalked (fbi))
932 return false;
933 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
934 if (paa->parm_modified)
935 return false;
937 else
938 paa = NULL;
940 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
941 ao_ref_init (&refd, parm_load);
942 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
943 &modified, NULL);
944 if (fbi)
945 fbi->aa_walked += walked;
946 if (paa && modified)
947 paa->parm_modified = true;
948 return !modified;
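/* A sketch of what the walk catches (hypothetical GIMPLE, an aggregate
   parameter "s" passed on by value):

     s.x = 7;      <- aliased store to the parameter
     bar (s);      <- STMT; the walk starts at its virtual use

   walk_aliased_vdefs reaches the store, mark_modified sets MODIFIED, the
   fact is cached as parm_modified in the ipa_param_aa_status of the BB and
   the function returns false.  */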
951 /* If STMT is an assignment that loads a value from a parameter declaration
952 which has not been modified, return the index of the parameter in
953 ipa_node_params.  Otherwise return -1. */
955 static int
956 load_from_unmodified_param (struct ipa_func_body_info *fbi,
957 vec<ipa_param_descriptor, va_gc> *descriptors,
958 gimple *stmt)
960 int index;
961 tree op1;
963 if (!gimple_assign_single_p (stmt))
964 return -1;
966 op1 = gimple_assign_rhs1 (stmt);
967 if (TREE_CODE (op1) != PARM_DECL)
968 return -1;
970 index = ipa_get_param_decl_index_1 (descriptors, op1);
971 if (index < 0
972 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
973 return -1;
975 return index;
978 /* Return true if memory reference REF (which must be a load through parameter
979 with INDEX) loads data that are known to be unmodified in this function
980 before reaching statement STMT. */
982 static bool
983 parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
984 int index, gimple *stmt, tree ref)
986 struct ipa_param_aa_status *paa;
987 bool modified = false;
988 ao_ref refd;
990 /* FIXME: FBI can be NULL if we are being called from outside
991 ipa_node_analysis or ipcp_transform_function, which currently happens
992 during inlining analysis. It would be great to extend fbi's lifetime and
993 always have it. Currently, we are just not afraid of too much walking in
994 that case. */
995 if (fbi)
997 if (aa_overwalked (fbi))
998 return false;
999 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
1000 if (paa->ref_modified)
1001 return false;
1003 else
1004 paa = NULL;
1006 gcc_checking_assert (gimple_vuse (stmt));
1007 ao_ref_init (&refd, ref);
1008 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
1009 &modified, NULL);
1010 if (fbi)
1011 fbi->aa_walked += walked;
1012 if (paa && modified)
1013 paa->ref_modified = true;
1014 return !modified;
1017 /* Return true if the data pointed to by PARM (which is a parameter with INDEX)
1018 is known to be unmodified in this function before reaching call statement
1019 CALL into which it is passed. FBI describes the function body. */
1021 static bool
1022 parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
1023 gimple *call, tree parm)
1025 bool modified = false;
1026 ao_ref refd;
1028 /* It's unnecessary to calculate anything about memory contents for a const
1029 function because it is not going to use it.  But do not cache the result
1030 either.  Also, no such calculations for non-pointers. */
1031 if (!gimple_vuse (call)
1032 || !POINTER_TYPE_P (TREE_TYPE (parm))
1033 || aa_overwalked (fbi))
1034 return false;
1036 struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
1037 gimple_bb (call),
1038 index);
1039 if (paa->pt_modified)
1040 return false;
1042 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
1043 int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
1044 &modified, NULL);
1045 fbi->aa_walked += walked;
1046 if (modified)
1047 paa->pt_modified = true;
1048 return !modified;
1051 /* Return true if we can prove that OP is a memory reference loading
1052 data from an aggregate passed as a parameter.
1054 The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it returns
1055 false if it cannot prove that the value has not been modified before the
1056 load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will return true even
1057 if it cannot prove the value has not been modified; in that case it will
1058 store false to *GUARANTEED_UNMODIFIED, otherwise it will store true there.
1060 INFO and PARMS_AINFO describe parameters of the current function (but the
1061 latter can be NULL), STMT is the load statement. If function returns true,
1062 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
1063 within the aggregate and whether it is a load from a value passed by
1064 reference respectively. */
1066 bool
1067 ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
1068 vec<ipa_param_descriptor, va_gc> *descriptors,
1069 gimple *stmt, tree op, int *index_p,
1070 HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
1071 bool *by_ref_p, bool *guaranteed_unmodified)
1073 int index;
1074 HOST_WIDE_INT size, max_size;
1075 bool reverse;
1076 tree base
1077 = get_ref_base_and_extent (op, offset_p, &size, &max_size, &reverse);
1079 if (max_size == -1 || max_size != size || *offset_p < 0)
1080 return false;
1082 if (DECL_P (base))
1084 int index = ipa_get_param_decl_index_1 (descriptors, base);
1085 if (index >= 0
1086 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
1088 *index_p = index;
1089 *by_ref_p = false;
1090 if (size_p)
1091 *size_p = size;
1092 if (guaranteed_unmodified)
1093 *guaranteed_unmodified = true;
1094 return true;
1096 return false;
1099 if (TREE_CODE (base) != MEM_REF
1100 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
1101 || !integer_zerop (TREE_OPERAND (base, 1)))
1102 return false;
1104 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
1106 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
1107 index = ipa_get_param_decl_index_1 (descriptors, parm);
1109 else
1111 /* This branch catches situations where a pointer parameter is not a
1112 gimple register, for example:
1114 void hip7(S*) (struct S * p)
1116 void (*<T2e4>) (struct S *) D.1867;
1117 struct S * p.1;
1119 <bb 2>:
1120 p.1_1 = p;
1121 D.1867_2 = p.1_1->f;
1122 D.1867_2 ();
1123 gdp = &p;
1126 gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
1127 index = load_from_unmodified_param (fbi, descriptors, def);
1130 if (index >= 0)
1132 bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
1133 if (!data_preserved && !guaranteed_unmodified)
1134 return false;
1136 *index_p = index;
1137 *by_ref_p = true;
1138 if (size_p)
1139 *size_p = size;
1140 if (guaranteed_unmodified)
1141 *guaranteed_unmodified = data_preserved;
1142 return true;
1144 return false;
1147 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
1148 of an assignment statement STMT, try to determine whether we are actually
1149 handling any of the following cases and construct an appropriate jump
1150 function into JFUNC if so:
1152 1) The passed value is loaded from a formal parameter which is not a gimple
1153 register (most probably because it is addressable, the value has to be
1154 scalar) and we can guarantee the value has not changed. This case can
1155 therefore be described by a simple pass-through jump function. For example:
1157 foo (int a)
1159 int a.0;
1161 a.0_2 = a;
1162 bar (a.0_2);
1164 2) The passed value can be described by a simple arithmetic pass-through
1165 jump function. E.g.
1167 foo (int a)
1169 int D.2064;
1171 D.2064_4 = a.1(D) + 4;
1172 bar (D.2064_4);
1174 This case can also occur in combination of the previous one, e.g.:
1176 foo (int a, int z)
1178 int a.0;
1179 int D.2064;
1181 a.0_3 = a;
1182 D.2064_4 = a.0_3 + 4;
1183 foo (D.2064_4);
1185 3) The passed value is an address of an object within another one (which
1186 is also passed by reference).  Such situations are described by an ancestor
1187 jump function and arise from code such as:
1189 B::foo() (struct B * const this)
1191 struct A * D.1845;
1193 D.1845_2 = &this_1(D)->D.1748;
1194 A::bar (D.1845_2);
1196 INFO is the structure describing individual parameters at different
1197 stages of IPA optimizations.  PARMS_AINFO contains the information that is
1198 only needed for intraprocedural analysis. */
1200 static void
1201 compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
1202 struct ipa_node_params *info,
1203 struct ipa_jump_func *jfunc,
1204 gcall *call, gimple *stmt, tree name,
1205 tree param_type)
1207 HOST_WIDE_INT offset, size, max_size;
1208 tree op1, tc_ssa, base, ssa;
1209 bool reverse;
1210 int index;
1212 op1 = gimple_assign_rhs1 (stmt);
1214 if (TREE_CODE (op1) == SSA_NAME)
1216 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1217 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1218 else
1219 index = load_from_unmodified_param (fbi, info->descriptors,
1220 SSA_NAME_DEF_STMT (op1));
1221 tc_ssa = op1;
1223 else
1225 index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1226 tc_ssa = gimple_assign_lhs (stmt);
1229 if (index >= 0)
1231 switch (gimple_assign_rhs_class (stmt))
1233 case GIMPLE_BINARY_RHS:
1235 tree op2 = gimple_assign_rhs2 (stmt);
1236 if (!is_gimple_ip_invariant (op2)
1237 || ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
1238 != tcc_comparison)
1239 && !useless_type_conversion_p (TREE_TYPE (name),
1240 TREE_TYPE (op1))))
1241 return;
1243 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1244 gimple_assign_rhs_code (stmt));
1245 break;
1247 case GIMPLE_SINGLE_RHS:
1249 bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
1250 tc_ssa);
1251 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1252 break;
1254 case GIMPLE_UNARY_RHS:
1255 if (is_gimple_assign (stmt)
1256 && gimple_assign_rhs_class (stmt) == GIMPLE_UNARY_RHS
1257 && ! CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
1258 ipa_set_jf_unary_pass_through (jfunc, index,
1259 gimple_assign_rhs_code (stmt));
1260 default:;
1262 return;
1265 if (TREE_CODE (op1) != ADDR_EXPR)
1266 return;
1267 op1 = TREE_OPERAND (op1, 0);
1268 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
1269 return;
1270 base = get_ref_base_and_extent (op1, &offset, &size, &max_size, &reverse);
1271 if (TREE_CODE (base) != MEM_REF
1272 /* If this is a varying address, punt. */
1273 || max_size == -1
1274 || max_size != size)
1275 return;
1276 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
1277 ssa = TREE_OPERAND (base, 0);
1278 if (TREE_CODE (ssa) != SSA_NAME
1279 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
1280 || offset < 0)
1281 return;
1283 /* Dynamic types are changed in constructors and destructors. */
1284 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
1285 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
1286 ipa_set_ancestor_jf (jfunc, offset, index,
1287 parm_ref_data_pass_through_p (fbi, index, call, ssa));
1290 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1291 it looks like:
1293 iftmp.1_3 = &obj_2(D)->D.1762;
1295 The base of the MEM_REF must be a default definition SSA NAME of a
1296 parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
1297 whole MEM_REF expression is returned and the offset calculated from any
1298 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1299 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1301 static tree
1302 get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
1304 HOST_WIDE_INT size, max_size;
1305 tree expr, parm, obj;
1306 bool reverse;
1308 if (!gimple_assign_single_p (assign))
1309 return NULL_TREE;
1310 expr = gimple_assign_rhs1 (assign);
1312 if (TREE_CODE (expr) != ADDR_EXPR)
1313 return NULL_TREE;
1314 expr = TREE_OPERAND (expr, 0);
1315 obj = expr;
1316 expr = get_ref_base_and_extent (expr, offset, &size, &max_size, &reverse);
1318 if (TREE_CODE (expr) != MEM_REF
1319 /* If this is a varying address, punt. */
1320 || max_size == -1
1321 || max_size != size
1322 || *offset < 0)
1323 return NULL_TREE;
1324 parm = TREE_OPERAND (expr, 0);
1325 if (TREE_CODE (parm) != SSA_NAME
1326 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1327 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1328 return NULL_TREE;
1330 *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
1331 *obj_p = obj;
1332 return expr;
1336 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1337 statement PHI, try to find out whether NAME is in fact a
1338 multiple-inheritance typecast from a descendant into an ancestor of a formal
1339 parameter and thus can be described by an ancestor jump function and if so,
1340 write the appropriate function into JFUNC.
1342 Essentially we want to match the following pattern:
1344 if (obj_2(D) != 0B)
1345 goto <bb 3>;
1346 else
1347 goto <bb 4>;
1349 <bb 3>:
1350 iftmp.1_3 = &obj_2(D)->D.1762;
1352 <bb 4>:
1353 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1354 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1355 return D.1879_6; */
1357 static void
1358 compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
1359 struct ipa_node_params *info,
1360 struct ipa_jump_func *jfunc,
1361 gcall *call, gphi *phi)
1363 HOST_WIDE_INT offset;
1364 gimple *assign, *cond;
1365 basic_block phi_bb, assign_bb, cond_bb;
1366 tree tmp, parm, expr, obj;
1367 int index, i;
1369 if (gimple_phi_num_args (phi) != 2)
1370 return;
1372 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1373 tmp = PHI_ARG_DEF (phi, 0);
1374 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1375 tmp = PHI_ARG_DEF (phi, 1);
1376 else
1377 return;
1378 if (TREE_CODE (tmp) != SSA_NAME
1379 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1380 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1381 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1382 return;
1384 assign = SSA_NAME_DEF_STMT (tmp);
1385 assign_bb = gimple_bb (assign);
1386 if (!single_pred_p (assign_bb))
1387 return;
1388 expr = get_ancestor_addr_info (assign, &obj, &offset);
1389 if (!expr)
1390 return;
1391 parm = TREE_OPERAND (expr, 0);
1392 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1393 if (index < 0)
1394 return;
1396 cond_bb = single_pred (assign_bb);
1397 cond = last_stmt (cond_bb);
1398 if (!cond
1399 || gimple_code (cond) != GIMPLE_COND
1400 || gimple_cond_code (cond) != NE_EXPR
1401 || gimple_cond_lhs (cond) != parm
1402 || !integer_zerop (gimple_cond_rhs (cond)))
1403 return;
1405 phi_bb = gimple_bb (phi);
1406 for (i = 0; i < 2; i++)
1408 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1409 if (pred != assign_bb && pred != cond_bb)
1410 return;
1413 ipa_set_ancestor_jf (jfunc, offset, index,
1414 parm_ref_data_pass_through_p (fbi, index, call, parm));
1417 /* Inspect the given TYPE and return true iff it has the same structure (the
1418 same number of fields of the same types) as a C++ member pointer. If
1419 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1420 corresponding fields there. */
1422 static bool
1423 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1425 tree fld;
1427 if (TREE_CODE (type) != RECORD_TYPE)
1428 return false;
1430 fld = TYPE_FIELDS (type);
1431 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1432 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1433 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1434 return false;
1436 if (method_ptr)
1437 *method_ptr = fld;
1439 fld = DECL_CHAIN (fld);
1440 if (!fld || INTEGRAL_TYPE_P (fld)
1441 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1442 return false;
1443 if (delta)
1444 *delta = fld;
1446 if (DECL_CHAIN (fld))
1447 return false;
1449 return true;
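/* Conceptually this recognizes the two-field record into which the C++
   front end lowers a pointer to member function, roughly (field names shown
   for illustration only):

     struct ptrmemfunc
     {
       void (S::* __pfn) ();     the method pointer field
       ptrdiff_t __delta;        the this-adjustment field
     };

   i.e. exactly two fields, with nothing following the delta.  */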
1452 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1453 return the rhs of its defining statement. Otherwise return RHS as it
1454 is. */
1456 static inline tree
1457 get_ssa_def_if_simple_copy (tree rhs)
1459 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1461 gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
1463 if (gimple_assign_single_p (def_stmt))
1464 rhs = gimple_assign_rhs1 (def_stmt);
1465 else
1466 break;
1468 return rhs;
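/* E.g. given the chain (hypothetical GIMPLE)

     b_2 = a_1(D);
     c_3 = b_2;

   get_ssa_def_if_simple_copy (c_3) follows the single-rhs copies and returns
   a_1(D), the default definition at which the walk stops.  */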
1471 /* Simple linked list, describing known contents of an aggregate before the
1472 call. */
1474 struct ipa_known_agg_contents_list
1476 /* Offset and size of the described part of the aggregate. */
1477 HOST_WIDE_INT offset, size;
1478 /* Known constant value or NULL if the contents is known to be unknown. */
1479 tree constant;
1480 /* Pointer to the next structure in the list. */
1481 struct ipa_known_agg_contents_list *next;
1484 /* Find the proper place in the linked list of ipa_known_agg_contents_list
1485 structures in which to put a new one with the given LHS_OFFSET and LHS_SIZE,
1486 unless there is a partial overlap, in which case return NULL, or such an
1487 element is already there, in which case set *ALREADY_THERE to true. */
1489 static struct ipa_known_agg_contents_list **
1490 get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
1491 HOST_WIDE_INT lhs_offset,
1492 HOST_WIDE_INT lhs_size,
1493 bool *already_there)
1495 struct ipa_known_agg_contents_list **p = list;
1496 while (*p && (*p)->offset < lhs_offset)
1498 if ((*p)->offset + (*p)->size > lhs_offset)
1499 return NULL;
1500 p = &(*p)->next;
1503 if (*p && (*p)->offset < lhs_offset + lhs_size)
1505 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1506 /* We already know this value is subsequently overwritten with
1507 something else. */
1508 *already_there = true;
1509 else
1510 /* Otherwise this is a partial overlap which we cannot
1511 represent. */
1512 return NULL;
1514 return p;
1517 /* Build aggregate jump function from LIST, assuming there are exactly
1518 CONST_COUNT constant entries there and that the offset of the passed argument
1519 is ARG_OFFSET and store it into JFUNC. */
1521 static void
1522 build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1523 int const_count, HOST_WIDE_INT arg_offset,
1524 struct ipa_jump_func *jfunc)
1526 vec_alloc (jfunc->agg.items, const_count);
1527 while (list)
1529 if (list->constant)
1531 struct ipa_agg_jf_item item;
1532 item.offset = list->offset - arg_offset;
1533 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1534 item.value = unshare_expr_without_location (list->constant);
1535 jfunc->agg.items->quick_push (item);
1537 list = list->next;
1541 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1542 in ARG is filled in with constant values. ARG can either be an aggregate
1543 expression or a pointer to an aggregate. ARG_TYPE is the type of the
1544 aggregate. JFUNC is the jump function into which the constants are
1545 subsequently stored. */
1547 static void
1548 determine_locally_known_aggregate_parts (gcall *call, tree arg,
1549 tree arg_type,
1550 struct ipa_jump_func *jfunc)
1552 struct ipa_known_agg_contents_list *list = NULL;
1553 int item_count = 0, const_count = 0;
1554 HOST_WIDE_INT arg_offset, arg_size;
1555 gimple_stmt_iterator gsi;
1556 tree arg_base;
1557 bool check_ref, by_ref;
1558 ao_ref r;
1560 if (PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS) == 0)
1561 return;
1563 /* The function operates in three stages. First, we prepare check_ref, r,
1564 arg_base and arg_offset based on what is actually passed as an actual
1565 argument. */
1567 if (POINTER_TYPE_P (arg_type))
1569 by_ref = true;
1570 if (TREE_CODE (arg) == SSA_NAME)
1572 tree type_size;
1573 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
1574 return;
1575 check_ref = true;
1576 arg_base = arg;
1577 arg_offset = 0;
1578 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1579 arg_size = tree_to_uhwi (type_size);
1580 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1582 else if (TREE_CODE (arg) == ADDR_EXPR)
1584 HOST_WIDE_INT arg_max_size;
1585 bool reverse;
1587 arg = TREE_OPERAND (arg, 0);
1588 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1589 &arg_max_size, &reverse);
1590 if (arg_max_size == -1
1591 || arg_max_size != arg_size
1592 || arg_offset < 0)
1593 return;
1594 if (DECL_P (arg_base))
1596 check_ref = false;
1597 ao_ref_init (&r, arg_base);
1599 else
1600 return;
1602 else
1603 return;
1605 else
1607 HOST_WIDE_INT arg_max_size;
1608 bool reverse;
1610 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1612 by_ref = false;
1613 check_ref = false;
1614 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1615 &arg_max_size, &reverse);
1616 if (arg_max_size == -1
1617 || arg_max_size != arg_size
1618 || arg_offset < 0)
1619 return;
1621 ao_ref_init (&r, arg);
1624 /* Second stage walks back the BB, looks at individual statements and as long
1625 as it is confident of how the statements affect contents of the
1626 aggregates, it builds a sorted linked list of ipa_known_agg_contents_list structures
1627 describing it. */
1628 gsi = gsi_for_stmt (call);
1629 gsi_prev (&gsi);
1630 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1632 struct ipa_known_agg_contents_list *n, **p;
1633 gimple *stmt = gsi_stmt (gsi);
1634 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1635 tree lhs, rhs, lhs_base;
1636 bool reverse;
1638 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1639 continue;
1640 if (!gimple_assign_single_p (stmt))
1641 break;
1643 lhs = gimple_assign_lhs (stmt);
1644 rhs = gimple_assign_rhs1 (stmt);
1645 if (!is_gimple_reg_type (TREE_TYPE (rhs))
1646 || TREE_CODE (lhs) == BIT_FIELD_REF
1647 || contains_bitfld_component_ref_p (lhs))
1648 break;
1650 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1651 &lhs_max_size, &reverse);
1652 if (lhs_max_size == -1
1653 || lhs_max_size != lhs_size)
1654 break;
1656 if (check_ref)
1658 if (TREE_CODE (lhs_base) != MEM_REF
1659 || TREE_OPERAND (lhs_base, 0) != arg_base
1660 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1661 break;
1663 else if (lhs_base != arg_base)
1665 if (DECL_P (lhs_base))
1666 continue;
1667 else
1668 break;
1671 bool already_there = false;
1672 p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
1673 &already_there);
1674 if (!p)
1675 break;
1676 if (already_there)
1677 continue;
1679 rhs = get_ssa_def_if_simple_copy (rhs);
1680 n = XALLOCA (struct ipa_known_agg_contents_list);
1681 n->size = lhs_size;
1682 n->offset = lhs_offset;
1683 if (is_gimple_ip_invariant (rhs))
1685 n->constant = rhs;
1686 const_count++;
1688 else
1689 n->constant = NULL_TREE;
1690 n->next = *p;
1691 *p = n;
1693 item_count++;
1694 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1695 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1696 break;
1699 /* Third stage just goes over the list and creates an appropriate vector of
1700 ipa_agg_jf_item structures out of it, of course only if there are
1701 any known constants to begin with. */
1703 if (const_count)
1705 jfunc->agg.by_ref = by_ref;
1706 build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
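/* An illustrative fragment (invented example): before the call in

     s.i = 1;
     s.f = 2.0;
     bar (&s);

   the backward walk finds the two stores, puts them on the sorted list and
   the resulting jump function carries two aggregate items, the offset of
   "i" with constant 1 and the offset of "f" with constant 2.0, with by_ref
   reflecting that the aggregate is passed by reference here.  */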
1710 /* Return the Ith param type of callee associated with call graph
1711 edge E. */
1713 tree
1714 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1716 int n;
1717 tree type = (e->callee
1718 ? TREE_TYPE (e->callee->decl)
1719 : gimple_call_fntype (e->call_stmt));
1720 tree t = TYPE_ARG_TYPES (type);
1722 for (n = 0; n < i; n++)
1724 if (!t)
1725 break;
1726 t = TREE_CHAIN (t);
1728 if (t)
1729 return TREE_VALUE (t);
1730 if (!e->callee)
1731 return NULL;
1732 t = DECL_ARGUMENTS (e->callee->decl);
1733 for (n = 0; n < i; n++)
1735 if (!t)
1736 return NULL;
1737 t = TREE_CHAIN (t);
1739 if (t)
1740 return TREE_TYPE (t);
1741 return NULL;
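/* The DECL_ARGUMENTS fallback matters e.g. for an unprototyped (K&R style) C
   callee, an illustrative sketch:

     int scale ();                          declaration visible at the call
     int scale (f) double f; { ... }        old-style definition

   TYPE_ARG_TYPES of such a function type yields nothing useful, so the type
   of parameter 0 is recovered from DECL_ARGUMENTS of the callee instead.  */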
1744 /* Return ipa_bits with VALUE and MASK values, which can be either a newly
1745 allocated structure or a previously existing one shared with other jump
1746 functions and/or transformation summaries. */
1748 ipa_bits *
1749 ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
1751 ipa_bits tmp;
1752 tmp.value = value;
1753 tmp.mask = mask;
1755 ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
1756 if (*slot)
1757 return *slot;
1759 ipa_bits *res = ggc_alloc<ipa_bits> ();
1760 res->value = value;
1761 res->mask = mask;
1762 *slot = res;
1764 return res;
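/* Consequently two equal requests yield one shared, GC-allocated structure,
   an illustrative use (values chosen arbitrarily):

     ipa_bits *a = ipa_get_ipa_bits_for_value (3, -4);
     ipa_bits *b = ipa_get_ipa_bits_for_value (3, -4);
     gcc_checking_assert (a == b);

   which is what ipa_set_jfunc_bits below relies on for deduplication.  */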
1767 /* Assign to JF a pointer to ipa_bits structure with VALUE and MASK. Use hash
1768 table in order to avoid creating multiple identical ipa_bits structures. */
1770 static void
1771 ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
1772 const widest_int &mask)
1774 jf->bits = ipa_get_ipa_bits_for_value (value, mask);
1777 /* Return a pointer to a value_range just like *TMP, but either find it in
1778 ipa_vr_hash_table or allocate it in GC memory. TMP->equiv must be NULL. */
1780 static value_range *
1781 ipa_get_value_range (value_range *tmp)
1783 value_range **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
1784 if (*slot)
1785 return *slot;
1787 value_range *vr = ggc_alloc<value_range> ();
1788 *vr = *tmp;
1789 *slot = vr;
1791 return vr;
1794 /* Return a pointer to a value range consisting of TYPE, MIN, MAX and an empty
1795 equiv set. Use hash table in order to avoid creating multiple same copies of
1796 value_ranges. */
1798 static value_range *
1799 ipa_get_value_range (enum value_range_type type, tree min, tree max)
1801 value_range tmp;
1802 tmp.type = type;
1803 tmp.min = min;
1804 tmp.max = max;
1805 tmp.equiv = NULL;
1806 return ipa_get_value_range (&tmp);
1809 /* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
1810 a NULL equiv bitmap. Use hash table in order to avoid creating multiple
1811 identical value_range structures. */
1813 static void
1814 ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_type type,
1815 tree min, tree max)
1817 jf->m_vr = ipa_get_value_range (type, min, max);
1820 /* Assign to JF a pointer to a value_range just like *TMP but either fetch a
1821 copy from ipa_vr_hash_table or allocate a new one in GC memory. */
1823 static void
1824 ipa_set_jfunc_vr (ipa_jump_func *jf, value_range *tmp)
1826 jf->m_vr = ipa_get_value_range (tmp);
1829 /* Compute jump function for all arguments of callsite CS and insert the
1830 information in the jump_functions array in the ipa_edge_args corresponding
1831 to this callsite. */
1833 static void
1834 ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
1835 struct cgraph_edge *cs)
1837 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1838 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1839 gcall *call = cs->call_stmt;
1840 int n, arg_num = gimple_call_num_args (call);
1841 bool useful_context = false;
1843 if (arg_num == 0 || args->jump_functions)
1844 return;
1845 vec_safe_grow_cleared (args->jump_functions, arg_num);
1846 if (flag_devirtualize)
1847 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
1849 if (gimple_call_internal_p (call))
1850 return;
1851 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1852 return;
1854 for (n = 0; n < arg_num; n++)
1856 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1857 tree arg = gimple_call_arg (call, n);
1858 tree param_type = ipa_get_callee_param_type (cs, n);
1859 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
1861 tree instance;
1862 struct ipa_polymorphic_call_context context (cs->caller->decl,
1863 arg, cs->call_stmt,
1864 &instance);
1865 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
1866 *ipa_get_ith_polymorhic_call_context (args, n) = context;
1867 if (!context.useless_p ())
1868 useful_context = true;
1871 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1873 bool addr_nonzero = false;
1874 bool strict_overflow = false;
1876 if (TREE_CODE (arg) == SSA_NAME
1877 && param_type
1878 && get_ptr_nonnull (arg))
1879 addr_nonzero = true;
1880 else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
1881 addr_nonzero = true;
1883 if (addr_nonzero)
1885 tree z = build_int_cst (TREE_TYPE (arg), 0);
1886 ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
1888 else
1889 gcc_assert (!jfunc->m_vr);
1891 else
1893 wide_int min, max;
1894 value_range_type type;
1895 if (TREE_CODE (arg) == SSA_NAME
1896 && param_type
1897 && (type = get_range_info (arg, &min, &max))
1898 && (type == VR_RANGE || type == VR_ANTI_RANGE))
1900 value_range tmpvr,resvr;
1902 tmpvr.type = type;
1903 tmpvr.min = wide_int_to_tree (TREE_TYPE (arg), min);
1904 tmpvr.max = wide_int_to_tree (TREE_TYPE (arg), max);
1905 tmpvr.equiv = NULL;
1906 memset (&resvr, 0, sizeof (resvr));
1907 extract_range_from_unary_expr (&resvr, NOP_EXPR, param_type,
1908 &tmpvr, TREE_TYPE (arg));
1909 if (resvr.type == VR_RANGE || resvr.type == VR_ANTI_RANGE)
1910 ipa_set_jfunc_vr (jfunc, &resvr);
1911 else
1912 gcc_assert (!jfunc->m_vr);
1914 else
1915 gcc_assert (!jfunc->m_vr);
1918 if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
1919 && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
1921 if (TREE_CODE (arg) == SSA_NAME)
1922 ipa_set_jfunc_bits (jfunc, 0,
1923 widest_int::from (get_nonzero_bits (arg),
1924 TYPE_SIGN (TREE_TYPE (arg))));
1925 else
1926 ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
1928 else if (POINTER_TYPE_P (TREE_TYPE (arg)))
1930 unsigned HOST_WIDE_INT bitpos;
1931 unsigned align;
1933 get_pointer_alignment_1 (arg, &align, &bitpos);
1934 widest_int mask
1935 = wi::mask<widest_int>(TYPE_PRECISION (TREE_TYPE (arg)), false)
1936 .and_not (align / BITS_PER_UNIT - 1);
1937 widest_int value = bitpos / BITS_PER_UNIT;
1938 ipa_set_jfunc_bits (jfunc, value, mask);
1940 else
1941 gcc_assert (!jfunc->bits);
1943 if (is_gimple_ip_invariant (arg)
1944 || (VAR_P (arg)
1945 && is_global_var (arg)
1946 && TREE_READONLY (arg)))
1947 ipa_set_jf_constant (jfunc, arg, cs);
1948 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1949 && TREE_CODE (arg) == PARM_DECL)
1951 int index = ipa_get_param_decl_index (info, arg);
1953 gcc_assert (index >= 0);
1954 /* Aggregate passed by value, check for pass-through, otherwise we
1955 will attempt to fill in aggregate contents later in this
1956 loop. */
1957 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1959 ipa_set_jf_simple_pass_through (jfunc, index, false);
1960 continue;
1963 else if (TREE_CODE (arg) == SSA_NAME)
1965 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1967 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1968 if (index >= 0)
1970 bool agg_p;
1971 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1972 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1975 else
1977 gimple *stmt = SSA_NAME_DEF_STMT (arg);
1978 if (is_gimple_assign (stmt))
1979 compute_complex_assign_jump_func (fbi, info, jfunc,
1980 call, stmt, arg, param_type);
1981 else if (gimple_code (stmt) == GIMPLE_PHI)
1982 compute_complex_ancestor_jump_func (fbi, info, jfunc,
1983 call,
1984 as_a <gphi *> (stmt));
1988 /* If ARG is a pointer, we cannot use its type to determine the type of aggregate
1989 passed (because type conversions are ignored in gimple). Usually we can
1990 safely get the type from the function declaration, but in case of K&R prototypes
1991 or variadic functions we can try our luck with the type of the pointer passed.
1992 TODO: Since we look for actual initialization of the memory object, we might do
1993 better to work out the type based on the memory stores we find. */
1994 if (!param_type)
1995 param_type = TREE_TYPE (arg);
1997 if ((jfunc->type != IPA_JF_PASS_THROUGH
1998 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1999 && (jfunc->type != IPA_JF_ANCESTOR
2000 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
2001 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
2002 || POINTER_TYPE_P (param_type)))
2003 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
2005 if (!useful_context)
2006 vec_free (args->polymorphic_call_contexts);
2009 /* Compute jump functions for all edges - both direct and indirect - outgoing
2010 from BB. */
2012 static void
2013 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
2015 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2016 int i;
2017 struct cgraph_edge *cs;
2019 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
2021 struct cgraph_node *callee = cs->callee;
2023 if (callee)
2025 callee->ultimate_alias_target ();
2026 /* We do not need to bother analyzing calls to unknown functions
2027 unless they may become known during lto/whopr. */
2028 if (!callee->definition && !flag_lto)
2029 continue;
2031 ipa_compute_jump_functions_for_edge (fbi, cs);
2035 /* If STMT looks like a statement loading a value from a member pointer formal
2036 parameter, return that parameter and store the offset of the field to
2037 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2038 might be clobbered). If USE_DELTA, then we look for a use of the delta
2039 field rather than the pfn. */
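/* For illustration, this matches loads such as
f$__pfn_24 = MEM[(struct *)&f + 4B];
from the member-pointer pattern dumped further below, where f is a
PARM_DECL of pointer-to-member-function type, and returns f. */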
2041 static tree
2042 ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
2043 HOST_WIDE_INT *offset_p)
2045 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2047 if (!gimple_assign_single_p (stmt))
2048 return NULL_TREE;
2050 rhs = gimple_assign_rhs1 (stmt);
2051 if (TREE_CODE (rhs) == COMPONENT_REF)
2053 ref_field = TREE_OPERAND (rhs, 1);
2054 rhs = TREE_OPERAND (rhs, 0);
2056 else
2057 ref_field = NULL_TREE;
2058 if (TREE_CODE (rhs) != MEM_REF)
2059 return NULL_TREE;
2060 rec = TREE_OPERAND (rhs, 0);
2061 if (TREE_CODE (rec) != ADDR_EXPR)
2062 return NULL_TREE;
2063 rec = TREE_OPERAND (rec, 0);
2064 if (TREE_CODE (rec) != PARM_DECL
2065 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2066 return NULL_TREE;
2067 ref_offset = TREE_OPERAND (rhs, 1);
2069 if (use_delta)
2070 fld = delta_field;
2071 else
2072 fld = ptr_field;
2073 if (offset_p)
2074 *offset_p = int_bit_position (fld);
2076 if (ref_field)
2078 if (integer_nonzerop (ref_offset))
2079 return NULL_TREE;
2080 return ref_field == fld ? rec : NULL_TREE;
2082 else
2083 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2084 : NULL_TREE;
2087 /* Returns true iff T is an SSA_NAME defined by a statement. */
2089 static bool
2090 ipa_is_ssa_with_stmt_def (tree t)
2092 if (TREE_CODE (t) == SSA_NAME
2093 && !SSA_NAME_IS_DEFAULT_DEF (t))
2094 return true;
2095 else
2096 return false;
2099 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2100 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2101 indirect call graph edge. */
2103 static struct cgraph_edge *
2104 ipa_note_param_call (struct cgraph_node *node, int param_index,
2105 gcall *stmt)
2107 struct cgraph_edge *cs;
2109 cs = node->get_edge (stmt);
2110 cs->indirect_info->param_index = param_index;
2111 cs->indirect_info->agg_contents = 0;
2112 cs->indirect_info->member_ptr = 0;
2113 cs->indirect_info->guaranteed_unmodified = 0;
2114 return cs;
2117 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
2118 (described by INFO). PARMS_AINFO is a pointer to a vector containing
2119 intermediate information about each formal parameter. Currently it checks
2120 whether the call calls a pointer that is a formal parameter and if so, the
2121 parameter is marked with the called flag and an indirect call graph edge
2122 describing the call is created. This is very simple for ordinary pointers
2123 represented in SSA but not-so-nice when it comes to member pointers. The
2124 ugly part of this function does nothing more than trying to match the
2125 pattern of such a call. An example of such a pattern is the gimple dump
2126 below, the call is on the last line:
2128 <bb 2>:
2129 f$__delta_5 = f.__delta;
2130 f$__pfn_24 = f.__pfn;
2133 <bb 2>:
2134 f$__delta_5 = MEM[(struct *)&f];
2135 f$__pfn_24 = MEM[(struct *)&f + 4B];
2137 and a few lines below:
2139 <bb 5>
2140 D.2496_3 = (int) f$__pfn_24;
2141 D.2497_4 = D.2496_3 & 1;
2142 if (D.2497_4 != 0)
2143 goto <bb 3>;
2144 else
2145 goto <bb 4>;
2147 <bb 6>:
2148 D.2500_7 = (unsigned int) f$__delta_5;
2149 D.2501_8 = &S + D.2500_7;
2150 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2151 D.2503_10 = *D.2502_9;
2152 D.2504_12 = f$__pfn_24 + -1;
2153 D.2505_13 = (unsigned int) D.2504_12;
2154 D.2506_14 = D.2503_10 + D.2505_13;
2155 D.2507_15 = *D.2506_14;
2156 iftmp.11_16 = (String:: *) D.2507_15;
2158 <bb 7>:
2159 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2160 D.2500_19 = (unsigned int) f$__delta_5;
2161 D.2508_20 = &S + D.2500_19;
2162 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2164 Such patterns are results of simple calls to a member pointer:
2166 int doprinting (int (MyString::* f)(int) const)
2168 MyString S ("somestring");
2170 return (S.*f)(4);
2173 Moreover, the function also looks for called pointers loaded from aggregates
2174 passed by value or reference. */
2176 static void
2177 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
2178 tree target)
2180 struct ipa_node_params *info = fbi->info;
2181 HOST_WIDE_INT offset;
2182 bool by_ref;
2184 if (SSA_NAME_IS_DEFAULT_DEF (target))
2186 tree var = SSA_NAME_VAR (target);
2187 int index = ipa_get_param_decl_index (info, var);
2188 if (index >= 0)
2189 ipa_note_param_call (fbi->node, index, call);
2190 return;
2193 int index;
2194 gimple *def = SSA_NAME_DEF_STMT (target);
2195 bool guaranteed_unmodified;
2196 if (gimple_assign_single_p (def)
2197 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2198 gimple_assign_rhs1 (def), &index, &offset,
2199 NULL, &by_ref, &guaranteed_unmodified))
2201 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2202 cs->indirect_info->offset = offset;
2203 cs->indirect_info->agg_contents = 1;
2204 cs->indirect_info->by_ref = by_ref;
2205 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
2206 return;
2209 /* Now we need to try to match the complex pattern of calling a member
2210 pointer. */
2211 if (gimple_code (def) != GIMPLE_PHI
2212 || gimple_phi_num_args (def) != 2
2213 || !POINTER_TYPE_P (TREE_TYPE (target))
2214 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2215 return;
2217 /* First, we need to check whether one of these is a load from a member
2218 pointer that is a parameter to this function. */
2219 tree n1 = PHI_ARG_DEF (def, 0);
2220 tree n2 = PHI_ARG_DEF (def, 1);
2221 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2222 return;
2223 gimple *d1 = SSA_NAME_DEF_STMT (n1);
2224 gimple *d2 = SSA_NAME_DEF_STMT (n2);
2226 tree rec;
2227 basic_block bb, virt_bb;
2228 basic_block join = gimple_bb (def);
2229 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2231 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2232 return;
2234 bb = EDGE_PRED (join, 0)->src;
2235 virt_bb = gimple_bb (d2);
2237 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2239 bb = EDGE_PRED (join, 1)->src;
2240 virt_bb = gimple_bb (d1);
2242 else
2243 return;
2245 /* Second, we need to check that the basic blocks are laid out in the way
2246 corresponding to the pattern. */
2248 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2249 || single_pred (virt_bb) != bb
2250 || single_succ (virt_bb) != join)
2251 return;
2253 /* Third, let's see that the branching is done depending on the least
2254 significant bit of the pfn. */
2256 gimple *branch = last_stmt (bb);
2257 if (!branch || gimple_code (branch) != GIMPLE_COND)
2258 return;
2260 if ((gimple_cond_code (branch) != NE_EXPR
2261 && gimple_cond_code (branch) != EQ_EXPR)
2262 || !integer_zerop (gimple_cond_rhs (branch)))
2263 return;
2265 tree cond = gimple_cond_lhs (branch);
2266 if (!ipa_is_ssa_with_stmt_def (cond))
2267 return;
2269 def = SSA_NAME_DEF_STMT (cond);
2270 if (!is_gimple_assign (def)
2271 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2272 || !integer_onep (gimple_assign_rhs2 (def)))
2273 return;
2275 cond = gimple_assign_rhs1 (def);
2276 if (!ipa_is_ssa_with_stmt_def (cond))
2277 return;
2279 def = SSA_NAME_DEF_STMT (cond);
2281 if (is_gimple_assign (def)
2282 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2284 cond = gimple_assign_rhs1 (def);
2285 if (!ipa_is_ssa_with_stmt_def (cond))
2286 return;
2287 def = SSA_NAME_DEF_STMT (cond);
2290 tree rec2;
2291 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2292 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2293 == ptrmemfunc_vbit_in_delta),
2294 NULL);
2295 if (rec != rec2)
2296 return;
2298 index = ipa_get_param_decl_index (info, rec);
2299 if (index >= 0
2300 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2302 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2303 cs->indirect_info->offset = offset;
2304 cs->indirect_info->agg_contents = 1;
2305 cs->indirect_info->member_ptr = 1;
2306 cs->indirect_info->guaranteed_unmodified = 1;
2309 return;
2312 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2313 object referenced in the expression is a formal parameter of the caller
2314 FBI->node (described by FBI->info), create a call note for the
2315 statement. */
2317 static void
2318 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2319 gcall *call, tree target)
2321 tree obj = OBJ_TYPE_REF_OBJECT (target);
2322 int index;
2323 HOST_WIDE_INT anc_offset;
2325 if (!flag_devirtualize)
2326 return;
2328 if (TREE_CODE (obj) != SSA_NAME)
2329 return;
2331 struct ipa_node_params *info = fbi->info;
2332 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2334 struct ipa_jump_func jfunc;
2335 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2336 return;
2338 anc_offset = 0;
2339 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2340 gcc_assert (index >= 0);
2341 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2342 call, &jfunc))
2343 return;
2345 else
2347 struct ipa_jump_func jfunc;
2348 gimple *stmt = SSA_NAME_DEF_STMT (obj);
2349 tree expr;
2351 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2352 if (!expr)
2353 return;
2354 index = ipa_get_param_decl_index (info,
2355 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2356 gcc_assert (index >= 0);
2357 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2358 call, &jfunc, anc_offset))
2359 return;
2362 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2363 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2364 ii->offset = anc_offset;
2365 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2366 ii->otr_type = obj_type_ref_class (target);
2367 ii->polymorphic = 1;
2370 /* Analyze a call statement CALL whether and how it utilizes formal parameters
2371 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
2372 containing intermediate information about each formal parameter. */
2374 static void
2375 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2377 tree target = gimple_call_fn (call);
2379 if (!target
2380 || (TREE_CODE (target) != SSA_NAME
2381 && !virtual_method_call_p (target)))
2382 return;
2384 struct cgraph_edge *cs = fbi->node->get_edge (call);
2385 /* If we previously turned the call into a direct call, there is
2386 no need to analyze. */
2387 if (cs && !cs->indirect_unknown_callee)
2388 return;
2390 if (cs->indirect_info->polymorphic && flag_devirtualize)
2392 tree instance;
2393 tree target = gimple_call_fn (call);
2394 ipa_polymorphic_call_context context (current_function_decl,
2395 target, call, &instance);
2397 gcc_checking_assert (cs->indirect_info->otr_type
2398 == obj_type_ref_class (target));
2399 gcc_checking_assert (cs->indirect_info->otr_token
2400 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2402 cs->indirect_info->vptr_changed
2403 = !context.get_dynamic_type (instance,
2404 OBJ_TYPE_REF_OBJECT (target),
2405 obj_type_ref_class (target), call);
2406 cs->indirect_info->context = context;
2409 if (TREE_CODE (target) == SSA_NAME)
2410 ipa_analyze_indirect_call_uses (fbi, call, target);
2411 else if (virtual_method_call_p (target))
2412 ipa_analyze_virtual_call_uses (fbi, call, target);
2416 /* Analyze the call statement STMT with respect to formal parameters (described
2417 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2418 formal parameters are called. */
2420 static void
2421 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2423 if (is_gimple_call (stmt))
2424 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2427 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2428 If OP is a parameter declaration, mark it as used in the info structure
2429 passed in DATA. */
2431 static bool
2432 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2434 struct ipa_node_params *info = (struct ipa_node_params *) data;
2436 op = get_base_address (op);
2437 if (op
2438 && TREE_CODE (op) == PARM_DECL)
2440 int index = ipa_get_param_decl_index (info, op);
2441 gcc_assert (index >= 0);
2442 ipa_set_param_used (info, index, true);
2445 return false;
2448 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2449 the findings in various structures of the associated ipa_node_params
2450 structure, such as parameter flags, notes etc. FBI holds various data about
2451 the function being analyzed. */
2453 static void
2454 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2456 gimple_stmt_iterator gsi;
2457 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2459 gimple *stmt = gsi_stmt (gsi);
2461 if (is_gimple_debug (stmt))
2462 continue;
2464 ipa_analyze_stmt_uses (fbi, stmt);
2465 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2466 visit_ref_for_mod_analysis,
2467 visit_ref_for_mod_analysis,
2468 visit_ref_for_mod_analysis);
2470 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2471 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2472 visit_ref_for_mod_analysis,
2473 visit_ref_for_mod_analysis,
2474 visit_ref_for_mod_analysis);
2477 /* Calculate controlled uses of parameters of NODE. */
2479 static void
2480 ipa_analyze_controlled_uses (struct cgraph_node *node)
2482 struct ipa_node_params *info = IPA_NODE_REF (node);
2484 for (int i = 0; i < ipa_get_param_count (info); i++)
2486 tree parm = ipa_get_param (info, i);
2487 int controlled_uses = 0;
2489 /* For SSA regs see if parameter is used. For non-SSA we compute
2490 the flag during modification analysis. */
2491 if (is_gimple_reg (parm))
2493 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2494 parm);
2495 if (ddef && !has_zero_uses (ddef))
2497 imm_use_iterator imm_iter;
2498 use_operand_p use_p;
2500 ipa_set_param_used (info, i, true);
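/* Only uses that appear in call statements count as controlled; any other
non-debug use means we cannot enumerate all uses of the parameter, so
the count degrades to IPA_UNDESCRIBED_USE. */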
2501 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2502 if (!is_gimple_call (USE_STMT (use_p)))
2504 if (!is_gimple_debug (USE_STMT (use_p)))
2506 controlled_uses = IPA_UNDESCRIBED_USE;
2507 break;
2510 else
2511 controlled_uses++;
2513 else
2514 controlled_uses = 0;
2516 else
2517 controlled_uses = IPA_UNDESCRIBED_USE;
2518 ipa_set_controlled_uses (info, i, controlled_uses);
2522 /* Free stuff in BI. */
2524 static void
2525 free_ipa_bb_info (struct ipa_bb_info *bi)
2527 bi->cg_edges.release ();
2528 bi->param_aa_statuses.release ();
2531 /* Dominator walker driving the analysis. */
2533 class analysis_dom_walker : public dom_walker
2535 public:
2536 analysis_dom_walker (struct ipa_func_body_info *fbi)
2537 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2539 virtual edge before_dom_children (basic_block);
2541 private:
2542 struct ipa_func_body_info *m_fbi;
2545 edge
2546 analysis_dom_walker::before_dom_children (basic_block bb)
2548 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2549 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2550 return NULL;
2553 /* Release body info FBI. */
2555 void
2556 ipa_release_body_info (struct ipa_func_body_info *fbi)
2558 int i;
2559 struct ipa_bb_info *bi;
2561 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2562 free_ipa_bb_info (bi);
2563 fbi->bb_infos.release ();
2566 /* Initialize the array describing properties of formal parameters
2567 of NODE, analyze their uses and compute jump functions associated
2568 with actual arguments of calls from within NODE. */
2570 void
2571 ipa_analyze_node (struct cgraph_node *node)
2573 struct ipa_func_body_info fbi;
2574 struct ipa_node_params *info;
2576 ipa_check_create_node_params ();
2577 ipa_check_create_edge_args ();
2578 info = IPA_NODE_REF (node);
2580 if (info->analysis_done)
2581 return;
2582 info->analysis_done = 1;
2584 if (ipa_func_spec_opts_forbid_analysis_p (node))
2586 for (int i = 0; i < ipa_get_param_count (info); i++)
2588 ipa_set_param_used (info, i, true);
2589 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2591 return;
2594 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2595 push_cfun (func);
2596 calculate_dominance_info (CDI_DOMINATORS);
2597 ipa_initialize_node_params (node);
2598 ipa_analyze_controlled_uses (node);
2600 fbi.node = node;
2601 fbi.info = IPA_NODE_REF (node);
2602 fbi.bb_infos = vNULL;
2603 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2604 fbi.param_count = ipa_get_param_count (info);
2605 fbi.aa_walked = 0;
2607 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2609 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2610 bi->cg_edges.safe_push (cs);
2613 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2615 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2616 bi->cg_edges.safe_push (cs);
2619 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2621 ipa_release_body_info (&fbi);
2622 free_dominance_info (CDI_DOMINATORS);
2623 pop_cfun ();
2626 /* Update the jump functions associated with call graph edge E when the call
2627 graph edge CS is being inlined, assuming that E->caller is already (possibly
2628 indirectly) inlined into CS->callee and that E has not been inlined. */
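/* A sketch of the effect: if E's i-th jump function is a simple pass-through
of E->caller's parameter 2 and CS passes the constant 7 in position 2, the
i-th jump function of E becomes that constant after the inlining; when the
corresponding argument of CS cannot be described, the jump function
degrades to IPA_JF_UNKNOWN instead. */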
2630 static void
2631 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2632 struct cgraph_edge *e)
2634 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2635 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2636 int count = ipa_get_cs_argument_count (args);
2637 int i;
2639 for (i = 0; i < count; i++)
2641 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2642 struct ipa_polymorphic_call_context *dst_ctx
2643 = ipa_get_ith_polymorhic_call_context (args, i);
2645 if (dst->type == IPA_JF_ANCESTOR)
2647 struct ipa_jump_func *src;
2648 int dst_fid = dst->value.ancestor.formal_id;
2649 struct ipa_polymorphic_call_context *src_ctx
2650 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2652 /* A variable number of arguments can cause havoc if we try to access
2653 an argument that does not exist in the inlined edge. So make sure we
2654 don't. */
2655 if (dst_fid >= ipa_get_cs_argument_count (top))
2657 ipa_set_jf_unknown (dst);
2658 continue;
2661 src = ipa_get_ith_jump_func (top, dst_fid);
2663 if (src_ctx && !src_ctx->useless_p ())
2665 struct ipa_polymorphic_call_context ctx = *src_ctx;
2667 /* TODO: Make type preserved safe WRT contexts. */
2668 if (!ipa_get_jf_ancestor_type_preserved (dst))
2669 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2670 ctx.offset_by (dst->value.ancestor.offset);
2671 if (!ctx.useless_p ())
2673 if (!dst_ctx)
2675 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2676 count);
2677 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2680 dst_ctx->combine_with (ctx);
2684 if (src->agg.items
2685 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2687 struct ipa_agg_jf_item *item;
2688 int j;
2690 /* Currently we do not produce clobber aggregate jump functions,
2691 replace with merging when we do. */
2692 gcc_assert (!dst->agg.items);
2694 dst->agg.items = vec_safe_copy (src->agg.items);
2695 dst->agg.by_ref = src->agg.by_ref;
2696 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2697 item->offset -= dst->value.ancestor.offset;
2700 if (src->type == IPA_JF_PASS_THROUGH
2701 && src->value.pass_through.operation == NOP_EXPR)
2703 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2704 dst->value.ancestor.agg_preserved &=
2705 src->value.pass_through.agg_preserved;
2707 else if (src->type == IPA_JF_PASS_THROUGH
2708 && TREE_CODE_CLASS (src->value.pass_through.operation) == tcc_unary)
2710 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2711 dst->value.ancestor.agg_preserved = false;
2713 else if (src->type == IPA_JF_ANCESTOR)
2715 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2716 dst->value.ancestor.offset += src->value.ancestor.offset;
2717 dst->value.ancestor.agg_preserved &=
2718 src->value.ancestor.agg_preserved;
2720 else
2721 ipa_set_jf_unknown (dst);
2723 else if (dst->type == IPA_JF_PASS_THROUGH)
2725 struct ipa_jump_func *src;
2726 /* We must check the range due to calls with a variable number of arguments
2727 and we cannot combine jump functions with operations. */
2728 if (dst->value.pass_through.operation == NOP_EXPR
2729 && (dst->value.pass_through.formal_id
2730 < ipa_get_cs_argument_count (top)))
2732 int dst_fid = dst->value.pass_through.formal_id;
2733 src = ipa_get_ith_jump_func (top, dst_fid);
2734 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2735 struct ipa_polymorphic_call_context *src_ctx
2736 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2738 if (src_ctx && !src_ctx->useless_p ())
2740 struct ipa_polymorphic_call_context ctx = *src_ctx;
2742 /* TODO: Make type preserved safe WRT contexts. */
2743 if (!ipa_get_jf_pass_through_type_preserved (dst))
2744 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2745 if (!ctx.useless_p ())
2747 if (!dst_ctx)
2749 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2750 count);
2751 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2753 dst_ctx->combine_with (ctx);
2756 switch (src->type)
2758 case IPA_JF_UNKNOWN:
2759 ipa_set_jf_unknown (dst);
2760 break;
2761 case IPA_JF_CONST:
2762 ipa_set_jf_cst_copy (dst, src);
2763 break;
2765 case IPA_JF_PASS_THROUGH:
2767 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2768 enum tree_code operation;
2769 operation = ipa_get_jf_pass_through_operation (src);
2771 if (operation == NOP_EXPR)
2773 bool agg_p;
2774 agg_p = dst_agg_p
2775 && ipa_get_jf_pass_through_agg_preserved (src);
2776 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2778 else if (TREE_CODE_CLASS (operation) == tcc_unary)
2779 ipa_set_jf_unary_pass_through (dst, formal_id, operation);
2780 else
2782 tree operand = ipa_get_jf_pass_through_operand (src);
2783 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2784 operation);
2786 break;
2788 case IPA_JF_ANCESTOR:
2790 bool agg_p;
2791 agg_p = dst_agg_p
2792 && ipa_get_jf_ancestor_agg_preserved (src);
2793 ipa_set_ancestor_jf (dst,
2794 ipa_get_jf_ancestor_offset (src),
2795 ipa_get_jf_ancestor_formal_id (src),
2796 agg_p);
2797 break;
2799 default:
2800 gcc_unreachable ();
2803 if (src->agg.items
2804 && (dst_agg_p || !src->agg.by_ref))
2806 /* Currently we do not produce clobber aggregate jump
2807 functions, replace with merging when we do. */
2808 gcc_assert (!dst->agg.items);
2810 dst->agg.by_ref = src->agg.by_ref;
2811 dst->agg.items = vec_safe_copy (src->agg.items);
2814 else
2815 ipa_set_jf_unknown (dst);
2820 /* If TARGET is an addr_expr of a function declaration, make it the
2821 (SPECULATIVE) destination of an indirect edge IE and return the edge.
2822 Otherwise, return NULL. */
2824 struct cgraph_edge *
2825 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2826 bool speculative)
2828 struct cgraph_node *callee;
2829 struct ipa_call_summary *es = ipa_call_summaries->get (ie);
2830 bool unreachable = false;
2832 if (TREE_CODE (target) == ADDR_EXPR)
2833 target = TREE_OPERAND (target, 0);
2834 if (TREE_CODE (target) != FUNCTION_DECL)
2836 target = canonicalize_constructor_val (target, NULL);
2837 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2839 /* Member pointer call that goes through a VMT lookup. */
2840 if (ie->indirect_info->member_ptr
2841 /* Or if target is not an invariant expression and we do not
2842 know if it will evaluate to a function at runtime.
2843 This can happen when folding through &VAR, where &VAR
2844 is IP invariant, but VAR itself is not.
2846 TODO: Revisit this when GCC 5 is branched. It seems that
2847 member_ptr check is not needed and that we may try to fold
2848 the expression and see if VAR is readonly. */
2849 || !is_gimple_ip_invariant (target))
2851 if (dump_enabled_p ())
2853 location_t loc = gimple_location_safe (ie->call_stmt);
2854 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2855 "discovered direct call non-invariant %s\n",
2856 ie->caller->dump_name ());
2858 return NULL;
2862 if (dump_enabled_p ())
2864 location_t loc = gimple_location_safe (ie->call_stmt);
2865 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2866 "discovered direct call to non-function in %s, "
2867 "making it __builtin_unreachable\n",
2868 ie->caller->dump_name ());
2871 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2872 callee = cgraph_node::get_create (target);
2873 unreachable = true;
2875 else
2876 callee = cgraph_node::get (target);
2878 else
2879 callee = cgraph_node::get (target);
2881 /* Because may-edges are not explicitly represented and the vtable may be external,
2882 we may create the first reference to the object in the unit. */
2883 if (!callee || callee->global.inlined_to)
2886 /* We had better ensure we can refer to it.
2887 In the case of static functions we are out of luck, since we have already
2888 removed their bodies. In the case of public functions we may or may
2889 not introduce the reference. */
2890 if (!canonicalize_constructor_val (target, NULL)
2891 || !TREE_PUBLIC (target))
2893 if (dump_file)
2894 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2895 "(%s -> %s) but can not refer to it. Giving up.\n",
2896 ie->caller->dump_name (),
2897 ie->callee->dump_name ());
2898 return NULL;
2900 callee = cgraph_node::get_create (target);
2903 /* If the edge is already speculated. */
2904 if (speculative && ie->speculative)
2906 struct cgraph_edge *e2;
2907 struct ipa_ref *ref;
2908 ie->speculative_call_info (e2, ie, ref);
2909 if (e2->callee->ultimate_alias_target ()
2910 != callee->ultimate_alias_target ())
2912 if (dump_file)
2913 fprintf (dump_file, "ipa-prop: Discovered call to a speculative "
2914 "target (%s -> %s) but the call is already "
2915 "speculated to %s. Giving up.\n",
2916 ie->caller->dump_name (), callee->dump_name (),
2917 e2->callee->dump_name ());
2919 else
2921 if (dump_file)
2922 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2923 "(%s -> %s) this agree with previous speculation.\n",
2924 ie->caller->dump_name (), callee->dump_name ());
2926 return NULL;
2929 if (!dbg_cnt (devirt))
2930 return NULL;
2932 ipa_check_create_node_params ();
2934 /* We cannot make edges to inline clones. It is a bug that someone removed
2935 the cgraph node too early. */
2936 gcc_assert (!callee->global.inlined_to);
2938 if (dump_file && !unreachable)
2940 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2941 "(%s -> %s), for stmt ",
2942 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2943 speculative ? "speculative" : "known",
2944 ie->caller->dump_name (),
2945 callee->dump_name ());
2946 if (ie->call_stmt)
2947 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2948 else
2949 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2951 if (dump_enabled_p ())
2953 location_t loc = gimple_location_safe (ie->call_stmt);
2955 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2956 "converting indirect call in %s to direct call to %s\n",
2957 ie->caller->name (), callee->name ());
2959 if (!speculative)
2961 struct cgraph_edge *orig = ie;
2962 ie = ie->make_direct (callee);
2963 /* If we resolved speculative edge the cost is already up to date
2964 for direct call (adjusted by inline_edge_duplication_hook). */
2965 if (ie == orig)
2967 es = ipa_call_summaries->get (ie);
2968 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2969 - eni_size_weights.call_cost);
2970 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2971 - eni_time_weights.call_cost);
2974 else
2976 if (!callee->can_be_discarded_p ())
2978 cgraph_node *alias;
2979 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2980 if (alias)
2981 callee = alias;
2983 /* make_speculative will update ie's cost to direct call cost. */
2984 ie = ie->make_speculative
2985 (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
2988 return ie;
2991 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
2992 CONSTRUCTOR and return it. Return NULL if the search fails for some
2993 reason. */
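/* REQ_OFFSET is in bits. For instance, assuming a 32-bit int purely for
illustration, for static const int arr[4] = {10, 20, 30, 40}; a request
with REQ_OFFSET 64 on DECL_INITIAL (arr) would return the constant 30. */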
2995 static tree
2996 find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
2998 tree type = TREE_TYPE (constructor);
2999 if (TREE_CODE (type) != ARRAY_TYPE
3000 && TREE_CODE (type) != RECORD_TYPE)
3001 return NULL;
3003 unsigned ix;
3004 tree index, val;
3005 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
3007 HOST_WIDE_INT elt_offset;
3008 if (TREE_CODE (type) == ARRAY_TYPE)
3010 offset_int off;
3011 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
3012 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3014 if (index)
3016 off = wi::to_offset (index);
3017 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
3019 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
3020 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3021 off = wi::sext (off - wi::to_offset (low_bound),
3022 TYPE_PRECISION (TREE_TYPE (index)));
3024 off *= wi::to_offset (unit_size);
3026 else
3027 off = wi::to_offset (unit_size) * ix;
3029 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
3030 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
3031 continue;
3032 elt_offset = off.to_shwi ();
3034 else if (TREE_CODE (type) == RECORD_TYPE)
3036 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
3037 if (DECL_BIT_FIELD (index))
3038 continue;
3039 elt_offset = int_bit_position (index);
3041 else
3042 gcc_unreachable ();
3044 if (elt_offset > req_offset)
3045 return NULL;
3047 if (TREE_CODE (val) == CONSTRUCTOR)
3048 return find_constructor_constant_at_offset (val,
3049 req_offset - elt_offset);
3051 if (elt_offset == req_offset
3052 && is_gimple_reg_type (TREE_TYPE (val))
3053 && is_gimple_ip_invariant (val))
3054 return val;
3056 return NULL;
3059 /* Check whether SCALAR could be used to look up an aggregate interprocedural
3060 invariant from a static constructor and if so, return it. Otherwise return
3061 NULL. */
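/* BY_REF set means SCALAR is expected to be an ADDR_EXPR of the global
variable, i.e. the aggregate is passed by reference; otherwise the
variable itself must have been passed by value. */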
3063 static tree
3064 ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
3066 if (by_ref)
3068 if (TREE_CODE (scalar) != ADDR_EXPR)
3069 return NULL;
3070 scalar = TREE_OPERAND (scalar, 0);
3073 if (!VAR_P (scalar)
3074 || !is_global_var (scalar)
3075 || !TREE_READONLY (scalar)
3076 || !DECL_INITIAL (scalar)
3077 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
3078 return NULL;
3080 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
3083 /* Retrieve value from aggregate jump function AGG or static initializer of
3084 SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
3085 none. BY_REF specifies whether the value has to be passed by reference or
3086 by value. If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
3087 to is set to true if the value comes from an initializer of a constant. */
3089 tree
3090 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar,
3091 HOST_WIDE_INT offset, bool by_ref,
3092 bool *from_global_constant)
3094 struct ipa_agg_jf_item *item;
3095 int i;
3097 if (scalar)
3099 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
3100 if (res)
3102 if (from_global_constant)
3103 *from_global_constant = true;
3104 return res;
3108 if (!agg
3109 || by_ref != agg->by_ref)
3110 return NULL;
3112 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
3113 if (item->offset == offset)
3115 /* Currently we do not have clobber values, return NULL for them once
3116 we do. */
3117 gcc_checking_assert (is_gimple_ip_invariant (item->value));
3118 if (from_global_constant)
3119 *from_global_constant = false;
3120 return item->value;
3122 return NULL;
3125 /* Remove a reference to SYMBOL from the list of references of a node given by
3126 reference description RDESC. Return true if the reference has been
3127 successfully found and removed. */
3129 static bool
3130 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
3132 struct ipa_ref *to_del;
3133 struct cgraph_edge *origin;
3135 origin = rdesc->cs;
3136 if (!origin)
3137 return false;
3138 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3139 origin->lto_stmt_uid);
3140 if (!to_del)
3141 return false;
3143 to_del->remove_reference ();
3144 if (dump_file)
3145 fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
3146 origin->caller->dump_name (), xstrdup_for_dump (symbol->name ()));
3147 return true;
3150 /* If JFUNC has a reference description with refcount different from
3151 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3152 NULL. JFUNC must be a constant jump function. */
3154 static struct ipa_cst_ref_desc *
3155 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3157 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3158 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3159 return rdesc;
3160 else
3161 return NULL;
3164 /* If the value of constant jump function JFUNC is an address of a function
3165 declaration, return the associated call graph node. Otherwise return
3166 NULL. */
3168 static cgraph_node *
3169 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3171 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3172 tree cst = ipa_get_jf_constant (jfunc);
3173 if (TREE_CODE (cst) != ADDR_EXPR
3174 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3175 return NULL;
3177 return cgraph_node::get (TREE_OPERAND (cst, 0));
3181 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3182 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3183 the edge specified in the rdesc. Return false if either the symbol or the
3184 reference could not be found, otherwise return true. */
3186 static bool
3187 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3189 struct ipa_cst_ref_desc *rdesc;
3190 if (jfunc->type == IPA_JF_CONST
3191 && (rdesc = jfunc_rdesc_usable (jfunc))
3192 && --rdesc->refcount == 0)
3194 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3195 if (!symbol)
3196 return false;
3198 return remove_described_reference (symbol, rdesc);
3200 return true;
3203 /* Try to find a destination for indirect edge IE that corresponds to a simple
3204 call or a call of a member function pointer and where the destination is a
3205 pointer formal parameter described by jump function JFUNC. If it can be
3206 determined, return the newly direct edge, otherwise return NULL.
3207 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
3209 static struct cgraph_edge *
3210 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3211 struct ipa_jump_func *jfunc,
3212 struct ipa_node_params *new_root_info)
3214 struct cgraph_edge *cs;
3215 tree target;
3216 bool agg_contents = ie->indirect_info->agg_contents;
3217 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc);
3218 if (agg_contents)
3220 bool from_global_constant;
3221 target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3222 ie->indirect_info->offset,
3223 ie->indirect_info->by_ref,
3224 &from_global_constant);
3225 if (target
3226 && !from_global_constant
3227 && !ie->indirect_info->guaranteed_unmodified)
3228 return NULL;
3230 else
3231 target = scalar;
3232 if (!target)
3233 return NULL;
3234 cs = ipa_make_edge_direct_to_target (ie, target);
3236 if (cs && !agg_contents)
3238 bool ok;
3239 gcc_checking_assert (cs->callee
3240 && (cs != ie
3241 || jfunc->type != IPA_JF_CONST
3242 || !cgraph_node_for_jfunc (jfunc)
3243 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3244 ok = try_decrement_rdesc_refcount (jfunc);
3245 gcc_checking_assert (ok);
3248 return cs;
3251 /* Return the target to be used in cases of impossible devirtualization. IE
3252 and target (the latter can be NULL) are dumped when dumping is enabled. */
3254 tree
3255 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3257 if (dump_file)
3259 if (target)
3260 fprintf (dump_file,
3261 "Type inconsistent devirtualization: %s->%s\n",
3262 ie->caller->dump_name (),
3263 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3264 else
3265 fprintf (dump_file,
3266 "No devirtualization target in %s\n",
3267 ie->caller->dump_name ());
3269 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3270 cgraph_node::get_create (new_target);
3271 return new_target;
3274 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3275 call based on a formal parameter which is described by jump function JFUNC
3276 and if it can be determined, make it direct and return the direct edge.
3277 Otherwise, return NULL. CTX describes the polymorphic context that the
3278 parameter the call is based on brings along with it. */
3280 static struct cgraph_edge *
3281 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3282 struct ipa_jump_func *jfunc,
3283 struct ipa_polymorphic_call_context ctx)
3285 tree target = NULL;
3286 bool speculative = false;
3288 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
3289 return NULL;
3291 gcc_assert (!ie->indirect_info->by_ref);
3293 /* Try to do lookup via known virtual table pointer value. */
3294 if (!ie->indirect_info->vptr_changed
3295 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
3297 tree vtable;
3298 unsigned HOST_WIDE_INT offset;
3299 tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3300 : NULL;
3301 tree t = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3302 ie->indirect_info->offset,
3303 true);
3304 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3306 bool can_refer;
3307 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3308 vtable, offset, &can_refer);
3309 if (can_refer)
3311 if (!t
3312 || (TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
3313 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
3314 || !possible_polymorphic_call_target_p
3315 (ie, cgraph_node::get (t)))
3317 /* Do not speculate builtin_unreachable, it is stupid! */
3318 if (!ie->indirect_info->vptr_changed)
3319 target = ipa_impossible_devirt_target (ie, target);
3320 else
3321 target = NULL;
3323 else
3325 target = t;
3326 speculative = ie->indirect_info->vptr_changed;
3332 ipa_polymorphic_call_context ie_context (ie);
3333 vec <cgraph_node *>targets;
3334 bool final;
3336 ctx.offset_by (ie->indirect_info->offset);
3337 if (ie->indirect_info->vptr_changed)
3338 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3339 ie->indirect_info->otr_type);
3340 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3341 targets = possible_polymorphic_call_targets
3342 (ie->indirect_info->otr_type,
3343 ie->indirect_info->otr_token,
3344 ctx, &final);
3345 if (final && targets.length () <= 1)
3347 speculative = false;
3348 if (targets.length () == 1)
3349 target = targets[0]->decl;
3350 else
3351 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3353 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3354 && !ie->speculative && ie->maybe_hot_p ())
3356 cgraph_node *n;
3357 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3358 ie->indirect_info->otr_token,
3359 ie->indirect_info->context);
3360 if (n)
3362 target = n->decl;
3363 speculative = true;
3367 if (target)
3369 if (!possible_polymorphic_call_target_p
3370 (ie, cgraph_node::get_create (target)))
3372 if (speculative)
3373 return NULL;
3374 target = ipa_impossible_devirt_target (ie, target);
3376 return ipa_make_edge_direct_to_target (ie, target, speculative);
3378 else
3379 return NULL;
3382 /* Update the param called notes associated with NODE when CS is being inlined,
3383 assuming NODE is (potentially indirectly) inlined into CS->callee.
3384 Moreover, if the callee is discovered to be constant, create a new cgraph
3385 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3386 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
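/* For example, if NODE contains an indirect call through its parameter 3 and
the jump function of CS shows that a known function's address is passed in
that position, the indirect edge can be turned into a direct (or
speculative) edge to that function here. */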
3388 static bool
3389 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3390 struct cgraph_node *node,
3391 vec<cgraph_edge *> *new_edges)
3393 struct ipa_edge_args *top;
3394 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3395 struct ipa_node_params *new_root_info;
3396 bool res = false;
3398 ipa_check_create_edge_args ();
3399 top = IPA_EDGE_REF (cs);
3400 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3401 ? cs->caller->global.inlined_to
3402 : cs->caller);
3404 for (ie = node->indirect_calls; ie; ie = next_ie)
3406 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3407 struct ipa_jump_func *jfunc;
3408 int param_index;
3409 cgraph_node *spec_target = NULL;
3411 next_ie = ie->next_callee;
3413 if (ici->param_index == -1)
3414 continue;
3416 /* We must check the range due to calls with a variable number of arguments: */
3417 if (ici->param_index >= ipa_get_cs_argument_count (top))
3419 ici->param_index = -1;
3420 continue;
3423 param_index = ici->param_index;
3424 jfunc = ipa_get_ith_jump_func (top, param_index);
3426 if (ie->speculative)
3428 struct cgraph_edge *de;
3429 struct ipa_ref *ref;
3430 ie->speculative_call_info (de, ie, ref);
3431 spec_target = de->callee;
3434 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3435 new_direct_edge = NULL;
3436 else if (ici->polymorphic)
3438 ipa_polymorphic_call_context ctx;
3439 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3440 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3442 else
3443 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3444 new_root_info);
3445 /* If speculation was removed, then we need to do nothing. */
3446 if (new_direct_edge && new_direct_edge != ie
3447 && new_direct_edge->callee == spec_target)
3449 new_direct_edge->indirect_inlining_edge = 1;
3450 top = IPA_EDGE_REF (cs);
3451 res = true;
3452 if (!new_direct_edge->speculative)
3453 continue;
3455 else if (new_direct_edge)
3457 new_direct_edge->indirect_inlining_edge = 1;
3458 if (new_direct_edge->call_stmt)
3459 new_direct_edge->call_stmt_cannot_inline_p
3460 = !gimple_check_call_matching_types (
3461 new_direct_edge->call_stmt,
3462 new_direct_edge->callee->decl, false);
3463 if (new_edges)
3465 new_edges->safe_push (new_direct_edge);
3466 res = true;
3468 top = IPA_EDGE_REF (cs);
3469 /* If speculative edge was introduced we still need to update
3470 call info of the indirect edge. */
3471 if (!new_direct_edge->speculative)
3472 continue;
3474 if (jfunc->type == IPA_JF_PASS_THROUGH
3475 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3477 if (ici->agg_contents
3478 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3479 && !ici->polymorphic)
3480 ici->param_index = -1;
3481 else
3483 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3484 if (ici->polymorphic
3485 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3486 ici->vptr_changed = true;
3489 else if (jfunc->type == IPA_JF_ANCESTOR)
3491 if (ici->agg_contents
3492 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3493 && !ici->polymorphic)
3494 ici->param_index = -1;
3495 else
3497 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3498 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3499 if (ici->polymorphic
3500 && !ipa_get_jf_ancestor_type_preserved (jfunc))
3501 ici->vptr_changed = true;
3504 else
3505 /* Either we can find a destination for this edge now or never. */
3506 ici->param_index = -1;
3509 return res;
3512 /* Recursively traverse subtree of NODE (including node) made of inlined
3513 cgraph_edges when CS has been inlined and invoke
3514 update_indirect_edges_after_inlining on all nodes and
3515 update_jump_functions_after_inlining on all non-inlined edges that lead out
3516 of this subtree. Newly discovered indirect edges will be added to
3517 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
3518 created. */
3520 static bool
3521 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3522 struct cgraph_node *node,
3523 vec<cgraph_edge *> *new_edges)
3525 struct cgraph_edge *e;
3526 bool res;
3528 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3530 for (e = node->callees; e; e = e->next_callee)
3531 if (!e->inline_failed)
3532 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3533 else
3534 update_jump_functions_after_inlining (cs, e);
3535 for (e = node->indirect_calls; e; e = e->next_callee)
3536 update_jump_functions_after_inlining (cs, e);
3538 return res;
3541 /* Combine two controlled uses counts as done during inlining. */
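/* If either count is IPA_UNDESCRIBED_USE, so is the result; otherwise the
two counts are added and decremented by one, presumably because the use as
the argument of the now-inlined call itself no longer counts separately. */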
3543 static int
3544 combine_controlled_uses_counters (int c, int d)
3546 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3547 return IPA_UNDESCRIBED_USE;
3548 else
3549 return c + d - 1;
3552 /* Propagate the number of controlled uses from CS->callee to the new root of the
3553 tree of inlined nodes. */
3555 static void
3556 propagate_controlled_uses (struct cgraph_edge *cs)
3558 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3559 struct cgraph_node *new_root = cs->caller->global.inlined_to
3560 ? cs->caller->global.inlined_to : cs->caller;
3561 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3562 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3563 int count, i;
3565 count = MIN (ipa_get_cs_argument_count (args),
3566 ipa_get_param_count (old_root_info));
3567 for (i = 0; i < count; i++)
3569 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3570 struct ipa_cst_ref_desc *rdesc;
3572 if (jf->type == IPA_JF_PASS_THROUGH)
3574 int src_idx, c, d;
3575 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3576 c = ipa_get_controlled_uses (new_root_info, src_idx);
3577 d = ipa_get_controlled_uses (old_root_info, i);
3579 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3580 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3581 c = combine_controlled_uses_counters (c, d);
3582 ipa_set_controlled_uses (new_root_info, src_idx, c);
3583 if (c == 0 && new_root_info->ipcp_orig_node)
3585 struct cgraph_node *n;
3586 struct ipa_ref *ref;
3587 tree t = new_root_info->known_csts[src_idx];
3589 if (t && TREE_CODE (t) == ADDR_EXPR
3590 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3591 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3592 && (ref = new_root->find_reference (n, NULL, 0)))
3594 if (dump_file)
3595 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3596 "reference from %s to %s.\n",
3597 new_root->dump_name (),
3598 n->dump_name ());
3599 ref->remove_reference ();
3603 else if (jf->type == IPA_JF_CONST
3604 && (rdesc = jfunc_rdesc_usable (jf)))
3606 int d = ipa_get_controlled_uses (old_root_info, i);
3607 int c = rdesc->refcount;
3608 rdesc->refcount = combine_controlled_uses_counters (c, d);
3609 if (rdesc->refcount == 0)
3611 tree cst = ipa_get_jf_constant (jf);
3612 struct cgraph_node *n;
3613 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3614 && TREE_CODE (TREE_OPERAND (cst, 0))
3615 == FUNCTION_DECL);
3616 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3617 if (n)
3619 struct cgraph_node *clone;
3620 bool ok;
3621 ok = remove_described_reference (n, rdesc);
3622 gcc_checking_assert (ok);
3624 clone = cs->caller;
3625 while (clone->global.inlined_to
3626 && clone != rdesc->cs->caller
3627 && IPA_NODE_REF (clone)->ipcp_orig_node)
3629 struct ipa_ref *ref;
3630 ref = clone->find_reference (n, NULL, 0);
3631 if (ref)
3633 if (dump_file)
3634 fprintf (dump_file, "ipa-prop: Removing "
3635 "cloning-created reference "
3636 "from %s to %s.\n",
3637 clone->dump_name (),
3638 n->dump_name ());
3639 ref->remove_reference ();
3641 clone = clone->callers->caller;
3648 for (i = ipa_get_param_count (old_root_info);
3649 i < ipa_get_cs_argument_count (args);
3650 i++)
3652 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3654 if (jf->type == IPA_JF_CONST)
3656 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3657 if (rdesc)
3658 rdesc->refcount = IPA_UNDESCRIBED_USE;
3660 else if (jf->type == IPA_JF_PASS_THROUGH)
3661 ipa_set_controlled_uses (new_root_info,
3662 jf->value.pass_through.formal_id,
3663 IPA_UNDESCRIBED_USE);
3667 /* Update jump functions and call note functions on inlining the call site CS.
3668 CS is expected to lead to a node already cloned by
3669 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3670 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edge(s) were
3671 created. */
3673 bool
3674 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3675 vec<cgraph_edge *> *new_edges)
3677 bool changed;
3678 /* Do nothing if the preparation phase has not been carried out yet
3679 (i.e. during early inlining). */
3680 if (!ipa_node_params_sum)
3681 return false;
3682 gcc_assert (ipa_edge_args_sum);
3684 propagate_controlled_uses (cs);
3685 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3687 return changed;
3690 /* Ensure that the array of edge argument infos is big enough to accommodate a
3691 structure for all edges, reallocating it if not. Also, allocate the
3692 associated hash tables if they do not already exist. */
3694 void
3695 ipa_check_create_edge_args (void)
3697 if (!ipa_edge_args_sum)
3698 ipa_edge_args_sum
3699 = (new (ggc_cleared_alloc <ipa_edge_args_sum_t> ())
3700 ipa_edge_args_sum_t (symtab, true));
3701 if (!ipa_bits_hash_table)
3702 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3703 if (!ipa_vr_hash_table)
3704 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
3707 /* Frees all dynamically allocated structures that the argument info points
3708 to. */
3710 void
3711 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3713 vec_free (args->jump_functions);
3714 memset (args, 0, sizeof (*args));
3717 /* Free all ipa_edge_args structures. */
3719 void
3720 ipa_free_all_edge_args (void)
3722 if (!ipa_edge_args_sum)
3723 return;
3725 ipa_edge_args_sum->release ();
3726 ipa_edge_args_sum = NULL;
3729 /* Free all ipa_node_params structures. */
3731 void
3732 ipa_free_all_node_params (void)
3734 ipa_node_params_sum->release ();
3735 ipa_node_params_sum = NULL;
3738 /* Grow ipcp_transformations if necessary. Also allocate any necessary hash
3739 tables if they do not already exist. */
3741 void
3742 ipcp_grow_transformations_if_necessary (void)
3744 if (vec_safe_length (ipcp_transformations)
3745 <= (unsigned) symtab->cgraph_max_uid)
3746 vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
3747 if (!ipa_bits_hash_table)
3748 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3749 if (!ipa_vr_hash_table)
3750 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
3753 /* Set the aggregate replacements of NODE to be AGGVALS. */
3755 void
3756 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3757 struct ipa_agg_replacement_value *aggvals)
3759 ipcp_grow_transformations_if_necessary ();
3760 (*ipcp_transformations)[node->uid].agg_values = aggvals;
3763 /* Hook that is called by cgraph.c when an edge is removed. Adjust reference
3764 count data structures accordingly. */
3766 void
3767 ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
3769 if (args->jump_functions)
3771 struct ipa_jump_func *jf;
3772 int i;
3773 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3775 struct ipa_cst_ref_desc *rdesc;
3776 try_decrement_rdesc_refcount (jf);
3777 if (jf->type == IPA_JF_CONST
3778 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3779 && rdesc->cs == cs)
3780 rdesc->cs = NULL;
3785 /* Method invoked when an edge is duplicated. Copy ipa_edge_args and adjust
3786 reference count data structures accordingly. */
3788 void
3789 ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
3790 ipa_edge_args *old_args, ipa_edge_args *new_args)
3792 unsigned int i;
3794 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3795 if (old_args->polymorphic_call_contexts)
3796 new_args->polymorphic_call_contexts
3797 = vec_safe_copy (old_args->polymorphic_call_contexts);
3799 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3801 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3802 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3804 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3806 if (src_jf->type == IPA_JF_CONST)
3808 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3810 if (!src_rdesc)
3811 dst_jf->value.constant.rdesc = NULL;
3812 else if (src->caller == dst->caller)
3814 struct ipa_ref *ref;
3815 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3816 gcc_checking_assert (n);
3817 ref = src->caller->find_reference (n, src->call_stmt,
3818 src->lto_stmt_uid);
3819 gcc_checking_assert (ref);
3820 dst->caller->clone_reference (ref, ref->stmt);
3822 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3823 dst_rdesc->cs = dst;
3824 dst_rdesc->refcount = src_rdesc->refcount;
3825 dst_rdesc->next_duplicate = NULL;
3826 dst_jf->value.constant.rdesc = dst_rdesc;
3828 else if (src_rdesc->cs == src)
3830 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3831 dst_rdesc->cs = dst;
3832 dst_rdesc->refcount = src_rdesc->refcount;
3833 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3834 src_rdesc->next_duplicate = dst_rdesc;
3835 dst_jf->value.constant.rdesc = dst_rdesc;
3837 else
3839 struct ipa_cst_ref_desc *dst_rdesc;
3840 /* This can happen during inlining, when a JFUNC can refer to a
3841 reference taken in a function up in the tree of inline clones.
3842 We need to find the duplicate that refers to our tree of
3843 inline clones. */
3845 gcc_assert (dst->caller->global.inlined_to);
3846 for (dst_rdesc = src_rdesc->next_duplicate;
3847 dst_rdesc;
3848 dst_rdesc = dst_rdesc->next_duplicate)
3850 struct cgraph_node *top;
3851 top = dst_rdesc->cs->caller->global.inlined_to
3852 ? dst_rdesc->cs->caller->global.inlined_to
3853 : dst_rdesc->cs->caller;
3854 if (dst->caller->global.inlined_to == top)
3855 break;
3857 gcc_assert (dst_rdesc);
3858 dst_jf->value.constant.rdesc = dst_rdesc;
3861 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3862 && src->caller == dst->caller)
3864 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3865 ? dst->caller->global.inlined_to : dst->caller;
3866 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3867 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3869 int c = ipa_get_controlled_uses (root_info, idx);
3870 if (c != IPA_UNDESCRIBED_USE)
3872 c++;
3873 ipa_set_controlled_uses (root_info, idx, c);
3879 /* Analyze a function newly added to the callgraph. */
3881 static void
3882 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3884 if (node->has_gimple_body_p ())
3885 ipa_analyze_node (node);
3888 /* Hook that is called by summary when a node is duplicated. */
3890 void
3891 ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
3892 ipa_node_params *old_info,
3893 ipa_node_params *new_info)
3895 ipa_agg_replacement_value *old_av, *new_av;
3897 new_info->descriptors = vec_safe_copy (old_info->descriptors);
3898 new_info->lattices = NULL;
3899 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3900 new_info->known_csts = old_info->known_csts.copy ();
3901 new_info->known_contexts = old_info->known_contexts.copy ();
3903 new_info->analysis_done = old_info->analysis_done;
3904 new_info->node_enqueued = old_info->node_enqueued;
3905 new_info->versionable = old_info->versionable;
3907 old_av = ipa_get_agg_replacements_for_node (src);
3908 if (old_av)
3910 new_av = NULL;
3911 while (old_av)
3913 struct ipa_agg_replacement_value *v;
3915 v = ggc_alloc<ipa_agg_replacement_value> ();
3916 memcpy (v, old_av, sizeof (*v));
3917 v->next = new_av;
3918 new_av = v;
3919 old_av = old_av->next;
3921 ipa_set_node_agg_value_chain (dst, new_av);
3924 ipcp_transformation_summary *src_trans
3925 = ipcp_get_transformation_summary (src);
3927 if (src_trans)
3929 ipcp_grow_transformations_if_necessary ();
3930 src_trans = ipcp_get_transformation_summary (src);
3931 ipcp_transformation_summary *dst_trans
3932 = ipcp_get_transformation_summary (dst);
3934 dst_trans->bits = vec_safe_copy (src_trans->bits);
3936 const vec<ipa_vr, va_gc> *src_vr = src_trans->m_vr;
3937 vec<ipa_vr, va_gc> *&dst_vr
3938 = ipcp_get_transformation_summary (dst)->m_vr;
3939 if (vec_safe_length (src_trans->m_vr) > 0)
3941 vec_safe_reserve_exact (dst_vr, src_vr->length ());
3942 for (unsigned i = 0; i < src_vr->length (); ++i)
3943 dst_vr->quick_push ((*src_vr)[i]);
3948 /* Register our cgraph hooks if they are not already there. */
3950 void
3951 ipa_register_cgraph_hooks (void)
3953 ipa_check_create_node_params ();
3954 ipa_check_create_edge_args ();
3956 function_insertion_hook_holder =
3957 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3960 /* Unregister our cgraph hooks if they have been registered. */
3962 static void
3963 ipa_unregister_cgraph_hooks (void)
3965 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3966 function_insertion_hook_holder = NULL;
3969 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3970 longer needed after ipa-cp. */
3972 void
3973 ipa_free_all_structures_after_ipa_cp (void)
3975 if (!optimize && !in_lto_p)
3977 ipa_free_all_edge_args ();
3978 ipa_free_all_node_params ();
3979 ipcp_sources_pool.release ();
3980 ipcp_cst_values_pool.release ();
3981 ipcp_poly_ctx_values_pool.release ();
3982 ipcp_agg_lattice_pool.release ();
3983 ipa_unregister_cgraph_hooks ();
3984 ipa_refdesc_pool.release ();
3988 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3989 longer needed after indirect inlining. */
3991 void
3992 ipa_free_all_structures_after_iinln (void)
3994 ipa_free_all_edge_args ();
3995 ipa_free_all_node_params ();
3996 ipa_unregister_cgraph_hooks ();
3997 ipcp_sources_pool.release ();
3998 ipcp_cst_values_pool.release ();
3999 ipcp_poly_ctx_values_pool.release ();
4000 ipcp_agg_lattice_pool.release ();
4001 ipa_refdesc_pool.release ();
4004 /* Print the ipa_tree_map data structures of function NODE to F. */
4007 void
4008 ipa_print_node_params (FILE *f, struct cgraph_node *node)
4010 int i, count;
4011 struct ipa_node_params *info;
4013 if (!node->definition)
4014 return;
4015 info = IPA_NODE_REF (node);
4016 fprintf (f, " function %s parameter descriptors:\n", node->dump_name ());
4017 count = ipa_get_param_count (info);
4018 for (i = 0; i < count; i++)
4020 int c;
4022 fprintf (f, " ");
4023 ipa_dump_param (f, info, i);
4024 if (ipa_is_param_used (info, i))
4025 fprintf (f, " used");
4026 c = ipa_get_controlled_uses (info, i);
4027 if (c == IPA_UNDESCRIBED_USE)
4028 fprintf (f, " undescribed_use");
4029 else
4030 fprintf (f, " controlled_uses=%i", c);
4031 fprintf (f, "\n");
4035 /* Print ipa_tree_map data structures of all functions in the
4036 callgraph to F. */
4038 void
4039 ipa_print_all_params (FILE * f)
4041 struct cgraph_node *node;
4043 fprintf (f, "\nFunction parameters:\n");
4044 FOR_EACH_FUNCTION (node)
4045 ipa_print_node_params (f, node);
4048 /* Return a heap allocated vector containing formal parameters of FNDECL. */
4050 vec<tree>
4051 ipa_get_vector_of_formal_parms (tree fndecl)
4053 vec<tree> args;
4054 int count;
4055 tree parm;
4057 gcc_assert (!flag_wpa);
4058 count = count_formal_params (fndecl);
4059 args.create (count);
4060 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
4061 args.quick_push (parm);
4063 return args;
4066 /* Return a heap allocated vector containing types of formal parameters of
4067 function type FNTYPE. */
4069 vec<tree>
4070 ipa_get_vector_of_formal_parm_types (tree fntype)
4072 vec<tree> types;
4073 int count = 0;
4074 tree t;
4076 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
4077 count++;
4079 types.create (count);
4080 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
4081 types.quick_push (TREE_VALUE (t));
4083 return types;
4086 /* Modify the function declaration FNDECL and its type according to the plan in
4087 ADJUSTMENTS. It also sets base fields of individual adjustments structures
4088 to reflect the actual parameters being modified which are determined by the
4089 base_index field. */
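/* An illustrative example (hypothetical declaration, not taken from any
   testcase): given

     void foo (int a, struct S b, struct T *c);

   an adjustment vector consisting of a copy of parameter 0, a removal of
   parameter 1 and a by-value scalar of type int carved out of parameter 2
   leaves FNDECL looking roughly like

     void foo (int a, int SYNTH.3);

   where the second parameter is a freshly built artificial PARM_DECL whose
   name is synthesized from the adjustment's arg_prefix ("SYNTH" when none
   is given).  */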
4091 void
4092 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
4094 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
4095 tree orig_type = TREE_TYPE (fndecl);
4096 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
4098 /* The following test is an ugly hack, some functions simply don't have any
4099 arguments in their type. This is probably a bug but well... */
4100 bool care_for_types = (old_arg_types != NULL_TREE);
4101 bool last_parm_void;
4102 vec<tree> otypes;
4103 if (care_for_types)
4105 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
4106 == void_type_node);
4107 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
4108 if (last_parm_void)
4109 gcc_assert (oparms.length () + 1 == otypes.length ());
4110 else
4111 gcc_assert (oparms.length () == otypes.length ());
4113 else
4115 last_parm_void = false;
4116 otypes.create (0);
4119 int len = adjustments.length ();
4120 tree *link = &DECL_ARGUMENTS (fndecl);
4121 tree new_arg_types = NULL;
4122 for (int i = 0; i < len; i++)
4124 struct ipa_parm_adjustment *adj;
4125 gcc_assert (link);
4127 adj = &adjustments[i];
4128 tree parm;
4129 if (adj->op == IPA_PARM_OP_NEW)
4130 parm = NULL;
4131 else
4132 parm = oparms[adj->base_index];
4133 adj->base = parm;
4135 if (adj->op == IPA_PARM_OP_COPY)
4137 if (care_for_types)
4138 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
4139 new_arg_types);
4140 *link = parm;
4141 link = &DECL_CHAIN (parm);
4143 else if (adj->op != IPA_PARM_OP_REMOVE)
4145 tree new_parm;
4146 tree ptype;
4148 if (adj->by_ref)
4149 ptype = build_pointer_type (adj->type);
4150 else
4152 ptype = adj->type;
4153 if (is_gimple_reg_type (ptype)
4154 && TYPE_MODE (ptype) != BLKmode)
4156 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
4157 if (TYPE_ALIGN (ptype) != malign)
4158 ptype = build_aligned_type (ptype, malign);
4162 if (care_for_types)
4163 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
4165 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
4166 ptype);
4167 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
4168 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
4169 DECL_ARTIFICIAL (new_parm) = 1;
4170 DECL_ARG_TYPE (new_parm) = ptype;
4171 DECL_CONTEXT (new_parm) = fndecl;
4172 TREE_USED (new_parm) = 1;
4173 DECL_IGNORED_P (new_parm) = 1;
4174 layout_decl (new_parm, 0);
4176 if (adj->op == IPA_PARM_OP_NEW)
4177 adj->base = NULL;
4178 else
4179 adj->base = parm;
4180 adj->new_decl = new_parm;
4182 *link = new_parm;
4183 link = &DECL_CHAIN (new_parm);
4187 *link = NULL_TREE;
4189 tree new_reversed = NULL;
4190 if (care_for_types)
4192 new_reversed = nreverse (new_arg_types);
4193 if (last_parm_void)
4195 if (new_reversed)
4196 TREE_CHAIN (new_arg_types) = void_list_node;
4197 else
4198 new_reversed = void_list_node;
4202 /* Use copy_node to preserve as much as possible from original type
4203 (debug info, attribute lists etc.)
4204 The exception is METHOD_TYPEs, which must have a THIS argument;
4205 when we are asked to remove it, we need to build a new FUNCTION_TYPE
4206 instead. */
4207 tree new_type = NULL;
4208 if (TREE_CODE (orig_type) != METHOD_TYPE
4209 || (adjustments[0].op == IPA_PARM_OP_COPY
4210 && adjustments[0].base_index == 0))
4212 new_type = build_distinct_type_copy (orig_type);
4213 TYPE_ARG_TYPES (new_type) = new_reversed;
4215 else
4217 new_type
4218 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
4219 new_reversed));
4220 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
4221 DECL_VINDEX (fndecl) = NULL_TREE;
4224 /* When signature changes, we need to clear builtin info. */
4225 if (DECL_BUILT_IN (fndecl))
4227 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
4228 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
4231 TREE_TYPE (fndecl) = new_type;
4232 DECL_VIRTUAL_P (fndecl) = 0;
4233 DECL_LANG_SPECIFIC (fndecl) = NULL;
4234 otypes.release ();
4235 oparms.release ();
4238 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
4239 If this is a directly recursive call, CS must be NULL. Otherwise it must
4240 contain the corresponding call graph edge. */
4242 void
4243 ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
4244 ipa_parm_adjustment_vec adjustments)
4246 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
4247 vec<tree> vargs;
4248 vec<tree, va_gc> **debug_args = NULL;
4249 gcall *new_stmt;
4250 gimple_stmt_iterator gsi, prev_gsi;
4251 tree callee_decl;
4252 int i, len;
4254 len = adjustments.length ();
4255 vargs.create (len);
4256 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
4257 current_node->remove_stmt_references (stmt);
4259 gsi = gsi_for_stmt (stmt);
4260 prev_gsi = gsi;
4261 gsi_prev (&prev_gsi);
4262 for (i = 0; i < len; i++)
4264 struct ipa_parm_adjustment *adj;
4266 adj = &adjustments[i];
4268 if (adj->op == IPA_PARM_OP_COPY)
4270 tree arg = gimple_call_arg (stmt, adj->base_index);
4272 vargs.quick_push (arg);
4274 else if (adj->op != IPA_PARM_OP_REMOVE)
4276 tree expr, base, off;
4277 location_t loc;
4278 unsigned int deref_align = 0;
4279 bool deref_base = false;
4281 /* We create a new parameter out of the value of the old one, we can
4282 do the following kind of transformations:
4284 - A scalar passed by reference is converted to a scalar passed by
4285 value. (adj->by_ref is false and the type of the original
4286 actual argument is a pointer to a scalar).
4288 - A part of an aggregate is passed instead of the whole aggregate.
4289 The part can be passed either by value or by reference, this is
4290 determined by value of adj->by_ref. Moreover, the code below
4291 handles both situations when the original aggregate is passed by
4292 value (its type is not a pointer) and when it is passed by
4293 reference (it is a pointer to an aggregate).
4295 When the new argument is passed by reference (adj->by_ref is true)
4296 it must be a part of an aggregate and therefore we form it by
4297 simply taking the address of a reference inside the original
4298 aggregate. */
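	      /* As a concrete sketch (made-up names, simplified GIMPLE): if
		 the original call was

		   foo (&s);

		 and the new argument is a scalar at byte offset 4 of S passed
		 by value, the code below ends up emitting roughly

		   tmp_1 = MEM[(int *)&s + 4B];
		   foo (tmp_1);

		 whereas with adj->by_ref set it would instead pass the
		 address of that MEM_REF directly.  */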
4300 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
4301 base = gimple_call_arg (stmt, adj->base_index);
4302 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
4303 : EXPR_LOCATION (base);
4305 if (TREE_CODE (base) != ADDR_EXPR
4306 && POINTER_TYPE_P (TREE_TYPE (base)))
4307 off = build_int_cst (adj->alias_ptr_type,
4308 adj->offset / BITS_PER_UNIT);
4309 else
4311 HOST_WIDE_INT base_offset;
4312 tree prev_base;
4313 bool addrof;
4315 if (TREE_CODE (base) == ADDR_EXPR)
4317 base = TREE_OPERAND (base, 0);
4318 addrof = true;
4320 else
4321 addrof = false;
4322 prev_base = base;
4323 base = get_addr_base_and_unit_offset (base, &base_offset);
4324 /* Aggregate arguments can have non-invariant addresses. */
4325 if (!base)
4327 base = build_fold_addr_expr (prev_base);
4328 off = build_int_cst (adj->alias_ptr_type,
4329 adj->offset / BITS_PER_UNIT);
4331 else if (TREE_CODE (base) == MEM_REF)
4333 if (!addrof)
4335 deref_base = true;
4336 deref_align = TYPE_ALIGN (TREE_TYPE (base));
4338 off = build_int_cst (adj->alias_ptr_type,
4339 base_offset
4340 + adj->offset / BITS_PER_UNIT);
4341 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
4342 off);
4343 base = TREE_OPERAND (base, 0);
4345 else
4347 off = build_int_cst (adj->alias_ptr_type,
4348 base_offset
4349 + adj->offset / BITS_PER_UNIT);
4350 base = build_fold_addr_expr (base);
4354 if (!adj->by_ref)
4356 tree type = adj->type;
4357 unsigned int align;
4358 unsigned HOST_WIDE_INT misalign;
4360 if (deref_base)
4362 align = deref_align;
4363 misalign = 0;
4365 else
4367 get_pointer_alignment_1 (base, &align, &misalign);
4368 if (TYPE_ALIGN (type) > align)
4369 align = TYPE_ALIGN (type);
4371 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
4372 * BITS_PER_UNIT);
4373 misalign = misalign & (align - 1);
4374 if (misalign != 0)
4375 align = least_bit_hwi (misalign);
4376 if (align < TYPE_ALIGN (type))
4377 type = build_aligned_type (type, align);
4378 base = force_gimple_operand_gsi (&gsi, base,
4379 true, NULL, true, GSI_SAME_STMT);
4380 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
4381 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
4382 /* If expr is not a valid gimple call argument emit
4383 a load into a temporary. */
4384 if (is_gimple_reg_type (TREE_TYPE (expr)))
4386 gimple *tem = gimple_build_assign (NULL_TREE, expr);
4387 if (gimple_in_ssa_p (cfun))
4389 gimple_set_vuse (tem, gimple_vuse (stmt));
4390 expr = make_ssa_name (TREE_TYPE (expr), tem);
4392 else
4393 expr = create_tmp_reg (TREE_TYPE (expr));
4394 gimple_assign_set_lhs (tem, expr);
4395 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4398 else
4400 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4401 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
4402 expr = build_fold_addr_expr (expr);
4403 expr = force_gimple_operand_gsi (&gsi, expr,
4404 true, NULL, true, GSI_SAME_STMT);
4406 vargs.quick_push (expr);
4408 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4410 unsigned int ix;
4411 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4412 gimple *def_temp;
4414 arg = gimple_call_arg (stmt, adj->base_index);
4415 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4417 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4418 continue;
4419 arg = fold_convert_loc (gimple_location (stmt),
4420 TREE_TYPE (origin), arg);
4422 if (debug_args == NULL)
4423 debug_args = decl_debug_args_insert (callee_decl);
4424 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4425 if (ddecl == origin)
4427 ddecl = (**debug_args)[ix + 1];
4428 break;
4430 if (ddecl == NULL)
4432 ddecl = make_node (DEBUG_EXPR_DECL);
4433 DECL_ARTIFICIAL (ddecl) = 1;
4434 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4435 SET_DECL_MODE (ddecl, DECL_MODE (origin));
4437 vec_safe_push (*debug_args, origin);
4438 vec_safe_push (*debug_args, ddecl);
4440 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4441 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4445 if (dump_file && (dump_flags & TDF_DETAILS))
4447 fprintf (dump_file, "replacing stmt:");
4448 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0);
4451 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4452 vargs.release ();
4453 if (gimple_call_lhs (stmt))
4454 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4456 gimple_set_block (new_stmt, gimple_block (stmt));
4457 if (gimple_has_location (stmt))
4458 gimple_set_location (new_stmt, gimple_location (stmt));
4459 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4460 gimple_call_copy_flags (new_stmt, stmt);
4461 if (gimple_in_ssa_p (cfun))
4463 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4464 if (gimple_vdef (stmt))
4466 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4467 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4471 if (dump_file && (dump_flags & TDF_DETAILS))
4473 fprintf (dump_file, "with stmt:");
4474 print_gimple_stmt (dump_file, new_stmt, 0);
4475 fprintf (dump_file, "\n");
4477 gsi_replace (&gsi, new_stmt, true);
4478 if (cs)
4479 cs->set_call_stmt (new_stmt);
4482 current_node->record_stmt_references (gsi_stmt (gsi));
4483 gsi_prev (&gsi);
4485 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4488 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4489 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4490 specifies whether the function should care about type incompatibility between the
4491 current and new expressions. If it is false, the function will leave
4492 incompatibility issues to the caller. Return true iff the expression
4493 was modified. */
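/* A minimal illustration (made-up identifiers): if a component D.x of an
   aggregate parameter has been replaced by a new scalar parameter, an
   occurrence of D.x in the body is rewritten to that new PARM_DECL, to a
   dereference of it when the replacement is passed by reference, and is
   wrapped in a VIEW_CONVERT_EXPR when CONVERT is set and the types are not
   trivially compatible.  */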
4495 bool
4496 ipa_modify_expr (tree *expr, bool convert,
4497 ipa_parm_adjustment_vec adjustments)
4499 struct ipa_parm_adjustment *cand
4500 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4501 if (!cand)
4502 return false;
4504 tree src;
4505 if (cand->by_ref)
4507 src = build_simple_mem_ref (cand->new_decl);
4508 REF_REVERSE_STORAGE_ORDER (src) = cand->reverse;
4510 else
4511 src = cand->new_decl;
4513 if (dump_file && (dump_flags & TDF_DETAILS))
4515 fprintf (dump_file, "About to replace expr ");
4516 print_generic_expr (dump_file, *expr);
4517 fprintf (dump_file, " with ");
4518 print_generic_expr (dump_file, src);
4519 fprintf (dump_file, "\n");
4522 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4524 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4525 *expr = vce;
4527 else
4528 *expr = src;
4529 return true;
4532 /* If T is an SSA_NAME, return NULL if it is not a default def or
4533 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4534 the base variable is always returned, regardless if it is a default
4535 def. Return T if it is not an SSA_NAME. */
4537 static tree
4538 get_ssa_base_param (tree t, bool ignore_default_def)
4540 if (TREE_CODE (t) == SSA_NAME)
4542 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4543 return SSA_NAME_VAR (t);
4544 else
4545 return NULL_TREE;
4547 return t;
4550 /* Given an expression, return an adjustment entry specifying the
4551 transformation to be done on EXPR. If no suitable adjustment entry
4552 was found, returns NULL.
4554 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4555 default def, otherwise bail on them.
4557 If CONVERT is non-NULL, this function will set *CONVERT if the
4558 expression provided is a component reference. ADJUSTMENTS is the
4559 adjustments vector. */
4561 ipa_parm_adjustment *
4562 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4563 ipa_parm_adjustment_vec adjustments,
4564 bool ignore_default_def)
4566 if (TREE_CODE (**expr) == BIT_FIELD_REF
4567 || TREE_CODE (**expr) == IMAGPART_EXPR
4568 || TREE_CODE (**expr) == REALPART_EXPR)
4570 *expr = &TREE_OPERAND (**expr, 0);
4571 if (convert)
4572 *convert = true;
4575 HOST_WIDE_INT offset, size, max_size;
4576 bool reverse;
4577 tree base
4578 = get_ref_base_and_extent (**expr, &offset, &size, &max_size, &reverse);
4579 if (!base || size == -1 || max_size == -1)
4580 return NULL;
4582 if (TREE_CODE (base) == MEM_REF)
4584 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4585 base = TREE_OPERAND (base, 0);
4588 base = get_ssa_base_param (base, ignore_default_def);
4589 if (!base || TREE_CODE (base) != PARM_DECL)
4590 return NULL;
4592 struct ipa_parm_adjustment *cand = NULL;
4593 unsigned int len = adjustments.length ();
4594 for (unsigned i = 0; i < len; i++)
4596 struct ipa_parm_adjustment *adj = &adjustments[i];
4598 if (adj->base == base
4599 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4601 cand = adj;
4602 break;
4606 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4607 return NULL;
4608 return cand;
4611 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4613 static bool
4614 index_in_adjustments_multiple_times_p (int base_index,
4615 ipa_parm_adjustment_vec adjustments)
4617 int i, len = adjustments.length ();
4618 bool one = false;
4620 for (i = 0; i < len; i++)
4622 struct ipa_parm_adjustment *adj;
4623 adj = &adjustments[i];
4625 if (adj->base_index == base_index)
4627 if (one)
4628 return true;
4629 else
4630 one = true;
4633 return false;
4637 /* Return adjustments that should have the same effect on function parameters
4638 and call arguments as if they were first changed according to adjustments in
4639 INNER and then by adjustments in OUTER. */
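/* For example (hypothetical parameters): if the original function took
   (a, b, c), INNER removed b and copied a and c, and OUTER, expressed
   against the resulting (a, c), removed its second parameter and copied its
   first, the combined vector describes, in terms of the original signature:
   copy a, remove c, remove b.  Applying it directly to the original
   declaration therefore has the same effect as applying INNER and then
   OUTER in sequence.  */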
4641 ipa_parm_adjustment_vec
4642 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4643 ipa_parm_adjustment_vec outer)
4645 int i, outlen = outer.length ();
4646 int inlen = inner.length ();
4647 int removals = 0;
4648 ipa_parm_adjustment_vec adjustments, tmp;
4650 tmp.create (inlen);
4651 for (i = 0; i < inlen; i++)
4653 struct ipa_parm_adjustment *n;
4654 n = &inner[i];
4656 if (n->op == IPA_PARM_OP_REMOVE)
4657 removals++;
4658 else
4660 /* FIXME: Handling of new arguments is not implemented yet. */
4661 gcc_assert (n->op != IPA_PARM_OP_NEW);
4662 tmp.quick_push (*n);
4666 adjustments.create (outlen + removals);
4667 for (i = 0; i < outlen; i++)
4669 struct ipa_parm_adjustment r;
4670 struct ipa_parm_adjustment *out = &outer[i];
4671 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4673 memset (&r, 0, sizeof (r));
4674 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4675 if (out->op == IPA_PARM_OP_REMOVE)
4677 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4679 r.op = IPA_PARM_OP_REMOVE;
4680 adjustments.quick_push (r);
4682 continue;
4684 else
4686 /* FIXME: Handling of new arguments is not implemented yet. */
4687 gcc_assert (out->op != IPA_PARM_OP_NEW);
4690 r.base_index = in->base_index;
4691 r.type = out->type;
4693 /* FIXME: Create nonlocal value too. */
4695 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4696 r.op = IPA_PARM_OP_COPY;
4697 else if (in->op == IPA_PARM_OP_COPY)
4698 r.offset = out->offset;
4699 else if (out->op == IPA_PARM_OP_COPY)
4700 r.offset = in->offset;
4701 else
4702 r.offset = in->offset + out->offset;
4703 adjustments.quick_push (r);
4706 for (i = 0; i < inlen; i++)
4708 struct ipa_parm_adjustment *n = &inner[i];
4710 if (n->op == IPA_PARM_OP_REMOVE)
4711 adjustments.quick_push (*n);
4714 tmp.release ();
4715 return adjustments;
4718 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a
4719 human-friendly way, assuming they are meant to be applied to FNDECL. */
4721 void
4722 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4723 tree fndecl)
4725 int i, len = adjustments.length ();
4726 bool first = true;
4727 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4729 fprintf (file, "IPA param adjustments: ");
4730 for (i = 0; i < len; i++)
4732 struct ipa_parm_adjustment *adj;
4733 adj = &adjustments[i];
4735 if (!first)
4736 fprintf (file, " ");
4737 else
4738 first = false;
4740 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4741 print_generic_expr (file, parms[adj->base_index]);
4742 if (adj->base)
4744 fprintf (file, ", base: ");
4745 print_generic_expr (file, adj->base);
4747 if (adj->new_decl)
4749 fprintf (file, ", new_decl: ");
4750 print_generic_expr (file, adj->new_decl);
4752 if (adj->new_ssa_base)
4754 fprintf (file, ", new_ssa_base: ");
4755 print_generic_expr (file, adj->new_ssa_base);
4758 if (adj->op == IPA_PARM_OP_COPY)
4759 fprintf (file, ", copy_param");
4760 else if (adj->op == IPA_PARM_OP_REMOVE)
4761 fprintf (file, ", remove_param");
4762 else
4763 fprintf (file, ", offset %li", (long) adj->offset);
4764 if (adj->by_ref)
4765 fprintf (file, ", by_ref");
4766 print_node_brief (file, ", type: ", adj->type, 0);
4767 fprintf (file, "\n");
4769 parms.release ();
4772 /* Dump the AV linked list. */
4774 void
4775 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4777 bool comma = false;
4778 fprintf (f, " Aggregate replacements:");
4779 for (; av; av = av->next)
4781 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4782 av->index, av->offset);
4783 print_generic_expr (f, av->value);
4784 comma = true;
4786 fprintf (f, "\n");
4789 /* Stream out jump function JUMP_FUNC to OB. */
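/* The record written here, and read back by ipa_read_jump_function below,
   consists of: the jump function type, a type-specific payload, the number
   of aggregate items (plus a by-ref flag when that number is nonzero)
   followed by the items themselves, a presence bit plus value/mask for the
   known-bits information, and a presence bit plus type/min/max for the
   value range.  */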
4791 static void
4792 ipa_write_jump_function (struct output_block *ob,
4793 struct ipa_jump_func *jump_func)
4795 struct ipa_agg_jf_item *item;
4796 struct bitpack_d bp;
4797 int i, count;
4799 streamer_write_uhwi (ob, jump_func->type);
4800 switch (jump_func->type)
4802 case IPA_JF_UNKNOWN:
4803 break;
4804 case IPA_JF_CONST:
4805 gcc_assert (
4806 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4807 stream_write_tree (ob, jump_func->value.constant.value, true);
4808 break;
4809 case IPA_JF_PASS_THROUGH:
4810 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4811 if (jump_func->value.pass_through.operation == NOP_EXPR)
4813 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4814 bp = bitpack_create (ob->main_stream);
4815 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4816 streamer_write_bitpack (&bp);
4818 else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
4819 == tcc_unary)
4820 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4821 else
4823 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4824 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4826 break;
4827 case IPA_JF_ANCESTOR:
4828 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4829 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4830 bp = bitpack_create (ob->main_stream);
4831 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4832 streamer_write_bitpack (&bp);
4833 break;
4836 count = vec_safe_length (jump_func->agg.items);
4837 streamer_write_uhwi (ob, count);
4838 if (count)
4840 bp = bitpack_create (ob->main_stream);
4841 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4842 streamer_write_bitpack (&bp);
4845 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4847 streamer_write_uhwi (ob, item->offset);
4848 stream_write_tree (ob, item->value, true);
4851 bp = bitpack_create (ob->main_stream);
4852 bp_pack_value (&bp, !!jump_func->bits, 1);
4853 streamer_write_bitpack (&bp);
4854 if (jump_func->bits)
4856 streamer_write_widest_int (ob, jump_func->bits->value);
4857 streamer_write_widest_int (ob, jump_func->bits->mask);
4859 bp_pack_value (&bp, !!jump_func->m_vr, 1);
4860 streamer_write_bitpack (&bp);
4861 if (jump_func->m_vr)
4863 streamer_write_enum (ob->main_stream, value_range_type,
4864 VR_LAST, jump_func->m_vr->type);
4865 stream_write_tree (ob, jump_func->m_vr->min, true);
4866 stream_write_tree (ob, jump_func->m_vr->max, true);
4870 /* Read in jump function JUMP_FUNC from IB. */
4872 static void
4873 ipa_read_jump_function (struct lto_input_block *ib,
4874 struct ipa_jump_func *jump_func,
4875 struct cgraph_edge *cs,
4876 struct data_in *data_in)
4878 enum jump_func_type jftype;
4879 enum tree_code operation;
4880 int i, count;
4882 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4883 switch (jftype)
4885 case IPA_JF_UNKNOWN:
4886 ipa_set_jf_unknown (jump_func);
4887 break;
4888 case IPA_JF_CONST:
4889 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4890 break;
4891 case IPA_JF_PASS_THROUGH:
4892 operation = (enum tree_code) streamer_read_uhwi (ib);
4893 if (operation == NOP_EXPR)
4895 int formal_id = streamer_read_uhwi (ib);
4896 struct bitpack_d bp = streamer_read_bitpack (ib);
4897 bool agg_preserved = bp_unpack_value (&bp, 1);
4898 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4900 else if (TREE_CODE_CLASS (operation) == tcc_unary)
4902 int formal_id = streamer_read_uhwi (ib);
4903 ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4905 else
4907 tree operand = stream_read_tree (ib, data_in);
4908 int formal_id = streamer_read_uhwi (ib);
4909 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4910 operation);
4912 break;
4913 case IPA_JF_ANCESTOR:
4915 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4916 int formal_id = streamer_read_uhwi (ib);
4917 struct bitpack_d bp = streamer_read_bitpack (ib);
4918 bool agg_preserved = bp_unpack_value (&bp, 1);
4919 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4920 break;
4924 count = streamer_read_uhwi (ib);
4925 vec_alloc (jump_func->agg.items, count);
4926 if (count)
4928 struct bitpack_d bp = streamer_read_bitpack (ib);
4929 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4931 for (i = 0; i < count; i++)
4933 struct ipa_agg_jf_item item;
4934 item.offset = streamer_read_uhwi (ib);
4935 item.value = stream_read_tree (ib, data_in);
4936 jump_func->agg.items->quick_push (item);
4939 struct bitpack_d bp = streamer_read_bitpack (ib);
4940 bool bits_known = bp_unpack_value (&bp, 1);
4941 if (bits_known)
4943 widest_int value = streamer_read_widest_int (ib);
4944 widest_int mask = streamer_read_widest_int (ib);
4945 ipa_set_jfunc_bits (jump_func, value, mask);
4947 else
4948 jump_func->bits = NULL;
4950 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4951 bool vr_known = bp_unpack_value (&vr_bp, 1);
4952 if (vr_known)
4954 enum value_range_type type = streamer_read_enum (ib, value_range_type,
4955 VR_LAST);
4956 tree min = stream_read_tree (ib, data_in);
4957 tree max = stream_read_tree (ib, data_in);
4958 ipa_set_jfunc_vr (jump_func, type, min, max);
4960 else
4961 jump_func->m_vr = NULL;
4964 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4965 relevant to indirect inlining to OB. */
4967 static void
4968 ipa_write_indirect_edge_info (struct output_block *ob,
4969 struct cgraph_edge *cs)
4971 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4972 struct bitpack_d bp;
4974 streamer_write_hwi (ob, ii->param_index);
4975 bp = bitpack_create (ob->main_stream);
4976 bp_pack_value (&bp, ii->polymorphic, 1);
4977 bp_pack_value (&bp, ii->agg_contents, 1);
4978 bp_pack_value (&bp, ii->member_ptr, 1);
4979 bp_pack_value (&bp, ii->by_ref, 1);
4980 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
4981 bp_pack_value (&bp, ii->vptr_changed, 1);
4982 streamer_write_bitpack (&bp);
4983 if (ii->agg_contents || ii->polymorphic)
4984 streamer_write_hwi (ob, ii->offset);
4985 else
4986 gcc_assert (ii->offset == 0);
4988 if (ii->polymorphic)
4990 streamer_write_hwi (ob, ii->otr_token);
4991 stream_write_tree (ob, ii->otr_type, true);
4992 ii->context.stream_out (ob);
4996 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4997 relevant to indirect inlining from IB. */
4999 static void
5000 ipa_read_indirect_edge_info (struct lto_input_block *ib,
5001 struct data_in *data_in,
5002 struct cgraph_edge *cs)
5004 struct cgraph_indirect_call_info *ii = cs->indirect_info;
5005 struct bitpack_d bp;
5007 ii->param_index = (int) streamer_read_hwi (ib);
5008 bp = streamer_read_bitpack (ib);
5009 ii->polymorphic = bp_unpack_value (&bp, 1);
5010 ii->agg_contents = bp_unpack_value (&bp, 1);
5011 ii->member_ptr = bp_unpack_value (&bp, 1);
5012 ii->by_ref = bp_unpack_value (&bp, 1);
5013 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
5014 ii->vptr_changed = bp_unpack_value (&bp, 1);
5015 if (ii->agg_contents || ii->polymorphic)
5016 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
5017 else
5018 ii->offset = 0;
5019 if (ii->polymorphic)
5021 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
5022 ii->otr_type = stream_read_tree (ib, data_in);
5023 ii->context.stream_in (ib, data_in);
5027 /* Stream out NODE info to OB. */
5029 static void
5030 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
5032 int node_ref;
5033 lto_symtab_encoder_t encoder;
5034 struct ipa_node_params *info = IPA_NODE_REF (node);
5035 int j;
5036 struct cgraph_edge *e;
5037 struct bitpack_d bp;
5039 encoder = ob->decl_state->symtab_node_encoder;
5040 node_ref = lto_symtab_encoder_encode (encoder, node);
5041 streamer_write_uhwi (ob, node_ref);
5043 streamer_write_uhwi (ob, ipa_get_param_count (info));
5044 for (j = 0; j < ipa_get_param_count (info); j++)
5045 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
5046 bp = bitpack_create (ob->main_stream);
5047 gcc_assert (info->analysis_done
5048 || ipa_get_param_count (info) == 0);
5049 gcc_assert (!info->node_enqueued);
5050 gcc_assert (!info->ipcp_orig_node);
5051 for (j = 0; j < ipa_get_param_count (info); j++)
5052 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
5053 streamer_write_bitpack (&bp);
5054 for (j = 0; j < ipa_get_param_count (info); j++)
5056 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
5057 stream_write_tree (ob, ipa_get_type (info, j), true);
5059 for (e = node->callees; e; e = e->next_callee)
5061 struct ipa_edge_args *args = IPA_EDGE_REF (e);
5063 streamer_write_uhwi (ob,
5064 ipa_get_cs_argument_count (args) * 2
5065 + (args->polymorphic_call_contexts != NULL));
5066 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
5068 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
5069 if (args->polymorphic_call_contexts != NULL)
5070 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
5073 for (e = node->indirect_calls; e; e = e->next_callee)
5075 struct ipa_edge_args *args = IPA_EDGE_REF (e);
5077 streamer_write_uhwi (ob,
5078 ipa_get_cs_argument_count (args) * 2
5079 + (args->polymorphic_call_contexts != NULL));
5080 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
5082 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
5083 if (args->polymorphic_call_contexts != NULL)
5084 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
5086 ipa_write_indirect_edge_info (ob, e);
5090 /* Stream in NODE info from IB. */
5092 static void
5093 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
5094 struct data_in *data_in)
5096 struct ipa_node_params *info = IPA_NODE_REF (node);
5097 int k;
5098 struct cgraph_edge *e;
5099 struct bitpack_d bp;
5101 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
5103 for (k = 0; k < ipa_get_param_count (info); k++)
5104 (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
5106 bp = streamer_read_bitpack (ib);
5107 if (ipa_get_param_count (info) != 0)
5108 info->analysis_done = true;
5109 info->node_enqueued = false;
5110 for (k = 0; k < ipa_get_param_count (info); k++)
5111 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
5112 for (k = 0; k < ipa_get_param_count (info); k++)
5114 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
5115 (*info->descriptors)[k].decl_or_type = stream_read_tree (ib, data_in);
5117 for (e = node->callees; e; e = e->next_callee)
5119 struct ipa_edge_args *args = IPA_EDGE_REF (e);
5120 int count = streamer_read_uhwi (ib);
5121 bool contexts_computed = count & 1;
5122 count /= 2;
5124 if (!count)
5125 continue;
5126 vec_safe_grow_cleared (args->jump_functions, count);
5127 if (contexts_computed)
5128 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
5130 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
5132 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5133 data_in);
5134 if (contexts_computed)
5135 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
5138 for (e = node->indirect_calls; e; e = e->next_callee)
5140 struct ipa_edge_args *args = IPA_EDGE_REF (e);
5141 int count = streamer_read_uhwi (ib);
5142 bool contexts_computed = count & 1;
5143 count /= 2;
5145 if (count)
5147 vec_safe_grow_cleared (args->jump_functions, count);
5148 if (contexts_computed)
5149 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
5150 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
5152 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5153 data_in);
5154 if (contexts_computed)
5155 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
5158 ipa_read_indirect_edge_info (ib, data_in, e);
5162 /* Write jump functions for all analyzed functions in the current partition. */
5164 void
5165 ipa_prop_write_jump_functions (void)
5167 struct cgraph_node *node;
5168 struct output_block *ob;
5169 unsigned int count = 0;
5170 lto_symtab_encoder_iterator lsei;
5171 lto_symtab_encoder_t encoder;
5173 if (!ipa_node_params_sum || !ipa_edge_args_sum)
5174 return;
5176 ob = create_output_block (LTO_section_jump_functions);
5177 encoder = ob->decl_state->symtab_node_encoder;
5178 ob->symbol = NULL;
5179 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5180 lsei_next_function_in_partition (&lsei))
5182 node = lsei_cgraph_node (lsei);
5183 if (node->has_gimple_body_p ()
5184 && IPA_NODE_REF (node) != NULL)
5185 count++;
5188 streamer_write_uhwi (ob, count);
5190 /* Process all of the functions. */
5191 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5192 lsei_next_function_in_partition (&lsei))
5194 node = lsei_cgraph_node (lsei);
5195 if (node->has_gimple_body_p ()
5196 && IPA_NODE_REF (node) != NULL)
5197 ipa_write_node_info (ob, node);
5199 streamer_write_char_stream (ob->main_stream, 0);
5200 produce_asm (ob, NULL);
5201 destroy_output_block (ob);
5204 /* Read section in file FILE_DATA of length LEN with data DATA. */
5206 static void
5207 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
5208 size_t len)
5210 const struct lto_function_header *header =
5211 (const struct lto_function_header *) data;
5212 const int cfg_offset = sizeof (struct lto_function_header);
5213 const int main_offset = cfg_offset + header->cfg_size;
5214 const int string_offset = main_offset + header->main_size;
5215 struct data_in *data_in;
5216 unsigned int i;
5217 unsigned int count;
5219 lto_input_block ib_main ((const char *) data + main_offset,
5220 header->main_size, file_data->mode_table);
5222 data_in =
5223 lto_data_in_create (file_data, (const char *) data + string_offset,
5224 header->string_size, vNULL);
5225 count = streamer_read_uhwi (&ib_main);
5227 for (i = 0; i < count; i++)
5229 unsigned int index;
5230 struct cgraph_node *node;
5231 lto_symtab_encoder_t encoder;
5233 index = streamer_read_uhwi (&ib_main);
5234 encoder = file_data->symtab_node_encoder;
5235 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5236 index));
5237 gcc_assert (node->definition);
5238 ipa_read_node_info (&ib_main, node, data_in);
5240 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5241 len);
5242 lto_data_in_delete (data_in);
5245 /* Read ipcp jump functions. */
5247 void
5248 ipa_prop_read_jump_functions (void)
5250 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5251 struct lto_file_decl_data *file_data;
5252 unsigned int j = 0;
5254 ipa_check_create_node_params ();
5255 ipa_check_create_edge_args ();
5256 ipa_register_cgraph_hooks ();
5258 while ((file_data = file_data_vec[j++]))
5260 size_t len;
5261 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
5263 if (data)
5264 ipa_prop_read_section (file_data, data, len);
5268 void
5269 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
5271 int node_ref;
5272 unsigned int count = 0;
5273 lto_symtab_encoder_t encoder;
5274 struct ipa_agg_replacement_value *aggvals, *av;
5276 aggvals = ipa_get_agg_replacements_for_node (node);
5277 encoder = ob->decl_state->symtab_node_encoder;
5278 node_ref = lto_symtab_encoder_encode (encoder, node);
5279 streamer_write_uhwi (ob, node_ref);
5281 for (av = aggvals; av; av = av->next)
5282 count++;
5283 streamer_write_uhwi (ob, count);
5285 for (av = aggvals; av; av = av->next)
5287 struct bitpack_d bp;
5289 streamer_write_uhwi (ob, av->offset);
5290 streamer_write_uhwi (ob, av->index);
5291 stream_write_tree (ob, av->value, true);
5293 bp = bitpack_create (ob->main_stream);
5294 bp_pack_value (&bp, av->by_ref, 1);
5295 streamer_write_bitpack (&bp);
5298 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5299 if (ts && vec_safe_length (ts->m_vr) > 0)
5301 count = ts->m_vr->length ();
5302 streamer_write_uhwi (ob, count);
5303 for (unsigned i = 0; i < count; ++i)
5305 struct bitpack_d bp;
5306 ipa_vr *parm_vr = &(*ts->m_vr)[i];
5307 bp = bitpack_create (ob->main_stream);
5308 bp_pack_value (&bp, parm_vr->known, 1);
5309 streamer_write_bitpack (&bp);
5310 if (parm_vr->known)
5312 streamer_write_enum (ob->main_stream, value_range_type,
5313 VR_LAST, parm_vr->type);
5314 streamer_write_wide_int (ob, parm_vr->min);
5315 streamer_write_wide_int (ob, parm_vr->max);
5319 else
5320 streamer_write_uhwi (ob, 0);
5322 if (ts && vec_safe_length (ts->bits) > 0)
5324 count = ts->bits->length ();
5325 streamer_write_uhwi (ob, count);
5327 for (unsigned i = 0; i < count; ++i)
5329 const ipa_bits *bits_jfunc = (*ts->bits)[i];
5330 struct bitpack_d bp = bitpack_create (ob->main_stream);
5331 bp_pack_value (&bp, !!bits_jfunc, 1);
5332 streamer_write_bitpack (&bp);
5333 if (bits_jfunc)
5335 streamer_write_widest_int (ob, bits_jfunc->value);
5336 streamer_write_widest_int (ob, bits_jfunc->mask);
5340 else
5341 streamer_write_uhwi (ob, 0);
5344 /* Stream in the aggregate value replacement chain for NODE from IB. */
5346 static void
5347 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
5348 data_in *data_in)
5350 struct ipa_agg_replacement_value *aggvals = NULL;
5351 unsigned int count, i;
5353 count = streamer_read_uhwi (ib);
5354 for (i = 0; i < count; i++)
5356 struct ipa_agg_replacement_value *av;
5357 struct bitpack_d bp;
5359 av = ggc_alloc<ipa_agg_replacement_value> ();
5360 av->offset = streamer_read_uhwi (ib);
5361 av->index = streamer_read_uhwi (ib);
5362 av->value = stream_read_tree (ib, data_in);
5363 bp = streamer_read_bitpack (ib);
5364 av->by_ref = bp_unpack_value (&bp, 1);
5365 av->next = aggvals;
5366 aggvals = av;
5368 ipa_set_node_agg_value_chain (node, aggvals);
5370 count = streamer_read_uhwi (ib);
5371 if (count > 0)
5373 ipcp_grow_transformations_if_necessary ();
5375 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5376 vec_safe_grow_cleared (ts->m_vr, count);
5377 for (i = 0; i < count; i++)
5379 ipa_vr *parm_vr;
5380 parm_vr = &(*ts->m_vr)[i];
5381 struct bitpack_d bp;
5382 bp = streamer_read_bitpack (ib);
5383 parm_vr->known = bp_unpack_value (&bp, 1);
5384 if (parm_vr->known)
5386 parm_vr->type = streamer_read_enum (ib, value_range_type,
5387 VR_LAST);
5388 parm_vr->min = streamer_read_wide_int (ib);
5389 parm_vr->max = streamer_read_wide_int (ib);
5393 count = streamer_read_uhwi (ib);
5394 if (count > 0)
5396 ipcp_grow_transformations_if_necessary ();
5398 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5399 vec_safe_grow_cleared (ts->bits, count);
5401 for (i = 0; i < count; i++)
5403 struct bitpack_d bp = streamer_read_bitpack (ib);
5404 bool known = bp_unpack_value (&bp, 1);
5405 if (known)
5407 ipa_bits *bits
5408 = ipa_get_ipa_bits_for_value (streamer_read_widest_int (ib),
5409 streamer_read_widest_int (ib));
5410 (*ts->bits)[i] = bits;
5416 /* Write all aggregate replacements for nodes in the current partition. */
5418 void
5419 ipcp_write_transformation_summaries (void)
5421 struct cgraph_node *node;
5422 struct output_block *ob;
5423 unsigned int count = 0;
5424 lto_symtab_encoder_iterator lsei;
5425 lto_symtab_encoder_t encoder;
5427 ob = create_output_block (LTO_section_ipcp_transform);
5428 encoder = ob->decl_state->symtab_node_encoder;
5429 ob->symbol = NULL;
5430 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5431 lsei_next_function_in_partition (&lsei))
5433 node = lsei_cgraph_node (lsei);
5434 if (node->has_gimple_body_p ())
5435 count++;
5438 streamer_write_uhwi (ob, count);
5440 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5441 lsei_next_function_in_partition (&lsei))
5443 node = lsei_cgraph_node (lsei);
5444 if (node->has_gimple_body_p ())
5445 write_ipcp_transformation_info (ob, node);
5447 streamer_write_char_stream (ob->main_stream, 0);
5448 produce_asm (ob, NULL);
5449 destroy_output_block (ob);
5452 /* Read replacements section in file FILE_DATA of length LEN with data
5453 DATA. */
5455 static void
5456 read_replacements_section (struct lto_file_decl_data *file_data,
5457 const char *data,
5458 size_t len)
5460 const struct lto_function_header *header =
5461 (const struct lto_function_header *) data;
5462 const int cfg_offset = sizeof (struct lto_function_header);
5463 const int main_offset = cfg_offset + header->cfg_size;
5464 const int string_offset = main_offset + header->main_size;
5465 struct data_in *data_in;
5466 unsigned int i;
5467 unsigned int count;
5469 lto_input_block ib_main ((const char *) data + main_offset,
5470 header->main_size, file_data->mode_table);
5472 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5473 header->string_size, vNULL);
5474 count = streamer_read_uhwi (&ib_main);
5476 for (i = 0; i < count; i++)
5478 unsigned int index;
5479 struct cgraph_node *node;
5480 lto_symtab_encoder_t encoder;
5482 index = streamer_read_uhwi (&ib_main);
5483 encoder = file_data->symtab_node_encoder;
5484 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5485 index));
5486 gcc_assert (node->definition);
5487 read_ipcp_transformation_info (&ib_main, node, data_in);
5489 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
5490 len);
5491 lto_data_in_delete (data_in);
5494 /* Read IPA-CP aggregate replacements. */
5496 void
5497 ipcp_read_transformation_summaries (void)
5499 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5500 struct lto_file_decl_data *file_data;
5501 unsigned int j = 0;
5503 while ((file_data = file_data_vec[j++]))
5505 size_t len;
5506 const char *data = lto_get_section_data (file_data,
5507 LTO_section_ipcp_transform,
5508 NULL, &len);
5509 if (data)
5510 read_replacements_section (file_data, data, len);
5514 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5515 NODE. */
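/* For instance (hypothetical indices): with combined_args_to_skip containing
   only bit 1, replacement values with indices 0, 2 and 3 are renumbered to
   0, 1 and 2, matching the positions the surviving parameters occupy in the
   clone.  */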
5517 static void
5518 adjust_agg_replacement_values (struct cgraph_node *node,
5519 struct ipa_agg_replacement_value *aggval)
5521 struct ipa_agg_replacement_value *v;
5522 int i, c = 0, d = 0, *adj;
5524 if (!node->clone.combined_args_to_skip)
5525 return;
5527 for (v = aggval; v; v = v->next)
5529 gcc_assert (v->index >= 0);
5530 if (c < v->index)
5531 c = v->index;
5533 c++;
5535 adj = XALLOCAVEC (int, c);
5536 for (i = 0; i < c; i++)
5537 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5539 adj[i] = -1;
5540 d++;
5542 else
5543 adj[i] = i - d;
5545 for (v = aggval; v; v = v->next)
5546 v->index = adj[v->index];
5549 /* Dominator walker driving the ipcp modification phase. */
5551 class ipcp_modif_dom_walker : public dom_walker
5553 public:
5554 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
5555 vec<ipa_param_descriptor, va_gc> *descs,
5556 struct ipa_agg_replacement_value *av,
5557 bool *sc, bool *cc)
5558 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5559 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5561 virtual edge before_dom_children (basic_block);
5563 private:
5564 struct ipa_func_body_info *m_fbi;
5565 vec<ipa_param_descriptor, va_gc> *m_descriptors;
5566 struct ipa_agg_replacement_value *m_aggval;
5567 bool *m_something_changed, *m_cfg_changed;
5570 edge
5571 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5573 gimple_stmt_iterator gsi;
5574 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5576 struct ipa_agg_replacement_value *v;
5577 gimple *stmt = gsi_stmt (gsi);
5578 tree rhs, val, t;
5579 HOST_WIDE_INT offset, size;
5580 int index;
5581 bool by_ref, vce;
5583 if (!gimple_assign_load_p (stmt))
5584 continue;
5585 rhs = gimple_assign_rhs1 (stmt);
5586 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5587 continue;
5589 vce = false;
5590 t = rhs;
5591 while (handled_component_p (t))
5593 /* V_C_E can do things like convert an array of integers to one
5594 bigger integer and similar things we do not handle below. */
5595 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5597 vce = true;
5598 break;
5600 t = TREE_OPERAND (t, 0);
5602 if (vce)
5603 continue;
5605 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
5606 &offset, &size, &by_ref))
5607 continue;
5608 for (v = m_aggval; v; v = v->next)
5609 if (v->index == index
5610 && v->offset == offset)
5611 break;
5612 if (!v
5613 || v->by_ref != by_ref
5614 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5615 continue;
5617 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5618 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5620 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5621 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5622 else if (TYPE_SIZE (TREE_TYPE (rhs))
5623 == TYPE_SIZE (TREE_TYPE (v->value)))
5624 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5625 else
5627 if (dump_file)
5629 fprintf (dump_file, " const ");
5630 print_generic_expr (dump_file, v->value);
5631 fprintf (dump_file, " can't be converted to type of ");
5632 print_generic_expr (dump_file, rhs);
5633 fprintf (dump_file, "\n");
5635 continue;
5638 else
5639 val = v->value;
5641 if (dump_file && (dump_flags & TDF_DETAILS))
5643 fprintf (dump_file, "Modifying stmt:\n ");
5644 print_gimple_stmt (dump_file, stmt, 0);
5646 gimple_assign_set_rhs_from_tree (&gsi, val);
5647 update_stmt (stmt);
5649 if (dump_file && (dump_flags & TDF_DETAILS))
5651 fprintf (dump_file, "into:\n ");
5652 print_gimple_stmt (dump_file, stmt, 0);
5653 fprintf (dump_file, "\n");
5656 *m_something_changed = true;
5657 if (maybe_clean_eh_stmt (stmt)
5658 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5659 *m_cfg_changed = true;
5661 return NULL;
5664 /* Update bits info of formal parameters as described in
5665 ipcp_transformation_summary. */
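/* A small worked example of the two cases below (numbers are illustrative):
   for an integral parameter, mask | value is recorded as the nonzero bits
   of its default definition; for a pointer parameter whose mask has its low
   three bits clear (so mask & -mask == 8), an alignment of 8 together with
   a misalignment of value & 7 is recorded on its default definition.  */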
5667 static void
5668 ipcp_update_bits (struct cgraph_node *node)
5670 tree parm = DECL_ARGUMENTS (node->decl);
5671 tree next_parm = parm;
5672 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5674 if (!ts || vec_safe_length (ts->bits) == 0)
5675 return;
5677 vec<ipa_bits *, va_gc> &bits = *ts->bits;
5678 unsigned count = bits.length ();
5680 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5682 if (node->clone.combined_args_to_skip
5683 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5684 continue;
5686 gcc_checking_assert (parm);
5687 next_parm = DECL_CHAIN (parm);
5689 if (!bits[i]
5690 || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
5691 || POINTER_TYPE_P (TREE_TYPE (parm)))
5692 || !is_gimple_reg (parm))
5693 continue;
5695 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5696 if (!ddef)
5697 continue;
5699 if (dump_file)
5701 fprintf (dump_file, "Adjusting mask for param %u to ", i);
5702 print_hex (bits[i]->mask, dump_file);
5703 fprintf (dump_file, "\n");
5706 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5708 unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
5709 signop sgn = TYPE_SIGN (TREE_TYPE (ddef));
5711 wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
5712 | wide_int::from (bits[i]->value, prec, sgn);
5713 set_nonzero_bits (ddef, nonzero_bits);
5715 else
5717 unsigned tem = bits[i]->mask.to_uhwi ();
5718 unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
5719 unsigned align = tem & -tem;
5720 unsigned misalign = bitpos & (align - 1);
5722 if (align > 1)
5724 if (dump_file)
5725 fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);
5727 unsigned old_align, old_misalign;
5728 struct ptr_info_def *pi = get_ptr_info (ddef);
5729 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5731 if (old_known
5732 && old_align > align)
5734 if (dump_file)
5736 fprintf (dump_file, "But alignment was already %u.\n", old_align);
5737 if ((old_misalign & (align - 1)) != misalign)
5738 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5739 old_misalign, misalign);
5741 continue;
5744 if (old_known
5745 && ((misalign & (old_align - 1)) != old_misalign)
5746 && dump_file)
5747 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5748 old_misalign, misalign);
5750 set_ptr_info_alignment (pi, align, misalign);
5756 /* Update value range of formal parameters as described in
5757 ipcp_transformation_summary. */
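/* Illustrative outcomes of the loop below: an integral parameter with a
   recorded range such as [1, 10] gets that range attached to its default
   definition via set_range_info, while a pointer parameter whose recorded
   range is the anti-range ~[0, 0] is simply marked non-null.  */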
5759 static void
5760 ipcp_update_vr (struct cgraph_node *node)
5762 tree fndecl = node->decl;
5763 tree parm = DECL_ARGUMENTS (fndecl);
5764 tree next_parm = parm;
5765 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5766 if (!ts || vec_safe_length (ts->m_vr) == 0)
5767 return;
5768 const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
5769 unsigned count = vr.length ();
5771 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5773 if (node->clone.combined_args_to_skip
5774 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5775 continue;
5776 gcc_checking_assert (parm);
5777 next_parm = DECL_CHAIN (parm);
5778 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5780 if (!ddef || !is_gimple_reg (parm))
5781 continue;
5783 if (vr[i].known
5784 && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
5786 tree type = TREE_TYPE (ddef);
5787 unsigned prec = TYPE_PRECISION (type);
5788 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5790 if (dump_file)
5792 fprintf (dump_file, "Setting value range of param %u ", i);
5793 fprintf (dump_file, "%s[",
5794 (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
5795 print_decs (vr[i].min, dump_file);
5796 fprintf (dump_file, ", ");
5797 print_decs (vr[i].max, dump_file);
5798 fprintf (dump_file, "]\n");
5800 set_range_info (ddef, vr[i].type,
5801 wide_int_storage::from (vr[i].min, prec,
5802 TYPE_SIGN (type)),
5803 wide_int_storage::from (vr[i].max, prec,
5804 TYPE_SIGN (type)));
5806 else if (POINTER_TYPE_P (TREE_TYPE (ddef))
5807 && vr[i].type == VR_ANTI_RANGE
5808 && wi::eq_p (vr[i].min, 0)
5809 && wi::eq_p (vr[i].max, 0))
5811 if (dump_file)
5812 fprintf (dump_file, "Setting nonnull for %u\n", i);
5813 set_ptr_nonnull (ddef);
5819 /* IPCP transformation phase doing propagation of aggregate values. */
5821 unsigned int
5822 ipcp_transform_function (struct cgraph_node *node)
5824 vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
5825 struct ipa_func_body_info fbi;
5826 struct ipa_agg_replacement_value *aggval;
5827 int param_count;
5828 bool cfg_changed = false, something_changed = false;
5830 gcc_checking_assert (cfun);
5831 gcc_checking_assert (current_function_decl);
5833 if (dump_file)
5834 fprintf (dump_file, "Modification phase of node %s\n",
5835 node->dump_name ());
5837 ipcp_update_bits (node);
5838 ipcp_update_vr (node);
5839 aggval = ipa_get_agg_replacements_for_node (node);
5840 if (!aggval)
5841 return 0;
5842 param_count = count_formal_params (node->decl);
5843 if (param_count == 0)
5844 return 0;
5845 adjust_agg_replacement_values (node, aggval);
5846 if (dump_file)
5847 ipa_dump_agg_replacement_values (dump_file, aggval);
5849 fbi.node = node;
5850 fbi.info = NULL;
5851 fbi.bb_infos = vNULL;
5852 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5853 fbi.param_count = param_count;
5854 fbi.aa_walked = 0;
5856 vec_safe_grow_cleared (descriptors, param_count);
5857 ipa_populate_param_decls (node, *descriptors);
5858 calculate_dominance_info (CDI_DOMINATORS);
5859 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5860 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5862 int i;
5863 struct ipa_bb_info *bi;
5864 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5865 free_ipa_bb_info (bi);
5866 fbi.bb_infos.release ();
5867 free_dominance_info (CDI_DOMINATORS);
5868 (*ipcp_transformations)[node->uid].agg_values = NULL;
5869 (*ipcp_transformations)[node->uid].bits = NULL;
5870 (*ipcp_transformations)[node->uid].m_vr = NULL;
5872 vec_free (descriptors);
5874 if (!something_changed)
5875 return 0;
5876 else if (cfg_changed)
5877 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5878 else
5879 return TODO_update_ssa_only_virtuals;
5882 #include "gt-ipa-prop.h"