/* Interprocedural analyses.
   Copyright (C) 2005-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "calls.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-inline.h"
#include "ipa-fnsummary.h"
#include "gimple-pretty-print.h"
#include "params.h"
#include "ipa-utils.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"
/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;

function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;

/* Edge summary for IPA-CP edge information.  */
ipa_edge_args_sum_t *ipa_edge_args_sum;

/* Traits for a hash table for reusing already existing ipa_bits.  */

struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
{
  typedef ipa_bits *value_type;
  typedef ipa_bits *compare_type;
  static hashval_t
  hash (const ipa_bits *p)
  {
    hashval_t t = (hashval_t) p->value.to_shwi ();
    return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
  }
  static bool
  equal (const ipa_bits *a, const ipa_bits *b)
  {
    return a->value == b->value && a->mask == b->mask;
  }
  static void
  mark_empty (ipa_bits *&p)
  {
    p = NULL;
  }
  static bool
  is_empty (const ipa_bits *p)
  {
    return p == NULL;
  }
  static bool
  is_deleted (const ipa_bits *p)
  {
    return p == reinterpret_cast<const ipa_bits *> (1);
  }
  static void
  mark_deleted (ipa_bits *&p)
  {
    p = reinterpret_cast<ipa_bits *> (1);
  }
};

/* Hash table for avoiding repeated allocations of equal ipa_bits.  */
static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
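
/* For illustration: ipa_bits follows the CCP-style convention in which a set
   bit in MASK means that the corresponding bit of the value is unknown.  E.g.
   a pointer known to be aligned to 8 bytes can be described by VALUE == 0 and
   MASK == ~7, so any two jump functions carrying that fact hash identically
   and end up sharing a single GC-allocated ipa_bits through this table.  */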
/* Traits for a hash table for reusing value_ranges used for IPA.  Note that
   the equiv bitmap is not hashed and is expected to be NULL.  */

struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range_base *>
{
  typedef value_range_base *value_type;
  typedef value_range_base *compare_type;
  static hashval_t
  hash (const value_range_base *p)
  {
    inchash::hash hstate (p->kind ());
    inchash::add_expr (p->min (), hstate);
    inchash::add_expr (p->max (), hstate);
    return hstate.end ();
  }
  static bool
  equal (const value_range_base *a, const value_range_base *b)
  {
    return a->equal_p (*b);
  }
  static void
  mark_empty (value_range_base *&p)
  {
    p = NULL;
  }
  static bool
  is_empty (const value_range_base *p)
  {
    return p == NULL;
  }
  static bool
  is_deleted (const value_range_base *p)
  {
    return p == reinterpret_cast<const value_range_base *> (1);
  }
  static void
  mark_deleted (value_range_base *&p)
  {
    p = reinterpret_cast<value_range_base *> (1);
  }
};

/* Hash table for avoiding repeated allocations of equal value_ranges.  */
static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;

/* Holders of ipa cgraph hooks: */
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize)
	 || !opt_for_fn (node->decl, flag_ipa_cp);
}

/* Return index of the formal whose tree is PTREE in the vector of parameter
   DESCRIPTORS, or -1 if it is not there.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
			    tree ptree)
{
  int i, count;

  count = vec_safe_length (descriptors);
  for (i = 0; i < count; i++)
    if ((*descriptors)[i].decl_or_type == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
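
/* For example, in a function "void foo (int a, int b)",
   ipa_get_param_decl_index returns 0 for the PARM_DECL of A, 1 for the
   PARM_DECL of B and -1 for any tree that is not among the formals.  */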
/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor, va_gc> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl_or_type = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
							     true);
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Dump a textual representation of the Ith formal parameter of the function
   corresponding to INFO into FILE.  Note there is no setter function as the
   descriptor array is built just once using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if ((*info->descriptors)[i].decl_or_type)
    {
      fprintf (file, " ");
      print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
    }
}

/* If necessary, allocate vector of parameter descriptors in info of NODE.
   Return true if they were allocated, false if not.  */

static bool
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors && param_count)
    {
      vec_safe_grow_cleared (info->descriptors, param_count);
      return true;
    }
  else
    return false;
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors
      && ipa_alloc_node_params (node, count_formal_params (node->decl)))
    ipa_populate_param_decls (node, *info->descriptors);
}
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f, jump_func->value.pass_through.operand);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value);
		}
	      fprintf (f, "\n");
	    }
	}

      struct ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, "         Context: ");
	  ctx->dump (f);
	}

      if (jump_func->bits)
	{
	  fprintf (f, "         value: ");
	  print_hex (jump_func->bits->value, f);
	  fprintf (f, ", mask: ");
	  print_hex (jump_func->bits->mask, f);
	  fprintf (f, "\n");
	}
      else
	fprintf (f, "         Unknown bits\n");

      if (jump_func->m_vr)
	{
	  fprintf (f, "         VR  ");
	  fprintf (f, "%s[",
		   (jump_func->m_vr->kind () == VR_ANTI_RANGE) ? "~" : "");
	  print_decs (wi::to_wide (jump_func->m_vr->min ()), f);
	  fprintf (f, ", ");
	  print_decs (wi::to_wide (jump_func->m_vr->max ()), f);
	  fprintf (f, "]\n");
	}
      else
	fprintf (f, "         Unknown VR\n");
    }
}
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s:\n", node->dump_name ());
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, "    callsite  %s -> %s : \n",
	       node->dump_name (),
	       cs->callee->dump_name ());
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

/* Set JFUNC to be an unknown jump function, i.e. one that carries no
   information at all.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
  jfunc->bits = NULL;
  jfunc->m_vr = NULL;
}
/* Set DST to be a copy of another jump function SRC (to be used by jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)

{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
/* Set JFUNC to be a simple pass-through jump function.  */
static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}

/* Set JFUNC to be a unary pass-through jump function.  */

static void
ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an arithmetic pass-through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_ref_base_and_extent to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
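
/* For illustration (a hypothetical C++ example, not from any testcase):

     struct A { virtual void f (); };
     struct B : A { B (); virtual void f (); };

   The body of B::B () first calls A::A () (section 1 above), then stores the
   address of B's VMT into this->_vptr (section 2) and only then runs the
   user-written constructor code (section 3).  */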
/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}

/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type cannot change
   between the beginning of the function and the point where CALL is invoked.

   Generally functions are not allowed to change the type of such instances,
   but they can call destructors.  We assume that methods cannot destroy the
   THIS pointer.  Also as a special case, constructors and destructors may
   change the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes to the dynamic type;
     that would require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inlined cdtor is actually working on ARG, but we don't have
     an easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know the type
     of the instance, match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call);
	       block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}
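
/* For illustration (hypothetical C++, not from any testcase):

     struct A { virtual void f (); void g (); };
     void A::g () { f (); }
     A::A () { f (); }

   For the THIS argument inside A::g the function returns false because an
   ordinary method cannot alter the dynamic type of *this (assuming no inlined
   cdtor in the inline stack), whereas for the THIS argument inside the
   constructor A::A it returns true.  */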
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gcall *call, struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
						call, jfunc, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
						call, jfunc, 0);
}
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct ipa_func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}

/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}

/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
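
/* For illustration: if BB2 dominates BB3 and a walk launched from a statement
   in BB2 has already computed a valid status for parameter 0, a subsequent
   query for BB3 is seeded from that dominating status rather than starting
   from scratch, which bounds the amount of repeated alias walking.  */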
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have gathered
   so far but which does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  if (TREE_READONLY (base))
    return true;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}

/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params, provided the value has
   not been modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
			    vec<ipa_param_descriptor, va_gc> *descriptors,
			    gimple *stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
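
/* For example (hypothetical GIMPLE), in "foo (int a)" where A is addressable
   and therefore not a gimple register:

     a.0_2 = a;

   load_from_unmodified_param returns the index of A for this statement,
   provided that nothing may have modified A before it.  */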
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
/* Return true if we can prove that OP is a memory reference loading
   data from an aggregate passed as a parameter.

   The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it
   returns false if it cannot prove that the value has not been modified before
   the load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will return true
   even if it cannot prove the value has not been modified; in that case it
   will store false to *GUARANTEED_UNMODIFIED, otherwise it will store true
   there.

   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If the function returns
   true, *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index,
   offset within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor, va_gc> *descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
			bool *by_ref_p, bool *guaranteed_unmodified)
{
  int index;
  HOST_WIDE_INT size;
  bool reverse;
  tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);

  if (!base)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  if (guaranteed_unmodified)
	    *guaranteed_unmodified = true;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	 void (*<T2e4>) (struct S *) D.1867;
	 struct S * p.1;

	 <bb 2>:
	 p.1_1 = p;
	 D.1867_2 = p.1_1->f;
	 D.1867_2 ();
	 gdp = &p;
	 */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0)
    {
      bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
      if (!data_preserved && !guaranteed_unmodified)
	return false;

      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      if (guaranteed_unmodified)
	*guaranteed_unmodified = data_preserved;
      return true;
    }
  return false;
}
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
	int a.0;

	a.0_2 = a;
	bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
	int D.2064;

	D.2064_4 = a.1(D) + 4;
	bar (D.2064_4);

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
	int a.0;
	int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters access different
   stages of IPA optimizations.  PARMS_AINFO contains the information that is
   only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      switch (gimple_assign_rhs_class (stmt))
	{
	case GIMPLE_BINARY_RHS:
	  {
	    tree op2 = gimple_assign_rhs2 (stmt);
	    if (!is_gimple_ip_invariant (op2)
		|| ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
		     != tcc_comparison)
		    && !useless_type_conversion_p (TREE_TYPE (name),
						   TREE_TYPE (op1))))
	      return;

	    ipa_set_jf_arith_pass_through (jfunc, index, op2,
					   gimple_assign_rhs_code (stmt));
	    break;
	  }
	case GIMPLE_SINGLE_RHS:
	  {
	    bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
						       tc_ssa);
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	    break;
	  }
	case GIMPLE_UNARY_RHS:
	  if (is_gimple_assign (stmt)
	      && gimple_assign_rhs_class (stmt) == GIMPLE_UNARY_RHS
	      && ! CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
	    ipa_set_jf_unary_pass_through (jfunc, index,
					   gimple_assign_rhs_code (stmt));
	default:;
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
  offset_int mem_offset;
  if (!base
      || TREE_CODE (base) != MEM_REF
      || !mem_ref_offset (base).is_constant (&mem_offset))
    return;
  offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);

  offset_int mem_offset;
  if (!expr
      || TREE_CODE (expr) != MEM_REF
      || !mem_ref_offset (expr).is_constant (&mem_offset))
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
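
/* For illustration (simplified Itanium C++ ABI lowering): a pointer to member
   function is represented by a record of the shape

     struct
     {
       void (T::*__pfn) ();	method pointer or vtable offset
       ptrdiff_t __delta;	offset to adjust THIS by
     };

   which is the structure the predicate above recognizes.  */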
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}
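
/* For example (hypothetical GIMPLE), given

     c_1 = 4;
     d_2 = c_1;

   get_ssa_def_if_simple_copy follows the copy chain from d_2 and returns the
   constant 4.  */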
/* Simple linked list, describing known contents of an aggregate before
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};

/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
				HOST_WIDE_INT lhs_offset,
				HOST_WIDE_INT lhs_size,
				bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
	return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	/* We already know this value is subsequently overwritten with
	   something else.  */
	*already_there = true;
      else
	/* Otherwise this is a partial overlap which we cannot
	   represent.  */
	return NULL;
    }
  return p;
}

/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET, and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int const_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
	{
	  struct ipa_agg_jf_item item;
	  item.offset = list->offset - arg_offset;
	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	  item.value = unshare_expr_without_location (list->constant);
	  jfunc->agg.items->quick_push (item);
	}
      list = list->next;
    }
}
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */

static void
determine_locally_known_aggregate_parts (gcall *call, tree arg,
					 tree arg_type,
					 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  if (PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS) == 0)
    return;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  bool reverse;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
						  &arg_size, &reverse);
	  if (!arg_base)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      bool reverse;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
					      &arg_size, &reverse);
      if (!arg_base)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
     describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple *stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size;
      tree lhs, rhs, lhs_base;
      bool reverse;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
	continue;
      if (!gimple_assign_single_p (stmt))
	break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
	  || TREE_CODE (lhs) == BIT_FIELD_REF
	  || contains_bitfld_component_ref_p (lhs))
	break;

      lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset,
					      &lhs_size, &reverse);
      if (!lhs_base)
	break;

      if (check_ref)
	{
	  if (TREE_CODE (lhs_base) != MEM_REF
	      || TREE_OPERAND (lhs_base, 0) != arg_base
	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	    break;
	}
      else if (lhs_base != arg_base)
	{
	  if (DECL_P (lhs_base))
	    continue;
	  else
	    break;
	}

      bool already_there = false;
      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
					  &already_there);
      if (!p)
	break;
      if (already_there)
	continue;

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
	{
	  n->constant = rhs;
	  const_count++;
	}
      else
	n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}
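
/* For illustration (hypothetical source, assuming 32-bit int):

     struct S s;
     s.a = 1;
     s.b = 2;
     bar (&s);

   the backward walk above finds the two constant stores, so the aggregate
   jump function of the first argument of bar gets items at bit offsets 0 and
   32 holding the constants 1 and 2, passed by reference.  */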
/* Return the Ith param type of callee associated with call graph
   edge E.  */

tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
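
/* For example, for an edge calling "void f (int, double)" this returns double
   for I == 1.  For a K&R-style or variadic callee whose TYPE_ARG_TYPES list
   runs out early, it falls back to the types of the callee's DECL_ARGUMENTS,
   if those are available.  */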
/* Return ipa_bits with VALUE and MASK values, which can be either a newly
   allocated structure or a previously existing one shared with other jump
   functions and/or transformation summaries.  */

ipa_bits *
ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
{
  ipa_bits tmp;
  tmp.value = value;
  tmp.mask = mask;

  ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
  if (*slot)
    return *slot;

  ipa_bits *res = ggc_alloc<ipa_bits> ();
  res->value = value;
  res->mask = mask;
  *slot = res;

  return res;
}

/* Assign to JF a pointer to ipa_bits structure with VALUE and MASK.  Use hash
   table in order to avoid creating multiple same ipa_bits structures.  */

static void
ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
		    const widest_int &mask)
{
  jf->bits = ipa_get_ipa_bits_for_value (value, mask);
}

/* Return a pointer to a value_range just like *TMP, but either find it in
   ipa_vr_hash_table or allocate it in GC memory.  TMP->equiv must be NULL.  */

static value_range_base *
ipa_get_value_range (value_range_base *tmp)
{
  value_range_base **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
  if (*slot)
    return *slot;

  value_range_base *vr = ggc_alloc<value_range_base> ();
  *vr = *tmp;
  *slot = vr;

  return vr;
}

/* Return a pointer to a value range consisting of TYPE, MIN, MAX and an empty
   equiv set.  Use hash table in order to avoid creating multiple same copies
   of value_ranges.  */

static value_range_base *
ipa_get_value_range (enum value_range_kind type, tree min, tree max)
{
  value_range_base tmp (type, min, max);
  return ipa_get_value_range (&tmp);
}

/* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
   a NULL equiv bitmap.  Use hash table in order to avoid creating multiple
   same value_range structures.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_kind type,
		  tree min, tree max)
{
  jf->m_vr = ipa_get_value_range (type, min, max);
}

/* Assign to JF a pointer to a value_range just like TMP but either fetch a
   copy from ipa_vr_hash_table or allocate a new one in GC memory.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, value_range_base *tmp)
{
  jf->m_vr = ipa_get_value_range (tmp);
}
1817 /* Compute jump function for all arguments of callsite CS and insert the
1818 information in the jump_functions array in the ipa_edge_args corresponding
1819 to this callsite. */
1821 static void
1822 ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
1823 struct cgraph_edge *cs)
1825 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1826 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1827 gcall *call = cs->call_stmt;
1828 int n, arg_num = gimple_call_num_args (call);
1829 bool useful_context = false;
1831 if (arg_num == 0 || args->jump_functions)
1832 return;
1833 vec_safe_grow_cleared (args->jump_functions, arg_num);
1834 if (flag_devirtualize)
1835 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
1837 if (gimple_call_internal_p (call))
1838 return;
1839 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1840 return;
1842 for (n = 0; n < arg_num; n++)
1844 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1845 tree arg = gimple_call_arg (call, n);
1846 tree param_type = ipa_get_callee_param_type (cs, n);
1847 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
1849 tree instance;
1850 struct ipa_polymorphic_call_context context (cs->caller->decl,
1851 arg, cs->call_stmt,
1852 &instance);
1853 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
1854 *ipa_get_ith_polymorhic_call_context (args, n) = context;
1855 if (!context.useless_p ())
1856 useful_context = true;
1859 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1861 bool addr_nonzero = false;
1862 bool strict_overflow = false;
1864 if (TREE_CODE (arg) == SSA_NAME
1865 && param_type
1866 && get_ptr_nonnull (arg))
1867 addr_nonzero = true;
1868 else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
1869 addr_nonzero = true;
1871 if (addr_nonzero)
1873 tree z = build_int_cst (TREE_TYPE (arg), 0);
1874 ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
1876 else
1877 gcc_assert (!jfunc->m_vr);
1879 else
1881 wide_int min, max;
1882 value_range_kind type;
1883 if (TREE_CODE (arg) == SSA_NAME
1884 && param_type
1885 && (type = get_range_info (arg, &min, &max))
1886 && (type == VR_RANGE || type == VR_ANTI_RANGE))
1888 value_range_base resvr;
1889 value_range_base tmpvr (type,
1890 wide_int_to_tree (TREE_TYPE (arg), min),
1891 wide_int_to_tree (TREE_TYPE (arg), max));
1892 extract_range_from_unary_expr (&resvr, NOP_EXPR, param_type,
1893 &tmpvr, TREE_TYPE (arg));
1894 if (!resvr.undefined_p () && !resvr.varying_p ())
1895 ipa_set_jfunc_vr (jfunc, &resvr);
1896 else
1897 gcc_assert (!jfunc->m_vr);
1899 else
1900 gcc_assert (!jfunc->m_vr);
1903 if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
1904 && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
1906 if (TREE_CODE (arg) == SSA_NAME)
1907 ipa_set_jfunc_bits (jfunc, 0,
1908 widest_int::from (get_nonzero_bits (arg),
1909 TYPE_SIGN (TREE_TYPE (arg))));
1910 else
1911 ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
1913 else if (POINTER_TYPE_P (TREE_TYPE (arg)))
1915 unsigned HOST_WIDE_INT bitpos;
1916 unsigned align;
1918 get_pointer_alignment_1 (arg, &align, &bitpos);
1919 widest_int mask = wi::bit_and_not
1920 (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
1921 align / BITS_PER_UNIT - 1);
1922 widest_int value = bitpos / BITS_PER_UNIT;
1923 ipa_set_jfunc_bits (jfunc, value, mask);
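/* For example, a pointer with 8-byte alignment and a known misalignment of
   4 bytes (ALIGN == 64, BITPOS == 32) yields a MASK with its three lowest
   bits cleared (i.e. those bits are known) and a VALUE of 4, meaning the
   low bits of the pointer are known to be 100 in binary.  */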
1925 else
1926 gcc_assert (!jfunc->bits);
1928 if (is_gimple_ip_invariant (arg)
1929 || (VAR_P (arg)
1930 && is_global_var (arg)
1931 && TREE_READONLY (arg)))
1932 ipa_set_jf_constant (jfunc, arg, cs);
1933 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1934 && TREE_CODE (arg) == PARM_DECL)
1936 int index = ipa_get_param_decl_index (info, arg);
1938 gcc_assert (index >= 0);
1939 /* Aggregate passed by value, check for pass-through, otherwise we
1940 will attempt to fill in aggregate contents later in this
1941 loop.  */
1942 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1944 ipa_set_jf_simple_pass_through (jfunc, index, false);
1945 continue;
1948 else if (TREE_CODE (arg) == SSA_NAME)
1950 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1952 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1953 if (index >= 0)
1955 bool agg_p;
1956 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1957 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1960 else
1962 gimple *stmt = SSA_NAME_DEF_STMT (arg);
1963 if (is_gimple_assign (stmt))
1964 compute_complex_assign_jump_func (fbi, info, jfunc,
1965 call, stmt, arg, param_type);
1966 else if (gimple_code (stmt) == GIMPLE_PHI)
1967 compute_complex_ancestor_jump_func (fbi, info, jfunc,
1968 call,
1969 as_a <gphi *> (stmt));
1973 /* If ARG is a pointer, we cannot use its type to determine the type of the
1974 aggregate passed (because type conversions are ignored in gimple). Usually we
1975 can safely get the type from the function declaration, but for K&R prototypes
1976 or variadic functions we can try our luck with the type of the pointer passed.
1977 TODO: Since we look for actual initialization of the memory object, it may be
1978 better to work out the type based on the memory stores we find.  */
1979 if (!param_type)
1980 param_type = TREE_TYPE (arg);
1982 if ((jfunc->type != IPA_JF_PASS_THROUGH
1983 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1984 && (jfunc->type != IPA_JF_ANCESTOR
1985 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1986 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1987 || POINTER_TYPE_P (param_type)))
1988 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
1990 if (!useful_context)
1991 vec_free (args->polymorphic_call_contexts);
1994 /* Compute jump functions for all edges - both direct and indirect - outgoing
1995 from BB. */
1997 static void
1998 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
2000 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2001 int i;
2002 struct cgraph_edge *cs;
2004 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
2006 struct cgraph_node *callee = cs->callee;
2008 if (callee)
2010 callee->ultimate_alias_target ();
2011 /* We do not need to bother analyzing calls to unknown functions
2012 unless they may become known during lto/whopr. */
2013 if (!callee->definition && !flag_lto)
2014 continue;
2016 ipa_compute_jump_functions_for_edge (fbi, cs);
2020 /* If STMT looks like a statement loading a value from a member pointer formal
2021 parameter, return that parameter and store the offset of the field to
2022 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2023 might be clobbered). If USE_DELTA, then we look for a use of the delta
2024 field rather than the pfn. */
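/* The two recognized shapes of such a load are roughly

     pfn_1 = MEM[(struct S *)&f].__pfn;    <- COMPONENT_REF over a MEM_REF
     pfn_1 = MEM[&f + 4B];                 <- bare MEM_REF with an offset

   where f is a PARM_DECL whose type looks like a member pointer.  This is
   a sketch of the gimple, not an exact dump.  */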
2026 static tree
2027 ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
2028 HOST_WIDE_INT *offset_p)
2030 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2032 if (!gimple_assign_single_p (stmt))
2033 return NULL_TREE;
2035 rhs = gimple_assign_rhs1 (stmt);
2036 if (TREE_CODE (rhs) == COMPONENT_REF)
2038 ref_field = TREE_OPERAND (rhs, 1);
2039 rhs = TREE_OPERAND (rhs, 0);
2041 else
2042 ref_field = NULL_TREE;
2043 if (TREE_CODE (rhs) != MEM_REF)
2044 return NULL_TREE;
2045 rec = TREE_OPERAND (rhs, 0);
2046 if (TREE_CODE (rec) != ADDR_EXPR)
2047 return NULL_TREE;
2048 rec = TREE_OPERAND (rec, 0);
2049 if (TREE_CODE (rec) != PARM_DECL
2050 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2051 return NULL_TREE;
2052 ref_offset = TREE_OPERAND (rhs, 1);
2054 if (use_delta)
2055 fld = delta_field;
2056 else
2057 fld = ptr_field;
2058 if (offset_p)
2059 *offset_p = int_bit_position (fld);
2061 if (ref_field)
2063 if (integer_nonzerop (ref_offset))
2064 return NULL_TREE;
2065 return ref_field == fld ? rec : NULL_TREE;
2067 else
2068 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2069 : NULL_TREE;
2072 /* Returns true iff T is an SSA_NAME defined by a statement. */
2074 static bool
2075 ipa_is_ssa_with_stmt_def (tree t)
2077 if (TREE_CODE (t) == SSA_NAME
2078 && !SSA_NAME_IS_DEFAULT_DEF (t))
2079 return true;
2080 else
2081 return false;
2084 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2085 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2086 indirect call graph edge. */
2088 static struct cgraph_edge *
2089 ipa_note_param_call (struct cgraph_node *node, int param_index,
2090 gcall *stmt)
2092 struct cgraph_edge *cs;
2094 cs = node->get_edge (stmt);
2095 cs->indirect_info->param_index = param_index;
2096 cs->indirect_info->agg_contents = 0;
2097 cs->indirect_info->member_ptr = 0;
2098 cs->indirect_info->guaranteed_unmodified = 0;
2099 return cs;
2102 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
2103 (described by INFO). PARMS_AINFO is a pointer to a vector containing
2104 intermediate information about each formal parameter. Currently it checks
2105 whether the call calls a pointer that is a formal parameter and if so, the
2106 parameter is marked with the called flag and an indirect call graph edge
2107 describing the call is created. This is very simple for ordinary pointers
2108 represented in SSA but not-so-nice when it comes to member pointers. The
2109 ugly part of this function does nothing more than trying to match the
2110 pattern of such a call. An example of such a pattern is the gimple dump
2111 below, the call is on the last line:
2113 <bb 2>:
2114 f$__delta_5 = f.__delta;
2115 f$__pfn_24 = f.__pfn;
2117 or
2118 <bb 2>:
2119 f$__delta_5 = MEM[(struct *)&f];
2120 f$__pfn_24 = MEM[(struct *)&f + 4B];
2122 and a few lines below:
2124 <bb 5>:
2125 D.2496_3 = (int) f$__pfn_24;
2126 D.2497_4 = D.2496_3 & 1;
2127 if (D.2497_4 != 0)
2128 goto <bb 3>;
2129 else
2130 goto <bb 4>;
2132 <bb 6>:
2133 D.2500_7 = (unsigned int) f$__delta_5;
2134 D.2501_8 = &S + D.2500_7;
2135 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2136 D.2503_10 = *D.2502_9;
2137 D.2504_12 = f$__pfn_24 + -1;
2138 D.2505_13 = (unsigned int) D.2504_12;
2139 D.2506_14 = D.2503_10 + D.2505_13;
2140 D.2507_15 = *D.2506_14;
2141 iftmp.11_16 = (String:: *) D.2507_15;
2143 <bb 7>:
2144 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2145 D.2500_19 = (unsigned int) f$__delta_5;
2146 D.2508_20 = &S + D.2500_19;
2147 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2149 Such patterns are results of simple calls to a member pointer:
2151 int doprinting (int (MyString::* f)(int) const)
2153 MyString S ("somestring");
2155 return (S.*f)(4);
2158 Moreover, the function also looks for called pointers loaded from aggregates
2159 passed by value or reference. */
2161 static void
2162 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
2163 tree target)
2165 struct ipa_node_params *info = fbi->info;
2166 HOST_WIDE_INT offset;
2167 bool by_ref;
2169 if (SSA_NAME_IS_DEFAULT_DEF (target))
2171 tree var = SSA_NAME_VAR (target);
2172 int index = ipa_get_param_decl_index (info, var);
2173 if (index >= 0)
2174 ipa_note_param_call (fbi->node, index, call);
2175 return;
2178 int index;
2179 gimple *def = SSA_NAME_DEF_STMT (target);
2180 bool guaranteed_unmodified;
2181 if (gimple_assign_single_p (def)
2182 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2183 gimple_assign_rhs1 (def), &index, &offset,
2184 NULL, &by_ref, &guaranteed_unmodified))
2186 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2187 cs->indirect_info->offset = offset;
2188 cs->indirect_info->agg_contents = 1;
2189 cs->indirect_info->by_ref = by_ref;
2190 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
2191 return;
2194 /* Now we need to try to match the complex pattern of calling a member
2195 pointer. */
2196 if (gimple_code (def) != GIMPLE_PHI
2197 || gimple_phi_num_args (def) != 2
2198 || !POINTER_TYPE_P (TREE_TYPE (target))
2199 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2200 return;
2202 /* First, we need to check whether one of these is a load from a member
2203 pointer that is a parameter to this function. */
2204 tree n1 = PHI_ARG_DEF (def, 0);
2205 tree n2 = PHI_ARG_DEF (def, 1);
2206 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2207 return;
2208 gimple *d1 = SSA_NAME_DEF_STMT (n1);
2209 gimple *d2 = SSA_NAME_DEF_STMT (n2);
2211 tree rec;
2212 basic_block bb, virt_bb;
2213 basic_block join = gimple_bb (def);
2214 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2216 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2217 return;
2219 bb = EDGE_PRED (join, 0)->src;
2220 virt_bb = gimple_bb (d2);
2222 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2224 bb = EDGE_PRED (join, 1)->src;
2225 virt_bb = gimple_bb (d1);
2227 else
2228 return;
2230 /* Second, we need to check that the basic blocks are laid out in the way
2231 corresponding to the pattern. */
2233 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2234 || single_pred (virt_bb) != bb
2235 || single_succ (virt_bb) != join)
2236 return;
2238 /* Third, let's see that the branching is done depending on the least
2239 significant bit of the pfn. */
2241 gimple *branch = last_stmt (bb);
2242 if (!branch || gimple_code (branch) != GIMPLE_COND)
2243 return;
2245 if ((gimple_cond_code (branch) != NE_EXPR
2246 && gimple_cond_code (branch) != EQ_EXPR)
2247 || !integer_zerop (gimple_cond_rhs (branch)))
2248 return;
2250 tree cond = gimple_cond_lhs (branch);
2251 if (!ipa_is_ssa_with_stmt_def (cond))
2252 return;
2254 def = SSA_NAME_DEF_STMT (cond);
2255 if (!is_gimple_assign (def)
2256 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2257 || !integer_onep (gimple_assign_rhs2 (def)))
2258 return;
2260 cond = gimple_assign_rhs1 (def);
2261 if (!ipa_is_ssa_with_stmt_def (cond))
2262 return;
2264 def = SSA_NAME_DEF_STMT (cond);
2266 if (is_gimple_assign (def)
2267 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2269 cond = gimple_assign_rhs1 (def);
2270 if (!ipa_is_ssa_with_stmt_def (cond))
2271 return;
2272 def = SSA_NAME_DEF_STMT (cond);
2275 tree rec2;
2276 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2277 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2278 == ptrmemfunc_vbit_in_delta),
2279 NULL);
2280 if (rec != rec2)
2281 return;
2283 index = ipa_get_param_decl_index (info, rec);
2284 if (index >= 0
2285 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2287 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2288 cs->indirect_info->offset = offset;
2289 cs->indirect_info->agg_contents = 1;
2290 cs->indirect_info->member_ptr = 1;
2291 cs->indirect_info->guaranteed_unmodified = 1;
2294 return;
2297 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2298 object referenced in the expression is a formal parameter of the caller
2299 FBI->node (described by FBI->info), create a call note for the
2300 statement. */
2302 static void
2303 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2304 gcall *call, tree target)
2306 tree obj = OBJ_TYPE_REF_OBJECT (target);
2307 int index;
2308 HOST_WIDE_INT anc_offset;
2310 if (!flag_devirtualize)
2311 return;
2313 if (TREE_CODE (obj) != SSA_NAME)
2314 return;
2316 struct ipa_node_params *info = fbi->info;
2317 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2319 struct ipa_jump_func jfunc;
2320 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2321 return;
2323 anc_offset = 0;
2324 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2325 gcc_assert (index >= 0);
2326 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2327 call, &jfunc))
2328 return;
2330 else
2332 struct ipa_jump_func jfunc;
2333 gimple *stmt = SSA_NAME_DEF_STMT (obj);
2334 tree expr;
2336 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2337 if (!expr)
2338 return;
2339 index = ipa_get_param_decl_index (info,
2340 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2341 gcc_assert (index >= 0);
2342 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2343 call, &jfunc, anc_offset))
2344 return;
2347 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2348 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2349 ii->offset = anc_offset;
2350 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2351 ii->otr_type = obj_type_ref_class (target);
2352 ii->polymorphic = 1;
2355 /* Analyze call statement CALL to determine whether and how it utilizes formal
2356 parameters of the caller (described by INFO). PARMS_AINFO is a pointer to a
2357 vector containing intermediate information about each formal parameter. */
2359 static void
2360 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2362 tree target = gimple_call_fn (call);
2364 if (!target
2365 || (TREE_CODE (target) != SSA_NAME
2366 && !virtual_method_call_p (target)))
2367 return;
2369 struct cgraph_edge *cs = fbi->node->get_edge (call);
2370 /* If we previously turned the call into a direct call, there is
2371 no need to analyze. */
2372 if (cs && !cs->indirect_unknown_callee)
2373 return;
2375 if (cs->indirect_info->polymorphic && flag_devirtualize)
2377 tree instance;
2378 tree target = gimple_call_fn (call);
2379 ipa_polymorphic_call_context context (current_function_decl,
2380 target, call, &instance);
2382 gcc_checking_assert (cs->indirect_info->otr_type
2383 == obj_type_ref_class (target));
2384 gcc_checking_assert (cs->indirect_info->otr_token
2385 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2387 cs->indirect_info->vptr_changed
2388 = !context.get_dynamic_type (instance,
2389 OBJ_TYPE_REF_OBJECT (target),
2390 obj_type_ref_class (target), call);
2391 cs->indirect_info->context = context;
2394 if (TREE_CODE (target) == SSA_NAME)
2395 ipa_analyze_indirect_call_uses (fbi, call, target);
2396 else if (virtual_method_call_p (target))
2397 ipa_analyze_virtual_call_uses (fbi, call, target);
2401 /* Analyze the call statement STMT with respect to formal parameters (described
2402 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2403 formal parameters are called. */
2405 static void
2406 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2408 if (is_gimple_call (stmt))
2409 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2412 /* Callback of walk_stmt_load_store_addr_ops, used as the load, store and
2413 address visitor. If OP is a parameter declaration, mark it as used in the
2414 info structure passed in DATA. */
2416 static bool
2417 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2419 struct ipa_node_params *info = (struct ipa_node_params *) data;
2421 op = get_base_address (op);
2422 if (op
2423 && TREE_CODE (op) == PARM_DECL)
2425 int index = ipa_get_param_decl_index (info, op);
2426 gcc_assert (index >= 0);
2427 ipa_set_param_used (info, index, true);
2430 return false;
2433 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2434 the findings in various structures of the associated ipa_node_params
2435 structure, such as parameter flags, notes etc. FBI holds various data about
2436 the function being analyzed. */
2438 static void
2439 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2441 gimple_stmt_iterator gsi;
2442 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2444 gimple *stmt = gsi_stmt (gsi);
2446 if (is_gimple_debug (stmt))
2447 continue;
2449 ipa_analyze_stmt_uses (fbi, stmt);
2450 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2451 visit_ref_for_mod_analysis,
2452 visit_ref_for_mod_analysis,
2453 visit_ref_for_mod_analysis);
2455 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2456 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2457 visit_ref_for_mod_analysis,
2458 visit_ref_for_mod_analysis,
2459 visit_ref_for_mod_analysis);
2462 /* Calculate controlled uses of parameters of NODE. */
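/* A use is counted as controlled when it appears in a call statement; any
   other non-debug use makes the count unknown.  E.g. in a hypothetical

     void f (void (*cb) (void)) { cb (); }

   cb has one controlled use, whereas adding "if (cb)" would force
   IPA_UNDESCRIBED_USE because of the comparison.  */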
2464 static void
2465 ipa_analyze_controlled_uses (struct cgraph_node *node)
2467 struct ipa_node_params *info = IPA_NODE_REF (node);
2469 for (int i = 0; i < ipa_get_param_count (info); i++)
2471 tree parm = ipa_get_param (info, i);
2472 int controlled_uses = 0;
2474 /* For SSA regs see if parameter is used. For non-SSA we compute
2475 the flag during modification analysis. */
2476 if (is_gimple_reg (parm))
2478 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2479 parm);
2480 if (ddef && !has_zero_uses (ddef))
2482 imm_use_iterator imm_iter;
2483 use_operand_p use_p;
2485 ipa_set_param_used (info, i, true);
2486 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2487 if (!is_gimple_call (USE_STMT (use_p)))
2489 if (!is_gimple_debug (USE_STMT (use_p)))
2491 controlled_uses = IPA_UNDESCRIBED_USE;
2492 break;
2495 else
2496 controlled_uses++;
2498 else
2499 controlled_uses = 0;
2501 else
2502 controlled_uses = IPA_UNDESCRIBED_USE;
2503 ipa_set_controlled_uses (info, i, controlled_uses);
2507 /* Free stuff in BI. */
2509 static void
2510 free_ipa_bb_info (struct ipa_bb_info *bi)
2512 bi->cg_edges.release ();
2513 bi->param_aa_statuses.release ();
2516 /* Dominator walker driving the analysis. */
2518 class analysis_dom_walker : public dom_walker
2520 public:
2521 analysis_dom_walker (struct ipa_func_body_info *fbi)
2522 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2524 virtual edge before_dom_children (basic_block);
2526 private:
2527 struct ipa_func_body_info *m_fbi;
2530 edge
2531 analysis_dom_walker::before_dom_children (basic_block bb)
2533 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2534 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2535 return NULL;
2538 /* Release body info FBI. */
2540 void
2541 ipa_release_body_info (struct ipa_func_body_info *fbi)
2543 int i;
2544 struct ipa_bb_info *bi;
2546 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2547 free_ipa_bb_info (bi);
2548 fbi->bb_infos.release ();
2551 /* Initialize the array describing properties of formal parameters
2552 of NODE, analyze their uses and compute jump functions associated
2553 with actual arguments of calls from within NODE. */
2555 void
2556 ipa_analyze_node (struct cgraph_node *node)
2558 struct ipa_func_body_info fbi;
2559 struct ipa_node_params *info;
2561 ipa_check_create_node_params ();
2562 ipa_check_create_edge_args ();
2563 info = IPA_NODE_REF (node);
2565 if (info->analysis_done)
2566 return;
2567 info->analysis_done = 1;
2569 if (ipa_func_spec_opts_forbid_analysis_p (node))
2571 for (int i = 0; i < ipa_get_param_count (info); i++)
2573 ipa_set_param_used (info, i, true);
2574 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2576 return;
2579 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2580 push_cfun (func);
2581 calculate_dominance_info (CDI_DOMINATORS);
2582 ipa_initialize_node_params (node);
2583 ipa_analyze_controlled_uses (node);
2585 fbi.node = node;
2586 fbi.info = IPA_NODE_REF (node);
2587 fbi.bb_infos = vNULL;
2588 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2589 fbi.param_count = ipa_get_param_count (info);
2590 fbi.aa_walked = 0;
2592 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2594 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2595 bi->cg_edges.safe_push (cs);
2598 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2600 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2601 bi->cg_edges.safe_push (cs);
2604 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2606 ipa_release_body_info (&fbi);
2607 free_dominance_info (CDI_DOMINATORS);
2608 pop_cfun ();
2611 /* Update the jump functions associated with call graph edge E when the call
2612 graph edge CS is being inlined, assuming that E->caller is already (possibly
2613 indirectly) inlined into CS->callee and that E has not been inlined. */
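/* A sketch of the common case: if, roughly, the jump function of E passes
   formal 2 of E->caller through unchanged and the jump function of CS for
   argument 2 is a constant, the combined jump function of E becomes that
   constant; if CS's jump function is itself a NOP pass-through of formal 0,
   E's becomes a pass-through of formal 0 of CS->caller.  Operations,
   ancestor offsets and aggregate parts compose with more restrictions
   below.  */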
2615 static void
2616 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2617 struct cgraph_edge *e)
2619 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2620 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2621 int count = ipa_get_cs_argument_count (args);
2622 int i;
2624 for (i = 0; i < count; i++)
2626 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2627 struct ipa_polymorphic_call_context *dst_ctx
2628 = ipa_get_ith_polymorhic_call_context (args, i);
2630 if (dst->type == IPA_JF_ANCESTOR)
2632 struct ipa_jump_func *src;
2633 int dst_fid = dst->value.ancestor.formal_id;
2634 struct ipa_polymorphic_call_context *src_ctx
2635 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2637 /* Variable number of arguments can cause havoc if we try to access
2638 one that does not exist in the inlined edge. So make sure we
2639 don't. */
2640 if (dst_fid >= ipa_get_cs_argument_count (top))
2642 ipa_set_jf_unknown (dst);
2643 continue;
2646 src = ipa_get_ith_jump_func (top, dst_fid);
2648 if (src_ctx && !src_ctx->useless_p ())
2650 struct ipa_polymorphic_call_context ctx = *src_ctx;
2652 /* TODO: Make type preserved safe WRT contexts. */
2653 if (!ipa_get_jf_ancestor_type_preserved (dst))
2654 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2655 ctx.offset_by (dst->value.ancestor.offset);
2656 if (!ctx.useless_p ())
2658 if (!dst_ctx)
2660 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2661 count);
2662 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2665 dst_ctx->combine_with (ctx);
2669 if (src->agg.items
2670 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2672 struct ipa_agg_jf_item *item;
2673 int j;
2675 /* Currently we do not produce clobber aggregate jump functions,
2676 replace with merging when we do. */
2677 gcc_assert (!dst->agg.items);
2679 dst->agg.items = vec_safe_copy (src->agg.items);
2680 dst->agg.by_ref = src->agg.by_ref;
2681 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2682 item->offset -= dst->value.ancestor.offset;
2685 if (src->type == IPA_JF_PASS_THROUGH
2686 && src->value.pass_through.operation == NOP_EXPR)
2688 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2689 dst->value.ancestor.agg_preserved &=
2690 src->value.pass_through.agg_preserved;
2692 else if (src->type == IPA_JF_PASS_THROUGH
2693 && TREE_CODE_CLASS (src->value.pass_through.operation) == tcc_unary)
2695 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2696 dst->value.ancestor.agg_preserved = false;
2698 else if (src->type == IPA_JF_ANCESTOR)
2700 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2701 dst->value.ancestor.offset += src->value.ancestor.offset;
2702 dst->value.ancestor.agg_preserved &=
2703 src->value.ancestor.agg_preserved;
2705 else
2706 ipa_set_jf_unknown (dst);
2708 else if (dst->type == IPA_JF_PASS_THROUGH)
2710 struct ipa_jump_func *src;
2711 /* We must check range due to calls with variable number of arguments
2712 and we cannot combine jump functions with operations. */
2713 if (dst->value.pass_through.operation == NOP_EXPR
2714 && (dst->value.pass_through.formal_id
2715 < ipa_get_cs_argument_count (top)))
2717 int dst_fid = dst->value.pass_through.formal_id;
2718 src = ipa_get_ith_jump_func (top, dst_fid);
2719 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2720 struct ipa_polymorphic_call_context *src_ctx
2721 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2723 if (src_ctx && !src_ctx->useless_p ())
2725 struct ipa_polymorphic_call_context ctx = *src_ctx;
2727 /* TODO: Make type preserved safe WRT contexts. */
2728 if (!ipa_get_jf_pass_through_type_preserved (dst))
2729 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2730 if (!ctx.useless_p ())
2732 if (!dst_ctx)
2734 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2735 count);
2736 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2738 dst_ctx->combine_with (ctx);
2741 switch (src->type)
2743 case IPA_JF_UNKNOWN:
2744 ipa_set_jf_unknown (dst);
2745 break;
2746 case IPA_JF_CONST:
2747 ipa_set_jf_cst_copy (dst, src);
2748 break;
2750 case IPA_JF_PASS_THROUGH:
2752 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2753 enum tree_code operation;
2754 operation = ipa_get_jf_pass_through_operation (src);
2756 if (operation == NOP_EXPR)
2758 bool agg_p;
2759 agg_p = dst_agg_p
2760 && ipa_get_jf_pass_through_agg_preserved (src);
2761 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2763 else if (TREE_CODE_CLASS (operation) == tcc_unary)
2764 ipa_set_jf_unary_pass_through (dst, formal_id, operation);
2765 else
2767 tree operand = ipa_get_jf_pass_through_operand (src);
2768 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2769 operation);
2771 break;
2773 case IPA_JF_ANCESTOR:
2775 bool agg_p;
2776 agg_p = dst_agg_p
2777 && ipa_get_jf_ancestor_agg_preserved (src);
2778 ipa_set_ancestor_jf (dst,
2779 ipa_get_jf_ancestor_offset (src),
2780 ipa_get_jf_ancestor_formal_id (src),
2781 agg_p);
2782 break;
2784 default:
2785 gcc_unreachable ();
2788 if (src->agg.items
2789 && (dst_agg_p || !src->agg.by_ref))
2791 /* Currently we do not produce clobber aggregate jump
2792 functions, replace with merging when we do. */
2793 gcc_assert (!dst->agg.items);
2795 dst->agg.by_ref = src->agg.by_ref;
2796 dst->agg.items = vec_safe_copy (src->agg.items);
2799 else
2800 ipa_set_jf_unknown (dst);
2805 /* If TARGET is an addr_expr of a function declaration, make it the
2806 (SPECULATIVE) destination of an indirect edge IE and return the edge.
2807 Otherwise, return NULL. */
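/* E.g. a TARGET of &foo (an ADDR_EXPR of FUNCTION_DECL foo) yields a direct
   edge to foo, whereas a constant that provably is not a function is
   redirected to __builtin_unreachable.  */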
2809 struct cgraph_edge *
2810 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2811 bool speculative)
2813 struct cgraph_node *callee;
2814 bool unreachable = false;
2816 if (TREE_CODE (target) == ADDR_EXPR)
2817 target = TREE_OPERAND (target, 0);
2818 if (TREE_CODE (target) != FUNCTION_DECL)
2820 target = canonicalize_constructor_val (target, NULL);
2821 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2823 /* Member pointer call that goes through a VMT lookup. */
2824 if (ie->indirect_info->member_ptr
2825 /* Or if target is not an invariant expression and we do not
2826 know if it will evaluate to a function at runtime.
2827 This can happen when folding through &VAR, where &VAR
2828 is IP invariant, but VAR itself is not.
2830 TODO: Revisit this when GCC 5 is branched. It seems that
2831 member_ptr check is not needed and that we may try to fold
2832 the expression and see if VAR is readonly. */
2833 || !is_gimple_ip_invariant (target))
2835 if (dump_enabled_p ())
2837 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
2838 "discovered direct call non-invariant %s\n",
2839 ie->caller->dump_name ());
2841 return NULL;
2845 if (dump_enabled_p ())
2847 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
2848 "discovered direct call to non-function in %s, "
2849 "making it __builtin_unreachable\n",
2850 ie->caller->dump_name ());
2853 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2854 callee = cgraph_node::get_create (target);
2855 unreachable = true;
2857 else
2858 callee = cgraph_node::get (target);
2860 else
2861 callee = cgraph_node::get (target);
2863 /* Because may-edges are not explicitly represented and the vtable may be
2864 external, we may create the first reference to the object in the unit. */
2865 if (!callee || callee->global.inlined_to)
2868 /* We had better ensure we can refer to it.
2869 In the case of static functions we are out of luck, since we have already
2870 removed their bodies. In the case of public functions we may or may
2871 not introduce the reference. */
2872 if (!canonicalize_constructor_val (target, NULL)
2873 || !TREE_PUBLIC (target))
2875 if (dump_file)
2876 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2877 "(%s -> %s) but can not refer to it. Giving up.\n",
2878 ie->caller->dump_name (),
2879 ie->callee->dump_name ());
2880 return NULL;
2882 callee = cgraph_node::get_create (target);
2885 /* If the edge is already speculative, check that the new target agrees. */
2886 if (speculative && ie->speculative)
2888 struct cgraph_edge *e2;
2889 struct ipa_ref *ref;
2890 ie->speculative_call_info (e2, ie, ref);
2891 if (e2->callee->ultimate_alias_target ()
2892 != callee->ultimate_alias_target ())
2894 if (dump_file)
2895 fprintf (dump_file, "ipa-prop: Discovered call to a speculative "
2896 "target (%s -> %s) but the call is already "
2897 "speculated to %s. Giving up.\n",
2898 ie->caller->dump_name (), callee->dump_name (),
2899 e2->callee->dump_name ());
2901 else
2903 if (dump_file)
2904 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2905 "(%s -> %s) this agree with previous speculation.\n",
2906 ie->caller->dump_name (), callee->dump_name ());
2908 return NULL;
2911 if (!dbg_cnt (devirt))
2912 return NULL;
2914 ipa_check_create_node_params ();
2916 /* We cannot make edges to inline clones. It would be a bug if someone
2917 removed the cgraph node too early. */
2918 gcc_assert (!callee->global.inlined_to);
2920 if (dump_file && !unreachable)
2922 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2923 "(%s -> %s), for stmt ",
2924 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2925 speculative ? "speculative" : "known",
2926 ie->caller->dump_name (),
2927 callee->dump_name ());
2928 if (ie->call_stmt)
2929 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2930 else
2931 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2933 if (dump_enabled_p ())
2935 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
2936 "converting indirect call in %s to direct call to %s\n",
2937 ie->caller->name (), callee->name ());
2939 if (!speculative)
2941 struct cgraph_edge *orig = ie;
2942 ie = ie->make_direct (callee);
2943 /* If we resolved a speculative edge, the cost is already up to date
2944 for the direct call (adjusted by inline_edge_duplication_hook). */
2945 if (ie == orig)
2947 ipa_call_summary *es = ipa_call_summaries->get (ie);
2948 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2949 - eni_size_weights.call_cost);
2950 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2951 - eni_time_weights.call_cost);
2954 else
2956 if (!callee->can_be_discarded_p ())
2958 cgraph_node *alias;
2959 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2960 if (alias)
2961 callee = alias;
2963 /* make_speculative will update ie's cost to direct call cost. */
2964 ie = ie->make_speculative
2965 (callee, ie->count.apply_scale (8, 10));
2968 return ie;
2971 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
2972 CONSTRUCTOR and return it. Return NULL if the search fails for some
2973 reason. */
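/* Note that REQ_OFFSET is in bits.  E.g. for a hypothetical

     static const int arr[3] = {10, 20, 30};

   a REQ_OFFSET of 64 (byte 8) finds 30, assuming a 32-bit int.  */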
2975 static tree
2976 find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
2978 tree type = TREE_TYPE (constructor);
2979 if (TREE_CODE (type) != ARRAY_TYPE
2980 && TREE_CODE (type) != RECORD_TYPE)
2981 return NULL;
2983 unsigned ix;
2984 tree index, val;
2985 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
2987 HOST_WIDE_INT elt_offset;
2988 if (TREE_CODE (type) == ARRAY_TYPE)
2990 offset_int off;
2991 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
2992 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
2994 if (index)
2996 if (TREE_CODE (index) == RANGE_EXPR)
2997 off = wi::to_offset (TREE_OPERAND (index, 0));
2998 else
2999 off = wi::to_offset (index);
3000 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
3002 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
3003 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3004 off = wi::sext (off - wi::to_offset (low_bound),
3005 TYPE_PRECISION (TREE_TYPE (index)));
3007 off *= wi::to_offset (unit_size);
3008 /* ??? Handle more than just the first index of a
3009 RANGE_EXPR. */
3011 else
3012 off = wi::to_offset (unit_size) * ix;
3014 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
3015 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
3016 continue;
3017 elt_offset = off.to_shwi ();
3019 else if (TREE_CODE (type) == RECORD_TYPE)
3021 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
3022 if (DECL_BIT_FIELD (index))
3023 continue;
3024 elt_offset = int_bit_position (index);
3026 else
3027 gcc_unreachable ();
3029 if (elt_offset > req_offset)
3030 return NULL;
3032 if (TREE_CODE (val) == CONSTRUCTOR)
3033 return find_constructor_constant_at_offset (val,
3034 req_offset - elt_offset);
3036 if (elt_offset == req_offset
3037 && is_gimple_reg_type (TREE_TYPE (val))
3038 && is_gimple_ip_invariant (val))
3039 return val;
3041 return NULL;
3044 /* Check whether SCALAR could be used to look up an aggregate interprocedural
3045 invariant from a static constructor and if so, return it. Otherwise return
3046 NULL. */
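/* E.g. if a caller passes &arr from the example above as an argument that
   the callee only dereferences, SCALAR is the ADDR_EXPR &arr, BY_REF is
   true, and the constant is looked up in DECL_INITIAL of arr; for this to
   work, arr must be a TREE_READONLY global with a CONSTRUCTOR
   initializer.  */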
3048 static tree
3049 ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
3051 if (by_ref)
3053 if (TREE_CODE (scalar) != ADDR_EXPR)
3054 return NULL;
3055 scalar = TREE_OPERAND (scalar, 0);
3058 if (!VAR_P (scalar)
3059 || !is_global_var (scalar)
3060 || !TREE_READONLY (scalar)
3061 || !DECL_INITIAL (scalar)
3062 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
3063 return NULL;
3065 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
3068 /* Retrieve value from aggregate jump function AGG or static initializer of
3069 SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
3070 none. BY_REF specifies whether the value has to be passed by reference or
3071 by value. If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
3072 to is set to true if the value comes from an initializer of a constant. */
3074 tree
3075 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar,
3076 HOST_WIDE_INT offset, bool by_ref,
3077 bool *from_global_constant)
3079 struct ipa_agg_jf_item *item;
3080 int i;
3082 if (scalar)
3084 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
3085 if (res)
3087 if (from_global_constant)
3088 *from_global_constant = true;
3089 return res;
3093 if (!agg
3094 || by_ref != agg->by_ref)
3095 return NULL;
3097 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
3098 if (item->offset == offset)
3100 /* Currently we do not have clobber values, return NULL for them once
3101 we do. */
3102 gcc_checking_assert (is_gimple_ip_invariant (item->value));
3103 if (from_global_constant)
3104 *from_global_constant = false;
3105 return item->value;
3107 return NULL;
3110 /* Remove a reference to SYMBOL from the list of references of a node given by
3111 reference description RDESC. Return true if the reference has been
3112 successfully found and removed. */
3114 static bool
3115 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
3117 struct ipa_ref *to_del;
3118 struct cgraph_edge *origin;
3120 origin = rdesc->cs;
3121 if (!origin)
3122 return false;
3123 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3124 origin->lto_stmt_uid);
3125 if (!to_del)
3126 return false;
3128 to_del->remove_reference ();
3129 if (dump_file)
3130 fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
3131 origin->caller->dump_name (), xstrdup_for_dump (symbol->name ()));
3132 return true;
3135 /* If JFUNC has a reference description with refcount different from
3136 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3137 NULL. JFUNC must be a constant jump function. */
3139 static struct ipa_cst_ref_desc *
3140 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3142 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3143 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3144 return rdesc;
3145 else
3146 return NULL;
3149 /* If the value of constant jump function JFUNC is an address of a function
3150 declaration, return the associated call graph node. Otherwise return
3151 NULL. */
3153 static cgraph_node *
3154 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3156 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3157 tree cst = ipa_get_jf_constant (jfunc);
3158 if (TREE_CODE (cst) != ADDR_EXPR
3159 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3160 return NULL;
3162 return cgraph_node::get (TREE_OPERAND (cst, 0));
3166 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3167 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3168 the edge specified in the rdesc. Return false if either the symbol or the
3169 reference could not be found, otherwise return true. */
3171 static bool
3172 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3174 struct ipa_cst_ref_desc *rdesc;
3175 if (jfunc->type == IPA_JF_CONST
3176 && (rdesc = jfunc_rdesc_usable (jfunc))
3177 && --rdesc->refcount == 0)
3179 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3180 if (!symbol)
3181 return false;
3183 return remove_described_reference (symbol, rdesc);
3185 return true;
3188 /* Try to find a destination for indirect edge IE that corresponds to a simple
3189 call or a call of a member function pointer and where the destination is a
3190 pointer formal parameter described by jump function JFUNC. TARGET_TYPE is
3191 the type of the parameter to which the result of JFUNC is passed. If it can
3192 be determined, return the newly direct edge, otherwise return NULL.
3193 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
3195 static struct cgraph_edge *
3196 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3197 struct ipa_jump_func *jfunc, tree target_type,
3198 struct ipa_node_params *new_root_info)
3200 struct cgraph_edge *cs;
3201 tree target;
3202 bool agg_contents = ie->indirect_info->agg_contents;
3203 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
3204 if (agg_contents)
3206 bool from_global_constant;
3207 target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3208 ie->indirect_info->offset,
3209 ie->indirect_info->by_ref,
3210 &from_global_constant);
3211 if (target
3212 && !from_global_constant
3213 && !ie->indirect_info->guaranteed_unmodified)
3214 return NULL;
3216 else
3217 target = scalar;
3218 if (!target)
3219 return NULL;
3220 cs = ipa_make_edge_direct_to_target (ie, target);
3222 if (cs && !agg_contents)
3224 bool ok;
3225 gcc_checking_assert (cs->callee
3226 && (cs != ie
3227 || jfunc->type != IPA_JF_CONST
3228 || !cgraph_node_for_jfunc (jfunc)
3229 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3230 ok = try_decrement_rdesc_refcount (jfunc);
3231 gcc_checking_assert (ok);
3234 return cs;
3237 /* Return the target to be used in cases of impossible devirtualization. IE
3238 and target (the latter can be NULL) are dumped when dumping is enabled. */
3240 tree
3241 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3243 if (dump_file)
3245 if (target)
3246 fprintf (dump_file,
3247 "Type inconsistent devirtualization: %s->%s\n",
3248 ie->caller->dump_name (),
3249 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3250 else
3251 fprintf (dump_file,
3252 "No devirtualization target in %s\n",
3253 ie->caller->dump_name ());
3255 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3256 cgraph_node::get_create (new_target);
3257 return new_target;
3260 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3261 call based on a formal parameter which is described by jump function JFUNC
3262 and if it can be determined, make it direct and return the direct edge.
3263 Otherwise, return NULL. CTX describes the polymorphic context that the
3264 parameter the call is based on brings along with it. */
3266 static struct cgraph_edge *
3267 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3268 struct ipa_jump_func *jfunc,
3269 struct ipa_polymorphic_call_context ctx)
3271 tree target = NULL;
3272 bool speculative = false;
3274 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
3275 return NULL;
3277 gcc_assert (!ie->indirect_info->by_ref);
3279 /* Try to do lookup via known virtual table pointer value. */
3280 if (!ie->indirect_info->vptr_changed
3281 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
3283 tree vtable;
3284 unsigned HOST_WIDE_INT offset;
3285 tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3286 : NULL;
3287 tree t = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3288 ie->indirect_info->offset,
3289 true);
3290 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3292 bool can_refer;
3293 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3294 vtable, offset, &can_refer);
3295 if (can_refer)
3297 if (!t
3298 || (TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
3299 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
3300 || !possible_polymorphic_call_target_p
3301 (ie, cgraph_node::get (t)))
3303 /* Do not speculate builtin_unreachable, it is stupid! */
3304 if (!ie->indirect_info->vptr_changed)
3305 target = ipa_impossible_devirt_target (ie, target);
3306 else
3307 target = NULL;
3309 else
3311 target = t;
3312 speculative = ie->indirect_info->vptr_changed;
3318 ipa_polymorphic_call_context ie_context (ie);
3319 vec <cgraph_node *>targets;
3320 bool final;
3322 ctx.offset_by (ie->indirect_info->offset);
3323 if (ie->indirect_info->vptr_changed)
3324 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3325 ie->indirect_info->otr_type);
3326 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3327 targets = possible_polymorphic_call_targets
3328 (ie->indirect_info->otr_type,
3329 ie->indirect_info->otr_token,
3330 ctx, &final);
3331 if (final && targets.length () <= 1)
3333 speculative = false;
3334 if (targets.length () == 1)
3335 target = targets[0]->decl;
3336 else
3337 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3339 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3340 && !ie->speculative && ie->maybe_hot_p ())
3342 cgraph_node *n;
3343 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3344 ie->indirect_info->otr_token,
3345 ie->indirect_info->context);
3346 if (n)
3348 target = n->decl;
3349 speculative = true;
3353 if (target)
3355 if (!possible_polymorphic_call_target_p
3356 (ie, cgraph_node::get_create (target)))
3358 if (speculative)
3359 return NULL;
3360 target = ipa_impossible_devirt_target (ie, target);
3362 return ipa_make_edge_direct_to_target (ie, target, speculative);
3364 else
3365 return NULL;
3368 /* Update the param called notes associated with NODE when CS is being inlined,
3369 assuming NODE is (potentially indirectly) inlined into CS->callee.
3370 Moreover, if the callee is discovered to be constant, create a new cgraph
3371 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3372 unless NEW_EDGES is NULL. Return true iff any new edges were created. */
3374 static bool
3375 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3376 struct cgraph_node *node,
3377 vec<cgraph_edge *> *new_edges)
3379 struct ipa_edge_args *top;
3380 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3381 struct ipa_node_params *new_root_info, *inlined_node_info;
3382 bool res = false;
3384 ipa_check_create_edge_args ();
3385 top = IPA_EDGE_REF (cs);
3386 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3387 ? cs->caller->global.inlined_to
3388 : cs->caller);
3389 inlined_node_info = IPA_NODE_REF (cs->callee->function_symbol ());
3391 for (ie = node->indirect_calls; ie; ie = next_ie)
3393 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3394 struct ipa_jump_func *jfunc;
3395 int param_index;
3396 cgraph_node *spec_target = NULL;
3398 next_ie = ie->next_callee;
3400 if (ici->param_index == -1)
3401 continue;
3403 /* We must check range due to calls with variable number of arguments: */
3404 if (ici->param_index >= ipa_get_cs_argument_count (top))
3406 ici->param_index = -1;
3407 continue;
3410 param_index = ici->param_index;
3411 jfunc = ipa_get_ith_jump_func (top, param_index);
3413 if (ie->speculative)
3415 struct cgraph_edge *de;
3416 struct ipa_ref *ref;
3417 ie->speculative_call_info (de, ie, ref);
3418 spec_target = de->callee;
3421 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3422 new_direct_edge = NULL;
3423 else if (ici->polymorphic)
3425 ipa_polymorphic_call_context ctx;
3426 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3427 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3429 else
3431 tree target_type = ipa_get_type (inlined_node_info, param_index);
3432 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3433 target_type,
3434 new_root_info);
3437 /* If speculation was removed, then we need to do nothing. */
3438 if (new_direct_edge && new_direct_edge != ie
3439 && new_direct_edge->callee == spec_target)
3441 new_direct_edge->indirect_inlining_edge = 1;
3442 top = IPA_EDGE_REF (cs);
3443 res = true;
3444 if (!new_direct_edge->speculative)
3445 continue;
3447 else if (new_direct_edge)
3449 new_direct_edge->indirect_inlining_edge = 1;
3450 if (new_direct_edge->call_stmt)
3451 new_direct_edge->call_stmt_cannot_inline_p
3452 = !gimple_check_call_matching_types (
3453 new_direct_edge->call_stmt,
3454 new_direct_edge->callee->decl, false);
3455 if (new_edges)
3457 new_edges->safe_push (new_direct_edge);
3458 res = true;
3460 top = IPA_EDGE_REF (cs);
3461 /* If a speculative edge was introduced, we still need to update
3462 the call info of the indirect edge. */
3463 if (!new_direct_edge->speculative)
3464 continue;
3466 if (jfunc->type == IPA_JF_PASS_THROUGH
3467 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3469 if (ici->agg_contents
3470 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3471 && !ici->polymorphic)
3472 ici->param_index = -1;
3473 else
3475 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3476 if (ici->polymorphic
3477 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3478 ici->vptr_changed = true;
3481 else if (jfunc->type == IPA_JF_ANCESTOR)
3483 if (ici->agg_contents
3484 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3485 && !ici->polymorphic)
3486 ici->param_index = -1;
3487 else
3489 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3490 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3491 if (ici->polymorphic
3492 && !ipa_get_jf_ancestor_type_preserved (jfunc))
3493 ici->vptr_changed = true;
3496 else
3497 /* Either we can find a destination for this edge now or never. */
3498 ici->param_index = -1;
3501 return res;
3504 /* Recursively traverse subtree of NODE (including node) made of inlined
3505 cgraph_edges when CS has been inlined and invoke
3506 update_indirect_edges_after_inlining on all nodes and
3507 update_jump_functions_after_inlining on all non-inlined edges that lead out
3508 of this subtree. Newly discovered indirect edges will be added to
3509 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges
3510 were created. */
3512 static bool
3513 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3514 struct cgraph_node *node,
3515 vec<cgraph_edge *> *new_edges)
3517 struct cgraph_edge *e;
3518 bool res;
3520 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3522 for (e = node->callees; e; e = e->next_callee)
3523 if (!e->inline_failed)
3524 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3525 else
3526 update_jump_functions_after_inlining (cs, e);
3527 for (e = node->indirect_calls; e; e = e->next_callee)
3528 update_jump_functions_after_inlining (cs, e);
3530 return res;
3533 /* Combine two controlled uses counts as done during inlining. */
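/* E.g. if the new root knew of c == 2 controlled uses and the old root of
   d == 3, the combined count is 2 + 3 - 1 == 4: the call statement of the
   edge being inlined accounted for one of the counted uses and has
   disappeared after inlining.  */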
3535 static int
3536 combine_controlled_uses_counters (int c, int d)
3538 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3539 return IPA_UNDESCRIBED_USE;
3540 else
3541 return c + d - 1;
3544 /* Propagate the number of controlled users from CS->callee to the new root
3545 of the tree of inlined nodes. */
3547 static void
3548 propagate_controlled_uses (struct cgraph_edge *cs)
3550 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3551 struct cgraph_node *new_root = cs->caller->global.inlined_to
3552 ? cs->caller->global.inlined_to : cs->caller;
3553 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3554 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3555 int count, i;
3557 count = MIN (ipa_get_cs_argument_count (args),
3558 ipa_get_param_count (old_root_info));
3559 for (i = 0; i < count; i++)
3561 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3562 struct ipa_cst_ref_desc *rdesc;
3564 if (jf->type == IPA_JF_PASS_THROUGH)
3566 int src_idx, c, d;
3567 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3568 c = ipa_get_controlled_uses (new_root_info, src_idx);
3569 d = ipa_get_controlled_uses (old_root_info, i);
3571 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3572 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3573 c = combine_controlled_uses_counters (c, d);
3574 ipa_set_controlled_uses (new_root_info, src_idx, c);
3575 if (c == 0 && new_root_info->ipcp_orig_node)
3577 struct cgraph_node *n;
3578 struct ipa_ref *ref;
3579 tree t = new_root_info->known_csts[src_idx];
3581 if (t && TREE_CODE (t) == ADDR_EXPR
3582 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3583 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3584 && (ref = new_root->find_reference (n, NULL, 0)))
3586 if (dump_file)
3587 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3588 "reference from %s to %s.\n",
3589 new_root->dump_name (),
3590 n->dump_name ());
3591 ref->remove_reference ();
3595 else if (jf->type == IPA_JF_CONST
3596 && (rdesc = jfunc_rdesc_usable (jf)))
3598 int d = ipa_get_controlled_uses (old_root_info, i);
3599 int c = rdesc->refcount;
3600 rdesc->refcount = combine_controlled_uses_counters (c, d);
3601 if (rdesc->refcount == 0)
3603 tree cst = ipa_get_jf_constant (jf);
3604 struct cgraph_node *n;
3605 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3606 && TREE_CODE (TREE_OPERAND (cst, 0))
3607 == FUNCTION_DECL);
3608 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3609 if (n)
3611 struct cgraph_node *clone;
3612 bool ok;
3613 ok = remove_described_reference (n, rdesc);
3614 gcc_checking_assert (ok);
3616 clone = cs->caller;
3617 while (clone->global.inlined_to
3618 && clone != rdesc->cs->caller
3619 && IPA_NODE_REF (clone)->ipcp_orig_node)
3621 struct ipa_ref *ref;
3622 ref = clone->find_reference (n, NULL, 0);
3623 if (ref)
3625 if (dump_file)
3626 fprintf (dump_file, "ipa-prop: Removing "
3627 "cloning-created reference "
3628 "from %s to %s.\n",
3629 clone->dump_name (),
3630 n->dump_name ());
3631 ref->remove_reference ();
3633 clone = clone->callers->caller;
3640 for (i = ipa_get_param_count (old_root_info);
3641 i < ipa_get_cs_argument_count (args);
3642 i++)
3644 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3646 if (jf->type == IPA_JF_CONST)
3648 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3649 if (rdesc)
3650 rdesc->refcount = IPA_UNDESCRIBED_USE;
3652 else if (jf->type == IPA_JF_PASS_THROUGH)
3653 ipa_set_controlled_uses (new_root_info,
3654 jf->value.pass_through.formal_id,
3655 IPA_UNDESCRIBED_USE);
3659 /* Update jump functions and call note functions on inlining the call site CS.
3660 CS is expected to lead to a node already cloned by
3661 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3662 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
3663 created. */
3665 bool
3666 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3667 vec<cgraph_edge *> *new_edges)
3669 bool changed;
3670 /* Do nothing if the preparation phase has not been carried out yet
3671 (i.e. during early inlining). */
3672 if (!ipa_node_params_sum)
3673 return false;
3674 gcc_assert (ipa_edge_args_sum);
3676 propagate_controlled_uses (cs);
3677 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3679 return changed;
3682 /* Ensure that the array of edge argument infos is big enough to accommodate a
3683 structure for all edges, reallocating it if not. Also allocate the
3684 associated hash tables if they do not already exist. */
3686 void
3687 ipa_check_create_edge_args (void)
3689 if (!ipa_edge_args_sum)
3690 ipa_edge_args_sum
3691 = (new (ggc_cleared_alloc <ipa_edge_args_sum_t> ())
3692 ipa_edge_args_sum_t (symtab, true));
3693 if (!ipa_bits_hash_table)
3694 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3695 if (!ipa_vr_hash_table)
3696 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
3699 /* Free all ipa_edge_args structures. */
3701 void
3702 ipa_free_all_edge_args (void)
3704 if (!ipa_edge_args_sum)
3705 return;
3707 ipa_edge_args_sum->release ();
3708 ipa_edge_args_sum = NULL;
3711 /* Free all ipa_node_params structures. */
3713 void
3714 ipa_free_all_node_params (void)
3716 ipa_node_params_sum->release ();
3717 ipa_node_params_sum = NULL;
3720 /* Initialize IPA CP transformation summary and also allocate any necessary hash
3721 tables if they do not already exist. */
3723 void
3724 ipcp_transformation_initialize (void)
3726 if (!ipa_bits_hash_table)
3727 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3728 if (!ipa_vr_hash_table)
3729 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
3730 if (ipcp_transformation_sum == NULL)
3731 ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
3734 /* Set the aggregate replacements of NODE to be AGGVALS. */
3736 void
3737 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3738 struct ipa_agg_replacement_value *aggvals)
3740 ipcp_transformation_initialize ();
3741 ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
3742 s->agg_values = aggvals;
3745 /* Hook that is called by cgraph.c when an edge is removed. Adjust reference
3746 count data structures accordingly. */
3748 void
3749 ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
3751 if (args->jump_functions)
3753 struct ipa_jump_func *jf;
3754 int i;
3755 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3757 struct ipa_cst_ref_desc *rdesc;
3758 try_decrement_rdesc_refcount (jf);
3759 if (jf->type == IPA_JF_CONST
3760 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3761 && rdesc->cs == cs)
3762 rdesc->cs = NULL;
3767 /* Method invoked when an edge is duplicated. Copy ipa_edge_args and adjust
3768 reference count data structures accordingly. */
3770 void
3771 ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
3772 ipa_edge_args *old_args, ipa_edge_args *new_args)
3774 unsigned int i;
3776 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3777 if (old_args->polymorphic_call_contexts)
3778 new_args->polymorphic_call_contexts
3779 = vec_safe_copy (old_args->polymorphic_call_contexts);
3781 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3783 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3784 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3786 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3788 if (src_jf->type == IPA_JF_CONST)
3790 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3792 if (!src_rdesc)
3793 dst_jf->value.constant.rdesc = NULL;
3794 else if (src->caller == dst->caller)
3796 struct ipa_ref *ref;
3797 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3798 gcc_checking_assert (n);
3799 ref = src->caller->find_reference (n, src->call_stmt,
3800 src->lto_stmt_uid);
3801 gcc_checking_assert (ref);
3802 dst->caller->clone_reference (ref, ref->stmt);
3804 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3805 dst_rdesc->cs = dst;
3806 dst_rdesc->refcount = src_rdesc->refcount;
3807 dst_rdesc->next_duplicate = NULL;
3808 dst_jf->value.constant.rdesc = dst_rdesc;
3810 else if (src_rdesc->cs == src)
3812 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3813 dst_rdesc->cs = dst;
3814 dst_rdesc->refcount = src_rdesc->refcount;
3815 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3816 src_rdesc->next_duplicate = dst_rdesc;
3817 dst_jf->value.constant.rdesc = dst_rdesc;
3819 else
3821 struct ipa_cst_ref_desc *dst_rdesc;
3822 /* This can happen during inlining, when a JFUNC can refer to a
3823 reference taken in a function up in the tree of inline clones.
3824 We need to find the duplicate that refers to our tree of
3825 inline clones. */
3827 gcc_assert (dst->caller->global.inlined_to);
3828 for (dst_rdesc = src_rdesc->next_duplicate;
3829 dst_rdesc;
3830 dst_rdesc = dst_rdesc->next_duplicate)
3832 struct cgraph_node *top;
3833 top = dst_rdesc->cs->caller->global.inlined_to
3834 ? dst_rdesc->cs->caller->global.inlined_to
3835 : dst_rdesc->cs->caller;
3836 if (dst->caller->global.inlined_to == top)
3837 break;
3839 gcc_assert (dst_rdesc);
3840 dst_jf->value.constant.rdesc = dst_rdesc;
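/* A call site duplicated within the same caller means one more described
use of the passed-through formal parameter, so bump the controlled-uses
count recorded for it in the inline root.  */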
3843 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3844 && src->caller == dst->caller)
3846 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3847 ? dst->caller->global.inlined_to : dst->caller;
3848 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3849 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3851 int c = ipa_get_controlled_uses (root_info, idx);
3852 if (c != IPA_UNDESCRIBED_USE)
3854 c++;
3855 ipa_set_controlled_uses (root_info, idx, c);
3861 /* Analyze a function that is newly added into the callgraph. */
3863 static void
3864 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3866 if (node->has_gimple_body_p ())
3867 ipa_analyze_node (node);
3870 /* Hook that is called by the summary machinery when a node is duplicated. */
3872 void
3873 ipa_node_params_t::duplicate (cgraph_node *src, cgraph_node *dst,
3874 ipa_node_params *old_info,
3875 ipa_node_params *new_info)
3877 ipa_agg_replacement_value *old_av, *new_av;
3879 new_info->descriptors = vec_safe_copy (old_info->descriptors);
3880 new_info->lattices = NULL;
3881 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3882 new_info->known_csts = old_info->known_csts.copy ();
3883 new_info->known_contexts = old_info->known_contexts.copy ();
3885 new_info->analysis_done = old_info->analysis_done;
3886 new_info->node_enqueued = old_info->node_enqueued;
3887 new_info->versionable = old_info->versionable;
3889 old_av = ipa_get_agg_replacements_for_node (src);
3890 if (old_av)
3892 new_av = NULL;
3893 while (old_av)
3895 struct ipa_agg_replacement_value *v;
3897 v = ggc_alloc<ipa_agg_replacement_value> ();
3898 memcpy (v, old_av, sizeof (*v));
3899 v->next = new_av;
3900 new_av = v;
3901 old_av = old_av->next;
3903 ipa_set_node_agg_value_chain (dst, new_av);
3906 ipcp_transformation *src_trans = ipcp_get_transformation_summary (src);
3908 if (src_trans)
3910 ipcp_transformation_initialize ();
3911 src_trans = ipcp_transformation_sum->get_create (src);
3912 ipcp_transformation *dst_trans
3913 = ipcp_transformation_sum->get_create (dst);
3915 dst_trans->bits = vec_safe_copy (src_trans->bits);
3917 const vec<ipa_vr, va_gc> *src_vr = src_trans->m_vr;
3918 vec<ipa_vr, va_gc> *&dst_vr
3919 = ipcp_get_transformation_summary (dst)->m_vr;
3920 if (vec_safe_length (src_trans->m_vr) > 0)
3922 vec_safe_reserve_exact (dst_vr, src_vr->length ());
3923 for (unsigned i = 0; i < src_vr->length (); ++i)
3924 dst_vr->quick_push ((*src_vr)[i]);
3929 /* Register our cgraph hooks if they are not already there. */
3931 void
3932 ipa_register_cgraph_hooks (void)
3934 ipa_check_create_node_params ();
3935 ipa_check_create_edge_args ();
3937 function_insertion_hook_holder =
3938 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3941 /* Unregister our cgraph hooks. */
3943 static void
3944 ipa_unregister_cgraph_hooks (void)
3946 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3947 function_insertion_hook_holder = NULL;
3950 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3951 longer needed after ipa-cp. */
3953 void
3954 ipa_free_all_structures_after_ipa_cp (void)
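/* When optimizing, the data is still needed by the inliner and the
indirect-inlining machinery; in that case it is released later by
ipa_free_all_structures_after_iinln.  */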
3956 if (!optimize && !in_lto_p)
3958 ipa_free_all_edge_args ();
3959 ipa_free_all_node_params ();
3960 ipcp_sources_pool.release ();
3961 ipcp_cst_values_pool.release ();
3962 ipcp_poly_ctx_values_pool.release ();
3963 ipcp_agg_lattice_pool.release ();
3964 ipa_unregister_cgraph_hooks ();
3965 ipa_refdesc_pool.release ();
3969 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3970 longer needed after indirect inlining. */
3972 void
3973 ipa_free_all_structures_after_iinln (void)
3975 ipa_free_all_edge_args ();
3976 ipa_free_all_node_params ();
3977 ipa_unregister_cgraph_hooks ();
3978 ipcp_sources_pool.release ();
3979 ipcp_cst_values_pool.release ();
3980 ipcp_poly_ctx_values_pool.release ();
3981 ipcp_agg_lattice_pool.release ();
3982 ipa_refdesc_pool.release ();
3985 /* Print the parameter descriptors of function NODE to F. */
3988 void
3989 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3991 int i, count;
3992 struct ipa_node_params *info;
3994 if (!node->definition)
3995 return;
3996 info = IPA_NODE_REF (node);
3997 fprintf (f, " function %s parameter descriptors:\n", node->dump_name ());
3998 count = ipa_get_param_count (info);
3999 for (i = 0; i < count; i++)
4001 int c;
4003 fprintf (f, " ");
4004 ipa_dump_param (f, info, i);
4005 if (ipa_is_param_used (info, i))
4006 fprintf (f, " used");
4007 c = ipa_get_controlled_uses (info, i);
4008 if (c == IPA_UNDESCRIBED_USE)
4009 fprintf (f, " undescribed_use");
4010 else
4011 fprintf (f, " controlled_uses=%i", c);
4012 fprintf (f, "\n");
4016 /* Print the parameter descriptors of all functions in the callgraph to F. */
4019 void
4020 ipa_print_all_params (FILE * f)
4022 struct cgraph_node *node;
4024 fprintf (f, "\nFunction parameters:\n");
4025 FOR_EACH_FUNCTION (node)
4026 ipa_print_node_params (f, node);
4031 /* Dump the linked list of aggregate replacement values AV to F. */
4031 void
4032 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4034 bool comma = false;
4035 fprintf (f, " Aggregate replacements:");
4036 for (; av; av = av->next)
4038 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4039 av->index, av->offset);
4040 print_generic_expr (f, av->value);
4041 comma = true;
4043 fprintf (f, "\n");
4046 /* Stream out jump function JUMP_FUNC to OB. */
4048 static void
4049 ipa_write_jump_function (struct output_block *ob,
4050 struct ipa_jump_func *jump_func)
4052 struct ipa_agg_jf_item *item;
4053 struct bitpack_d bp;
4054 int i, count;
4056 streamer_write_uhwi (ob, jump_func->type);
4057 switch (jump_func->type)
4059 case IPA_JF_UNKNOWN:
4060 break;
4061 case IPA_JF_CONST:
4062 gcc_assert (
4063 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4064 stream_write_tree (ob, jump_func->value.constant.value, true);
4065 break;
4066 case IPA_JF_PASS_THROUGH:
4067 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4068 if (jump_func->value.pass_through.operation == NOP_EXPR)
4070 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4071 bp = bitpack_create (ob->main_stream);
4072 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4073 streamer_write_bitpack (&bp);
4075 else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
4076 == tcc_unary)
4077 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4078 else
4080 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4081 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4083 break;
4084 case IPA_JF_ANCESTOR:
4085 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4086 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4087 bp = bitpack_create (ob->main_stream);
4088 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4089 streamer_write_bitpack (&bp);
4090 break;
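/* Stream out the aggregate part of the jump function: the item count,
the by_ref flag, and each item's offset and value.  */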
4093 count = vec_safe_length (jump_func->agg.items);
4094 streamer_write_uhwi (ob, count);
4095 if (count)
4097 bp = bitpack_create (ob->main_stream);
4098 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4099 streamer_write_bitpack (&bp);
4102 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4104 streamer_write_uhwi (ob, item->offset);
4105 stream_write_tree (ob, item->value, true);
4108 bp = bitpack_create (ob->main_stream);
4109 bp_pack_value (&bp, !!jump_func->bits, 1);
4110 streamer_write_bitpack (&bp);
4111 if (jump_func->bits)
4113 streamer_write_widest_int (ob, jump_func->bits->value);
4114 streamer_write_widest_int (ob, jump_func->bits->mask);
4116 bp_pack_value (&bp, !!jump_func->m_vr, 1);
4117 streamer_write_bitpack (&bp);
4118 if (jump_func->m_vr)
4120 streamer_write_enum (ob->main_stream, value_range_kind,
4121 VR_LAST, jump_func->m_vr->kind ());
4122 stream_write_tree (ob, jump_func->m_vr->min (), true);
4123 stream_write_tree (ob, jump_func->m_vr->max (), true);
4127 /* Read in jump function JUMP_FUNC from IB. */
4129 static void
4130 ipa_read_jump_function (struct lto_input_block *ib,
4131 struct ipa_jump_func *jump_func,
4132 struct cgraph_edge *cs,
4133 struct data_in *data_in)
4135 enum jump_func_type jftype;
4136 enum tree_code operation;
4137 int i, count;
4139 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4140 switch (jftype)
4142 case IPA_JF_UNKNOWN:
4143 ipa_set_jf_unknown (jump_func);
4144 break;
4145 case IPA_JF_CONST:
4146 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4147 break;
4148 case IPA_JF_PASS_THROUGH:
4149 operation = (enum tree_code) streamer_read_uhwi (ib);
4150 if (operation == NOP_EXPR)
4152 int formal_id = streamer_read_uhwi (ib);
4153 struct bitpack_d bp = streamer_read_bitpack (ib);
4154 bool agg_preserved = bp_unpack_value (&bp, 1);
4155 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4157 else if (TREE_CODE_CLASS (operation) == tcc_unary)
4159 int formal_id = streamer_read_uhwi (ib);
4160 ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4162 else
4164 tree operand = stream_read_tree (ib, data_in);
4165 int formal_id = streamer_read_uhwi (ib);
4166 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4167 operation);
4169 break;
4170 case IPA_JF_ANCESTOR:
4172 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4173 int formal_id = streamer_read_uhwi (ib);
4174 struct bitpack_d bp = streamer_read_bitpack (ib);
4175 bool agg_preserved = bp_unpack_value (&bp, 1);
4176 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4177 break;
4181 count = streamer_read_uhwi (ib);
4182 vec_alloc (jump_func->agg.items, count);
4183 if (count)
4185 struct bitpack_d bp = streamer_read_bitpack (ib);
4186 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4188 for (i = 0; i < count; i++)
4190 struct ipa_agg_jf_item item;
4191 item.offset = streamer_read_uhwi (ib);
4192 item.value = stream_read_tree (ib, data_in);
4193 jump_func->agg.items->quick_push (item);
4196 struct bitpack_d bp = streamer_read_bitpack (ib);
4197 bool bits_known = bp_unpack_value (&bp, 1);
4198 if (bits_known)
4200 widest_int value = streamer_read_widest_int (ib);
4201 widest_int mask = streamer_read_widest_int (ib);
4202 ipa_set_jfunc_bits (jump_func, value, mask);
4204 else
4205 jump_func->bits = NULL;
4207 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4208 bool vr_known = bp_unpack_value (&vr_bp, 1);
4209 if (vr_known)
4211 enum value_range_kind type = streamer_read_enum (ib, value_range_kind,
4212 VR_LAST);
4213 tree min = stream_read_tree (ib, data_in);
4214 tree max = stream_read_tree (ib, data_in);
4215 ipa_set_jfunc_vr (jump_func, type, min, max);
4217 else
4218 jump_func->m_vr = NULL;
4221 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4222 relevant to indirect inlining to OB. */
4224 static void
4225 ipa_write_indirect_edge_info (struct output_block *ob,
4226 struct cgraph_edge *cs)
4228 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4229 struct bitpack_d bp;
4231 streamer_write_hwi (ob, ii->param_index);
4232 bp = bitpack_create (ob->main_stream);
4233 bp_pack_value (&bp, ii->polymorphic, 1);
4234 bp_pack_value (&bp, ii->agg_contents, 1);
4235 bp_pack_value (&bp, ii->member_ptr, 1);
4236 bp_pack_value (&bp, ii->by_ref, 1);
4237 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
4238 bp_pack_value (&bp, ii->vptr_changed, 1);
4239 streamer_write_bitpack (&bp);
4240 if (ii->agg_contents || ii->polymorphic)
4241 streamer_write_hwi (ob, ii->offset);
4242 else
4243 gcc_assert (ii->offset == 0);
4245 if (ii->polymorphic)
4247 streamer_write_hwi (ob, ii->otr_token);
4248 stream_write_tree (ob, ii->otr_type, true);
4249 ii->context.stream_out (ob);
4253 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4254 relevant to indirect inlining from IB. */
4256 static void
4257 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4258 struct data_in *data_in,
4259 struct cgraph_edge *cs)
4261 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4262 struct bitpack_d bp;
4264 ii->param_index = (int) streamer_read_hwi (ib);
4265 bp = streamer_read_bitpack (ib);
4266 ii->polymorphic = bp_unpack_value (&bp, 1);
4267 ii->agg_contents = bp_unpack_value (&bp, 1);
4268 ii->member_ptr = bp_unpack_value (&bp, 1);
4269 ii->by_ref = bp_unpack_value (&bp, 1);
4270 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
4271 ii->vptr_changed = bp_unpack_value (&bp, 1);
4272 if (ii->agg_contents || ii->polymorphic)
4273 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4274 else
4275 ii->offset = 0;
4276 if (ii->polymorphic)
4278 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4279 ii->otr_type = stream_read_tree (ib, data_in);
4280 ii->context.stream_in (ib, data_in);
4284 /* Stream out NODE info to OB. */
4286 static void
4287 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4289 int node_ref;
4290 lto_symtab_encoder_t encoder;
4291 struct ipa_node_params *info = IPA_NODE_REF (node);
4292 int j;
4293 struct cgraph_edge *e;
4294 struct bitpack_d bp;
4296 encoder = ob->decl_state->symtab_node_encoder;
4297 node_ref = lto_symtab_encoder_encode (encoder, node);
4298 streamer_write_uhwi (ob, node_ref);
4300 streamer_write_uhwi (ob, ipa_get_param_count (info));
4301 for (j = 0; j < ipa_get_param_count (info); j++)
4302 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4303 bp = bitpack_create (ob->main_stream);
4304 gcc_assert (info->analysis_done
4305 || ipa_get_param_count (info) == 0);
4306 gcc_assert (!info->node_enqueued);
4307 gcc_assert (!info->ipcp_orig_node);
4308 for (j = 0; j < ipa_get_param_count (info); j++)
4309 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4310 streamer_write_bitpack (&bp);
4311 for (j = 0; j < ipa_get_param_count (info); j++)
4313 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4314 stream_write_tree (ob, ipa_get_type (info, j), true);
4316 for (e = node->callees; e; e = e->next_callee)
4318 struct ipa_edge_args *args = IPA_EDGE_REF (e);
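/* The argument count and the presence of polymorphic call contexts are
encoded in a single streamed number: count * 2 + contexts flag.  E.g. a
call with three arguments and contexts present streams 7.  */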
4320 streamer_write_uhwi (ob,
4321 ipa_get_cs_argument_count (args) * 2
4322 + (args->polymorphic_call_contexts != NULL));
4323 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4325 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4326 if (args->polymorphic_call_contexts != NULL)
4327 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4330 for (e = node->indirect_calls; e; e = e->next_callee)
4332 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4334 streamer_write_uhwi (ob,
4335 ipa_get_cs_argument_count (args) * 2
4336 + (args->polymorphic_call_contexts != NULL));
4337 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4339 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4340 if (args->polymorphic_call_contexts != NULL)
4341 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4343 ipa_write_indirect_edge_info (ob, e);
4347 /* Stream in NODE info from IB. */
4349 static void
4350 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4351 struct data_in *data_in)
4353 struct ipa_node_params *info = IPA_NODE_REF (node);
4354 int k;
4355 struct cgraph_edge *e;
4356 struct bitpack_d bp;
4358 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4360 for (k = 0; k < ipa_get_param_count (info); k++)
4361 (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
4363 bp = streamer_read_bitpack (ib);
4364 if (ipa_get_param_count (info) != 0)
4365 info->analysis_done = true;
4366 info->node_enqueued = false;
4367 for (k = 0; k < ipa_get_param_count (info); k++)
4368 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4369 for (k = 0; k < ipa_get_param_count (info); k++)
4371 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4372 (*info->descriptors)[k].decl_or_type = stream_read_tree (ib, data_in);
4374 for (e = node->callees; e; e = e->next_callee)
4376 struct ipa_edge_args *args = IPA_EDGE_REF (e);
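/* Decode the combined number streamed by ipa_write_node_info: the lowest
bit says whether polymorphic call contexts were computed, the rest is
the argument count.  */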
4377 int count = streamer_read_uhwi (ib);
4378 bool contexts_computed = count & 1;
4379 count /= 2;
4381 if (!count)
4382 continue;
4383 vec_safe_grow_cleared (args->jump_functions, count);
4384 if (contexts_computed)
4385 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4387 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4389 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4390 data_in);
4391 if (contexts_computed)
4392 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4395 for (e = node->indirect_calls; e; e = e->next_callee)
4397 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4398 int count = streamer_read_uhwi (ib);
4399 bool contexts_computed = count & 1;
4400 count /= 2;
4402 if (count)
4404 vec_safe_grow_cleared (args->jump_functions, count);
4405 if (contexts_computed)
4406 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4407 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4409 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4410 data_in);
4411 if (contexts_computed)
4412 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4415 ipa_read_indirect_edge_info (ib, data_in, e);
4419 /* Write jump functions for all functions in the current partition. */
4421 void
4422 ipa_prop_write_jump_functions (void)
4424 struct cgraph_node *node;
4425 struct output_block *ob;
4426 unsigned int count = 0;
4427 lto_symtab_encoder_iterator lsei;
4428 lto_symtab_encoder_t encoder;
4430 if (!ipa_node_params_sum || !ipa_edge_args_sum)
4431 return;
4433 ob = create_output_block (LTO_section_jump_functions);
4434 encoder = ob->decl_state->symtab_node_encoder;
4435 ob->symbol = NULL;
4436 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4437 lsei_next_function_in_partition (&lsei))
4439 node = lsei_cgraph_node (lsei);
4440 if (node->has_gimple_body_p ()
4441 && IPA_NODE_REF (node) != NULL)
4442 count++;
4445 streamer_write_uhwi (ob, count);
4447 /* Process all of the functions. */
4448 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4449 lsei_next_function_in_partition (&lsei))
4451 node = lsei_cgraph_node (lsei);
4452 if (node->has_gimple_body_p ()
4453 && IPA_NODE_REF (node) != NULL)
4454 ipa_write_node_info (ob, node);
4456 streamer_write_char_stream (ob->main_stream, 0);
4457 produce_asm (ob, NULL);
4458 destroy_output_block (ob);
4461 /* Read section in file FILE_DATA of length LEN with data DATA. */
4463 static void
4464 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4465 size_t len)
4467 const struct lto_function_header *header =
4468 (const struct lto_function_header *) data;
4469 const int cfg_offset = sizeof (struct lto_function_header);
4470 const int main_offset = cfg_offset + header->cfg_size;
4471 const int string_offset = main_offset + header->main_size;
4472 struct data_in *data_in;
4473 unsigned int i;
4474 unsigned int count;
4476 lto_input_block ib_main ((const char *) data + main_offset,
4477 header->main_size, file_data->mode_table);
4479 data_in =
4480 lto_data_in_create (file_data, (const char *) data + string_offset,
4481 header->string_size, vNULL);
4482 count = streamer_read_uhwi (&ib_main);
4484 for (i = 0; i < count; i++)
4486 unsigned int index;
4487 struct cgraph_node *node;
4488 lto_symtab_encoder_t encoder;
4490 index = streamer_read_uhwi (&ib_main);
4491 encoder = file_data->symtab_node_encoder;
4492 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4493 index));
4494 gcc_assert (node->definition);
4495 ipa_read_node_info (&ib_main, node, data_in);
4497 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4498 len);
4499 lto_data_in_delete (data_in);
4502 /* Read IPA-CP jump functions. */
4504 void
4505 ipa_prop_read_jump_functions (void)
4507 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4508 struct lto_file_decl_data *file_data;
4509 unsigned int j = 0;
4511 ipa_check_create_node_params ();
4512 ipa_check_create_edge_args ();
4513 ipa_register_cgraph_hooks ();
4515 while ((file_data = file_data_vec[j++]))
4517 size_t len;
4518 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4520 if (data)
4521 ipa_prop_read_section (file_data, data, len);
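/* Stream out the IPA-CP transformation summary of NODE -- its chain of
aggregate value replacements, known value ranges and known bits of
parameters -- to OB.  */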
4525 void
4526 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
4528 int node_ref;
4529 unsigned int count = 0;
4530 lto_symtab_encoder_t encoder;
4531 struct ipa_agg_replacement_value *aggvals, *av;
4533 aggvals = ipa_get_agg_replacements_for_node (node);
4534 encoder = ob->decl_state->symtab_node_encoder;
4535 node_ref = lto_symtab_encoder_encode (encoder, node);
4536 streamer_write_uhwi (ob, node_ref);
4538 for (av = aggvals; av; av = av->next)
4539 count++;
4540 streamer_write_uhwi (ob, count);
4542 for (av = aggvals; av; av = av->next)
4544 struct bitpack_d bp;
4546 streamer_write_uhwi (ob, av->offset);
4547 streamer_write_uhwi (ob, av->index);
4548 stream_write_tree (ob, av->value, true);
4550 bp = bitpack_create (ob->main_stream);
4551 bp_pack_value (&bp, av->by_ref, 1);
4552 streamer_write_bitpack (&bp);
4555 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
4556 if (ts && vec_safe_length (ts->m_vr) > 0)
4558 count = ts->m_vr->length ();
4559 streamer_write_uhwi (ob, count);
4560 for (unsigned i = 0; i < count; ++i)
4562 struct bitpack_d bp;
4563 ipa_vr *parm_vr = &(*ts->m_vr)[i];
4564 bp = bitpack_create (ob->main_stream);
4565 bp_pack_value (&bp, parm_vr->known, 1);
4566 streamer_write_bitpack (&bp);
4567 if (parm_vr->known)
4569 streamer_write_enum (ob->main_stream, value_range_kind,
4570 VR_LAST, parm_vr->type);
4571 streamer_write_wide_int (ob, parm_vr->min);
4572 streamer_write_wide_int (ob, parm_vr->max);
4576 else
4577 streamer_write_uhwi (ob, 0);
4579 if (ts && vec_safe_length (ts->bits) > 0)
4581 count = ts->bits->length ();
4582 streamer_write_uhwi (ob, count);
4584 for (unsigned i = 0; i < count; ++i)
4586 const ipa_bits *bits_jfunc = (*ts->bits)[i];
4587 struct bitpack_d bp = bitpack_create (ob->main_stream);
4588 bp_pack_value (&bp, !!bits_jfunc, 1);
4589 streamer_write_bitpack (&bp);
4590 if (bits_jfunc)
4592 streamer_write_widest_int (ob, bits_jfunc->value);
4593 streamer_write_widest_int (ob, bits_jfunc->mask);
4597 else
4598 streamer_write_uhwi (ob, 0);
4601 /* Stream in the aggregate value replacement chain, value ranges and known bits for NODE from IB. */
4603 static void
4604 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
4605 data_in *data_in)
4607 struct ipa_agg_replacement_value *aggvals = NULL;
4608 unsigned int count, i;
4610 count = streamer_read_uhwi (ib);
4611 for (i = 0; i < count; i++)
4613 struct ipa_agg_replacement_value *av;
4614 struct bitpack_d bp;
4616 av = ggc_alloc<ipa_agg_replacement_value> ();
4617 av->offset = streamer_read_uhwi (ib);
4618 av->index = streamer_read_uhwi (ib);
4619 av->value = stream_read_tree (ib, data_in);
4620 bp = streamer_read_bitpack (ib);
4621 av->by_ref = bp_unpack_value (&bp, 1);
4622 av->next = aggvals;
4623 aggvals = av;
4625 ipa_set_node_agg_value_chain (node, aggvals);
4627 count = streamer_read_uhwi (ib);
4628 if (count > 0)
4630 ipcp_transformation_initialize ();
4631 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
4632 vec_safe_grow_cleared (ts->m_vr, count);
4633 for (i = 0; i < count; i++)
4635 ipa_vr *parm_vr;
4636 parm_vr = &(*ts->m_vr)[i];
4637 struct bitpack_d bp;
4638 bp = streamer_read_bitpack (ib);
4639 parm_vr->known = bp_unpack_value (&bp, 1);
4640 if (parm_vr->known)
4642 parm_vr->type = streamer_read_enum (ib, value_range_kind,
4643 VR_LAST);
4644 parm_vr->min = streamer_read_wide_int (ib);
4645 parm_vr->max = streamer_read_wide_int (ib);
4649 count = streamer_read_uhwi (ib);
4650 if (count > 0)
4652 ipcp_transformation_initialize ();
4653 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
4654 vec_safe_grow_cleared (ts->bits, count);
4656 for (i = 0; i < count; i++)
4658 struct bitpack_d bp = streamer_read_bitpack (ib);
4659 bool known = bp_unpack_value (&bp, 1);
4660 if (known)
4662 ipa_bits *bits
4663 = ipa_get_ipa_bits_for_value (streamer_read_widest_int (ib),
4664 streamer_read_widest_int (ib));
4665 (*ts->bits)[i] = bits;
4671 /* Write the IPA-CP transformation summaries of all functions in the current partition. */
4673 void
4674 ipcp_write_transformation_summaries (void)
4676 struct cgraph_node *node;
4677 struct output_block *ob;
4678 unsigned int count = 0;
4679 lto_symtab_encoder_iterator lsei;
4680 lto_symtab_encoder_t encoder;
4682 ob = create_output_block (LTO_section_ipcp_transform);
4683 encoder = ob->decl_state->symtab_node_encoder;
4684 ob->symbol = NULL;
4685 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4686 lsei_next_function_in_partition (&lsei))
4688 node = lsei_cgraph_node (lsei);
4689 if (node->has_gimple_body_p ())
4690 count++;
4693 streamer_write_uhwi (ob, count);
4695 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4696 lsei_next_function_in_partition (&lsei))
4698 node = lsei_cgraph_node (lsei);
4699 if (node->has_gimple_body_p ())
4700 write_ipcp_transformation_info (ob, node);
4702 streamer_write_char_stream (ob->main_stream, 0);
4703 produce_asm (ob, NULL);
4704 destroy_output_block (ob);
4707 /* Read replacements section in file FILE_DATA of length LEN with data
4708 DATA. */
4710 static void
4711 read_replacements_section (struct lto_file_decl_data *file_data,
4712 const char *data,
4713 size_t len)
4715 const struct lto_function_header *header =
4716 (const struct lto_function_header *) data;
4717 const int cfg_offset = sizeof (struct lto_function_header);
4718 const int main_offset = cfg_offset + header->cfg_size;
4719 const int string_offset = main_offset + header->main_size;
4720 struct data_in *data_in;
4721 unsigned int i;
4722 unsigned int count;
4724 lto_input_block ib_main ((const char *) data + main_offset,
4725 header->main_size, file_data->mode_table);
4727 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
4728 header->string_size, vNULL);
4729 count = streamer_read_uhwi (&ib_main);
4731 for (i = 0; i < count; i++)
4733 unsigned int index;
4734 struct cgraph_node *node;
4735 lto_symtab_encoder_t encoder;
4737 index = streamer_read_uhwi (&ib_main);
4738 encoder = file_data->symtab_node_encoder;
4739 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4740 index));
4741 gcc_assert (node->definition);
4742 read_ipcp_transformation_info (&ib_main, node, data_in);
4744 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
4745 len);
4746 lto_data_in_delete (data_in);
4749 /* Read IPA-CP aggregate replacements. */
4751 void
4752 ipcp_read_transformation_summaries (void)
4754 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4755 struct lto_file_decl_data *file_data;
4756 unsigned int j = 0;
4758 while ((file_data = file_data_vec[j++]))
4760 size_t len;
4761 const char *data = lto_get_section_data (file_data,
4762 LTO_section_ipcp_transform,
4763 NULL, &len);
4764 if (data)
4765 read_replacements_section (file_data, data, len);
4769 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4770 NODE. */
4772 static void
4773 adjust_agg_replacement_values (struct cgraph_node *node,
4774 struct ipa_agg_replacement_value *aggval)
4776 struct ipa_agg_replacement_value *v;
4777 int i, c = 0, d = 0, *adj;
4779 if (!node->clone.combined_args_to_skip)
4780 return;
4782 for (v = aggval; v; v = v->next)
4784 gcc_assert (v->index >= 0);
4785 if (c < v->index)
4786 c = v->index;
4788 c++;
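/* Build a map from pre-clone parameter indices to post-clone ones;
parameters that the clone skips map to -1.  */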
4790 adj = XALLOCAVEC (int, c);
4791 for (i = 0; i < c; i++)
4792 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
4794 adj[i] = -1;
4795 d++;
4797 else
4798 adj[i] = i - d;
4800 for (v = aggval; v; v = v->next)
4801 v->index = adj[v->index];
4804 /* Dominator walker driving the ipcp modification phase. */
4806 class ipcp_modif_dom_walker : public dom_walker
4808 public:
4809 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
4810 vec<ipa_param_descriptor, va_gc> *descs,
4811 struct ipa_agg_replacement_value *av,
4812 bool *sc, bool *cc)
4813 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
4814 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
4816 virtual edge before_dom_children (basic_block);
4818 private:
4819 struct ipa_func_body_info *m_fbi;
4820 vec<ipa_param_descriptor, va_gc> *m_descriptors;
4821 struct ipa_agg_replacement_value *m_aggval;
4822 bool *m_something_changed, *m_cfg_changed;
4825 edge
4826 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
4828 gimple_stmt_iterator gsi;
4829 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4831 struct ipa_agg_replacement_value *v;
4832 gimple *stmt = gsi_stmt (gsi);
4833 tree rhs, val, t;
4834 HOST_WIDE_INT offset, size;
4835 int index;
4836 bool by_ref, vce;
4838 if (!gimple_assign_load_p (stmt))
4839 continue;
4840 rhs = gimple_assign_rhs1 (stmt);
4841 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
4842 continue;
4844 vce = false;
4845 t = rhs;
4846 while (handled_component_p (t))
4848 /* V_C_E can do things like convert an array of integers to one
4849 bigger integer and similar things we do not handle below. */
4850 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
4852 vce = true;
4853 break;
4855 t = TREE_OPERAND (t, 0);
4857 if (vce)
4858 continue;
4860 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
4861 &offset, &size, &by_ref))
4862 continue;
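/* Look for a replacement value recorded for this parameter at this
offset within the aggregate.  */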
4863 for (v = m_aggval; v; v = v->next)
4864 if (v->index == index
4865 && v->offset == offset)
4866 break;
4867 if (!v
4868 || v->by_ref != by_ref
4869 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
4870 continue;
4872 gcc_checking_assert (is_gimple_ip_invariant (v->value));
4873 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
4875 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
4876 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
4877 else if (TYPE_SIZE (TREE_TYPE (rhs))
4878 == TYPE_SIZE (TREE_TYPE (v->value)))
4879 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
4880 else
4882 if (dump_file)
4884 fprintf (dump_file, " const ");
4885 print_generic_expr (dump_file, v->value);
4886 fprintf (dump_file, " can't be converted to type of ");
4887 print_generic_expr (dump_file, rhs);
4888 fprintf (dump_file, "\n");
4890 continue;
4893 else
4894 val = v->value;
4896 if (dump_file && (dump_flags & TDF_DETAILS))
4898 fprintf (dump_file, "Modifying stmt:\n ");
4899 print_gimple_stmt (dump_file, stmt, 0);
4901 gimple_assign_set_rhs_from_tree (&gsi, val);
4902 update_stmt (stmt);
4904 if (dump_file && (dump_flags & TDF_DETAILS))
4906 fprintf (dump_file, "into:\n ");
4907 print_gimple_stmt (dump_file, stmt, 0);
4908 fprintf (dump_file, "\n");
4911 *m_something_changed = true;
4912 if (maybe_clean_eh_stmt (stmt)
4913 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4914 *m_cfg_changed = true;
4916 return NULL;
4919 /* Update bits info of formal parameters as described in
4920 ipcp_transformation. */
4922 static void
4923 ipcp_update_bits (struct cgraph_node *node)
4925 tree parm = DECL_ARGUMENTS (node->decl);
4926 tree next_parm = parm;
4927 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
4929 if (!ts || vec_safe_length (ts->bits) == 0)
4930 return;
4932 vec<ipa_bits *, va_gc> &bits = *ts->bits;
4933 unsigned count = bits.length ();
4935 for (unsigned i = 0; i < count; ++i, parm = next_parm)
4937 if (node->clone.combined_args_to_skip
4938 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
4939 continue;
4941 gcc_checking_assert (parm);
4942 next_parm = DECL_CHAIN (parm);
4944 if (!bits[i]
4945 || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
4946 || POINTER_TYPE_P (TREE_TYPE (parm)))
4947 || !is_gimple_reg (parm))
4948 continue;
4950 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
4951 if (!ddef)
4952 continue;
4954 if (dump_file)
4956 fprintf (dump_file, "Adjusting mask for param %u to ", i);
4957 print_hex (bits[i]->mask, dump_file);
4958 fprintf (dump_file, "\n");
4961 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
4963 unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
4964 signop sgn = TYPE_SIGN (TREE_TYPE (ddef));
4966 wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
4967 | wide_int::from (bits[i]->value, prec, sgn);
4968 set_nonzero_bits (ddef, nonzero_bits);
4970 else
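/* For pointers, the lowest set bit of the mask gives a conservative
alignment and the corresponding low bits of the value give the
misalignment within it.  */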
4972 unsigned tem = bits[i]->mask.to_uhwi ();
4973 unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
4974 unsigned align = tem & -tem;
4975 unsigned misalign = bitpos & (align - 1);
4977 if (align > 1)
4979 if (dump_file)
4980 fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);
4982 unsigned old_align, old_misalign;
4983 struct ptr_info_def *pi = get_ptr_info (ddef);
4984 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
4986 if (old_known
4987 && old_align > align)
4989 if (dump_file)
4991 fprintf (dump_file, "But alignment was already %u.\n", old_align);
4992 if ((old_misalign & (align - 1)) != misalign)
4993 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
4994 old_misalign, misalign);
4996 continue;
4999 if (old_known
5000 && ((misalign & (old_align - 1)) != old_misalign)
5001 && dump_file)
5002 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5003 old_misalign, misalign);
5005 set_ptr_info_alignment (pi, align, misalign);
5011 /* Update value range of formal parameters as described in
5012 ipcp_transformation. */
5014 static void
5015 ipcp_update_vr (struct cgraph_node *node)
5017 tree fndecl = node->decl;
5018 tree parm = DECL_ARGUMENTS (fndecl);
5019 tree next_parm = parm;
5020 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5021 if (!ts || vec_safe_length (ts->m_vr) == 0)
5022 return;
5023 const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
5024 unsigned count = vr.length ();
5026 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5028 if (node->clone.combined_args_to_skip
5029 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5030 continue;
5031 gcc_checking_assert (parm);
5032 next_parm = DECL_CHAIN (parm);
5033 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5035 if (!ddef || !is_gimple_reg (parm))
5036 continue;
5038 if (vr[i].known
5039 && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
5041 tree type = TREE_TYPE (ddef);
5042 unsigned prec = TYPE_PRECISION (type);
5043 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5045 if (dump_file)
5047 fprintf (dump_file, "Setting value range of param %u ", i);
5048 fprintf (dump_file, "%s[",
5049 (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
5050 print_decs (vr[i].min, dump_file);
5051 fprintf (dump_file, ", ");
5052 print_decs (vr[i].max, dump_file);
5053 fprintf (dump_file, "]\n");
5055 set_range_info (ddef, vr[i].type,
5056 wide_int_storage::from (vr[i].min, prec,
5057 TYPE_SIGN (type)),
5058 wide_int_storage::from (vr[i].max, prec,
5059 TYPE_SIGN (type)));
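/* For pointers the only useful derivable fact is non-NULL-ness, which
is represented as the anti-range ~[0, 0].  */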
5061 else if (POINTER_TYPE_P (TREE_TYPE (ddef))
5062 && vr[i].type == VR_ANTI_RANGE
5063 && wi::eq_p (vr[i].min, 0)
5064 && wi::eq_p (vr[i].max, 0))
5066 if (dump_file)
5067 fprintf (dump_file, "Setting nonnull for %u\n", i);
5068 set_ptr_nonnull (ddef);
5074 /* IPCP transformation phase applying the recorded aggregate values, known bits and value ranges. */
5076 unsigned int
5077 ipcp_transform_function (struct cgraph_node *node)
5079 vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
5080 struct ipa_func_body_info fbi;
5081 struct ipa_agg_replacement_value *aggval;
5082 int param_count;
5083 bool cfg_changed = false, something_changed = false;
5085 gcc_checking_assert (cfun);
5086 gcc_checking_assert (current_function_decl);
5088 if (dump_file)
5089 fprintf (dump_file, "Modification phase of node %s\n",
5090 node->dump_name ());
5092 ipcp_update_bits (node);
5093 ipcp_update_vr (node);
5094 aggval = ipa_get_agg_replacements_for_node (node);
5095 if (!aggval)
5096 return 0;
5097 param_count = count_formal_params (node->decl);
5098 if (param_count == 0)
5099 return 0;
5100 adjust_agg_replacement_values (node, aggval);
5101 if (dump_file)
5102 ipa_dump_agg_replacement_values (dump_file, aggval);
5104 fbi.node = node;
5105 fbi.info = NULL;
5106 fbi.bb_infos = vNULL;
5107 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5108 fbi.param_count = param_count;
5109 fbi.aa_walked = 0;
5111 vec_safe_grow_cleared (descriptors, param_count);
5112 ipa_populate_param_decls (node, *descriptors);
5113 calculate_dominance_info (CDI_DOMINATORS);
5114 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5115 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5117 int i;
5118 struct ipa_bb_info *bi;
5119 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5120 free_ipa_bb_info (bi);
5121 fbi.bb_infos.release ();
5122 free_dominance_info (CDI_DOMINATORS);
5124 ipcp_transformation *s = ipcp_transformation_sum->get (node);
5125 s->agg_values = NULL;
5126 s->bits = NULL;
5127 s->m_vr = NULL;
5129 vec_free (descriptors);
5131 if (!something_changed)
5132 return 0;
5133 else if (cfg_changed)
5134 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5135 else
5136 return TODO_update_ssa_only_virtuals;
5139 #include "gt-ipa-prop.h"