/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2008-2015 Free Software Foundation, Inc.
   Contributed by Martin Jambor <mjambor@suse.cz>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file implements Scalar Replacement of Aggregates (SRA).  SRA is run
   twice, once in the early stages of compilation (early SRA) and once in the
   late stages (late SRA).  The aim of both is to turn references to scalar
   parts of aggregates into uses of independent scalar variables.

   The two passes are nearly identical, the only difference is that early SRA
   does not scalarize unions which are used as the result in a GIMPLE_RETURN
   statement because together with inlining this can lead to weird type
   conversions.

   Both passes operate in four stages:

   1. The declarations that have properties which make them candidates for
      scalarization are identified in function find_var_candidates().  The
      candidates are stored in candidate_bitmap.

   2. The function body is scanned.  In the process, declarations which are
      used in a manner that prevents their scalarization are removed from the
      candidate bitmap.  More importantly, for every access into an aggregate,
      an access structure (struct access) is created by create_access() and
      stored in a vector associated with the aggregate.  Among other
      information, the aggregate declaration, the offset and size of the access
      and its type are stored in the structure.

      On a related note, assign_link structures are created for every assign
      statement between candidate aggregates and attached to the related
      accesses.

   3. The vectors of accesses are analyzed.  They are first sorted according to
      their offset and size and then scanned for partially overlapping accesses
      (i.e. those which overlap but one is not entirely within another).  Such
      an access disqualifies the whole aggregate from being scalarized.

      If there is no such inhibiting overlap, a representative access structure
      is chosen for every unique combination of offset and size.  Afterwards,
      the pass builds a set of trees from these structures, in which children
      of an access are within their parent (in terms of offset and size).

      Then accesses are propagated whenever possible (i.e. in cases when it
      does not create a partially overlapping access) across assign_links from
      the right hand side to the left hand side.

      Then the set of trees for each declaration is traversed again and those
      accesses which should be replaced by a scalar are identified.

   4. The function is traversed again, and for every reference into an
      aggregate that has some component which is about to be scalarized,
      statements are amended and new statements are created as necessary.
      Finally, if a parameter got scalarized, the scalar replacements are
      initialized with values from respective parameter aggregates.  */
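/* An illustrative sketch (not part of the original sources) of the overall
   effect, for a source function such as

     struct S { int a; int b; };
     int f (void) { struct S s; s.a = 1; s.b = 2; return s.a + s.b; }

   the pass may end up replacing the two field accesses with independent
   scalars, conceptually

     int f (void) { int s$a, s$b; s$a = 1; s$b = 2; return s$a + s$b; }

   after which the aggregate s itself becomes dead and can be removed.  */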
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "alloc-pool.h"
#include "backend.h"
#include "predict.h"
#include "tree.h"
#include "gimple.h"
#include "rtl.h"
#include "ssa.h"
#include "alias.h"
#include "fold-const.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "stor-layout.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "flags.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-pass.h"
#include "cgraph.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "params.h"
#include "target.h"
#include "dbgcnt.h"
#include "tree-inline.h"
#include "gimple-pretty-print.h"
#include "ipa-inline.h"
#include "ipa-utils.h"
#include "builtins.h"
/* Enumeration of all aggregate reductions we can do.  */
enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
                SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
                SRA_MODE_INTRA };     /* late intraprocedural SRA */

/* Global variable describing which aggregate reduction we are performing at
   the moment.  */
static enum sra_mode sra_mode;

struct assign_link;
/* ACCESS represents each access to an aggregate variable (as a whole or a
   part).  It can also represent a group of accesses that refer to exactly the
   same fragment of an aggregate (i.e. those that have exactly the same offset
   and size).  Such representatives for a single aggregate, once determined,
   are linked in a linked list and have the group fields set.

   Moreover, when doing intraprocedural SRA, a tree is built from those
   representatives (by the means of first_child and next_sibling pointers), in
   which all items in a subtree are "within" the root, i.e. their offset is
   greater or equal to offset of the root and offset+size is smaller or equal
   to offset+size of the root.  Children of an access are sorted by offset.

   Note that accesses to parts of vector and complex number types are always
   represented by an access to the whole complex number or a vector.  It is the
   duty of the modifying functions to replace them appropriately.  */
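/* For illustration only (an assumed typical layout, not taken from the
   original sources): for a variable of type

     struct { struct { int x; int y; } in; long z; }

   the representatives might form a tree in which the access covering "in"
   (offset 0, size 64) has children "in.x" (offset 0, size 32) and "in.y"
   (offset 32, size 32), while "z" (offset 64, size 64) is the next sibling
   of "in".  */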
struct access
{
  /* Values returned by `get_ref_base_and_extent' for each component reference.
     If EXPR isn't a component reference just set `BASE = EXPR', `OFFSET = 0',
     `SIZE = TYPE_SIZE (TREE_TYPE (expr))'.  */
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
  tree base;

  /* Expression.  It is context dependent so do not use it to create new
     expressions to access the original aggregate.  See PR 42154 for a
     testcase.  */
  tree expr;
  /* Type.  */
  tree type;

  /* The statement this access belongs to.  */
  gimple stmt;

  /* Next group representative for this aggregate.  */
  struct access *next_grp;

  /* Pointer to the group representative.  Pointer to itself if the struct is
     the representative.  */
  struct access *group_representative;

  /* If this access has any children (in terms of the definition above), this
     points to the first one.  */
  struct access *first_child;

  /* In intraprocedural SRA, pointer to the next sibling in the access tree as
     described above.  In IPA-SRA this is a pointer to the next access
     belonging to the same group (having the same representative).  */
  struct access *next_sibling;

  /* Pointers to the first and last element in the linked list of assign
     links.  */
  struct assign_link *first_link, *last_link;

  /* Pointer to the next access in the work queue.  */
  struct access *next_queued;

  /* Replacement variable for this access "region."  Never to be accessed
     directly, always only by the means of get_access_replacement() and only
     when grp_to_be_replaced flag is set.  */
  tree replacement_decl;

  /* Is this particular access a write access?  */
  unsigned write : 1;

  /* Is this access an access to a non-addressable field?  */
  unsigned non_addressable : 1;

  /* Is this access currently in the work queue?  */
  unsigned grp_queued : 1;

  /* Does this group contain a write access?  This flag is propagated down the
     access tree.  */
  unsigned grp_write : 1;

  /* Does this group contain a read access?  This flag is propagated down the
     access tree.  */
  unsigned grp_read : 1;

  /* Does this group contain a read access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_read : 1;

  /* Does this group contain a write access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_write : 1;

  /* Does this group contain a read access through a scalar type?  This flag is
     not propagated in the access tree in any direction.  */
  unsigned grp_scalar_read : 1;

  /* Does this group contain a write access through a scalar type?  This flag
     is not propagated in the access tree in any direction.  */
  unsigned grp_scalar_write : 1;

  /* Is this access an artificial one created to scalarize some record
     entirely?  */
  unsigned grp_total_scalarization : 1;

  /* Other passes of the analysis use this bit to make function
     analyze_access_subtree create scalar replacements for this group if
     possible.  */
  unsigned grp_hint : 1;

  /* Is the subtree rooted in this access fully covered by scalar
     replacements?  */
  unsigned grp_covered : 1;

  /* If set to true, this access and all below it in an access tree must not be
     scalarized.  */
  unsigned grp_unscalarizable_region : 1;

  /* Whether data have been written to parts of the aggregate covered by this
     access which is not to be scalarized.  This flag is propagated up in the
     access tree.  */
  unsigned grp_unscalarized_data : 1;

  /* Does this access and/or group contain a write access through a
     BIT_FIELD_REF?  */
  unsigned grp_partial_lhs : 1;

  /* Set when a scalar replacement should be created for this variable.  */
  unsigned grp_to_be_replaced : 1;

  /* Set when we want a replacement for the sole purpose of having it in
     generated debug statements.  */
  unsigned grp_to_be_debug_replaced : 1;

  /* Should TREE_NO_WARNING of a replacement be set?  */
  unsigned grp_no_warning : 1;

  /* Is it possible that the group refers to data which might be (directly or
     otherwise) modified?  */
  unsigned grp_maybe_modified : 1;

  /* Set when this is a representative of a pointer to scalar (i.e. by
     reference) parameter which we consider for turning into a plain scalar
     (i.e. a by value parameter).  */
  unsigned grp_scalar_ptr : 1;

  /* Set when we discover that this pointer is not safe to dereference in the
     caller.  */
  unsigned grp_not_necessarilly_dereferenced : 1;
};

typedef struct access *access_p;
/* Alloc pool for allocating access structures.  */
static object_allocator<struct access> access_pool ("SRA accesses", 16);

/* A structure linking lhs and rhs accesses from an aggregate assignment.  They
   are used to propagate subaccesses from rhs to lhs as long as they don't
   conflict with what is already there.  */
struct assign_link
{
  struct access *lacc, *racc;
  struct assign_link *next;
};

/* Alloc pool for allocating assign link structures.  */
static object_allocator<assign_link> assign_link_pool ("SRA links", 16);

/* Base (tree) -> Vector (vec<access_p> *) map.  */
static hash_map<tree, auto_vec<access_p> > *base_access_vec;
/* Candidate hash table helpers.  */

struct uid_decl_hasher : nofree_ptr_hash <tree_node>
{
  static inline hashval_t hash (const tree_node *);
  static inline bool equal (const tree_node *, const tree_node *);
};

/* Hash a tree in a uid_decl_map.  */

inline hashval_t
uid_decl_hasher::hash (const tree_node *item)
{
  return item->decl_minimal.uid;
}

/* Return true if the DECL_UIDs of both trees are equal.  */

inline bool
uid_decl_hasher::equal (const tree_node *a, const tree_node *b)
{
  return (a->decl_minimal.uid == b->decl_minimal.uid);
}
/* Set of candidates.  */
static bitmap candidate_bitmap;
static hash_table<uid_decl_hasher> *candidates;

/* For a candidate UID return the candidate's decl.  */

static inline tree
candidate (unsigned uid)
{
  tree_node t;
  t.decl_minimal.uid = uid;
  return candidates->find_with_hash (&t, static_cast <hashval_t> (uid));
}

/* Bitmap of candidates which we should try to entirely scalarize away and
   those which cannot be (because they are and need to be used as a
   whole).  */
static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;
/* Obstack for creation of fancy names.  */
static struct obstack name_obstack;

/* Head of a linked list of accesses that need to have their subaccesses
   propagated to their assignment counterparts.  */
static struct access *work_queue_head;

/* Number of parameters of the analyzed function when doing early ipa SRA.  */
static int func_param_count;

/* scan_function sets the following to true if it encounters a call to
   __builtin_apply_args.  */
static bool encountered_apply_args;

/* Set by scan_function when it finds a recursive call.  */
static bool encountered_recursive_call;

/* Set by scan_function when it finds a recursive call with fewer actual
   arguments than formal parameters.  */
static bool encountered_unchangable_recursive_call;

/* This is a table in which for each basic block and parameter there is a
   distance (offset + size) in that parameter which is dereferenced and
   accessed in that BB.  */
static HOST_WIDE_INT *bb_dereferences;
/* Bitmap of BBs that can cause the function to "stop" progressing by
   returning, throwing externally, looping infinitely or calling a function
   which might abort etc.  */
static bitmap final_bbs;

/* Representative of no accesses at all.  */
static struct access no_accesses_representant;

/* Predicate to test the special value.  */

static inline bool
no_accesses_p (struct access *access)
{
  return access == &no_accesses_representant;
}
/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */

static struct
{
  /* Number of processed aggregates is readily available in
     analyze_all_variable_accesses and so is not stored here.  */

  /* Number of created scalar replacements.  */
  int replacements;

  /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
     expression.  */
  int exprs;

  /* Number of statements created by generate_subtree_copies.  */
  int subtree_copies;

  /* Number of statements created by load_assign_lhs_subreplacements.  */
  int subreplacements;

  /* Number of times sra_modify_assign has deleted a statement.  */
  int deleted;

  /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
     RHS separately due to type conversions or nonexistent matching
     references.  */
  int separate_lhs_rhs_handling;

  /* Number of parameters that were removed because they were unused.  */
  int deleted_unused_parameters;

  /* Number of scalars passed as parameters by reference that have been
     converted to be passed by value.  */
  int scalar_by_ref_to_by_val;

  /* Number of aggregate parameters that were replaced by one or more of their
     components.  */
  int aggregate_params_reduced;

  /* Number of components created when splitting aggregate parameters.  */
  int param_reductions_created;
} sra_stats;
static void
dump_access (FILE *f, struct access *access, bool grp)
{
  fprintf (f, "access { ");
  fprintf (f, "base = (%d)'", DECL_UID (access->base));
  print_generic_expr (f, access->base, 0);
  fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
  fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
  fprintf (f, ", expr = ");
  print_generic_expr (f, access->expr, 0);
  fprintf (f, ", type = ");
  print_generic_expr (f, access->type, 0);
  if (grp)
    fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
	     "grp_assignment_write = %d, grp_scalar_read = %d, "
	     "grp_scalar_write = %d, grp_total_scalarization = %d, "
	     "grp_hint = %d, grp_covered = %d, "
	     "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
	     "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
	     "grp_to_be_debug_replaced = %d, grp_maybe_modified = %d, "
	     "grp_not_necessarilly_dereferenced = %d\n",
	     access->grp_read, access->grp_write, access->grp_assignment_read,
	     access->grp_assignment_write, access->grp_scalar_read,
	     access->grp_scalar_write, access->grp_total_scalarization,
	     access->grp_hint, access->grp_covered,
	     access->grp_unscalarizable_region, access->grp_unscalarized_data,
	     access->grp_partial_lhs, access->grp_to_be_replaced,
	     access->grp_to_be_debug_replaced, access->grp_maybe_modified,
	     access->grp_not_necessarilly_dereferenced);
  else
    fprintf (f, ", write = %d, grp_total_scalarization = %d, "
	     "grp_partial_lhs = %d\n",
	     access->write, access->grp_total_scalarization,
	     access->grp_partial_lhs);
}
/* Dump a subtree rooted in ACCESS to file F, indent by LEVEL.  */

static void
dump_access_tree_1 (FILE *f, struct access *access, int level)
{
  do
    {
      int i;

      for (i = 0; i < level; i++)
	fputs ("* ", f);

      dump_access (f, access, true);

      if (access->first_child)
	dump_access_tree_1 (f, access->first_child, level + 1);

      access = access->next_sibling;
    }
  while (access);
}

/* Dump all access trees for a variable, given the pointer to the first root in
   ACCESS.  */

static void
dump_access_tree (FILE *f, struct access *access)
{
  for (; access; access = access->next_grp)
    dump_access_tree_1 (f, access, 0);
}

/* Return true iff ACC is non-NULL and has subaccesses.  */

static inline bool
access_has_children_p (struct access *acc)
{
  return acc && acc->first_child;
}
/* Return true iff ACC is (partly) covered by at least one replacement.  */

static bool
access_has_replacements_p (struct access *acc)
{
  struct access *child;
  if (acc->grp_to_be_replaced)
    return true;
  for (child = acc->first_child; child; child = child->next_sibling)
    if (access_has_replacements_p (child))
      return true;
  return false;
}

/* Return a vector of pointers to accesses for the variable given in BASE or
   NULL if there is none.  */

static vec<access_p> *
get_base_access_vector (tree base)
{
  return base_access_vec->get (base);
}

/* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
   in ACCESS.  Return NULL if it cannot be found.  */

static struct access *
find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
			HOST_WIDE_INT size)
{
  while (access && (access->offset != offset || access->size != size))
    {
      struct access *child = access->first_child;

      while (child && (child->offset + child->size <= offset))
	child = child->next_sibling;
      access = child;
    }

  return access;
}
/* Return the first group representative for DECL or NULL if none exists.  */

static struct access *
get_first_repr_for_decl (tree base)
{
  vec<access_p> *access_vec;

  access_vec = get_base_access_vector (base);
  if (!access_vec)
    return NULL;

  return (*access_vec)[0];
}

/* Find an access representative for the variable BASE and given OFFSET and
   SIZE.  Requires that access trees have already been built.  Return NULL if
   it cannot be found.  */

static struct access *
get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
				 HOST_WIDE_INT size)
{
  struct access *access;

  access = get_first_repr_for_decl (base);
  while (access && (access->offset + access->size <= offset))
    access = access->next_grp;
  if (!access)
    return NULL;

  return find_access_in_subtree (access, offset, size);
}
/* Add LINK to the linked list of assign links of RACC.  */
static void
add_link_to_rhs (struct access *racc, struct assign_link *link)
{
  gcc_assert (link->racc == racc);

  if (!racc->first_link)
    {
      gcc_assert (!racc->last_link);
      racc->first_link = link;
    }
  else
    racc->last_link->next = link;

  racc->last_link = link;
  link->next = NULL;
}

/* Move all link structures in their linked list in OLD_RACC to the linked list
   in NEW_RACC.  */
static void
relink_to_new_repr (struct access *new_racc, struct access *old_racc)
{
  if (!old_racc->first_link)
    {
      gcc_assert (!old_racc->last_link);
      return;
    }

  if (new_racc->first_link)
    {
      gcc_assert (!new_racc->last_link->next);
      gcc_assert (!old_racc->last_link || !old_racc->last_link->next);

      new_racc->last_link->next = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  else
    {
      gcc_assert (!new_racc->last_link);

      new_racc->first_link = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  old_racc->first_link = old_racc->last_link = NULL;
}
/* Add ACCESS to the work queue (which is actually a stack).  */

static void
add_access_to_work_queue (struct access *access)
{
  if (!access->grp_queued)
    {
      gcc_assert (!access->next_queued);
      access->next_queued = work_queue_head;
      access->grp_queued = 1;
      work_queue_head = access;
    }
}

/* Pop an access from the work queue, and return it, assuming there is one.  */

static struct access *
pop_access_from_work_queue (void)
{
  struct access *access = work_queue_head;

  work_queue_head = access->next_queued;
  access->next_queued = NULL;
  access->grp_queued = 0;
  return access;
}
/* Allocate necessary structures.  */

static void
sra_initialize (void)
{
  candidate_bitmap = BITMAP_ALLOC (NULL);
  candidates = new hash_table<uid_decl_hasher>
    (vec_safe_length (cfun->local_decls) / 2);
  should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  gcc_obstack_init (&name_obstack);
  base_access_vec = new hash_map<tree, auto_vec<access_p> >;
  memset (&sra_stats, 0, sizeof (sra_stats));
  encountered_apply_args = false;
  encountered_recursive_call = false;
  encountered_unchangable_recursive_call = false;
}

/* Deallocate all general structures.  */

static void
sra_deinitialize (void)
{
  BITMAP_FREE (candidate_bitmap);
  delete candidates;
  candidates = NULL;
  BITMAP_FREE (should_scalarize_away_bitmap);
  BITMAP_FREE (cannot_scalarize_away_bitmap);
  access_pool.release ();
  assign_link_pool.release ();
  obstack_free (&name_obstack, NULL);

  delete base_access_vec;
}
/* Remove DECL from candidates for SRA and write REASON to the dump file if
   there is one.  */
static void
disqualify_candidate (tree decl, const char *reason)
{
  if (bitmap_clear_bit (candidate_bitmap, DECL_UID (decl)))
    candidates->remove_elt_with_hash (decl, DECL_UID (decl));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "! Disqualifying ");
      print_generic_expr (dump_file, decl, 0);
      fprintf (dump_file, " - %s\n", reason);
    }
}
/* Return true iff the type contains a field or an element which does not allow
   scalarization.  */

static bool
type_internals_preclude_sra_p (tree type, const char **msg)
{
  tree fld;
  tree et;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	if (TREE_CODE (fld) == FIELD_DECL)
	  {
	    tree ft = TREE_TYPE (fld);

	    if (TREE_THIS_VOLATILE (fld))
	      {
		*msg = "volatile structure field";
		return true;
	      }
	    if (!DECL_FIELD_OFFSET (fld))
	      {
		*msg = "no structure field offset";
		return true;
	      }
	    if (!DECL_SIZE (fld))
	      {
		*msg = "zero structure field size";
		return true;
	      }
	    if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
	      {
		*msg = "structure field offset not fixed";
		return true;
	      }
	    if (!tree_fits_uhwi_p (DECL_SIZE (fld)))
	      {
		*msg = "structure field size not fixed";
		return true;
	      }
	    if (!tree_fits_shwi_p (bit_position (fld)))
	      {
		*msg = "structure field size too big";
		return true;
	      }
	    if (AGGREGATE_TYPE_P (ft)
		&& int_bit_position (fld) % BITS_PER_UNIT != 0)
	      {
		*msg = "structure field is bit field";
		return true;
	      }

	    if (AGGREGATE_TYPE_P (ft) && type_internals_preclude_sra_p (ft, msg))
	      return true;
	  }

      return false;

    case ARRAY_TYPE:
      et = TREE_TYPE (type);

      if (TYPE_VOLATILE (et))
	{
	  *msg = "element type is volatile";
	  return true;
	}

      if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
	return true;

      return false;

    default:
      return false;
    }
}
/* If T is an SSA_NAME, return NULL if it is not a default def or return its
   base variable if it is.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (t))
	return SSA_NAME_VAR (t);
      else
	return NULL_TREE;
    }
  return t;
}
/* Mark a dereference of BASE of distance DIST in a basic block that STMT
   belongs to, unless the BB has already been marked as potentially
   final.  */

static void
mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  int idx, parm_index = 0;
  tree parm;

  if (bitmap_bit_p (final_bbs, bb->index))
    return;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm && parm != base;
       parm = DECL_CHAIN (parm))
    parm_index++;

  gcc_assert (parm_index < func_param_count);

  idx = bb->index * func_param_count + parm_index;
  if (bb_dereferences[idx] < dist)
    bb_dereferences[idx] = dist;
}
/* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
   the three fields.  Also add it to the vector of accesses corresponding to
   the base.  Finally, return the new access.  */

static struct access *
create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
{
  struct access *access = access_pool.allocate ();

  memset (access, 0, sizeof (struct access));
  access->base = base;
  access->offset = offset;
  access->size = size;

  base_access_vec->get_or_insert (base).safe_push (access);

  return access;
}
/* Create and insert access for EXPR.  Return created access, or NULL if it is
   not possible.  */

static struct access *
create_access (tree expr, gimple stmt, bool write)
{
  struct access *access;
  HOST_WIDE_INT offset, size, max_size;
  tree base = expr;
  bool ptr, unscalarizable_region = false;

  base = get_ref_base_and_extent (expr, &offset, &size, &max_size);

  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (base) == MEM_REF)
    {
      base = get_ssa_base_param (TREE_OPERAND (base, 0));
      if (!base)
	return NULL;
      ptr = true;
    }
  else
    ptr = false;

  if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  if (sra_mode == SRA_MODE_EARLY_IPA)
    {
      if (size < 0 || size != max_size)
	{
	  disqualify_candidate (base, "Encountered a variable sized access.");
	  return NULL;
	}
      if (TREE_CODE (expr) == COMPONENT_REF
	  && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
	{
	  disqualify_candidate (base, "Encountered a bit-field access.");
	  return NULL;
	}
      gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);

      if (ptr)
	mark_parm_dereference (base, offset + size, stmt);
    }
  else
    {
      if (size != max_size)
	{
	  size = max_size;
	  unscalarizable_region = true;
	}
      if (size < 0)
	{
	  disqualify_candidate (base, "Encountered an unconstrained access.");
	  return NULL;
	}
    }

  access = create_access_1 (base, offset, size);
  access->expr = expr;
  access->type = TREE_TYPE (expr);
  access->write = write;
  access->grp_unscalarizable_region = unscalarizable_region;
  access->stmt = stmt;

  if (TREE_CODE (expr) == COMPONENT_REF
      && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
    access->non_addressable = 1;

  return access;
}
/* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
   register types or (recursively) records with only these two kinds of fields.
   It also returns false if any of these records contains a bit-field.  */

static bool
type_consists_of_records_p (tree type)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
	tree ft = TREE_TYPE (fld);

	if (DECL_BIT_FIELD (fld))
	  return false;

	if (!is_gimple_reg_type (ft)
	    && !type_consists_of_records_p (ft))
	  return false;
      }

  return true;
}
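/* An illustrative note (not from the original sources): a type such as

     struct point { int x; int y; };

   or a record containing only such records satisfies the predicate above,
   whereas a record with a bit-field, e.g. struct { int x : 3; }, or one with
   an array member does not.  */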
/* Create total_scalarization accesses for all scalar type fields in DECL that
   must be of a RECORD_TYPE conforming to type_consists_of_records_p.  BASE
   must be the top-most VAR_DECL representing the variable, OFFSET must be the
   offset of DECL within BASE.  REF must be the memory reference expression for
   the given decl.  */

static void
completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset,
			     tree ref)
{
  tree fld, decl_type = TREE_TYPE (decl);

  for (fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
	HOST_WIDE_INT pos = offset + int_bit_position (fld);
	tree ft = TREE_TYPE (fld);
	tree nref = build3 (COMPONENT_REF, TREE_TYPE (fld), ref, fld,
			    NULL_TREE);

	if (is_gimple_reg_type (ft))
	  {
	    struct access *access;
	    HOST_WIDE_INT size;

	    size = tree_to_uhwi (DECL_SIZE (fld));
	    access = create_access_1 (base, pos, size);
	    access->expr = nref;
	    access->type = ft;
	    access->grp_total_scalarization = 1;
	    /* Accesses for intraprocedural SRA can have their stmt NULL.  */
	  }
	else
	  completely_scalarize_record (base, fld, pos, nref);
      }
}

/* Create a total_scalarization access for VAR as a whole.  VAR must be of a
   RECORD_TYPE conforming to type_consists_of_records_p.  */

static void
create_total_scalarization_access (tree var)
{
  HOST_WIDE_INT size = tree_to_uhwi (DECL_SIZE (var));
  struct access *access;

  access = create_access_1 (var, 0, size);
  access->expr = var;
  access->type = TREE_TYPE (var);
  access->grp_total_scalarization = 1;
}
/* Return true if REF has a VIEW_CONVERT_EXPR somewhere in it.  */

static inline bool
contains_view_convert_expr_p (const_tree ref)
{
  while (handled_component_p (ref))
    {
      if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
	return true;
      ref = TREE_OPERAND (ref, 0);
    }

  return false;
}

/* Search the given tree for a declaration by skipping handled components and
   exclude it from the candidates.  */

static void
disqualify_base_of_expr (tree t, const char *reason)
{
  t = get_base_address (t);
  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (t) == MEM_REF)
    t = get_ssa_base_param (TREE_OPERAND (t, 0));

  if (t && DECL_P (t))
    disqualify_candidate (t, reason);
}
/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return the created access or NULL if none is
   created.  */

static struct access *
build_access_from_expr_1 (tree expr, gimple stmt, bool write)
{
  struct access *ret = NULL;
  bool partial_ref;

  if (TREE_CODE (expr) == BIT_FIELD_REF
      || TREE_CODE (expr) == IMAGPART_EXPR
      || TREE_CODE (expr) == REALPART_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      partial_ref = true;
    }
  else
    partial_ref = false;

  /* We need to dive through V_C_Es in order to get the size of its parameter
     and not the result type.  Ada produces such statements.  We are also
     capable of handling the topmost V_C_E but not any of those buried in other
     handled components.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    expr = TREE_OPERAND (expr, 0);

  if (contains_view_convert_expr_p (expr))
    {
      disqualify_base_of_expr (expr, "V_C_E under a different handled "
			       "component.");
      return NULL;
    }
  if (TREE_THIS_VOLATILE (expr))
    {
      disqualify_base_of_expr (expr, "part of a volatile reference.");
      return NULL;
    }

  switch (TREE_CODE (expr))
    {
    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
	  && sra_mode != SRA_MODE_EARLY_IPA)
	return NULL;
      /* fall through */
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      ret = create_access (expr, stmt, write);
      break;

    default:
      break;
    }

  if (write && partial_ref && ret)
    ret->grp_partial_lhs = 1;

  return ret;
}
/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return true if any access has been inserted.
   STMT must be the statement from which the expression is taken, WRITE must be
   true if the expression is a store and false otherwise.  */

static bool
build_access_from_expr (tree expr, gimple stmt, bool write)
{
  struct access *access;

  access = build_access_from_expr_1 (expr, stmt, write);
  if (access)
    {
      /* This means the aggregate is accessed as a whole in a way other than an
	 assign statement and thus cannot be removed even if we had a scalar
	 replacement for everything.  */
      if (cannot_scalarize_away_bitmap)
	bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
      return true;
    }
  return false;
}
/* Return the single non-EH successor edge of BB or NULL if there is none or
   more than one.  */

static edge
single_non_eh_succ (basic_block bb)
{
  edge e, res = NULL;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & EDGE_EH))
      {
	if (res)
	  return NULL;
	res = e;
      }

  return res;
}
/* Disqualify LHS and RHS for scalarization if STMT has to terminate its BB and
   there is no alternative spot where to put statements SRA might need to
   generate after it.  The spot we are looking for is an edge leading to a
   single non-EH successor, if it exists and is indeed single.  RHS may be
   NULL, in that case ignore it.  */

static bool
disqualify_if_bad_bb_terminating_stmt (gimple stmt, tree lhs, tree rhs)
{
  if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && stmt_ends_bb_p (stmt))
    {
      if (single_non_eh_succ (gimple_bb (stmt)))
	return false;

      disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
      if (rhs)
	disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
      return true;
    }
  return false;
}
/* Scan expressions occurring in STMT, create access structures for all accesses
   to candidates for scalarization and remove those candidates which occur in
   statements or expressions that prevent them from being split apart.  Return
   true if any access has been inserted.  */

static bool
build_accesses_from_assign (gimple stmt)
{
  tree lhs, rhs;
  struct access *lacc, *racc;

  if (!gimple_assign_single_p (stmt)
      /* Scope clobbers don't influence scalarization.  */
      || gimple_clobber_p (stmt))
    return false;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);

  if (disqualify_if_bad_bb_terminating_stmt (stmt, lhs, rhs))
    return false;

  racc = build_access_from_expr_1 (rhs, stmt, false);
  lacc = build_access_from_expr_1 (lhs, stmt, true);

  if (lacc)
    lacc->grp_assignment_write = 1;

  if (racc)
    {
      racc->grp_assignment_read = 1;
      if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
	  && !is_gimple_reg_type (racc->type))
	bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
    }

  if (lacc && racc
      && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && !lacc->grp_unscalarizable_region
      && !racc->grp_unscalarizable_region
      && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
      && lacc->size == racc->size
      && useless_type_conversion_p (lacc->type, racc->type))
    {
      struct assign_link *link;

      link = assign_link_pool.allocate ();
      memset (link, 0, sizeof (struct assign_link));

      link->lacc = lacc;
      link->racc = racc;

      add_link_to_rhs (racc, link);
    }

  return lacc || racc;
}
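/* For illustration (a sketch, not from the original sources): for an aggregate
   copy such as

     d = s;

   where both d and s are candidate structs of the same type, the code above
   creates a write access covering all of d, a read access covering all of s,
   and an assign_link between them, so that subaccesses discovered on s can
   later be propagated onto d during analysis.  */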
/* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
   GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */

static bool
asm_visit_addr (gimple, tree op, tree, void *)
{
  op = get_base_address (op);
  if (op
      && DECL_P (op))
    disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");

  return false;
}
/* Return true iff callsite CALL has at least as many actual arguments as there
   are formal parameters of the function currently processed by IPA-SRA and
   their types match.  */

static inline bool
callsite_arguments_match_p (gimple call)
{
  if (gimple_call_num_args (call) < (unsigned) func_param_count)
    return false;

  tree parm;
  int i;
  for (parm = DECL_ARGUMENTS (current_function_decl), i = 0;
       parm;
       parm = DECL_CHAIN (parm), i++)
    {
      tree arg = gimple_call_arg (call, i);
      if (!useless_type_conversion_p (TREE_TYPE (parm), TREE_TYPE (arg)))
	return false;
    }
  return true;
}
/* Scan function and look for interesting expressions and create access
   structures for them.  Return true iff any access is created.  */

static bool
scan_function (void)
{
  basic_block bb;
  bool ret = false;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree t;
	  unsigned i;

	  if (final_bbs && stmt_can_throw_external (stmt))
	    bitmap_set_bit (final_bbs, bb->index);
	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_RETURN:
	      t = gimple_return_retval (as_a <greturn *> (stmt));
	      if (t != NULL_TREE)
		ret |= build_access_from_expr (t, stmt, false);
	      if (final_bbs)
		bitmap_set_bit (final_bbs, bb->index);
	      break;

	    case GIMPLE_ASSIGN:
	      ret |= build_accesses_from_assign (stmt);
	      break;

	    case GIMPLE_CALL:
	      for (i = 0; i < gimple_call_num_args (stmt); i++)
		ret |= build_access_from_expr (gimple_call_arg (stmt, i),
					       stmt, false);

	      if (sra_mode == SRA_MODE_EARLY_IPA)
		{
		  tree dest = gimple_call_fndecl (stmt);
		  int flags = gimple_call_flags (stmt);

		  if (dest)
		    {
		      if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
			  && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
			encountered_apply_args = true;
		      if (recursive_call_p (current_function_decl, dest))
			{
			  encountered_recursive_call = true;
			  if (!callsite_arguments_match_p (stmt))
			    encountered_unchangable_recursive_call = true;
			}
		    }

		  if (final_bbs
		      && (flags & (ECF_CONST | ECF_PURE)) == 0)
		    bitmap_set_bit (final_bbs, bb->index);
		}

	      t = gimple_call_lhs (stmt);
	      if (t && !disqualify_if_bad_bb_terminating_stmt (stmt, t, NULL))
		ret |= build_access_from_expr (t, stmt, true);
	      break;

	    case GIMPLE_ASM:
	      {
		gasm *asm_stmt = as_a <gasm *> (stmt);
		walk_stmt_load_store_addr_ops (asm_stmt, NULL, NULL, NULL,
					       asm_visit_addr);
		if (final_bbs)
		  bitmap_set_bit (final_bbs, bb->index);

		for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
		  {
		    t = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
		    ret |= build_access_from_expr (t, asm_stmt, false);
		  }
		for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
		  {
		    t = TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
		    ret |= build_access_from_expr (t, asm_stmt, true);
		  }
	      }
	      break;

	    default:
	      break;
	    }
	}
    }

  return ret;
}
/* Helper of QSORT function.  There are pointers to accesses in the array.  An
   access is considered smaller than another if it has smaller offset or if the
   offsets are the same but its size is bigger.  */

static int
compare_access_positions (const void *a, const void *b)
{
  const access_p *fp1 = (const access_p *) a;
  const access_p *fp2 = (const access_p *) b;
  const access_p f1 = *fp1;
  const access_p f2 = *fp2;

  if (f1->offset != f2->offset)
    return f1->offset < f2->offset ? -1 : 1;

  if (f1->size == f2->size)
    {
      if (f1->type == f2->type)
	return 0;
      /* Put any non-aggregate type before any aggregate type.  */
      else if (!is_gimple_reg_type (f1->type)
	       && is_gimple_reg_type (f2->type))
	return 1;
      else if (is_gimple_reg_type (f1->type)
	       && !is_gimple_reg_type (f2->type))
	return -1;
      /* Put any complex or vector type before any other scalar type.  */
      else if (TREE_CODE (f1->type) != COMPLEX_TYPE
	       && TREE_CODE (f1->type) != VECTOR_TYPE
	       && (TREE_CODE (f2->type) == COMPLEX_TYPE
		   || TREE_CODE (f2->type) == VECTOR_TYPE))
	return 1;
      else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
		|| TREE_CODE (f1->type) == VECTOR_TYPE)
	       && TREE_CODE (f2->type) != COMPLEX_TYPE
	       && TREE_CODE (f2->type) != VECTOR_TYPE)
	return -1;
      /* Put the integral type with the bigger precision first.  */
      else if (INTEGRAL_TYPE_P (f1->type)
	       && INTEGRAL_TYPE_P (f2->type))
	return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
      /* Put any integral type with non-full precision last.  */
      else if (INTEGRAL_TYPE_P (f1->type)
	       && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
		   != TYPE_PRECISION (f1->type)))
	return 1;
      else if (INTEGRAL_TYPE_P (f2->type)
	       && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
		   != TYPE_PRECISION (f2->type)))
	return -1;
      /* Stabilize the sort.  */
      return TYPE_UID (f1->type) - TYPE_UID (f2->type);
    }

  /* We want the bigger accesses first, thus the opposite operator in the next
     line: */
  return f1->size > f2->size ? -1 : 1;
}
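/* An illustrative example (not from the original sources): accesses at
   (offset, size) pairs (32, 32), (0, 32) and (0, 64) sort as
   (0, 64), (0, 32), (32, 32), i.e. by increasing offset with the larger
   access first on equal offsets, which is the order
   sort_and_splice_var_accesses below relies on so that an enclosing access
   always precedes the accesses contained within it.  */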
/* Append the name of the declaration to the name obstack.  A helper function
   for make_fancy_name.  */

static void
make_fancy_decl_name (tree decl)
{
  char buffer[32];

  tree name = DECL_NAME (decl);
  if (name)
    obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
		  IDENTIFIER_LENGTH (name));
  else
    {
      sprintf (buffer, "D%u", DECL_UID (decl));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
    }
}

/* Helper for make_fancy_name.  */

static void
make_fancy_name_1 (tree expr)
{
  char buffer[32];
  tree index;

  if (DECL_P (expr))
    {
      make_fancy_decl_name (expr);
      return;
    }

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      make_fancy_decl_name (TREE_OPERAND (expr, 1));
      break;

    case ARRAY_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      /* Arrays with only one element may not have a constant as their
	 index.  */
      index = TREE_OPERAND (expr, 1);
      if (TREE_CODE (index) != INTEGER_CST)
	break;
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
      break;

    case ADDR_EXPR:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      break;

    case MEM_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      if (!integer_zerop (TREE_OPERAND (expr, 1)))
	{
	  obstack_1grow (&name_obstack, '$');
	  sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
		   TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
	  obstack_grow (&name_obstack, buffer, strlen (buffer));
	}
      break;

    case BIT_FIELD_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      gcc_unreachable ();	/* we treat these as scalars.  */
      break;
    default:
      break;
    }
}

/* Create a human readable name for the replacement variable of the access
   with expression EXPR.  */

static char *
make_fancy_name (tree expr)
{
  make_fancy_name_1 (expr);
  obstack_1grow (&name_obstack, '\0');
  return XOBFINISH (&name_obstack, char *);
}
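/* An illustrative example (not from the original sources): for the expression
   s.data[4].x the functions above produce the name "s$data$4$x", which later
   becomes the DECL_NAME of the scalar replacement so that dumps and debug
   information stay readable.  */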
/* Construct a MEM_REF that would reference a part of aggregate BASE of type
   EXP_TYPE at the given OFFSET.  If BASE is something for which
   get_addr_base_and_unit_offset returns NULL, gsi must be non-NULL and is used
   to insert new statements either before or below the current one as specified
   by INSERT_AFTER.  This function is not capable of handling bitfields.

   BASE must be either a declaration or a memory reference that has correct
   alignment information embedded in it (e.g. a pre-existing one in SRA).  */

tree
build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
		      tree exp_type, gimple_stmt_iterator *gsi,
		      bool insert_after)
{
  tree prev_base = base;
  tree off;
  tree mem_ref;
  HOST_WIDE_INT base_offset;
  unsigned HOST_WIDE_INT misalign;
  unsigned int align;

  gcc_checking_assert (offset % BITS_PER_UNIT == 0);
  get_object_alignment_1 (base, &align, &misalign);
  base = get_addr_base_and_unit_offset (base, &base_offset);

  /* get_addr_base_and_unit_offset returns NULL for references with a variable
     offset such as array[var_index].  */
  if (!base)
    {
      gassign *stmt;
      tree tmp, addr;

      gcc_checking_assert (gsi);
      tmp = make_ssa_name (build_pointer_type (TREE_TYPE (prev_base)));
      addr = build_fold_addr_expr (unshare_expr (prev_base));
      STRIP_USELESS_TYPE_CONVERSION (addr);
      stmt = gimple_build_assign (tmp, addr);
      gimple_set_location (stmt, loc);
      if (insert_after)
	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

      off = build_int_cst (reference_alias_ptr_type (prev_base),
			   offset / BITS_PER_UNIT);
      base = tmp;
    }
  else if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
			   base_offset + offset / BITS_PER_UNIT);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (base),
			   base_offset + offset / BITS_PER_UNIT);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  misalign = (misalign + offset) & (align - 1);
  if (misalign != 0)
    align = (misalign & -misalign);
  if (align != TYPE_ALIGN (exp_type))
    exp_type = build_aligned_type (exp_type, align);

  mem_ref = fold_build2_loc (loc, MEM_REF, exp_type, base, off);
  if (TREE_THIS_VOLATILE (prev_base))
    TREE_THIS_VOLATILE (mem_ref) = 1;
  if (TREE_SIDE_EFFECTS (prev_base))
    TREE_SIDE_EFFECTS (mem_ref) = 1;
  return mem_ref;
}
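/* An illustrative example (a sketch, not from the original sources): for a
   declaration "struct S s;" with OFFSET of 32 bits and EXP_TYPE of int, the
   function above builds roughly what the GIMPLE dumps would print as
   MEM[(int *)&s + 4B], with the alignment of the access type reduced if s is
   not known to be sufficiently aligned at that offset.  */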
/* Construct a memory reference to a part of an aggregate BASE at the given
   OFFSET and of the same type as MODEL.  In case this is a reference to a
   bit-field, the function will replicate the last component_ref of model's
   expr to access it.  GSI and INSERT_AFTER have the same meaning as in
   build_ref_for_offset.  */

static tree
build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
		     struct access *model, gimple_stmt_iterator *gsi,
		     bool insert_after)
{
  if (TREE_CODE (model->expr) == COMPONENT_REF
      && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
    {
      /* This access represents a bit-field.  */
      tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);

      offset -= int_bit_position (fld);
      exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
      t = build_ref_for_offset (loc, base, offset, exp_type, gsi, insert_after);
      return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
			      NULL_TREE);
    }
  else
    return build_ref_for_offset (loc, base, offset, model->type,
				 gsi, insert_after);
}
/* Attempt to build a memory reference that we could put into a gimple
   debug_bind statement.  Similar to build_ref_for_model but punts if it has to
   create statements and returns NULL instead.  This function also ignores
   alignment issues and so its results should never end up in non-debug
   statements.  */

static tree
build_debug_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
			   struct access *model)
{
  HOST_WIDE_INT base_offset;
  tree off;

  if (TREE_CODE (model->expr) == COMPONENT_REF
      && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
    return NULL_TREE;

  base = get_addr_base_and_unit_offset (base, &base_offset);
  if (!base)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
			   base_offset + offset / BITS_PER_UNIT);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (base),
			   base_offset + offset / BITS_PER_UNIT);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  return fold_build2_loc (loc, MEM_REF, model->type, base, off);
}
/* Construct a memory reference consisting of component_refs and array_refs to
   a part of an aggregate *RES (which is of type TYPE).  The requested part
   should have type EXP_TYPE at the given OFFSET.  This function might not
   succeed, it returns true when it does and only then *RES points to something
   meaningful.  This function should be used only to build expressions that we
   might need to present to user (e.g. in warnings).  In all other situations,
   build_ref_for_model or build_ref_for_offset should be used instead.  */

static bool
build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
				    tree exp_type)
{
  while (1)
    {
      tree fld;
      tree tr_size, index, minidx;
      HOST_WIDE_INT el_size;

      if (offset == 0 && exp_type
	  && types_compatible_p (exp_type, type))
	return true;

      switch (TREE_CODE (type))
	{
	case UNION_TYPE:
	case QUAL_UNION_TYPE:
	case RECORD_TYPE:
	  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	    {
	      HOST_WIDE_INT pos, size;
	      tree tr_pos, expr, *expr_ptr;

	      if (TREE_CODE (fld) != FIELD_DECL)
		continue;

	      tr_pos = bit_position (fld);
	      if (!tr_pos || !tree_fits_uhwi_p (tr_pos))
		continue;
	      pos = tree_to_uhwi (tr_pos);
	      gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
	      tr_size = DECL_SIZE (fld);
	      if (!tr_size || !tree_fits_uhwi_p (tr_size))
		continue;
	      size = tree_to_uhwi (tr_size);
	      if (size == 0)
		{
		  if (pos != offset)
		    continue;
		}
	      else if (pos > offset || (pos + size) <= offset)
		continue;

	      expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
			     NULL_TREE);
	      expr_ptr = &expr;
	      if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
						      offset - pos, exp_type))
		{
		  *res = expr;
		  return true;
		}
	    }
	  return false;

	case ARRAY_TYPE:
	  tr_size = TYPE_SIZE (TREE_TYPE (type));
	  if (!tr_size || !tree_fits_uhwi_p (tr_size))
	    return false;
	  el_size = tree_to_uhwi (tr_size);

	  minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	  if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
	    return false;
	  index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
	  if (!integer_zerop (minidx))
	    index = int_const_binop (PLUS_EXPR, index, minidx);
	  *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
			 NULL_TREE, NULL_TREE);
	  offset = offset % el_size;
	  type = TREE_TYPE (type);
	  break;

	default:
	  if (offset != 0)
	    return false;

	  if (exp_type)
	    return false;
	  else
	    return true;
	}
    }
}
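/* An illustrative example (not from the original sources): for

     struct S { int a[4]; } s;

   with *RES initially being s, OFFSET of 96 bits and EXP_TYPE of int, the
   function above rewrites *RES to the expression s.a[3], which is the form
   one would want to show in a user-visible warning.  */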
/* Return true iff TYPE is a stdarg va_list type.  */

static inline bool
is_va_list_type (tree type)
{
  return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
}

/* Print a message to the dump file saying why a variable was rejected.  */

static void
reject (tree var, const char *msg)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "\n");
    }
}
/* Return true if VAR is a candidate for SRA.  */

static bool
maybe_add_sra_candidate (tree var)
{
  tree type = TREE_TYPE (var);
  const char *msg;
  tree_node **slot;

  if (!AGGREGATE_TYPE_P (type))
    {
      reject (var, "not aggregate");
      return false;
    }
  if (needs_to_live_in_memory (var))
    {
      reject (var, "needs to live in memory");
      return false;
    }
  if (TREE_THIS_VOLATILE (var))
    {
      reject (var, "is volatile");
      return false;
    }
  if (!COMPLETE_TYPE_P (type))
    {
      reject (var, "has incomplete type");
      return false;
    }
  if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
    {
      reject (var, "type size not fixed");
      return false;
    }
  if (tree_to_uhwi (TYPE_SIZE (type)) == 0)
    {
      reject (var, "type size is zero");
      return false;
    }
  if (type_internals_preclude_sra_p (type, &msg))
    {
      reject (var, msg);
      return false;
    }
  if (/* Fix for PR 41089.  tree-stdarg.c needs to have va_lists intact but
	 we also want to schedule it rather late.  Thus we ignore it in
	 the early pass.  */
      (sra_mode == SRA_MODE_EARLY_INTRA
       && is_va_list_type (type)))
    {
      reject (var, "is va_list");
      return false;
    }

  bitmap_set_bit (candidate_bitmap, DECL_UID (var));
  slot = candidates->find_slot_with_hash (var, DECL_UID (var), INSERT);
  *slot = var;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "\n");
    }

  return true;
}
/* The very first phase of intraprocedural SRA.  It marks in candidate_bitmap
   those declarations whose type is suitable for scalarization.  */

static bool
find_var_candidates (void)
{
  tree var, parm;
  unsigned int i;
  bool ret = false;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    ret |= maybe_add_sra_candidate (parm);

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (TREE_CODE (var) != VAR_DECL)
	continue;

      ret |= maybe_add_sra_candidate (var);
    }

  return ret;
}
1848 /* Sort all accesses for the given variable, check for partial overlaps and
1849 return NULL if there are any. If there are none, pick a representative for
1850 each combination of offset and size and create a linked list out of them.
1851 Return the pointer to the first representative and make sure it is the first
1852 one in the vector of accesses. */
1854 static struct access *
1855 sort_and_splice_var_accesses (tree var)
1857 int i, j, access_count;
1858 struct access *res, **prev_acc_ptr = &res;
1859 vec<access_p> *access_vec;
1860 bool first = true;
1861 HOST_WIDE_INT low = -1, high = 0;
1863 access_vec = get_base_access_vector (var);
1864 if (!access_vec)
1865 return NULL;
1866 access_count = access_vec->length ();
1868 /* Sort by <OFFSET, SIZE>. */
1869 access_vec->qsort (compare_access_positions);
1871 i = 0;
1872 while (i < access_count)
1874 struct access *access = (*access_vec)[i];
1875 bool grp_write = access->write;
1876 bool grp_read = !access->write;
1877 bool grp_scalar_write = access->write
1878 && is_gimple_reg_type (access->type);
1879 bool grp_scalar_read = !access->write
1880 && is_gimple_reg_type (access->type);
1881 bool grp_assignment_read = access->grp_assignment_read;
1882 bool grp_assignment_write = access->grp_assignment_write;
1883 bool multiple_scalar_reads = false;
1884 bool total_scalarization = access->grp_total_scalarization;
1885 bool grp_partial_lhs = access->grp_partial_lhs;
1886 bool first_scalar = is_gimple_reg_type (access->type);
1887 bool unscalarizable_region = access->grp_unscalarizable_region;
1889 if (first || access->offset >= high)
1891 first = false;
1892 low = access->offset;
1893 high = access->offset + access->size;
1895 else if (access->offset > low && access->offset + access->size > high)
1896 return NULL;
1897 else
1898 gcc_assert (access->offset >= low
1899 && access->offset + access->size <= high);
1901 j = i + 1;
1902 while (j < access_count)
1904 struct access *ac2 = (*access_vec)[j];
1905 if (ac2->offset != access->offset || ac2->size != access->size)
1906 break;
1907 if (ac2->write)
1909 grp_write = true;
1910 grp_scalar_write = (grp_scalar_write
1911 || is_gimple_reg_type (ac2->type));
1913 else
1915 grp_read = true;
1916 if (is_gimple_reg_type (ac2->type))
1918 if (grp_scalar_read)
1919 multiple_scalar_reads = true;
1920 else
1921 grp_scalar_read = true;
1924 grp_assignment_read |= ac2->grp_assignment_read;
1925 grp_assignment_write |= ac2->grp_assignment_write;
1926 grp_partial_lhs |= ac2->grp_partial_lhs;
1927 unscalarizable_region |= ac2->grp_unscalarizable_region;
1928 total_scalarization |= ac2->grp_total_scalarization;
1929 relink_to_new_repr (access, ac2);
1931 /* If there are both aggregate-type and scalar-type accesses with
1932 this combination of size and offset, the comparison function
1933 should have put the scalars first. */
1934 gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
1935 ac2->group_representative = access;
1936 j++;
1939 i = j;
1941 access->group_representative = access;
1942 access->grp_write = grp_write;
1943 access->grp_read = grp_read;
1944 access->grp_scalar_read = grp_scalar_read;
1945 access->grp_scalar_write = grp_scalar_write;
1946 access->grp_assignment_read = grp_assignment_read;
1947 access->grp_assignment_write = grp_assignment_write;
1948 access->grp_hint = multiple_scalar_reads || total_scalarization;
1949 access->grp_total_scalarization = total_scalarization;
1950 access->grp_partial_lhs = grp_partial_lhs;
1951 access->grp_unscalarizable_region = unscalarizable_region;
1952 if (access->first_link)
1953 add_access_to_work_queue (access);
1955 *prev_acc_ptr = access;
1956 prev_acc_ptr = &access->next_grp;
1959 gcc_assert (res == (*access_vec)[0]);
1960 return res;
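/* To illustrate the grouping above on a hypothetical example (use_int is an
   arbitrary external function):

     struct point { int x; int y; } p;
     p.x = 1;
     p.x = 2;
     use_int (p.y);

   Both writes access <offset 0, size 32> and form one group whose
   representative is the first of them; the read of p.y forms a separate
   <32, 32> group.  An access such as <16, 32> (possible e.g. through a union
   overlaying p) would partially overlap both groups and cause the whole of p
   to be disqualified by the return NULL above.  */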
1963 /* Create a variable for the given ACCESS which determines the type, name and a
1964 few other properties. Return the variable declaration; callers store it in
1965 ACCESS->replacement_decl. */
1967 static tree
1968 create_access_replacement (struct access *access)
1970 tree repl;
1972 if (access->grp_to_be_debug_replaced)
1974 repl = create_tmp_var_raw (access->type);
1975 DECL_CONTEXT (repl) = current_function_decl;
1977 else
1978 /* Drop any special alignment on the type if it's not on the main
1979 variant. This avoids issues with weirdo ABIs like AAPCS. */
1980 repl = create_tmp_var (build_qualified_type
1981 (TYPE_MAIN_VARIANT (access->type),
1982 TYPE_QUALS (access->type)), "SR");
1983 if (TREE_CODE (access->type) == COMPLEX_TYPE
1984 || TREE_CODE (access->type) == VECTOR_TYPE)
1986 if (!access->grp_partial_lhs)
1987 DECL_GIMPLE_REG_P (repl) = 1;
1989 else if (access->grp_partial_lhs
1990 && is_gimple_reg_type (access->type))
1991 TREE_ADDRESSABLE (repl) = 1;
1993 DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
1994 DECL_ARTIFICIAL (repl) = 1;
1995 DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
1997 if (DECL_NAME (access->base)
1998 && !DECL_IGNORED_P (access->base)
1999 && !DECL_ARTIFICIAL (access->base))
2001 char *pretty_name = make_fancy_name (access->expr);
2002 tree debug_expr = unshare_expr_without_location (access->expr), d;
2003 bool fail = false;
2005 DECL_NAME (repl) = get_identifier (pretty_name);
2006 obstack_free (&name_obstack, pretty_name);
2008 /* Get rid of any SSA_NAMEs embedded in debug_expr,
2009 as DECL_DEBUG_EXPR isn't considered when looking for still
2010 used SSA_NAMEs and thus they could be freed. All debug info
2011 generation cares about is whether something is constant or variable
2012 and that get_ref_base_and_extent works properly on the
2013 expression. It cannot handle accesses at a non-constant offset
2014 though, so just give up in those cases. */
2015 for (d = debug_expr;
2016 !fail && (handled_component_p (d) || TREE_CODE (d) == MEM_REF);
2017 d = TREE_OPERAND (d, 0))
2018 switch (TREE_CODE (d))
2020 case ARRAY_REF:
2021 case ARRAY_RANGE_REF:
2022 if (TREE_OPERAND (d, 1)
2023 && TREE_CODE (TREE_OPERAND (d, 1)) != INTEGER_CST)
2024 fail = true;
2025 if (TREE_OPERAND (d, 3)
2026 && TREE_CODE (TREE_OPERAND (d, 3)) != INTEGER_CST)
2027 fail = true;
2028 /* FALLTHRU */
2029 case COMPONENT_REF:
2030 if (TREE_OPERAND (d, 2)
2031 && TREE_CODE (TREE_OPERAND (d, 2)) != INTEGER_CST)
2032 fail = true;
2033 break;
2034 case MEM_REF:
2035 if (TREE_CODE (TREE_OPERAND (d, 0)) != ADDR_EXPR)
2036 fail = true;
2037 else
2038 d = TREE_OPERAND (d, 0);
2039 break;
2040 default:
2041 break;
2043 if (!fail)
2045 SET_DECL_DEBUG_EXPR (repl, debug_expr);
2046 DECL_HAS_DEBUG_EXPR_P (repl) = 1;
2048 if (access->grp_no_warning)
2049 TREE_NO_WARNING (repl) = 1;
2050 else
2051 TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
2053 else
2054 TREE_NO_WARNING (repl) = 1;
2056 if (dump_file)
2058 if (access->grp_to_be_debug_replaced)
2060 fprintf (dump_file, "Created a debug-only replacement for ");
2061 print_generic_expr (dump_file, access->base, 0);
2062 fprintf (dump_file, " offset: %u, size: %u\n",
2063 (unsigned) access->offset, (unsigned) access->size);
2065 else
2067 fprintf (dump_file, "Created a replacement for ");
2068 print_generic_expr (dump_file, access->base, 0);
2069 fprintf (dump_file, " offset: %u, size: %u: ",
2070 (unsigned) access->offset, (unsigned) access->size);
2071 print_generic_expr (dump_file, repl, 0);
2072 fprintf (dump_file, "\n");
2075 sra_stats.replacements++;
2077 return repl;
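/* As a hedged illustration of the naming above: scalarizing field i of a
   user-visible variable s gives the replacement a pretty DECL_NAME built by
   make_fancy_name, so the dump produced above reads roughly

     Created a replacement for s offset: 0, size: 32: s$i

   while replacements whose base is artificial or ignored keep only the plain
   "SR" prefix passed to create_tmp_var.  */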
2080 /* Return ACCESS scalar replacement, which must have been created earlier. */
2082 static inline tree
2083 get_access_replacement (struct access *access)
2085 gcc_checking_assert (access->replacement_decl);
2086 return access->replacement_decl;
2090 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
2091 linked list along the way. Stop when *ACCESS is NULL or the access pointed
2092 to by it is not "within" the root. Return false iff some accesses partially
2093 overlap. */
2095 static bool
2096 build_access_subtree (struct access **access)
2098 struct access *root = *access, *last_child = NULL;
2099 HOST_WIDE_INT limit = root->offset + root->size;
2101 *access = (*access)->next_grp;
2102 while (*access && (*access)->offset + (*access)->size <= limit)
2104 if (!last_child)
2105 root->first_child = *access;
2106 else
2107 last_child->next_sibling = *access;
2108 last_child = *access;
2110 if (!build_access_subtree (access))
2111 return false;
2114 if (*access && (*access)->offset < limit)
2115 return false;
2117 return true;
2120 /* Build a tree of access representatives, ACCESS is the pointer to the first
2121 one, others are linked in a list by the next_grp field. Return false iff
2122 some accesses partially overlap. */
2124 static bool
2125 build_access_trees (struct access *access)
2127 while (access)
2129 struct access *root = access;
2131 if (!build_access_subtree (&access))
2132 return false;
2133 root->next_grp = access;
2135 return true;
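/* A hypothetical example of the resulting shape: for

     struct in { int a; int b; };
     struct out { struct in i; int c; } o;

   with representatives for the whole of o, for o.i and for o.i.b, the trees
   built above look like

     <o, offset 0, size 96>
       <o.i, 0, 64>
         <o.i.b, 32, 32>

   where children are linked through first_child/next_sibling and the roots of
   separate trees through next_grp.  */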
2138 /* Return true if EXPR contains some ARRAY_REFs into a variable-bounded
2139 array. */
2141 static bool
2142 expr_with_var_bounded_array_refs_p (tree expr)
2144 while (handled_component_p (expr))
2146 if (TREE_CODE (expr) == ARRAY_REF
2147 && !tree_fits_shwi_p (array_ref_low_bound (expr)))
2148 return true;
2149 expr = TREE_OPERAND (expr, 0);
2151 return false;
2154 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
2155 doing so seems beneficial and when ALLOW_REPLACEMENTS allows it. Also set all
2156 sorts of access flags appropriately along the way, notably propagating
2157 grp_read, grp_assignment_read, grp_write, grp_assignment_write and
2158 grp_total_scalarization from PARENT down to ROOT.
2160 Creating a replacement for a scalar access is considered beneficial if its
2161 grp_hint is set (this means we are either attempting total scalarization or
2162 there is more than one direct read access) or according to the following
2163 table:
2165 Access written to through a scalar type (once or more times)
2167 | Written to in an assignment statement
2169 | | Access read as scalar _once_
2170 | | |
2171 | | | Read in an assignment statement
2172 | | | |
2173 | | | | Scalarize Comment
2174 -----------------------------------------------------------------------------
2175 0 0 0 0 No access for the scalar
2176 0 0 0 1 No access for the scalar
2177 0 0 1 0 No Single read - won't help
2178 0 0 1 1 No The same case
2179 0 1 0 0 No access for the scalar
2180 0 1 0 1 No access for the scalar
2181 0 1 1 0 Yes s = *g; return s.i;
2182 0 1 1 1 Yes The same case as above
2183 1 0 0 0 No Won't help
2184 1 0 0 1 Yes s.i = 1; *g = s;
2185 1 0 1 0 Yes s.i = 5; g = s.i;
2186 1 0 1 1 Yes The same case as above
2187 1 1 0 0 No Won't help.
2188 1 1 0 1 Yes s.i = 1; *g = s;
2189 1 1 1 0 Yes s = *g; return s.i;
2190 1 1 1 1 Yes Any of the above yeses */
2192 static bool
2193 analyze_access_subtree (struct access *root, struct access *parent,
2194 bool allow_replacements)
2196 struct access *child;
2197 HOST_WIDE_INT limit = root->offset + root->size;
2198 HOST_WIDE_INT covered_to = root->offset;
2199 bool scalar = is_gimple_reg_type (root->type);
2200 bool hole = false, sth_created = false;
2202 if (parent)
2204 if (parent->grp_read)
2205 root->grp_read = 1;
2206 if (parent->grp_assignment_read)
2207 root->grp_assignment_read = 1;
2208 if (parent->grp_write)
2209 root->grp_write = 1;
2210 if (parent->grp_assignment_write)
2211 root->grp_assignment_write = 1;
2212 if (parent->grp_total_scalarization)
2213 root->grp_total_scalarization = 1;
2216 if (root->grp_unscalarizable_region)
2217 allow_replacements = false;
2219 if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
2220 allow_replacements = false;
2222 for (child = root->first_child; child; child = child->next_sibling)
2224 hole |= covered_to < child->offset;
2225 sth_created |= analyze_access_subtree (child, root,
2226 allow_replacements && !scalar);
2228 root->grp_unscalarized_data |= child->grp_unscalarized_data;
2229 root->grp_total_scalarization &= child->grp_total_scalarization;
2230 if (child->grp_covered)
2231 covered_to += child->size;
2232 else
2233 hole = true;
2236 if (allow_replacements && scalar && !root->first_child
2237 && (root->grp_hint
2238 || ((root->grp_scalar_read || root->grp_assignment_read)
2239 && (root->grp_scalar_write || root->grp_assignment_write))))
2241 /* Always create access replacements that cover the whole access.
2242 For integral types this means the precision has to match.
2243 Avoid assumptions based on the integral type kind, too. */
2244 if (INTEGRAL_TYPE_P (root->type)
2245 && (TREE_CODE (root->type) != INTEGER_TYPE
2246 || TYPE_PRECISION (root->type) != root->size)
2247 /* But leave bitfield accesses alone. */
2248 && (TREE_CODE (root->expr) != COMPONENT_REF
2249 || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
2251 tree rt = root->type;
2252 gcc_assert ((root->offset % BITS_PER_UNIT) == 0
2253 && (root->size % BITS_PER_UNIT) == 0);
2254 root->type = build_nonstandard_integer_type (root->size,
2255 TYPE_UNSIGNED (rt));
2256 root->expr = build_ref_for_offset (UNKNOWN_LOCATION,
2257 root->base, root->offset,
2258 root->type, NULL, false);
2260 if (dump_file && (dump_flags & TDF_DETAILS))
2262 fprintf (dump_file, "Changing the type of a replacement for ");
2263 print_generic_expr (dump_file, root->base, 0);
2264 fprintf (dump_file, " offset: %u, size: %u ",
2265 (unsigned) root->offset, (unsigned) root->size);
2266 fprintf (dump_file, " to an integer.\n");
2270 root->grp_to_be_replaced = 1;
2271 root->replacement_decl = create_access_replacement (root);
2272 sth_created = true;
2273 hole = false;
2275 else
2277 if (allow_replacements
2278 && scalar && !root->first_child
2279 && (root->grp_scalar_write || root->grp_assignment_write)
2280 && !bitmap_bit_p (cannot_scalarize_away_bitmap,
2281 DECL_UID (root->base)))
2283 gcc_checking_assert (!root->grp_scalar_read
2284 && !root->grp_assignment_read);
2285 sth_created = true;
2286 if (MAY_HAVE_DEBUG_STMTS)
2288 root->grp_to_be_debug_replaced = 1;
2289 root->replacement_decl = create_access_replacement (root);
2293 if (covered_to < limit)
2294 hole = true;
2295 if (scalar)
2296 root->grp_total_scalarization = 0;
2299 if (!hole || root->grp_total_scalarization)
2300 root->grp_covered = 1;
2301 else if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
2302 root->grp_unscalarized_data = 1; /* not covered and written to */
2303 return sth_created;
2306 /* Analyze all access trees linked by next_grp by the means of
2307 analyze_access_subtree. */
2308 static bool
2309 analyze_access_trees (struct access *access)
2311 bool ret = false;
2313 while (access)
2315 if (analyze_access_subtree (access, NULL, true))
2316 ret = true;
2317 access = access->next_grp;
2320 return ret;
2323 /* Return true iff a potential new child of LACC at offset NORM_OFFSET and with
2324 size SIZE would conflict with an already existing one. If exactly such a
2325 child already exists in LACC, store a pointer to it in EXACT_MATCH. */
2327 static bool
2328 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
2329 HOST_WIDE_INT size, struct access **exact_match)
2331 struct access *child;
2333 for (child = lacc->first_child; child; child = child->next_sibling)
2335 if (child->offset == norm_offset && child->size == size)
2337 *exact_match = child;
2338 return true;
2341 if (child->offset < norm_offset + size
2342 && child->offset + child->size > norm_offset)
2343 return true;
2346 return false;
2349 /* Create a new child access of PARENT, with all properties just like MODEL
2350 except for its offset, and with its grp_write true and grp_read false.
2351 Return the new access. Note that this access
2352 is created long after all splicing and sorting; it is not located in any
2353 access vector and is automatically a representative of its group.
2355 static struct access *
2356 create_artificial_child_access (struct access *parent, struct access *model,
2357 HOST_WIDE_INT new_offset)
2359 struct access **child;
2360 tree expr = parent->base;
2362 gcc_assert (!model->grp_unscalarizable_region);
2364 struct access *access = access_pool.allocate ();
2365 memset (access, 0, sizeof (struct access));
2366 if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2367 model->type))
2369 access->grp_no_warning = true;
2370 expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2371 new_offset, model, NULL, false);
2374 access->base = parent->base;
2375 access->expr = expr;
2376 access->offset = new_offset;
2377 access->size = model->size;
2378 access->type = model->type;
2379 access->grp_write = true;
2380 access->grp_read = false;
2382 child = &parent->first_child;
2383 while (*child && (*child)->offset < new_offset)
2384 child = &(*child)->next_sibling;
2386 access->next_sibling = *child;
2387 *child = access;
2389 return access;
2393 /* Propagate all subaccesses of RACC across an assignment link to LACC. Return
2394 true if any new subaccess was created. Additionally, if RACC is a scalar
2395 access but LACC is not, change the type of the latter, if possible. */
2397 static bool
2398 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
2400 struct access *rchild;
2401 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2402 bool ret = false;
2404 if (is_gimple_reg_type (lacc->type)
2405 || lacc->grp_unscalarizable_region
2406 || racc->grp_unscalarizable_region)
2407 return false;
2409 if (is_gimple_reg_type (racc->type))
2411 if (!lacc->first_child && !racc->first_child)
2413 tree t = lacc->base;
2415 lacc->type = racc->type;
2416 if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
2417 lacc->offset, racc->type))
2418 lacc->expr = t;
2419 else
2421 lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2422 lacc->base, lacc->offset,
2423 racc, NULL, false);
2424 lacc->grp_no_warning = true;
2427 return false;
2430 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2432 struct access *new_acc = NULL;
2433 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2435 if (rchild->grp_unscalarizable_region)
2436 continue;
2438 if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2439 &new_acc))
2441 if (new_acc)
2443 rchild->grp_hint = 1;
2444 new_acc->grp_hint |= new_acc->grp_read;
2445 if (rchild->first_child)
2446 ret |= propagate_subaccesses_across_link (new_acc, rchild);
2448 continue;
2451 rchild->grp_hint = 1;
2452 new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
2453 if (new_acc)
2455 ret = true;
2456 if (racc->first_child)
2457 propagate_subaccesses_across_link (new_acc, rchild);
2461 return ret;
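/* A hypothetical illustration of this propagation: in

     struct point { int x; int y; } a, b;
     b.x = 5;
     a = b;

   b has a subaccess <offset 0, size 32> for b.x while a is only accessed as a
   whole.  Propagating across the assign_link recorded for "a = b" creates an
   artificial child access at offset 0 of a as well, so the aggregate copy can
   later be rewritten using scalar replacements on both sides.  */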
2464 /* Propagate all subaccesses across assignment links. */
2466 static void
2467 propagate_all_subaccesses (void)
2469 while (work_queue_head)
2471 struct access *racc = pop_access_from_work_queue ();
2472 struct assign_link *link;
2474 gcc_assert (racc->first_link);
2476 for (link = racc->first_link; link; link = link->next)
2478 struct access *lacc = link->lacc;
2480 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2481 continue;
2482 lacc = lacc->group_representative;
2483 if (propagate_subaccesses_across_link (lacc, racc)
2484 && lacc->first_link)
2485 add_access_to_work_queue (lacc);
2490 /* Go through all accesses collected throughout the (intraprocedural) analysis
2491 stage, exclude overlapping ones, identify representatives and build trees
2492 out of them, making decisions about scalarization on the way. Return true
2493 iff there are any to-be-scalarized variables after this stage. */
2495 static bool
2496 analyze_all_variable_accesses (void)
2498 int res = 0;
2499 bitmap tmp = BITMAP_ALLOC (NULL);
2500 bitmap_iterator bi;
2501 unsigned i;
2502 bool optimize_speed_p = !optimize_function_for_size_p (cfun);
2504 enum compiler_param param = optimize_speed_p
2505 ? PARAM_SRA_MAX_SCALARIZATION_SIZE_SPEED
2506 : PARAM_SRA_MAX_SCALARIZATION_SIZE_SIZE;
2508 /* If the user didn't set PARAM_SRA_MAX_SCALARIZATION_SIZE_<...>,
2509 fall back to a target default. */
2510 unsigned HOST_WIDE_INT max_scalarization_size
2511 = global_options_set.x_param_values[param]
2512 ? PARAM_VALUE (param)
2513 : get_move_ratio (optimize_speed_p) * UNITS_PER_WORD;
2515 max_scalarization_size *= BITS_PER_UNIT;
2517 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2518 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2519 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2521 tree var = candidate (i);
2523 if (TREE_CODE (var) == VAR_DECL
2524 && type_consists_of_records_p (TREE_TYPE (var)))
2526 if (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var)))
2527 <= max_scalarization_size)
2529 create_total_scalarization_access (var);
2530 completely_scalarize_record (var, var, 0, var);
2531 if (dump_file && (dump_flags & TDF_DETAILS))
2533 fprintf (dump_file, "Will attempt to totally scalarize ");
2534 print_generic_expr (dump_file, var, 0);
2535 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2538 else if (dump_file && (dump_flags & TDF_DETAILS))
2540 fprintf (dump_file, "Too big to totally scalarize: ");
2541 print_generic_expr (dump_file, var, 0);
2542 fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
2547 bitmap_copy (tmp, candidate_bitmap);
2548 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2550 tree var = candidate (i);
2551 struct access *access;
2553 access = sort_and_splice_var_accesses (var);
2554 if (!access || !build_access_trees (access))
2555 disqualify_candidate (var,
2556 "No or inhibitingly overlapping accesses.");
2559 propagate_all_subaccesses ();
2561 bitmap_copy (tmp, candidate_bitmap);
2562 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2564 tree var = candidate (i);
2565 struct access *access = get_first_repr_for_decl (var);
2567 if (analyze_access_trees (access))
2569 res++;
2570 if (dump_file && (dump_flags & TDF_DETAILS))
2572 fprintf (dump_file, "\nAccess trees for ");
2573 print_generic_expr (dump_file, var, 0);
2574 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2575 dump_access_tree (dump_file, access);
2576 fprintf (dump_file, "\n");
2579 else
2580 disqualify_candidate (var, "No scalar replacements to be created.");
2583 BITMAP_FREE (tmp);
2585 if (res)
2587 statistics_counter_event (cfun, "Scalarized aggregates", res);
2588 return true;
2590 else
2591 return false;
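/* The total scalarization limit computed above can be tuned from the command
   line, e.g. (a hypothetical invocation):

     gcc -O2 --param sra-max-scalarization-size-Ospeed=64 foo.c

   When the parameter is not set explicitly, the limit defaults to the
   target's move ratio multiplied by UNITS_PER_WORD, as done above.  */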
2594 /* Generate statements copying scalar replacements of accesses within a subtree
2595 into or out of AGG. ACCESS, all its children, siblings and their children
2596 are to be processed. AGG is an aggregate type expression (can be a
2597 declaration but does not have to be; it can for example also be a MEM_REF or
2598 a series of handled components). TOP_OFFSET is the offset of the processed
2599 subtree which has to be subtracted from offsets of individual accesses to
2600 get corresponding offsets for AGG. If CHUNK_SIZE is non-zero, copy only
2601 replacements in the interval <start_offset, start_offset + chunk_size>,
2602 otherwise copy all. GSI is a statement iterator used to place the new
2603 statements. WRITE should be true when the statements should write from AGG
2604 to the replacement and false if vice versa. If INSERT_AFTER is true, new
2605 statements will be added after the current statement in GSI; otherwise they
2606 will be added before it. */
2608 static void
2609 generate_subtree_copies (struct access *access, tree agg,
2610 HOST_WIDE_INT top_offset,
2611 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2612 gimple_stmt_iterator *gsi, bool write,
2613 bool insert_after, location_t loc)
2617 if (chunk_size && access->offset >= start_offset + chunk_size)
2618 return;
2620 if (access->grp_to_be_replaced
2621 && (chunk_size == 0
2622 || access->offset + access->size > start_offset))
2624 tree expr, repl = get_access_replacement (access);
2625 gassign *stmt;
2627 expr = build_ref_for_model (loc, agg, access->offset - top_offset,
2628 access, gsi, insert_after);
2630 if (write)
2632 if (access->grp_partial_lhs)
2633 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2634 !insert_after,
2635 insert_after ? GSI_NEW_STMT
2636 : GSI_SAME_STMT);
2637 stmt = gimple_build_assign (repl, expr);
2639 else
2641 TREE_NO_WARNING (repl) = 1;
2642 if (access->grp_partial_lhs)
2643 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2644 !insert_after,
2645 insert_after ? GSI_NEW_STMT
2646 : GSI_SAME_STMT);
2647 stmt = gimple_build_assign (expr, repl);
2649 gimple_set_location (stmt, loc);
2651 if (insert_after)
2652 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2653 else
2654 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2655 update_stmt (stmt);
2656 sra_stats.subtree_copies++;
2658 else if (write
2659 && access->grp_to_be_debug_replaced
2660 && (chunk_size == 0
2661 || access->offset + access->size > start_offset))
2663 gdebug *ds;
2664 tree drhs = build_debug_ref_for_model (loc, agg,
2665 access->offset - top_offset,
2666 access);
2667 ds = gimple_build_debug_bind (get_access_replacement (access),
2668 drhs, gsi_stmt (*gsi));
2669 if (insert_after)
2670 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2671 else
2672 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2675 if (access->first_child)
2676 generate_subtree_copies (access->first_child, agg, top_offset,
2677 start_offset, chunk_size, gsi,
2678 write, insert_after, loc);
2680 access = access->next_sibling;
2682 while (access);
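/* A hedged illustration: for an aggregate copy "d = s" where s has scalar
   replacements SR.1 and SR.2 for fields s.x and s.y at bit offsets 0 and 32,
   calling this function on the access subtree of s with WRITE true generates
   roughly

     SR.1 = s.x;
     SR.2 = s.y;

   whereas with WRITE false the generated statements store the replacements
   back into the aggregate instead.  */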
2685 /* Assign zero to all scalar replacements in an access subtree. ACCESS is
2686 the root of the subtree to be processed. GSI is the statement iterator used
2687 for inserting statements which are added after the current statement if
2688 INSERT_AFTER is true or before it otherwise. */
2690 static void
2691 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2692 bool insert_after, location_t loc)
2695 struct access *child;
2697 if (access->grp_to_be_replaced)
2699 gassign *stmt;
2701 stmt = gimple_build_assign (get_access_replacement (access),
2702 build_zero_cst (access->type));
2703 if (insert_after)
2704 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2705 else
2706 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2707 update_stmt (stmt);
2708 gimple_set_location (stmt, loc);
2710 else if (access->grp_to_be_debug_replaced)
2712 gdebug *ds
2713 = gimple_build_debug_bind (get_access_replacement (access),
2714 build_zero_cst (access->type),
2715 gsi_stmt (*gsi));
2716 if (insert_after)
2717 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2718 else
2719 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2722 for (child = access->first_child; child; child = child->next_sibling)
2723 init_subtree_with_zero (child, gsi, insert_after, loc);
2726 /* Clobber all scalar replacements in an access subtree. ACCESS is the
2727 root of the subtree to be processed. GSI is the statement iterator used
2728 for inserting statements which are added after the current statement if
2729 INSERT_AFTER is true or before it otherwise. */
2731 static void
2732 clobber_subtree (struct access *access, gimple_stmt_iterator *gsi,
2733 bool insert_after, location_t loc)
2736 struct access *child;
2738 if (access->grp_to_be_replaced)
2740 tree rep = get_access_replacement (access);
2741 tree clobber = build_constructor (access->type, NULL);
2742 TREE_THIS_VOLATILE (clobber) = 1;
2743 gimple stmt = gimple_build_assign (rep, clobber);
2745 if (insert_after)
2746 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2747 else
2748 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2749 update_stmt (stmt);
2750 gimple_set_location (stmt, loc);
2753 for (child = access->first_child; child; child = child->next_sibling)
2754 clobber_subtree (child, gsi, insert_after, loc);
2757 /* Search for an access representative for the given expression EXPR and
2758 return it or NULL if it cannot be found. */
2760 static struct access *
2761 get_access_for_expr (tree expr)
2763 HOST_WIDE_INT offset, size, max_size;
2764 tree base;
2766 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
2767 a different size than the size of its argument and we need the latter
2768 one. */
2769 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2770 expr = TREE_OPERAND (expr, 0);
2772 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
2773 if (max_size == -1 || !DECL_P (base))
2774 return NULL;
2776 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
2777 return NULL;
2779 return get_var_base_offset_size_access (base, offset, max_size);
2782 /* Replace the expression EXPR with a scalar replacement if there is one and
2783 generate other statements to do type conversion or subtree copying if
2784 necessary. GSI is used to place newly created statements; WRITE is true if
2785 the expression is being written to (it is on the LHS of a statement or is
2786 an output in an assembly statement). */
2788 static bool
2789 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
2791 location_t loc;
2792 struct access *access;
2793 tree type, bfr, orig_expr;
2795 if (TREE_CODE (*expr) == BIT_FIELD_REF)
2797 bfr = *expr;
2798 expr = &TREE_OPERAND (*expr, 0);
2800 else
2801 bfr = NULL_TREE;
2803 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
2804 expr = &TREE_OPERAND (*expr, 0);
2805 access = get_access_for_expr (*expr);
2806 if (!access)
2807 return false;
2808 type = TREE_TYPE (*expr);
2809 orig_expr = *expr;
2811 loc = gimple_location (gsi_stmt (*gsi));
2812 gimple_stmt_iterator alt_gsi = gsi_none ();
2813 if (write && stmt_ends_bb_p (gsi_stmt (*gsi)))
2815 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
2816 gsi = &alt_gsi;
2819 if (access->grp_to_be_replaced)
2821 tree repl = get_access_replacement (access);
2822 /* If we replace a non-register typed access simply use the original
2823 access expression to extract the scalar component afterwards.
2824 This happens if scalarizing a function return value or parameter
2825 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
2826 gcc.c-torture/compile/20011217-1.c.
2828 We also want to use this when accessing a complex or vector which can
2829 be accessed as a different type too, potentially creating a need for
2830 type conversion (see PR42196) and when scalarized unions are involved
2831 in assembler statements (see PR42398). */
2832 if (!useless_type_conversion_p (type, access->type))
2834 tree ref;
2836 ref = build_ref_for_model (loc, orig_expr, 0, access, gsi, false);
2838 if (write)
2840 gassign *stmt;
2842 if (access->grp_partial_lhs)
2843 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
2844 false, GSI_NEW_STMT);
2845 stmt = gimple_build_assign (repl, ref);
2846 gimple_set_location (stmt, loc);
2847 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2849 else
2851 gassign *stmt;
2853 if (access->grp_partial_lhs)
2854 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2855 true, GSI_SAME_STMT);
2856 stmt = gimple_build_assign (ref, repl);
2857 gimple_set_location (stmt, loc);
2858 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2861 else
2862 *expr = repl;
2863 sra_stats.exprs++;
2865 else if (write && access->grp_to_be_debug_replaced)
2867 gdebug *ds = gimple_build_debug_bind (get_access_replacement (access),
2868 NULL_TREE,
2869 gsi_stmt (*gsi));
2870 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2873 if (access->first_child)
2875 HOST_WIDE_INT start_offset, chunk_size;
2876 if (bfr
2877 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 1))
2878 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 2)))
2880 chunk_size = tree_to_uhwi (TREE_OPERAND (bfr, 1));
2881 start_offset = access->offset
2882 + tree_to_uhwi (TREE_OPERAND (bfr, 2));
2884 else
2885 start_offset = chunk_size = 0;
2887 generate_subtree_copies (access->first_child, orig_expr, access->offset,
2888 start_offset, chunk_size, gsi, write, write,
2889 loc);
2891 return true;
2894 /* Where scalar replacements of the RHS have been written to when a replacement
2895 of the LHS of an assignment cannot be directly loaded from a replacement of
2896 the RHS. */
2897 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
2898 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
2899 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
2901 struct subreplacement_assignment_data
2903 /* Offset of the access representing the lhs of the assignment. */
2904 HOST_WIDE_INT left_offset;
2906 /* LHS and RHS of the original assignment. */
2907 tree assignment_lhs, assignment_rhs;
2909 /* Access representing the rhs of the whole assignment. */
2910 struct access *top_racc;
2912 /* Stmt iterator used for statement insertions after the original assignment.
2913 It points to the main GSI used to traverse a BB during function body
2914 modification. */
2915 gimple_stmt_iterator *new_gsi;
2917 /* Stmt iterator used for statement insertions before the original
2918 assignment. Keeps on pointing to the original statement. */
2919 gimple_stmt_iterator old_gsi;
2921 /* Location of the assignment. */
2922 location_t loc;
2924 /* Records whether we have needed to refresh replacements of the LHS and
2925 from which side of the assignment this has taken place. */
2926 enum unscalarized_data_handling refreshed;
2929 /* Store all replacements in the access tree rooted in SAD->top_racc either to
2930 their base aggregate if there are unscalarized data or directly to the LHS
2931 of the original statement that is pointed to by SAD->old_gsi otherwise. */
2933 static void
2934 handle_unscalarized_data_in_subtree (struct subreplacement_assignment_data *sad)
2936 tree src;
2937 if (sad->top_racc->grp_unscalarized_data)
2939 src = sad->assignment_rhs;
2940 sad->refreshed = SRA_UDH_RIGHT;
2942 else
2944 src = sad->assignment_lhs;
2945 sad->refreshed = SRA_UDH_LEFT;
2947 generate_subtree_copies (sad->top_racc->first_child, src,
2948 sad->top_racc->offset, 0, 0,
2949 &sad->old_gsi, false, false, sad->loc);
2952 /* Try to generate statements to load all sub-replacements in an access subtree
2953 formed by children of LACC from scalar replacements in the SAD->top_racc
2954 subtree. If that is not possible, refresh the SAD->top_racc base aggregate
2955 and load the accesses from it. */
2957 static void
2958 load_assign_lhs_subreplacements (struct access *lacc,
2959 struct subreplacement_assignment_data *sad)
2961 for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
2963 HOST_WIDE_INT offset;
2964 offset = lacc->offset - sad->left_offset + sad->top_racc->offset;
2966 if (lacc->grp_to_be_replaced)
2968 struct access *racc;
2969 gassign *stmt;
2970 tree rhs;
2972 racc = find_access_in_subtree (sad->top_racc, offset, lacc->size);
2973 if (racc && racc->grp_to_be_replaced)
2975 rhs = get_access_replacement (racc);
2976 if (!useless_type_conversion_p (lacc->type, racc->type))
2977 rhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
2978 lacc->type, rhs);
2980 if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
2981 rhs = force_gimple_operand_gsi (&sad->old_gsi, rhs, true,
2982 NULL_TREE, true, GSI_SAME_STMT);
2984 else
2986 /* No suitable access on the right hand side, need to load from
2987 the aggregate. See if we have to update it first... */
2988 if (sad->refreshed == SRA_UDH_NONE)
2989 handle_unscalarized_data_in_subtree (sad);
2991 if (sad->refreshed == SRA_UDH_LEFT)
2992 rhs = build_ref_for_model (sad->loc, sad->assignment_lhs,
2993 lacc->offset - sad->left_offset,
2994 lacc, sad->new_gsi, true);
2995 else
2996 rhs = build_ref_for_model (sad->loc, sad->assignment_rhs,
2997 lacc->offset - sad->left_offset,
2998 lacc, sad->new_gsi, true);
2999 if (lacc->grp_partial_lhs)
3000 rhs = force_gimple_operand_gsi (sad->new_gsi,
3001 rhs, true, NULL_TREE,
3002 false, GSI_NEW_STMT);
3005 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
3006 gsi_insert_after (sad->new_gsi, stmt, GSI_NEW_STMT);
3007 gimple_set_location (stmt, sad->loc);
3008 update_stmt (stmt);
3009 sra_stats.subreplacements++;
3011 else
3013 if (sad->refreshed == SRA_UDH_NONE
3014 && lacc->grp_read && !lacc->grp_covered)
3015 handle_unscalarized_data_in_subtree (sad);
3017 if (lacc && lacc->grp_to_be_debug_replaced)
3019 gdebug *ds;
3020 tree drhs;
3021 struct access *racc = find_access_in_subtree (sad->top_racc,
3022 offset,
3023 lacc->size);
3025 if (racc && racc->grp_to_be_replaced)
3027 if (racc->grp_write)
3028 drhs = get_access_replacement (racc);
3029 else
3030 drhs = NULL;
3032 else if (sad->refreshed == SRA_UDH_LEFT)
3033 drhs = build_debug_ref_for_model (sad->loc, lacc->base,
3034 lacc->offset, lacc);
3035 else if (sad->refreshed == SRA_UDH_RIGHT)
3036 drhs = build_debug_ref_for_model (sad->loc, sad->top_racc->base,
3037 offset, lacc);
3038 else
3039 drhs = NULL_TREE;
3040 if (drhs
3041 && !useless_type_conversion_p (lacc->type, TREE_TYPE (drhs)))
3042 drhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
3043 lacc->type, drhs);
3044 ds = gimple_build_debug_bind (get_access_replacement (lacc),
3045 drhs, gsi_stmt (sad->old_gsi));
3046 gsi_insert_after (sad->new_gsi, ds, GSI_NEW_STMT);
3050 if (lacc->first_child)
3051 load_assign_lhs_subreplacements (lacc, sad);
3055 /* Result code for SRA assignment modification. */
3056 enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
3057 SRA_AM_MODIFIED, /* stmt changed but not
3058 removed */
3059 SRA_AM_REMOVED }; /* stmt eliminated */
3061 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT contains a pointer
3062 to the assignment and GSI is the statement iterator pointing at it. Returns
3063 the same values as sra_modify_assign. */
3065 static enum assignment_mod_result
3066 sra_modify_constructor_assign (gimple stmt, gimple_stmt_iterator *gsi)
3068 tree lhs = gimple_assign_lhs (stmt);
3069 struct access *acc = get_access_for_expr (lhs);
3070 if (!acc)
3071 return SRA_AM_NONE;
3072 location_t loc = gimple_location (stmt);
3074 if (gimple_clobber_p (stmt))
3076 /* Clobber the replacement variable. */
3077 clobber_subtree (acc, gsi, !acc->grp_covered, loc);
3078 /* Remove clobbers of fully scalarized variables, they are dead. */
3079 if (acc->grp_covered)
3081 unlink_stmt_vdef (stmt);
3082 gsi_remove (gsi, true);
3083 release_defs (stmt);
3084 return SRA_AM_REMOVED;
3086 else
3087 return SRA_AM_MODIFIED;
3090 if (vec_safe_length (CONSTRUCTOR_ELTS (gimple_assign_rhs1 (stmt))) > 0)
3092 /* I have never seen this code path trigger, but if it can happen the
3093 following should handle it gracefully. */
3094 if (access_has_children_p (acc))
3095 generate_subtree_copies (acc->first_child, lhs, acc->offset, 0, 0, gsi,
3096 true, true, loc);
3097 return SRA_AM_MODIFIED;
3100 if (acc->grp_covered)
3102 init_subtree_with_zero (acc, gsi, false, loc);
3103 unlink_stmt_vdef (stmt);
3104 gsi_remove (gsi, true);
3105 release_defs (stmt);
3106 return SRA_AM_REMOVED;
3108 else
3110 init_subtree_with_zero (acc, gsi, true, loc);
3111 return SRA_AM_MODIFIED;
3115 /* Create and return a new suitable default definition SSA_NAME for RACC which
3116 is an access describing an uninitialized part of an aggregate that is being
3117 loaded. */
3119 static tree
3120 get_repl_default_def_ssa_name (struct access *racc)
3122 gcc_checking_assert (!racc->grp_to_be_replaced
3123 && !racc->grp_to_be_debug_replaced);
3124 if (!racc->replacement_decl)
3125 racc->replacement_decl = create_access_replacement (racc);
3126 return get_or_create_ssa_default_def (cfun, racc->replacement_decl);
3129 /* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
3130 bit-field field declaration somewhere in it. */
3132 static inline bool
3133 contains_vce_or_bfcref_p (const_tree ref)
3135 while (handled_component_p (ref))
3137 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
3138 || (TREE_CODE (ref) == COMPONENT_REF
3139 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
3140 return true;
3141 ref = TREE_OPERAND (ref, 0);
3144 return false;
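/* Hypothetical examples: given

     struct bits { int bf : 3; } b;
     float f;

   the reference b.bf is a COMPONENT_REF of a DECL_BIT_FIELD and
   VIEW_CONVERT_EXPR<int>(f) contains a VIEW_CONVERT_EXPR, so the predicate
   returns true for both, while a reference consisting only of ordinary
   non-bit-field components yields false.  */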
3147 /* Examine both sides of the assignment statement pointed to by STMT, replace
3148 them with a scalar replacement if there is one and generate copying of
3149 replacements if scalarized aggregates have been used in the assignment. GSI
3150 is used to hold generated statements for type conversions and subtree
3151 copying. */
3153 static enum assignment_mod_result
3154 sra_modify_assign (gimple stmt, gimple_stmt_iterator *gsi)
3156 struct access *lacc, *racc;
3157 tree lhs, rhs;
3158 bool modify_this_stmt = false;
3159 bool force_gimple_rhs = false;
3160 location_t loc;
3161 gimple_stmt_iterator orig_gsi = *gsi;
3163 if (!gimple_assign_single_p (stmt))
3164 return SRA_AM_NONE;
3165 lhs = gimple_assign_lhs (stmt);
3166 rhs = gimple_assign_rhs1 (stmt);
3168 if (TREE_CODE (rhs) == CONSTRUCTOR)
3169 return sra_modify_constructor_assign (stmt, gsi);
3171 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
3172 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
3173 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
3175 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (stmt),
3176 gsi, false);
3177 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (stmt),
3178 gsi, true);
3179 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3182 lacc = get_access_for_expr (lhs);
3183 racc = get_access_for_expr (rhs);
3184 if (!lacc && !racc)
3185 return SRA_AM_NONE;
3187 loc = gimple_location (stmt);
3188 if (lacc && lacc->grp_to_be_replaced)
3190 lhs = get_access_replacement (lacc);
3191 gimple_assign_set_lhs (stmt, lhs);
3192 modify_this_stmt = true;
3193 if (lacc->grp_partial_lhs)
3194 force_gimple_rhs = true;
3195 sra_stats.exprs++;
3198 if (racc && racc->grp_to_be_replaced)
3200 rhs = get_access_replacement (racc);
3201 modify_this_stmt = true;
3202 if (racc->grp_partial_lhs)
3203 force_gimple_rhs = true;
3204 sra_stats.exprs++;
3206 else if (racc
3207 && !racc->grp_unscalarized_data
3208 && TREE_CODE (lhs) == SSA_NAME
3209 && !access_has_replacements_p (racc))
3211 rhs = get_repl_default_def_ssa_name (racc);
3212 modify_this_stmt = true;
3213 sra_stats.exprs++;
3216 if (modify_this_stmt)
3218 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3220 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
3221 ??? This should move to fold_stmt which we simply should
3222 call after building a VIEW_CONVERT_EXPR here. */
3223 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
3224 && !contains_bitfld_component_ref_p (lhs))
3226 lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
3227 gimple_assign_set_lhs (stmt, lhs);
3229 else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
3230 && !contains_vce_or_bfcref_p (rhs))
3231 rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
3233 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3235 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
3236 rhs);
3237 if (is_gimple_reg_type (TREE_TYPE (lhs))
3238 && TREE_CODE (lhs) != SSA_NAME)
3239 force_gimple_rhs = true;
3244 if (lacc && lacc->grp_to_be_debug_replaced)
3246 tree dlhs = get_access_replacement (lacc);
3247 tree drhs = unshare_expr (rhs);
3248 if (!useless_type_conversion_p (TREE_TYPE (dlhs), TREE_TYPE (drhs)))
3250 if (AGGREGATE_TYPE_P (TREE_TYPE (drhs))
3251 && !contains_vce_or_bfcref_p (drhs))
3252 drhs = build_debug_ref_for_model (loc, drhs, 0, lacc);
3253 if (drhs
3254 && !useless_type_conversion_p (TREE_TYPE (dlhs),
3255 TREE_TYPE (drhs)))
3256 drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
3257 TREE_TYPE (dlhs), drhs);
3259 gdebug *ds = gimple_build_debug_bind (dlhs, drhs, stmt);
3260 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3263 /* From this point on, the function deals with assignments in between
3264 aggregates when at least one has scalar reductions of some of its
3265 components. There are three possible scenarios: 1) both the LHS and RHS have
3266 to-be-scalarized components, 2) only the RHS has or 3) only the LHS has.
3268 In the first case, we would like to load the LHS components from RHS
3269 components whenever possible. If that is not possible, we would like to
3270 read it directly from the RHS (after updating it by storing in it its own
3271 components). If there are some necessary unscalarized data in the LHS,
3272 those will be loaded by the original assignment too. If neither of these
3273 cases happen, the original statement can be removed. Most of this is done
3274 by load_assign_lhs_subreplacements.
3276 In the second case, we would like to store all RHS scalarized components
3277 directly into LHS and if they cover the aggregate completely, remove the
3278 statement too. In the third case, we want the LHS components to be loaded
3279 directly from the RHS (DSE will remove the original statement if it
3280 becomes redundant).
3282 This is a bit complex but manageable when types match and when unions do
3283 not cause confusion in a way that we cannot really load a component of LHS
3284 from the RHS or vice versa (the access representing this level can have
3285 subaccesses that are accessible only through a different union field at a
3286 higher level - different from the one used in the examined expression).
3287 Unions are fun.
3289 Therefore, I specially handle a fourth case, happening when there is a
3290 specific type cast or it is impossible to locate a scalarized subaccess on
3291 the other side of the expression. If that happens, I simply "refresh" the
3292 RHS by storing in it its scalarized components, leave the original statement
3293 there to do the copying and then load the scalar replacements of the LHS.
3294 This is what the first branch does. */
3296 if (modify_this_stmt
3297 || gimple_has_volatile_ops (stmt)
3298 || contains_vce_or_bfcref_p (rhs)
3299 || contains_vce_or_bfcref_p (lhs)
3300 || stmt_ends_bb_p (stmt))
3302 if (access_has_children_p (racc))
3303 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
3304 gsi, false, false, loc);
3305 if (access_has_children_p (lacc))
3307 gimple_stmt_iterator alt_gsi = gsi_none ();
3308 if (stmt_ends_bb_p (stmt))
3310 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
3311 gsi = &alt_gsi;
3313 generate_subtree_copies (lacc->first_child, lhs, lacc->offset, 0, 0,
3314 gsi, true, true, loc);
3316 sra_stats.separate_lhs_rhs_handling++;
3318 /* This gimplification must be done after generate_subtree_copies,
3319 lest we insert the subtree copies in the middle of the gimplified
3320 sequence. */
3321 if (force_gimple_rhs)
3322 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
3323 true, GSI_SAME_STMT);
3324 if (gimple_assign_rhs1 (stmt) != rhs)
3326 modify_this_stmt = true;
3327 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
3328 gcc_assert (stmt == gsi_stmt (orig_gsi));
3331 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3333 else
3335 if (access_has_children_p (lacc)
3336 && access_has_children_p (racc)
3337 /* When an access represents an unscalarizable region, it usually
3338 represents accesses with variable offset and thus must not be used
3339 to generate new memory accesses. */
3340 && !lacc->grp_unscalarizable_region
3341 && !racc->grp_unscalarizable_region)
3343 struct subreplacement_assignment_data sad;
3345 sad.left_offset = lacc->offset;
3346 sad.assignment_lhs = lhs;
3347 sad.assignment_rhs = rhs;
3348 sad.top_racc = racc;
3349 sad.old_gsi = *gsi;
3350 sad.new_gsi = gsi;
3351 sad.loc = gimple_location (stmt);
3352 sad.refreshed = SRA_UDH_NONE;
3354 if (lacc->grp_read && !lacc->grp_covered)
3355 handle_unscalarized_data_in_subtree (&sad);
3357 load_assign_lhs_subreplacements (lacc, &sad);
3358 if (sad.refreshed != SRA_UDH_RIGHT)
3360 gsi_next (gsi);
3361 unlink_stmt_vdef (stmt);
3362 gsi_remove (&sad.old_gsi, true);
3363 release_defs (stmt);
3364 sra_stats.deleted++;
3365 return SRA_AM_REMOVED;
3368 else
3370 if (access_has_children_p (racc)
3371 && !racc->grp_unscalarized_data)
3373 if (dump_file)
3375 fprintf (dump_file, "Removing load: ");
3376 print_gimple_stmt (dump_file, stmt, 0, 0);
3378 generate_subtree_copies (racc->first_child, lhs,
3379 racc->offset, 0, 0, gsi,
3380 false, false, loc);
3381 gcc_assert (stmt == gsi_stmt (*gsi));
3382 unlink_stmt_vdef (stmt);
3383 gsi_remove (gsi, true);
3384 release_defs (stmt);
3385 sra_stats.deleted++;
3386 return SRA_AM_REMOVED;
3388 /* Restore the aggregate RHS from its components so the
3389 prevailing aggregate copy does the right thing. */
3390 if (access_has_children_p (racc))
3391 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
3392 gsi, false, false, loc);
3393 /* Re-load the components of the aggregate copy destination.
3394 But use the RHS aggregate to load from to expose more
3395 optimization opportunities. */
3396 if (access_has_children_p (lacc))
3397 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
3398 0, 0, gsi, true, true, loc);
3401 return SRA_AM_NONE;
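/* A hypothetical end-to-end example of the assignment handling above (use_int
   stands for an arbitrary external function):

     struct point { int x; int y; } s, d;
     s.x = 1;
     s.y = 2;
     d = s;
     use_int (d.x);

   With both sides scalarized and s fully covered, the copy "d = s" is
   rewritten by load_assign_lhs_subreplacements into assignments between the
   scalar replacements of d and s, and the original aggregate copy is removed
   (SRA_AM_REMOVED).  */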
3405 /* Traverse the function body and perform all modifications as decided in
3406 analyze_all_variable_accesses. Return true iff the CFG has been
3407 changed. */
3409 static bool
3410 sra_modify_function_body (void)
3412 bool cfg_changed = false;
3413 basic_block bb;
3415 FOR_EACH_BB_FN (bb, cfun)
3417 gimple_stmt_iterator gsi = gsi_start_bb (bb);
3418 while (!gsi_end_p (gsi))
3420 gimple stmt = gsi_stmt (gsi);
3421 enum assignment_mod_result assign_result;
3422 bool modified = false, deleted = false;
3423 tree *t;
3424 unsigned i;
3426 switch (gimple_code (stmt))
3428 case GIMPLE_RETURN:
3429 t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
3430 if (*t != NULL_TREE)
3431 modified |= sra_modify_expr (t, &gsi, false);
3432 break;
3434 case GIMPLE_ASSIGN:
3435 assign_result = sra_modify_assign (stmt, &gsi);
3436 modified |= assign_result == SRA_AM_MODIFIED;
3437 deleted = assign_result == SRA_AM_REMOVED;
3438 break;
3440 case GIMPLE_CALL:
3441 /* Operands must be processed before the lhs. */
3442 for (i = 0; i < gimple_call_num_args (stmt); i++)
3444 t = gimple_call_arg_ptr (stmt, i);
3445 modified |= sra_modify_expr (t, &gsi, false);
3448 if (gimple_call_lhs (stmt))
3450 t = gimple_call_lhs_ptr (stmt);
3451 modified |= sra_modify_expr (t, &gsi, true);
3453 break;
3455 case GIMPLE_ASM:
3457 gasm *asm_stmt = as_a <gasm *> (stmt);
3458 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
3460 t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
3461 modified |= sra_modify_expr (t, &gsi, false);
3463 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
3465 t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
3466 modified |= sra_modify_expr (t, &gsi, true);
3469 break;
3471 default:
3472 break;
3475 if (modified)
3477 update_stmt (stmt);
3478 if (maybe_clean_eh_stmt (stmt)
3479 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
3480 cfg_changed = true;
3482 if (!deleted)
3483 gsi_next (&gsi);
3487 gsi_commit_edge_inserts ();
3488 return cfg_changed;
3491 /* Generate statements initializing scalar replacements of parts of function
3492 parameters. */
3494 static void
3495 initialize_parameter_reductions (void)
3497 gimple_stmt_iterator gsi;
3498 gimple_seq seq = NULL;
3499 tree parm;
3501 gsi = gsi_start (seq);
3502 for (parm = DECL_ARGUMENTS (current_function_decl);
3503 parm;
3504 parm = DECL_CHAIN (parm))
3506 vec<access_p> *access_vec;
3507 struct access *access;
3509 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3510 continue;
3511 access_vec = get_base_access_vector (parm);
3512 if (!access_vec)
3513 continue;
3515 for (access = (*access_vec)[0];
3516 access;
3517 access = access->next_grp)
3518 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
3519 EXPR_LOCATION (parm));
3522 seq = gsi_seq (gsi);
3523 if (seq)
3524 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
3527 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
3528 it reveals there are components of some aggregates to be scalarized, it runs
3529 the required transformations. */
3530 static unsigned int
3531 perform_intra_sra (void)
3533 int ret = 0;
3534 sra_initialize ();
3536 if (!find_var_candidates ())
3537 goto out;
3539 if (!scan_function ())
3540 goto out;
3542 if (!analyze_all_variable_accesses ())
3543 goto out;
3545 if (sra_modify_function_body ())
3546 ret = TODO_update_ssa | TODO_cleanup_cfg;
3547 else
3548 ret = TODO_update_ssa;
3549 initialize_parameter_reductions ();
3551 statistics_counter_event (cfun, "Scalar replacements created",
3552 sra_stats.replacements);
3553 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
3554 statistics_counter_event (cfun, "Subtree copy stmts",
3555 sra_stats.subtree_copies);
3556 statistics_counter_event (cfun, "Subreplacement stmts",
3557 sra_stats.subreplacements);
3558 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
3559 statistics_counter_event (cfun, "Separate LHS and RHS handling",
3560 sra_stats.separate_lhs_rhs_handling);
3562 out:
3563 sra_deinitialize ();
3564 return ret;
3567 /* Perform early intraprocedural SRA. */
3568 static unsigned int
3569 early_intra_sra (void)
3571 sra_mode = SRA_MODE_EARLY_INTRA;
3572 return perform_intra_sra ();
3575 /* Perform "late" intraprocedural SRA. */
3576 static unsigned int
3577 late_intra_sra (void)
3579 sra_mode = SRA_MODE_INTRA;
3580 return perform_intra_sra ();
3584 static bool
3585 gate_intra_sra (void)
3587 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
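/* The gate can be influenced from the command line: -fno-tree-sra disables
   both passes, and the debug counter allows bisecting, e.g. (a hypothetical
   invocation)

     gcc -O2 -fdbg-cnt=tree_sra:10 foo.c

   which makes dbg_cnt (tree_sra) succeed only for its first 10 queries.  */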
3591 namespace {
3593 const pass_data pass_data_sra_early =
3595 GIMPLE_PASS, /* type */
3596 "esra", /* name */
3597 OPTGROUP_NONE, /* optinfo_flags */
3598 TV_TREE_SRA, /* tv_id */
3599 ( PROP_cfg | PROP_ssa ), /* properties_required */
3600 0, /* properties_provided */
3601 0, /* properties_destroyed */
3602 0, /* todo_flags_start */
3603 TODO_update_ssa, /* todo_flags_finish */
3606 class pass_sra_early : public gimple_opt_pass
3608 public:
3609 pass_sra_early (gcc::context *ctxt)
3610 : gimple_opt_pass (pass_data_sra_early, ctxt)
3613 /* opt_pass methods: */
3614 virtual bool gate (function *) { return gate_intra_sra (); }
3615 virtual unsigned int execute (function *) { return early_intra_sra (); }
3617 }; // class pass_sra_early
3619 } // anon namespace
3621 gimple_opt_pass *
3622 make_pass_sra_early (gcc::context *ctxt)
3624 return new pass_sra_early (ctxt);
3627 namespace {
3629 const pass_data pass_data_sra =
3631 GIMPLE_PASS, /* type */
3632 "sra", /* name */
3633 OPTGROUP_NONE, /* optinfo_flags */
3634 TV_TREE_SRA, /* tv_id */
3635 ( PROP_cfg | PROP_ssa ), /* properties_required */
3636 0, /* properties_provided */
3637 0, /* properties_destroyed */
3638 TODO_update_address_taken, /* todo_flags_start */
3639 TODO_update_ssa, /* todo_flags_finish */
3642 class pass_sra : public gimple_opt_pass
3644 public:
3645 pass_sra (gcc::context *ctxt)
3646 : gimple_opt_pass (pass_data_sra, ctxt)
3649 /* opt_pass methods: */
3650 virtual bool gate (function *) { return gate_intra_sra (); }
3651 virtual unsigned int execute (function *) { return late_intra_sra (); }
3653 }; // class pass_sra
3655 } // anon namespace
3657 gimple_opt_pass *
3658 make_pass_sra (gcc::context *ctxt)
3660 return new pass_sra (ctxt);
3664 /* Return true iff PARM (which must be a PARM_DECL) is an unused scalar
3665 parameter. */
3667 static bool
3668 is_unused_scalar_param (tree parm)
3670 tree name;
3671 return (is_gimple_reg (parm)
3672 && (!(name = ssa_default_def (cfun, parm))
3673 || has_zero_uses (name)));
3676 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
3677 examine whether there are any direct or otherwise infeasible ones. If so,
3678 return true, otherwise return false. PARM must be a gimple register with a
3679 non-NULL default definition. */
3681 static bool
3682 ptr_parm_has_direct_uses (tree parm)
3684 imm_use_iterator ui;
3685 gimple stmt;
3686 tree name = ssa_default_def (cfun, parm);
3687 bool ret = false;
3689 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
3691 int uses_ok = 0;
3692 use_operand_p use_p;
3694 if (is_gimple_debug (stmt))
3695 continue;
3697 /* Valid uses include dereferences on the lhs and the rhs. */
3698 if (gimple_has_lhs (stmt))
3700 tree lhs = gimple_get_lhs (stmt);
3701 while (handled_component_p (lhs))
3702 lhs = TREE_OPERAND (lhs, 0);
3703 if (TREE_CODE (lhs) == MEM_REF
3704 && TREE_OPERAND (lhs, 0) == name
3705 && integer_zerop (TREE_OPERAND (lhs, 1))
3706 && types_compatible_p (TREE_TYPE (lhs),
3707 TREE_TYPE (TREE_TYPE (name)))
3708 && !TREE_THIS_VOLATILE (lhs))
3709 uses_ok++;
3711 if (gimple_assign_single_p (stmt))
3713 tree rhs = gimple_assign_rhs1 (stmt);
3714 while (handled_component_p (rhs))
3715 rhs = TREE_OPERAND (rhs, 0);
3716 if (TREE_CODE (rhs) == MEM_REF
3717 && TREE_OPERAND (rhs, 0) == name
3718 && integer_zerop (TREE_OPERAND (rhs, 1))
3719 && types_compatible_p (TREE_TYPE (rhs),
3720 TREE_TYPE (TREE_TYPE (name)))
3721 && !TREE_THIS_VOLATILE (rhs))
3722 uses_ok++;
3724 else if (is_gimple_call (stmt))
3726 unsigned i;
3727 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3729 tree arg = gimple_call_arg (stmt, i);
3730 while (handled_component_p (arg))
3731 arg = TREE_OPERAND (arg, 0);
3732 if (TREE_CODE (arg) == MEM_REF
3733 && TREE_OPERAND (arg, 0) == name
3734 && integer_zerop (TREE_OPERAND (arg, 1))
3735 && types_compatible_p (TREE_TYPE (arg),
3736 TREE_TYPE (TREE_TYPE (name)))
3737 && !TREE_THIS_VOLATILE (arg))
3738 uses_ok++;
3742 /* If the number of valid uses does not match the number of
3743 uses in this stmt there is an unhandled use. */
3744 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
3745 --uses_ok;
3747 if (uses_ok != 0)
3748 ret = true;
3750 if (ret)
3751 BREAK_FROM_IMM_USE_STMT (ui);
3754 return ret;
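/* Hypothetical examples (consume_ptr and consume_int are arbitrary external
   functions): in

     void f (int *p) { consume_ptr (p); }

   the default definition of p is used directly as a call argument, which is
   not a recognized dereference, so the function returns true and p cannot
   become an IPA-SRA candidate.  By contrast, in

     void g (int *p) { consume_int (*p); }

   the only use is a MEM_REF with zero offset and a compatible type, so the
   function returns false.  */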
3757 /* Identify candidates for reduction for IPA-SRA based on their type and mark
3758 them in candidate_bitmap. Note that these do not necessarily include
3759 parameters which are unused and thus can be removed. Return true iff any
3760 such candidate has been found. */
3762 static bool
3763 find_param_candidates (void)
3765 tree parm;
3766 int count = 0;
3767 bool ret = false;
3768 const char *msg;
3770 for (parm = DECL_ARGUMENTS (current_function_decl);
3771 parm;
3772 parm = DECL_CHAIN (parm))
3774 tree type = TREE_TYPE (parm);
3775 tree_node **slot;
3777 count++;
3779 if (TREE_THIS_VOLATILE (parm)
3780 || TREE_ADDRESSABLE (parm)
3781 || (!is_gimple_reg_type (type) && is_va_list_type (type)))
3782 continue;
3784 if (is_unused_scalar_param (parm))
3786 ret = true;
3787 continue;
3790 if (POINTER_TYPE_P (type))
3792 type = TREE_TYPE (type);
3794 if (TREE_CODE (type) == FUNCTION_TYPE
3795 || TYPE_VOLATILE (type)
3796 || upc_shared_type_p (type)
3797 || (TREE_CODE (type) == ARRAY_TYPE
3798 && TYPE_NONALIASED_COMPONENT (type))
3799 || !is_gimple_reg (parm)
3800 || is_va_list_type (type)
3801 || ptr_parm_has_direct_uses (parm))
3802 continue;
3804 else if (!AGGREGATE_TYPE_P (type))
3805 continue;
3807 if (!COMPLETE_TYPE_P (type)
3808 || !tree_fits_uhwi_p (TYPE_SIZE (type))
3809 || tree_to_uhwi (TYPE_SIZE (type)) == 0
3810 || (AGGREGATE_TYPE_P (type)
3811 && type_internals_preclude_sra_p (type, &msg)))
3812 continue;
3814 bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
3815 slot = candidates->find_slot_with_hash (parm, DECL_UID (parm), INSERT);
3816 *slot = parm;
3818 ret = true;
3819 if (dump_file && (dump_flags & TDF_DETAILS))
3821 fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
3822 print_generic_expr (dump_file, parm, 0);
3823 fprintf (dump_file, "\n");
3827 func_param_count = count;
3828 return ret;
3831 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
3832 maybe_modified. */
3834 static bool
3835 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
3836 void *data)
3838 struct access *repr = (struct access *) data;
3840 repr->grp_maybe_modified = 1;
3841 return true;
3844 /* Analyze what representatives (in linked lists accessible from
3845 REPRESENTATIVES) can be modified by side effects of statements in the
3846 current function. */
3848 static void
3849 analyze_modified_params (vec<access_p> representatives)
3851 int i;
3853 for (i = 0; i < func_param_count; i++)
3855 struct access *repr;
3857 for (repr = representatives[i];
3858 repr;
3859 repr = repr->next_grp)
3861 struct access *access;
3862 bitmap visited;
3863 ao_ref ar;
3865 if (no_accesses_p (repr))
3866 continue;
3867 if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
3868 || repr->grp_maybe_modified)
3869 continue;
3871 ao_ref_init (&ar, repr->expr);
3872 visited = BITMAP_ALLOC (NULL);
3873 for (access = repr; access; access = access->next_sibling)
3875 /* All accesses are read ones, otherwise grp_maybe_modified would
3876 be trivially set. */
3877 walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
3878 mark_maybe_modified, repr, &visited);
3879 if (repr->grp_maybe_modified)
3880 break;
3882 BITMAP_FREE (visited);
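/* Illustration (not part of tree-sra.c): although *P is only read below,
   the store through Q may alias it, so walking the virtual definitions
   reaches mark_maybe_modified and sets grp_maybe_modified for P's access,
   blocking the by-reference-to-by-value conversion.  */

static int may_be_modified (int *p, int *q)
{
  *q = 5;	/* may change *p when p == q */
  return *p;
}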
3887 /* Propagate distances in bb_dereferences in the opposite direction to the
3888 control flow edges, in each step storing the maximum of the current value
3889 and the minimum of all successors. These steps are repeated until the table
3890 stabilizes. Note that BBs which might terminate the function (according to
3891 the final_bbs bitmap) are never updated in this way. */
3893 static void
3894 propagate_dereference_distances (void)
3896 basic_block bb;
3898 auto_vec<basic_block> queue (last_basic_block_for_fn (cfun));
3899 queue.quick_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
3900 FOR_EACH_BB_FN (bb, cfun)
3902 queue.quick_push (bb);
3903 bb->aux = bb;
3906 while (!queue.is_empty ())
3908 edge_iterator ei;
3909 edge e;
3910 bool change = false;
3911 int i;
3913 bb = queue.pop ();
3914 bb->aux = NULL;
3916 if (bitmap_bit_p (final_bbs, bb->index))
3917 continue;
3919 for (i = 0; i < func_param_count; i++)
3921 int idx = bb->index * func_param_count + i;
3922 bool first = true;
3923 HOST_WIDE_INT inh = 0;
3925 FOR_EACH_EDGE (e, ei, bb->succs)
3927 int succ_idx = e->dest->index * func_param_count + i;
3929 if (e->src == EXIT_BLOCK_PTR_FOR_FN (cfun))
3930 continue;
3932 if (first)
3934 first = false;
3935 inh = bb_dereferences [succ_idx];
3937 else if (bb_dereferences [succ_idx] < inh)
3938 inh = bb_dereferences [succ_idx];
3941 if (!first && bb_dereferences[idx] < inh)
3943 bb_dereferences[idx] = inh;
3944 change = true;
3948 if (change && !bitmap_bit_p (final_bbs, bb->index))
3949 FOR_EACH_EDGE (e, ei, bb->preds)
3951 if (e->src->aux)
3952 continue;
3954 e->src->aux = e->src;
3955 queue.quick_push (e->src);
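/* A self-contained toy model of the propagation above (an editorial
   sketch, not GCC code): deref[bb] approximates one column of
   bb_dereferences, final blocks are never updated, and each step raises a
   block's value to the minimum over its successors until a fixed point is
   reached.  */

#include <stdio.h>

#define NBB 4

int main (void)
{
  /* succs[b] lists the successors of block b, terminated by -1.
     Block 3 stands for a block in final_bbs.  */
  static const int succs[NBB][3] = {
    {1, 2, -1},			/* 0 branches to 1 and 2 */
    {3, -1, -1},		/* 1 */
    {3, -1, -1},		/* 2 */
    {-1, -1, -1}		/* 3: final, never updated */
  };
  long deref[NBB] = {0, 32, 32, 0}; /* bits known dereferenced per block */
  int changed = 1;

  while (changed)
    {
      changed = 0;
      for (int b = 0; b < NBB; b++)
	{
	  if (succs[b][0] < 0)
	    continue;		/* final block: no propagation into it */
	  long inh = -1;
	  for (int i = 0; i < 3 && succs[b][i] >= 0; i++)
	    if (inh < 0 || deref[succs[b][i]] < inh)
	      inh = deref[succs[b][i]];	/* minimum over successors */
	  if (inh > deref[b])
	    {
	      deref[b] = inh;	/* maximum of current value and minimum */
	      changed = 1;
	    }
	}
    }

  for (int b = 0; b < NBB; b++)
    printf ("bb %d: %ld\n", b, deref[b]); /* block 0 is raised to 32 */
  return 0;
}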
3960 /* Dump a dereferences TABLE with heading STR to file F. */
3962 static void
3963 dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
3965 basic_block bb;
3967 fprintf (f, "%s", str);
3968 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
3969 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
3971 fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
3972 if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
3974 int i;
3975 for (i = 0; i < func_param_count; i++)
3977 int idx = bb->index * func_param_count + i;
3978 fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
3981 fprintf (f, "\n");
3983 fprintf (f, "\n");
3986 /* Determine which (parts of) parameters passed by reference and not
3987 assigned to are not certainly dereferenced in this function, so that the
3988 dereferencing cannot be safely moved to the caller without potentially
3989 introducing a segfault. Mark such REPRESENTATIVES as
3990 grp_not_necessarilly_dereferenced.
3992 The maximum dereferenced "distance," i.e. the offset + size of the accessed
3993 part, is calculated for each pointer parameter rather than a simple
3994 boolean, in order to handle cases when only a fraction of the whole
3995 aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
3996 an example).
3998 The maximum dereference distances for each pointer parameter and BB are
3999 already stored in bb_dereferences. This routine simply propagates these
4000 values upwards by propagate_dereference_distances and then compares the
4001 distances of individual parameters in the ENTRY BB to the equivalent
4002 distances of each representative of a (fraction of a) parameter. */
4004 static void
4005 analyze_caller_dereference_legality (vec<access_p> representatives)
4007 int i;
4009 if (dump_file && (dump_flags & TDF_DETAILS))
4010 dump_dereferences_table (dump_file,
4011 "Dereference table before propagation:\n",
4012 bb_dereferences);
4014 propagate_dereference_distances ();
4016 if (dump_file && (dump_flags & TDF_DETAILS))
4017 dump_dereferences_table (dump_file,
4018 "Dereference table after propagation:\n",
4019 bb_dereferences);
4021 for (i = 0; i < func_param_count; i++)
4023 struct access *repr = representatives[i];
4024 int idx = ENTRY_BLOCK_PTR_FOR_FN (cfun)->index * func_param_count + i;
4026 if (!repr || no_accesses_p (repr))
4027 continue;
4029 do
4031 if ((repr->offset + repr->size) > bb_dereferences[idx])
4032 repr->grp_not_necessarilly_dereferenced = 1;
4033 repr = repr->next_grp;
4035 while (repr);
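/* Illustration (not part of tree-sra.c): *P below is dereferenced only on
   one path, so its dereference distance in the ENTRY BB stays at 0 and
   the representative is marked grp_not_necessarilly_dereferenced;
   hoisting the load into every caller could fault, e.g. for a caller
   that passes a pointer to a partially allocated object.  */

static int maybe_deref (int *p, int flag)
{
  if (flag)
    return *p;		/* not guaranteed to execute */
  return 0;
}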
4039 /* Return the representative access for the parameter declaration PARM if it is
4040 a scalar passed by reference which is not written to and the pointer value
4041 is not used directly. Thus, if it is legal to dereference it in the caller
4042 and we can rule out modifications through aliases, such parameter should be
4043 turned into one passed by value. Return NULL otherwise. */
4045 static struct access *
4046 unmodified_by_ref_scalar_representative (tree parm)
4048 int i, access_count;
4049 struct access *repr;
4050 vec<access_p> *access_vec;
4052 access_vec = get_base_access_vector (parm);
4053 gcc_assert (access_vec);
4054 repr = (*access_vec)[0];
4055 if (repr->write)
4056 return NULL;
4057 repr->group_representative = repr;
4059 access_count = access_vec->length ();
4060 for (i = 1; i < access_count; i++)
4062 struct access *access = (*access_vec)[i];
4063 if (access->write)
4064 return NULL;
4065 access->group_representative = repr;
4066 access->next_sibling = repr->next_sibling;
4067 repr->next_sibling = access;
4070 repr->grp_read = 1;
4071 repr->grp_scalar_ptr = 1;
4072 return repr;
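/* Illustration of the transformation this enables (an editorial sketch,
   not literal compiler output; the clone name is hypothetical):  */

static int get (const int *p)
{
  return *p;		/* before: scalar passed by reference */
}

/* ...behaves as if rewritten to: */

static int get_isra (int v)
{
  return v;		/* after: the caller passes *p by value */
}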
4075 /* Return true iff this ACCESS precludes IPA-SRA of the parameter it is
4076 associated with. REQ_ALIGN is the minimum required alignment. */
4078 static bool
4079 access_precludes_ipa_sra_p (struct access *access, unsigned int req_align)
4081 unsigned int exp_align;
4082 /* Avoid issues such as the second simple testcase in PR 42025. The problem
4083 is an incompatible assignment in a call statement (and possibly even in asm
4084 statements). This can be relaxed by using a new temporary but only for
4085 non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
4086 intraprocedural SRA we deal with this by keeping the old aggregate around,
4087 something we cannot do in IPA-SRA.) */
4088 if (access->write
4089 && (is_gimple_call (access->stmt)
4090 || gimple_code (access->stmt) == GIMPLE_ASM))
4091 return true;
4093 exp_align = get_object_alignment (access->expr);
4094 if (exp_align < req_align)
4095 return true;
4097 return false;
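/* Illustration (not part of tree-sra.c) of the PR 42025 situation that
   the write-from-call check above rejects; MAKE is a hypothetical
   function returning the aggregate.  */

struct two { int a, b; };
extern struct two make (void);

static int written_from_call (struct two *p)
{
  *p = make ();		/* LHS of a call statement -- precludes IPA-SRA */
  return p->a;
}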
4101 /* Sort collected accesses for parameter PARM, identify representatives for
4102 each accessed region and link them together. Return NULL if there are
4103 different but overlapping accesses, return the special pointer value meaning
4104 there are no accesses for this parameter if that is the case, and return the
4105 first representative otherwise. Set *RO_GRP if there is a group of accesses
4106 with only read (i.e. no write) accesses. */
4108 static struct access *
4109 splice_param_accesses (tree parm, bool *ro_grp)
4111 int i, j, access_count, group_count;
4112 int agg_size, total_size = 0;
4113 struct access *access, *res, **prev_acc_ptr = &res;
4114 vec<access_p> *access_vec;
4116 access_vec = get_base_access_vector (parm);
4117 if (!access_vec)
4118 return &no_accesses_representant;
4119 access_count = access_vec->length ();
4121 access_vec->qsort (compare_access_positions);
4123 i = 0;
4124 total_size = 0;
4125 group_count = 0;
4126 while (i < access_count)
4128 bool modification;
4129 tree a1_alias_type;
4130 access = (*access_vec)[i];
4131 modification = access->write;
4132 if (access_precludes_ipa_sra_p (access, TYPE_ALIGN (access->type)))
4133 return NULL;
4134 a1_alias_type = reference_alias_ptr_type (access->expr);
4136 /* Access is about to become group representative unless we find some
4137 nasty overlap which would preclude us from breaking this parameter
4138 apart. */
4140 j = i + 1;
4141 while (j < access_count)
4143 struct access *ac2 = (*access_vec)[j];
4144 if (ac2->offset != access->offset)
4146 /* All or nothing law for parameters. */
4147 if (access->offset + access->size > ac2->offset)
4148 return NULL;
4149 else
4150 break;
4152 else if (ac2->size != access->size)
4153 return NULL;
4155 if (access_precludes_ipa_sra_p (ac2, TYPE_ALIGN (access->type))
4156 || (ac2->type != access->type
4157 && (TREE_ADDRESSABLE (ac2->type)
4158 || TREE_ADDRESSABLE (access->type)))
4159 || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
4160 return NULL;
4162 modification |= ac2->write;
4163 ac2->group_representative = access;
4164 ac2->next_sibling = access->next_sibling;
4165 access->next_sibling = ac2;
4166 j++;
4169 group_count++;
4170 access->grp_maybe_modified = modification;
4171 if (!modification)
4172 *ro_grp = true;
4173 *prev_acc_ptr = access;
4174 prev_acc_ptr = &access->next_grp;
4175 total_size += access->size;
4176 i = j;
4179 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4180 agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
4181 else
4182 agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
4183 if (total_size >= agg_size)
4184 return NULL;
4186 gcc_assert (group_count > 0);
4187 return res;
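/* Illustration (not part of tree-sra.c): two reads at the same offset but
   with different sizes, as through the union below on an LP64 target,
   make splice_param_accesses return NULL.  */

union u { long l; int i; };

static long overlapping (union u *p)
{
  return p->l + p->i;	/* same offset, different sizes -> no splicing */
}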
4190 /* Decide whether parameters with representative accesses given by REPR should
4191 be reduced into components. */
4193 static int
4194 decide_one_param_reduction (struct access *repr)
4196 int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
4197 bool by_ref;
4198 tree parm;
4200 parm = repr->base;
4201 cur_parm_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
4202 gcc_assert (cur_parm_size > 0);
4204 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4206 by_ref = true;
4207 agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
4209 else
4211 by_ref = false;
4212 agg_size = cur_parm_size;
4215 if (dump_file)
4217 struct access *acc;
4218 fprintf (dump_file, "Evaluating PARAM group sizes for ");
4219 print_generic_expr (dump_file, parm, 0);
4220 fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
4221 for (acc = repr; acc; acc = acc->next_grp)
4222 dump_access (dump_file, acc, true);
4225 total_size = 0;
4226 new_param_count = 0;
4228 for (; repr; repr = repr->next_grp)
4230 gcc_assert (parm == repr->base);
4232 /* Taking the address of a non-addressable field is verboten. */
4233 if (by_ref && repr->non_addressable)
4234 return 0;
4236 /* Do not decompose a non-BLKmode param in a way that would
4237 create BLKmode params. Especially for by-reference passing
4238 (thus, pointer-type param) this is hardly worthwhile. */
4239 if (DECL_MODE (parm) != BLKmode
4240 && TYPE_MODE (repr->type) == BLKmode)
4241 return 0;
4243 if (!by_ref || (!repr->grp_maybe_modified
4244 && !repr->grp_not_necessarilly_dereferenced))
4245 total_size += repr->size;
4246 else
4247 total_size += cur_parm_size;
4249 new_param_count++;
4252 gcc_assert (new_param_count > 0);
4254 if (optimize_function_for_size_p (cfun))
4255 parm_size_limit = cur_parm_size;
4256 else
4257 parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
4258 * cur_parm_size);
4260 if (total_size < agg_size
4261 && total_size <= parm_size_limit)
4263 if (dump_file)
4264 fprintf (dump_file, " ....will be split into %i components\n",
4265 new_param_count);
4266 return new_param_count;
4268 else
4269 return 0;
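/* Illustration of a reduction this function typically approves (an
   editorial sketch; the clone below is hypothetical, and for by-reference
   parameters the size budget is scaled by the ipa-sra-ptr-growth-factor
   parameter referenced above):  */

struct pt { int x, y, z, w; };

static int dist2 (struct pt p)
{
  return p.x * p.x + p.y * p.y;	/* only two of four fields used */
}

/* ...may in effect become: */

static int dist2_isra (int x, int y)
{
  return x * x + y * y;
}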
4272 /* The order of the following enumerators is important; we need to do extra
4273 work for UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES. */
4274 enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
4275 MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
4277 /* Identify representatives of all accesses to all candidate parameters for
4278 IPA-SRA. Return result based on what representatives have been found. */
4280 static enum ipa_splicing_result
4281 splice_all_param_accesses (vec<access_p> &representatives)
4283 enum ipa_splicing_result result = NO_GOOD_ACCESS;
4284 tree parm;
4285 struct access *repr;
4287 representatives.create (func_param_count);
4289 for (parm = DECL_ARGUMENTS (current_function_decl);
4290 parm;
4291 parm = DECL_CHAIN (parm))
4293 if (is_unused_scalar_param (parm))
4295 representatives.quick_push (&no_accesses_representant);
4296 if (result == NO_GOOD_ACCESS)
4297 result = UNUSED_PARAMS;
4299 else if (POINTER_TYPE_P (TREE_TYPE (parm))
4300 && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
4301 && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4303 repr = unmodified_by_ref_scalar_representative (parm);
4304 representatives.quick_push (repr);
4305 if (repr)
4306 result = UNMODIF_BY_REF_ACCESSES;
4308 else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4310 bool ro_grp = false;
4311 repr = splice_param_accesses (parm, &ro_grp);
4312 representatives.quick_push (repr);
4314 if (repr && !no_accesses_p (repr))
4316 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4318 if (ro_grp)
4319 result = UNMODIF_BY_REF_ACCESSES;
4320 else if (result < MODIF_BY_REF_ACCESSES)
4321 result = MODIF_BY_REF_ACCESSES;
4323 else if (result < BY_VAL_ACCESSES)
4324 result = BY_VAL_ACCESSES;
4326 else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
4327 result = UNUSED_PARAMS;
4329 else
4330 representatives.quick_push (NULL);
4333 if (result == NO_GOOD_ACCESS)
4335 representatives.release ();
4336 return NO_GOOD_ACCESS;
4339 return result;
4342 /* Return the index of BASE in PARMS. Abort if it is not found. */
4344 static inline int
4345 get_param_index (tree base, vec<tree> parms)
4347 int i, len;
4349 len = parms.length ();
4350 for (i = 0; i < len; i++)
4351 if (parms[i] == base)
4352 return i;
4353 gcc_unreachable ();
4356 /* Convert the decisions made at the representative level into compact
4357 parameter adjustments. REPRESENTATIVES are pointers to the first
4358 representatives of each parameter's accesses, ADJUSTMENTS_COUNT is the
4359 expected final number of adjustments. */
4361 static ipa_parm_adjustment_vec
4362 turn_representatives_into_adjustments (vec<access_p> representatives,
4363 int adjustments_count)
4365 vec<tree> parms;
4366 ipa_parm_adjustment_vec adjustments;
4367 tree parm;
4368 int i;
4370 gcc_assert (adjustments_count > 0);
4371 parms = ipa_get_vector_of_formal_parms (current_function_decl);
4372 adjustments.create (adjustments_count);
4373 parm = DECL_ARGUMENTS (current_function_decl);
4374 for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
4376 struct access *repr = representatives[i];
4378 if (!repr || no_accesses_p (repr))
4380 struct ipa_parm_adjustment adj;
4382 memset (&adj, 0, sizeof (adj));
4383 adj.base_index = get_param_index (parm, parms);
4384 adj.base = parm;
4385 if (!repr)
4386 adj.op = IPA_PARM_OP_COPY;
4387 else
4388 adj.op = IPA_PARM_OP_REMOVE;
4389 adj.arg_prefix = "ISRA";
4390 adjustments.quick_push (adj);
4392 else
4394 struct ipa_parm_adjustment adj;
4395 int index = get_param_index (parm, parms);
4397 for (; repr; repr = repr->next_grp)
4399 memset (&adj, 0, sizeof (adj));
4400 gcc_assert (repr->base == parm);
4401 adj.base_index = index;
4402 adj.base = repr->base;
4403 adj.type = repr->type;
4404 adj.alias_ptr_type = reference_alias_ptr_type (repr->expr);
4405 adj.offset = repr->offset;
4406 adj.by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
4407 && (repr->grp_maybe_modified
4408 || repr->grp_not_necessarilly_dereferenced));
4409 adj.arg_prefix = "ISRA";
4410 adjustments.quick_push (adj);
4414 parms.release ();
4415 return adjustments;
4418 /* Analyze the collected accesses and produce a plan of what to do with the
4419 parameters in the form of adjustments; an empty vector means no changes. */
4421 static ipa_parm_adjustment_vec
4422 analyze_all_param_acesses (void)
4424 enum ipa_splicing_result repr_state;
4425 bool proceed = false;
4426 int i, adjustments_count = 0;
4427 vec<access_p> representatives;
4428 ipa_parm_adjustment_vec adjustments;
4430 repr_state = splice_all_param_accesses (representatives);
4431 if (repr_state == NO_GOOD_ACCESS)
4432 return ipa_parm_adjustment_vec ();
4434 /* If there are any parameters passed by reference which are not modified
4435 directly, we need to check whether they can be modified indirectly. */
4436 if (repr_state == UNMODIF_BY_REF_ACCESSES)
4438 analyze_caller_dereference_legality (representatives);
4439 analyze_modified_params (representatives);
4442 for (i = 0; i < func_param_count; i++)
4444 struct access *repr = representatives[i];
4446 if (repr && !no_accesses_p (repr))
4448 if (repr->grp_scalar_ptr)
4450 adjustments_count++;
4451 if (repr->grp_not_necessarilly_dereferenced
4452 || repr->grp_maybe_modified)
4453 representatives[i] = NULL;
4454 else
4456 proceed = true;
4457 sra_stats.scalar_by_ref_to_by_val++;
4460 else
4462 int new_components = decide_one_param_reduction (repr);
4464 if (new_components == 0)
4466 representatives[i] = NULL;
4467 adjustments_count++;
4469 else
4471 adjustments_count += new_components;
4472 sra_stats.aggregate_params_reduced++;
4473 sra_stats.param_reductions_created += new_components;
4474 proceed = true;
4478 else
4480 if (no_accesses_p (repr))
4482 proceed = true;
4483 sra_stats.deleted_unused_parameters++;
4485 adjustments_count++;
4489 if (!proceed && dump_file)
4490 fprintf (dump_file, "NOT proceeding to change params.\n");
4492 if (proceed)
4493 adjustments = turn_representatives_into_adjustments (representatives,
4494 adjustments_count);
4495 else
4496 adjustments = ipa_parm_adjustment_vec ();
4498 representatives.release ();
4499 return adjustments;
4502 /* If a parameter replacement identified by ADJ does not yet exist in the form
4503 of a declaration, create and record it; otherwise return the previously
4504 created one. */
4506 static tree
4507 get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
4509 tree repl;
4510 if (!adj->new_ssa_base)
4512 char *pretty_name = make_fancy_name (adj->base);
4514 repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
4515 DECL_NAME (repl) = get_identifier (pretty_name);
4516 obstack_free (&name_obstack, pretty_name);
4518 adj->new_ssa_base = repl;
4520 else
4521 repl = adj->new_ssa_base;
4522 return repl;
4525 /* Find the first adjustment for a particular parameter BASE in a vector of
4526 ADJUSTMENTS which is not a plain copy (IPA_PARM_OP_COPY). Return NULL if
4527 there is no such adjustment. */
4529 static struct ipa_parm_adjustment *
4530 get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
4532 int i, len;
4534 len = adjustments.length ();
4535 for (i = 0; i < len; i++)
4537 struct ipa_parm_adjustment *adj;
4539 adj = &adjustments[i];
4540 if (adj->op != IPA_PARM_OP_COPY && adj->base == base)
4541 return adj;
4544 return NULL;
4547 /* If the statement STMT defines an SSA_NAME of a parameter which is to be
4548 removed because its value is not used, replace the SSA_NAME, together with
4549 all of its uses, with one based on a newly created VAR_DECL, and return
4550 true. ADJUSTMENTS is the vector of adjustments. */
4552 static bool
4553 replace_removed_params_ssa_names (gimple stmt,
4554 ipa_parm_adjustment_vec adjustments)
4556 struct ipa_parm_adjustment *adj;
4557 tree lhs, decl, repl, name;
4559 if (gimple_code (stmt) == GIMPLE_PHI)
4560 lhs = gimple_phi_result (stmt);
4561 else if (is_gimple_assign (stmt))
4562 lhs = gimple_assign_lhs (stmt);
4563 else if (is_gimple_call (stmt))
4564 lhs = gimple_call_lhs (stmt);
4565 else
4566 gcc_unreachable ();
4568 if (TREE_CODE (lhs) != SSA_NAME)
4569 return false;
4571 decl = SSA_NAME_VAR (lhs);
4572 if (decl == NULL_TREE
4573 || TREE_CODE (decl) != PARM_DECL)
4574 return false;
4576 adj = get_adjustment_for_base (adjustments, decl);
4577 if (!adj)
4578 return false;
4580 repl = get_replaced_param_substitute (adj);
4581 name = make_ssa_name (repl, stmt);
4583 if (dump_file)
4585 fprintf (dump_file, "replacing an SSA name of a removed param ");
4586 print_generic_expr (dump_file, lhs, 0);
4587 fprintf (dump_file, " with ");
4588 print_generic_expr (dump_file, name, 0);
4589 fprintf (dump_file, "\n");
4592 if (is_gimple_assign (stmt))
4593 gimple_assign_set_lhs (stmt, name);
4594 else if (is_gimple_call (stmt))
4595 gimple_call_set_lhs (stmt, name);
4596 else
4597 gimple_phi_set_result (as_a <gphi *> (stmt), name);
4599 replace_uses_by (lhs, name);
4600 release_ssa_name (lhs);
4601 return true;
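/* Illustration (not part of tree-sra.c): the incoming value of N below is
   never used, so the parameter can be removed; the SSA name defined by
   the assignment still refers to the removed PARM_DECL, though, and is
   the kind the function above moves onto a replacement "ISR" variable.  */

static int clobbers_param (int n)
{
  n = 10;		/* incoming value of N never used */
  return n * 2;
}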
4604 /* If the statement STMT contains any expressions that need to be replaced
4605 with different ones as noted by ADJUSTMENTS, do so. Handle any potential type
4606 incompatibilities (GSI is used to accommodate conversion statements and must
4607 point to the statement). Return true iff the statement was modified. */
4609 static bool
4610 sra_ipa_modify_assign (gimple stmt, gimple_stmt_iterator *gsi,
4611 ipa_parm_adjustment_vec adjustments)
4613 tree *lhs_p, *rhs_p;
4614 bool any;
4616 if (!gimple_assign_single_p (stmt))
4617 return false;
4619 rhs_p = gimple_assign_rhs1_ptr (stmt);
4620 lhs_p = gimple_assign_lhs_ptr (stmt);
4622 any = ipa_modify_expr (rhs_p, false, adjustments);
4623 any |= ipa_modify_expr (lhs_p, false, adjustments);
4624 if (any)
4626 tree new_rhs = NULL_TREE;
4628 if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
4630 if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
4632 /* V_C_Es of constructors can cause trouble (PR 42714). */
4633 if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
4634 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
4635 else
4636 *rhs_p = build_constructor (TREE_TYPE (*lhs_p),
4637 NULL);
4639 else
4640 new_rhs = fold_build1_loc (gimple_location (stmt),
4641 VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
4642 *rhs_p);
4644 else if (REFERENCE_CLASS_P (*rhs_p)
4645 && is_gimple_reg_type (TREE_TYPE (*lhs_p))
4646 && !is_gimple_reg (*lhs_p))
4647 /* This can happen when an assignment in between two single field
4648 structures is turned into an assignment in between two pointers to
4649 scalars (PR 42237). */
4650 new_rhs = *rhs_p;
4652 if (new_rhs)
4654 tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
4655 true, GSI_SAME_STMT);
4657 gimple_assign_set_rhs_from_tree (gsi, tmp);
4660 return true;
4663 return false;
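/* Illustration (not part of tree-sra.c) of the PR 42237 shape handled by
   the REFERENCE_CLASS_P branch above: once both sides are scalarized, an
   aggregate copy between single-field structures degenerates into an
   assignment between scalars.  */

struct wrap { float f; };

static void copy_wrap (struct wrap *dst, struct wrap *src)
{
  *dst = *src;		/* may become a plain float copy after IPA-SRA */
}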
4666 /* Traverse the function body and perform all modifications as described in
4667 ADJUSTMENTS. Return true iff the CFG has been changed. */
4669 bool
4670 ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
4672 bool cfg_changed = false;
4673 basic_block bb;
4675 FOR_EACH_BB_FN (bb, cfun)
4677 gimple_stmt_iterator gsi;
4679 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4680 replace_removed_params_ssa_names (gsi_stmt (gsi), adjustments);
4682 gsi = gsi_start_bb (bb);
4683 while (!gsi_end_p (gsi))
4685 gimple stmt = gsi_stmt (gsi);
4686 bool modified = false;
4687 tree *t;
4688 unsigned i;
4690 switch (gimple_code (stmt))
4692 case GIMPLE_RETURN:
4693 t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
4694 if (*t != NULL_TREE)
4695 modified |= ipa_modify_expr (t, true, adjustments);
4696 break;
4698 case GIMPLE_ASSIGN:
4699 modified |= sra_ipa_modify_assign (stmt, &gsi, adjustments);
4700 modified |= replace_removed_params_ssa_names (stmt, adjustments);
4701 break;
4703 case GIMPLE_CALL:
4704 /* Operands must be processed before the lhs. */
4705 for (i = 0; i < gimple_call_num_args (stmt); i++)
4707 t = gimple_call_arg_ptr (stmt, i);
4708 modified |= ipa_modify_expr (t, true, adjustments);
4711 if (gimple_call_lhs (stmt))
4713 t = gimple_call_lhs_ptr (stmt);
4714 modified |= ipa_modify_expr (t, false, adjustments);
4715 modified |= replace_removed_params_ssa_names (stmt,
4716 adjustments);
4718 break;
4720 case GIMPLE_ASM:
4722 gasm *asm_stmt = as_a <gasm *> (stmt);
4723 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
4725 t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
4726 modified |= ipa_modify_expr (t, true, adjustments);
4728 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
4730 t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
4731 modified |= ipa_modify_expr (t, false, adjustments);
4734 break;
4736 default:
4737 break;
4740 if (modified)
4742 update_stmt (stmt);
4743 if (maybe_clean_eh_stmt (stmt)
4744 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4745 cfg_changed = true;
4747 gsi_next (&gsi);
4751 return cfg_changed;
4754 /* Call gimple_debug_bind_reset_value on all debug statements describing
4755 gimple register parameters that are being removed or replaced. */
4757 static void
4758 sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
4760 int i, len;
4761 gimple_stmt_iterator *gsip = NULL, gsi;
4763 if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
4765 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
4766 gsip = &gsi;
4768 len = adjustments.length ();
4769 for (i = 0; i < len; i++)
4771 struct ipa_parm_adjustment *adj;
4772 imm_use_iterator ui;
4773 gimple stmt;
4774 gdebug *def_temp;
4775 tree name, vexpr, copy = NULL_TREE;
4776 use_operand_p use_p;
4778 adj = &adjustments[i];
4779 if (adj->op == IPA_PARM_OP_COPY || !is_gimple_reg (adj->base))
4780 continue;
4781 name = ssa_default_def (cfun, adj->base);
4782 vexpr = NULL;
4783 if (name)
4784 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
4786 if (gimple_clobber_p (stmt))
4788 gimple_stmt_iterator cgsi = gsi_for_stmt (stmt);
4789 unlink_stmt_vdef (stmt);
4790 gsi_remove (&cgsi, true);
4791 release_defs (stmt);
4792 continue;
4794 /* All other users must have been removed by
4795 ipa_sra_modify_function_body. */
4796 gcc_assert (is_gimple_debug (stmt));
4797 if (vexpr == NULL && gsip != NULL)
4799 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4800 vexpr = make_node (DEBUG_EXPR_DECL);
4801 def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
4802 NULL);
4803 DECL_ARTIFICIAL (vexpr) = 1;
4804 TREE_TYPE (vexpr) = TREE_TYPE (name);
4805 DECL_MODE (vexpr) = DECL_MODE (adj->base);
4806 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
4808 if (vexpr)
4810 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
4811 SET_USE (use_p, vexpr);
4813 else
4814 gimple_debug_bind_reset_value (stmt);
4815 update_stmt (stmt);
4817 /* Create a VAR_DECL for debug info purposes. */
4818 if (!DECL_IGNORED_P (adj->base))
4820 copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
4821 VAR_DECL, DECL_NAME (adj->base),
4822 TREE_TYPE (adj->base));
4823 if (DECL_PT_UID_SET_P (adj->base))
4824 SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
4825 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
4826 TREE_READONLY (copy) = TREE_READONLY (adj->base);
4827 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
4828 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
4829 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
4830 DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
4831 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
4832 DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
4833 SET_DECL_RTL (copy, 0);
4834 TREE_USED (copy) = 1;
4835 DECL_CONTEXT (copy) = current_function_decl;
4836 add_local_decl (cfun, copy);
4837 DECL_CHAIN (copy) =
4838 BLOCK_VARS (DECL_INITIAL (current_function_decl));
4839 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
4841 if (gsip != NULL && copy && target_for_debug_bind (adj->base))
4843 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4844 if (vexpr)
4845 def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
4846 else
4847 def_temp = gimple_build_debug_source_bind (copy, adj->base,
4848 NULL);
4849 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
4854 /* Return false if all callers have at least as many actual arguments as there
4855 are formal parameters in the current function and their types match;
4856 return true otherwise. */
4858 static bool
4859 some_callers_have_mismatched_arguments_p (struct cgraph_node *node,
4860 void *data ATTRIBUTE_UNUSED)
4862 struct cgraph_edge *cs;
4863 for (cs = node->callers; cs; cs = cs->next_caller)
4864 if (!cs->call_stmt || !callsite_arguments_match_p (cs->call_stmt))
4865 return true;
4867 return false;
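/* Illustration (not part of tree-sra.c): a call through an unprototyped
   declaration lets the argument list disagree with the definition
   (undefined behavior at run time, but it compiles), which makes this
   predicate return true and vetoes IPA-SRA for CALLEE.  */

static int callee ();		/* no prototype */

static int trigger (void)
{
  return callee (1);		/* one argument... */
}

static int callee (int a, int b)
{
  return a + b;			/* ...two parameters */
}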
4870 /* Return false if all callers have a vuse attached to their call statements, true otherwise. */
4872 static bool
4873 some_callers_have_no_vuse_p (struct cgraph_node *node,
4874 void *data ATTRIBUTE_UNUSED)
4876 struct cgraph_edge *cs;
4877 for (cs = node->callers; cs; cs = cs->next_caller)
4878 if (!cs->call_stmt || !gimple_vuse (cs->call_stmt))
4879 return true;
4881 return false;
4884 /* Convert all callers of NODE. */
4886 static bool
4887 convert_callers_for_node (struct cgraph_node *node,
4888 void *data)
4890 ipa_parm_adjustment_vec *adjustments = (ipa_parm_adjustment_vec *) data;
4891 bitmap recomputed_callers = BITMAP_ALLOC (NULL);
4892 struct cgraph_edge *cs;
4894 for (cs = node->callers; cs; cs = cs->next_caller)
4896 push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));
4898 if (dump_file)
4899 fprintf (dump_file, "Adjusting call %s/%i -> %s/%i\n",
4900 xstrdup (cs->caller->name ()),
4901 cs->caller->order,
4902 xstrdup (cs->callee->name ()),
4903 cs->callee->order);
4905 ipa_modify_call_arguments (cs, cs->call_stmt, *adjustments);
4907 pop_cfun ();
4910 for (cs = node->callers; cs; cs = cs->next_caller)
4911 if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
4912 && gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->decl)))
4913 compute_inline_parameters (cs->caller, true);
4914 BITMAP_FREE (recomputed_callers);
4916 return true;
4919 /* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS. */
4921 static void
4922 convert_callers (struct cgraph_node *node, tree old_decl,
4923 ipa_parm_adjustment_vec adjustments)
4925 basic_block this_block;
4927 node->call_for_symbol_and_aliases (convert_callers_for_node,
4928 &adjustments, false);
4930 if (!encountered_recursive_call)
4931 return;
4933 FOR_EACH_BB_FN (this_block, cfun)
4935 gimple_stmt_iterator gsi;
4937 for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
4939 gcall *stmt;
4940 tree call_fndecl;
4941 stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
4942 if (!stmt)
4943 continue;
4944 call_fndecl = gimple_call_fndecl (stmt);
4945 if (call_fndecl == old_decl)
4947 if (dump_file)
4948 fprintf (dump_file, "Adjusting recursive call");
4949 gimple_call_set_fndecl (stmt, node->decl);
4950 ipa_modify_call_arguments (NULL, stmt, adjustments);
4955 return;
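/* Illustration (not part of tree-sra.c): a self-recursive candidate.
   After cloning, the recursive call in the clone's own body still names
   the old decl, which is why the loop above patches the callee and its
   arguments explicitly.  */

static int fact (int unused, int n)
{
  if (n <= 1)
    return 1;
  return n * fact (unused, n - 1);	/* recursive call to be adjusted */
}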
4958 /* Perform all the modifications required in IPA-SRA for NODE to have parameters
4959 as given in ADJUSTMENTS. Return true iff the CFG has been changed. */
4961 static bool
4962 modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
4964 struct cgraph_node *new_node;
4965 bool cfg_changed;
4967 cgraph_edge::rebuild_edges ();
4968 free_dominance_info (CDI_DOMINATORS);
4969 pop_cfun ();
4971 /* This must be done after rebuilding cgraph edges for node above.
4972 Otherwise any recursive calls to node that are recorded in
4973 redirect_callers will be corrupted. */
4974 vec<cgraph_edge *> redirect_callers = node->collect_callers ();
4975 new_node = node->create_version_clone_with_body (redirect_callers, NULL,
4976 NULL, false, NULL, NULL,
4977 "isra");
4978 redirect_callers.release ();
4980 push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));
4981 ipa_modify_formal_parameters (current_function_decl, adjustments);
4982 cfg_changed = ipa_sra_modify_function_body (adjustments);
4983 sra_ipa_reset_debug_stmts (adjustments);
4984 convert_callers (new_node, node->decl, adjustments);
4985 new_node->make_local ();
4986 return cfg_changed;
4989 /* Means of communication between ipa_sra_check_caller and
4990 ipa_sra_preliminary_function_checks. */
4992 struct ipa_sra_check_caller_data
4994 bool has_callers;
4995 bool bad_arg_alignment;
4996 bool has_thunk;
4999 /* If NODE has a caller, mark that fact in DATA, which is a pointer to
5000 ipa_sra_check_caller_data. Also check whether all aggregate arguments in
5001 all known calls are unit aligned and, if not, set the appropriate flag in
5002 DATA too. */
5004 static bool
5005 ipa_sra_check_caller (struct cgraph_node *node, void *data)
5007 if (!node->callers)
5008 return false;
5010 struct ipa_sra_check_caller_data *iscc;
5011 iscc = (struct ipa_sra_check_caller_data *) data;
5012 iscc->has_callers = true;
5014 for (cgraph_edge *cs = node->callers; cs; cs = cs->next_caller)
5016 if (cs->caller->thunk.thunk_p)
5018 iscc->has_thunk = true;
5019 return true;
5021 gimple call_stmt = cs->call_stmt;
5022 unsigned count = gimple_call_num_args (call_stmt);
5023 for (unsigned i = 0; i < count; i++)
5025 tree arg = gimple_call_arg (call_stmt, i);
5026 if (is_gimple_reg (arg))
5027 continue;
5029 tree offset;
5030 HOST_WIDE_INT bitsize, bitpos;
5031 machine_mode mode;
5032 int unsignedp, volatilep = 0;
5033 get_inner_reference (arg, &bitsize, &bitpos, &offset, &mode,
5034 &unsignedp, &volatilep, false);
5035 if (bitpos % BITS_PER_UNIT)
5037 iscc->bad_arg_alignment = true;
5038 return true;
5043 return false;
5046 /* Return false if the function is apparently unsuitable for IPA-SRA based on
5047 its attributes; return true otherwise. NODE is the cgraph node of the current
5048 function. */
5050 static bool
5051 ipa_sra_preliminary_function_checks (struct cgraph_node *node)
5053 if (!node->can_be_local_p ())
5055 if (dump_file)
5056 fprintf (dump_file, "Function not local to this compilation unit.\n");
5057 return false;
5060 if (!node->local.can_change_signature)
5062 if (dump_file)
5063 fprintf (dump_file, "Function can not change signature.\n");
5064 return false;
5067 if (!tree_versionable_function_p (node->decl))
5069 if (dump_file)
5070 fprintf (dump_file, "Function is not versionable.\n");
5071 return false;
5074 if (!opt_for_fn (node->decl, optimize)
5075 || !opt_for_fn (node->decl, flag_ipa_sra))
5077 if (dump_file)
5078 fprintf (dump_file, "Function not optimized.\n");
5079 return false;
5082 if (DECL_VIRTUAL_P (current_function_decl))
5084 if (dump_file)
5085 fprintf (dump_file, "Function is a virtual method.\n");
5086 return false;
5089 if ((DECL_ONE_ONLY (node->decl) || DECL_EXTERNAL (node->decl))
5090 && inline_summaries->get (node)->size >= MAX_INLINE_INSNS_AUTO)
5092 if (dump_file)
5093 fprintf (dump_file, "Function too big to be made truly local.\n");
5094 return false;
5097 if (cfun->stdarg)
5099 if (dump_file)
5100 fprintf (dump_file, "Function uses stdarg.\n");
5101 return false;
5104 if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
5105 return false;
5107 if (DECL_DISREGARD_INLINE_LIMITS (node->decl))
5109 if (dump_file)
5110 fprintf (dump_file, "Always inline function will be inlined "
5111 "anyway. \n");
5112 return false;
5115 struct ipa_sra_check_caller_data iscc;
5116 memset (&iscc, 0, sizeof(iscc));
5117 node->call_for_symbol_and_aliases (ipa_sra_check_caller, &iscc, true);
5118 if (!iscc.has_callers)
5120 if (dump_file)
5121 fprintf (dump_file,
5122 "Function has no callers in this compilation unit.\n");
5123 return false;
5126 if (iscc.bad_arg_alignment)
5128 if (dump_file)
5129 fprintf (dump_file,
5130 "A function call has an argument with non-unit alignment.\n");
5131 return false;
5134 if (iscc.has_thunk)
5136 if (dump_file)
5137 fprintf (dump_file,
5138 "A has thunk.\n");
5139 return false;
5142 return true;
5145 /* Perform early interprocedural SRA. */
5147 static unsigned int
5148 ipa_early_sra (void)
5150 struct cgraph_node *node = cgraph_node::get (current_function_decl);
5151 ipa_parm_adjustment_vec adjustments;
5152 int ret = 0;
5154 if (!ipa_sra_preliminary_function_checks (node))
5155 return 0;
5157 sra_initialize ();
5158 sra_mode = SRA_MODE_EARLY_IPA;
5160 if (!find_param_candidates ())
5162 if (dump_file)
5163 fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
5164 goto simple_out;
5167 if (node->call_for_symbol_and_aliases
5168 (some_callers_have_mismatched_arguments_p, NULL, true))
5170 if (dump_file)
5171 fprintf (dump_file, "There are callers with insufficient number of "
5172 "arguments or arguments with type mismatches.\n");
5173 goto simple_out;
5176 if (node->call_for_symbol_and_aliases
5177 (some_callers_have_no_vuse_p, NULL, true))
5179 if (dump_file)
5180 fprintf (dump_file, "There are callers with no VUSE attached "
5181 "to a call stmt.\n");
5182 goto simple_out;
5185 bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
5186 func_param_count
5187 * last_basic_block_for_fn (cfun));
5188 final_bbs = BITMAP_ALLOC (NULL);
5190 scan_function ();
5191 if (encountered_apply_args)
5193 if (dump_file)
5194 fprintf (dump_file, "Function calls __builtin_apply_args().\n");
5195 goto out;
5198 if (encountered_unchangable_recursive_call)
5200 if (dump_file)
5201 fprintf (dump_file, "Function calls itself with insufficient "
5202 "number of arguments.\n");
5203 goto out;
5206 adjustments = analyze_all_param_acesses ();
5207 if (!adjustments.exists ())
5208 goto out;
5209 if (dump_file)
5210 ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);
5212 if (modify_function (node, adjustments))
5213 ret = TODO_update_ssa | TODO_cleanup_cfg;
5214 else
5215 ret = TODO_update_ssa;
5216 adjustments.release ();
5218 statistics_counter_event (cfun, "Unused parameters deleted",
5219 sra_stats.deleted_unused_parameters);
5220 statistics_counter_event (cfun, "Scalar parameters converted to by-value",
5221 sra_stats.scalar_by_ref_to_by_val);
5222 statistics_counter_event (cfun, "Aggregate parameters broken up",
5223 sra_stats.aggregate_params_reduced);
5224 statistics_counter_event (cfun, "Aggregate parameter components created",
5225 sra_stats.param_reductions_created);
5227 out:
5228 BITMAP_FREE (final_bbs);
5229 free (bb_dereferences);
5230 simple_out:
5231 sra_deinitialize ();
5232 return ret;
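/* An editorial example of exercising this pass, written in the style of
   the gcc.dg/ipa testsuite (the dg-options directive shows the usual
   flags; "eipa_sra" is the pass name registered below, and no exact test
   file is implied).  */

/* { dg-options "-O2 -fipa-sra -fdump-tree-eipa_sra-details" } */

struct bovid { float red; int green; void *blue; };

static int __attribute__ ((noinline))
ox (struct bovid cow)
{
  return cow.green + (int) cow.red;	/* two of three fields used */
}

int main (void)
{
  struct bovid c = { 7.4f, 6, 0 };
  return ox (c) != 13;	/* the eipa_sra dump shows the split parameters */
}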
5235 namespace {
5237 const pass_data pass_data_early_ipa_sra =
5239 GIMPLE_PASS, /* type */
5240 "eipa_sra", /* name */
5241 OPTGROUP_NONE, /* optinfo_flags */
5242 TV_IPA_SRA, /* tv_id */
5243 0, /* properties_required */
5244 0, /* properties_provided */
5245 0, /* properties_destroyed */
5246 0, /* todo_flags_start */
5247 TODO_dump_symtab, /* todo_flags_finish */
5250 class pass_early_ipa_sra : public gimple_opt_pass
5252 public:
5253 pass_early_ipa_sra (gcc::context *ctxt)
5254 : gimple_opt_pass (pass_data_early_ipa_sra, ctxt)
5257 /* opt_pass methods: */
5258 virtual bool gate (function *) { return flag_ipa_sra && dbg_cnt (eipa_sra); }
5259 virtual unsigned int execute (function *) { return ipa_early_sra (); }
5261 }; // class pass_early_ipa_sra
5263 } // anon namespace
5265 gimple_opt_pass *
5266 make_pass_early_ipa_sra (gcc::context *ctxt)
5268 return new pass_early_ipa_sra (ctxt);