[Patch SRA] Fix PR66119 by calling get_move_ratio in SRA
[official-gcc.git] / gcc / tree-sra.c
/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2008-2015 Free Software Foundation, Inc.
   Contributed by Martin Jambor <mjambor@suse.cz>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file implements Scalar Replacement of Aggregates (SRA).  SRA is run
   twice, once in the early stages of compilation (early SRA) and once in the
   late stages (late SRA).  The aim of both is to turn references to scalar
   parts of aggregates into uses of independent scalar variables.

   The two passes are nearly identical, the only difference being that early
   SRA does not scalarize unions which are used as the result in a
   GIMPLE_RETURN statement because together with inlining this can lead to
   weird type conversions.

   Both passes operate in four stages:

   1. The declarations that have properties which make them candidates for
      scalarization are identified in function find_var_candidates().  The
      candidates are stored in candidate_bitmap.

   2. The function body is scanned.  In the process, declarations which are
      used in a manner that prevents their scalarization are removed from the
      candidate bitmap.  More importantly, for every access into an aggregate,
      an access structure (struct access) is created by create_access() and
      stored in a vector associated with the aggregate.  Among other
      information, the aggregate declaration, the offset and size of the
      access and its type are stored in the structure.

      On a related note, assign_link structures are created for every assign
      statement between candidate aggregates and attached to the related
      accesses.

   3. The vectors of accesses are analyzed.  They are first sorted according
      to their offset and size and then scanned for partially overlapping
      accesses (i.e. those which overlap but one is not entirely within
      another).  Such an access disqualifies the whole aggregate from being
      scalarized.

      If there is no such inhibiting overlap, a representative access
      structure is chosen for every unique combination of offset and size.
      Afterwards, the pass builds a set of trees from these structures, in
      which children of an access are within their parent (in terms of offset
      and size).

      Then accesses are propagated whenever possible (i.e. in cases when it
      does not create a partially overlapping access) across assign_links from
      the right hand side to the left hand side.

      Then the set of trees for each declaration is traversed again and those
      accesses which should be replaced by a scalar are identified.

   4. The function is traversed again, and for every reference into an
      aggregate that has some component which is about to be scalarized,
      statements are amended and new statements are created as necessary.
      Finally, if a parameter got scalarized, the scalar replacements are
      initialized with values from respective parameter aggregates.  */
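
/* As a minimal sketch of the intended effect (an illustration, not a
   testcase from the tree), consider

     struct pair { int a; int b; };

     int f (int x, int y)
     {
       struct pair p;
       p.a = x;
       p.b = y;
       return p.a + p.b;
     }

   Assuming a 32-bit int, stage 2 records write accesses at offsets 0 and 32
   (each of size 32) and matching read accesses, stage 3 finds no partial
   overlaps and marks both regions for replacement, and stage 4 rewrites the
   body to the equivalent of

     int f (int x, int y)
     {
       int p$a = x;
       int p$b = y;
       return p$a + p$b;
     }

   after which "p" itself is dead and the scalar optimizers take over.  */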
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "alloc-pool.h"
#include "tm.h"
#include "alias.h"
#include "symtab.h"
#include "tree.h"
#include "fold-const.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "gimple.h"
#include "stor-layout.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "bitmap.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "rtl.h"
#include "flags.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-pass.h"
#include "cgraph.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "params.h"
#include "target.h"
#include "dbgcnt.h"
#include "tree-inline.h"
#include "gimple-pretty-print.h"
#include "ipa-inline.h"
#include "ipa-utils.h"
#include "builtins.h"

/* Enumeration of all aggregate reductions we can do.  */
enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
		SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
		SRA_MODE_INTRA };     /* late intraprocedural SRA */

/* Global variable describing which aggregate reduction we are performing at
   the moment.  */
static enum sra_mode sra_mode;

struct assign_link;
/* ACCESS represents each access to an aggregate variable (as a whole or a
   part).  It can also represent a group of accesses that refer to exactly the
   same fragment of an aggregate (i.e. those that have exactly the same offset
   and size).  Such representatives for a single aggregate, once determined,
   are linked in a linked list and have the group fields set.

   Moreover, when doing intraprocedural SRA, a tree is built from those
   representatives (by the means of first_child and next_sibling pointers), in
   which all items in a subtree are "within" the root, i.e. their offset is
   greater or equal to the offset of the root and offset+size is smaller or
   equal to offset+size of the root.  Children of an access are sorted by
   offset.

   Note that accesses to parts of vector and complex number types are always
   represented by an access to the whole complex number or a vector.  It is a
   duty of the modifying functions to replace them appropriately.  */
struct access
{
  /* Values returned by `get_ref_base_and_extent' for each component
     reference.  If EXPR isn't a component reference just set `BASE = EXPR',
     `OFFSET = 0', `SIZE = TREE_SIZE (TREE_TYPE (expr))'.  */
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
  tree base;

  /* Expression.  It is context dependent so do not use it to create new
     expressions to access the original aggregate.  See PR 42154 for a
     testcase.  */
  tree expr;
  /* Type.  */
  tree type;

  /* The statement this access belongs to.  */
  gimple stmt;

  /* Next group representative for this aggregate.  */
  struct access *next_grp;

  /* Pointer to the group representative.  Pointer to itself if the struct is
     the representative.  */
  struct access *group_representative;

  /* If this access has any children (in terms of the definition above), this
     points to the first one.  */
  struct access *first_child;

  /* In intraprocedural SRA, pointer to the next sibling in the access tree as
     described above.  In IPA-SRA this is a pointer to the next access
     belonging to the same group (having the same representative).  */
  struct access *next_sibling;

  /* Pointers to the first and last element in the linked list of assign
     links.  */
  struct assign_link *first_link, *last_link;

  /* Pointer to the next access in the work queue.  */
  struct access *next_queued;

  /* Replacement variable for this access "region."  Never to be accessed
     directly, always only by the means of get_access_replacement() and only
     when the grp_to_be_replaced flag is set.  */
  tree replacement_decl;

  /* Is this particular access a write access?  */
  unsigned write : 1;

  /* Is this access an access to a non-addressable field?  */
  unsigned non_addressable : 1;

  /* Is this access currently in the work queue?  */
  unsigned grp_queued : 1;

  /* Does this group contain a write access?  This flag is propagated down the
     access tree.  */
  unsigned grp_write : 1;

  /* Does this group contain a read access?  This flag is propagated down the
     access tree.  */
  unsigned grp_read : 1;

  /* Does this group contain a read access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_read : 1;

  /* Does this group contain a write access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_write : 1;

  /* Does this group contain a read access through a scalar type?  This flag
     is not propagated in the access tree in any direction.  */
  unsigned grp_scalar_read : 1;

  /* Does this group contain a write access through a scalar type?  This flag
     is not propagated in the access tree in any direction.  */
  unsigned grp_scalar_write : 1;

  /* Is this access an artificial one created to scalarize some record
     entirely?  */
  unsigned grp_total_scalarization : 1;

  /* Other passes of the analysis use this bit to make function
     analyze_access_subtree create scalar replacements for this group if
     possible.  */
  unsigned grp_hint : 1;

  /* Is the subtree rooted in this access fully covered by scalar
     replacements?  */
  unsigned grp_covered : 1;

  /* If set to true, this access and all below it in an access tree must not
     be scalarized.  */
  unsigned grp_unscalarizable_region : 1;

  /* Whether data have been written to parts of the aggregate covered by this
     access which is not to be scalarized.  This flag is propagated up in the
     access tree.  */
  unsigned grp_unscalarized_data : 1;

  /* Does this access and/or group contain a write access through a
     BIT_FIELD_REF?  */
  unsigned grp_partial_lhs : 1;

  /* Set when a scalar replacement should be created for this variable.  */
  unsigned grp_to_be_replaced : 1;

  /* Set when we want a replacement for the sole purpose of having it in
     generated debug statements.  */
  unsigned grp_to_be_debug_replaced : 1;

  /* Should TREE_NO_WARNING of a replacement be set?  */
  unsigned grp_no_warning : 1;

  /* Is it possible that the group refers to data which might be (directly or
     otherwise) modified?  */
  unsigned grp_maybe_modified : 1;

  /* Set when this is a representative of a pointer to scalar (i.e. by
     reference) parameter which we consider for turning into a plain scalar
     (i.e. a by value parameter).  */
  unsigned grp_scalar_ptr : 1;

  /* Set when we discover that this pointer is not safe to dereference in the
     caller.  */
  unsigned grp_not_necessarilly_dereferenced : 1;

  /* Pool allocation new operator.  */
  inline void *operator new (size_t)
  {
    return pool.allocate ();
  }

  /* Delete operator utilizing pool allocation.  */
  inline void operator delete (void *ptr)
  {
    pool.remove ((access *) ptr);
  }

  /* Memory allocation pool.  */
  static pool_allocator<access> pool;
};
typedef struct access *access_p;


/* Alloc pool for allocating access structures.  */
pool_allocator<struct access> access::pool ("SRA accesses", 16);
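
/* For illustration (again a sketch, not from the sources), given

     struct S { struct { int x; int y; } in; int z; } s;

   with accesses to s.in, s.in.y and s.z, the group representatives form
   (assuming a 32-bit int) one tree per top-level region, linked through
   next_grp at the roots and nested via first_child/next_sibling:

     s.in     offset 0,  size 64
       s.in.y   offset 32, size 32    (first_child of the s.in access)
     s.z      offset 64, size 32     (next_grp root after s.in)

   Every child lies entirely within its parent; an access that only partially
   overlapped another would disqualify "s" from scalarization instead.  */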
/* A structure linking lhs and rhs accesses from an aggregate assignment.  They
   are used to propagate subaccesses from rhs to lhs as long as they don't
   conflict with what is already there.  */
struct assign_link
{
  struct access *lacc, *racc;
  struct assign_link *next;

  /* Pool allocation new operator.  */
  inline void *operator new (size_t)
  {
    return pool.allocate ();
  }

  /* Delete operator utilizing pool allocation.  */
  inline void operator delete (void *ptr)
  {
    pool.remove ((assign_link *) ptr);
  }

  /* Memory allocation pool.  */
  static pool_allocator<assign_link> pool;
};

/* Alloc pool for allocating assign link structures.  */
pool_allocator<assign_link> assign_link::pool ("SRA links", 16);
/* Base (tree) -> Vector (vec<access_p> *) map.  */
static hash_map<tree, auto_vec<access_p> > *base_access_vec;

/* Candidate hash table helpers.  */

struct uid_decl_hasher : nofree_ptr_hash <tree_node>
{
  static inline hashval_t hash (const tree_node *);
  static inline bool equal (const tree_node *, const tree_node *);
};

/* Hash a tree in a uid_decl_map.  */

inline hashval_t
uid_decl_hasher::hash (const tree_node *item)
{
  return item->decl_minimal.uid;
}

/* Return true if the DECL_UID in both trees are equal.  */

inline bool
uid_decl_hasher::equal (const tree_node *a, const tree_node *b)
{
  return (a->decl_minimal.uid == b->decl_minimal.uid);
}

/* Set of candidates.  */
static bitmap candidate_bitmap;
static hash_table<uid_decl_hasher> *candidates;

/* For a candidate UID return the candidate's decl.  */

static inline tree
candidate (unsigned uid)
{
  tree_node t;
  t.decl_minimal.uid = uid;
  return candidates->find_with_hash (&t, static_cast <hashval_t> (uid));
}
/* Bitmap of candidates which we should try to entirely scalarize away and
   those which cannot be (because they are and need to be used as a
   whole).  */
static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;

/* Obstack for creation of fancy names.  */
static struct obstack name_obstack;

/* Head of a linked list of accesses that need to have its subaccesses
   propagated to their assignment counterparts.  */
static struct access *work_queue_head;

/* Number of parameters of the analyzed function when doing early ipa SRA.  */
static int func_param_count;

/* scan_function sets the following to true if it encounters a call to
   __builtin_apply_args.  */
static bool encountered_apply_args;

/* Set by scan_function when it finds a recursive call.  */
static bool encountered_recursive_call;

/* Set by scan_function when it finds a recursive call with fewer actual
   arguments than formal parameters.  */
static bool encountered_unchangable_recursive_call;

/* This is a table in which for each basic block and parameter there is a
   distance (offset + size) in that parameter which is dereferenced and
   accessed in that BB.  */
static HOST_WIDE_INT *bb_dereferences;
/* Bitmap of BBs that can cause the function to "stop" progressing by
   returning, throwing externally, looping infinitely or calling a function
   which might abort etc.  */
static bitmap final_bbs;

/* Representative of no accesses at all.  */
static struct access no_accesses_representant;
/* Predicate to test the special value.  */

static inline bool
no_accesses_p (struct access *access)
{
  return access == &no_accesses_representant;
}
static struct
{
  /* Number of processed aggregates is readily available in
     analyze_all_variable_accesses and so is not stored here.  */

  /* Number of created scalar replacements.  */
  int replacements;

  /* Number of times sra_modify_expr or sra_modify_assign themselves changed
     an expression.  */
  int exprs;

  /* Number of statements created by generate_subtree_copies.  */
  int subtree_copies;

  /* Number of statements created by load_assign_lhs_subreplacements.  */
  int subreplacements;

  /* Number of times sra_modify_assign has deleted a statement.  */
  int deleted;

  /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
     RHS separately due to type conversions or nonexistent matching
     references.  */
  int separate_lhs_rhs_handling;

  /* Number of parameters that were removed because they were unused.  */
  int deleted_unused_parameters;

  /* Number of scalars passed as parameters by reference that have been
     converted to be passed by value.  */
  int scalar_by_ref_to_by_val;

  /* Number of aggregate parameters that were replaced by one or more of their
     components.  */
  int aggregate_params_reduced;

  /* Number of components created when splitting aggregate parameters.  */
  int param_reductions_created;
} sra_stats;

/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */
static void
dump_access (FILE *f, struct access *access, bool grp)
{
  fprintf (f, "access { ");
  fprintf (f, "base = (%d)'", DECL_UID (access->base));
  print_generic_expr (f, access->base, 0);
  fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
  fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
  fprintf (f, ", expr = ");
  print_generic_expr (f, access->expr, 0);
  fprintf (f, ", type = ");
  print_generic_expr (f, access->type, 0);
  if (grp)
    fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
	     "grp_assignment_write = %d, grp_scalar_read = %d, "
	     "grp_scalar_write = %d, grp_total_scalarization = %d, "
	     "grp_hint = %d, grp_covered = %d, "
	     "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
	     "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
	     "grp_to_be_debug_replaced = %d, grp_maybe_modified = %d, "
	     "grp_not_necessarilly_dereferenced = %d\n",
	     access->grp_read, access->grp_write, access->grp_assignment_read,
	     access->grp_assignment_write, access->grp_scalar_read,
	     access->grp_scalar_write, access->grp_total_scalarization,
	     access->grp_hint, access->grp_covered,
	     access->grp_unscalarizable_region, access->grp_unscalarized_data,
	     access->grp_partial_lhs, access->grp_to_be_replaced,
	     access->grp_to_be_debug_replaced, access->grp_maybe_modified,
	     access->grp_not_necessarilly_dereferenced);
  else
    fprintf (f, ", write = %d, grp_total_scalarization = %d, "
	     "grp_partial_lhs = %d\n",
	     access->write, access->grp_total_scalarization,
	     access->grp_partial_lhs);
}
/* Dump a subtree rooted in ACCESS to file F, indent by LEVEL.  */

static void
dump_access_tree_1 (FILE *f, struct access *access, int level)
{
  do
    {
      int i;

      for (i = 0; i < level; i++)
	fputs ("* ", f);

      dump_access (f, access, true);

      if (access->first_child)
	dump_access_tree_1 (f, access->first_child, level + 1);

      access = access->next_sibling;
    }
  while (access);
}

/* Dump all access trees for a variable, given the pointer to the first root
   in ACCESS.  */

static void
dump_access_tree (FILE *f, struct access *access)
{
  for (; access; access = access->next_grp)
    dump_access_tree_1 (f, access, 0);
}
/* Return true iff ACC is non-NULL and has subaccesses.  */

static inline bool
access_has_children_p (struct access *acc)
{
  return acc && acc->first_child;
}

/* Return true iff ACC is (partly) covered by at least one replacement.  */

static bool
access_has_replacements_p (struct access *acc)
{
  struct access *child;
  if (acc->grp_to_be_replaced)
    return true;
  for (child = acc->first_child; child; child = child->next_sibling)
    if (access_has_replacements_p (child))
      return true;
  return false;
}
/* Return a vector of pointers to accesses for the variable given in BASE or
   NULL if there is none.  */

static vec<access_p> *
get_base_access_vector (tree base)
{
  return base_access_vec->get (base);
}

/* Find an access with required OFFSET and SIZE in a subtree of accesses
   rooted in ACCESS.  Return NULL if it cannot be found.  */

static struct access *
find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
			HOST_WIDE_INT size)
{
  while (access && (access->offset != offset || access->size != size))
    {
      struct access *child = access->first_child;

      while (child && (child->offset + child->size <= offset))
	child = child->next_sibling;
      access = child;
    }

  return access;
}
/* Return the first group representative for DECL or NULL if none exists.  */

static struct access *
get_first_repr_for_decl (tree base)
{
  vec<access_p> *access_vec;

  access_vec = get_base_access_vector (base);
  if (!access_vec)
    return NULL;

  return (*access_vec)[0];
}

/* Find an access representative for the variable BASE and given OFFSET and
   SIZE.  Requires that access trees have already been built.  Return NULL if
   it cannot be found.  */

static struct access *
get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
				 HOST_WIDE_INT size)
{
  struct access *access;

  access = get_first_repr_for_decl (base);
  while (access && (access->offset + access->size <= offset))
    access = access->next_grp;
  if (!access)
    return NULL;

  return find_access_in_subtree (access, offset, size);
}
/* Add LINK to the linked list of assign links of RACC.  */
static void
add_link_to_rhs (struct access *racc, struct assign_link *link)
{
  gcc_assert (link->racc == racc);

  if (!racc->first_link)
    {
      gcc_assert (!racc->last_link);
      racc->first_link = link;
    }
  else
    racc->last_link->next = link;

  racc->last_link = link;
  link->next = NULL;
}

/* Move all link structures in their linked list in OLD_RACC to the linked
   list in NEW_RACC.  */
static void
relink_to_new_repr (struct access *new_racc, struct access *old_racc)
{
  if (!old_racc->first_link)
    {
      gcc_assert (!old_racc->last_link);
      return;
    }

  if (new_racc->first_link)
    {
      gcc_assert (!new_racc->last_link->next);
      gcc_assert (!old_racc->last_link || !old_racc->last_link->next);

      new_racc->last_link->next = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  else
    {
      gcc_assert (!new_racc->last_link);

      new_racc->first_link = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  old_racc->first_link = old_racc->last_link = NULL;
}
/* Add ACCESS to the work queue (which is actually a stack).  */

static void
add_access_to_work_queue (struct access *access)
{
  if (!access->grp_queued)
    {
      gcc_assert (!access->next_queued);
      access->next_queued = work_queue_head;
      access->grp_queued = 1;
      work_queue_head = access;
    }
}

/* Pop an access from the work queue, and return it, assuming there is one.  */

static struct access *
pop_access_from_work_queue (void)
{
  struct access *access = work_queue_head;

  work_queue_head = access->next_queued;
  access->next_queued = NULL;
  access->grp_queued = 0;
  return access;
}
/* Allocate necessary structures.  */

static void
sra_initialize (void)
{
  candidate_bitmap = BITMAP_ALLOC (NULL);
  candidates = new hash_table<uid_decl_hasher>
    (vec_safe_length (cfun->local_decls) / 2);
  should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  gcc_obstack_init (&name_obstack);
  base_access_vec = new hash_map<tree, auto_vec<access_p> >;
  memset (&sra_stats, 0, sizeof (sra_stats));
  encountered_apply_args = false;
  encountered_recursive_call = false;
  encountered_unchangable_recursive_call = false;
}

/* Deallocate all general structures.  */

static void
sra_deinitialize (void)
{
  BITMAP_FREE (candidate_bitmap);
  delete candidates;
  candidates = NULL;
  BITMAP_FREE (should_scalarize_away_bitmap);
  BITMAP_FREE (cannot_scalarize_away_bitmap);
  access::pool.release ();
  assign_link::pool.release ();
  obstack_free (&name_obstack, NULL);

  delete base_access_vec;
}
/* Remove DECL from candidates for SRA and write REASON to the dump file if
   there is one.  */
static void
disqualify_candidate (tree decl, const char *reason)
{
  if (bitmap_clear_bit (candidate_bitmap, DECL_UID (decl)))
    candidates->remove_elt_with_hash (decl, DECL_UID (decl));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "! Disqualifying ");
      print_generic_expr (dump_file, decl, 0);
      fprintf (dump_file, " - %s\n", reason);
    }
}
/* Return true iff the type contains a field or an element which does not
   allow scalarization.  */

static bool
type_internals_preclude_sra_p (tree type, const char **msg)
{
  tree fld;
  tree et;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	if (TREE_CODE (fld) == FIELD_DECL)
	  {
	    tree ft = TREE_TYPE (fld);

	    if (TREE_THIS_VOLATILE (fld))
	      {
		*msg = "volatile structure field";
		return true;
	      }
	    if (!DECL_FIELD_OFFSET (fld))
	      {
		*msg = "no structure field offset";
		return true;
	      }
	    if (!DECL_SIZE (fld))
	      {
		*msg = "zero structure field size";
		return true;
	      }
	    if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
	      {
		*msg = "structure field offset not fixed";
		return true;
	      }
	    if (!tree_fits_uhwi_p (DECL_SIZE (fld)))
	      {
		*msg = "structure field size not fixed";
		return true;
	      }
	    if (!tree_fits_shwi_p (bit_position (fld)))
	      {
		*msg = "structure field size too big";
		return true;
	      }
	    if (AGGREGATE_TYPE_P (ft)
		&& int_bit_position (fld) % BITS_PER_UNIT != 0)
	      {
		*msg = "structure field is bit field";
		return true;
	      }

	    if (AGGREGATE_TYPE_P (ft)
		&& type_internals_preclude_sra_p (ft, msg))
	      return true;
	  }

      return false;

    case ARRAY_TYPE:
      et = TREE_TYPE (type);

      if (TYPE_VOLATILE (et))
	{
	  *msg = "element type is volatile";
	  return true;
	}

      if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
	return true;

      return false;

    default:
      return false;
    }
}
/* If T is an SSA_NAME, return NULL if it is not a default def or return its
   base variable if it is.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (t))
	return SSA_NAME_VAR (t);
      else
	return NULL_TREE;
    }
  return t;
}
/* Mark a dereference of BASE of distance DIST in the basic block that STMT
   belongs to, unless the BB has already been marked as potentially final.  */

static void
mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  int idx, parm_index = 0;
  tree parm;

  if (bitmap_bit_p (final_bbs, bb->index))
    return;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm && parm != base;
       parm = DECL_CHAIN (parm))
    parm_index++;

  gcc_assert (parm_index < func_param_count);

  idx = bb->index * func_param_count + parm_index;
  if (bb_dereferences[idx] < dist)
    bb_dereferences[idx] = dist;
}
/* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
   the three fields.  Also add it to the vector of accesses corresponding to
   the base.  Finally, return the new access.  */

static struct access *
create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
{
  struct access *access = new struct access ();

  memset (access, 0, sizeof (struct access));
  access->base = base;
  access->offset = offset;
  access->size = size;

  base_access_vec->get_or_insert (base).safe_push (access);

  return access;
}
/* Create and insert access for EXPR.  Return created access, or NULL if it is
   not possible.  */

static struct access *
create_access (tree expr, gimple stmt, bool write)
{
  struct access *access;
  HOST_WIDE_INT offset, size, max_size;
  tree base = expr;
  bool ptr, unscalarizable_region = false;

  base = get_ref_base_and_extent (expr, &offset, &size, &max_size);

  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (base) == MEM_REF)
    {
      base = get_ssa_base_param (TREE_OPERAND (base, 0));
      if (!base)
	return NULL;
      ptr = true;
    }
  else
    ptr = false;

  if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  if (sra_mode == SRA_MODE_EARLY_IPA)
    {
      if (size < 0 || size != max_size)
	{
	  disqualify_candidate (base, "Encountered a variable sized access.");
	  return NULL;
	}
      if (TREE_CODE (expr) == COMPONENT_REF
	  && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
	{
	  disqualify_candidate (base, "Encountered a bit-field access.");
	  return NULL;
	}
      gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);

      if (ptr)
	mark_parm_dereference (base, offset + size, stmt);
    }
  else
    {
      if (size != max_size)
	{
	  size = max_size;
	  unscalarizable_region = true;
	}
      if (size < 0)
	{
	  disqualify_candidate (base, "Encountered an unconstrained access.");
	  return NULL;
	}
    }

  access = create_access_1 (base, offset, size);
  access->expr = expr;
  access->type = TREE_TYPE (expr);
  access->write = write;
  access->grp_unscalarizable_region = unscalarizable_region;
  access->stmt = stmt;

  if (TREE_CODE (expr) == COMPONENT_REF
      && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
    access->non_addressable = 1;

  return access;
}
/* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
   register types or (recursively) records with only these two kinds of
   fields.  It also returns false if any of these records contains a
   bit-field.  */

static bool
type_consists_of_records_p (tree type)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
	tree ft = TREE_TYPE (fld);

	if (DECL_BIT_FIELD (fld))
	  return false;

	if (!is_gimple_reg_type (ft)
	    && !type_consists_of_records_p (ft))
	  return false;
      }

  return true;
}
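
/* For instance (illustrative declarations, not from the testsuite), a type
   like

     struct ok  { struct { int a; } inner; long b; };

   satisfies the predicate above, whereas one containing a bit-field, e.g.

     struct bad { int a : 3; int b; };

   does not.  */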
/* Create total_scalarization accesses for all scalar type fields in DECL that
   must be of a RECORD_TYPE conforming to type_consists_of_records_p.  BASE
   must be the top-most VAR_DECL representing the variable, OFFSET must be the
   offset of DECL within BASE.  REF must be the memory reference expression
   for the given decl.  */

static void
completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset,
			     tree ref)
{
  tree fld, decl_type = TREE_TYPE (decl);

  for (fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
	HOST_WIDE_INT pos = offset + int_bit_position (fld);
	tree ft = TREE_TYPE (fld);
	tree nref = build3 (COMPONENT_REF, TREE_TYPE (fld), ref, fld,
			    NULL_TREE);

	if (is_gimple_reg_type (ft))
	  {
	    struct access *access;
	    HOST_WIDE_INT size;

	    size = tree_to_uhwi (DECL_SIZE (fld));
	    access = create_access_1 (base, pos, size);
	    access->expr = nref;
	    access->type = ft;
	    access->grp_total_scalarization = 1;
	    /* Accesses for intraprocedural SRA can have their stmt NULL.  */
	  }
	else
	  completely_scalarize_record (base, fld, pos, nref);
      }
}
/* Create total_scalarization accesses for all scalar type fields in VAR and
   for VAR as a whole.  VAR must be of a RECORD_TYPE conforming to
   type_consists_of_records_p.  */

static void
completely_scalarize_var (tree var)
{
  HOST_WIDE_INT size = tree_to_uhwi (DECL_SIZE (var));
  struct access *access;

  access = create_access_1 (var, 0, size);
  access->expr = var;
  access->type = TREE_TYPE (var);
  access->grp_total_scalarization = 1;

  completely_scalarize_record (var, var, 0, var);
}
/* Return true if REF has a VIEW_CONVERT_EXPR somewhere in it.  */

static inline bool
contains_view_convert_expr_p (const_tree ref)
{
  while (handled_component_p (ref))
    {
      if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
	return true;
      ref = TREE_OPERAND (ref, 0);
    }

  return false;
}
/* Search the given tree for a declaration by skipping handled components and
   exclude it from the candidates.  */

static void
disqualify_base_of_expr (tree t, const char *reason)
{
  t = get_base_address (t);
  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (t) == MEM_REF)
    t = get_ssa_base_param (TREE_OPERAND (t, 0));

  if (t && DECL_P (t))
    disqualify_candidate (t, reason);
}
/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return the created access or NULL if none is
   created.  */

static struct access *
build_access_from_expr_1 (tree expr, gimple stmt, bool write)
{
  struct access *ret = NULL;
  bool partial_ref;

  if (TREE_CODE (expr) == BIT_FIELD_REF
      || TREE_CODE (expr) == IMAGPART_EXPR
      || TREE_CODE (expr) == REALPART_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      partial_ref = true;
    }
  else
    partial_ref = false;

  /* We need to dive through V_C_Es in order to get the size of its parameter
     and not the result type.  Ada produces such statements.  We are also
     capable of handling the topmost V_C_E but not any of those buried in
     other handled components.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    expr = TREE_OPERAND (expr, 0);

  if (contains_view_convert_expr_p (expr))
    {
      disqualify_base_of_expr (expr, "V_C_E under a different handled "
			       "component.");
      return NULL;
    }
  if (TREE_THIS_VOLATILE (expr))
    {
      disqualify_base_of_expr (expr, "part of a volatile reference.");
      return NULL;
    }

  switch (TREE_CODE (expr))
    {
    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
	  && sra_mode != SRA_MODE_EARLY_IPA)
	return NULL;
      /* fall through */
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      ret = create_access (expr, stmt, write);
      break;

    default:
      break;
    }

  if (write && partial_ref && ret)
    ret->grp_partial_lhs = 1;

  return ret;
}
/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return true if any access has been inserted.
   STMT must be the statement from which the expression is taken, WRITE must
   be true if the expression is a store and false otherwise.  */

static bool
build_access_from_expr (tree expr, gimple stmt, bool write)
{
  struct access *access;

  access = build_access_from_expr_1 (expr, stmt, write);
  if (access)
    {
      /* This means the aggregate is accessed as a whole in a way other than
	 an assign statement and thus cannot be removed even if we had a
	 scalar replacement for everything.  */
      if (cannot_scalarize_away_bitmap)
	bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
      return true;
    }
  return false;
}
/* Return the single non-EH successor edge of BB or NULL if there is none or
   more than one.  */

static edge
single_non_eh_succ (basic_block bb)
{
  edge e, res = NULL;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & EDGE_EH))
      {
	if (res)
	  return NULL;
	res = e;
      }

  return res;
}
/* Disqualify LHS and RHS for scalarization if STMT has to terminate its BB
   and there is no alternative spot where to put statements SRA might need to
   generate after it.  The spot we are looking for is an edge leading to a
   single non-EH successor, if it exists and is indeed single.  RHS may be
   NULL, in that case ignore it.  */

static bool
disqualify_if_bad_bb_terminating_stmt (gimple stmt, tree lhs, tree rhs)
{
  if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && stmt_ends_bb_p (stmt))
    {
      if (single_non_eh_succ (gimple_bb (stmt)))
	return false;

      disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
      if (rhs)
	disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
      return true;
    }
  return false;
}
/* Scan expressions occurring in STMT, create access structures for all
   accesses to candidates for scalarization and remove those candidates which
   occur in statements or expressions that prevent them from being split
   apart.  Return true if any access has been inserted.  */

static bool
build_accesses_from_assign (gimple stmt)
{
  tree lhs, rhs;
  struct access *lacc, *racc;

  if (!gimple_assign_single_p (stmt)
      /* Scope clobbers don't influence scalarization.  */
      || gimple_clobber_p (stmt))
    return false;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);

  if (disqualify_if_bad_bb_terminating_stmt (stmt, lhs, rhs))
    return false;

  racc = build_access_from_expr_1 (rhs, stmt, false);
  lacc = build_access_from_expr_1 (lhs, stmt, true);

  if (lacc)
    lacc->grp_assignment_write = 1;

  if (racc)
    {
      racc->grp_assignment_read = 1;
      if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
	  && !is_gimple_reg_type (racc->type))
	bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
    }

  if (lacc && racc
      && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && !lacc->grp_unscalarizable_region
      && !racc->grp_unscalarizable_region
      && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
      && lacc->size == racc->size
      && useless_type_conversion_p (lacc->type, racc->type))
    {
      struct assign_link *link;

      link = new assign_link;
      memset (link, 0, sizeof (struct assign_link));

      link->lacc = lacc;
      link->racc = racc;

      add_link_to_rhs (racc, link);
    }

  return lacc || racc;
}
/* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
   GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */

static bool
asm_visit_addr (gimple, tree op, tree, void *)
{
  op = get_base_address (op);
  if (op
      && DECL_P (op))
    disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");

  return false;
}
/* Return true iff callsite CALL has at least as many actual arguments as
   there are formal parameters of the function currently processed by IPA-SRA
   and that their types match.  */

static inline bool
callsite_arguments_match_p (gimple call)
{
  if (gimple_call_num_args (call) < (unsigned) func_param_count)
    return false;

  tree parm;
  int i;
  for (parm = DECL_ARGUMENTS (current_function_decl), i = 0;
       parm;
       parm = DECL_CHAIN (parm), i++)
    {
      tree arg = gimple_call_arg (call, i);
      if (!useless_type_conversion_p (TREE_TYPE (parm), TREE_TYPE (arg)))
	return false;
    }
  return true;
}
/* Scan function and look for interesting expressions and create access
   structures for them.  Return true iff any access is created.  */

static bool
scan_function (void)
{
  basic_block bb;
  bool ret = false;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree t;
	  unsigned i;

	  if (final_bbs && stmt_can_throw_external (stmt))
	    bitmap_set_bit (final_bbs, bb->index);
	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_RETURN:
	      t = gimple_return_retval (as_a <greturn *> (stmt));
	      if (t != NULL_TREE)
		ret |= build_access_from_expr (t, stmt, false);
	      if (final_bbs)
		bitmap_set_bit (final_bbs, bb->index);
	      break;

	    case GIMPLE_ASSIGN:
	      ret |= build_accesses_from_assign (stmt);
	      break;

	    case GIMPLE_CALL:
	      for (i = 0; i < gimple_call_num_args (stmt); i++)
		ret |= build_access_from_expr (gimple_call_arg (stmt, i),
					       stmt, false);

	      if (sra_mode == SRA_MODE_EARLY_IPA)
		{
		  tree dest = gimple_call_fndecl (stmt);
		  int flags = gimple_call_flags (stmt);

		  if (dest)
		    {
		      if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
			  && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
			encountered_apply_args = true;
		      if (recursive_call_p (current_function_decl, dest))
			{
			  encountered_recursive_call = true;
			  if (!callsite_arguments_match_p (stmt))
			    encountered_unchangable_recursive_call = true;
			}
		    }

		  if (final_bbs
		      && (flags & (ECF_CONST | ECF_PURE)) == 0)
		    bitmap_set_bit (final_bbs, bb->index);
		}

	      t = gimple_call_lhs (stmt);
	      if (t && !disqualify_if_bad_bb_terminating_stmt (stmt, t, NULL))
		ret |= build_access_from_expr (t, stmt, true);
	      break;

	    case GIMPLE_ASM:
	      {
		gasm *asm_stmt = as_a <gasm *> (stmt);
		walk_stmt_load_store_addr_ops (asm_stmt, NULL, NULL, NULL,
					       asm_visit_addr);
		if (final_bbs)
		  bitmap_set_bit (final_bbs, bb->index);

		for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
		  {
		    t = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
		    ret |= build_access_from_expr (t, asm_stmt, false);
		  }
		for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
		  {
		    t = TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
		    ret |= build_access_from_expr (t, asm_stmt, true);
		  }
	      }
	      break;

	    default:
	      break;
	    }
	}
    }

  return ret;
}
/* Helper of QSORT function.  There are pointers to accesses in the array.  An
   access is considered smaller than another if it has smaller offset or if
   the offsets are the same but its size is bigger.  */

static int
compare_access_positions (const void *a, const void *b)
{
  const access_p *fp1 = (const access_p *) a;
  const access_p *fp2 = (const access_p *) b;
  const access_p f1 = *fp1;
  const access_p f2 = *fp2;

  if (f1->offset != f2->offset)
    return f1->offset < f2->offset ? -1 : 1;

  if (f1->size == f2->size)
    {
      if (f1->type == f2->type)
	return 0;
      /* Put any non-aggregate type before any aggregate type.  */
      else if (!is_gimple_reg_type (f1->type)
	       && is_gimple_reg_type (f2->type))
	return 1;
      else if (is_gimple_reg_type (f1->type)
	       && !is_gimple_reg_type (f2->type))
	return -1;
      /* Put any complex or vector type before any other scalar type.  */
      else if (TREE_CODE (f1->type) != COMPLEX_TYPE
	       && TREE_CODE (f1->type) != VECTOR_TYPE
	       && (TREE_CODE (f2->type) == COMPLEX_TYPE
		   || TREE_CODE (f2->type) == VECTOR_TYPE))
	return 1;
      else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
		|| TREE_CODE (f1->type) == VECTOR_TYPE)
	       && TREE_CODE (f2->type) != COMPLEX_TYPE
	       && TREE_CODE (f2->type) != VECTOR_TYPE)
	return -1;
      /* Put the integral type with the bigger precision first.  */
      else if (INTEGRAL_TYPE_P (f1->type)
	       && INTEGRAL_TYPE_P (f2->type))
	return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
      /* Put any integral type with non-full precision last.  */
      else if (INTEGRAL_TYPE_P (f1->type)
	       && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
		   != TYPE_PRECISION (f1->type)))
	return 1;
      else if (INTEGRAL_TYPE_P (f2->type)
	       && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
		   != TYPE_PRECISION (f2->type)))
	return -1;
      /* Stabilize the sort.  */
      return TYPE_UID (f1->type) - TYPE_UID (f2->type);
    }

  /* We want the bigger accesses first, thus the opposite operator in the next
     line:  */
  return f1->size > f2->size ? -1 : 1;
}
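
/* As a worked example of the ordering (hypothetical accesses): for the
   variable "s" used above, a vector containing accesses to s.in, s.in.x,
   s.in.y and s.z would be sorted as

     <offset 0,  size 64>  s.in     (bigger size first at equal offset)
     <offset 0,  size 32>  s.in.x
     <offset 32, size 32>  s.in.y
     <offset 64, size 32>  s.z

   so that sort_and_splice_var_accesses sees each region's largest access
   before the accesses contained within it.  */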
/* Append a name of the declaration to the name obstack.  A helper function
   for make_fancy_name.  */

static void
make_fancy_decl_name (tree decl)
{
  char buffer[32];

  tree name = DECL_NAME (decl);
  if (name)
    obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
		  IDENTIFIER_LENGTH (name));
  else
    {
      sprintf (buffer, "D%u", DECL_UID (decl));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
    }
}
/* Helper for make_fancy_name.  */

static void
make_fancy_name_1 (tree expr)
{
  char buffer[32];
  tree index;

  if (DECL_P (expr))
    {
      make_fancy_decl_name (expr);
      return;
    }

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      make_fancy_decl_name (TREE_OPERAND (expr, 1));
      break;

    case ARRAY_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      /* Arrays with only one element may not have a constant as their
	 index.  */
      index = TREE_OPERAND (expr, 1);
      if (TREE_CODE (index) != INTEGER_CST)
	break;
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
      break;

    case ADDR_EXPR:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      break;

    case MEM_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      if (!integer_zerop (TREE_OPERAND (expr, 1)))
	{
	  obstack_1grow (&name_obstack, '$');
	  sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
		   TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
	  obstack_grow (&name_obstack, buffer, strlen (buffer));
	}
      break;

    case BIT_FIELD_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      gcc_unreachable ();	/* we treat these as scalars.  */
      break;
    default:
      break;
    }
}
/* Create a human readable name for replacement variable of ACCESS.  */

static char *
make_fancy_name (tree expr)
{
  make_fancy_name_1 (expr);
  obstack_1grow (&name_obstack, '\0');
  return XOBFINISH (&name_obstack, char *);
}
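
/* For example, for the expression s.in.y the obstack receives "s", '$', "in",
   '$' and "y", so the replacement variable is named "s$in$y"; a non-zero
   MEM_REF offset contributes its byte value instead of a field name.  (An
   illustration of the naming scheme, not a quote from a dump.)  */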
/* Construct a MEM_REF that would reference a part of aggregate BASE of type
   EXP_TYPE at the given OFFSET.  If BASE is something for which
   get_addr_base_and_unit_offset returns NULL, gsi must be non-NULL and is
   used to insert new statements either before or below the current one as
   specified by INSERT_AFTER.  This function is not capable of handling
   bitfields.

   BASE must be either a declaration or a memory reference that has correct
   alignment information embedded in it (e.g. a pre-existing one in SRA).  */

tree
build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
		      tree exp_type, gimple_stmt_iterator *gsi,
		      bool insert_after)
{
  tree prev_base = base;
  tree off;
  tree mem_ref;
  HOST_WIDE_INT base_offset;
  unsigned HOST_WIDE_INT misalign;
  unsigned int align;

  gcc_checking_assert (offset % BITS_PER_UNIT == 0);
  get_object_alignment_1 (base, &align, &misalign);
  base = get_addr_base_and_unit_offset (base, &base_offset);

  /* get_addr_base_and_unit_offset returns NULL for references with a variable
     offset such as array[var_index].  */
  if (!base)
    {
      gassign *stmt;
      tree tmp, addr;

      gcc_checking_assert (gsi);
      tmp = make_ssa_name (build_pointer_type (TREE_TYPE (prev_base)));
      addr = build_fold_addr_expr (unshare_expr (prev_base));
      STRIP_USELESS_TYPE_CONVERSION (addr);
      stmt = gimple_build_assign (tmp, addr);
      gimple_set_location (stmt, loc);
      if (insert_after)
	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

      off = build_int_cst (reference_alias_ptr_type (prev_base),
			   offset / BITS_PER_UNIT);
      base = tmp;
    }
  else if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
			   base_offset + offset / BITS_PER_UNIT);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (base),
			   base_offset + offset / BITS_PER_UNIT);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  misalign = (misalign + offset) & (align - 1);
  if (misalign != 0)
    align = (misalign & -misalign);
  if (align != TYPE_ALIGN (exp_type))
    exp_type = build_aligned_type (exp_type, align);

  mem_ref = fold_build2_loc (loc, MEM_REF, exp_type, base, off);
  if (TREE_THIS_VOLATILE (prev_base))
    TREE_THIS_VOLATILE (mem_ref) = 1;
  if (TREE_SIDE_EFFECTS (prev_base))
    TREE_SIDE_EFFECTS (mem_ref) = 1;
  return mem_ref;
}
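
/* A sketch of the common case (declaration base, constant offset): for BASE
   "s" as declared above, OFFSET 64 and EXP_TYPE "int", this builds the
   equivalent of

     MEM[(int *)&s + 8B]

   i.e. the base is taken by address and the bit offset becomes the byte
   offset in the MEM_REF's constant second operand.  */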
/* Construct a memory reference to a part of an aggregate BASE at the given
   OFFSET and of the same type as MODEL.  In case this is a reference to a
   bit-field, the function will replicate the last component_ref of model's
   expr to access it.  GSI and INSERT_AFTER have the same meaning as in
   build_ref_for_offset.  */

static tree
build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
		     struct access *model, gimple_stmt_iterator *gsi,
		     bool insert_after)
{
  if (TREE_CODE (model->expr) == COMPONENT_REF
      && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
    {
      /* This access represents a bit-field.  */
      tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);

      offset -= int_bit_position (fld);
      exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
      t = build_ref_for_offset (loc, base, offset, exp_type, gsi,
				insert_after);
      return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
			      NULL_TREE);
    }
  else
    return build_ref_for_offset (loc, base, offset, model->type,
				 gsi, insert_after);
}
/* Attempt to build a memory reference that we could put into a gimple
   debug_bind statement.  Similar to build_ref_for_model but punts if it has
   to create statements and returns NULL instead.  This function also ignores
   alignment issues and so its results should never end up in non-debug
   statements.  */

static tree
build_debug_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
			   struct access *model)
{
  HOST_WIDE_INT base_offset;
  tree off;

  if (TREE_CODE (model->expr) == COMPONENT_REF
      && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
    return NULL_TREE;

  base = get_addr_base_and_unit_offset (base, &base_offset);
  if (!base)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
			   base_offset + offset / BITS_PER_UNIT);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (base),
			   base_offset + offset / BITS_PER_UNIT);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  return fold_build2_loc (loc, MEM_REF, model->type, base, off);
}
/* Construct a memory reference consisting of component_refs and array_refs to
   a part of an aggregate *RES (which is of type TYPE).  The requested part
   should have type EXP_TYPE at the given OFFSET.  This function might not
   succeed, it returns true when it does and only then *RES points to
   something meaningful.  This function should be used only to build
   expressions that we might need to present to user (e.g. in warnings).  In
   all other situations, build_ref_for_model or build_ref_for_offset should be
   used instead.  */

static bool
build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
				    tree exp_type)
{
  while (1)
    {
      tree fld;
      tree tr_size, index, minidx;
      HOST_WIDE_INT el_size;

      if (offset == 0 && exp_type
	  && types_compatible_p (exp_type, type))
	return true;

      switch (TREE_CODE (type))
	{
	case UNION_TYPE:
	case QUAL_UNION_TYPE:
	case RECORD_TYPE:
	  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	    {
	      HOST_WIDE_INT pos, size;
	      tree tr_pos, expr, *expr_ptr;

	      if (TREE_CODE (fld) != FIELD_DECL)
		continue;

	      tr_pos = bit_position (fld);
	      if (!tr_pos || !tree_fits_uhwi_p (tr_pos))
		continue;
	      pos = tree_to_uhwi (tr_pos);
	      gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
	      tr_size = DECL_SIZE (fld);
	      if (!tr_size || !tree_fits_uhwi_p (tr_size))
		continue;
	      size = tree_to_uhwi (tr_size);
	      if (size == 0)
		{
		  if (pos != offset)
		    continue;
		}
	      else if (pos > offset || (pos + size) <= offset)
		continue;

	      expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
			     NULL_TREE);
	      expr_ptr = &expr;
	      if (build_user_friendly_ref_for_offset (expr_ptr,
						      TREE_TYPE (fld),
						      offset - pos, exp_type))
		{
		  *res = expr;
		  return true;
		}
	    }
	  return false;

	case ARRAY_TYPE:
	  tr_size = TYPE_SIZE (TREE_TYPE (type));
	  if (!tr_size || !tree_fits_uhwi_p (tr_size))
	    return false;
	  el_size = tree_to_uhwi (tr_size);

	  minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	  if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
	    return false;
	  index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
	  if (!integer_zerop (minidx))
	    index = int_const_binop (PLUS_EXPR, index, minidx);
	  *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
			 NULL_TREE, NULL_TREE);
	  offset = offset % el_size;
	  type = TREE_TYPE (type);
	  break;

	default:
	  if (offset != 0)
	    return false;

	  if (exp_type)
	    return false;
	  else
	    return true;
	}
    }
}
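
/* E.g. for *RES of type "struct S" as declared above and OFFSET 32, the
   function descends through the fields and rewrites *RES to the
   user-presentable expression s.in.y rather than a raw MEM_REF (illustrative,
   matching the layout assumed earlier).  */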
/* Return true iff TYPE is stdarg va_list type.  */

static inline bool
is_va_list_type (tree type)
{
  return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
}
/* Print message to dump file why a variable was rejected.  */

static void
reject (tree var, const char *msg)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "\n");
    }
}
/* Return true if VAR is a candidate for SRA.  */

static bool
maybe_add_sra_candidate (tree var)
{
  tree type = TREE_TYPE (var);
  const char *msg;
  tree_node **slot;

  if (!AGGREGATE_TYPE_P (type))
    {
      reject (var, "not aggregate");
      return false;
    }
  if (needs_to_live_in_memory (var))
    {
      reject (var, "needs to live in memory");
      return false;
    }
  if (TREE_THIS_VOLATILE (var))
    {
      reject (var, "is volatile");
      return false;
    }
  if (!COMPLETE_TYPE_P (type))
    {
      reject (var, "has incomplete type");
      return false;
    }
  if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
    {
      reject (var, "type size not fixed");
      return false;
    }
  if (tree_to_uhwi (TYPE_SIZE (type)) == 0)
    {
      reject (var, "type size is zero");
      return false;
    }
  if (type_internals_preclude_sra_p (type, &msg))
    {
      reject (var, msg);
      return false;
    }
  if (/* Fix for PR 41089.  tree-stdarg.c needs to have va_lists intact but
	 we also want to schedule it rather late.  Thus we ignore it in
	 the early pass.  */
      (sra_mode == SRA_MODE_EARLY_INTRA
       && is_va_list_type (type)))
    {
      reject (var, "is va_list");
      return false;
    }

  bitmap_set_bit (candidate_bitmap, DECL_UID (var));
  slot = candidates->find_slot_with_hash (var, DECL_UID (var), INSERT);
  *slot = var;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "\n");
    }

  return true;
}
/* The very first phase of intraprocedural SRA.  It marks in candidate_bitmap
   those variables whose type is suitable for scalarization.  */

static bool
find_var_candidates (void)
{
  tree var, parm;
  unsigned int i;
  bool ret = false;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    ret |= maybe_add_sra_candidate (parm);

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (TREE_CODE (var) != VAR_DECL)
	continue;

      ret |= maybe_add_sra_candidate (var);
    }

  return ret;
}
/* Sort all accesses for the given variable, check for partial overlaps and
   return NULL if there are any.  If there are none, pick a representative for
   each combination of offset and size and create a linked list out of them.
   Return the pointer to the first representative and make sure it is the
   first one in the vector of accesses.  */

static struct access *
sort_and_splice_var_accesses (tree var)
{
  int i, j, access_count;
  struct access *res, **prev_acc_ptr = &res;
  vec<access_p> *access_vec;
  bool first = true;
  HOST_WIDE_INT low = -1, high = 0;

  access_vec = get_base_access_vector (var);
  if (!access_vec)
    return NULL;
  access_count = access_vec->length ();

  /* Sort by <OFFSET, SIZE>.  */
  access_vec->qsort (compare_access_positions);

  i = 0;
  while (i < access_count)
    {
      struct access *access = (*access_vec)[i];
      bool grp_write = access->write;
      bool grp_read = !access->write;
      bool grp_scalar_write = access->write
	&& is_gimple_reg_type (access->type);
      bool grp_scalar_read = !access->write
	&& is_gimple_reg_type (access->type);
      bool grp_assignment_read = access->grp_assignment_read;
      bool grp_assignment_write = access->grp_assignment_write;
      bool multiple_scalar_reads = false;
      bool total_scalarization = access->grp_total_scalarization;
      bool grp_partial_lhs = access->grp_partial_lhs;
      bool first_scalar = is_gimple_reg_type (access->type);
      bool unscalarizable_region = access->grp_unscalarizable_region;

      if (first || access->offset >= high)
	{
	  first = false;
	  low = access->offset;
	  high = access->offset + access->size;
	}
      else if (access->offset > low && access->offset + access->size > high)
	return NULL;
      else
	gcc_assert (access->offset >= low
		    && access->offset + access->size <= high);

      j = i + 1;
      while (j < access_count)
	{
	  struct access *ac2 = (*access_vec)[j];
	  if (ac2->offset != access->offset || ac2->size != access->size)
	    break;
	  if (ac2->write)
	    {
	      grp_write = true;
	      grp_scalar_write = (grp_scalar_write
				  || is_gimple_reg_type (ac2->type));
	    }
	  else
	    {
	      grp_read = true;
	      if (is_gimple_reg_type (ac2->type))
		{
		  if (grp_scalar_read)
		    multiple_scalar_reads = true;
		  else
		    grp_scalar_read = true;
		}
	    }
	  grp_assignment_read |= ac2->grp_assignment_read;
	  grp_assignment_write |= ac2->grp_assignment_write;
	  grp_partial_lhs |= ac2->grp_partial_lhs;
	  unscalarizable_region |= ac2->grp_unscalarizable_region;
	  total_scalarization |= ac2->grp_total_scalarization;
	  relink_to_new_repr (access, ac2);

	  /* If there are both aggregate-type and scalar-type accesses with
	     this combination of size and offset, the comparison function
	     should have put the scalars first.  */
	  gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
	  ac2->group_representative = access;
	  j++;
	}

      i = j;

      access->group_representative = access;
      access->grp_write = grp_write;
      access->grp_read = grp_read;
      access->grp_scalar_read = grp_scalar_read;
      access->grp_scalar_write = grp_scalar_write;
      access->grp_assignment_read = grp_assignment_read;
      access->grp_assignment_write = grp_assignment_write;
      access->grp_hint = multiple_scalar_reads || total_scalarization;
      access->grp_total_scalarization = total_scalarization;
      access->grp_partial_lhs = grp_partial_lhs;
      access->grp_unscalarizable_region = unscalarizable_region;
      if (access->first_link)
	add_access_to_work_queue (access);

      *prev_acc_ptr = access;
      prev_acc_ptr = &access->next_grp;
    }

  gcc_assert (res == (*access_vec)[0]);
  return res;
}
2009 /* Create a variable for the given ACCESS which determines the type, name and a
2010 few other properties. Return the variable declaration; callers store it in
2011 ACCESS->replacement_decl. */
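/* A sketch of the effect, with a hypothetical spelling of the name: for a
   scalar access to parm->rec.fld, the replacement is a fresh temporary
   whose DECL_DEBUG_EXPR refers back to the original expression and whose
   name, built by make_fancy_name below, is a '$'-separated rendering along
   the lines of "parm$rec$fld".  */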
2013 static tree
2014 create_access_replacement (struct access *access)
2016 tree repl;
2018 if (access->grp_to_be_debug_replaced)
2020 repl = create_tmp_var_raw (access->type);
2021 DECL_CONTEXT (repl) = current_function_decl;
2023 else
2024 /* Drop any special alignment on the type if it's not on the main
2025 variant. This avoids issues with weirdo ABIs like AAPCS. */
2026 repl = create_tmp_var (build_qualified_type
2027 (TYPE_MAIN_VARIANT (access->type),
2028 TYPE_QUALS (access->type)), "SR");
2029 if (TREE_CODE (access->type) == COMPLEX_TYPE
2030 || TREE_CODE (access->type) == VECTOR_TYPE)
2032 if (!access->grp_partial_lhs)
2033 DECL_GIMPLE_REG_P (repl) = 1;
2035 else if (access->grp_partial_lhs
2036 && is_gimple_reg_type (access->type))
2037 TREE_ADDRESSABLE (repl) = 1;
2039 DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
2040 DECL_ARTIFICIAL (repl) = 1;
2041 DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
2043 if (DECL_NAME (access->base)
2044 && !DECL_IGNORED_P (access->base)
2045 && !DECL_ARTIFICIAL (access->base))
2047 char *pretty_name = make_fancy_name (access->expr);
2048 tree debug_expr = unshare_expr_without_location (access->expr), d;
2049 bool fail = false;
2051 DECL_NAME (repl) = get_identifier (pretty_name);
2052 obstack_free (&name_obstack, pretty_name);
2054 /* Get rid of any SSA_NAMEs embedded in debug_expr,
2055 as DECL_DEBUG_EXPR isn't considered when looking for still
2056 used SSA_NAMEs and thus they could be freed. All debug info
2057 generation cares about is whether something is constant or
2058 variable and that get_ref_base_and_extent works properly on the
2059 expression. It cannot handle accesses at a non-constant offset
2060 though, so just give up in those cases. */
2061 for (d = debug_expr;
2062 !fail && (handled_component_p (d) || TREE_CODE (d) == MEM_REF);
2063 d = TREE_OPERAND (d, 0))
2064 switch (TREE_CODE (d))
2066 case ARRAY_REF:
2067 case ARRAY_RANGE_REF:
2068 if (TREE_OPERAND (d, 1)
2069 && TREE_CODE (TREE_OPERAND (d, 1)) != INTEGER_CST)
2070 fail = true;
2071 if (TREE_OPERAND (d, 3)
2072 && TREE_CODE (TREE_OPERAND (d, 3)) != INTEGER_CST)
2073 fail = true;
2074 /* FALLTHRU */
2075 case COMPONENT_REF:
2076 if (TREE_OPERAND (d, 2)
2077 && TREE_CODE (TREE_OPERAND (d, 2)) != INTEGER_CST)
2078 fail = true;
2079 break;
2080 case MEM_REF:
2081 if (TREE_CODE (TREE_OPERAND (d, 0)) != ADDR_EXPR)
2082 fail = true;
2083 else
2084 d = TREE_OPERAND (d, 0);
2085 break;
2086 default:
2087 break;
2089 if (!fail)
2091 SET_DECL_DEBUG_EXPR (repl, debug_expr);
2092 DECL_HAS_DEBUG_EXPR_P (repl) = 1;
2094 if (access->grp_no_warning)
2095 TREE_NO_WARNING (repl) = 1;
2096 else
2097 TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
2099 else
2100 TREE_NO_WARNING (repl) = 1;
2102 if (dump_file)
2104 if (access->grp_to_be_debug_replaced)
2106 fprintf (dump_file, "Created a debug-only replacement for ");
2107 print_generic_expr (dump_file, access->base, 0);
2108 fprintf (dump_file, " offset: %u, size: %u\n",
2109 (unsigned) access->offset, (unsigned) access->size);
2111 else
2113 fprintf (dump_file, "Created a replacement for ");
2114 print_generic_expr (dump_file, access->base, 0);
2115 fprintf (dump_file, " offset: %u, size: %u: ",
2116 (unsigned) access->offset, (unsigned) access->size);
2117 print_generic_expr (dump_file, repl, 0);
2118 fprintf (dump_file, "\n");
2121 sra_stats.replacements++;
2123 return repl;
2126 /* Return the scalar replacement of ACCESS, which must already have been created. */
2128 static inline tree
2129 get_access_replacement (struct access *access)
2131 gcc_checking_assert (access->replacement_decl);
2132 return access->replacement_decl;
2136 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
2137 linked list along the way. Stop when *ACCESS is NULL or the access it
2138 points to is not "within" the root. Return false iff some accesses
2139 partially overlap. */
2141 static bool
2142 build_access_subtree (struct access **access)
2144 struct access *root = *access, *last_child = NULL;
2145 HOST_WIDE_INT limit = root->offset + root->size;
2147 *access = (*access)->next_grp;
2148 while (*access && (*access)->offset + (*access)->size <= limit)
2150 if (!last_child)
2151 root->first_child = *access;
2152 else
2153 last_child->next_sibling = *access;
2154 last_child = *access;
2156 if (!build_access_subtree (access))
2157 return false;
2160 if (*access && (*access)->offset < limit)
2161 return false;
2163 return true;
2166 /* Build a tree of access representatives, ACCESS is the pointer to the first
2167 one, others are linked in a list by the next_grp field. Return false iff
2168 some accesses partially overlap. */
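/* Continuing the illustration above, representatives for

     struct O { struct I { int a; int b; } in; int c; } o;

   grouped as <0, 64>, <0, 32>, <32, 32> and <64, 32> (32-bit ints again)
   form two trees linked by next_grp: <0, 64> with children <0, 32> and
   <32, 32>, followed by a childless <64, 32>.  */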
2170 static bool
2171 build_access_trees (struct access *access)
2173 while (access)
2175 struct access *root = access;
2177 if (!build_access_subtree (&access))
2178 return false;
2179 root->next_grp = access;
2181 return true;
2184 /* Return true if EXPR contains some ARRAY_REFs into an array with a
2185 variable (non-constant) lower bound. */
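/* For instance a reference like  arr[i].f  where the array type has a
   lower bound that is not a compile-time constant (as can happen in Ada)
   makes this return true; analyze_access_subtree below then refuses to
   create replacements underneath such an expression.  */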
2187 static bool
2188 expr_with_var_bounded_array_refs_p (tree expr)
2190 while (handled_component_p (expr))
2192 if (TREE_CODE (expr) == ARRAY_REF
2193 && !tree_fits_shwi_p (array_ref_low_bound (expr)))
2194 return true;
2195 expr = TREE_OPERAND (expr, 0);
2197 return false;
2200 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
2201 that seems beneficial and when ALLOW_REPLACEMENTS allows it. Also set all
2202 sorts of access flags appropriately along the way, notably propagate
2203 grp_read, grp_assignment_read, grp_write, grp_assignment_write and
2204 grp_total_scalarization from PARENT to ROOT.
2206 Creating a replacement for a scalar access is considered beneficial if its
2207 grp_hint is set (this means we are either attempting total scalarization or
2208 there is more than one direct read access) or according to the following
2209 table:
2211 Access written to through a scalar type (once or more times)
2213 | Written to in an assignment statement
2215 | | Access read as scalar _once_
2216 | | |
2217 | | | Read in an assignment statement
2218 | | | |
2219 | | | | Scalarize Comment
2220 -----------------------------------------------------------------------------
2221 0 0 0 0 No access for the scalar
2222 0 0 0 1 No access for the scalar
2223 0 0 1 0 No Single read - won't help
2224 0 0 1 1 No The same case
2225 0 1 0 0 No access for the scalar
2226 0 1 0 1 No access for the scalar
2227 0 1 1 0 Yes s = *g; return s.i;
2228 0 1 1 1 Yes The same case as above
2229 1 0 0 0 No Won't help
2230 1 0 0 1 Yes s.i = 1; *g = s;
2231 1 0 1 0 Yes s.i = 5; g = s.i;
2232 1 0 1 1 Yes The same case as above
2233 1 1 0 0 No Won't help.
2234 1 1 0 1 Yes s.i = 1; *g = s;
2235 1 1 1 0 Yes s = *g; return s.i;
2236 1 1 1 1 Yes Any of the above yeses */
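/* To make one row of the table concrete, "1 0 1 0 Yes" describes

     s.i = 5;
     g = s.i;

   where the field is written through a scalar type and read once as a
   scalar: a replacement turns both statements into plain register
   operations.  By contrast "0 0 1 0", a single scalar read and nothing
   else, would merely rename one load, so no replacement is made.  */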
2238 static bool
2239 analyze_access_subtree (struct access *root, struct access *parent,
2240 bool allow_replacements)
2242 struct access *child;
2243 HOST_WIDE_INT limit = root->offset + root->size;
2244 HOST_WIDE_INT covered_to = root->offset;
2245 bool scalar = is_gimple_reg_type (root->type);
2246 bool hole = false, sth_created = false;
2248 if (parent)
2250 if (parent->grp_read)
2251 root->grp_read = 1;
2252 if (parent->grp_assignment_read)
2253 root->grp_assignment_read = 1;
2254 if (parent->grp_write)
2255 root->grp_write = 1;
2256 if (parent->grp_assignment_write)
2257 root->grp_assignment_write = 1;
2258 if (parent->grp_total_scalarization)
2259 root->grp_total_scalarization = 1;
2262 if (root->grp_unscalarizable_region)
2263 allow_replacements = false;
2265 if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
2266 allow_replacements = false;
2268 for (child = root->first_child; child; child = child->next_sibling)
2270 hole |= covered_to < child->offset;
2271 sth_created |= analyze_access_subtree (child, root,
2272 allow_replacements && !scalar);
2274 root->grp_unscalarized_data |= child->grp_unscalarized_data;
2275 root->grp_total_scalarization &= child->grp_total_scalarization;
2276 if (child->grp_covered)
2277 covered_to += child->size;
2278 else
2279 hole = true;
2282 if (allow_replacements && scalar && !root->first_child
2283 && (root->grp_hint
2284 || ((root->grp_scalar_read || root->grp_assignment_read)
2285 && (root->grp_scalar_write || root->grp_assignment_write))))
2287 /* Always create access replacements that cover the whole access.
2288 For integral types this means the precision has to match.
2289 Avoid assumptions based on the integral type kind, too. */
2290 if (INTEGRAL_TYPE_P (root->type)
2291 && (TREE_CODE (root->type) != INTEGER_TYPE
2292 || TYPE_PRECISION (root->type) != root->size)
2293 /* But leave bitfield accesses alone. */
2294 && (TREE_CODE (root->expr) != COMPONENT_REF
2295 || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
2297 tree rt = root->type;
2298 gcc_assert ((root->offset % BITS_PER_UNIT) == 0
2299 && (root->size % BITS_PER_UNIT) == 0);
2300 root->type = build_nonstandard_integer_type (root->size,
2301 TYPE_UNSIGNED (rt));
2302 root->expr = build_ref_for_offset (UNKNOWN_LOCATION,
2303 root->base, root->offset,
2304 root->type, NULL, false);
2306 if (dump_file && (dump_flags & TDF_DETAILS))
2308 fprintf (dump_file, "Changing the type of a replacement for ");
2309 print_generic_expr (dump_file, root->base, 0);
2310 fprintf (dump_file, " offset: %u, size: %u ",
2311 (unsigned) root->offset, (unsigned) root->size);
2312 fprintf (dump_file, " to an integer.\n");
2316 root->grp_to_be_replaced = 1;
2317 root->replacement_decl = create_access_replacement (root);
2318 sth_created = true;
2319 hole = false;
2321 else
2323 if (allow_replacements
2324 && scalar && !root->first_child
2325 && (root->grp_scalar_write || root->grp_assignment_write)
2326 && !bitmap_bit_p (cannot_scalarize_away_bitmap,
2327 DECL_UID (root->base)))
2329 gcc_checking_assert (!root->grp_scalar_read
2330 && !root->grp_assignment_read);
2331 sth_created = true;
2332 if (MAY_HAVE_DEBUG_STMTS)
2334 root->grp_to_be_debug_replaced = 1;
2335 root->replacement_decl = create_access_replacement (root);
2339 if (covered_to < limit)
2340 hole = true;
2341 if (scalar)
2342 root->grp_total_scalarization = 0;
2345 if (!hole || root->grp_total_scalarization)
2346 root->grp_covered = 1;
2347 else if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
2348 root->grp_unscalarized_data = 1; /* not covered and written to */
2349 return sth_created;
2352 /* Analyze all access trees linked by next_grp by means of
2353 analyze_access_subtree. */
2354 static bool
2355 analyze_access_trees (struct access *access)
2357 bool ret = false;
2359 while (access)
2361 if (analyze_access_subtree (access, NULL, true))
2362 ret = true;
2363 access = access->next_grp;
2366 return ret;
2369 /* Return true iff a potential new child of LACC at offset NORM_OFFSET and with
2370 size SIZE would conflict with an already existing one. If exactly such a
2371 child already exists in LACC, store a pointer to it in EXACT_MATCH. */
2373 static bool
2374 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
2375 HOST_WIDE_INT size, struct access **exact_match)
2377 struct access *child;
2379 for (child = lacc->first_child; child; child = child->next_sibling)
2381 if (child->offset == norm_offset && child->size == size)
2383 *exact_match = child;
2384 return true;
2387 if (child->offset < norm_offset + size
2388 && child->offset + child->size > norm_offset)
2389 return true;
2392 return false;
2395 /* Create a new child access of PARENT, with all properties just like MODEL
2396 except for its offset and with its grp_write false and grp_read true.
2397 Return the new access or NULL if it cannot be created. Note that this
2398 access is created long after all splicing and sorting; it is not located in
2399 any access vector and is automatically a representative of its group. */
2401 static struct access *
2402 create_artificial_child_access (struct access *parent, struct access *model,
2403 HOST_WIDE_INT new_offset)
2405 struct access **child;
2406 tree expr = parent->base;
2408 gcc_assert (!model->grp_unscalarizable_region);
2410 struct access *access = new struct access ();
2411 memset (access, 0, sizeof (struct access));
2412 if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2413 model->type))
2415 access->grp_no_warning = true;
2416 expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2417 new_offset, model, NULL, false);
2420 access->base = parent->base;
2421 access->expr = expr;
2422 access->offset = new_offset;
2423 access->size = model->size;
2424 access->type = model->type;
2425 access->grp_write = true;
2426 access->grp_read = false;
2428 child = &parent->first_child;
2429 while (*child && (*child)->offset < new_offset)
2430 child = &(*child)->next_sibling;
2432 access->next_sibling = *child;
2433 *child = access;
2435 return access;
2439 /* Propagate all subaccesses of RACC across an assignment link to LACC. Return
2440 true if any new subaccess was created. Additionally, if RACC is a scalar
2441 access but LACC is not, change the type of the latter, if possible. */
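/* A sketch of what the propagation achieves: for two candidate aggregates
   assigned as

     l = r;

   where only  r.f  has a scalar access so far, an artificial child access
   corresponding to  l.f  is created via create_artificial_child_access
   above, so that the copy can later be performed between scalar
   replacements instead of through memory.  */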
2443 static bool
2444 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
2446 struct access *rchild;
2447 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2448 bool ret = false;
2450 if (is_gimple_reg_type (lacc->type)
2451 || lacc->grp_unscalarizable_region
2452 || racc->grp_unscalarizable_region)
2453 return false;
2455 if (is_gimple_reg_type (racc->type))
2457 if (!lacc->first_child && !racc->first_child)
2459 tree t = lacc->base;
2461 lacc->type = racc->type;
2462 if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
2463 lacc->offset, racc->type))
2464 lacc->expr = t;
2465 else
2467 lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2468 lacc->base, lacc->offset,
2469 racc, NULL, false);
2470 lacc->grp_no_warning = true;
2473 return false;
2476 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2478 struct access *new_acc = NULL;
2479 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2481 if (rchild->grp_unscalarizable_region)
2482 continue;
2484 if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2485 &new_acc))
2487 if (new_acc)
2489 rchild->grp_hint = 1;
2490 new_acc->grp_hint |= new_acc->grp_read;
2491 if (rchild->first_child)
2492 ret |= propagate_subaccesses_across_link (new_acc, rchild);
2494 continue;
2497 rchild->grp_hint = 1;
2498 new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
2499 if (new_acc)
2501 ret = true;
2502 if (racc->first_child)
2503 propagate_subaccesses_across_link (new_acc, rchild);
2507 return ret;
2510 /* Propagate all subaccesses across assignment links. */
2512 static void
2513 propagate_all_subaccesses (void)
2515 while (work_queue_head)
2517 struct access *racc = pop_access_from_work_queue ();
2518 struct assign_link *link;
2520 gcc_assert (racc->first_link);
2522 for (link = racc->first_link; link; link = link->next)
2524 struct access *lacc = link->lacc;
2526 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2527 continue;
2528 lacc = lacc->group_representative;
2529 if (propagate_subaccesses_across_link (lacc, racc)
2530 && lacc->first_link)
2531 add_access_to_work_queue (lacc);
2536 /* Go through all accesses collected throughout the (intraprocedural) analysis
2537 stage, exclude overlapping ones, identify representatives and build trees
2538 out of them, making decisions about scalarization on the way. Return true
2539 iff there are any to-be-scalarized variables after this stage. */
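/* A note on the total-scalarization size cap computed below: if the user
   did not explicitly set PARAM_SRA_MAX_SCALARIZATION_SIZE_SPEED (or the
   _SIZE variant when optimizing for size), the limit falls back to the
   target's move ratio.  For example, with a move ratio of 8 and 8-byte
   words the cap works out to 8 * 8 = 64 bytes, i.e. 512 bits after the
   multiplication by BITS_PER_UNIT.  */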
2541 static bool
2542 analyze_all_variable_accesses (void)
2544 int res = 0;
2545 bitmap tmp = BITMAP_ALLOC (NULL);
2546 bitmap_iterator bi;
2547 unsigned i;
2548 bool optimize_speed_p = !optimize_function_for_size_p (cfun);
2550 enum compiler_param param = optimize_speed_p
2551 ? PARAM_SRA_MAX_SCALARIZATION_SIZE_SPEED
2552 : PARAM_SRA_MAX_SCALARIZATION_SIZE_SIZE;
2554 /* If the user didn't set PARAM_SRA_MAX_SCALARIZATION_SIZE_<...>,
2555 fall back to a target default. */
2556 unsigned HOST_WIDE_INT max_scalarization_size
2557 = global_options_set.x_param_values[param]
2558 ? PARAM_VALUE (param)
2559 : get_move_ratio (optimize_speed_p) * UNITS_PER_WORD;
2561 max_scalarization_size *= BITS_PER_UNIT;
2563 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2564 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2565 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2567 tree var = candidate (i);
2569 if (TREE_CODE (var) == VAR_DECL
2570 && type_consists_of_records_p (TREE_TYPE (var)))
2572 if (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var)))
2573 <= max_scalarization_size)
2575 completely_scalarize_var (var);
2576 if (dump_file && (dump_flags & TDF_DETAILS))
2578 fprintf (dump_file, "Will attempt to totally scalarize ");
2579 print_generic_expr (dump_file, var, 0);
2580 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2583 else if (dump_file && (dump_flags & TDF_DETAILS))
2585 fprintf (dump_file, "Too big to totally scalarize: ");
2586 print_generic_expr (dump_file, var, 0);
2587 fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
2592 bitmap_copy (tmp, candidate_bitmap);
2593 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2595 tree var = candidate (i);
2596 struct access *access;
2598 access = sort_and_splice_var_accesses (var);
2599 if (!access || !build_access_trees (access))
2600 disqualify_candidate (var,
2601 "No or inhibitingly overlapping accesses.");
2604 propagate_all_subaccesses ();
2606 bitmap_copy (tmp, candidate_bitmap);
2607 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2609 tree var = candidate (i);
2610 struct access *access = get_first_repr_for_decl (var);
2612 if (analyze_access_trees (access))
2614 res++;
2615 if (dump_file && (dump_flags & TDF_DETAILS))
2617 fprintf (dump_file, "\nAccess trees for ");
2618 print_generic_expr (dump_file, var, 0);
2619 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2620 dump_access_tree (dump_file, access);
2621 fprintf (dump_file, "\n");
2624 else
2625 disqualify_candidate (var, "No scalar replacements to be created.");
2628 BITMAP_FREE (tmp);
2630 if (res)
2632 statistics_counter_event (cfun, "Scalarized aggregates", res);
2633 return true;
2635 else
2636 return false;
2639 /* Generate statements copying scalar replacements of accesses within a subtree
2640 into or out of AGG. ACCESS, all its children, siblings and their children
2641 are to be processed. AGG is an aggregate type expression (can be a
2642 declaration but does not have to be, it can for example also be a mem_ref or
2643 a series of handled components). TOP_OFFSET is the offset of the processed
2644 subtree which has to be subtracted from offsets of individual accesses to
2645 get corresponding offsets for AGG. If CHUNK_SIZE is non-zero, copy only
2646 replacements in the interval <START_OFFSET, START_OFFSET + CHUNK_SIZE>,
2647 otherwise copy all. GSI is a statement iterator used to place the new
2648 statements. WRITE should be true when the statements should write from AGG
2649 to the replacement and false if vice versa. If INSERT_AFTER is true, new
2650 statements will be added after the current statement in GSI; otherwise they
2651 will be added before it. */
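/* A sketch with hypothetical replacement names: if  s  has been scalarized
   into SR.1 and SR.2 for its two fields, then for a statement

     *p = s;

   calling this function with WRITE false inserts, before the statement,

     s.x = SR.1;
     s.y = SR.2;

   refreshing the aggregate so that the prevailing copy stores up-to-date
   data; with WRITE true the direction is reversed and the replacements are
   loaded from AGG.  */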
2653 static void
2654 generate_subtree_copies (struct access *access, tree agg,
2655 HOST_WIDE_INT top_offset,
2656 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2657 gimple_stmt_iterator *gsi, bool write,
2658 bool insert_after, location_t loc)
2662 if (chunk_size && access->offset >= start_offset + chunk_size)
2663 return;
2665 if (access->grp_to_be_replaced
2666 && (chunk_size == 0
2667 || access->offset + access->size > start_offset))
2669 tree expr, repl = get_access_replacement (access);
2670 gassign *stmt;
2672 expr = build_ref_for_model (loc, agg, access->offset - top_offset,
2673 access, gsi, insert_after);
2675 if (write)
2677 if (access->grp_partial_lhs)
2678 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2679 !insert_after,
2680 insert_after ? GSI_NEW_STMT
2681 : GSI_SAME_STMT);
2682 stmt = gimple_build_assign (repl, expr);
2684 else
2686 TREE_NO_WARNING (repl) = 1;
2687 if (access->grp_partial_lhs)
2688 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2689 !insert_after,
2690 insert_after ? GSI_NEW_STMT
2691 : GSI_SAME_STMT);
2692 stmt = gimple_build_assign (expr, repl);
2694 gimple_set_location (stmt, loc);
2696 if (insert_after)
2697 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2698 else
2699 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2700 update_stmt (stmt);
2701 sra_stats.subtree_copies++;
2703 else if (write
2704 && access->grp_to_be_debug_replaced
2705 && (chunk_size == 0
2706 || access->offset + access->size > start_offset))
2708 gdebug *ds;
2709 tree drhs = build_debug_ref_for_model (loc, agg,
2710 access->offset - top_offset,
2711 access);
2712 ds = gimple_build_debug_bind (get_access_replacement (access),
2713 drhs, gsi_stmt (*gsi));
2714 if (insert_after)
2715 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2716 else
2717 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2720 if (access->first_child)
2721 generate_subtree_copies (access->first_child, agg, top_offset,
2722 start_offset, chunk_size, gsi,
2723 write, insert_after, loc);
2725 access = access->next_sibling;
2727 while (access);
2730 /* Assign zero to all scalar replacements in an access subtree. ACCESS is
2731 the root of the subtree to be processed. GSI is the statement iterator used
2732 for inserting statements which are added after the current statement if
2733 INSERT_AFTER is true or before it otherwise. */
2735 static void
2736 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2737 bool insert_after, location_t loc)
2740 struct access *child;
2742 if (access->grp_to_be_replaced)
2744 gassign *stmt;
2746 stmt = gimple_build_assign (get_access_replacement (access),
2747 build_zero_cst (access->type));
2748 if (insert_after)
2749 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2750 else
2751 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2752 update_stmt (stmt);
2753 gimple_set_location (stmt, loc);
2755 else if (access->grp_to_be_debug_replaced)
2757 gdebug *ds
2758 = gimple_build_debug_bind (get_access_replacement (access),
2759 build_zero_cst (access->type),
2760 gsi_stmt (*gsi));
2761 if (insert_after)
2762 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2763 else
2764 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2767 for (child = access->first_child; child; child = child->next_sibling)
2768 init_subtree_with_zero (child, gsi, insert_after, loc);
2771 /* Clobber all scalar replacements in an access subtree. ACCESS is the
2772 root of the subtree to be processed. GSI is the statement iterator used
2773 for inserting statements which are added after the current statement if
2774 INSERT_AFTER is true or before it otherwise. */
2776 static void
2777 clobber_subtree (struct access *access, gimple_stmt_iterator *gsi,
2778 bool insert_after, location_t loc)
2781 struct access *child;
2783 if (access->grp_to_be_replaced)
2785 tree rep = get_access_replacement (access);
2786 tree clobber = build_constructor (access->type, NULL);
2787 TREE_THIS_VOLATILE (clobber) = 1;
2788 gimple stmt = gimple_build_assign (rep, clobber);
2790 if (insert_after)
2791 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2792 else
2793 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2794 update_stmt (stmt);
2795 gimple_set_location (stmt, loc);
2798 for (child = access->first_child; child; child = child->next_sibling)
2799 clobber_subtree (child, gsi, insert_after, loc);
2802 /* Search for an access representative for the given expression EXPR and
2803 return it or NULL if it cannot be found. */
2805 static struct access *
2806 get_access_for_expr (tree expr)
2808 HOST_WIDE_INT offset, size, max_size;
2809 tree base;
2811 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
2812 a different size than the size of its argument and we need the latter
2813 one. */
2814 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2815 expr = TREE_OPERAND (expr, 0);
2817 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
2818 if (max_size == -1 || !DECL_P (base))
2819 return NULL;
2821 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
2822 return NULL;
2824 return get_var_base_offset_size_access (base, offset, max_size);
2827 /* Replace the expression EXPR with a scalar replacement if there is one and
2828 generate other statements to do type conversion or subtree copying if
2829 necessary. GSI is used to place newly created statements, WRITE is true if
2830 the expression is being written to (it is on a LHS of a statement or output
2831 in an assembly statement). */
2833 static bool
2834 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
2836 location_t loc;
2837 struct access *access;
2838 tree type, bfr, orig_expr;
2840 if (TREE_CODE (*expr) == BIT_FIELD_REF)
2842 bfr = *expr;
2843 expr = &TREE_OPERAND (*expr, 0);
2845 else
2846 bfr = NULL_TREE;
2848 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
2849 expr = &TREE_OPERAND (*expr, 0);
2850 access = get_access_for_expr (*expr);
2851 if (!access)
2852 return false;
2853 type = TREE_TYPE (*expr);
2854 orig_expr = *expr;
2856 loc = gimple_location (gsi_stmt (*gsi));
2857 gimple_stmt_iterator alt_gsi = gsi_none ();
2858 if (write && stmt_ends_bb_p (gsi_stmt (*gsi)))
2860 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
2861 gsi = &alt_gsi;
2864 if (access->grp_to_be_replaced)
2866 tree repl = get_access_replacement (access);
2867 /* If we replace a non-register typed access simply use the original
2868 access expression to extract the scalar component afterwards.
2869 This happens if scalarizing a function return value or parameter
2870 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
2871 gcc.c-torture/compile/20011217-1.c.
2873 We also want to use this when accessing a complex or vector which can
2874 be accessed as a different type too, potentially creating a need for
2875 type conversion (see PR42196) and when scalarized unions are involved
2876 in assembler statements (see PR42398). */
2877 if (!useless_type_conversion_p (type, access->type))
2879 tree ref;
2881 ref = build_ref_for_model (loc, orig_expr, 0, access, gsi, false);
2883 if (write)
2885 gassign *stmt;
2887 if (access->grp_partial_lhs)
2888 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
2889 false, GSI_NEW_STMT);
2890 stmt = gimple_build_assign (repl, ref);
2891 gimple_set_location (stmt, loc);
2892 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2894 else
2896 gassign *stmt;
2898 if (access->grp_partial_lhs)
2899 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2900 true, GSI_SAME_STMT);
2901 stmt = gimple_build_assign (ref, repl);
2902 gimple_set_location (stmt, loc);
2903 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2906 else
2907 *expr = repl;
2908 sra_stats.exprs++;
2910 else if (write && access->grp_to_be_debug_replaced)
2912 gdebug *ds = gimple_build_debug_bind (get_access_replacement (access),
2913 NULL_TREE,
2914 gsi_stmt (*gsi));
2915 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2918 if (access->first_child)
2920 HOST_WIDE_INT start_offset, chunk_size;
2921 if (bfr
2922 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 1))
2923 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 2)))
2925 chunk_size = tree_to_uhwi (TREE_OPERAND (bfr, 1));
2926 start_offset = access->offset
2927 + tree_to_uhwi (TREE_OPERAND (bfr, 2));
2929 else
2930 start_offset = chunk_size = 0;
2932 generate_subtree_copies (access->first_child, orig_expr, access->offset,
2933 start_offset, chunk_size, gsi, write, write,
2934 loc);
2936 return true;
2939 /* Where scalar replacements of the RHS have been written to when a replacement
2940 of the LHS of an assignment cannot be directly loaded from a replacement of
2941 the RHS. */
2942 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
2943 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
2944 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
2946 struct subreplacement_assignment_data
2948 /* Offset of the access representing the lhs of the assignment. */
2949 HOST_WIDE_INT left_offset;
2951 /* LHS and RHS of the original assignment. */
2952 tree assignment_lhs, assignment_rhs;
2954 /* Access representing the rhs of the whole assignment. */
2955 struct access *top_racc;
2957 /* Stmt iterator used for statement insertions after the original assignment.
2958 It points to the main GSI used to traverse a BB during function body
2959 modification. */
2960 gimple_stmt_iterator *new_gsi;
2962 /* Stmt iterator used for statement insertions before the original
2963 assignment. Keeps on pointing to the original statement. */
2964 gimple_stmt_iterator old_gsi;
2966 /* Location of the assignment. */
2967 location_t loc;
2969 /* Keeps the information whether we have needed to refresh replacements of
2970 the LHS and from which side of the assignment this takes place. */
2971 enum unscalarized_data_handling refreshed;
2974 /* Store all replacements in the access tree rooted in SAD->top_racc either to
2975 their base aggregate if there are unscalarized data or directly to the LHS
2976 of the statement that is pointed to by SAD->old_gsi otherwise. */
2978 static void
2979 handle_unscalarized_data_in_subtree (struct subreplacement_assignment_data *sad)
2981 tree src;
2982 if (sad->top_racc->grp_unscalarized_data)
2984 src = sad->assignment_rhs;
2985 sad->refreshed = SRA_UDH_RIGHT;
2987 else
2989 src = sad->assignment_lhs;
2990 sad->refreshed = SRA_UDH_LEFT;
2992 generate_subtree_copies (sad->top_racc->first_child, src,
2993 sad->top_racc->offset, 0, 0,
2994 &sad->old_gsi, false, false, sad->loc);
2997 /* Try to generate statements to load all sub-replacements in an access subtree
2998 formed by children of LACC from scalar replacements in the SAD->top_racc
2999 subtree. If that is not possible, refresh the SAD->top_racc base aggregate
3000 and load the accesses from it. */
3002 static void
3003 load_assign_lhs_subreplacements (struct access *lacc,
3004 struct subreplacement_assignment_data *sad)
3006 for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
3008 HOST_WIDE_INT offset;
3009 offset = lacc->offset - sad->left_offset + sad->top_racc->offset;
3011 if (lacc->grp_to_be_replaced)
3013 struct access *racc;
3014 gassign *stmt;
3015 tree rhs;
3017 racc = find_access_in_subtree (sad->top_racc, offset, lacc->size);
3018 if (racc && racc->grp_to_be_replaced)
3020 rhs = get_access_replacement (racc);
3021 if (!useless_type_conversion_p (lacc->type, racc->type))
3022 rhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
3023 lacc->type, rhs);
3025 if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
3026 rhs = force_gimple_operand_gsi (&sad->old_gsi, rhs, true,
3027 NULL_TREE, true, GSI_SAME_STMT);
3029 else
3031 /* No suitable access on the right hand side, need to load from
3032 the aggregate. See if we have to update it first... */
3033 if (sad->refreshed == SRA_UDH_NONE)
3034 handle_unscalarized_data_in_subtree (sad);
3036 if (sad->refreshed == SRA_UDH_LEFT)
3037 rhs = build_ref_for_model (sad->loc, sad->assignment_lhs,
3038 lacc->offset - sad->left_offset,
3039 lacc, sad->new_gsi, true);
3040 else
3041 rhs = build_ref_for_model (sad->loc, sad->assignment_rhs,
3042 lacc->offset - sad->left_offset,
3043 lacc, sad->new_gsi, true);
3044 if (lacc->grp_partial_lhs)
3045 rhs = force_gimple_operand_gsi (sad->new_gsi,
3046 rhs, true, NULL_TREE,
3047 false, GSI_NEW_STMT);
3050 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
3051 gsi_insert_after (sad->new_gsi, stmt, GSI_NEW_STMT);
3052 gimple_set_location (stmt, sad->loc);
3053 update_stmt (stmt);
3054 sra_stats.subreplacements++;
3056 else
3058 if (sad->refreshed == SRA_UDH_NONE
3059 && lacc->grp_read && !lacc->grp_covered)
3060 handle_unscalarized_data_in_subtree (sad);
3062 if (lacc && lacc->grp_to_be_debug_replaced)
3064 gdebug *ds;
3065 tree drhs;
3066 struct access *racc = find_access_in_subtree (sad->top_racc,
3067 offset,
3068 lacc->size);
3070 if (racc && racc->grp_to_be_replaced)
3072 if (racc->grp_write)
3073 drhs = get_access_replacement (racc);
3074 else
3075 drhs = NULL;
3077 else if (sad->refreshed == SRA_UDH_LEFT)
3078 drhs = build_debug_ref_for_model (sad->loc, lacc->base,
3079 lacc->offset, lacc);
3080 else if (sad->refreshed == SRA_UDH_RIGHT)
3081 drhs = build_debug_ref_for_model (sad->loc, sad->top_racc->base,
3082 offset, lacc);
3083 else
3084 drhs = NULL_TREE;
3085 if (drhs
3086 && !useless_type_conversion_p (lacc->type, TREE_TYPE (drhs)))
3087 drhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
3088 lacc->type, drhs);
3089 ds = gimple_build_debug_bind (get_access_replacement (lacc),
3090 drhs, gsi_stmt (sad->old_gsi));
3091 gsi_insert_after (sad->new_gsi, ds, GSI_NEW_STMT);
3095 if (lacc->first_child)
3096 load_assign_lhs_subreplacements (lacc, sad);
3100 /* Result code for SRA assignment modification. */
3101 enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
3102 SRA_AM_MODIFIED, /* stmt changed but not
3103 removed */
3104 SRA_AM_REMOVED }; /* stmt eliminated */
3106 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT contains a pointer
3107 to the assignment and GSI is the statement iterator pointing at it. Returns
3108 the same values as sra_modify_assign. */
3110 static enum assignment_mod_result
3111 sra_modify_constructor_assign (gimple stmt, gimple_stmt_iterator *gsi)
3113 tree lhs = gimple_assign_lhs (stmt);
3114 struct access *acc = get_access_for_expr (lhs);
3115 if (!acc)
3116 return SRA_AM_NONE;
3117 location_t loc = gimple_location (stmt);
3119 if (gimple_clobber_p (stmt))
3121 /* Clobber the replacement variable. */
3122 clobber_subtree (acc, gsi, !acc->grp_covered, loc);
3123 /* Remove clobbers of fully scalarized variables, they are dead. */
3124 if (acc->grp_covered)
3126 unlink_stmt_vdef (stmt);
3127 gsi_remove (gsi, true);
3128 release_defs (stmt);
3129 return SRA_AM_REMOVED;
3131 else
3132 return SRA_AM_MODIFIED;
3135 if (vec_safe_length (CONSTRUCTOR_ELTS (gimple_assign_rhs1 (stmt))) > 0)
3137 /* I have never seen this code path trigger but if it can happen the
3138 following should handle it gracefully. */
3139 if (access_has_children_p (acc))
3140 generate_subtree_copies (acc->first_child, lhs, acc->offset, 0, 0, gsi,
3141 true, true, loc);
3142 return SRA_AM_MODIFIED;
3145 if (acc->grp_covered)
3147 init_subtree_with_zero (acc, gsi, false, loc);
3148 unlink_stmt_vdef (stmt);
3149 gsi_remove (gsi, true);
3150 release_defs (stmt);
3151 return SRA_AM_REMOVED;
3153 else
3155 init_subtree_with_zero (acc, gsi, true, loc);
3156 return SRA_AM_MODIFIED;
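/* For instance, for an empty-CONSTRUCTOR assignment such as

     s = (struct S) {};

   a fully covered  s  has the statement removed and every scalar
   replacement assigned zero in its stead; if parts of  s  remain
   unscalarized, the zero assignments are instead inserted after the
   statement, which is kept to initialize the rest.  */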
3160 /* Create and return a new suitable default definition SSA_NAME for RACC which
3161 is an access describing an uninitialized part of an aggregate that is being
3162 loaded. */
3164 static tree
3165 get_repl_default_def_ssa_name (struct access *racc)
3167 gcc_checking_assert (!racc->grp_to_be_replaced
3168 && !racc->grp_to_be_debug_replaced);
3169 if (!racc->replacement_decl)
3170 racc->replacement_decl = create_access_replacement (racc);
3171 return get_or_create_ssa_default_def (cfun, racc->replacement_decl);
3174 /* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
3175 bit-field field declaration somewhere in it. */
3177 static inline bool
3178 contains_vce_or_bfcref_p (const_tree ref)
3180 while (handled_component_p (ref))
3182 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
3183 || (TREE_CODE (ref) == COMPONENT_REF
3184 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
3185 return true;
3186 ref = TREE_OPERAND (ref, 0);
3189 return false;
3192 /* Examine both sides of the assignment statement pointed to by STMT, replace
3193 them with a scalar replacement if there is one and generate copying of
3194 replacements if scalarized aggregates have been used in the assignment. GSI
3195 is used to hold generated statements for type conversions and subtree
3196 copying. */
3198 static enum assignment_mod_result
3199 sra_modify_assign (gimple stmt, gimple_stmt_iterator *gsi)
3201 struct access *lacc, *racc;
3202 tree lhs, rhs;
3203 bool modify_this_stmt = false;
3204 bool force_gimple_rhs = false;
3205 location_t loc;
3206 gimple_stmt_iterator orig_gsi = *gsi;
3208 if (!gimple_assign_single_p (stmt))
3209 return SRA_AM_NONE;
3210 lhs = gimple_assign_lhs (stmt);
3211 rhs = gimple_assign_rhs1 (stmt);
3213 if (TREE_CODE (rhs) == CONSTRUCTOR)
3214 return sra_modify_constructor_assign (stmt, gsi);
3216 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
3217 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
3218 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
3220 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (stmt),
3221 gsi, false);
3222 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (stmt),
3223 gsi, true);
3224 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3227 lacc = get_access_for_expr (lhs);
3228 racc = get_access_for_expr (rhs);
3229 if (!lacc && !racc)
3230 return SRA_AM_NONE;
3232 loc = gimple_location (stmt);
3233 if (lacc && lacc->grp_to_be_replaced)
3235 lhs = get_access_replacement (lacc);
3236 gimple_assign_set_lhs (stmt, lhs);
3237 modify_this_stmt = true;
3238 if (lacc->grp_partial_lhs)
3239 force_gimple_rhs = true;
3240 sra_stats.exprs++;
3243 if (racc && racc->grp_to_be_replaced)
3245 rhs = get_access_replacement (racc);
3246 modify_this_stmt = true;
3247 if (racc->grp_partial_lhs)
3248 force_gimple_rhs = true;
3249 sra_stats.exprs++;
3251 else if (racc
3252 && !racc->grp_unscalarized_data
3253 && TREE_CODE (lhs) == SSA_NAME
3254 && !access_has_replacements_p (racc))
3256 rhs = get_repl_default_def_ssa_name (racc);
3257 modify_this_stmt = true;
3258 sra_stats.exprs++;
3261 if (modify_this_stmt)
3263 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3265 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
3266 ??? This should move to fold_stmt which we simply should
3267 call after building a VIEW_CONVERT_EXPR here. */
3268 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
3269 && !contains_bitfld_component_ref_p (lhs))
3271 lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
3272 gimple_assign_set_lhs (stmt, lhs);
3274 else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
3275 && !contains_vce_or_bfcref_p (rhs))
3276 rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
3278 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3280 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
3281 rhs);
3282 if (is_gimple_reg_type (TREE_TYPE (lhs))
3283 && TREE_CODE (lhs) != SSA_NAME)
3284 force_gimple_rhs = true;
3289 if (lacc && lacc->grp_to_be_debug_replaced)
3291 tree dlhs = get_access_replacement (lacc);
3292 tree drhs = unshare_expr (rhs);
3293 if (!useless_type_conversion_p (TREE_TYPE (dlhs), TREE_TYPE (drhs)))
3295 if (AGGREGATE_TYPE_P (TREE_TYPE (drhs))
3296 && !contains_vce_or_bfcref_p (drhs))
3297 drhs = build_debug_ref_for_model (loc, drhs, 0, lacc);
3298 if (drhs
3299 && !useless_type_conversion_p (TREE_TYPE (dlhs),
3300 TREE_TYPE (drhs)))
3301 drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
3302 TREE_TYPE (dlhs), drhs);
3304 gdebug *ds = gimple_build_debug_bind (dlhs, drhs, stmt);
3305 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3308 /* From this point on, the function deals with assignments between
3309 aggregates when at least one has scalar reductions of some of its
3310 components. There are three possible scenarios: 1) both the LHS and RHS have
3311 to-be-scalarized components, 2) only the RHS has or 3) only the LHS has.
3313 In the first case, we would like to load the LHS components from RHS
3314 components whenever possible. If that is not possible, we would like to
3315 read it directly from the RHS (after updating it by storing in it its own
3316 components). If there are some necessary unscalarized data in the LHS,
3317 those will be loaded by the original assignment too. If neither of these
3318 cases happen, the original statement can be removed. Most of this is done
3319 by load_assign_lhs_subreplacements.
3321 In the second case, we would like to store all RHS scalarized components
3322 directly into LHS and if they cover the aggregate completely, remove the
3323 statement too. In the third case, we want the LHS components to be loaded
3324 directly from the RHS (DSE will remove the original statement if it
3325 becomes redundant).
3327 This is a bit complex but manageable when types match and when unions do
3328 not cause confusion in a way that we cannot really load a component of LHS
3329 from the RHS or vice versa (the access representing this level can have
3330 subaccesses that are accessible only through a different union field at a
3331 higher level - different from the one used in the examined expression).
3332 Unions are fun.
3334 Therefore, I specially handle a fourth case, happening when there is a
3335 specific type cast or it is impossible to locate a scalarized subaccess on
3336 the other side of the expression. If that happens, I simply "refresh" the
3337 RHS by storing into it its scalarized components, leave the original statement
3338 there to do the copying and then load the scalar replacements of the LHS.
3339 This is what the first branch does. */
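/* A small illustration of that fourth case, with made-up union members:

     union U { struct A a; struct B b; } u;
     ...
     x = u.b;

   If the scalarized subaccesses of  u  are only reachable through  u.a,
   no per-component copy from the RHS can be built, so the replacements
   are first flushed back into  u, the aggregate copy is left in place and
   the components of  x  are then re-loaded from it.  */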
3341 if (modify_this_stmt
3342 || gimple_has_volatile_ops (stmt)
3343 || contains_vce_or_bfcref_p (rhs)
3344 || contains_vce_or_bfcref_p (lhs)
3345 || stmt_ends_bb_p (stmt))
3347 if (access_has_children_p (racc))
3348 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
3349 gsi, false, false, loc);
3350 if (access_has_children_p (lacc))
3352 gimple_stmt_iterator alt_gsi = gsi_none ();
3353 if (stmt_ends_bb_p (stmt))
3355 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
3356 gsi = &alt_gsi;
3358 generate_subtree_copies (lacc->first_child, lhs, lacc->offset, 0, 0,
3359 gsi, true, true, loc);
3361 sra_stats.separate_lhs_rhs_handling++;
3363 /* This gimplification must be done after generate_subtree_copies,
3364 lest we insert the subtree copies in the middle of the gimplified
3365 sequence. */
3366 if (force_gimple_rhs)
3367 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
3368 true, GSI_SAME_STMT);
3369 if (gimple_assign_rhs1 (stmt) != rhs)
3371 modify_this_stmt = true;
3372 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
3373 gcc_assert (stmt == gsi_stmt (orig_gsi));
3376 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3378 else
3380 if (access_has_children_p (lacc)
3381 && access_has_children_p (racc)
3382 /* When an access represents an unscalarizable region, it usually
3383 represents accesses with variable offset and thus must not be used
3384 to generate new memory accesses. */
3385 && !lacc->grp_unscalarizable_region
3386 && !racc->grp_unscalarizable_region)
3388 struct subreplacement_assignment_data sad;
3390 sad.left_offset = lacc->offset;
3391 sad.assignment_lhs = lhs;
3392 sad.assignment_rhs = rhs;
3393 sad.top_racc = racc;
3394 sad.old_gsi = *gsi;
3395 sad.new_gsi = gsi;
3396 sad.loc = gimple_location (stmt);
3397 sad.refreshed = SRA_UDH_NONE;
3399 if (lacc->grp_read && !lacc->grp_covered)
3400 handle_unscalarized_data_in_subtree (&sad);
3402 load_assign_lhs_subreplacements (lacc, &sad);
3403 if (sad.refreshed != SRA_UDH_RIGHT)
3405 gsi_next (gsi);
3406 unlink_stmt_vdef (stmt);
3407 gsi_remove (&sad.old_gsi, true);
3408 release_defs (stmt);
3409 sra_stats.deleted++;
3410 return SRA_AM_REMOVED;
3413 else
3415 if (access_has_children_p (racc)
3416 && !racc->grp_unscalarized_data)
3418 if (dump_file)
3420 fprintf (dump_file, "Removing load: ");
3421 print_gimple_stmt (dump_file, stmt, 0, 0);
3423 generate_subtree_copies (racc->first_child, lhs,
3424 racc->offset, 0, 0, gsi,
3425 false, false, loc);
3426 gcc_assert (stmt == gsi_stmt (*gsi));
3427 unlink_stmt_vdef (stmt);
3428 gsi_remove (gsi, true);
3429 release_defs (stmt);
3430 sra_stats.deleted++;
3431 return SRA_AM_REMOVED;
3433 /* Restore the aggregate RHS from its components so the
3434 prevailing aggregate copy does the right thing. */
3435 if (access_has_children_p (racc))
3436 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
3437 gsi, false, false, loc);
3438 /* Re-load the components of the aggregate copy destination.
3439 But use the RHS aggregate to load from to expose more
3440 optimization opportunities. */
3441 if (access_has_children_p (lacc))
3442 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
3443 0, 0, gsi, true, true, loc);
3446 return SRA_AM_NONE;
3450 /* Traverse the function body and perform all modifications as decided in
3451 analyze_all_variable_accesses. Return true iff the CFG has been
3452 changed. */
3454 static bool
3455 sra_modify_function_body (void)
3457 bool cfg_changed = false;
3458 basic_block bb;
3460 FOR_EACH_BB_FN (bb, cfun)
3462 gimple_stmt_iterator gsi = gsi_start_bb (bb);
3463 while (!gsi_end_p (gsi))
3465 gimple stmt = gsi_stmt (gsi);
3466 enum assignment_mod_result assign_result;
3467 bool modified = false, deleted = false;
3468 tree *t;
3469 unsigned i;
3471 switch (gimple_code (stmt))
3473 case GIMPLE_RETURN:
3474 t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
3475 if (*t != NULL_TREE)
3476 modified |= sra_modify_expr (t, &gsi, false);
3477 break;
3479 case GIMPLE_ASSIGN:
3480 assign_result = sra_modify_assign (stmt, &gsi);
3481 modified |= assign_result == SRA_AM_MODIFIED;
3482 deleted = assign_result == SRA_AM_REMOVED;
3483 break;
3485 case GIMPLE_CALL:
3486 /* Operands must be processed before the lhs. */
3487 for (i = 0; i < gimple_call_num_args (stmt); i++)
3489 t = gimple_call_arg_ptr (stmt, i);
3490 modified |= sra_modify_expr (t, &gsi, false);
3493 if (gimple_call_lhs (stmt))
3495 t = gimple_call_lhs_ptr (stmt);
3496 modified |= sra_modify_expr (t, &gsi, true);
3498 break;
3500 case GIMPLE_ASM:
3502 gasm *asm_stmt = as_a <gasm *> (stmt);
3503 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
3505 t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
3506 modified |= sra_modify_expr (t, &gsi, false);
3508 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
3510 t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
3511 modified |= sra_modify_expr (t, &gsi, true);
3514 break;
3516 default:
3517 break;
3520 if (modified)
3522 update_stmt (stmt);
3523 if (maybe_clean_eh_stmt (stmt)
3524 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
3525 cfg_changed = true;
3527 if (!deleted)
3528 gsi_next (&gsi);
3532 gsi_commit_edge_inserts ();
3533 return cfg_changed;
3536 /* Generate statements initializing scalar replacements of parts of function
3537 parameters. */
3539 static void
3540 initialize_parameter_reductions (void)
3542 gimple_stmt_iterator gsi;
3543 gimple_seq seq = NULL;
3544 tree parm;
3546 gsi = gsi_start (seq);
3547 for (parm = DECL_ARGUMENTS (current_function_decl);
3548 parm;
3549 parm = DECL_CHAIN (parm))
3551 vec<access_p> *access_vec;
3552 struct access *access;
3554 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3555 continue;
3556 access_vec = get_base_access_vector (parm);
3557 if (!access_vec)
3558 continue;
3560 for (access = (*access_vec)[0];
3561 access;
3562 access = access->next_grp)
3563 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
3564 EXPR_LOCATION (parm));
3567 seq = gsi_seq (gsi);
3568 if (seq)
3569 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
3572 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
3573 it reveals there are components of some aggregates to be scalarized, it runs
3574 the required transformations. */
3575 static unsigned int
3576 perform_intra_sra (void)
3578 int ret = 0;
3579 sra_initialize ();
3581 if (!find_var_candidates ())
3582 goto out;
3584 if (!scan_function ())
3585 goto out;
3587 if (!analyze_all_variable_accesses ())
3588 goto out;
3590 if (sra_modify_function_body ())
3591 ret = TODO_update_ssa | TODO_cleanup_cfg;
3592 else
3593 ret = TODO_update_ssa;
3594 initialize_parameter_reductions ();
3596 statistics_counter_event (cfun, "Scalar replacements created",
3597 sra_stats.replacements);
3598 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
3599 statistics_counter_event (cfun, "Subtree copy stmts",
3600 sra_stats.subtree_copies);
3601 statistics_counter_event (cfun, "Subreplacement stmts",
3602 sra_stats.subreplacements);
3603 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
3604 statistics_counter_event (cfun, "Separate LHS and RHS handling",
3605 sra_stats.separate_lhs_rhs_handling);
3607 out:
3608 sra_deinitialize ();
3609 return ret;
3612 /* Perform early intraprocedural SRA. */
3613 static unsigned int
3614 early_intra_sra (void)
3616 sra_mode = SRA_MODE_EARLY_INTRA;
3617 return perform_intra_sra ();
3620 /* Perform "late" intraprocedural SRA. */
3621 static unsigned int
3622 late_intra_sra (void)
3624 sra_mode = SRA_MODE_INTRA;
3625 return perform_intra_sra ();
3629 static bool
3630 gate_intra_sra (void)
3632 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
3636 namespace {
3638 const pass_data pass_data_sra_early =
3640 GIMPLE_PASS, /* type */
3641 "esra", /* name */
3642 OPTGROUP_NONE, /* optinfo_flags */
3643 TV_TREE_SRA, /* tv_id */
3644 ( PROP_cfg | PROP_ssa ), /* properties_required */
3645 0, /* properties_provided */
3646 0, /* properties_destroyed */
3647 0, /* todo_flags_start */
3648 TODO_update_ssa, /* todo_flags_finish */
3651 class pass_sra_early : public gimple_opt_pass
3653 public:
3654 pass_sra_early (gcc::context *ctxt)
3655 : gimple_opt_pass (pass_data_sra_early, ctxt)
3658 /* opt_pass methods: */
3659 virtual bool gate (function *) { return gate_intra_sra (); }
3660 virtual unsigned int execute (function *) { return early_intra_sra (); }
3662 }; // class pass_sra_early
3664 } // anon namespace
3666 gimple_opt_pass *
3667 make_pass_sra_early (gcc::context *ctxt)
3669 return new pass_sra_early (ctxt);
3672 namespace {
3674 const pass_data pass_data_sra =
3676 GIMPLE_PASS, /* type */
3677 "sra", /* name */
3678 OPTGROUP_NONE, /* optinfo_flags */
3679 TV_TREE_SRA, /* tv_id */
3680 ( PROP_cfg | PROP_ssa ), /* properties_required */
3681 0, /* properties_provided */
3682 0, /* properties_destroyed */
3683 TODO_update_address_taken, /* todo_flags_start */
3684 TODO_update_ssa, /* todo_flags_finish */
3687 class pass_sra : public gimple_opt_pass
3689 public:
3690 pass_sra (gcc::context *ctxt)
3691 : gimple_opt_pass (pass_data_sra, ctxt)
3694 /* opt_pass methods: */
3695 virtual bool gate (function *) { return gate_intra_sra (); }
3696 virtual unsigned int execute (function *) { return late_intra_sra (); }
3698 }; // class pass_sra
3700 } // anon namespace
3702 gimple_opt_pass *
3703 make_pass_sra (gcc::context *ctxt)
3705 return new pass_sra (ctxt);
3709 /* Return true iff PARM (which must be a parm_decl) is an unused scalar
3710 parameter. */
3712 static bool
3713 is_unused_scalar_param (tree parm)
3715 tree name;
3716 return (is_gimple_reg (parm)
3717 && (!(name = ssa_default_def (cfun, parm))
3718 || has_zero_uses (name)));
3721 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
3722 examine whether there are any direct or otherwise infeasible ones. If so,
3723 return true, otherwise return false. PARM must be a gimple register with a
3724 non-NULL default definition. */
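/* Roughly, for a parameter declared as  int *p :

     tmp = *p;          uses p only through a dereference at offset zero
     q = p;             lets the pointer itself escape

   Dereference-only uses like the first are counted as acceptable below,
   while an escaping use like the second makes this function return true
   and disqualifies the parameter.  */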
3726 static bool
3727 ptr_parm_has_direct_uses (tree parm)
3729 imm_use_iterator ui;
3730 gimple stmt;
3731 tree name = ssa_default_def (cfun, parm);
3732 bool ret = false;
3734 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
3736 int uses_ok = 0;
3737 use_operand_p use_p;
3739 if (is_gimple_debug (stmt))
3740 continue;
3742 /* Valid uses include dereferences on the lhs and the rhs. */
3743 if (gimple_has_lhs (stmt))
3745 tree lhs = gimple_get_lhs (stmt);
3746 while (handled_component_p (lhs))
3747 lhs = TREE_OPERAND (lhs, 0);
3748 if (TREE_CODE (lhs) == MEM_REF
3749 && TREE_OPERAND (lhs, 0) == name
3750 && integer_zerop (TREE_OPERAND (lhs, 1))
3751 && types_compatible_p (TREE_TYPE (lhs),
3752 TREE_TYPE (TREE_TYPE (name)))
3753 && !TREE_THIS_VOLATILE (lhs))
3754 uses_ok++;
3756 if (gimple_assign_single_p (stmt))
3758 tree rhs = gimple_assign_rhs1 (stmt);
3759 while (handled_component_p (rhs))
3760 rhs = TREE_OPERAND (rhs, 0);
3761 if (TREE_CODE (rhs) == MEM_REF
3762 && TREE_OPERAND (rhs, 0) == name
3763 && integer_zerop (TREE_OPERAND (rhs, 1))
3764 && types_compatible_p (TREE_TYPE (rhs),
3765 TREE_TYPE (TREE_TYPE (name)))
3766 && !TREE_THIS_VOLATILE (rhs))
3767 uses_ok++;
3769 else if (is_gimple_call (stmt))
3771 unsigned i;
3772 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3774 tree arg = gimple_call_arg (stmt, i);
3775 while (handled_component_p (arg))
3776 arg = TREE_OPERAND (arg, 0);
3777 if (TREE_CODE (arg) == MEM_REF
3778 && TREE_OPERAND (arg, 0) == name
3779 && integer_zerop (TREE_OPERAND (arg, 1))
3780 && types_compatible_p (TREE_TYPE (arg),
3781 TREE_TYPE (TREE_TYPE (name)))
3782 && !TREE_THIS_VOLATILE (arg))
3783 uses_ok++;
3787 /* If the number of valid uses does not match the number of
3788 uses in this stmt there is an unhandled use. */
3789 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
3790 --uses_ok;
3792 if (uses_ok != 0)
3793 ret = true;
3795 if (ret)
3796 BREAK_FROM_IMM_USE_STMT (ui);
3799 return ret;
3802 /* Identify candidates for reduction for IPA-SRA based on their type and mark
3803 them in candidate_bitmap. Note that these do not necessarily include
3804 parameters which are unused and thus can be removed. Return true iff any
3805 such candidate has been found. */
3807 static bool
3808 find_param_candidates (void)
3810 tree parm;
3811 int count = 0;
3812 bool ret = false;
3813 const char *msg;
3815 for (parm = DECL_ARGUMENTS (current_function_decl);
3816 parm;
3817 parm = DECL_CHAIN (parm))
3819 tree type = TREE_TYPE (parm);
3820 tree_node **slot;
3822 count++;
3824 if (TREE_THIS_VOLATILE (parm)
3825 || TREE_ADDRESSABLE (parm)
3826 || (!is_gimple_reg_type (type) && is_va_list_type (type)))
3827 continue;
3829 if (is_unused_scalar_param (parm))
3831 ret = true;
3832 continue;
3835 if (POINTER_TYPE_P (type))
3837 type = TREE_TYPE (type);
3839 if (TREE_CODE (type) == FUNCTION_TYPE
3840 || TYPE_VOLATILE (type)
3841 || (TREE_CODE (type) == ARRAY_TYPE
3842 && TYPE_NONALIASED_COMPONENT (type))
3843 || !is_gimple_reg (parm)
3844 || is_va_list_type (type)
3845 || ptr_parm_has_direct_uses (parm))
3846 continue;
3848 else if (!AGGREGATE_TYPE_P (type))
3849 continue;
3851 if (!COMPLETE_TYPE_P (type)
3852 || !tree_fits_uhwi_p (TYPE_SIZE (type))
3853 || tree_to_uhwi (TYPE_SIZE (type)) == 0
3854 || (AGGREGATE_TYPE_P (type)
3855 && type_internals_preclude_sra_p (type, &msg)))
3856 continue;
3858 bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
3859 slot = candidates->find_slot_with_hash (parm, DECL_UID (parm), INSERT);
3860 *slot = parm;
3862 ret = true;
3863 if (dump_file && (dump_flags & TDF_DETAILS))
3865 fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
3866 print_generic_expr (dump_file, parm, 0);
3867 fprintf (dump_file, "\n");
3871 func_param_count = count;
3872 return ret;
3875 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
3876 maybe_modified. */
3878 static bool
3879 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
3880 void *data)
3882 struct access *repr = (struct access *) data;
3884 repr->grp_maybe_modified = 1;
3885 return true;
3888 /* Analyze what representatives (in linked lists accessible from
3889 REPRESENTATIVES) can be modified by side effects of statements in the
3890 current function. */
3892 static void
3893 analyze_modified_params (vec<access_p> representatives)
3895 int i;
3897 for (i = 0; i < func_param_count; i++)
3899 struct access *repr;
3901 for (repr = representatives[i];
3902 repr;
3903 repr = repr->next_grp)
3905 struct access *access;
3906 bitmap visited;
3907 ao_ref ar;
3909 if (no_accesses_p (repr))
3910 continue;
3911 if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
3912 || repr->grp_maybe_modified)
3913 continue;
3915 ao_ref_init (&ar, repr->expr);
3916 visited = BITMAP_ALLOC (NULL);
3917 for (access = repr; access; access = access->next_sibling)
3919 /* All accesses are reads; otherwise grp_maybe_modified would
3920    be trivially set.  */
3921 walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
3922 mark_maybe_modified, repr, &visited);
3923 if (repr->grp_maybe_modified)
3924 break;
3926 BITMAP_FREE (visited);
3931 /* Propagate distances in bb_dereferences in the opposite direction to the
3932    control flow edges, in each step storing the maximum of the current value
3933    and the minimum of all successors.  These steps are repeated until the table
3934    stabilizes.  Note that BBs which might terminate the function (according to
3935    the final_bbs bitmap) are never updated in this way.  */
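/* A small worked example (editorial, with made-up numbers): if BB2 and BB3
   are the only successors of BB1 and certainly dereference the first 8 and 4
   bytes of parameter 0 respectively, BB1 inherits min (8, 4) = 4 and stores
   the maximum of that and its own current value.  Iterating until nothing
   changes leaves, in the ENTRY block's row, the distance certainly
   dereferenced on every path through the function.  */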
3937 static void
3938 propagate_dereference_distances (void)
3940 basic_block bb;
3942 auto_vec<basic_block> queue (last_basic_block_for_fn (cfun));
3943 queue.quick_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
3944 FOR_EACH_BB_FN (bb, cfun)
3946 queue.quick_push (bb);
3947 bb->aux = bb;
3950 while (!queue.is_empty ())
3952 edge_iterator ei;
3953 edge e;
3954 bool change = false;
3955 int i;
3957 bb = queue.pop ();
3958 bb->aux = NULL;
3960 if (bitmap_bit_p (final_bbs, bb->index))
3961 continue;
3963 for (i = 0; i < func_param_count; i++)
3965 int idx = bb->index * func_param_count + i;
3966 bool first = true;
3967 HOST_WIDE_INT inh = 0;
3969 FOR_EACH_EDGE (e, ei, bb->succs)
3971 int succ_idx = e->dest->index * func_param_count + i;
3973 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
3974 continue;
3976 if (first)
3978 first = false;
3979 inh = bb_dereferences [succ_idx];
3981 else if (bb_dereferences [succ_idx] < inh)
3982 inh = bb_dereferences [succ_idx];
3985 if (!first && bb_dereferences[idx] < inh)
3987 bb_dereferences[idx] = inh;
3988 change = true;
3992 if (change && !bitmap_bit_p (final_bbs, bb->index))
3993 FOR_EACH_EDGE (e, ei, bb->preds)
3995 if (e->src->aux)
3996 continue;
3998 e->src->aux = e->src;
3999 queue.quick_push (e->src);
4004 /* Dump a dereferences TABLE with heading STR to file F. */
4006 static void
4007 dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
4009 basic_block bb;
4011 fprintf (f, "%s", str);
4012 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
4013 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
4015 fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
4016 if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
4018 int i;
4019 for (i = 0; i < func_param_count; i++)
4021 int idx = bb->index * func_param_count + i;
4022 fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
4025 fprintf (f, "\n");
4027 fprintf (f, "\n");
4030 /* Determine which (parts of) parameters passed by reference and never
4031    assigned to are not certainly dereferenced in this function, so that the
4032    dereferencing cannot be safely moved to the caller without potentially
4033    introducing a segfault.  Mark such REPRESENTATIVES as
4034    grp_not_necessarilly_dereferenced.
4036    Rather than a simple boolean, the maximum dereferenced "distance," i.e. the
4037    offset + size of the accessed part, is calculated for each pointer
4038    parameter, in order to handle cases when only a fraction of the whole
4039    aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
4040    an example).
4042    The maximum dereference distances for each pointer parameter and BB are
4043    already stored in bb_dereferences.  This routine simply propagates these
4044    values upwards by propagate_dereference_distances and then compares the
4045    distances of individual parameters in the ENTRY BB to the equivalent
4046    distances of each representative of a (fraction of a) parameter.  */
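/* An editorial sketch of the problem being avoided: assume a callee

     int f (int *p) { return cond ? p[1] : p[0]; }

   (cond being some hypothetical condition) and a caller that passes a
   pointer to a single int.  p[1] is dereferenced on one path only, so its
   distance does not survive propagation to the ENTRY BB and the
   corresponding representative is marked grp_not_necessarilly_dereferenced,
   preventing the caller from loading p[1] unconditionally.  */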
4048 static void
4049 analyze_caller_dereference_legality (vec<access_p> representatives)
4051 int i;
4053 if (dump_file && (dump_flags & TDF_DETAILS))
4054 dump_dereferences_table (dump_file,
4055 "Dereference table before propagation:\n",
4056 bb_dereferences);
4058 propagate_dereference_distances ();
4060 if (dump_file && (dump_flags & TDF_DETAILS))
4061 dump_dereferences_table (dump_file,
4062 "Dereference table after propagation:\n",
4063 bb_dereferences);
4065 for (i = 0; i < func_param_count; i++)
4067 struct access *repr = representatives[i];
4068 int idx = ENTRY_BLOCK_PTR_FOR_FN (cfun)->index * func_param_count + i;
4070 if (!repr || no_accesses_p (repr))
4071 continue;
4075 if ((repr->offset + repr->size) > bb_dereferences[idx])
4076 repr->grp_not_necessarilly_dereferenced = 1;
4077 repr = repr->next_grp;
4079 while (repr);
4083 /* Return the representative access for the parameter declaration PARM if it is
4084    a scalar passed by reference which is not written to and the pointer value
4085    is not used directly.  Thus, if it is legal to dereference it in the caller
4086    and we can rule out modifications through aliases, such a parameter should
4087    be turned into one passed by value.  Return NULL otherwise.  */
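/* The transformation this enables, sketched editorially (p_val is a
   hypothetical name for the new parameter):

     int f (const int *p) { return *p + 1; }      before IPA-SRA
     int f (int p_val)    { return p_val + 1; }   conceptually after  */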
4089 static struct access *
4090 unmodified_by_ref_scalar_representative (tree parm)
4092 int i, access_count;
4093 struct access *repr;
4094 vec<access_p> *access_vec;
4096 access_vec = get_base_access_vector (parm);
4097 gcc_assert (access_vec);
4098 repr = (*access_vec)[0];
4099 if (repr->write)
4100 return NULL;
4101 repr->group_representative = repr;
4103 access_count = access_vec->length ();
4104 for (i = 1; i < access_count; i++)
4106 struct access *access = (*access_vec)[i];
4107 if (access->write)
4108 return NULL;
4109 access->group_representative = repr;
4110 access->next_sibling = repr->next_sibling;
4111 repr->next_sibling = access;
4114 repr->grp_read = 1;
4115 repr->grp_scalar_ptr = 1;
4116 return repr;
4119 /* Return true iff this ACCESS precludes IPA-SRA of the parameter it is
4120 associated with. REQ_ALIGN is the minimum required alignment. */
4122 static bool
4123 access_precludes_ipa_sra_p (struct access *access, unsigned int req_align)
4125 unsigned int exp_align;
4126 /* Avoid issues such as the second simple testcase in PR 42025.  The problem
4127    is an incompatible assignment in a call statement (and possibly even in asm
4128    statements).  This can be relaxed by using a new temporary but only for
4129    non-TREE_ADDRESSABLE types and is probably not worth the complexity.  (In
4130    intraprocedural SRA we deal with this by keeping the old aggregate around,
4131    something we cannot do in IPA-SRA.)  */
4132 if (access->write
4133 && (is_gimple_call (access->stmt)
4134 || gimple_code (access->stmt) == GIMPLE_ASM))
4135 return true;
4137 exp_align = get_object_alignment (access->expr);
4138 if (exp_align < req_align)
4139 return true;
4141 return false;
4145 /* Sort collected accesses for parameter PARM, identify representatives for
4146    each accessed region and link them together.  Return NULL if there are
4147    different but overlapping accesses, the special pointer value
4148    no_accesses_representant if there are no accesses for this parameter at
4149    all, and the first representative otherwise.  Set *RO_GRP if there is a
4150    group consisting only of read (i.e. no write) accesses.  */
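/* Editorial examples of the all-or-nothing rule enforced below: two accesses
   both at offset 0 but with sizes 32 and 64 bits overlap without being
   identical, so NULL is returned; accesses at (offset 0, size 32) and
   (offset 32, size 32) are disjoint and each starts a group of its own.  */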
4152 static struct access *
4153 splice_param_accesses (tree parm, bool *ro_grp)
4155 int i, j, access_count, group_count;
4156 int agg_size, total_size = 0;
4157 struct access *access, *res, **prev_acc_ptr = &res;
4158 vec<access_p> *access_vec;
4160 access_vec = get_base_access_vector (parm);
4161 if (!access_vec)
4162 return &no_accesses_representant;
4163 access_count = access_vec->length ();
4165 access_vec->qsort (compare_access_positions);
4167 i = 0;
4168 total_size = 0;
4169 group_count = 0;
4170 while (i < access_count)
4172 bool modification;
4173 tree a1_alias_type;
4174 access = (*access_vec)[i];
4175 modification = access->write;
4176 if (access_precludes_ipa_sra_p (access, TYPE_ALIGN (access->type)))
4177 return NULL;
4178 a1_alias_type = reference_alias_ptr_type (access->expr);
4180 /* Access is about to become group representative unless we find some
4181 nasty overlap which would preclude us from breaking this parameter
4182 apart. */
4184 j = i + 1;
4185 while (j < access_count)
4187 struct access *ac2 = (*access_vec)[j];
4188 if (ac2->offset != access->offset)
4190 /* All or nothing law for parameters. */
4191 if (access->offset + access->size > ac2->offset)
4192 return NULL;
4193 else
4194 break;
4196 else if (ac2->size != access->size)
4197 return NULL;
4199 if (access_precludes_ipa_sra_p (ac2, TYPE_ALIGN (access->type))
4200 || (ac2->type != access->type
4201 && (TREE_ADDRESSABLE (ac2->type)
4202 || TREE_ADDRESSABLE (access->type)))
4203 || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
4204 return NULL;
4206 modification |= ac2->write;
4207 ac2->group_representative = access;
4208 ac2->next_sibling = access->next_sibling;
4209 access->next_sibling = ac2;
4210 j++;
4213 group_count++;
4214 access->grp_maybe_modified = modification;
4215 if (!modification)
4216 *ro_grp = true;
4217 *prev_acc_ptr = access;
4218 prev_acc_ptr = &access->next_grp;
4219 total_size += access->size;
4220 i = j;
4223 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4224 agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
4225 else
4226 agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
4227 if (total_size >= agg_size)
4228 return NULL;
4230 gcc_assert (group_count > 0);
4231 return res;
4234 /* Decide whether the parameter with representative accesses given by REPR
4235    should be reduced into components; return their number or zero.  */
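/* An editorial sketch of the decision: given

     struct S { double buf[16]; int n; };
     int f (struct S *p) { return p->n; }

   only a small part of a large aggregate is used, so total_size stays below
   both the aggregate size and the PARAM_IPA_SRA_PTR_GROWTH_FACTOR limit and
   the parameter is split into a single by-value component.  */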
4237 static int
4238 decide_one_param_reduction (struct access *repr)
4240 int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
4241 bool by_ref;
4242 tree parm;
4244 parm = repr->base;
4245 cur_parm_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
4246 gcc_assert (cur_parm_size > 0);
4248 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4250 by_ref = true;
4251 agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
4253 else
4255 by_ref = false;
4256 agg_size = cur_parm_size;
4259 if (dump_file)
4261 struct access *acc;
4262 fprintf (dump_file, "Evaluating PARAM group sizes for ");
4263 print_generic_expr (dump_file, parm, 0);
4264 fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
4265 for (acc = repr; acc; acc = acc->next_grp)
4266 dump_access (dump_file, acc, true);
4269 total_size = 0;
4270 new_param_count = 0;
4272 for (; repr; repr = repr->next_grp)
4274 gcc_assert (parm == repr->base);
4276 /* Taking the address of a non-addressable field is verboten. */
4277 if (by_ref && repr->non_addressable)
4278 return 0;
4280 /* Do not decompose a non-BLKmode param in a way that would
4281 create BLKmode params. Especially for by-reference passing
4282 (thus, pointer-type param) this is hardly worthwhile. */
4283 if (DECL_MODE (parm) != BLKmode
4284 && TYPE_MODE (repr->type) == BLKmode)
4285 return 0;
4287 if (!by_ref || (!repr->grp_maybe_modified
4288 && !repr->grp_not_necessarilly_dereferenced))
4289 total_size += repr->size;
4290 else
4291 total_size += cur_parm_size;
4293 new_param_count++;
4296 gcc_assert (new_param_count > 0);
4298 if (optimize_function_for_size_p (cfun))
4299 parm_size_limit = cur_parm_size;
4300 else
4301 parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
4302 * cur_parm_size);
4304 if (total_size < agg_size
4305 && total_size <= parm_size_limit)
4307 if (dump_file)
4308 fprintf (dump_file, " ....will be split into %i components\n",
4309 new_param_count);
4310 return new_param_count;
4312 else
4313 return 0;
4316 /* The order of the following enum values is important; we need to do extra
4317    work for UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES.  */
4318 enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
4319 MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
4321 /* Identify representatives of all accesses to all candidate parameters for
4322 IPA-SRA. Return result based on what representatives have been found. */
4324 static enum ipa_splicing_result
4325 splice_all_param_accesses (vec<access_p> &representatives)
4327 enum ipa_splicing_result result = NO_GOOD_ACCESS;
4328 tree parm;
4329 struct access *repr;
4331 representatives.create (func_param_count);
4333 for (parm = DECL_ARGUMENTS (current_function_decl);
4334 parm;
4335 parm = DECL_CHAIN (parm))
4337 if (is_unused_scalar_param (parm))
4339 representatives.quick_push (&no_accesses_representant);
4340 if (result == NO_GOOD_ACCESS)
4341 result = UNUSED_PARAMS;
4343 else if (POINTER_TYPE_P (TREE_TYPE (parm))
4344 && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
4345 && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4347 repr = unmodified_by_ref_scalar_representative (parm);
4348 representatives.quick_push (repr);
4349 if (repr)
4350 result = UNMODIF_BY_REF_ACCESSES;
4352 else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4354 bool ro_grp = false;
4355 repr = splice_param_accesses (parm, &ro_grp);
4356 representatives.quick_push (repr);
4358 if (repr && !no_accesses_p (repr))
4360 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4362 if (ro_grp)
4363 result = UNMODIF_BY_REF_ACCESSES;
4364 else if (result < MODIF_BY_REF_ACCESSES)
4365 result = MODIF_BY_REF_ACCESSES;
4367 else if (result < BY_VAL_ACCESSES)
4368 result = BY_VAL_ACCESSES;
4370 else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
4371 result = UNUSED_PARAMS;
4373 else
4374 representatives.quick_push (NULL);
4377 if (result == NO_GOOD_ACCESS)
4379 representatives.release ();
4380 return NO_GOOD_ACCESS;
4383 return result;
4386 /* Return the index of BASE in PARMS. Abort if it is not found. */
4388 static inline int
4389 get_param_index (tree base, vec<tree> parms)
4391 int i, len;
4393 len = parms.length ();
4394 for (i = 0; i < len; i++)
4395 if (parms[i] == base)
4396 return i;
4397 gcc_unreachable ();
4400 /* Convert the decisions made at the representative level into compact
4401    parameter adjustments.  REPRESENTATIVES are pointers to the first
4402    representatives of each parameter's accesses, ADJUSTMENTS_COUNT is the
4403    expected final number of adjustments.  */
4405 static ipa_parm_adjustment_vec
4406 turn_representatives_into_adjustments (vec<access_p> representatives,
4407 int adjustments_count)
4409 vec<tree> parms;
4410 ipa_parm_adjustment_vec adjustments;
4411 tree parm;
4412 int i;
4414 gcc_assert (adjustments_count > 0);
4415 parms = ipa_get_vector_of_formal_parms (current_function_decl);
4416 adjustments.create (adjustments_count);
4417 parm = DECL_ARGUMENTS (current_function_decl);
4418 for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
4420 struct access *repr = representatives[i];
4422 if (!repr || no_accesses_p (repr))
4424 struct ipa_parm_adjustment adj;
4426 memset (&adj, 0, sizeof (adj));
4427 adj.base_index = get_param_index (parm, parms);
4428 adj.base = parm;
4429 if (!repr)
4430 adj.op = IPA_PARM_OP_COPY;
4431 else
4432 adj.op = IPA_PARM_OP_REMOVE;
4433 adj.arg_prefix = "ISRA";
4434 adjustments.quick_push (adj);
4436 else
4438 struct ipa_parm_adjustment adj;
4439 int index = get_param_index (parm, parms);
4441 for (; repr; repr = repr->next_grp)
4443 memset (&adj, 0, sizeof (adj));
4444 gcc_assert (repr->base == parm);
4445 adj.base_index = index;
4446 adj.base = repr->base;
4447 adj.type = repr->type;
4448 adj.alias_ptr_type = reference_alias_ptr_type (repr->expr);
4449 adj.offset = repr->offset;
4450 adj.by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
4451 && (repr->grp_maybe_modified
4452 || repr->grp_not_necessarilly_dereferenced));
4453 adj.arg_prefix = "ISRA";
4454 adjustments.quick_push (adj);
4458 parms.release ();
4459 return adjustments;
4462 /* Analyze the collected accesses and produce a plan for what to do with the
4463    parameters in the form of adjustments, an empty vector meaning nothing.  */
4465 static ipa_parm_adjustment_vec
4466 analyze_all_param_acesses (void)
4468 enum ipa_splicing_result repr_state;
4469 bool proceed = false;
4470 int i, adjustments_count = 0;
4471 vec<access_p> representatives;
4472 ipa_parm_adjustment_vec adjustments;
4474 repr_state = splice_all_param_accesses (representatives);
4475 if (repr_state == NO_GOOD_ACCESS)
4476 return ipa_parm_adjustment_vec ();
4478 /* If there are any parameters passed by reference which are not modified
4479 directly, we need to check whether they can be modified indirectly. */
4480 if (repr_state == UNMODIF_BY_REF_ACCESSES)
4482 analyze_caller_dereference_legality (representatives);
4483 analyze_modified_params (representatives);
4486 for (i = 0; i < func_param_count; i++)
4488 struct access *repr = representatives[i];
4490 if (repr && !no_accesses_p (repr))
4492 if (repr->grp_scalar_ptr)
4494 adjustments_count++;
4495 if (repr->grp_not_necessarilly_dereferenced
4496 || repr->grp_maybe_modified)
4497 representatives[i] = NULL;
4498 else
4500 proceed = true;
4501 sra_stats.scalar_by_ref_to_by_val++;
4504 else
4506 int new_components = decide_one_param_reduction (repr);
4508 if (new_components == 0)
4510 representatives[i] = NULL;
4511 adjustments_count++;
4513 else
4515 adjustments_count += new_components;
4516 sra_stats.aggregate_params_reduced++;
4517 sra_stats.param_reductions_created += new_components;
4518 proceed = true;
4522 else
4524 if (no_accesses_p (repr))
4526 proceed = true;
4527 sra_stats.deleted_unused_parameters++;
4529 adjustments_count++;
4533 if (!proceed && dump_file)
4534 fprintf (dump_file, "NOT proceeding to change params.\n");
4536 if (proceed)
4537 adjustments = turn_representatives_into_adjustments (representatives,
4538 adjustments_count);
4539 else
4540 adjustments = ipa_parm_adjustment_vec ();
4542 representatives.release ();
4543 return adjustments;
4546 /* If a parameter replacement identified by ADJ does not yet exist in the form
4547    of a declaration, create it and record it, otherwise return the previously
4548    created one.  */
4550 static tree
4551 get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
4553 tree repl;
4554 if (!adj->new_ssa_base)
4556 char *pretty_name = make_fancy_name (adj->base);
4558 repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
4559 DECL_NAME (repl) = get_identifier (pretty_name);
4560 obstack_free (&name_obstack, pretty_name);
4562 adj->new_ssa_base = repl;
4564 else
4565 repl = adj->new_ssa_base;
4566 return repl;
4569 /* Find the first adjustment for a particular parameter BASE in a vector of
4570    ADJUSTMENTS which is not a copy (IPA_PARM_OP_COPY).  Return NULL if there is
4571    no such adjustment.  */
4573 static struct ipa_parm_adjustment *
4574 get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
4576 int i, len;
4578 len = adjustments.length ();
4579 for (i = 0; i < len; i++)
4581 struct ipa_parm_adjustment *adj;
4583 adj = &adjustments[i];
4584 if (adj->op != IPA_PARM_OP_COPY && adj->base == base)
4585 return adj;
4588 return NULL;
4591 /* If the statement STMT defines an SSA_NAME of a parameter which is to be
4592    removed because its value is not used, replace the SSA_NAME, together with
4593    all of its uses, with one relating to a newly created VAR_DECL and return
4594    true.  ADJUSTMENTS is the vector of adjustments.  */
4596 static bool
4597 replace_removed_params_ssa_names (gimple stmt,
4598 ipa_parm_adjustment_vec adjustments)
4600 struct ipa_parm_adjustment *adj;
4601 tree lhs, decl, repl, name;
4603 if (gimple_code (stmt) == GIMPLE_PHI)
4604 lhs = gimple_phi_result (stmt);
4605 else if (is_gimple_assign (stmt))
4606 lhs = gimple_assign_lhs (stmt);
4607 else if (is_gimple_call (stmt))
4608 lhs = gimple_call_lhs (stmt);
4609 else
4610 gcc_unreachable ();
4612 if (TREE_CODE (lhs) != SSA_NAME)
4613 return false;
4615 decl = SSA_NAME_VAR (lhs);
4616 if (decl == NULL_TREE
4617 || TREE_CODE (decl) != PARM_DECL)
4618 return false;
4620 adj = get_adjustment_for_base (adjustments, decl);
4621 if (!adj)
4622 return false;
4624 repl = get_replaced_param_substitute (adj);
4625 name = make_ssa_name (repl, stmt);
4627 if (dump_file)
4629 fprintf (dump_file, "replacing an SSA name of a removed param ");
4630 print_generic_expr (dump_file, lhs, 0);
4631 fprintf (dump_file, " with ");
4632 print_generic_expr (dump_file, name, 0);
4633 fprintf (dump_file, "\n");
4636 if (is_gimple_assign (stmt))
4637 gimple_assign_set_lhs (stmt, name);
4638 else if (is_gimple_call (stmt))
4639 gimple_call_set_lhs (stmt, name);
4640 else
4641 gimple_phi_set_result (as_a <gphi *> (stmt), name);
4643 replace_uses_by (lhs, name);
4644 release_ssa_name (lhs);
4645 return true;
4648 /* If the statement STMT contains any expressions that need to be replaced
4649    with different ones as noted by ADJUSTMENTS, do so.  Handle any potential
4650    type incompatibilities (GSI is used to accommodate conversion statements and
4651    must point to the statement).  Return true iff the statement was modified. */
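/* An editorial example of the type mending done here: if a single-field
   struct W { float f; } parameter is scalarized to a plain float, an
   assignment between a struct W object and the replacement float no longer
   type-checks, so the rhs is wrapped in a VIEW_CONVERT_EXPR (or, for a
   CONSTRUCTOR rhs, rebuilt) to make both sides compatible again.  */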
4653 static bool
4654 sra_ipa_modify_assign (gimple stmt, gimple_stmt_iterator *gsi,
4655 ipa_parm_adjustment_vec adjustments)
4657 tree *lhs_p, *rhs_p;
4658 bool any;
4660 if (!gimple_assign_single_p (stmt))
4661 return false;
4663 rhs_p = gimple_assign_rhs1_ptr (stmt);
4664 lhs_p = gimple_assign_lhs_ptr (stmt);
4666 any = ipa_modify_expr (rhs_p, false, adjustments);
4667 any |= ipa_modify_expr (lhs_p, false, adjustments);
4668 if (any)
4670 tree new_rhs = NULL_TREE;
4672 if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
4674 if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
4676 /* V_C_Es of constructors can cause trouble (PR 42714). */
4677 if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
4678 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
4679 else
4680 *rhs_p = build_constructor (TREE_TYPE (*lhs_p),
4681 NULL);
4683 else
4684 new_rhs = fold_build1_loc (gimple_location (stmt),
4685 VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
4686 *rhs_p);
4688 else if (REFERENCE_CLASS_P (*rhs_p)
4689 && is_gimple_reg_type (TREE_TYPE (*lhs_p))
4690 && !is_gimple_reg (*lhs_p))
4691 /* This can happen when an assignment in between two single field
4692 structures is turned into an assignment in between two pointers to
4693 scalars (PR 42237). */
4694 new_rhs = *rhs_p;
4696 if (new_rhs)
4698 tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
4699 true, GSI_SAME_STMT);
4701 gimple_assign_set_rhs_from_tree (gsi, tmp);
4704 return true;
4707 return false;
4710 /* Traverse the function body and perform all modifications described in
4711    ADJUSTMENTS.  Return true iff the CFG has been changed.  */
4713 bool
4714 ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
4716 bool cfg_changed = false;
4717 basic_block bb;
4719 FOR_EACH_BB_FN (bb, cfun)
4721 gimple_stmt_iterator gsi;
4723 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4724 replace_removed_params_ssa_names (gsi_stmt (gsi), adjustments);
4726 gsi = gsi_start_bb (bb);
4727 while (!gsi_end_p (gsi))
4729 gimple stmt = gsi_stmt (gsi);
4730 bool modified = false;
4731 tree *t;
4732 unsigned i;
4734 switch (gimple_code (stmt))
4736 case GIMPLE_RETURN:
4737 t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
4738 if (*t != NULL_TREE)
4739 modified |= ipa_modify_expr (t, true, adjustments);
4740 break;
4742 case GIMPLE_ASSIGN:
4743 modified |= sra_ipa_modify_assign (stmt, &gsi, adjustments);
4744 modified |= replace_removed_params_ssa_names (stmt, adjustments);
4745 break;
4747 case GIMPLE_CALL:
4748 /* Operands must be processed before the lhs. */
4749 for (i = 0; i < gimple_call_num_args (stmt); i++)
4751 t = gimple_call_arg_ptr (stmt, i);
4752 modified |= ipa_modify_expr (t, true, adjustments);
4755 if (gimple_call_lhs (stmt))
4757 t = gimple_call_lhs_ptr (stmt);
4758 modified |= ipa_modify_expr (t, false, adjustments);
4759 modified |= replace_removed_params_ssa_names (stmt,
4760 adjustments);
4762 break;
4764 case GIMPLE_ASM:
4766 gasm *asm_stmt = as_a <gasm *> (stmt);
4767 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
4769 t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
4770 modified |= ipa_modify_expr (t, true, adjustments);
4772 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
4774 t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
4775 modified |= ipa_modify_expr (t, false, adjustments);
4778 break;
4780 default:
4781 break;
4784 if (modified)
4786 update_stmt (stmt);
4787 if (maybe_clean_eh_stmt (stmt)
4788 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4789 cfg_changed = true;
4791 gsi_next (&gsi);
4795 return cfg_changed;
4798 /* Call gimple_debug_bind_reset_value on all debug statements describing
4799 gimple register parameters that are being removed or replaced. */
4801 static void
4802 sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
4804 int i, len;
4805 gimple_stmt_iterator *gsip = NULL, gsi;
4807 if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
4809 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
4810 gsip = &gsi;
4812 len = adjustments.length ();
4813 for (i = 0; i < len; i++)
4815 struct ipa_parm_adjustment *adj;
4816 imm_use_iterator ui;
4817 gimple stmt;
4818 gdebug *def_temp;
4819 tree name, vexpr, copy = NULL_TREE;
4820 use_operand_p use_p;
4822 adj = &adjustments[i];
4823 if (adj->op == IPA_PARM_OP_COPY || !is_gimple_reg (adj->base))
4824 continue;
4825 name = ssa_default_def (cfun, adj->base);
4826 vexpr = NULL;
4827 if (name)
4828 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
4830 if (gimple_clobber_p (stmt))
4832 gimple_stmt_iterator cgsi = gsi_for_stmt (stmt);
4833 unlink_stmt_vdef (stmt);
4834 gsi_remove (&cgsi, true);
4835 release_defs (stmt);
4836 continue;
4838 /* All other users must have been removed by
4839 ipa_sra_modify_function_body. */
4840 gcc_assert (is_gimple_debug (stmt));
4841 if (vexpr == NULL && gsip != NULL)
4843 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4844 vexpr = make_node (DEBUG_EXPR_DECL);
4845 def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
4846 NULL);
4847 DECL_ARTIFICIAL (vexpr) = 1;
4848 TREE_TYPE (vexpr) = TREE_TYPE (name);
4849 DECL_MODE (vexpr) = DECL_MODE (adj->base);
4850 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
4852 if (vexpr)
4854 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
4855 SET_USE (use_p, vexpr);
4857 else
4858 gimple_debug_bind_reset_value (stmt);
4859 update_stmt (stmt);
4861 /* Create a VAR_DECL for debug info purposes. */
4862 if (!DECL_IGNORED_P (adj->base))
4864 copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
4865 VAR_DECL, DECL_NAME (adj->base),
4866 TREE_TYPE (adj->base));
4867 if (DECL_PT_UID_SET_P (adj->base))
4868 SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
4869 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
4870 TREE_READONLY (copy) = TREE_READONLY (adj->base);
4871 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
4872 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
4873 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
4874 DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
4875 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
4876 DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
4877 SET_DECL_RTL (copy, 0);
4878 TREE_USED (copy) = 1;
4879 DECL_CONTEXT (copy) = current_function_decl;
4880 add_local_decl (cfun, copy);
4881 DECL_CHAIN (copy) =
4882 BLOCK_VARS (DECL_INITIAL (current_function_decl));
4883 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
4885 if (gsip != NULL && copy && target_for_debug_bind (adj->base))
4887 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4888 if (vexpr)
4889 def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
4890 else
4891 def_temp = gimple_build_debug_source_bind (copy, adj->base,
4892 NULL);
4893 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
4898 /* Return true if some caller has fewer actual arguments than there are
4899    formal parameters in the current function or if the types of the arguments
4900    do not match; return false otherwise.  */
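/* An editorial example of such a mismatch: an old-style caller declaring

     extern int f ();

   and calling f (1) when f actually has two formal parameters; such a call
   statement cannot be adjusted reliably, so IPA-SRA must give up.  */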
4902 static bool
4903 some_callers_have_mismatched_arguments_p (struct cgraph_node *node,
4904 void *data ATTRIBUTE_UNUSED)
4906 struct cgraph_edge *cs;
4907 for (cs = node->callers; cs; cs = cs->next_caller)
4908 if (!cs->call_stmt || !callsite_arguments_match_p (cs->call_stmt))
4909 return true;
4911 return false;
4914 /* Return true iff some caller has a call statement with no VUSE attached.  */
4916 static bool
4917 some_callers_have_no_vuse_p (struct cgraph_node *node,
4918 void *data ATTRIBUTE_UNUSED)
4920 struct cgraph_edge *cs;
4921 for (cs = node->callers; cs; cs = cs->next_caller)
4922 if (!cs->call_stmt || !gimple_vuse (cs->call_stmt))
4923 return true;
4925 return false;
4928 /* Convert all callers of NODE. */
4930 static bool
4931 convert_callers_for_node (struct cgraph_node *node,
4932 void *data)
4934 ipa_parm_adjustment_vec *adjustments = (ipa_parm_adjustment_vec *) data;
4935 bitmap recomputed_callers = BITMAP_ALLOC (NULL);
4936 struct cgraph_edge *cs;
4938 for (cs = node->callers; cs; cs = cs->next_caller)
4940 push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));
4942 if (dump_file)
4943 fprintf (dump_file, "Adjusting call %s/%i -> %s/%i\n",
4944 xstrdup (cs->caller->name ()),
4945 cs->caller->order,
4946 xstrdup (cs->callee->name ()),
4947 cs->callee->order);
4949 ipa_modify_call_arguments (cs, cs->call_stmt, *adjustments);
4951 pop_cfun ();
4954 for (cs = node->callers; cs; cs = cs->next_caller)
4955 if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
4956 && gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->decl)))
4957 compute_inline_parameters (cs->caller, true);
4958 BITMAP_FREE (recomputed_callers);
4960 return true;
4963 /* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS. */
4965 static void
4966 convert_callers (struct cgraph_node *node, tree old_decl,
4967 ipa_parm_adjustment_vec adjustments)
4969 basic_block this_block;
4971 node->call_for_symbol_and_aliases (convert_callers_for_node,
4972 &adjustments, false);
4974 if (!encountered_recursive_call)
4975 return;
4977 FOR_EACH_BB_FN (this_block, cfun)
4979 gimple_stmt_iterator gsi;
4981 for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
4983 gcall *stmt;
4984 tree call_fndecl;
4985 stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
4986 if (!stmt)
4987 continue;
4988 call_fndecl = gimple_call_fndecl (stmt);
4989 if (call_fndecl == old_decl)
4991 if (dump_file)
4992 fprintf (dump_file, "Adjusting recursive call\n");
4993 gimple_call_set_fndecl (stmt, node->decl);
4994 ipa_modify_call_arguments (NULL, stmt, adjustments);
4999 return;
5002 /* Perform all the modifications required in IPA-SRA for NODE to have its
5003    parameters as given in ADJUSTMENTS.  Return true iff the CFG has changed. */
5005 static bool
5006 modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
5008 struct cgraph_node *new_node;
5009 bool cfg_changed;
5011 cgraph_edge::rebuild_edges ();
5012 free_dominance_info (CDI_DOMINATORS);
5013 pop_cfun ();
5015 /* This must be done after rebuilding cgraph edges for node above.
5016 Otherwise any recursive calls to node that are recorded in
5017 redirect_callers will be corrupted. */
5018 vec<cgraph_edge *> redirect_callers = node->collect_callers ();
5019 new_node = node->create_version_clone_with_body (redirect_callers, NULL,
5020 NULL, false, NULL, NULL,
5021 "isra");
5022 redirect_callers.release ();
5024 push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));
5025 ipa_modify_formal_parameters (current_function_decl, adjustments);
5026 cfg_changed = ipa_sra_modify_function_body (adjustments);
5027 sra_ipa_reset_debug_stmts (adjustments);
5028 convert_callers (new_node, node->decl, adjustments);
5029 new_node->make_local ();
5030 return cfg_changed;
5033 /* Means of communication between ipa_sra_check_caller and
5034 ipa_sra_preliminary_function_checks. */
5036 struct ipa_sra_check_caller_data
5038 bool has_callers;
5039 bool bad_arg_alignment;
5040 bool has_thunk;
5043 /* If NODE has a caller, mark that fact in DATA, which is a pointer to
5044    ipa_sra_check_caller_data.  Also check whether all aggregate arguments in
5045    all known calls are unit aligned and, if not, set the appropriate flag in
5046    DATA too.  */
5048 static bool
5049 ipa_sra_check_caller (struct cgraph_node *node, void *data)
5051 if (!node->callers)
5052 return false;
5054 struct ipa_sra_check_caller_data *iscc;
5055 iscc = (struct ipa_sra_check_caller_data *) data;
5056 iscc->has_callers = true;
5058 for (cgraph_edge *cs = node->callers; cs; cs = cs->next_caller)
5060 if (cs->caller->thunk.thunk_p)
5062 iscc->has_thunk = true;
5063 return true;
5065 gimple call_stmt = cs->call_stmt;
5066 unsigned count = gimple_call_num_args (call_stmt);
5067 for (unsigned i = 0; i < count; i++)
5069 tree arg = gimple_call_arg (call_stmt, i);
5070 if (is_gimple_reg (arg))
5071 continue;
5073 tree offset;
5074 HOST_WIDE_INT bitsize, bitpos;
5075 machine_mode mode;
5076 int unsignedp, volatilep = 0;
5077 get_inner_reference (arg, &bitsize, &bitpos, &offset, &mode,
5078 &unsignedp, &volatilep, false);
5079 if (bitpos % BITS_PER_UNIT)
5081 iscc->bad_arg_alignment = true;
5082 return true;
5087 return false;
5090 /* Return false if the function is apparently unsuitable for IPA-SRA based on
5091    its attributes, return true otherwise.  NODE is the cgraph node of the
5092    current function.  */
5094 static bool
5095 ipa_sra_preliminary_function_checks (struct cgraph_node *node)
5097 if (!node->can_be_local_p ())
5099 if (dump_file)
5100 fprintf (dump_file, "Function not local to this compilation unit.\n");
5101 return false;
5104 if (!node->local.can_change_signature)
5106 if (dump_file)
5107 fprintf (dump_file, "Function cannot change signature.\n");
5108 return false;
5111 if (!tree_versionable_function_p (node->decl))
5113 if (dump_file)
5114 fprintf (dump_file, "Function is not versionable.\n");
5115 return false;
5118 if (!opt_for_fn (node->decl, optimize)
5119 || !opt_for_fn (node->decl, flag_ipa_sra))
5121 if (dump_file)
5122 fprintf (dump_file, "Function not optimized.\n");
5123 return false;
5126 if (DECL_VIRTUAL_P (current_function_decl))
5128 if (dump_file)
5129 fprintf (dump_file, "Function is a virtual method.\n");
5130 return false;
5133 if ((DECL_ONE_ONLY (node->decl) || DECL_EXTERNAL (node->decl))
5134 && inline_summaries->get (node)->size >= MAX_INLINE_INSNS_AUTO)
5136 if (dump_file)
5137 fprintf (dump_file, "Function too big to be made truly local.\n");
5138 return false;
5141 if (cfun->stdarg)
5143 if (dump_file)
5144 fprintf (dump_file, "Function uses stdarg.\n");
5145 return false;
5148 if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
5149 return false;
5151 if (DECL_DISREGARD_INLINE_LIMITS (node->decl))
5153 if (dump_file)
5154 fprintf (dump_file, "Always inline function will be inlined "
5155 "anyway. \n");
5156 return false;
5159 struct ipa_sra_check_caller_data iscc;
5160 memset (&iscc, 0, sizeof (iscc));
5161 node->call_for_symbol_and_aliases (ipa_sra_check_caller, &iscc, true);
5162 if (!iscc.has_callers)
5164 if (dump_file)
5165 fprintf (dump_file,
5166 "Function has no callers in this compilation unit.\n");
5167 return false;
5170 if (iscc.bad_arg_alignment)
5172 if (dump_file)
5173 fprintf (dump_file,
5174 "A function call has an argument with non-unit alignment.\n");
5175 return false;
5178 if (iscc.has_thunk)
5180 if (dump_file)
5181 fprintf (dump_file,
5182 "A has thunk.\n");
5183 return false;
5186 return true;
5189 /* Perform early interprocedural SRA. */
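/* An editorial end-to-end sketch (names hypothetical):

     struct pair { int a, b; };
     static int get_a (struct pair *p) { return p->a; }

   becomes, after the pass clones the function and adjusts every caller,

     static int get_a.isra.0 (int a) { return a; }

   with each call site rewritten to load p->a itself and pass the value
   directly.  */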
5191 static unsigned int
5192 ipa_early_sra (void)
5194 struct cgraph_node *node = cgraph_node::get (current_function_decl);
5195 ipa_parm_adjustment_vec adjustments;
5196 int ret = 0;
5198 if (!ipa_sra_preliminary_function_checks (node))
5199 return 0;
5201 sra_initialize ();
5202 sra_mode = SRA_MODE_EARLY_IPA;
5204 if (!find_param_candidates ())
5206 if (dump_file)
5207 fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
5208 goto simple_out;
5211 if (node->call_for_symbol_and_aliases
5212 (some_callers_have_mismatched_arguments_p, NULL, true))
5214 if (dump_file)
5215 fprintf (dump_file, "There are callers with an insufficient number of "
5216 "arguments or arguments with type mismatches.\n");
5217 goto simple_out;
5220 if (node->call_for_symbol_and_aliases
5221 (some_callers_have_no_vuse_p, NULL, true))
5223 if (dump_file)
5224 fprintf (dump_file, "There are callers with no VUSE attached "
5225 "to a call stmt.\n");
5226 goto simple_out;
5229 bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
5230 func_param_count
5231 * last_basic_block_for_fn (cfun));
5232 final_bbs = BITMAP_ALLOC (NULL);
5234 scan_function ();
5235 if (encountered_apply_args)
5237 if (dump_file)
5238 fprintf (dump_file, "Function calls __builtin_apply_args().\n");
5239 goto out;
5242 if (encountered_unchangable_recursive_call)
5244 if (dump_file)
5245 fprintf (dump_file, "Function calls itself with an insufficient "
5246 "number of arguments.\n");
5247 goto out;
5250 adjustments = analyze_all_param_acesses ();
5251 if (!adjustments.exists ())
5252 goto out;
5253 if (dump_file)
5254 ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);
5256 if (modify_function (node, adjustments))
5257 ret = TODO_update_ssa | TODO_cleanup_cfg;
5258 else
5259 ret = TODO_update_ssa;
5260 adjustments.release ();
5262 statistics_counter_event (cfun, "Unused parameters deleted",
5263 sra_stats.deleted_unused_parameters);
5264 statistics_counter_event (cfun, "Scalar parameters converted to by-value",
5265 sra_stats.scalar_by_ref_to_by_val);
5266 statistics_counter_event (cfun, "Aggregate parameters broken up",
5267 sra_stats.aggregate_params_reduced);
5268 statistics_counter_event (cfun, "Aggregate parameter components created",
5269 sra_stats.param_reductions_created);
5271 out:
5272 BITMAP_FREE (final_bbs);
5273 free (bb_dereferences);
5274 simple_out:
5275 sra_deinitialize ();
5276 return ret;
5279 namespace {
5281 const pass_data pass_data_early_ipa_sra =
5283 GIMPLE_PASS, /* type */
5284 "eipa_sra", /* name */
5285 OPTGROUP_NONE, /* optinfo_flags */
5286 TV_IPA_SRA, /* tv_id */
5287 0, /* properties_required */
5288 0, /* properties_provided */
5289 0, /* properties_destroyed */
5290 0, /* todo_flags_start */
5291 TODO_dump_symtab, /* todo_flags_finish */
5294 class pass_early_ipa_sra : public gimple_opt_pass
5296 public:
5297 pass_early_ipa_sra (gcc::context *ctxt)
5298 : gimple_opt_pass (pass_data_early_ipa_sra, ctxt)
5301 /* opt_pass methods: */
5302 virtual bool gate (function *) { return flag_ipa_sra && dbg_cnt (eipa_sra); }
5303 virtual unsigned int execute (function *) { return ipa_early_sra (); }
5305 }; // class pass_early_ipa_sra
5307 } // anon namespace
5309 gimple_opt_pass *
5310 make_pass_early_ipa_sra (gcc::context *ctxt)
5312 return new pass_early_ipa_sra (ctxt);