/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2008-2015 Free Software Foundation, Inc.
   Contributed by Martin Jambor <mjambor@suse.cz>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file implements Scalar Replacement of Aggregates (SRA).  SRA is run
   twice, once in the early stages of compilation (early SRA) and once in the
   late stages (late SRA).  The aim of both is to turn references to scalar
   parts of aggregates into uses of independent scalar variables.

   The two passes are nearly identical, the only difference is that early SRA
   does not scalarize unions which are used as the result in a GIMPLE_RETURN
   statement because together with inlining this can lead to weird type
   conversions.

   Both passes operate in four stages:

   1. The declarations that have properties which make them candidates for
      scalarization are identified in function find_var_candidates().  The
      candidates are stored in candidate_bitmap.

   2. The function body is scanned.  In the process, declarations which are
      used in a manner that prevents their scalarization are removed from the
      candidate bitmap.  More importantly, for every access into an aggregate,
      an access structure (struct access) is created by create_access() and
      stored in a vector associated with the aggregate.  Among other
      information, the aggregate declaration, the offset and size of the access
      and its type are stored in the structure.

      On a related note, assign_link structures are created for every assign
      statement between candidate aggregates and attached to the related
      accesses.

   3. The vectors of accesses are analyzed.  They are first sorted according to
      their offset and size and then scanned for partially overlapping accesses
      (i.e. those which overlap but one is not entirely within another).  Such
      an access disqualifies the whole aggregate from being scalarized.

      If there is no such inhibiting overlap, a representative access structure
      is chosen for every unique combination of offset and size.  Afterwards,
      the pass builds a set of trees from these structures, in which children
      of an access are within their parent (in terms of offset and size).

      Then accesses are propagated whenever possible (i.e. in cases when it
      does not create a partially overlapping access) across assign_links from
      the right hand side to the left hand side.

      Then the set of trees for each declaration is traversed again and those
      accesses which should be replaced by a scalar are identified.

   4. The function is traversed again, and for every reference into an
      aggregate that has some component which is about to be scalarized,
      statements are amended and new statements are created as necessary.
      Finally, if a parameter got scalarized, the scalar replacements are
      initialized with values from respective parameter aggregates.  */
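
/* As a purely illustrative sketch (names follow the pass's fancy-name
   convention), a function such as

     struct pair { int x; int y; };
     int f (struct pair p) { p.x++; return p.x + p.y; }

   is transformed by intraprocedural SRA so that the field references become
   uses of independent scalar replacements, conceptually

     int p$x = p.x;
     int p$y = p.y;
     p$x++;
     return p$x + p$y;

   after which the scalar optimizers can work on p$x and p$y directly.  */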
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-map.h"
#include "hash-table.h"
#include "alloc-pool.h"
#include "tm.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "stor-layout.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "bitmap.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "hashtab.h"
#include "rtl.h"
#include "flags.h"
#include "statistics.h"
#include "real.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-pass.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "params.h"
#include "target.h"
#include "dbgcnt.h"
#include "tree-inline.h"
#include "gimple-pretty-print.h"
#include "ipa-inline.h"
#include "ipa-utils.h"
#include "builtins.h"
/* Enumeration of all aggregate reductions we can do.  */
enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
                SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
                SRA_MODE_INTRA };     /* late intraprocedural SRA */

/* Global variable describing which aggregate reduction we are performing at
   the moment.  */
static enum sra_mode sra_mode;

struct assign_link;
/* ACCESS represents each access to an aggregate variable (as a whole or a
   part).  It can also represent a group of accesses that refer to exactly the
   same fragment of an aggregate (i.e. those that have exactly the same offset
   and size).  Such representatives for a single aggregate, once determined,
   are linked in a linked list and have the group fields set.

   Moreover, when doing intraprocedural SRA, a tree is built from those
   representatives (by the means of first_child and next_sibling pointers), in
   which all items in a subtree are "within" the root, i.e. their offset is
   greater or equal to offset of the root and offset+size is smaller or equal
   to offset+size of the root.  Children of an access are sorted by offset.

   Note that accesses to parts of vector and complex number types are always
   represented by an access to the whole complex number or vector.  It is the
   duty of the modifying functions to replace them appropriately.  */
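
/* A small example of the tree invariant described above: for accesses with
   (offset, size) pairs (0, 64), (0, 32) and (32, 32), the (0, 64)
   representative is the root and the two 32-bit accesses are its children,
   sorted by offset.  */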
struct access
{
  /* Values returned by `get_ref_base_and_extent' for each component reference.
     If EXPR isn't a component reference, just set `BASE = EXPR', `OFFSET = 0'
     and `SIZE = TREE_SIZE (TREE_TYPE (expr))'.  */
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
  tree base;

  /* Expression.  It is context dependent so do not use it to create new
     expressions to access the original aggregate.  See PR 42154 for a
     testcase.  */
  tree expr;
  /* Type.  */
  tree type;

  /* The statement this access belongs to.  */
  gimple stmt;

  /* Next group representative for this aggregate.  */
  struct access *next_grp;

  /* Pointer to the group representative.  Pointer to itself if the struct is
     the representative.  */
  struct access *group_representative;

  /* If this access has any children (in terms of the definition above), this
     points to the first one.  */
  struct access *first_child;

  /* In intraprocedural SRA, pointer to the next sibling in the access tree as
     described above.  In IPA-SRA this is a pointer to the next access
     belonging to the same group (having the same representative).  */
  struct access *next_sibling;

  /* Pointers to the first and last element in the linked list of assign
     links.  */
  struct assign_link *first_link, *last_link;

  /* Pointer to the next access in the work queue.  */
  struct access *next_queued;

  /* Replacement variable for this access "region."  Never to be accessed
     directly, always only by the means of get_access_replacement() and only
     when grp_to_be_replaced flag is set.  */
  tree replacement_decl;

  /* Is this particular access write access? */
  unsigned write : 1;

  /* Is this access an access to a non-addressable field? */
  unsigned non_addressable : 1;

  /* Is this access currently in the work queue? */
  unsigned grp_queued : 1;

  /* Does this group contain a write access?  This flag is propagated down the
     access tree.  */
  unsigned grp_write : 1;

  /* Does this group contain a read access?  This flag is propagated down the
     access tree.  */
  unsigned grp_read : 1;

  /* Does this group contain a read access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_read : 1;

  /* Does this group contain a write access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_write : 1;

  /* Does this group contain a read access through a scalar type?  This flag is
     not propagated in the access tree in any direction.  */
  unsigned grp_scalar_read : 1;

  /* Does this group contain a write access through a scalar type?  This flag
     is not propagated in the access tree in any direction.  */
  unsigned grp_scalar_write : 1;

  /* Is this access an artificial one created to scalarize some record
     entirely? */
  unsigned grp_total_scalarization : 1;

  /* Other passes of the analysis use this bit to make function
     analyze_access_subtree create scalar replacements for this group if
     possible.  */
  unsigned grp_hint : 1;

  /* Is the subtree rooted in this access fully covered by scalar
     replacements? */
  unsigned grp_covered : 1;

  /* If set to true, this access and all below it in an access tree must not be
     scalarized.  */
  unsigned grp_unscalarizable_region : 1;

  /* Whether data have been written to parts of the aggregate covered by this
     access which is not to be scalarized.  This flag is propagated up in the
     access tree.  */
  unsigned grp_unscalarized_data : 1;

  /* Does this access and/or group contain a write access through a
     BIT_FIELD_REF? */
  unsigned grp_partial_lhs : 1;

  /* Set when a scalar replacement should be created for this variable.  */
  unsigned grp_to_be_replaced : 1;

  /* Set when we want a replacement for the sole purpose of having it in
     generated debug statements.  */
  unsigned grp_to_be_debug_replaced : 1;

  /* Should TREE_NO_WARNING of a replacement be set? */
  unsigned grp_no_warning : 1;

  /* Is it possible that the group refers to data which might be (directly or
     otherwise) modified? */
  unsigned grp_maybe_modified : 1;

  /* Set when this is a representative of a pointer to scalar (i.e. by
     reference) parameter which we consider for turning into a plain scalar
     (i.e. a by value parameter).  */
  unsigned grp_scalar_ptr : 1;

  /* Set when we discover that this pointer is not safe to dereference in the
     caller.  */
  unsigned grp_not_necessarilly_dereferenced : 1;
};

typedef struct access *access_p;
/* Alloc pool for allocating access structures.  */
static alloc_pool access_pool;

/* A structure linking lhs and rhs accesses from an aggregate assignment.  They
   are used to propagate subaccesses from rhs to lhs as long as they don't
   conflict with what is already there.  */
struct assign_link
{
  struct access *lacc, *racc;
  struct assign_link *next;
};

/* Alloc pool for allocating assign link structures.  */
static alloc_pool link_pool;

/* Base (tree) -> Vector (vec<access_p> *) map.  */
static hash_map<tree, auto_vec<access_p> > *base_access_vec;
/* Candidate hash table helpers.  */

struct uid_decl_hasher : typed_noop_remove <tree_node>
{
  typedef tree_node value_type;
  typedef tree_node compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Hash a tree in a uid_decl_map.  */

inline hashval_t
uid_decl_hasher::hash (const value_type *item)
{
  return item->decl_minimal.uid;
}

/* Return true if the DECL_UIDs of both trees are equal.  */

inline bool
uid_decl_hasher::equal (const value_type *a, const compare_type *b)
{
  return (a->decl_minimal.uid == b->decl_minimal.uid);
}
/* Set of candidates.  */
static bitmap candidate_bitmap;
static hash_table<uid_decl_hasher> *candidates;

/* For a candidate UID return the candidate's decl.  */

static inline tree
candidate (unsigned uid)
{
  tree_node t;
  t.decl_minimal.uid = uid;
  return candidates->find_with_hash (&t, static_cast <hashval_t> (uid));
}
/* Bitmap of candidates which we should try to entirely scalarize away and
   those which cannot be (because they are, and need to be, used as a
   whole).  */
static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;
/* Obstack for creation of fancy names.  */
static struct obstack name_obstack;

/* Head of a linked list of accesses that need to have their subaccesses
   propagated to their assignment counterparts.  */
static struct access *work_queue_head;

/* Number of parameters of the analyzed function when doing early ipa SRA.  */
static int func_param_count;

/* scan_function sets the following to true if it encounters a call to
   __builtin_apply_args.  */
static bool encountered_apply_args;

/* Set by scan_function when it finds a recursive call.  */
static bool encountered_recursive_call;

/* Set by scan_function when it finds a recursive call with fewer actual
   arguments than formal parameters.  */
static bool encountered_unchangable_recursive_call;

/* This is a table in which for each basic block and parameter there is a
   distance (offset + size) in that parameter which is dereferenced and
   accessed in that BB.  */
static HOST_WIDE_INT *bb_dereferences;
/* Bitmap of BBs that can cause the function to "stop" progressing by
   returning, throwing externally, looping infinitely or calling a function
   which might abort etc.  */
static bitmap final_bbs;

/* Representative of no accesses at all.  */
static struct access no_accesses_representant;
/* Predicate to test the special value.  */

static inline bool
no_accesses_p (struct access *access)
{
  return access == &no_accesses_representant;
}
/* Statistics gathered over a run of the SRA passes.  */

static struct
{
  /* Number of processed aggregates is readily available in
     analyze_all_variable_accesses and so is not stored here.  */

  /* Number of created scalar replacements.  */
  int replacements;

  /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
     expression.  */
  int exprs;

  /* Number of statements created by generate_subtree_copies.  */
  int subtree_copies;

  /* Number of statements created by load_assign_lhs_subreplacements.  */
  int subreplacements;

  /* Number of times sra_modify_assign has deleted a statement.  */
  int deleted;

  /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
     RHS separately due to type conversions or nonexistent matching
     references.  */
  int separate_lhs_rhs_handling;

  /* Number of parameters that were removed because they were unused.  */
  int deleted_unused_parameters;

  /* Number of scalars passed as parameters by reference that have been
     converted to be passed by value.  */
  int scalar_by_ref_to_by_val;

  /* Number of aggregate parameters that were replaced by one or more of their
     components.  */
  int aggregate_params_reduced;

  /* Number of components created when splitting aggregate parameters.  */
  int param_reductions_created;
} sra_stats;
/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */

static void
dump_access (FILE *f, struct access *access, bool grp)
{
  fprintf (f, "access { ");
  fprintf (f, "base = (%d)'", DECL_UID (access->base));
  print_generic_expr (f, access->base, 0);
  fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
  fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
  fprintf (f, ", expr = ");
  print_generic_expr (f, access->expr, 0);
  fprintf (f, ", type = ");
  print_generic_expr (f, access->type, 0);
  if (grp)
    fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
             "grp_assignment_write = %d, grp_scalar_read = %d, "
             "grp_scalar_write = %d, grp_total_scalarization = %d, "
             "grp_hint = %d, grp_covered = %d, "
             "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
             "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
             "grp_to_be_debug_replaced = %d, grp_maybe_modified = %d, "
             "grp_not_necessarilly_dereferenced = %d\n",
             access->grp_read, access->grp_write, access->grp_assignment_read,
             access->grp_assignment_write, access->grp_scalar_read,
             access->grp_scalar_write, access->grp_total_scalarization,
             access->grp_hint, access->grp_covered,
             access->grp_unscalarizable_region, access->grp_unscalarized_data,
             access->grp_partial_lhs, access->grp_to_be_replaced,
             access->grp_to_be_debug_replaced, access->grp_maybe_modified,
             access->grp_not_necessarilly_dereferenced);
  else
    fprintf (f, ", write = %d, grp_total_scalarization = %d, "
             "grp_partial_lhs = %d\n",
             access->write, access->grp_total_scalarization,
             access->grp_partial_lhs);
}
/* Dump a subtree rooted in ACCESS to file F, indent by LEVEL.  */

static void
dump_access_tree_1 (FILE *f, struct access *access, int level)
{
  do
    {
      int i;

      for (i = 0; i < level; i++)
        fputs ("* ", f);

      dump_access (f, access, true);

      if (access->first_child)
        dump_access_tree_1 (f, access->first_child, level + 1);

      access = access->next_sibling;
    }
  while (access);
}
/* Dump all access trees for a variable, given the pointer to the first root in
   ACCESS.  */

static void
dump_access_tree (FILE *f, struct access *access)
{
  for (; access; access = access->next_grp)
    dump_access_tree_1 (f, access, 0);
}

/* Return true iff ACC is non-NULL and has subaccesses.  */

static inline bool
access_has_children_p (struct access *acc)
{
  return acc && acc->first_child;
}
/* Return true iff ACC is (partly) covered by at least one replacement.  */

static bool
access_has_replacements_p (struct access *acc)
{
  struct access *child;
  if (acc->grp_to_be_replaced)
    return true;
  for (child = acc->first_child; child; child = child->next_sibling)
    if (access_has_replacements_p (child))
      return true;
  return false;
}

/* Return a vector of pointers to accesses for the variable given in BASE or
   NULL if there is none.  */

static vec<access_p> *
get_base_access_vector (tree base)
{
  return base_access_vec->get (base);
}
/* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
   in ACCESS.  Return NULL if it cannot be found.  */

static struct access *
find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
                        HOST_WIDE_INT size)
{
  while (access && (access->offset != offset || access->size != size))
    {
      struct access *child = access->first_child;

      while (child && (child->offset + child->size <= offset))
        child = child->next_sibling;
      access = child;
    }

  return access;
}
/* Return the first group representative for BASE or NULL if none exists.  */

static struct access *
get_first_repr_for_decl (tree base)
{
  vec<access_p> *access_vec;

  access_vec = get_base_access_vector (base);
  if (!access_vec)
    return NULL;

  return (*access_vec)[0];
}

/* Find an access representative for the variable BASE and given OFFSET and
   SIZE.  Requires that access trees have already been built.  Return NULL if
   it cannot be found.  */

static struct access *
get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
                                 HOST_WIDE_INT size)
{
  struct access *access;

  access = get_first_repr_for_decl (base);
  while (access && (access->offset + access->size <= offset))
    access = access->next_grp;
  if (!access)
    return NULL;

  return find_access_in_subtree (access, offset, size);
}
/* Add LINK to the linked list of assign links of RACC.  */

static void
add_link_to_rhs (struct access *racc, struct assign_link *link)
{
  gcc_assert (link->racc == racc);

  if (!racc->first_link)
    {
      gcc_assert (!racc->last_link);
      racc->first_link = link;
    }
  else
    racc->last_link->next = link;

  racc->last_link = link;
  link->next = NULL;
}
/* Move all link structures in their linked list in OLD_RACC to the linked list
   in NEW_RACC.  */

static void
relink_to_new_repr (struct access *new_racc, struct access *old_racc)
{
  if (!old_racc->first_link)
    {
      gcc_assert (!old_racc->last_link);
      return;
    }

  if (new_racc->first_link)
    {
      gcc_assert (!new_racc->last_link->next);
      gcc_assert (!old_racc->last_link || !old_racc->last_link->next);

      new_racc->last_link->next = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  else
    {
      gcc_assert (!new_racc->last_link);

      new_racc->first_link = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  old_racc->first_link = old_racc->last_link = NULL;
}
/* Add ACCESS to the work queue (which is actually a stack).  */

static void
add_access_to_work_queue (struct access *access)
{
  if (!access->grp_queued)
    {
      gcc_assert (!access->next_queued);
      access->next_queued = work_queue_head;
      access->grp_queued = 1;
      work_queue_head = access;
    }
}

/* Pop an access from the work queue, and return it, assuming there is one.  */

static struct access *
pop_access_from_work_queue (void)
{
  struct access *access = work_queue_head;

  work_queue_head = access->next_queued;
  access->next_queued = NULL;
  access->grp_queued = 0;
  return access;
}
/* Allocate necessary structures.  */

static void
sra_initialize (void)
{
  candidate_bitmap = BITMAP_ALLOC (NULL);
  candidates = new hash_table<uid_decl_hasher>
    (vec_safe_length (cfun->local_decls) / 2);
  should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  gcc_obstack_init (&name_obstack);
  access_pool = create_alloc_pool ("SRA accesses", sizeof (struct access), 16);
  link_pool = create_alloc_pool ("SRA links", sizeof (struct assign_link), 16);
  base_access_vec = new hash_map<tree, auto_vec<access_p> >;
  memset (&sra_stats, 0, sizeof (sra_stats));
  encountered_apply_args = false;
  encountered_recursive_call = false;
  encountered_unchangable_recursive_call = false;
}
/* Deallocate all general structures.  */

static void
sra_deinitialize (void)
{
  BITMAP_FREE (candidate_bitmap);
  delete candidates;
  candidates = NULL;
  BITMAP_FREE (should_scalarize_away_bitmap);
  BITMAP_FREE (cannot_scalarize_away_bitmap);
  free_alloc_pool (access_pool);
  free_alloc_pool (link_pool);
  obstack_free (&name_obstack, NULL);

  delete base_access_vec;
}
/* Remove DECL from candidates for SRA and write REASON to the dump file if
   there is one.  */

static void
disqualify_candidate (tree decl, const char *reason)
{
  if (bitmap_clear_bit (candidate_bitmap, DECL_UID (decl)))
    candidates->remove_elt_with_hash (decl, DECL_UID (decl));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "! Disqualifying ");
      print_generic_expr (dump_file, decl, 0);
      fprintf (dump_file, " - %s\n", reason);
    }
}
/* Return true iff the type contains a field or an element which does not allow
   scalarization.  */

static bool
type_internals_preclude_sra_p (tree type, const char **msg)
{
  tree fld;
  tree et;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
        if (TREE_CODE (fld) == FIELD_DECL)
          {
            tree ft = TREE_TYPE (fld);

            if (TREE_THIS_VOLATILE (fld))
              {
                *msg = "volatile structure field";
                return true;
              }
            if (!DECL_FIELD_OFFSET (fld))
              {
                *msg = "no structure field offset";
                return true;
              }
            if (!DECL_SIZE (fld))
              {
                *msg = "zero structure field size";
                return true;
              }
            if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
              {
                *msg = "structure field offset not fixed";
                return true;
              }
            if (!tree_fits_uhwi_p (DECL_SIZE (fld)))
              {
                *msg = "structure field size not fixed";
                return true;
              }
            if (!tree_fits_shwi_p (bit_position (fld)))
              {
                *msg = "structure field size too big";
                return true;
              }
            if (AGGREGATE_TYPE_P (ft)
                && int_bit_position (fld) % BITS_PER_UNIT != 0)
              {
                *msg = "structure field is bit field";
                return true;
              }

            if (AGGREGATE_TYPE_P (ft) && type_internals_preclude_sra_p (ft, msg))
              return true;
          }

      return false;

    case ARRAY_TYPE:
      et = TREE_TYPE (type);

      if (TYPE_VOLATILE (et))
        {
          *msg = "element type is volatile";
          return true;
        }

      if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
        return true;

      return false;

    default:
      return false;
    }
}
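
/* For illustration, a type such as

     struct s { volatile int v; };

   is rejected here with the "volatile structure field" message, as is any
   record containing a field whose offset or size is not a compile-time
   constant.  */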
/* If T is an SSA_NAME, return NULL if it is not a default def or return its
   base variable if it is.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (t))
        return SSA_NAME_VAR (t);
      else
        return NULL_TREE;
    }
  return t;
}
/* Mark a dereference of BASE of distance DIST in a basic block that STMT
   belongs to, unless the BB has already been marked as potentially
   final.  */

static void
mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  int idx, parm_index = 0;
  tree parm;

  if (bitmap_bit_p (final_bbs, bb->index))
    return;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm && parm != base;
       parm = DECL_CHAIN (parm))
    parm_index++;

  gcc_assert (parm_index < func_param_count);

  idx = bb->index * func_param_count + parm_index;
  if (bb_dereferences[idx] < dist)
    bb_dereferences[idx] = dist;
}
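
/* Note that bb_dereferences is effectively a row-major matrix indexed by
   basic block and parameter.  For example, with two tracked parameters, the
   dereference distance of parameter 1 in basic block 3 is stored at
   bb_dereferences[3 * 2 + 1].  */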
/* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
   the three fields.  Also add it to the vector of accesses corresponding to
   the base.  Finally, return the new access.  */

static struct access *
create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
{
  struct access *access;

  access = (struct access *) pool_alloc (access_pool);
  memset (access, 0, sizeof (struct access));
  access->base = base;
  access->offset = offset;
  access->size = size;

  base_access_vec->get_or_insert (base).safe_push (access);

  return access;
}
/* Create and insert access for EXPR.  Return created access, or NULL if it is
   not possible.  */

static struct access *
create_access (tree expr, gimple stmt, bool write)
{
  struct access *access;
  HOST_WIDE_INT offset, size, max_size;
  tree base = expr;
  bool ptr, unscalarizable_region = false;

  base = get_ref_base_and_extent (expr, &offset, &size, &max_size);

  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (base) == MEM_REF)
    {
      base = get_ssa_base_param (TREE_OPERAND (base, 0));
      if (!base)
        return NULL;
      ptr = true;
    }
  else
    ptr = false;

  if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  if (sra_mode == SRA_MODE_EARLY_IPA)
    {
      if (size < 0 || size != max_size)
        {
          disqualify_candidate (base, "Encountered a variable sized access.");
          return NULL;
        }
      if (TREE_CODE (expr) == COMPONENT_REF
          && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
        {
          disqualify_candidate (base, "Encountered a bit-field access.");
          return NULL;
        }
      gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);

      if (ptr)
        mark_parm_dereference (base, offset + size, stmt);
    }
  else
    {
      if (size != max_size)
        {
          size = max_size;
          unscalarizable_region = true;
        }
      if (size < 0)
        {
          disqualify_candidate (base, "Encountered an unconstrained access.");
          return NULL;
        }
    }

  access = create_access_1 (base, offset, size);
  access->expr = expr;
  access->type = TREE_TYPE (expr);
  access->write = write;
  access->grp_unscalarizable_region = unscalarizable_region;
  access->stmt = stmt;

  if (TREE_CODE (expr) == COMPONENT_REF
      && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
    access->non_addressable = 1;

  return access;
}
/* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
   register types or (recursively) records with only these two kinds of fields.
   It also returns false if any of these records contains a bit-field.  */

static bool
type_consists_of_records_p (tree type)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
        tree ft = TREE_TYPE (fld);

        if (DECL_BIT_FIELD (fld))
          return false;

        if (!is_gimple_reg_type (ft)
            && !type_consists_of_records_p (ft))
          return false;
      }

  return true;
}
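
/* For illustration, a record such as

     struct ok { int i; struct { float f; } inner; };

   satisfies this predicate, whereas

     struct bf { int bits : 3; };

   does not (it contains a bit-field), and neither does a union, which is not
   a RECORD_TYPE at all.  */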
/* Create total_scalarization accesses for all scalar type fields in DECL that
   must be of a RECORD_TYPE conforming to type_consists_of_records_p.  BASE
   must be the top-most VAR_DECL representing the variable, OFFSET must be the
   offset of DECL within BASE.  REF must be the memory reference expression for
   the given decl.  */

static void
completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset,
                             tree ref)
{
  tree fld, decl_type = TREE_TYPE (decl);

  for (fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
        HOST_WIDE_INT pos = offset + int_bit_position (fld);
        tree ft = TREE_TYPE (fld);
        tree nref = build3 (COMPONENT_REF, TREE_TYPE (fld), ref, fld,
                            NULL_TREE);

        if (is_gimple_reg_type (ft))
          {
            struct access *access;
            HOST_WIDE_INT size;

            size = tree_to_uhwi (DECL_SIZE (fld));
            access = create_access_1 (base, pos, size);
            access->expr = nref;
            access->type = ft;
            access->grp_total_scalarization = 1;
            /* Accesses for intraprocedural SRA can have their stmt NULL.  */
          }
        else
          completely_scalarize_record (base, fld, pos, nref);
      }
}
/* Create total_scalarization accesses for all scalar type fields in VAR and
   for VAR as a whole.  VAR must be of a RECORD_TYPE conforming to
   type_consists_of_records_p.  */

static void
completely_scalarize_var (tree var)
{
  HOST_WIDE_INT size = tree_to_uhwi (DECL_SIZE (var));
  struct access *access;

  access = create_access_1 (var, 0, size);
  access->expr = var;
  access->type = TREE_TYPE (var);
  access->grp_total_scalarization = 1;

  completely_scalarize_record (var, var, 0, var);
}
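
/* A sketch of the effect, assuming 32-bit int and float and no padding: for

     struct { int i; float f; } v;

   this creates one access covering all of V (offset 0, size 64) plus
   total_scalarization accesses for v.i (offset 0, size 32) and v.f
   (offset 32, size 32), allowing V to be replaced entirely by two
   scalars.  */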
/* Return true if REF has a VIEW_CONVERT_EXPR somewhere in it.  */

static inline bool
contains_view_convert_expr_p (const_tree ref)
{
  while (handled_component_p (ref))
    {
      if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
        return true;
      ref = TREE_OPERAND (ref, 0);
    }

  return false;
}
/* Search the given tree for a declaration by skipping handled components and
   exclude it from the candidates.  */

static void
disqualify_base_of_expr (tree t, const char *reason)
{
  t = get_base_address (t);
  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (t) == MEM_REF)
    t = get_ssa_base_param (TREE_OPERAND (t, 0));

  if (t && DECL_P (t))
    disqualify_candidate (t, reason);
}
/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return the created access or NULL if none is
   created.  */

static struct access *
build_access_from_expr_1 (tree expr, gimple stmt, bool write)
{
  struct access *ret = NULL;
  bool partial_ref;

  if (TREE_CODE (expr) == BIT_FIELD_REF
      || TREE_CODE (expr) == IMAGPART_EXPR
      || TREE_CODE (expr) == REALPART_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      partial_ref = true;
    }
  else
    partial_ref = false;

  /* We need to dive through V_C_Es in order to get the size of its parameter
     and not the result type.  Ada produces such statements.  We are also
     capable of handling the topmost V_C_E but not any of those buried in other
     handled components.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    expr = TREE_OPERAND (expr, 0);

  if (contains_view_convert_expr_p (expr))
    {
      disqualify_base_of_expr (expr, "V_C_E under a different handled "
                               "component.");
      return NULL;
    }
  if (TREE_THIS_VOLATILE (expr))
    {
      disqualify_base_of_expr (expr, "part of a volatile reference.");
      return NULL;
    }

  switch (TREE_CODE (expr))
    {
    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
          && sra_mode != SRA_MODE_EARLY_IPA)
        return NULL;
      /* fall through */
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      ret = create_access (expr, stmt, write);
      break;

    default:
      break;
    }

  if (write && partial_ref && ret)
    ret->grp_partial_lhs = 1;

  return ret;
}
/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return true if any access has been inserted.
   STMT must be the statement from which the expression is taken, WRITE must be
   true if the expression is a store and false otherwise.  */

static bool
build_access_from_expr (tree expr, gimple stmt, bool write)
{
  struct access *access;

  access = build_access_from_expr_1 (expr, stmt, write);
  if (access)
    {
      /* This means the aggregate is accessed as a whole in a way other than an
         assign statement and thus cannot be removed even if we had a scalar
         replacement for everything.  */
      if (cannot_scalarize_away_bitmap)
        bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
      return true;
    }
  return false;
}
/* Return the single non-EH successor edge of BB or NULL if there is none or
   more than one.  */

static edge
single_non_eh_succ (basic_block bb)
{
  edge e, res = NULL;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & EDGE_EH))
      {
        if (res)
          return NULL;
        res = e;
      }

  return res;
}
/* Disqualify LHS and RHS for scalarization if STMT has to terminate its BB and
   there is no alternative spot where to put statements SRA might need to
   generate after it.  The spot we are looking for is an edge leading to a
   single non-EH successor, if it exists and is indeed single.  RHS may be
   NULL, in that case ignore it.  */

static bool
disqualify_if_bad_bb_terminating_stmt (gimple stmt, tree lhs, tree rhs)
{
  if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && stmt_ends_bb_p (stmt))
    {
      if (single_non_eh_succ (gimple_bb (stmt)))
        return false;

      disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
      if (rhs)
        disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
      return true;
    }
  return false;
}
/* Scan expressions occurring in STMT, create access structures for all accesses
   to candidates for scalarization and remove those candidates which occur in
   statements or expressions that prevent them from being split apart.  Return
   true if any access has been inserted.  */

static bool
build_accesses_from_assign (gimple stmt)
{
  tree lhs, rhs;
  struct access *lacc, *racc;

  if (!gimple_assign_single_p (stmt)
      /* Scope clobbers don't influence scalarization.  */
      || gimple_clobber_p (stmt))
    return false;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);

  if (disqualify_if_bad_bb_terminating_stmt (stmt, lhs, rhs))
    return false;

  racc = build_access_from_expr_1 (rhs, stmt, false);
  lacc = build_access_from_expr_1 (lhs, stmt, true);

  if (lacc)
    lacc->grp_assignment_write = 1;

  if (racc)
    {
      racc->grp_assignment_read = 1;
      if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
          && !is_gimple_reg_type (racc->type))
        bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
    }

  if (lacc && racc
      && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && !lacc->grp_unscalarizable_region
      && !racc->grp_unscalarizable_region
      && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
      && lacc->size == racc->size
      && useless_type_conversion_p (lacc->type, racc->type))
    {
      struct assign_link *link;

      link = (struct assign_link *) pool_alloc (link_pool);
      memset (link, 0, sizeof (struct assign_link));

      link->lacc = lacc;
      link->racc = racc;

      add_link_to_rhs (racc, link);
    }

  return lacc || racc;
}
/* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
   GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */

static bool
asm_visit_addr (gimple, tree op, tree, void *)
{
  op = get_base_address (op);
  if (op
      && DECL_P (op))
    disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");

  return false;
}
/* Return true iff callsite CALL has at least as many actual arguments as there
   are formal parameters of the function currently processed by IPA-SRA and
   that their types match.  */

static inline bool
callsite_arguments_match_p (gimple call)
{
  if (gimple_call_num_args (call) < (unsigned) func_param_count)
    return false;

  tree parm;
  int i;
  for (parm = DECL_ARGUMENTS (current_function_decl), i = 0;
       parm;
       parm = DECL_CHAIN (parm), i++)
    {
      tree arg = gimple_call_arg (call, i);
      if (!useless_type_conversion_p (TREE_TYPE (parm), TREE_TYPE (arg)))
        return false;
    }
  return true;
}
/* Scan function and look for interesting expressions and create access
   structures for them.  Return true iff any access is created.  */

static bool
scan_function (void)
{
  basic_block bb;
  bool ret = false;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          tree t;
          unsigned i;

          if (final_bbs && stmt_can_throw_external (stmt))
            bitmap_set_bit (final_bbs, bb->index);
          switch (gimple_code (stmt))
            {
            case GIMPLE_RETURN:
              t = gimple_return_retval (as_a <greturn *> (stmt));
              if (t != NULL_TREE)
                ret |= build_access_from_expr (t, stmt, false);
              if (final_bbs)
                bitmap_set_bit (final_bbs, bb->index);
              break;

            case GIMPLE_ASSIGN:
              ret |= build_accesses_from_assign (stmt);
              break;

            case GIMPLE_CALL:
              for (i = 0; i < gimple_call_num_args (stmt); i++)
                ret |= build_access_from_expr (gimple_call_arg (stmt, i),
                                               stmt, false);

              if (sra_mode == SRA_MODE_EARLY_IPA)
                {
                  tree dest = gimple_call_fndecl (stmt);
                  int flags = gimple_call_flags (stmt);

                  if (dest)
                    {
                      if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
                          && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
                        encountered_apply_args = true;
                      if (recursive_call_p (current_function_decl, dest))
                        {
                          encountered_recursive_call = true;
                          if (!callsite_arguments_match_p (stmt))
                            encountered_unchangable_recursive_call = true;
                        }
                    }

                  if (final_bbs
                      && (flags & (ECF_CONST | ECF_PURE)) == 0)
                    bitmap_set_bit (final_bbs, bb->index);
                }

              t = gimple_call_lhs (stmt);
              if (t && !disqualify_if_bad_bb_terminating_stmt (stmt, t, NULL))
                ret |= build_access_from_expr (t, stmt, true);
              break;

            case GIMPLE_ASM:
              {
                gasm *asm_stmt = as_a <gasm *> (stmt);
                walk_stmt_load_store_addr_ops (asm_stmt, NULL, NULL, NULL,
                                               asm_visit_addr);
                if (final_bbs)
                  bitmap_set_bit (final_bbs, bb->index);

                for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
                  {
                    t = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
                    ret |= build_access_from_expr (t, asm_stmt, false);
                  }
                for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
                  {
                    t = TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
                    ret |= build_access_from_expr (t, asm_stmt, true);
                  }
              }
              break;

            default:
              break;
            }
        }
    }

  return ret;
}
/* Helper of QSORT function.  There are pointers to accesses in the array.  An
   access is considered smaller than another if it has smaller offset or if the
   offsets are the same but its size is bigger.  */

static int
compare_access_positions (const void *a, const void *b)
{
  const access_p *fp1 = (const access_p *) a;
  const access_p *fp2 = (const access_p *) b;
  const access_p f1 = *fp1;
  const access_p f2 = *fp2;

  if (f1->offset != f2->offset)
    return f1->offset < f2->offset ? -1 : 1;

  if (f1->size == f2->size)
    {
      if (f1->type == f2->type)
        return 0;
      /* Put any non-aggregate type before any aggregate type.  */
      else if (!is_gimple_reg_type (f1->type)
               && is_gimple_reg_type (f2->type))
        return 1;
      else if (is_gimple_reg_type (f1->type)
               && !is_gimple_reg_type (f2->type))
        return -1;
      /* Put any complex or vector type before any other scalar type.  */
      else if (TREE_CODE (f1->type) != COMPLEX_TYPE
               && TREE_CODE (f1->type) != VECTOR_TYPE
               && (TREE_CODE (f2->type) == COMPLEX_TYPE
                   || TREE_CODE (f2->type) == VECTOR_TYPE))
        return 1;
      else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
                || TREE_CODE (f1->type) == VECTOR_TYPE)
               && TREE_CODE (f2->type) != COMPLEX_TYPE
               && TREE_CODE (f2->type) != VECTOR_TYPE)
        return -1;
      /* Put the integral type with the bigger precision first.  */
      else if (INTEGRAL_TYPE_P (f1->type)
               && INTEGRAL_TYPE_P (f2->type))
        return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
      /* Put any integral type with non-full precision last.  */
      else if (INTEGRAL_TYPE_P (f1->type)
               && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
                   != TYPE_PRECISION (f1->type)))
        return 1;
      else if (INTEGRAL_TYPE_P (f2->type)
               && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
                   != TYPE_PRECISION (f2->type)))
        return -1;
      /* Stabilize the sort.  */
      return TYPE_UID (f1->type) - TYPE_UID (f2->type);
    }

  /* We want the bigger accesses first, thus the opposite operator in the next
     line: */
  return f1->size > f2->size ? -1 : 1;
}
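
/* For example, accesses with (offset, size) pairs (0, 32), (0, 64) and
   (32, 32) end up ordered as (0, 64), (0, 32), (32, 32): equal offsets put
   the bigger access first, so a representative always precedes the accesses
   it covers.  */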
/* Append a name of the declaration to the name obstack.  A helper function for
   make_fancy_name.  */

static void
make_fancy_decl_name (tree decl)
{
  char buffer[32];

  tree name = DECL_NAME (decl);
  if (name)
    obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
                  IDENTIFIER_LENGTH (name));
  else
    {
      sprintf (buffer, "D%u", DECL_UID (decl));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
    }
}
/* Helper for make_fancy_name.  */

static void
make_fancy_name_1 (tree expr)
{
  char buffer[32];
  tree index;

  if (DECL_P (expr))
    {
      make_fancy_decl_name (expr);
      return;
    }

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      make_fancy_decl_name (TREE_OPERAND (expr, 1));
      break;

    case ARRAY_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      /* Arrays with only one element may not have a constant as their
         index.  */
      index = TREE_OPERAND (expr, 1);
      if (TREE_CODE (index) != INTEGER_CST)
        break;
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
      break;

    case ADDR_EXPR:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      break;

    case MEM_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      if (!integer_zerop (TREE_OPERAND (expr, 1)))
        {
          obstack_1grow (&name_obstack, '$');
          sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
                   TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
          obstack_grow (&name_obstack, buffer, strlen (buffer));
        }
      break;

    case BIT_FIELD_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      gcc_unreachable ();	/* we treat these as scalars.  */
      break;
    default:
      break;
    }
}
/* Create a human readable name for the replacement variable of EXPR.  */

static char *
make_fancy_name (tree expr)
{
  make_fancy_name_1 (expr);
  obstack_1grow (&name_obstack, '\0');
  return XOBFINISH (&name_obstack, char *);
}
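
/* For example, the reference a.data[3].f yields the replacement name
   "a$data$3$f": each COMPONENT_REF appends '$' and the field name, each
   ARRAY_REF with a constant index appends '$' and the index.  */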
/* Construct a MEM_REF that would reference a part of aggregate BASE of type
   EXP_TYPE at the given OFFSET.  If BASE is something for which
   get_addr_base_and_unit_offset returns NULL, gsi must be non-NULL and is used
   to insert new statements either before or below the current one as specified
   by INSERT_AFTER.  This function is not capable of handling bitfields.

   BASE must be either a declaration or a memory reference that has correct
   alignment information embedded in it (e.g. a pre-existing one in SRA).  */

tree
build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
                      tree exp_type, gimple_stmt_iterator *gsi,
                      bool insert_after)
{
  tree prev_base = base;
  tree off;
  tree mem_ref;
  HOST_WIDE_INT base_offset;
  unsigned HOST_WIDE_INT misalign;
  unsigned int align;

  gcc_checking_assert (offset % BITS_PER_UNIT == 0);
  get_object_alignment_1 (base, &align, &misalign);
  base = get_addr_base_and_unit_offset (base, &base_offset);

  /* get_addr_base_and_unit_offset returns NULL for references with a variable
     offset such as array[var_index].  */
  if (!base)
    {
      gassign *stmt;
      tree tmp, addr;

      gcc_checking_assert (gsi);
      tmp = make_ssa_name (build_pointer_type (TREE_TYPE (prev_base)));
      addr = build_fold_addr_expr (unshare_expr (prev_base));
      STRIP_USELESS_TYPE_CONVERSION (addr);
      stmt = gimple_build_assign (tmp, addr);
      gimple_set_location (stmt, loc);
      if (insert_after)
        gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
        gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

      off = build_int_cst (reference_alias_ptr_type (prev_base),
                           offset / BITS_PER_UNIT);
      base = tmp;
    }
  else if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
                           base_offset + offset / BITS_PER_UNIT);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (base),
                           base_offset + offset / BITS_PER_UNIT);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  misalign = (misalign + offset) & (align - 1);
  if (misalign != 0)
    align = (misalign & -misalign);
  if (align != TYPE_ALIGN (exp_type))
    exp_type = build_aligned_type (exp_type, align);

  mem_ref = fold_build2_loc (loc, MEM_REF, exp_type, base, off);
  if (TREE_THIS_VOLATILE (prev_base))
    TREE_THIS_VOLATILE (mem_ref) = 1;
  if (TREE_SIDE_EFFECTS (prev_base))
    TREE_SIDE_EFFECTS (mem_ref) = 1;
  return mem_ref;
}
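
/* A conceptual example: for a declaration BASE "v" and an OFFSET of 32 bits,
   the function returns something equivalent to MEM_REF <&v, 4> of type
   EXP_TYPE, i.e. a *(exp_type *)((char *)&v + 4) style reference carrying
   the alignment computed above.  */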
/* Construct a memory reference to a part of an aggregate BASE at the given
   OFFSET and of the same type as MODEL.  In case this is a reference to a
   bit-field, the function will replicate the last component_ref of model's
   expr to access it.  GSI and INSERT_AFTER have the same meaning as in
   build_ref_for_offset.  */

static tree
build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
                     struct access *model, gimple_stmt_iterator *gsi,
                     bool insert_after)
{
  if (TREE_CODE (model->expr) == COMPONENT_REF
      && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
    {
      /* This access represents a bit-field.  */
      tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);

      offset -= int_bit_position (fld);
      exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
      t = build_ref_for_offset (loc, base, offset, exp_type, gsi, insert_after);
      return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
                              NULL_TREE);
    }
  else
    return build_ref_for_offset (loc, base, offset, model->type,
                                 gsi, insert_after);
}
/* Attempt to build a memory reference that we could put into a gimple
   debug_bind statement.  Similar to build_ref_for_model but punts if it has to
   create statements and returns NULL instead.  This function also ignores
   alignment issues and so its results should never end up in non-debug
   statements.  */

static tree
build_debug_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
                           struct access *model)
{
  HOST_WIDE_INT base_offset;
  tree off;

  if (TREE_CODE (model->expr) == COMPONENT_REF
      && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
    return NULL_TREE;

  base = get_addr_base_and_unit_offset (base, &base_offset);
  if (!base)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
                           base_offset + offset / BITS_PER_UNIT);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (base),
                           base_offset + offset / BITS_PER_UNIT);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  return fold_build2_loc (loc, MEM_REF, model->type, base, off);
}
/* Construct a memory reference consisting of component_refs and array_refs to
   a part of an aggregate *RES (which is of type TYPE).  The requested part
   should have type EXP_TYPE and be at the given OFFSET.  This function might
   not succeed, it returns true when it does and only then *RES points to
   something meaningful.  This function should be used only to build
   expressions that we might need to present to user (e.g. in warnings).  In
   all other situations, build_ref_for_model or build_ref_for_offset should be
   used instead.  */

static bool
build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
                                    tree exp_type)
{
  while (1)
    {
      tree fld;
      tree tr_size, index, minidx;
      HOST_WIDE_INT el_size;

      if (offset == 0 && exp_type
          && types_compatible_p (exp_type, type))
        return true;

      switch (TREE_CODE (type))
        {
        case UNION_TYPE:
        case QUAL_UNION_TYPE:
        case RECORD_TYPE:
          for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
            {
              HOST_WIDE_INT pos, size;
              tree tr_pos, expr, *expr_ptr;

              if (TREE_CODE (fld) != FIELD_DECL)
                continue;

              tr_pos = bit_position (fld);
              if (!tr_pos || !tree_fits_uhwi_p (tr_pos))
                continue;
              pos = tree_to_uhwi (tr_pos);
              gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
              tr_size = DECL_SIZE (fld);
              if (!tr_size || !tree_fits_uhwi_p (tr_size))
                continue;
              size = tree_to_uhwi (tr_size);
              if (size == 0)
                {
                  if (pos != offset)
                    continue;
                }
              else if (pos > offset || (pos + size) <= offset)
                continue;

              expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
                             NULL_TREE);
              expr_ptr = &expr;
              if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
                                                      offset - pos, exp_type))
                {
                  *res = expr;
                  return true;
                }
            }
          return false;

        case ARRAY_TYPE:
          tr_size = TYPE_SIZE (TREE_TYPE (type));
          if (!tr_size || !tree_fits_uhwi_p (tr_size))
            return false;
          el_size = tree_to_uhwi (tr_size);

          minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
          if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
            return false;
          index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
          if (!integer_zerop (minidx))
            index = int_const_binop (PLUS_EXPR, index, minidx);
          *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
                         NULL_TREE, NULL_TREE);
          offset = offset % el_size;
          type = TREE_TYPE (type);
          break;

        default:
          if (offset != 0)
            return false;

          if (exp_type)
            return false;
          else
            return true;
        }
    }
}
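
/* An illustrative sketch, assuming 32-bit int and no padding: for a variable
   V of type

     struct s { int a; int b[4]; };

   an OFFSET of 64 bits and EXP_TYPE of int, this builds the user-friendly
   expression V.b[1].  */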
/* Return true iff TYPE is a stdarg va_list type.  */

static inline bool
is_va_list_type (tree type)
{
  return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
}
/* Print message to dump file why a variable was rejected.  */

static void
reject (tree var, const char *msg)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "\n");
    }
}
/* Return true if VAR is a candidate for SRA.  */

static bool
maybe_add_sra_candidate (tree var)
{
  tree type = TREE_TYPE (var);
  const char *msg;
  tree_node **slot;

  if (!AGGREGATE_TYPE_P (type))
    {
      reject (var, "not aggregate");
      return false;
    }
  if (needs_to_live_in_memory (var))
    {
      reject (var, "needs to live in memory");
      return false;
    }
  if (TREE_THIS_VOLATILE (var))
    {
      reject (var, "is volatile");
      return false;
    }
  if (!COMPLETE_TYPE_P (type))
    {
      reject (var, "has incomplete type");
      return false;
    }
  if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
    {
      reject (var, "type size not fixed");
      return false;
    }
  if (tree_to_uhwi (TYPE_SIZE (type)) == 0)
    {
      reject (var, "type size is zero");
      return false;
    }
  if (type_internals_preclude_sra_p (type, &msg))
    {
      reject (var, msg);
      return false;
    }
  if (/* Fix for PR 41089.  tree-stdarg.c needs to have va_lists intact but
         we also want to schedule it rather late.  Thus we ignore it in
         the early pass.  */
      (sra_mode == SRA_MODE_EARLY_INTRA
       && is_va_list_type (type)))
    {
      reject (var, "is va_list");
      return false;
    }

  bitmap_set_bit (candidate_bitmap, DECL_UID (var));
  slot = candidates->find_slot_with_hash (var, DECL_UID (var), INSERT);
  *slot = var;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "\n");
    }

  return true;
}
/* The very first phase of intraprocedural SRA.  It marks in candidate_bitmap
   those declarations whose type is suitable for scalarization.  */

static bool
find_var_candidates (void)
{
  tree var, parm;
  unsigned int i;
  bool ret = false;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    ret |= maybe_add_sra_candidate (parm);

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (TREE_CODE (var) != VAR_DECL)
        continue;

      ret |= maybe_add_sra_candidate (var);
    }

  return ret;
}
/* Sort all accesses for the given variable, check for partial overlaps and
   return NULL if there are any.  If there are none, pick a representative for
   each combination of offset and size and create a linked list out of them.
   Return the pointer to the first representative and make sure it is the first
   one in the vector of accesses.  */

static struct access *
sort_and_splice_var_accesses (tree var)
{
  int i, j, access_count;
  struct access *res, **prev_acc_ptr = &res;
  vec<access_p> *access_vec;
  bool first = true;
  HOST_WIDE_INT low = -1, high = 0;

  access_vec = get_base_access_vector (var);
  if (!access_vec)
    return NULL;
  access_count = access_vec->length ();

  /* Sort by <OFFSET, SIZE>.  */
  access_vec->qsort (compare_access_positions);

  i = 0;
  while (i < access_count)
    {
      struct access *access = (*access_vec)[i];
      bool grp_write = access->write;
      bool grp_read = !access->write;
      bool grp_scalar_write = access->write
        && is_gimple_reg_type (access->type);
      bool grp_scalar_read = !access->write
        && is_gimple_reg_type (access->type);
      bool grp_assignment_read = access->grp_assignment_read;
      bool grp_assignment_write = access->grp_assignment_write;
      bool multiple_scalar_reads = false;
      bool total_scalarization = access->grp_total_scalarization;
      bool grp_partial_lhs = access->grp_partial_lhs;
      bool first_scalar = is_gimple_reg_type (access->type);
      bool unscalarizable_region = access->grp_unscalarizable_region;

      if (first || access->offset >= high)
        {
          first = false;
          low = access->offset;
          high = access->offset + access->size;
        }
      else if (access->offset > low && access->offset + access->size > high)
        return NULL;
      else
        gcc_assert (access->offset >= low
                    && access->offset + access->size <= high);

      j = i + 1;
      while (j < access_count)
        {
          struct access *ac2 = (*access_vec)[j];
          if (ac2->offset != access->offset || ac2->size != access->size)
            break;
          if (ac2->write)
            {
              grp_write = true;
              grp_scalar_write = (grp_scalar_write
                                  || is_gimple_reg_type (ac2->type));
            }
          else
            {
              grp_read = true;
              if (is_gimple_reg_type (ac2->type))
                {
                  if (grp_scalar_read)
                    multiple_scalar_reads = true;
                  else
                    grp_scalar_read = true;
                }
            }
          grp_assignment_read |= ac2->grp_assignment_read;
          grp_assignment_write |= ac2->grp_assignment_write;
          grp_partial_lhs |= ac2->grp_partial_lhs;
          unscalarizable_region |= ac2->grp_unscalarizable_region;
          total_scalarization |= ac2->grp_total_scalarization;
          relink_to_new_repr (access, ac2);

          /* If there are both aggregate-type and scalar-type accesses with
             this combination of size and offset, the comparison function
             should have put the scalars first.  */
          gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
          ac2->group_representative = access;
          j++;
        }

      i = j;

      access->group_representative = access;
      access->grp_write = grp_write;
      access->grp_read = grp_read;
      access->grp_scalar_read = grp_scalar_read;
      access->grp_scalar_write = grp_scalar_write;
      access->grp_assignment_read = grp_assignment_read;
      access->grp_assignment_write = grp_assignment_write;
      access->grp_hint = multiple_scalar_reads || total_scalarization;
      access->grp_total_scalarization = total_scalarization;
      access->grp_partial_lhs = grp_partial_lhs;
      access->grp_unscalarizable_region = unscalarizable_region;
      if (access->first_link)
        add_access_to_work_queue (access);

      *prev_acc_ptr = access;
      prev_acc_ptr = &access->next_grp;
    }

  gcc_assert (res == (*access_vec)[0]);
  return res;
}
2000 /* Create a variable for the given ACCESS, deriving its type, name and a few
2001 other properties from it. Return the new variable declaration; callers
2002 store it in ACCESS->replacement_decl. */
2004 static tree
2005 create_access_replacement (struct access *access)
2007 tree repl;
2009 if (access->grp_to_be_debug_replaced)
2011 repl = create_tmp_var_raw (access->type);
2012 DECL_CONTEXT (repl) = current_function_decl;
2014 else
2015 repl = create_tmp_var (access->type, "SR");
2016 if (TREE_CODE (access->type) == COMPLEX_TYPE
2017 || TREE_CODE (access->type) == VECTOR_TYPE)
2019 if (!access->grp_partial_lhs)
2020 DECL_GIMPLE_REG_P (repl) = 1;
2022 else if (access->grp_partial_lhs
2023 && is_gimple_reg_type (access->type))
2024 TREE_ADDRESSABLE (repl) = 1;
2026 DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
2027 DECL_ARTIFICIAL (repl) = 1;
2028 DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
2030 if (DECL_NAME (access->base)
2031 && !DECL_IGNORED_P (access->base)
2032 && !DECL_ARTIFICIAL (access->base))
2034 char *pretty_name = make_fancy_name (access->expr);
2035 tree debug_expr = unshare_expr_without_location (access->expr), d;
2036 bool fail = false;
2038 DECL_NAME (repl) = get_identifier (pretty_name);
2039 obstack_free (&name_obstack, pretty_name);
2041 /* Get rid of any SSA_NAMEs embedded in debug_expr,
2042 as DECL_DEBUG_EXPR isn't considered when looking for still
2043 used SSA_NAMEs and thus they could be freed. All debug info
2044 generation cares about is whether something is constant or variable
2045 and that get_ref_base_and_extent works properly on the
2046 expression. It cannot handle accesses at a non-constant offset
2047 though, so just give up in those cases. */
2048 for (d = debug_expr;
2049 !fail && (handled_component_p (d) || TREE_CODE (d) == MEM_REF);
2050 d = TREE_OPERAND (d, 0))
2051 switch (TREE_CODE (d))
2053 case ARRAY_REF:
2054 case ARRAY_RANGE_REF:
2055 if (TREE_OPERAND (d, 1)
2056 && TREE_CODE (TREE_OPERAND (d, 1)) != INTEGER_CST)
2057 fail = true;
2058 if (TREE_OPERAND (d, 3)
2059 && TREE_CODE (TREE_OPERAND (d, 3)) != INTEGER_CST)
2060 fail = true;
2061 /* FALLTHRU */
2062 case COMPONENT_REF:
2063 if (TREE_OPERAND (d, 2)
2064 && TREE_CODE (TREE_OPERAND (d, 2)) != INTEGER_CST)
2065 fail = true;
2066 break;
2067 case MEM_REF:
2068 if (TREE_CODE (TREE_OPERAND (d, 0)) != ADDR_EXPR)
2069 fail = true;
2070 else
2071 d = TREE_OPERAND (d, 0);
2072 break;
2073 default:
2074 break;
2076 if (!fail)
2078 SET_DECL_DEBUG_EXPR (repl, debug_expr);
2079 DECL_HAS_DEBUG_EXPR_P (repl) = 1;
2081 if (access->grp_no_warning)
2082 TREE_NO_WARNING (repl) = 1;
2083 else
2084 TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
2086 else
2087 TREE_NO_WARNING (repl) = 1;
2089 if (dump_file)
2091 if (access->grp_to_be_debug_replaced)
2093 fprintf (dump_file, "Created a debug-only replacement for ");
2094 print_generic_expr (dump_file, access->base, 0);
2095 fprintf (dump_file, " offset: %u, size: %u\n",
2096 (unsigned) access->offset, (unsigned) access->size);
2098 else
2100 fprintf (dump_file, "Created a replacement for ");
2101 print_generic_expr (dump_file, access->base, 0);
2102 fprintf (dump_file, " offset: %u, size: %u: ",
2103 (unsigned) access->offset, (unsigned) access->size);
2104 print_generic_expr (dump_file, repl, 0);
2105 fprintf (dump_file, "\n");
2108 sra_stats.replacements++;
2110 return repl;
2113 /* Return the scalar replacement of ACCESS, which must have been created already. */
2115 static inline tree
2116 get_access_replacement (struct access *access)
2118 gcc_checking_assert (access->replacement_decl);
2119 return access->replacement_decl;
2123 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
2124 linked list along the way. Stop when *ACCESS is NULL or the access pointed
2125 to by it is not "within" the root. Return false iff some accesses partially
2126 overlap. */
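/* For instance (illustrative), representatives with <offset, size> of
   <0, 64>, <0, 32> and <32, 32> form a tree in which <0, 64> is the root
   and the two 32-bit accesses are its children, whereas a fourth
   representative <16, 32> would make this function return false because
   it starts inside the first child but ends beyond it. */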
2128 static bool
2129 build_access_subtree (struct access **access)
2131 struct access *root = *access, *last_child = NULL;
2132 HOST_WIDE_INT limit = root->offset + root->size;
2134 *access = (*access)->next_grp;
2135 while (*access && (*access)->offset + (*access)->size <= limit)
2137 if (!last_child)
2138 root->first_child = *access;
2139 else
2140 last_child->next_sibling = *access;
2141 last_child = *access;
2143 if (!build_access_subtree (access))
2144 return false;
2147 if (*access && (*access)->offset < limit)
2148 return false;
2150 return true;
2153 /* Build a tree of access representatives, ACCESS is the pointer to the first
2154 one, others are linked in a list by the next_grp field. Return false iff
2155 some accesses partially overlap. */
2157 static bool
2158 build_access_trees (struct access *access)
2160 while (access)
2162 struct access *root = access;
2164 if (!build_access_subtree (&access))
2165 return false;
2166 root->next_grp = access;
2168 return true;
2171 /* Return true if EXPR contains some ARRAY_REFs into an array with variable
2172 bounds. */
2174 static bool
2175 expr_with_var_bounded_array_refs_p (tree expr)
2177 while (handled_component_p (expr))
2179 if (TREE_CODE (expr) == ARRAY_REF
2180 && !tree_fits_shwi_p (array_ref_low_bound (expr)))
2181 return true;
2182 expr = TREE_OPERAND (expr, 0);
2184 return false;
2187 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
2188 both seeming beneficial and when ALLOW_REPLACEMENTS allows it. Also set all
2189 sorts of access flags appropriately along the way, notably propagating
2190 grp_read, grp_assignment_read, grp_write, grp_assignment_write and
2191 grp_total_scalarization from PARENT down to ROOT.
2193 Creating a replacement for a scalar access is considered beneficial if its
2194 grp_hint is set (this means we are either attempting total scalarization or
2195 there is more than one direct read access) or according to the following
2196 table:
2198 Access written to through a scalar type (once or more times)
2200 | Written to in an assignment statement
2202 | | Access read as scalar _once_
2203 | | |
2204 | | | Read in an assignment statement
2205 | | | |
2206 | | | | Scalarize Comment
2207 -----------------------------------------------------------------------------
2208 0 0 0 0 No access for the scalar
2209 0 0 0 1 No access for the scalar
2210 0 0 1 0 No Single read - won't help
2211 0 0 1 1 No The same case
2212 0 1 0 0 No access for the scalar
2213 0 1 0 1 No access for the scalar
2214 0 1 1 0 Yes s = *g; return s.i;
2215 0 1 1 1 Yes The same case as above
2216 1 0 0 0 No Won't help
2217 1 0 0 1 Yes s.i = 1; *g = s;
2218 1 0 1 0 Yes s.i = 5; g = s.i;
2219 1 0 1 1 Yes The same case as above
2220 1 1 0 0 No Won't help.
2221 1 1 0 1 Yes s.i = 1; *g = s;
2222 1 1 1 0 Yes s = *g; return s.i;
2223 1 1 1 1 Yes Any of the above yeses */
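/* A compilable form of the "0 1 1 0" row above (illustrative example,
   identifiers invented):

     struct S { int i; int j; };

     int
     f (struct S *g)
     {
       struct S s;
       s = *g;      // written to in an assignment statement
       return s.i;  // read as a scalar exactly once
     }

   Creating a replacement for s.i allows later passes to forward the load
   of g->i straight to the return value. */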
2225 static bool
2226 analyze_access_subtree (struct access *root, struct access *parent,
2227 bool allow_replacements)
2229 struct access *child;
2230 HOST_WIDE_INT limit = root->offset + root->size;
2231 HOST_WIDE_INT covered_to = root->offset;
2232 bool scalar = is_gimple_reg_type (root->type);
2233 bool hole = false, sth_created = false;
2235 if (parent)
2237 if (parent->grp_read)
2238 root->grp_read = 1;
2239 if (parent->grp_assignment_read)
2240 root->grp_assignment_read = 1;
2241 if (parent->grp_write)
2242 root->grp_write = 1;
2243 if (parent->grp_assignment_write)
2244 root->grp_assignment_write = 1;
2245 if (parent->grp_total_scalarization)
2246 root->grp_total_scalarization = 1;
2249 if (root->grp_unscalarizable_region)
2250 allow_replacements = false;
2252 if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
2253 allow_replacements = false;
2255 for (child = root->first_child; child; child = child->next_sibling)
2257 hole |= covered_to < child->offset;
2258 sth_created |= analyze_access_subtree (child, root,
2259 allow_replacements && !scalar);
2261 root->grp_unscalarized_data |= child->grp_unscalarized_data;
2262 root->grp_total_scalarization &= child->grp_total_scalarization;
2263 if (child->grp_covered)
2264 covered_to += child->size;
2265 else
2266 hole = true;
2269 if (allow_replacements && scalar && !root->first_child
2270 && (root->grp_hint
2271 || ((root->grp_scalar_read || root->grp_assignment_read)
2272 && (root->grp_scalar_write || root->grp_assignment_write))))
2274 /* Always create access replacements that cover the whole access.
2275 For integral types this means the precision has to match.
2276 Avoid assumptions based on the integral type kind, too. */
2277 if (INTEGRAL_TYPE_P (root->type)
2278 && (TREE_CODE (root->type) != INTEGER_TYPE
2279 || TYPE_PRECISION (root->type) != root->size)
2280 /* But leave bitfield accesses alone. */
2281 && (TREE_CODE (root->expr) != COMPONENT_REF
2282 || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
2284 tree rt = root->type;
2285 gcc_assert ((root->offset % BITS_PER_UNIT) == 0
2286 && (root->size % BITS_PER_UNIT) == 0);
2287 root->type = build_nonstandard_integer_type (root->size,
2288 TYPE_UNSIGNED (rt));
2289 root->expr = build_ref_for_offset (UNKNOWN_LOCATION,
2290 root->base, root->offset,
2291 root->type, NULL, false);
2293 if (dump_file && (dump_flags & TDF_DETAILS))
2295 fprintf (dump_file, "Changing the type of a replacement for ");
2296 print_generic_expr (dump_file, root->base, 0);
2297 fprintf (dump_file, " offset: %u, size: %u ",
2298 (unsigned) root->offset, (unsigned) root->size);
2299 fprintf (dump_file, " to an integer.\n");
2303 root->grp_to_be_replaced = 1;
2304 root->replacement_decl = create_access_replacement (root);
2305 sth_created = true;
2306 hole = false;
2308 else
2310 if (allow_replacements
2311 && scalar && !root->first_child
2312 && (root->grp_scalar_write || root->grp_assignment_write)
2313 && !bitmap_bit_p (cannot_scalarize_away_bitmap,
2314 DECL_UID (root->base)))
2316 gcc_checking_assert (!root->grp_scalar_read
2317 && !root->grp_assignment_read);
2318 sth_created = true;
2319 if (MAY_HAVE_DEBUG_STMTS)
2321 root->grp_to_be_debug_replaced = 1;
2322 root->replacement_decl = create_access_replacement (root);
2326 if (covered_to < limit)
2327 hole = true;
2328 if (scalar)
2329 root->grp_total_scalarization = 0;
2332 if (!hole || root->grp_total_scalarization)
2333 root->grp_covered = 1;
2334 else if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
2335 root->grp_unscalarized_data = 1; /* not covered and written to */
2336 return sth_created;
2339 /* Analyze all access trees linked by next_grp by means of
2340 analyze_access_subtree. */
2341 static bool
2342 analyze_access_trees (struct access *access)
2344 bool ret = false;
2346 while (access)
2348 if (analyze_access_subtree (access, NULL, true))
2349 ret = true;
2350 access = access->next_grp;
2353 return ret;
2356 /* Return true iff a potential new child of LACC at offset NORM_OFFSET and
2357 with size SIZE would conflict with an already existing one. If exactly such
2358 a child already exists in LACC, store a pointer to it in EXACT_MATCH. */
2360 static bool
2361 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
2362 HOST_WIDE_INT size, struct access **exact_match)
2364 struct access *child;
2366 for (child = lacc->first_child; child; child = child->next_sibling)
2368 if (child->offset == norm_offset && child->size == size)
2370 *exact_match = child;
2371 return true;
2374 if (child->offset < norm_offset + size
2375 && child->offset + child->size > norm_offset)
2376 return true;
2379 return false;
2382 /* Create a new child access of PARENT, with all properties just like MODEL
2383 except for its offset and with its grp_write true and grp_read false.
2384 Return the new access or NULL if it cannot be created. Note that this access
2385 is created long after all splicing and sorting; it is not located in any
2386 access vector and is automatically a representative of its group. */
2388 static struct access *
2389 create_artificial_child_access (struct access *parent, struct access *model,
2390 HOST_WIDE_INT new_offset)
2392 struct access *access;
2393 struct access **child;
2394 tree expr = parent->base;
2396 gcc_assert (!model->grp_unscalarizable_region);
2398 access = (struct access *) pool_alloc (access_pool);
2399 memset (access, 0, sizeof (struct access));
2400 if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2401 model->type))
2403 access->grp_no_warning = true;
2404 expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2405 new_offset, model, NULL, false);
2408 access->base = parent->base;
2409 access->expr = expr;
2410 access->offset = new_offset;
2411 access->size = model->size;
2412 access->type = model->type;
2413 access->grp_write = true;
2414 access->grp_read = false;
2416 child = &parent->first_child;
2417 while (*child && (*child)->offset < new_offset)
2418 child = &(*child)->next_sibling;
2420 access->next_sibling = *child;
2421 *child = access;
2423 return access;
2427 /* Propagate all subaccesses of RACC across an assignment link to LACC. Return
2428 true if any new subaccess was created. Additionally, if RACC is a scalar
2429 access but LACC is not, change the type of the latter, if possible. */
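/* Illustration (not from the source): with candidate aggregates a and b and

     b.x = 1;  // creates a subaccess of b
     a = b;    // assign link with b on the RHS and a on the LHS

   the subaccess of b is propagated across the link, so a receives a
   matching artificial child access even if a.x never appears in the
   function, which later lets the aggregate copy be rewritten roughly as
   a$x = b$x (the $ names are invented for the example). */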
2431 static bool
2432 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
2434 struct access *rchild;
2435 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2436 bool ret = false;
2438 if (is_gimple_reg_type (lacc->type)
2439 || lacc->grp_unscalarizable_region
2440 || racc->grp_unscalarizable_region)
2441 return false;
2443 if (is_gimple_reg_type (racc->type))
2445 if (!lacc->first_child && !racc->first_child)
2447 tree t = lacc->base;
2449 lacc->type = racc->type;
2450 if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
2451 lacc->offset, racc->type))
2452 lacc->expr = t;
2453 else
2455 lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2456 lacc->base, lacc->offset,
2457 racc, NULL, false);
2458 lacc->grp_no_warning = true;
2461 return false;
2464 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2466 struct access *new_acc = NULL;
2467 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2469 if (rchild->grp_unscalarizable_region)
2470 continue;
2472 if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2473 &new_acc))
2475 if (new_acc)
2477 rchild->grp_hint = 1;
2478 new_acc->grp_hint |= new_acc->grp_read;
2479 if (rchild->first_child)
2480 ret |= propagate_subaccesses_across_link (new_acc, rchild);
2482 continue;
2485 rchild->grp_hint = 1;
2486 new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
2487 if (new_acc)
2489 ret = true;
2490 if (racc->first_child)
2491 propagate_subaccesses_across_link (new_acc, rchild);
2495 return ret;
2498 /* Propagate all subaccesses across assignment links. */
2500 static void
2501 propagate_all_subaccesses (void)
2503 while (work_queue_head)
2505 struct access *racc = pop_access_from_work_queue ();
2506 struct assign_link *link;
2508 gcc_assert (racc->first_link);
2510 for (link = racc->first_link; link; link = link->next)
2512 struct access *lacc = link->lacc;
2514 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2515 continue;
2516 lacc = lacc->group_representative;
2517 if (propagate_subaccesses_across_link (lacc, racc)
2518 && lacc->first_link)
2519 add_access_to_work_queue (lacc);
2524 /* Go through all accesses collected throughout the (intraprocedural) analysis
2525 stage, exclude overlapping ones, identify representatives and build trees
2526 out of them, making decisions about scalarization on the way. Return true
2527 iff there are any to-be-scalarized variables after this stage. */
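/* The total-scalarization size cap used below comes from --param and can
   be tuned on the command line, e.g. (illustrative invocation):

     gcc -O2 --param sra-max-scalarization-size-Ospeed=64 file.c

   which, when optimizing for speed, limits total scalarization to
   aggregates of at most 64 bytes. */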
2529 static bool
2530 analyze_all_variable_accesses (void)
2532 int res = 0;
2533 bitmap tmp = BITMAP_ALLOC (NULL);
2534 bitmap_iterator bi;
2535 unsigned i;
2536 unsigned max_scalarization_size
2537 = (optimize_function_for_size_p (cfun)
2538 ? PARAM_VALUE (PARAM_SRA_MAX_SCALARIZATION_SIZE_SIZE)
2539 : PARAM_VALUE (PARAM_SRA_MAX_SCALARIZATION_SIZE_SPEED))
2540 * BITS_PER_UNIT;
2542 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2543 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2544 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2546 tree var = candidate (i);
2548 if (TREE_CODE (var) == VAR_DECL
2549 && type_consists_of_records_p (TREE_TYPE (var)))
2551 if (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var)))
2552 <= max_scalarization_size)
2554 completely_scalarize_var (var);
2555 if (dump_file && (dump_flags & TDF_DETAILS))
2557 fprintf (dump_file, "Will attempt to totally scalarize ");
2558 print_generic_expr (dump_file, var, 0);
2559 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2562 else if (dump_file && (dump_flags & TDF_DETAILS))
2564 fprintf (dump_file, "Too big to totally scalarize: ");
2565 print_generic_expr (dump_file, var, 0);
2566 fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
2571 bitmap_copy (tmp, candidate_bitmap);
2572 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2574 tree var = candidate (i);
2575 struct access *access;
2577 access = sort_and_splice_var_accesses (var);
2578 if (!access || !build_access_trees (access))
2579 disqualify_candidate (var,
2580 "No or inhibitingly overlapping accesses.");
2583 propagate_all_subaccesses ();
2585 bitmap_copy (tmp, candidate_bitmap);
2586 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2588 tree var = candidate (i);
2589 struct access *access = get_first_repr_for_decl (var);
2591 if (analyze_access_trees (access))
2593 res++;
2594 if (dump_file && (dump_flags & TDF_DETAILS))
2596 fprintf (dump_file, "\nAccess trees for ");
2597 print_generic_expr (dump_file, var, 0);
2598 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2599 dump_access_tree (dump_file, access);
2600 fprintf (dump_file, "\n");
2603 else
2604 disqualify_candidate (var, "No scalar replacements to be created.");
2607 BITMAP_FREE (tmp);
2609 if (res)
2611 statistics_counter_event (cfun, "Scalarized aggregates", res);
2612 return true;
2614 else
2615 return false;
2618 /* Generate statements copying scalar replacements of accesses within a subtree
2619 into or out of AGG. ACCESS, all its children, siblings and their children
2620 are to be processed. AGG is an aggregate type expression (can be a
2621 declaration but does not have to be; it can for example also be a MEM_REF or
2622 a series of handled components). TOP_OFFSET is the offset of the processed
2623 subtree which has to be subtracted from offsets of individual accesses to
2624 get corresponding offsets for AGG. If CHUNK_SIZE is non-null, copy only
2625 replacements in the interval <start_offset, start_offset + chunk_size>,
2626 otherwise copy all. GSI is a statement iterator used to place the new
2627 statements. WRITE should be true when the statements should write from AGG
2628 to the replacement and false if vice versa. If INSERT_AFTER is true, new
2629 statements will be added after the current statement in GSI, they will be
2630 added before the statement otherwise. */
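/* For example (illustrative, the SR$ names are invented): when the current
   values of the replacements of

     struct S { int i; long l; } s;

   must be flushed back into s before a statement that reads s as a whole,
   calling this function with WRITE false conceptually emits

     s.i = SR$i;
     s.l = SR$l;

   while WRITE true reverses the direction and re-loads the replacements
   from s. */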
2632 static void
2633 generate_subtree_copies (struct access *access, tree agg,
2634 HOST_WIDE_INT top_offset,
2635 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2636 gimple_stmt_iterator *gsi, bool write,
2637 bool insert_after, location_t loc)
2641 if (chunk_size && access->offset >= start_offset + chunk_size)
2642 return;
2644 if (access->grp_to_be_replaced
2645 && (chunk_size == 0
2646 || access->offset + access->size > start_offset))
2648 tree expr, repl = get_access_replacement (access);
2649 gassign *stmt;
2651 expr = build_ref_for_model (loc, agg, access->offset - top_offset,
2652 access, gsi, insert_after);
2654 if (write)
2656 if (access->grp_partial_lhs)
2657 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2658 !insert_after,
2659 insert_after ? GSI_NEW_STMT
2660 : GSI_SAME_STMT);
2661 stmt = gimple_build_assign (repl, expr);
2663 else
2665 TREE_NO_WARNING (repl) = 1;
2666 if (access->grp_partial_lhs)
2667 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2668 !insert_after,
2669 insert_after ? GSI_NEW_STMT
2670 : GSI_SAME_STMT);
2671 stmt = gimple_build_assign (expr, repl);
2673 gimple_set_location (stmt, loc);
2675 if (insert_after)
2676 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2677 else
2678 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2679 update_stmt (stmt);
2680 sra_stats.subtree_copies++;
2682 else if (write
2683 && access->grp_to_be_debug_replaced
2684 && (chunk_size == 0
2685 || access->offset + access->size > start_offset))
2687 gdebug *ds;
2688 tree drhs = build_debug_ref_for_model (loc, agg,
2689 access->offset - top_offset,
2690 access);
2691 ds = gimple_build_debug_bind (get_access_replacement (access),
2692 drhs, gsi_stmt (*gsi));
2693 if (insert_after)
2694 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2695 else
2696 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2699 if (access->first_child)
2700 generate_subtree_copies (access->first_child, agg, top_offset,
2701 start_offset, chunk_size, gsi,
2702 write, insert_after, loc);
2704 access = access->next_sibling;
2706 while (access);
2709 /* Assign zero to all scalar replacements in an access subtree. ACCESS is
2710 the root of the subtree to be processed. GSI is the statement iterator used
2711 for inserting statements which are added after the current statement if
2712 INSERT_AFTER is true or before it otherwise. */
2714 static void
2715 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2716 bool insert_after, location_t loc)
2719 struct access *child;
2721 if (access->grp_to_be_replaced)
2723 gassign *stmt;
2725 stmt = gimple_build_assign (get_access_replacement (access),
2726 build_zero_cst (access->type));
2727 if (insert_after)
2728 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2729 else
2730 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2731 update_stmt (stmt);
2732 gimple_set_location (stmt, loc);
2734 else if (access->grp_to_be_debug_replaced)
2736 gdebug *ds
2737 = gimple_build_debug_bind (get_access_replacement (access),
2738 build_zero_cst (access->type),
2739 gsi_stmt (*gsi));
2740 if (insert_after)
2741 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2742 else
2743 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2746 for (child = access->first_child; child; child = child->next_sibling)
2747 init_subtree_with_zero (child, gsi, insert_after, loc);
2750 /* Clobber all scalar replacements in an access subtree. ACCESS is the
2751 root of the subtree to be processed. GSI is the statement iterator used
2752 for inserting statements which are added after the current statement if
2753 INSERT_AFTER is true or before it otherwise. */
2755 static void
2756 clobber_subtree (struct access *access, gimple_stmt_iterator *gsi,
2757 bool insert_after, location_t loc)
2760 struct access *child;
2762 if (access->grp_to_be_replaced)
2764 tree rep = get_access_replacement (access);
2765 tree clobber = build_constructor (access->type, NULL);
2766 TREE_THIS_VOLATILE (clobber) = 1;
2767 gimple stmt = gimple_build_assign (rep, clobber);
2769 if (insert_after)
2770 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2771 else
2772 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2773 update_stmt (stmt);
2774 gimple_set_location (stmt, loc);
2777 for (child = access->first_child; child; child = child->next_sibling)
2778 clobber_subtree (child, gsi, insert_after, loc);
2781 /* Search for an access representative for the given expression EXPR and
2782 return it or NULL if it cannot be found. */
2784 static struct access *
2785 get_access_for_expr (tree expr)
2787 HOST_WIDE_INT offset, size, max_size;
2788 tree base;
2790 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
2791 a different size than the size of its argument and we need the latter
2792 one. */
2793 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2794 expr = TREE_OPERAND (expr, 0);
2796 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
2797 if (max_size == -1 || !DECL_P (base))
2798 return NULL;
2800 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
2801 return NULL;
2803 return get_var_base_offset_size_access (base, offset, max_size);
2806 /* Replace the expression EXPR with a scalar replacement if there is one and
2807 generate other statements to do type conversion or subtree copying if
2808 necessary. GSI is used to place newly created statements, WRITE is true if
2809 the expression is being written to (it is on a LHS of a statement or output
2810 in an assembly statement). */
2812 static bool
2813 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
2815 location_t loc;
2816 struct access *access;
2817 tree type, bfr, orig_expr;
2819 if (TREE_CODE (*expr) == BIT_FIELD_REF)
2821 bfr = *expr;
2822 expr = &TREE_OPERAND (*expr, 0);
2824 else
2825 bfr = NULL_TREE;
2827 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
2828 expr = &TREE_OPERAND (*expr, 0);
2829 access = get_access_for_expr (*expr);
2830 if (!access)
2831 return false;
2832 type = TREE_TYPE (*expr);
2833 orig_expr = *expr;
2835 loc = gimple_location (gsi_stmt (*gsi));
2836 gimple_stmt_iterator alt_gsi = gsi_none ();
2837 if (write && stmt_ends_bb_p (gsi_stmt (*gsi)))
2839 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
2840 gsi = &alt_gsi;
2843 if (access->grp_to_be_replaced)
2845 tree repl = get_access_replacement (access);
2846 /* If we replace a non-register typed access simply use the original
2847 access expression to extract the scalar component afterwards.
2848 This happens if scalarizing a function return value or parameter
2849 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
2850 gcc.c-torture/compile/20011217-1.c.
2852 We also want to use this when accessing a complex or vector which can
2853 be accessed as a different type too, potentially creating a need for
2854 type conversion (see PR42196) and when scalarized unions are involved
2855 in assembler statements (see PR42398). */
2856 if (!useless_type_conversion_p (type, access->type))
2858 tree ref;
2860 ref = build_ref_for_model (loc, orig_expr, 0, access, gsi, false);
2862 if (write)
2864 gassign *stmt;
2866 if (access->grp_partial_lhs)
2867 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
2868 false, GSI_NEW_STMT);
2869 stmt = gimple_build_assign (repl, ref);
2870 gimple_set_location (stmt, loc);
2871 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2873 else
2875 gassign *stmt;
2877 if (access->grp_partial_lhs)
2878 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2879 true, GSI_SAME_STMT);
2880 stmt = gimple_build_assign (ref, repl);
2881 gimple_set_location (stmt, loc);
2882 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2885 else
2886 *expr = repl;
2887 sra_stats.exprs++;
2889 else if (write && access->grp_to_be_debug_replaced)
2891 gdebug *ds = gimple_build_debug_bind (get_access_replacement (access),
2892 NULL_TREE,
2893 gsi_stmt (*gsi));
2894 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2897 if (access->first_child)
2899 HOST_WIDE_INT start_offset, chunk_size;
2900 if (bfr
2901 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 1))
2902 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 2)))
2904 chunk_size = tree_to_uhwi (TREE_OPERAND (bfr, 1));
2905 start_offset = access->offset
2906 + tree_to_uhwi (TREE_OPERAND (bfr, 2));
2908 else
2909 start_offset = chunk_size = 0;
2911 generate_subtree_copies (access->first_child, orig_expr, access->offset,
2912 start_offset, chunk_size, gsi, write, write,
2913 loc);
2915 return true;
2918 /* Where scalar replacements of the RHS have been written to when a replacement
2919 of the LHS of an assignment cannot be directly loaded from a replacement of
2920 the RHS. */
2921 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
2922 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
2923 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
2925 struct subreplacement_assignment_data
2927 /* Offset of the access representing the lhs of the assignment. */
2928 HOST_WIDE_INT left_offset;
2930 /* LHS and RHS of the original assignment. */
2931 tree assignment_lhs, assignment_rhs;
2933 /* Access representing the rhs of the whole assignment. */
2934 struct access *top_racc;
2936 /* Stmt iterator used for statement insertions after the original assignment.
2937 It points to the main GSI used to traverse a BB during function body
2938 modification. */
2939 gimple_stmt_iterator *new_gsi;
2941 /* Stmt iterator used for statement insertions before the original
2942 assignment. Keeps on pointing to the original statement. */
2943 gimple_stmt_iterator old_gsi;
2945 /* Location of the assignment. */
2946 location_t loc;
2948 /* Keeps the information whether we have needed to refresh replacements of
2949 the LHS and from which side of the assignments this takes place. */
2950 enum unscalarized_data_handling refreshed;
2953 /* Store all replacements in the access tree rooted in SAD->top_racc either to
2954 their base aggregate if there are unscalarized data or directly to the LHS
2955 of the original assignment statement otherwise. */
2957 static void
2958 handle_unscalarized_data_in_subtree (struct subreplacement_assignment_data *sad)
2960 tree src;
2961 if (sad->top_racc->grp_unscalarized_data)
2963 src = sad->assignment_rhs;
2964 sad->refreshed = SRA_UDH_RIGHT;
2966 else
2968 src = sad->assignment_lhs;
2969 sad->refreshed = SRA_UDH_LEFT;
2971 generate_subtree_copies (sad->top_racc->first_child, src,
2972 sad->top_racc->offset, 0, 0,
2973 &sad->old_gsi, false, false, sad->loc);
2976 /* Try to generate statements to load all sub-replacements in an access subtree
2977 formed by children of LACC from scalar replacements in the SAD->top_racc
2978 subtree. If that is not possible, refresh the SAD->top_racc base aggregate
2979 and load the accesses from it. */
2981 static void
2982 load_assign_lhs_subreplacements (struct access *lacc,
2983 struct subreplacement_assignment_data *sad)
2985 for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
2987 HOST_WIDE_INT offset;
2988 offset = lacc->offset - sad->left_offset + sad->top_racc->offset;
2990 if (lacc->grp_to_be_replaced)
2992 struct access *racc;
2993 gassign *stmt;
2994 tree rhs;
2996 racc = find_access_in_subtree (sad->top_racc, offset, lacc->size);
2997 if (racc && racc->grp_to_be_replaced)
2999 rhs = get_access_replacement (racc);
3000 if (!useless_type_conversion_p (lacc->type, racc->type))
3001 rhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
3002 lacc->type, rhs);
3004 if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
3005 rhs = force_gimple_operand_gsi (&sad->old_gsi, rhs, true,
3006 NULL_TREE, true, GSI_SAME_STMT);
3008 else
3010 /* No suitable access on the right hand side, need to load from
3011 the aggregate. See if we have to update it first... */
3012 if (sad->refreshed == SRA_UDH_NONE)
3013 handle_unscalarized_data_in_subtree (sad);
3015 if (sad->refreshed == SRA_UDH_LEFT)
3016 rhs = build_ref_for_model (sad->loc, sad->assignment_lhs,
3017 lacc->offset - sad->left_offset,
3018 lacc, sad->new_gsi, true);
3019 else
3020 rhs = build_ref_for_model (sad->loc, sad->assignment_rhs,
3021 lacc->offset - sad->left_offset,
3022 lacc, sad->new_gsi, true);
3023 if (lacc->grp_partial_lhs)
3024 rhs = force_gimple_operand_gsi (sad->new_gsi,
3025 rhs, true, NULL_TREE,
3026 false, GSI_NEW_STMT);
3029 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
3030 gsi_insert_after (sad->new_gsi, stmt, GSI_NEW_STMT);
3031 gimple_set_location (stmt, sad->loc);
3032 update_stmt (stmt);
3033 sra_stats.subreplacements++;
3035 else
3037 if (sad->refreshed == SRA_UDH_NONE
3038 && lacc->grp_read && !lacc->grp_covered)
3039 handle_unscalarized_data_in_subtree (sad);
3041 if (lacc && lacc->grp_to_be_debug_replaced)
3043 gdebug *ds;
3044 tree drhs;
3045 struct access *racc = find_access_in_subtree (sad->top_racc,
3046 offset,
3047 lacc->size);
3049 if (racc && racc->grp_to_be_replaced)
3051 if (racc->grp_write)
3052 drhs = get_access_replacement (racc);
3053 else
3054 drhs = NULL;
3056 else if (sad->refreshed == SRA_UDH_LEFT)
3057 drhs = build_debug_ref_for_model (sad->loc, lacc->base,
3058 lacc->offset, lacc);
3059 else if (sad->refreshed == SRA_UDH_RIGHT)
3060 drhs = build_debug_ref_for_model (sad->loc, sad->top_racc->base,
3061 offset, lacc);
3062 else
3063 drhs = NULL_TREE;
3064 if (drhs
3065 && !useless_type_conversion_p (lacc->type, TREE_TYPE (drhs)))
3066 drhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
3067 lacc->type, drhs);
3068 ds = gimple_build_debug_bind (get_access_replacement (lacc),
3069 drhs, gsi_stmt (sad->old_gsi));
3070 gsi_insert_after (sad->new_gsi, ds, GSI_NEW_STMT);
3074 if (lacc->first_child)
3075 load_assign_lhs_subreplacements (lacc, sad);
3079 /* Result code for SRA assignment modification. */
3080 enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
3081 SRA_AM_MODIFIED, /* stmt changed but not
3082 removed */
3083 SRA_AM_REMOVED }; /* stmt eliminated */
3085 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT contains a pointer
3086 to the assignment and GSI is the statement iterator pointing at it. Returns
3087 the same values as sra_modify_assign. */
3089 static enum assignment_mod_result
3090 sra_modify_constructor_assign (gimple stmt, gimple_stmt_iterator *gsi)
3092 tree lhs = gimple_assign_lhs (stmt);
3093 struct access *acc = get_access_for_expr (lhs);
3094 if (!acc)
3095 return SRA_AM_NONE;
3096 location_t loc = gimple_location (stmt);
3098 if (gimple_clobber_p (stmt))
3100 /* Clobber the replacement variable. */
3101 clobber_subtree (acc, gsi, !acc->grp_covered, loc);
3102 /* Remove clobbers of fully scalarized variables, they are dead. */
3103 if (acc->grp_covered)
3105 unlink_stmt_vdef (stmt);
3106 gsi_remove (gsi, true);
3107 release_defs (stmt);
3108 return SRA_AM_REMOVED;
3110 else
3111 return SRA_AM_MODIFIED;
3114 if (vec_safe_length (CONSTRUCTOR_ELTS (gimple_assign_rhs1 (stmt))) > 0)
3116 /* I have never seen this code path trigger but if it can happen the
3117 following should handle it gracefully. */
3118 if (access_has_children_p (acc))
3119 generate_subtree_copies (acc->first_child, lhs, acc->offset, 0, 0, gsi,
3120 true, true, loc);
3121 return SRA_AM_MODIFIED;
3124 if (acc->grp_covered)
3126 init_subtree_with_zero (acc, gsi, false, loc);
3127 unlink_stmt_vdef (stmt);
3128 gsi_remove (gsi, true);
3129 release_defs (stmt);
3130 return SRA_AM_REMOVED;
3132 else
3134 init_subtree_with_zero (acc, gsi, true, loc);
3135 return SRA_AM_MODIFIED;
3139 /* Create and return a new suitable default definition SSA_NAME for RACC which
3140 is an access describing an uninitialized part of an aggregate that is being
3141 loaded. */
3143 static tree
3144 get_repl_default_def_ssa_name (struct access *racc)
3146 gcc_checking_assert (!racc->grp_to_be_replaced
3147 && !racc->grp_to_be_debug_replaced);
3148 if (!racc->replacement_decl)
3149 racc->replacement_decl = create_access_replacement (racc);
3150 return get_or_create_ssa_default_def (cfun, racc->replacement_decl);
3153 /* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
3154 bit-field field declaration somewhere in it. */
3156 static inline bool
3157 contains_vce_or_bfcref_p (const_tree ref)
3159 while (handled_component_p (ref))
3161 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
3162 || (TREE_CODE (ref) == COMPONENT_REF
3163 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
3164 return true;
3165 ref = TREE_OPERAND (ref, 0);
3168 return false;
3171 /* Examine both sides of the assignment statement pointed to by STMT, replace
3172 them with a scalar replacement if there is one and generate copying of
3173 replacements if scalarized aggregates have been used in the assignment. GSI
3174 is used to hold generated statements for type conversions and subtree
3175 copying. */
3177 static enum assignment_mod_result
3178 sra_modify_assign (gimple stmt, gimple_stmt_iterator *gsi)
3180 struct access *lacc, *racc;
3181 tree lhs, rhs;
3182 bool modify_this_stmt = false;
3183 bool force_gimple_rhs = false;
3184 location_t loc;
3185 gimple_stmt_iterator orig_gsi = *gsi;
3187 if (!gimple_assign_single_p (stmt))
3188 return SRA_AM_NONE;
3189 lhs = gimple_assign_lhs (stmt);
3190 rhs = gimple_assign_rhs1 (stmt);
3192 if (TREE_CODE (rhs) == CONSTRUCTOR)
3193 return sra_modify_constructor_assign (stmt, gsi);
3195 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
3196 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
3197 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
3199 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (stmt),
3200 gsi, false);
3201 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (stmt),
3202 gsi, true);
3203 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3206 lacc = get_access_for_expr (lhs);
3207 racc = get_access_for_expr (rhs);
3208 if (!lacc && !racc)
3209 return SRA_AM_NONE;
3211 loc = gimple_location (stmt);
3212 if (lacc && lacc->grp_to_be_replaced)
3214 lhs = get_access_replacement (lacc);
3215 gimple_assign_set_lhs (stmt, lhs);
3216 modify_this_stmt = true;
3217 if (lacc->grp_partial_lhs)
3218 force_gimple_rhs = true;
3219 sra_stats.exprs++;
3222 if (racc && racc->grp_to_be_replaced)
3224 rhs = get_access_replacement (racc);
3225 modify_this_stmt = true;
3226 if (racc->grp_partial_lhs)
3227 force_gimple_rhs = true;
3228 sra_stats.exprs++;
3230 else if (racc
3231 && !racc->grp_unscalarized_data
3232 && TREE_CODE (lhs) == SSA_NAME
3233 && !access_has_replacements_p (racc))
3235 rhs = get_repl_default_def_ssa_name (racc);
3236 modify_this_stmt = true;
3237 sra_stats.exprs++;
3240 if (modify_this_stmt)
3242 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3244 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
3245 ??? This should move to fold_stmt which we simply should
3246 call after building a VIEW_CONVERT_EXPR here. */
3247 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
3248 && !contains_bitfld_component_ref_p (lhs))
3250 lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
3251 gimple_assign_set_lhs (stmt, lhs);
3253 else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
3254 && !contains_vce_or_bfcref_p (rhs))
3255 rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
3257 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3259 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
3260 rhs);
3261 if (is_gimple_reg_type (TREE_TYPE (lhs))
3262 && TREE_CODE (lhs) != SSA_NAME)
3263 force_gimple_rhs = true;
3268 if (lacc && lacc->grp_to_be_debug_replaced)
3270 tree dlhs = get_access_replacement (lacc);
3271 tree drhs = unshare_expr (rhs);
3272 if (!useless_type_conversion_p (TREE_TYPE (dlhs), TREE_TYPE (drhs)))
3274 if (AGGREGATE_TYPE_P (TREE_TYPE (drhs))
3275 && !contains_vce_or_bfcref_p (drhs))
3276 drhs = build_debug_ref_for_model (loc, drhs, 0, lacc);
3277 if (drhs
3278 && !useless_type_conversion_p (TREE_TYPE (dlhs),
3279 TREE_TYPE (drhs)))
3280 drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
3281 TREE_TYPE (dlhs), drhs);
3283 gdebug *ds = gimple_build_debug_bind (dlhs, drhs, stmt);
3284 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3287 /* From this point on, the function deals with assignments in between
3288 aggregates when at least one has scalar reductions of some of its
3289 components. There are three possible scenarios: 1) both the LHS and RHS have
3290 to-be-scalarized components, 2) only the RHS has, or 3) only the LHS has.
3292 In the first case, we would like to load the LHS components from RHS
3293 components whenever possible. If that is not possible, we would like to
3294 read it directly from the RHS (after updating it by storing in it its own
3295 components). If there are some necessary unscalarized data in the LHS,
3296 those will be loaded by the original assignment too. If neither of these
3297 cases happen, the original statement can be removed. Most of this is done
3298 by load_assign_lhs_subreplacements.
3300 In the second case, we would like to store all RHS scalarized components
3301 directly into LHS and if they cover the aggregate completely, remove the
3302 statement too. In the third case, we want the LHS components to be loaded
3303 directly from the RHS (DSE will remove the original statement if it
3304 becomes redundant).
3306 This is a bit complex but manageable when types match and when unions do
3307 not cause confusion in a way that we cannot really load a component of LHS
3308 from the RHS or vice versa (the access representing this level can have
3309 subaccesses that are accessible only through a different union field at a
3310 higher level - different from the one used in the examined expression).
3311 Unions are fun.
3313 Therefore, I specially handle a fourth case, happening when there is a
3314 specific type cast or it is impossible to locate a scalarized subaccess on
3315 the other side of the expression. If that happens, I simply "refresh" the
3316 RHS by storing its scalarized components in it, leave the original statement
3317 there to do the copying and then load the scalar replacements of the LHS.
3318 This is what the first branch does. */
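/* A concrete shape of the second scenario (illustrative, the s$ names are
   invented): if in

     d = s;

   only the RHS has scalarized components s$i and s$f that cover s
   completely, the copy is rewritten to

     d.i = s$i;
     d.f = s$f;

   and the original aggregate assignment is removed. */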
3320 if (modify_this_stmt
3321 || gimple_has_volatile_ops (stmt)
3322 || contains_vce_or_bfcref_p (rhs)
3323 || contains_vce_or_bfcref_p (lhs)
3324 || stmt_ends_bb_p (stmt))
3326 if (access_has_children_p (racc))
3327 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
3328 gsi, false, false, loc);
3329 if (access_has_children_p (lacc))
3331 gimple_stmt_iterator alt_gsi = gsi_none ();
3332 if (stmt_ends_bb_p (stmt))
3334 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
3335 gsi = &alt_gsi;
3337 generate_subtree_copies (lacc->first_child, lhs, lacc->offset, 0, 0,
3338 gsi, true, true, loc);
3340 sra_stats.separate_lhs_rhs_handling++;
3342 /* This gimplification must be done after generate_subtree_copies,
3343 lest we insert the subtree copies in the middle of the gimplified
3344 sequence. */
3345 if (force_gimple_rhs)
3346 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
3347 true, GSI_SAME_STMT);
3348 if (gimple_assign_rhs1 (stmt) != rhs)
3350 modify_this_stmt = true;
3351 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
3352 gcc_assert (stmt == gsi_stmt (orig_gsi));
3355 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3357 else
3359 if (access_has_children_p (lacc)
3360 && access_has_children_p (racc)
3361 /* When an access represents an unscalarizable region, it usually
3362 represents accesses with variable offset and thus must not be used
3363 to generate new memory accesses. */
3364 && !lacc->grp_unscalarizable_region
3365 && !racc->grp_unscalarizable_region)
3367 struct subreplacement_assignment_data sad;
3369 sad.left_offset = lacc->offset;
3370 sad.assignment_lhs = lhs;
3371 sad.assignment_rhs = rhs;
3372 sad.top_racc = racc;
3373 sad.old_gsi = *gsi;
3374 sad.new_gsi = gsi;
3375 sad.loc = gimple_location (stmt);
3376 sad.refreshed = SRA_UDH_NONE;
3378 if (lacc->grp_read && !lacc->grp_covered)
3379 handle_unscalarized_data_in_subtree (&sad);
3381 load_assign_lhs_subreplacements (lacc, &sad);
3382 if (sad.refreshed != SRA_UDH_RIGHT)
3384 gsi_next (gsi);
3385 unlink_stmt_vdef (stmt);
3386 gsi_remove (&sad.old_gsi, true);
3387 release_defs (stmt);
3388 sra_stats.deleted++;
3389 return SRA_AM_REMOVED;
3392 else
3394 if (access_has_children_p (racc)
3395 && !racc->grp_unscalarized_data)
3397 if (dump_file)
3399 fprintf (dump_file, "Removing load: ");
3400 print_gimple_stmt (dump_file, stmt, 0, 0);
3402 generate_subtree_copies (racc->first_child, lhs,
3403 racc->offset, 0, 0, gsi,
3404 false, false, loc);
3405 gcc_assert (stmt == gsi_stmt (*gsi));
3406 unlink_stmt_vdef (stmt);
3407 gsi_remove (gsi, true);
3408 release_defs (stmt);
3409 sra_stats.deleted++;
3410 return SRA_AM_REMOVED;
3412 /* Restore the aggregate RHS from its components so the
3413 prevailing aggregate copy does the right thing. */
3414 if (access_has_children_p (racc))
3415 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
3416 gsi, false, false, loc);
3417 /* Re-load the components of the aggregate copy destination.
3418 But use the RHS aggregate to load from to expose more
3419 optimization opportunities. */
3420 if (access_has_children_p (lacc))
3421 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
3422 0, 0, gsi, true, true, loc);
3425 return SRA_AM_NONE;
3429 /* Traverse the function body and make all modifications as decided in
3430 analyze_all_variable_accesses. Return true iff the CFG has been
3431 changed. */
3433 static bool
3434 sra_modify_function_body (void)
3436 bool cfg_changed = false;
3437 basic_block bb;
3439 FOR_EACH_BB_FN (bb, cfun)
3441 gimple_stmt_iterator gsi = gsi_start_bb (bb);
3442 while (!gsi_end_p (gsi))
3444 gimple stmt = gsi_stmt (gsi);
3445 enum assignment_mod_result assign_result;
3446 bool modified = false, deleted = false;
3447 tree *t;
3448 unsigned i;
3450 switch (gimple_code (stmt))
3452 case GIMPLE_RETURN:
3453 t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
3454 if (*t != NULL_TREE)
3455 modified |= sra_modify_expr (t, &gsi, false);
3456 break;
3458 case GIMPLE_ASSIGN:
3459 assign_result = sra_modify_assign (stmt, &gsi);
3460 modified |= assign_result == SRA_AM_MODIFIED;
3461 deleted = assign_result == SRA_AM_REMOVED;
3462 break;
3464 case GIMPLE_CALL:
3465 /* Operands must be processed before the lhs. */
3466 for (i = 0; i < gimple_call_num_args (stmt); i++)
3468 t = gimple_call_arg_ptr (stmt, i);
3469 modified |= sra_modify_expr (t, &gsi, false);
3472 if (gimple_call_lhs (stmt))
3474 t = gimple_call_lhs_ptr (stmt);
3475 modified |= sra_modify_expr (t, &gsi, true);
3477 break;
3479 case GIMPLE_ASM:
3481 gasm *asm_stmt = as_a <gasm *> (stmt);
3482 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
3484 t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
3485 modified |= sra_modify_expr (t, &gsi, false);
3487 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
3489 t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
3490 modified |= sra_modify_expr (t, &gsi, true);
3493 break;
3495 default:
3496 break;
3499 if (modified)
3501 update_stmt (stmt);
3502 if (maybe_clean_eh_stmt (stmt)
3503 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
3504 cfg_changed = true;
3506 if (!deleted)
3507 gsi_next (&gsi);
3511 gsi_commit_edge_inserts ();
3512 return cfg_changed;
3515 /* Generate statements initializing scalar replacements of parts of function
3516 parameters. */
3518 static void
3519 initialize_parameter_reductions (void)
3521 gimple_stmt_iterator gsi;
3522 gimple_seq seq = NULL;
3523 tree parm;
3525 gsi = gsi_start (seq);
3526 for (parm = DECL_ARGUMENTS (current_function_decl);
3527 parm;
3528 parm = DECL_CHAIN (parm))
3530 vec<access_p> *access_vec;
3531 struct access *access;
3533 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3534 continue;
3535 access_vec = get_base_access_vector (parm);
3536 if (!access_vec)
3537 continue;
3539 for (access = (*access_vec)[0];
3540 access;
3541 access = access->next_grp)
3542 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
3543 EXPR_LOCATION (parm));
3546 seq = gsi_seq (gsi);
3547 if (seq)
3548 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
3551 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
3552 it reveals there are components of some aggregates to be scalarized, it runs
3553 the required transformations. */
3554 static unsigned int
3555 perform_intra_sra (void)
3557 int ret = 0;
3558 sra_initialize ();
3560 if (!find_var_candidates ())
3561 goto out;
3563 if (!scan_function ())
3564 goto out;
3566 if (!analyze_all_variable_accesses ())
3567 goto out;
3569 if (sra_modify_function_body ())
3570 ret = TODO_update_ssa | TODO_cleanup_cfg;
3571 else
3572 ret = TODO_update_ssa;
3573 initialize_parameter_reductions ();
3575 statistics_counter_event (cfun, "Scalar replacements created",
3576 sra_stats.replacements);
3577 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
3578 statistics_counter_event (cfun, "Subtree copy stmts",
3579 sra_stats.subtree_copies);
3580 statistics_counter_event (cfun, "Subreplacement stmts",
3581 sra_stats.subreplacements);
3582 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
3583 statistics_counter_event (cfun, "Separate LHS and RHS handling",
3584 sra_stats.separate_lhs_rhs_handling);
3586 out:
3587 sra_deinitialize ();
3588 return ret;
3591 /* Perform early intraprocedural SRA. */
3592 static unsigned int
3593 early_intra_sra (void)
3595 sra_mode = SRA_MODE_EARLY_INTRA;
3596 return perform_intra_sra ();
3599 /* Perform "late" intraprocedural SRA. */
3600 static unsigned int
3601 late_intra_sra (void)
3603 sra_mode = SRA_MODE_INTRA;
3604 return perform_intra_sra ();
3608 static bool
3609 gate_intra_sra (void)
3611 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
3615 namespace {
3617 const pass_data pass_data_sra_early =
3619 GIMPLE_PASS, /* type */
3620 "esra", /* name */
3621 OPTGROUP_NONE, /* optinfo_flags */
3622 TV_TREE_SRA, /* tv_id */
3623 ( PROP_cfg | PROP_ssa ), /* properties_required */
3624 0, /* properties_provided */
3625 0, /* properties_destroyed */
3626 0, /* todo_flags_start */
3627 TODO_update_ssa, /* todo_flags_finish */
3630 class pass_sra_early : public gimple_opt_pass
3632 public:
3633 pass_sra_early (gcc::context *ctxt)
3634 : gimple_opt_pass (pass_data_sra_early, ctxt)
3637 /* opt_pass methods: */
3638 virtual bool gate (function *) { return gate_intra_sra (); }
3639 virtual unsigned int execute (function *) { return early_intra_sra (); }
3641 }; // class pass_sra_early
3643 } // anon namespace
3645 gimple_opt_pass *
3646 make_pass_sra_early (gcc::context *ctxt)
3648 return new pass_sra_early (ctxt);
3651 namespace {
3653 const pass_data pass_data_sra =
3655 GIMPLE_PASS, /* type */
3656 "sra", /* name */
3657 OPTGROUP_NONE, /* optinfo_flags */
3658 TV_TREE_SRA, /* tv_id */
3659 ( PROP_cfg | PROP_ssa ), /* properties_required */
3660 0, /* properties_provided */
3661 0, /* properties_destroyed */
3662 TODO_update_address_taken, /* todo_flags_start */
3663 TODO_update_ssa, /* todo_flags_finish */
3666 class pass_sra : public gimple_opt_pass
3668 public:
3669 pass_sra (gcc::context *ctxt)
3670 : gimple_opt_pass (pass_data_sra, ctxt)
3673 /* opt_pass methods: */
3674 virtual bool gate (function *) { return gate_intra_sra (); }
3675 virtual unsigned int execute (function *) { return late_intra_sra (); }
3677 }; // class pass_sra
3679 } // anon namespace
3681 gimple_opt_pass *
3682 make_pass_sra (gcc::context *ctxt)
3684 return new pass_sra (ctxt);
3688 /* Return true iff PARM (which must be a parm_decl) is an unused scalar
3689 parameter. */
3691 static bool
3692 is_unused_scalar_param (tree parm)
3694 tree name;
3695 return (is_gimple_reg (parm)
3696 && (!(name = ssa_default_def (cfun, parm))
3697 || has_zero_uses (name)));
3700 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
3701 examine whether there are any direct or otherwise infeasible ones. If so,
3702 return true, otherwise return false. PARM must be a gimple register with a
3703 non-NULL default definition. */
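/* To illustrate (example code, not from GCC):

     int f (int *p) { return *p; }      // only a dereference
     int g (int *p) { return p == 0; }  // uses the pointer itself

   the parameter of f has no direct uses and stays an IPA-SRA candidate,
   whereas the comparison in g is a direct use that disqualifies the
   parameter. */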
3705 static bool
3706 ptr_parm_has_direct_uses (tree parm)
3708 imm_use_iterator ui;
3709 gimple stmt;
3710 tree name = ssa_default_def (cfun, parm);
3711 bool ret = false;
3713 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
3715 int uses_ok = 0;
3716 use_operand_p use_p;
3718 if (is_gimple_debug (stmt))
3719 continue;
3721 /* Valid uses include dereferences on the lhs and the rhs. */
3722 if (gimple_has_lhs (stmt))
3724 tree lhs = gimple_get_lhs (stmt);
3725 while (handled_component_p (lhs))
3726 lhs = TREE_OPERAND (lhs, 0);
3727 if (TREE_CODE (lhs) == MEM_REF
3728 && TREE_OPERAND (lhs, 0) == name
3729 && integer_zerop (TREE_OPERAND (lhs, 1))
3730 && types_compatible_p (TREE_TYPE (lhs),
3731 TREE_TYPE (TREE_TYPE (name)))
3732 && !TREE_THIS_VOLATILE (lhs))
3733 uses_ok++;
3735 if (gimple_assign_single_p (stmt))
3737 tree rhs = gimple_assign_rhs1 (stmt);
3738 while (handled_component_p (rhs))
3739 rhs = TREE_OPERAND (rhs, 0);
3740 if (TREE_CODE (rhs) == MEM_REF
3741 && TREE_OPERAND (rhs, 0) == name
3742 && integer_zerop (TREE_OPERAND (rhs, 1))
3743 && types_compatible_p (TREE_TYPE (rhs),
3744 TREE_TYPE (TREE_TYPE (name)))
3745 && !TREE_THIS_VOLATILE (rhs))
3746 uses_ok++;
3748 else if (is_gimple_call (stmt))
3750 unsigned i;
3751 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3753 tree arg = gimple_call_arg (stmt, i);
3754 while (handled_component_p (arg))
3755 arg = TREE_OPERAND (arg, 0);
3756 if (TREE_CODE (arg) == MEM_REF
3757 && TREE_OPERAND (arg, 0) == name
3758 && integer_zerop (TREE_OPERAND (arg, 1))
3759 && types_compatible_p (TREE_TYPE (arg),
3760 TREE_TYPE (TREE_TYPE (name)))
3761 && !TREE_THIS_VOLATILE (arg))
3762 uses_ok++;
3766 /* If the number of valid uses does not match the number of
3767 uses in this stmt there is an unhandled use. */
3768 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
3769 --uses_ok;
3771 if (uses_ok != 0)
3772 ret = true;
3774 if (ret)
3775 BREAK_FROM_IMM_USE_STMT (ui);
3778 return ret;
3781 /* Identify candidates for reduction for IPA-SRA based on their type and mark
3782 them in candidate_bitmap. Note that these do not necessarily include
3783 parameters which are unused and thus can be removed. Return true iff any
3784 such candidate has been found. */
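/* The kind of transformation these candidates enable (illustrative):

     struct S { int i; int j; };
     static int f (struct S *p) { return p->i; }

   can have its parameter reduced by IPA-SRA so that callers pass the
   loaded scalar directly, roughly as if f had been written as

     static int f (int i) { return i; }  */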
3786 static bool
3787 find_param_candidates (void)
3789 tree parm;
3790 int count = 0;
3791 bool ret = false;
3792 const char *msg;
3794 for (parm = DECL_ARGUMENTS (current_function_decl);
3795 parm;
3796 parm = DECL_CHAIN (parm))
3798 tree type = TREE_TYPE (parm);
3799 tree_node **slot;
3801 count++;
3803 if (TREE_THIS_VOLATILE (parm)
3804 || TREE_ADDRESSABLE (parm)
3805 || (!is_gimple_reg_type (type) && is_va_list_type (type)))
3806 continue;
3808 if (is_unused_scalar_param (parm))
3810 ret = true;
3811 continue;
3814 if (POINTER_TYPE_P (type))
3816 type = TREE_TYPE (type);
3818 if (TREE_CODE (type) == FUNCTION_TYPE
3819 || TYPE_VOLATILE (type)
3820 || upc_shared_type_p (type)
3821 || (TREE_CODE (type) == ARRAY_TYPE
3822 && TYPE_NONALIASED_COMPONENT (type))
3823 || !is_gimple_reg (parm)
3824 || is_va_list_type (type)
3825 || ptr_parm_has_direct_uses (parm))
3826 continue;
3828 else if (!AGGREGATE_TYPE_P (type))
3829 continue;
3831 if (!COMPLETE_TYPE_P (type)
3832 || !tree_fits_uhwi_p (TYPE_SIZE (type))
3833 || tree_to_uhwi (TYPE_SIZE (type)) == 0
3834 || (AGGREGATE_TYPE_P (type)
3835 && type_internals_preclude_sra_p (type, &msg)))
3836 continue;
3838 bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
3839 slot = candidates->find_slot_with_hash (parm, DECL_UID (parm), INSERT);
3840 *slot = parm;
3842 ret = true;
3843 if (dump_file && (dump_flags & TDF_DETAILS))
3845 fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
3846 print_generic_expr (dump_file, parm, 0);
3847 fprintf (dump_file, "\n");
3851 func_param_count = count;
3852 return ret;
3855 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
3856 maybe_modified. */
3858 static bool
3859 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
3860 void *data)
3862 struct access *repr = (struct access *) data;
3864 repr->grp_maybe_modified = 1;
3865 return true;
3868 /* Analyze what representatives (in linked lists accessible from
3869 REPRESENTATIVES) can be modified by side effects of statements in the
3870 current function. */
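/* A sketch of what this analysis catches (hypothetical):

     int f (int *p, int *q)
     {
       *q = 5;
       return *p;
     }

   All accesses to *p are reads, but the store through Q may alias it, so
   walking the virtual definitions reachable from the load's VUSE marks the
   representative of P as grp_maybe_modified and the by-value conversion is
   abandoned for it.  */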
3872 static void
3873 analyze_modified_params (vec<access_p> representatives)
3875 int i;
3877 for (i = 0; i < func_param_count; i++)
3879 struct access *repr;
3881 for (repr = representatives[i];
3882 repr;
3883 repr = repr->next_grp)
3885 struct access *access;
3886 bitmap visited;
3887 ao_ref ar;
3889 if (no_accesses_p (repr))
3890 continue;
3891 if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
3892 || repr->grp_maybe_modified)
3893 continue;
3895 ao_ref_init (&ar, repr->expr);
3896 visited = BITMAP_ALLOC (NULL);
3897 for (access = repr; access; access = access->next_sibling)
3899 /* All accesses are read ones, otherwise grp_maybe_modified would
3900 be trivially set. */
3901 walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
3902 mark_maybe_modified, repr, &visited);
3903 if (repr->grp_maybe_modified)
3904 break;
3906 BITMAP_FREE (visited);
3911 /* Propagate distances in bb_dereferences in the opposite direction to the
3912 control flow edges, in each step storing the maximum of the current value
3913 and the minimum of all successors. These steps are repeated until the table
3914 stabilizes. Note that BBs which might terminate the function (according to
3915 the final_bbs bitmap) are never updated in this way. */
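/* A worked step of the propagation (hypothetical numbers): if a block's two
   successors have recorded dereference distances of 32 and 48 for some
   parameter, the value inherited by the block is min (32, 48) = 32; if the
   block's own entry is currently 8, it is raised to max (8, 32) = 32 and its
   predecessors are re-queued.  A successor that never dereferences the
   parameter contributes 0 and thus keeps the minimum at 0.  */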
3917 static void
3918 propagate_dereference_distances (void)
3920 basic_block bb;
3922 auto_vec<basic_block> queue (last_basic_block_for_fn (cfun));
3923 queue.quick_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
3924 FOR_EACH_BB_FN (bb, cfun)
3926 queue.quick_push (bb);
3927 bb->aux = bb;
3930 while (!queue.is_empty ())
3932 edge_iterator ei;
3933 edge e;
3934 bool change = false;
3935 int i;
3937 bb = queue.pop ();
3938 bb->aux = NULL;
3940 if (bitmap_bit_p (final_bbs, bb->index))
3941 continue;
3943 for (i = 0; i < func_param_count; i++)
3945 int idx = bb->index * func_param_count + i;
3946 bool first = true;
3947 HOST_WIDE_INT inh = 0;
3949 FOR_EACH_EDGE (e, ei, bb->succs)
3951 int succ_idx = e->dest->index * func_param_count + i;
3953 if (e->src == EXIT_BLOCK_PTR_FOR_FN (cfun))
3954 continue;
3956 if (first)
3958 first = false;
3959 inh = bb_dereferences [succ_idx];
3961 else if (bb_dereferences [succ_idx] < inh)
3962 inh = bb_dereferences [succ_idx];
3965 if (!first && bb_dereferences[idx] < inh)
3967 bb_dereferences[idx] = inh;
3968 change = true;
3972 if (change && !bitmap_bit_p (final_bbs, bb->index))
3973 FOR_EACH_EDGE (e, ei, bb->preds)
3975 if (e->src->aux)
3976 continue;
3978 e->src->aux = e->src;
3979 queue.quick_push (e->src);
3984 /* Dump a dereferences TABLE with heading STR to file F. */
3986 static void
3987 dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
3989 basic_block bb;
3991 fprintf (f, "%s", str);
3992 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
3993 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
3995 fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
3996 if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
3998 int i;
3999 for (i = 0; i < func_param_count; i++)
4001 int idx = bb->index * func_param_count + i;
4002 fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
4005 fprintf (f, "\n");
4007 fprintf (f, "\n");
4010 /* Determine which (parts of) parameters passed by reference and not assigned
4011 to are not certainly dereferenced in this function, meaning that the
4012 dereferencing cannot be safely moved to the caller without potentially
4013 introducing a segfault. Mark such REPRESENTATIVES as
4014 grp_not_necessarilly_dereferenced.
4016 The maximum dereferenced "distance," i.e. the offset + size of the accessed
4017 part, is calculated for each pointer parameter rather than a simple boolean,
4018 so as to handle cases when only a fraction of the whole aggregate is
4019 allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for an
4020 example).
4022 The maximum dereference distances for each pointer parameter and BB are
4023 already stored in bb_dereferences. This routine simply propagates these
4024 values upwards by propagate_dereference_distances and then compares the
4025 distances of individual parameters in the ENTRY BB to the equivalent
4026 distances of each representative of a (fraction of a) parameter. */
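/* For instance (hypothetical):

     int f (int *p, int flag)
     {
       if (flag)
         return *p;
       return 0;
     }

   The dereference distance propagated to the ENTRY BB for P is 0 because one
   path performs no dereference at all, so P's representative is marked
   grp_not_necessarilly_dereferenced: hoisting the load into callers could
   fault if they pass a null or invalid pointer together with a false
   FLAG.  */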
4028 static void
4029 analyze_caller_dereference_legality (vec<access_p> representatives)
4031 int i;
4033 if (dump_file && (dump_flags & TDF_DETAILS))
4034 dump_dereferences_table (dump_file,
4035 "Dereference table before propagation:\n",
4036 bb_dereferences);
4038 propagate_dereference_distances ();
4040 if (dump_file && (dump_flags & TDF_DETAILS))
4041 dump_dereferences_table (dump_file,
4042 "Dereference table after propagation:\n",
4043 bb_dereferences);
4045 for (i = 0; i < func_param_count; i++)
4047 struct access *repr = representatives[i];
4048 int idx = ENTRY_BLOCK_PTR_FOR_FN (cfun)->index * func_param_count + i;
4050 if (!repr || no_accesses_p (repr))
4051 continue;
4055 if ((repr->offset + repr->size) > bb_dereferences[idx])
4056 repr->grp_not_necessarilly_dereferenced = 1;
4057 repr = repr->next_grp;
4059 while (repr);
4063 /* Return the representative access for the parameter declaration PARM if it is
4064 a scalar passed by reference which is not written to and the pointer value
4065 is not used directly. Thus, if it is legal to dereference it in the caller
4066 and we can rule out modifications through aliases, such a parameter should be
4067 turned into one passed by value. Return NULL otherwise. */
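/* In source-level terms the intended transformation is (hypothetical):

     int f (const int *p) { return *p + 1; }
       =>
     int f (int p_val) { return p_val + 1; }

   with every call site rewritten to dereference the argument itself.  The
   analyses that follow check that this is actually safe and profitable.  */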
4069 static struct access *
4070 unmodified_by_ref_scalar_representative (tree parm)
4072 int i, access_count;
4073 struct access *repr;
4074 vec<access_p> *access_vec;
4076 access_vec = get_base_access_vector (parm);
4077 gcc_assert (access_vec);
4078 repr = (*access_vec)[0];
4079 if (repr->write)
4080 return NULL;
4081 repr->group_representative = repr;
4083 access_count = access_vec->length ();
4084 for (i = 1; i < access_count; i++)
4086 struct access *access = (*access_vec)[i];
4087 if (access->write)
4088 return NULL;
4089 access->group_representative = repr;
4090 access->next_sibling = repr->next_sibling;
4091 repr->next_sibling = access;
4094 repr->grp_read = 1;
4095 repr->grp_scalar_ptr = 1;
4096 return repr;
4099 /* Return true iff this ACCESS precludes IPA-SRA of the parameter it is
4100 associated with. REQ_ALIGN is the minimum required alignment. */
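/* A precluded case, for illustration (hypothetical): with the access being
   the store performed by a call statement such as

     p->f = foo ();

   the left-hand side cannot simply be replaced by a differently-typed scalar
   without inserting a temporary, so such writes (and asm outputs) are
   rejected outright.  Likewise an access whose actual alignment is below
   REQ_ALIGN, e.g. a field of a packed struct.  */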
4102 static bool
4103 access_precludes_ipa_sra_p (struct access *access, unsigned int req_align)
4105 unsigned int exp_align;
4106 /* Avoid issues such as the second simple testcase in PR 42025. The problem
4107 is an incompatible assignment in a call statement (and possibly even in asm
4108 statements). This can be relaxed by using a new temporary but only for
4109 non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
4110 intraprocedural SRA we deal with this by keeping the old aggregate around,
4111 something we cannot do in IPA-SRA.) */
4112 if (access->write
4113 && (is_gimple_call (access->stmt)
4114 || gimple_code (access->stmt) == GIMPLE_ASM))
4115 return true;
4117 exp_align = get_object_alignment (access->expr);
4118 if (exp_align < req_align)
4119 return true;
4121 return false;
4125 /* Sort collected accesses for parameter PARM, identify representatives for
4126 each accessed region and link them together. Return NULL if there are
4127 different but overlapping accesses, the special pointer value meaning
4128 there are no accesses for this parameter if that is the case, and the
4129 first representative otherwise. Set *RO_GRP if there is a group of accesses
4130 with only read (i.e. no write) accesses. */
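/* The all-or-nothing rule below, on hypothetical offsets and sizes (in
   bits): accesses (0, 32) and (32, 32) form two valid groups, and a second
   (0, 32) access simply joins the first group.  But (0, 64) overlapping
   (32, 32), or two accesses at offset 0 with sizes 32 and 64, disqualify the
   whole parameter and make this function return NULL.  */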
4132 static struct access *
4133 splice_param_accesses (tree parm, bool *ro_grp)
4135 int i, j, access_count, group_count;
4136 int agg_size, total_size = 0;
4137 struct access *access, *res, **prev_acc_ptr = &res;
4138 vec<access_p> *access_vec;
4140 access_vec = get_base_access_vector (parm);
4141 if (!access_vec)
4142 return &no_accesses_representant;
4143 access_count = access_vec->length ();
4145 access_vec->qsort (compare_access_positions);
4147 i = 0;
4148 total_size = 0;
4149 group_count = 0;
4150 while (i < access_count)
4152 bool modification;
4153 tree a1_alias_type;
4154 access = (*access_vec)[i];
4155 modification = access->write;
4156 if (access_precludes_ipa_sra_p (access, TYPE_ALIGN (access->type)))
4157 return NULL;
4158 a1_alias_type = reference_alias_ptr_type (access->expr);
4160 /* Access is about to become group representative unless we find some
4161 nasty overlap which would preclude us from breaking this parameter
4162 apart. */
4164 j = i + 1;
4165 while (j < access_count)
4167 struct access *ac2 = (*access_vec)[j];
4168 if (ac2->offset != access->offset)
4170 /* All or nothing law for parameters. */
4171 if (access->offset + access->size > ac2->offset)
4172 return NULL;
4173 else
4174 break;
4176 else if (ac2->size != access->size)
4177 return NULL;
4179 if (access_precludes_ipa_sra_p (ac2, TYPE_ALIGN (access->type))
4180 || (ac2->type != access->type
4181 && (TREE_ADDRESSABLE (ac2->type)
4182 || TREE_ADDRESSABLE (access->type)))
4183 || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
4184 return NULL;
4186 modification |= ac2->write;
4187 ac2->group_representative = access;
4188 ac2->next_sibling = access->next_sibling;
4189 access->next_sibling = ac2;
4190 j++;
4193 group_count++;
4194 access->grp_maybe_modified = modification;
4195 if (!modification)
4196 *ro_grp = true;
4197 *prev_acc_ptr = access;
4198 prev_acc_ptr = &access->next_grp;
4199 total_size += access->size;
4200 i = j;
4203 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4204 agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
4205 else
4206 agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
4207 if (total_size >= agg_size)
4208 return NULL;
4210 gcc_assert (group_count > 0);
4211 return res;
4214 /* Decide whether parameters with representative accesses given by REPR should
4215 be reduced into components. */
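/* A worked instance of the heuristic below (assumed sizes, in bits): for a
   128-bit struct passed by value of which two 32-bit fields are accessed,
   total_size becomes 64, which is below both agg_size (128) and
   parm_size_limit (PARAM_IPA_SRA_PTR_GROWTH_FACTOR * 128), so the parameter
   is split into two components.  If all four words were accessed, total_size
   would equal agg_size, splitting would be pointless, and 0 would be
   returned.  */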
4217 static int
4218 decide_one_param_reduction (struct access *repr)
4220 int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
4221 bool by_ref;
4222 tree parm;
4224 parm = repr->base;
4225 cur_parm_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
4226 gcc_assert (cur_parm_size > 0);
4228 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4230 by_ref = true;
4231 agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
4233 else
4235 by_ref = false;
4236 agg_size = cur_parm_size;
4239 if (dump_file)
4241 struct access *acc;
4242 fprintf (dump_file, "Evaluating PARAM group sizes for ");
4243 print_generic_expr (dump_file, parm, 0);
4244 fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
4245 for (acc = repr; acc; acc = acc->next_grp)
4246 dump_access (dump_file, acc, true);
4249 total_size = 0;
4250 new_param_count = 0;
4252 for (; repr; repr = repr->next_grp)
4254 gcc_assert (parm == repr->base);
4256 /* Taking the address of a non-addressable field is verboten. */
4257 if (by_ref && repr->non_addressable)
4258 return 0;
4260 /* Do not decompose a non-BLKmode param in a way that would
4261 create BLKmode params. Especially for by-reference passing
4262 (thus, pointer-type param) this is hardly worthwhile. */
4263 if (DECL_MODE (parm) != BLKmode
4264 && TYPE_MODE (repr->type) == BLKmode)
4265 return 0;
4267 if (!by_ref || (!repr->grp_maybe_modified
4268 && !repr->grp_not_necessarilly_dereferenced))
4269 total_size += repr->size;
4270 else
4271 total_size += cur_parm_size;
4273 new_param_count++;
4276 gcc_assert (new_param_count > 0);
4278 if (optimize_function_for_size_p (cfun))
4279 parm_size_limit = cur_parm_size;
4280 else
4281 parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
4282 * cur_parm_size);
4284 if (total_size < agg_size
4285 && total_size <= parm_size_limit)
4287 if (dump_file)
4288 fprintf (dump_file, " ....will be split into %i components\n",
4289 new_param_count);
4290 return new_param_count;
4292 else
4293 return 0;
4296 /* The order of the following enums is important; we need to do extra work for
4297 UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES. */
4298 enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
4299 MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
4301 /* Identify representatives of all accesses to all candidate parameters for
4302 IPA-SRA. Return result based on what representatives have been found. */
4304 static enum ipa_splicing_result
4305 splice_all_param_accesses (vec<access_p> &representatives)
4307 enum ipa_splicing_result result = NO_GOOD_ACCESS;
4308 tree parm;
4309 struct access *repr;
4311 representatives.create (func_param_count);
4313 for (parm = DECL_ARGUMENTS (current_function_decl);
4314 parm;
4315 parm = DECL_CHAIN (parm))
4317 if (is_unused_scalar_param (parm))
4319 representatives.quick_push (&no_accesses_representant);
4320 if (result == NO_GOOD_ACCESS)
4321 result = UNUSED_PARAMS;
4323 else if (POINTER_TYPE_P (TREE_TYPE (parm))
4324 && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
4325 && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4327 repr = unmodified_by_ref_scalar_representative (parm);
4328 representatives.quick_push (repr);
4329 if (repr)
4330 result = UNMODIF_BY_REF_ACCESSES;
4332 else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4334 bool ro_grp = false;
4335 repr = splice_param_accesses (parm, &ro_grp);
4336 representatives.quick_push (repr);
4338 if (repr && !no_accesses_p (repr))
4340 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4342 if (ro_grp)
4343 result = UNMODIF_BY_REF_ACCESSES;
4344 else if (result < MODIF_BY_REF_ACCESSES)
4345 result = MODIF_BY_REF_ACCESSES;
4347 else if (result < BY_VAL_ACCESSES)
4348 result = BY_VAL_ACCESSES;
4350 else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
4351 result = UNUSED_PARAMS;
4353 else
4354 representatives.quick_push (NULL);
4357 if (result == NO_GOOD_ACCESS)
4359 representatives.release ();
4360 return NO_GOOD_ACCESS;
4363 return result;
4366 /* Return the index of BASE in PARMS. Abort if it is not found. */
4368 static inline int
4369 get_param_index (tree base, vec<tree> parms)
4371 int i, len;
4373 len = parms.length ();
4374 for (i = 0; i < len; i++)
4375 if (parms[i] == base)
4376 return i;
4377 gcc_unreachable ();
4380 /* Convert the decisions made at the representative level into compact
4381 parameter adjustments. REPRESENTATIVES are pointers to the first
4382 representatives of each parameter's accesses; ADJUSTMENTS_COUNT is the expected
4383 final number of adjustments. */
4385 static ipa_parm_adjustment_vec
4386 turn_representatives_into_adjustments (vec<access_p> representatives,
4387 int adjustments_count)
4389 vec<tree> parms;
4390 ipa_parm_adjustment_vec adjustments;
4391 tree parm;
4392 int i;
4394 gcc_assert (adjustments_count > 0);
4395 parms = ipa_get_vector_of_formal_parms (current_function_decl);
4396 adjustments.create (adjustments_count);
4397 parm = DECL_ARGUMENTS (current_function_decl);
4398 for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
4400 struct access *repr = representatives[i];
4402 if (!repr || no_accesses_p (repr))
4404 struct ipa_parm_adjustment adj;
4406 memset (&adj, 0, sizeof (adj));
4407 adj.base_index = get_param_index (parm, parms);
4408 adj.base = parm;
4409 if (!repr)
4410 adj.op = IPA_PARM_OP_COPY;
4411 else
4412 adj.op = IPA_PARM_OP_REMOVE;
4413 adj.arg_prefix = "ISRA";
4414 adjustments.quick_push (adj);
4416 else
4418 struct ipa_parm_adjustment adj;
4419 int index = get_param_index (parm, parms);
4421 for (; repr; repr = repr->next_grp)
4423 memset (&adj, 0, sizeof (adj));
4424 gcc_assert (repr->base == parm);
4425 adj.base_index = index;
4426 adj.base = repr->base;
4427 adj.type = repr->type;
4428 adj.alias_ptr_type = reference_alias_ptr_type (repr->expr);
4429 adj.offset = repr->offset;
4430 adj.by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
4431 && (repr->grp_maybe_modified
4432 || repr->grp_not_necessarilly_dereferenced));
4433 adj.arg_prefix = "ISRA";
4434 adjustments.quick_push (adj);
4438 parms.release ();
4439 return adjustments;
4442 /* Analyze the collected accesses and produce a plan of what to do with the
4443 parameters, in the form of adjustments; an empty vector means nothing. */
4445 static ipa_parm_adjustment_vec
4446 analyze_all_param_acesses (void)
4448 enum ipa_splicing_result repr_state;
4449 bool proceed = false;
4450 int i, adjustments_count = 0;
4451 vec<access_p> representatives;
4452 ipa_parm_adjustment_vec adjustments;
4454 repr_state = splice_all_param_accesses (representatives);
4455 if (repr_state == NO_GOOD_ACCESS)
4456 return ipa_parm_adjustment_vec ();
4458 /* If there are any parameters passed by reference which are not modified
4459 directly, we need to check whether they can be modified indirectly. */
4460 if (repr_state == UNMODIF_BY_REF_ACCESSES)
4462 analyze_caller_dereference_legality (representatives);
4463 analyze_modified_params (representatives);
4466 for (i = 0; i < func_param_count; i++)
4468 struct access *repr = representatives[i];
4470 if (repr && !no_accesses_p (repr))
4472 if (repr->grp_scalar_ptr)
4474 adjustments_count++;
4475 if (repr->grp_not_necessarilly_dereferenced
4476 || repr->grp_maybe_modified)
4477 representatives[i] = NULL;
4478 else
4480 proceed = true;
4481 sra_stats.scalar_by_ref_to_by_val++;
4484 else
4486 int new_components = decide_one_param_reduction (repr);
4488 if (new_components == 0)
4490 representatives[i] = NULL;
4491 adjustments_count++;
4493 else
4495 adjustments_count += new_components;
4496 sra_stats.aggregate_params_reduced++;
4497 sra_stats.param_reductions_created += new_components;
4498 proceed = true;
4502 else
4504 if (no_accesses_p (repr))
4506 proceed = true;
4507 sra_stats.deleted_unused_parameters++;
4509 adjustments_count++;
4513 if (!proceed && dump_file)
4514 fprintf (dump_file, "NOT proceeding to change params.\n");
4516 if (proceed)
4517 adjustments = turn_representatives_into_adjustments (representatives,
4518 adjustments_count);
4519 else
4520 adjustments = ipa_parm_adjustment_vec ();
4522 representatives.release ();
4523 return adjustments;
4526 /* If a parameter replacement identified by ADJ does not yet exist in the form
4527 of a declaration, create it and record it; otherwise return the previously
4528 created one. */
4530 static tree
4531 get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
4533 tree repl;
4534 if (!adj->new_ssa_base)
4536 char *pretty_name = make_fancy_name (adj->base);
4538 repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
4539 DECL_NAME (repl) = get_identifier (pretty_name);
4540 obstack_free (&name_obstack, pretty_name);
4542 adj->new_ssa_base = repl;
4544 else
4545 repl = adj->new_ssa_base;
4546 return repl;
4549 /* Find the first adjustment for a particular parameter BASE in a vector of
4550 ADJUSTMENTS which is not a plain copy (IPA_PARM_OP_COPY). Return NULL if there is no such
4551 adjustment. */
4553 static struct ipa_parm_adjustment *
4554 get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
4556 int i, len;
4558 len = adjustments.length ();
4559 for (i = 0; i < len; i++)
4561 struct ipa_parm_adjustment *adj;
4563 adj = &adjustments[i];
4564 if (adj->op != IPA_PARM_OP_COPY && adj->base == base)
4565 return adj;
4568 return NULL;
4571 /* If the statement STMT defines an SSA_NAME of a parameter which is to be
4572 removed because its value is not used, replace the SSA_NAME, together with
4573 all of its uses, with one relating to a created VAR_DECL and return true.
4574 ADJUSTMENTS is the adjustments vector. */
4576 static bool
4577 replace_removed_params_ssa_names (gimple stmt,
4578 ipa_parm_adjustment_vec adjustments)
4580 struct ipa_parm_adjustment *adj;
4581 tree lhs, decl, repl, name;
4583 if (gimple_code (stmt) == GIMPLE_PHI)
4584 lhs = gimple_phi_result (stmt);
4585 else if (is_gimple_assign (stmt))
4586 lhs = gimple_assign_lhs (stmt);
4587 else if (is_gimple_call (stmt))
4588 lhs = gimple_call_lhs (stmt);
4589 else
4590 gcc_unreachable ();
4592 if (TREE_CODE (lhs) != SSA_NAME)
4593 return false;
4595 decl = SSA_NAME_VAR (lhs);
4596 if (decl == NULL_TREE
4597 || TREE_CODE (decl) != PARM_DECL)
4598 return false;
4600 adj = get_adjustment_for_base (adjustments, decl);
4601 if (!adj)
4602 return false;
4604 repl = get_replaced_param_substitute (adj);
4605 name = make_ssa_name (repl, stmt);
4607 if (dump_file)
4609 fprintf (dump_file, "replacing an SSA name of a removed param ");
4610 print_generic_expr (dump_file, lhs, 0);
4611 fprintf (dump_file, " with ");
4612 print_generic_expr (dump_file, name, 0);
4613 fprintf (dump_file, "\n");
4616 if (is_gimple_assign (stmt))
4617 gimple_assign_set_lhs (stmt, name);
4618 else if (is_gimple_call (stmt))
4619 gimple_call_set_lhs (stmt, name);
4620 else
4621 gimple_phi_set_result (as_a <gphi *> (stmt), name);
4623 replace_uses_by (lhs, name);
4624 release_ssa_name (lhs);
4625 return true;
4628 /* If the statement STMT contains any expressions that need to be replaced
4629 with a different one as noted by ADJUSTMENTS, do so. Handle any potential type
4630 incompatibilities (GSI is used to accommodate conversion statements and must
4631 point to the statement). Return true iff the statement was modified. */
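/* The typical trouble handled below, sketched on hypothetical types: after a
   one-field struct parameter is replaced by its scalar field, an assignment
   like

     s = *p;

   may end up copying an int into a single-field struct.  A
   VIEW_CONVERT_EXPR is wrapped around the right-hand side to keep the
   statement type-correct, except for CONSTRUCTOR right-hand sides, which are
   replaced by a zero constant or an empty constructor of the new type.  */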
4633 static bool
4634 sra_ipa_modify_assign (gimple stmt, gimple_stmt_iterator *gsi,
4635 ipa_parm_adjustment_vec adjustments)
4637 tree *lhs_p, *rhs_p;
4638 bool any;
4640 if (!gimple_assign_single_p (stmt))
4641 return false;
4643 rhs_p = gimple_assign_rhs1_ptr (stmt);
4644 lhs_p = gimple_assign_lhs_ptr (stmt);
4646 any = ipa_modify_expr (rhs_p, false, adjustments);
4647 any |= ipa_modify_expr (lhs_p, false, adjustments);
4648 if (any)
4650 tree new_rhs = NULL_TREE;
4652 if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
4654 if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
4656 /* V_C_Es of constructors can cause trouble (PR 42714). */
4657 if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
4658 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
4659 else
4660 *rhs_p = build_constructor (TREE_TYPE (*lhs_p),
4661 NULL);
4663 else
4664 new_rhs = fold_build1_loc (gimple_location (stmt),
4665 VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
4666 *rhs_p);
4668 else if (REFERENCE_CLASS_P (*rhs_p)
4669 && is_gimple_reg_type (TREE_TYPE (*lhs_p))
4670 && !is_gimple_reg (*lhs_p))
4671 /* This can happen when an assignment in between two single field
4672 structures is turned into an assignment in between two pointers to
4673 scalars (PR 42237). */
4674 new_rhs = *rhs_p;
4676 if (new_rhs)
4678 tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
4679 true, GSI_SAME_STMT);
4681 gimple_assign_set_rhs_from_tree (gsi, tmp);
4684 return true;
4687 return false;
4690 /* Traverse the function body and perform all modifications as described in
4691 ADJUSTMENTS. Return true iff the CFG has been changed. */
4693 bool
4694 ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
4696 bool cfg_changed = false;
4697 basic_block bb;
4699 FOR_EACH_BB_FN (bb, cfun)
4701 gimple_stmt_iterator gsi;
4703 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4704 replace_removed_params_ssa_names (gsi_stmt (gsi), adjustments);
4706 gsi = gsi_start_bb (bb);
4707 while (!gsi_end_p (gsi))
4709 gimple stmt = gsi_stmt (gsi);
4710 bool modified = false;
4711 tree *t;
4712 unsigned i;
4714 switch (gimple_code (stmt))
4716 case GIMPLE_RETURN:
4717 t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
4718 if (*t != NULL_TREE)
4719 modified |= ipa_modify_expr (t, true, adjustments);
4720 break;
4722 case GIMPLE_ASSIGN:
4723 modified |= sra_ipa_modify_assign (stmt, &gsi, adjustments);
4724 modified |= replace_removed_params_ssa_names (stmt, adjustments);
4725 break;
4727 case GIMPLE_CALL:
4728 /* Operands must be processed before the lhs. */
4729 for (i = 0; i < gimple_call_num_args (stmt); i++)
4731 t = gimple_call_arg_ptr (stmt, i);
4732 modified |= ipa_modify_expr (t, true, adjustments);
4735 if (gimple_call_lhs (stmt))
4737 t = gimple_call_lhs_ptr (stmt);
4738 modified |= ipa_modify_expr (t, false, adjustments);
4739 modified |= replace_removed_params_ssa_names (stmt,
4740 adjustments);
4742 break;
4744 case GIMPLE_ASM:
4746 gasm *asm_stmt = as_a <gasm *> (stmt);
4747 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
4749 t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
4750 modified |= ipa_modify_expr (t, true, adjustments);
4752 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
4754 t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
4755 modified |= ipa_modify_expr (t, false, adjustments);
4758 break;
4760 default:
4761 break;
4764 if (modified)
4766 update_stmt (stmt);
4767 if (maybe_clean_eh_stmt (stmt)
4768 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4769 cfg_changed = true;
4771 gsi_next (&gsi);
4775 return cfg_changed;
4778 /* Call gimple_debug_bind_reset_value on all debug statements describing
4779 gimple register parameters that are being removed or replaced. */
4781 static void
4782 sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
4784 int i, len;
4785 gimple_stmt_iterator *gsip = NULL, gsi;
4787 if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
4789 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
4790 gsip = &gsi;
4792 len = adjustments.length ();
4793 for (i = 0; i < len; i++)
4795 struct ipa_parm_adjustment *adj;
4796 imm_use_iterator ui;
4797 gimple stmt;
4798 gdebug *def_temp;
4799 tree name, vexpr, copy = NULL_TREE;
4800 use_operand_p use_p;
4802 adj = &adjustments[i];
4803 if (adj->op == IPA_PARM_OP_COPY || !is_gimple_reg (adj->base))
4804 continue;
4805 name = ssa_default_def (cfun, adj->base);
4806 vexpr = NULL;
4807 if (name)
4808 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
4810 if (gimple_clobber_p (stmt))
4812 gimple_stmt_iterator cgsi = gsi_for_stmt (stmt);
4813 unlink_stmt_vdef (stmt);
4814 gsi_remove (&cgsi, true);
4815 release_defs (stmt);
4816 continue;
4818 /* All other users must have been removed by
4819 ipa_sra_modify_function_body. */
4820 gcc_assert (is_gimple_debug (stmt));
4821 if (vexpr == NULL && gsip != NULL)
4823 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4824 vexpr = make_node (DEBUG_EXPR_DECL);
4825 def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
4826 NULL);
4827 DECL_ARTIFICIAL (vexpr) = 1;
4828 TREE_TYPE (vexpr) = TREE_TYPE (name);
4829 DECL_MODE (vexpr) = DECL_MODE (adj->base);
4830 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
4832 if (vexpr)
4834 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
4835 SET_USE (use_p, vexpr);
4837 else
4838 gimple_debug_bind_reset_value (stmt);
4839 update_stmt (stmt);
4841 /* Create a VAR_DECL for debug info purposes. */
4842 if (!DECL_IGNORED_P (adj->base))
4844 copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
4845 VAR_DECL, DECL_NAME (adj->base),
4846 TREE_TYPE (adj->base));
4847 if (DECL_PT_UID_SET_P (adj->base))
4848 SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
4849 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
4850 TREE_READONLY (copy) = TREE_READONLY (adj->base);
4851 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
4852 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
4853 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
4854 DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
4855 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
4856 DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
4857 SET_DECL_RTL (copy, 0);
4858 TREE_USED (copy) = 1;
4859 DECL_CONTEXT (copy) = current_function_decl;
4860 add_local_decl (cfun, copy);
4861 DECL_CHAIN (copy) =
4862 BLOCK_VARS (DECL_INITIAL (current_function_decl));
4863 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
4865 if (gsip != NULL && copy && target_for_debug_bind (adj->base))
4867 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4868 if (vexpr)
4869 def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
4870 else
4871 def_temp = gimple_build_debug_source_bind (copy, adj->base,
4872 NULL);
4873 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
4878 /* Return true if some caller passes fewer actual arguments than there are
4879 formal parameters in the current function or passes an argument whose type
4880 does not match; return false otherwise. */
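/* A caller that would fail this check, for illustration (hypothetical,
   unprototyped pre-C99 code):

     int f ();
     int g (void) { return f (1); }

   If f is later defined as int f (int a, int b), the call passes fewer
   arguments than there are formal parameters, so callsite_arguments_match_p
   fails and IPA-SRA backs off.  */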
4882 static bool
4883 some_callers_have_mismatched_arguments_p (struct cgraph_node *node,
4884 void *data ATTRIBUTE_UNUSED)
4886 struct cgraph_edge *cs;
4887 for (cs = node->callers; cs; cs = cs->next_caller)
4888 if (!cs->call_stmt || !callsite_arguments_match_p (cs->call_stmt))
4889 return true;
4891 return false;
4894 /* Return true if some caller lacks a VUSE on its call statement; return false if all callers have one. */
4896 static bool
4897 some_callers_have_no_vuse_p (struct cgraph_node *node,
4898 void *data ATTRIBUTE_UNUSED)
4900 struct cgraph_edge *cs;
4901 for (cs = node->callers; cs; cs = cs->next_caller)
4902 if (!cs->call_stmt || !gimple_vuse (cs->call_stmt))
4903 return true;
4905 return false;
4908 /* Convert all callers of NODE. */
4910 static bool
4911 convert_callers_for_node (struct cgraph_node *node,
4912 void *data)
4914 ipa_parm_adjustment_vec *adjustments = (ipa_parm_adjustment_vec *) data;
4915 bitmap recomputed_callers = BITMAP_ALLOC (NULL);
4916 struct cgraph_edge *cs;
4918 for (cs = node->callers; cs; cs = cs->next_caller)
4920 push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));
4922 if (dump_file)
4923 fprintf (dump_file, "Adjusting call %s/%i -> %s/%i\n",
4924 xstrdup (cs->caller->name ()),
4925 cs->caller->order,
4926 xstrdup (cs->callee->name ()),
4927 cs->callee->order);
4929 ipa_modify_call_arguments (cs, cs->call_stmt, *adjustments);
4931 pop_cfun ();
4934 for (cs = node->callers; cs; cs = cs->next_caller)
4935 if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
4936 && gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->decl)))
4937 compute_inline_parameters (cs->caller, true);
4938 BITMAP_FREE (recomputed_callers);
4940 return true;
4943 /* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS. */
4945 static void
4946 convert_callers (struct cgraph_node *node, tree old_decl,
4947 ipa_parm_adjustment_vec adjustments)
4949 basic_block this_block;
4951 node->call_for_symbol_and_aliases (convert_callers_for_node,
4952 &adjustments, false);
4954 if (!encountered_recursive_call)
4955 return;
4957 FOR_EACH_BB_FN (this_block, cfun)
4959 gimple_stmt_iterator gsi;
4961 for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
4963 gcall *stmt;
4964 tree call_fndecl;
4965 stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
4966 if (!stmt)
4967 continue;
4968 call_fndecl = gimple_call_fndecl (stmt);
4969 if (call_fndecl == old_decl)
4971 if (dump_file)
4972 fprintf (dump_file, "Adjusting recursive call");
4973 gimple_call_set_fndecl (stmt, node->decl);
4974 ipa_modify_call_arguments (NULL, stmt, adjustments);
4979 return;
4982 /* Perform all the modifications required in IPA-SRA for NODE to have parameters
4983 as given in ADJUSTMENTS. Return true iff the CFG has been changed. */
4985 static bool
4986 modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
4988 struct cgraph_node *new_node;
4989 bool cfg_changed;
4991 cgraph_edge::rebuild_edges ();
4992 free_dominance_info (CDI_DOMINATORS);
4993 pop_cfun ();
4995 /* This must be done after rebuilding cgraph edges for node above.
4996 Otherwise any recursive calls to node that are recorded in
4997 redirect_callers will be corrupted. */
4998 vec<cgraph_edge *> redirect_callers = node->collect_callers ();
4999 new_node = node->create_version_clone_with_body (redirect_callers, NULL,
5000 NULL, false, NULL, NULL,
5001 "isra");
5002 redirect_callers.release ();
5004 push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));
5005 ipa_modify_formal_parameters (current_function_decl, adjustments);
5006 cfg_changed = ipa_sra_modify_function_body (adjustments);
5007 sra_ipa_reset_debug_stmts (adjustments);
5008 convert_callers (new_node, node->decl, adjustments);
5009 new_node->make_local ();
5010 return cfg_changed;
5013 /* Means of communication between ipa_sra_check_caller and
5014 ipa_sra_preliminary_function_checks. */
5016 struct ipa_sra_check_caller_data
5018 bool has_callers;
5019 bool bad_arg_alignment;
5020 bool has_thunk;
5023 /* If NODE has a caller, mark that fact in DATA, which is a pointer to
5024 ipa_sra_check_caller_data. Also check whether all aggregate arguments in
5025 all known calls are unit aligned and, if not, set the appropriate flag in
5026 DATA too. */
5028 static bool
5029 ipa_sra_check_caller (struct cgraph_node *node, void *data)
5031 if (!node->callers)
5032 return false;
5034 struct ipa_sra_check_caller_data *iscc;
5035 iscc = (struct ipa_sra_check_caller_data *) data;
5036 iscc->has_callers = true;
5038 for (cgraph_edge *cs = node->callers; cs; cs = cs->next_caller)
5040 if (cs->caller->thunk.thunk_p)
5042 iscc->has_thunk = true;
5043 return true;
5045 gimple call_stmt = cs->call_stmt;
5046 unsigned count = gimple_call_num_args (call_stmt);
5047 for (unsigned i = 0; i < count; i++)
5049 tree arg = gimple_call_arg (call_stmt, i);
5050 if (is_gimple_reg (arg))
5051 continue;
5053 tree offset;
5054 HOST_WIDE_INT bitsize, bitpos;
5055 machine_mode mode;
5056 int unsignedp, volatilep = 0;
5057 get_inner_reference (arg, &bitsize, &bitpos, &offset, &mode,
5058 &unsignedp, &volatilep, false);
5059 if (bitpos % BITS_PER_UNIT)
5061 iscc->bad_arg_alignment = true;
5062 return true;
5067 return false;
5070 /* Return false if the function is apparently unsuitable for IPA-SRA based on
5071 its attributes; return true otherwise. NODE is the cgraph node of the current
5072 function. */
5074 static bool
5075 ipa_sra_preliminary_function_checks (struct cgraph_node *node)
5077 if (!node->can_be_local_p ())
5079 if (dump_file)
5080 fprintf (dump_file, "Function not local to this compilation unit.\n");
5081 return false;
5084 if (!node->local.can_change_signature)
5086 if (dump_file)
5087 fprintf (dump_file, "Function can not change signature.\n");
5088 return false;
5091 if (!tree_versionable_function_p (node->decl))
5093 if (dump_file)
5094 fprintf (dump_file, "Function is not versionable.\n");
5095 return false;
5098 if (!opt_for_fn (node->decl, optimize)
5099 || !opt_for_fn (node->decl, flag_ipa_sra))
5101 if (dump_file)
5102 fprintf (dump_file, "Function not optimized.\n");
5103 return false;
5106 if (DECL_VIRTUAL_P (current_function_decl))
5108 if (dump_file)
5109 fprintf (dump_file, "Function is a virtual method.\n");
5110 return false;
5113 if ((DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
5114 && inline_summaries->get (node)->size >= MAX_INLINE_INSNS_AUTO)
5116 if (dump_file)
5117 fprintf (dump_file, "Function too big to be made truly local.\n");
5118 return false;
5121 if (cfun->stdarg)
5123 if (dump_file)
5124 fprintf (dump_file, "Function uses stdarg. \n");
5125 return false;
5128 if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
5129 return false;
5131 if (DECL_DISREGARD_INLINE_LIMITS (node->decl))
5133 if (dump_file)
5134 fprintf (dump_file, "Always inline function will be inlined "
5135 "anyway. \n");
5136 return false;
5139 struct ipa_sra_check_caller_data iscc;
5140 memset (&iscc, 0, sizeof(iscc));
5141 node->call_for_symbol_and_aliases (ipa_sra_check_caller, &iscc, true);
5142 if (!iscc.has_callers)
5144 if (dump_file)
5145 fprintf (dump_file,
5146 "Function has no callers in this compilation unit.\n");
5147 return false;
5150 if (iscc.bad_arg_alignment)
5152 if (dump_file)
5153 fprintf (dump_file,
5154 "A function call has an argument with non-unit alignment.\n");
5155 return false;
5158 if (iscc.has_thunk)
5160 if (dump_file)
5161 fprintf (dump_file,
5162 "A has thunk.\n");
5163 return false;
5166 return true;
5169 /* Perform early interprocedural SRA. */
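/* Taken together, a successful run of the pass performs a source-level
   transformation along these lines (hypothetical):

     struct pair { int a, b; };
     static int f (struct pair *p) { return p->a; }
       =>
     static int f.isra.0 (int a) { return a; }

   where a clone with the reduced signature is created and every caller is
   adjusted to load p->a and pass it by value.  */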
5171 static unsigned int
5172 ipa_early_sra (void)
5174 struct cgraph_node *node = cgraph_node::get (current_function_decl);
5175 ipa_parm_adjustment_vec adjustments;
5176 int ret = 0;
5178 if (!ipa_sra_preliminary_function_checks (node))
5179 return 0;
5181 sra_initialize ();
5182 sra_mode = SRA_MODE_EARLY_IPA;
5184 if (!find_param_candidates ())
5186 if (dump_file)
5187 fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
5188 goto simple_out;
5191 if (node->call_for_symbol_and_aliases
5192 (some_callers_have_mismatched_arguments_p, NULL, true))
5194 if (dump_file)
5195 fprintf (dump_file, "There are callers with insufficient number of "
5196 "arguments or arguments with type mismatches.\n");
5197 goto simple_out;
5200 if (node->call_for_symbol_and_aliases
5201 (some_callers_have_no_vuse_p, NULL, true))
5203 if (dump_file)
5204 fprintf (dump_file, "There are callers with no VUSE attached "
5205 "to a call stmt.\n");
5206 goto simple_out;
5209 bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
5210 func_param_count
5211 * last_basic_block_for_fn (cfun));
5212 final_bbs = BITMAP_ALLOC (NULL);
5214 scan_function ();
5215 if (encountered_apply_args)
5217 if (dump_file)
5218 fprintf (dump_file, "Function calls __builtin_apply_args().\n");
5219 goto out;
5222 if (encountered_unchangable_recursive_call)
5224 if (dump_file)
5225 fprintf (dump_file, "Function calls itself with insufficient "
5226 "number of arguments.\n");
5227 goto out;
5230 adjustments = analyze_all_param_acesses ();
5231 if (!adjustments.exists ())
5232 goto out;
5233 if (dump_file)
5234 ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);
5236 if (modify_function (node, adjustments))
5237 ret = TODO_update_ssa | TODO_cleanup_cfg;
5238 else
5239 ret = TODO_update_ssa;
5240 adjustments.release ();
5242 statistics_counter_event (cfun, "Unused parameters deleted",
5243 sra_stats.deleted_unused_parameters);
5244 statistics_counter_event (cfun, "Scalar parameters converted to by-value",
5245 sra_stats.scalar_by_ref_to_by_val);
5246 statistics_counter_event (cfun, "Aggregate parameters broken up",
5247 sra_stats.aggregate_params_reduced);
5248 statistics_counter_event (cfun, "Aggregate parameter components created",
5249 sra_stats.param_reductions_created);
5251 out:
5252 BITMAP_FREE (final_bbs);
5253 free (bb_dereferences);
5254 simple_out:
5255 sra_deinitialize ();
5256 return ret;
5259 namespace {
5261 const pass_data pass_data_early_ipa_sra =
5263 GIMPLE_PASS, /* type */
5264 "eipa_sra", /* name */
5265 OPTGROUP_NONE, /* optinfo_flags */
5266 TV_IPA_SRA, /* tv_id */
5267 0, /* properties_required */
5268 0, /* properties_provided */
5269 0, /* properties_destroyed */
5270 0, /* todo_flags_start */
5271 TODO_dump_symtab, /* todo_flags_finish */
5274 class pass_early_ipa_sra : public gimple_opt_pass
5276 public:
5277 pass_early_ipa_sra (gcc::context *ctxt)
5278 : gimple_opt_pass (pass_data_early_ipa_sra, ctxt)
5281 /* opt_pass methods: */
5282 virtual bool gate (function *) { return flag_ipa_sra && dbg_cnt (eipa_sra); }
5283 virtual unsigned int execute (function *) { return ipa_early_sra (); }
5285 }; // class pass_early_ipa_sra
5287 } // anon namespace
5289 gimple_opt_pass *
5290 make_pass_early_ipa_sra (gcc::context *ctxt)
5292 return new pass_early_ipa_sra (ctxt);