gcc/tree-sra.cc
1 /* Scalar Replacement of Aggregates (SRA) converts some structure
2 references into scalar references, exposing them to the scalar
3 optimizers.
4 Copyright (C) 2008-2024 Free Software Foundation, Inc.
5 Contributed by Martin Jambor <mjambor@suse.cz>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* This file implements Scalar Replacement of Aggregates (SRA).  SRA is run
24 twice, once in the early stages of compilation (early SRA) and once in the
25 late stages (late SRA). The aim of both is to turn references to scalar
26 parts of aggregates into uses of independent scalar variables.
28    The two passes are nearly identical; the only difference is that early SRA
29 does not scalarize unions which are used as the result in a GIMPLE_RETURN
30 statement because together with inlining this can lead to weird type
31 conversions.
33 Both passes operate in four stages:
35 1. The declarations that have properties which make them candidates for
36 scalarization are identified in function find_var_candidates(). The
37 candidates are stored in candidate_bitmap.
39 2. The function body is scanned. In the process, declarations which are
40    used in a manner that prevents their scalarization are removed from the
41 candidate bitmap. More importantly, for every access into an aggregate,
42 an access structure (struct access) is created by create_access() and
43 stored in a vector associated with the aggregate. Among other
44 information, the aggregate declaration, the offset and size of the access
45 and its type are stored in the structure.
47 On a related note, assign_link structures are created for every assign
48 statement between candidate aggregates and attached to the related
49 accesses.
51 3. The vectors of accesses are analyzed. They are first sorted according to
52 their offset and size and then scanned for partially overlapping accesses
53 (i.e. those which overlap but one is not entirely within another). Such
54 an access disqualifies the whole aggregate from being scalarized.
56 If there is no such inhibiting overlap, a representative access structure
57 is chosen for every unique combination of offset and size. Afterwards,
58 the pass builds a set of trees from these structures, in which children
59 of an access are within their parent (in terms of offset and size).
61 Then accesses are propagated whenever possible (i.e. in cases when it
62 does not create a partially overlapping access) across assign_links from
63 the right hand side to the left hand side.
65 Then the set of trees for each declaration is traversed again and those
66 accesses which should be replaced by a scalar are identified.
68 4. The function is traversed again, and for every reference into an
69 aggregate that has some component which is about to be scalarized,
70 statements are amended and new statements are created as necessary.
71 Finally, if a parameter got scalarized, the scalar replacements are
72 initialized with values from respective parameter aggregates. */
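/* A small illustrative sketch of the transformation described above (not code
   taken from this file; `q' and `use' are hypothetical):

     struct pair { int a; int b; };
     struct pair p = q;
     use (p.a + p.b);

   is, roughly speaking, rewritten so that the fields live in independent
   scalar replacements,

     int p$a = q.a;
     int p$b = q.b;
     use (p$a + p$b);

   exposing both values to the scalar optimizers.  The $-style names match
   those produced by make_fancy_name below.  */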
74 #include "config.h"
75 #include "system.h"
76 #include "coretypes.h"
77 #include "backend.h"
78 #include "target.h"
79 #include "rtl.h"
80 #include "tree.h"
81 #include "gimple.h"
82 #include "predict.h"
83 #include "alloc-pool.h"
84 #include "tree-pass.h"
85 #include "ssa.h"
86 #include "cgraph.h"
87 #include "gimple-pretty-print.h"
88 #include "alias.h"
89 #include "fold-const.h"
90 #include "tree-eh.h"
91 #include "stor-layout.h"
92 #include "gimplify.h"
93 #include "gimple-iterator.h"
94 #include "gimplify-me.h"
95 #include "gimple-walk.h"
96 #include "tree-cfg.h"
97 #include "tree-dfa.h"
98 #include "tree-ssa.h"
99 #include "dbgcnt.h"
100 #include "builtins.h"
101 #include "tree-sra.h"
102 #include "opts.h"
104 /* Enumeration of all aggregate reductions we can do. */
105 enum sra_mode { SRA_MODE_EARLY_IPA, /* early call regularization */
106 SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
107 SRA_MODE_INTRA }; /* late intraprocedural SRA */
109 /* Global variable describing which aggregate reduction we are performing at
110 the moment. */
111 static enum sra_mode sra_mode;
113 struct assign_link;
115 /* ACCESS represents each access to an aggregate variable (as a whole or a
116 part). It can also represent a group of accesses that refer to exactly the
117 same fragment of an aggregate (i.e. those that have exactly the same offset
118 and size). Such representatives for a single aggregate, once determined,
119 are linked in a linked list and have the group fields set.
121 Moreover, when doing intraprocedural SRA, a tree is built from those
122 representatives (by the means of first_child and next_sibling pointers), in
123 which all items in a subtree are "within" the root, i.e. their offset is
124 greater or equal to offset of the root and offset+size is smaller or equal
125 to offset+size of the root. Children of an access are sorted by offset.
127    Note that accesses to parts of vector and complex number types are always
128 represented by an access to the whole complex number or a vector. It is a
129 duty of the modifying functions to replace them appropriately. */
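/* An illustrative example (assuming 32-bit int and no padding): for

     struct inner { int x; int y; };
     struct outer { struct inner in; int z; } o;

   accesses to o.in (offset 0, size 64), o.in.x (offset 0, size 32),
   o.in.y (offset 32, size 32) and o.z (offset 64, size 32) would yield two
   access trees, one rooted at the o.in access with o.in.x and o.in.y as its
   children, and one consisting only of the o.z access.  */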
131 struct access
133   /* Values returned by `get_ref_base_and_extent' for each component reference.
134      If EXPR isn't a component reference, just set `BASE = EXPR', `OFFSET = 0',
135 `SIZE = TREE_SIZE (TREE_TYPE (expr))'. */
136 HOST_WIDE_INT offset;
137 HOST_WIDE_INT size;
138 tree base;
140 /* Expression. It is context dependent so do not use it to create new
141 expressions to access the original aggregate. See PR 42154 for a
142 testcase. */
143 tree expr;
144 /* Type. */
145 tree type;
147 /* The statement this access belongs to. */
148 gimple *stmt;
150 /* Next group representative for this aggregate. */
151 struct access *next_grp;
153 /* Pointer to the group representative. Pointer to itself if the struct is
154 the representative. */
155 struct access *group_representative;
157 /* After access tree has been constructed, this points to the parent of the
158 current access, if there is one. NULL for roots. */
159 struct access *parent;
161 /* If this access has any children (in terms of the definition above), this
162 points to the first one. */
163 struct access *first_child;
165 /* In intraprocedural SRA, pointer to the next sibling in the access tree as
166 described above. */
167 struct access *next_sibling;
169 /* Pointers to the first and last element in the linked list of assign
170      links for propagation from RHS to LHS. */
171 struct assign_link *first_rhs_link, *last_rhs_link;
173 /* Pointers to the first and last element in the linked list of assign
174 links for propagation from LHS to RHS. */
175 struct assign_link *first_lhs_link, *last_lhs_link;
177 /* Pointer to the next access in the work queues. */
178 struct access *next_rhs_queued, *next_lhs_queued;
180 /* Replacement variable for this access "region." Never to be accessed
181 directly, always only by the means of get_access_replacement() and only
182 when grp_to_be_replaced flag is set. */
183 tree replacement_decl;
185 /* Is this access made in reverse storage order? */
186 unsigned reverse : 1;
188   /* Is this particular access a write access? */
189 unsigned write : 1;
191 /* Is this access currently in the rhs work queue? */
192 unsigned grp_rhs_queued : 1;
194 /* Is this access currently in the lhs work queue? */
195 unsigned grp_lhs_queued : 1;
197 /* Does this group contain a write access? This flag is propagated down the
198 access tree. */
199 unsigned grp_write : 1;
201 /* Does this group contain a read access? This flag is propagated down the
202 access tree. */
203 unsigned grp_read : 1;
205 /* Does this group contain a read access that comes from an assignment
206 statement? This flag is propagated down the access tree. */
207 unsigned grp_assignment_read : 1;
209 /* Does this group contain a write access that comes from an assignment
210 statement? This flag is propagated down the access tree. */
211 unsigned grp_assignment_write : 1;
213 /* Does this group contain a read access through a scalar type? This flag is
214 not propagated in the access tree in any direction. */
215 unsigned grp_scalar_read : 1;
217 /* Does this group contain a write access through a scalar type? This flag
218 is not propagated in the access tree in any direction. */
219 unsigned grp_scalar_write : 1;
221 /* In a root of an access tree, true means that the entire tree should be
222      totally scalarized - that all scalar leaves should be scalarized and
223 non-root grp_total_scalarization accesses should be honored. Otherwise,
224 non-root accesses with grp_total_scalarization should never get scalar
225 replacements. */
226 unsigned grp_total_scalarization : 1;
228 /* Other passes of the analysis use this bit to make function
229 analyze_access_subtree create scalar replacements for this group if
230 possible. */
231 unsigned grp_hint : 1;
233 /* Is the subtree rooted in this access fully covered by scalar
234 replacements? */
235 unsigned grp_covered : 1;
237 /* If set to true, this access and all below it in an access tree must not be
238 scalarized. */
239 unsigned grp_unscalarizable_region : 1;
241 /* Whether data have been written to parts of the aggregate covered by this
242 access which is not to be scalarized. This flag is propagated up in the
243 access tree. */
244 unsigned grp_unscalarized_data : 1;
246 /* Set if all accesses in the group consist of the same chain of
247 COMPONENT_REFs and ARRAY_REFs. */
248 unsigned grp_same_access_path : 1;
250 /* Does this access and/or group contain a write access through a
251 BIT_FIELD_REF? */
252 unsigned grp_partial_lhs : 1;
254 /* Set when a scalar replacement should be created for this variable. */
255 unsigned grp_to_be_replaced : 1;
257 /* Set when we want a replacement for the sole purpose of having it in
258 generated debug statements. */
259 unsigned grp_to_be_debug_replaced : 1;
261 /* Should TREE_NO_WARNING of a replacement be set? */
262 unsigned grp_no_warning : 1;
264   /* Result of propagation across a link from LHS to RHS. */
265 unsigned grp_result_of_prop_from_lhs : 1;
268 typedef struct access *access_p;
271 /* Alloc pool for allocating access structures. */
272 static object_allocator<struct access> access_pool ("SRA accesses");
274 /* A structure linking lhs and rhs accesses from an aggregate assignment. They
275 are used to propagate subaccesses from rhs to lhs and vice versa as long as
276 they don't conflict with what is already there. In the RHS->LHS direction,
277 we also propagate grp_write flag to lazily mark that the access contains any
278 meaningful data. */
279 struct assign_link
281 struct access *lacc, *racc;
282 struct assign_link *next_rhs, *next_lhs;
285 /* Alloc pool for allocating assign link structures. */
286 static object_allocator<assign_link> assign_link_pool ("SRA links");
288 /* Base (tree) -> Vector (vec<access_p> *) map. */
289 static hash_map<tree, auto_vec<access_p> > *base_access_vec;
291 /* Hash to limit creation of artificial accesses */
292 static hash_map<tree, unsigned> *propagation_budget;
294 /* Candidate hash table helpers. */
296 struct uid_decl_hasher : nofree_ptr_hash <tree_node>
298 static inline hashval_t hash (const tree_node *);
299 static inline bool equal (const tree_node *, const tree_node *);
302 /* Hash a tree in a uid_decl_map. */
304 inline hashval_t
305 uid_decl_hasher::hash (const tree_node *item)
307 return item->decl_minimal.uid;
310 /* Return true if the DECL_UIDs of both trees are equal. */
312 inline bool
313 uid_decl_hasher::equal (const tree_node *a, const tree_node *b)
315 return (a->decl_minimal.uid == b->decl_minimal.uid);
318 /* Set of candidates. */
319 static bitmap candidate_bitmap;
320 static hash_table<uid_decl_hasher> *candidates;
322 /* For a candidate UID return the candidate's decl. */
324 static inline tree
325 candidate (unsigned uid)
327 tree_node t;
328 t.decl_minimal.uid = uid;
329 return candidates->find_with_hash (&t, static_cast <hashval_t> (uid));
332 /* Bitmap of candidates which we should try to entirely scalarize away and
333    those which cannot be (because they are and need to be used as a whole). */
334 static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;
336 /* Bitmap of candidates in the constant pool, which cannot be scalarized
337 because this would produce non-constant expressions (e.g. Ada). */
338 static bitmap disqualified_constants;
340 /* Bitmap of candidates which are passed by reference in call arguments. */
341 static bitmap passed_by_ref_in_call;
343 /* Obstack for creation of fancy names. */
344 static struct obstack name_obstack;
346 /* Head of a linked list of accesses that need to have their subaccesses
347 propagated to their assignment counterparts. */
348 static struct access *rhs_work_queue_head, *lhs_work_queue_head;
350 /* Dump contents of ACCESS to file F in a human friendly way. If GRP is true,
351 representative fields are dumped, otherwise those which only describe the
352 individual access are. */
354 static struct
356 /* Number of processed aggregates is readily available in
357 analyze_all_variable_accesses and so is not stored here. */
359 /* Number of created scalar replacements. */
360 int replacements;
362 /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
363 expression. */
364 int exprs;
366 /* Number of statements created by generate_subtree_copies. */
367 int subtree_copies;
369 /* Number of statements created by load_assign_lhs_subreplacements. */
370 int subreplacements;
372 /* Number of times sra_modify_assign has deleted a statement. */
373 int deleted;
375 /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
376      RHS separately due to type conversions or nonexistent matching
377 references. */
378 int separate_lhs_rhs_handling;
380 /* Number of parameters that were removed because they were unused. */
381 int deleted_unused_parameters;
383 /* Number of scalars passed as parameters by reference that have been
384 converted to be passed by value. */
385 int scalar_by_ref_to_by_val;
387 /* Number of aggregate parameters that were replaced by one or more of their
388 components. */
389 int aggregate_params_reduced;
391   /* Number of components created when splitting aggregate parameters. */
392 int param_reductions_created;
394 /* Number of deferred_init calls that are modified. */
395 int deferred_init;
397 /* Number of deferred_init calls that are created by
398 generate_subtree_deferred_init. */
399 int subtree_deferred_init;
400 } sra_stats;
402 static void
403 dump_access (FILE *f, struct access *access, bool grp)
405 fprintf (f, "access { ");
406 fprintf (f, "base = (%d)'", DECL_UID (access->base));
407 print_generic_expr (f, access->base);
408 fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
409 fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
410 fprintf (f, ", expr = ");
411 print_generic_expr (f, access->expr);
412 fprintf (f, ", type = ");
413 print_generic_expr (f, access->type);
414 fprintf (f, ", reverse = %d", access->reverse);
415 if (grp)
416 fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
417 "grp_assignment_write = %d, grp_scalar_read = %d, "
418 "grp_scalar_write = %d, grp_total_scalarization = %d, "
419 "grp_hint = %d, grp_covered = %d, "
420 "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
421 "grp_same_access_path = %d, grp_partial_lhs = %d, "
422 "grp_to_be_replaced = %d, grp_to_be_debug_replaced = %d}\n",
423 access->grp_read, access->grp_write, access->grp_assignment_read,
424 access->grp_assignment_write, access->grp_scalar_read,
425 access->grp_scalar_write, access->grp_total_scalarization,
426 access->grp_hint, access->grp_covered,
427 access->grp_unscalarizable_region, access->grp_unscalarized_data,
428 access->grp_same_access_path, access->grp_partial_lhs,
429 access->grp_to_be_replaced, access->grp_to_be_debug_replaced);
430 else
431 fprintf (f, ", write = %d, grp_total_scalarization = %d, "
432 "grp_partial_lhs = %d}\n",
433 access->write, access->grp_total_scalarization,
434 access->grp_partial_lhs);
437 /* Dump a subtree rooted in ACCESS to file F, indent by LEVEL. */
439 static void
440 dump_access_tree_1 (FILE *f, struct access *access, int level)
444 int i;
446 for (i = 0; i < level; i++)
447 fputs ("* ", f);
449 dump_access (f, access, true);
451 if (access->first_child)
452 dump_access_tree_1 (f, access->first_child, level + 1);
454 access = access->next_sibling;
456 while (access);
459 /* Dump all access trees for a variable, given the pointer to the first root in
460 ACCESS. */
462 static void
463 dump_access_tree (FILE *f, struct access *access)
465 for (; access; access = access->next_grp)
466 dump_access_tree_1 (f, access, 0);
469 /* Return true iff ACC is non-NULL and has subaccesses. */
471 static inline bool
472 access_has_children_p (struct access *acc)
474 return acc && acc->first_child;
477 /* Return true iff ACC is (partly) covered by at least one replacement. */
479 static bool
480 access_has_replacements_p (struct access *acc)
482 struct access *child;
483 if (acc->grp_to_be_replaced)
484 return true;
485 for (child = acc->first_child; child; child = child->next_sibling)
486 if (access_has_replacements_p (child))
487 return true;
488 return false;
491 /* Return a vector of pointers to accesses for the variable given in BASE or
492 NULL if there is none. */
494 static vec<access_p> *
495 get_base_access_vector (tree base)
497 return base_access_vec->get (base);
500 /* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
501 in ACCESS. Return NULL if it cannot be found. */
503 static struct access *
504 find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
505 HOST_WIDE_INT size)
507 while (access && (access->offset != offset || access->size != size))
509 struct access *child = access->first_child;
511 while (child && (child->offset + child->size <= offset))
512 child = child->next_sibling;
513 access = child;
516 /* Total scalarization does not replace single field structures with their
517 single field but rather creates an access for them underneath. Look for
518 it. */
519 if (access)
520 while (access->first_child
521 && access->first_child->offset == offset
522 && access->first_child->size == size)
523 access = access->first_child;
525 return access;
528 /* Return the first group representative for DECL or NULL if none exists. */
530 static struct access *
531 get_first_repr_for_decl (tree base)
533 vec<access_p> *access_vec;
535 access_vec = get_base_access_vector (base);
536 if (!access_vec)
537 return NULL;
539 return (*access_vec)[0];
542 /* Find an access representative for the variable BASE and given OFFSET and
543 SIZE. Requires that access trees have already been built. Return NULL if
544 it cannot be found. */
546 static struct access *
547 get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
548 HOST_WIDE_INT size)
550 struct access *access;
552 access = get_first_repr_for_decl (base);
553 while (access && (access->offset + access->size <= offset))
554 access = access->next_grp;
555 if (!access)
556 return NULL;
558 return find_access_in_subtree (access, offset, size);
561 /* Add LINK to the linked list of assign links of RACC. */
563 static void
564 add_link_to_rhs (struct access *racc, struct assign_link *link)
566 gcc_assert (link->racc == racc);
568 if (!racc->first_rhs_link)
570 gcc_assert (!racc->last_rhs_link);
571 racc->first_rhs_link = link;
573 else
574 racc->last_rhs_link->next_rhs = link;
576 racc->last_rhs_link = link;
577 link->next_rhs = NULL;
580 /* Add LINK to the linked list of lhs assign links of LACC. */
582 static void
583 add_link_to_lhs (struct access *lacc, struct assign_link *link)
585 gcc_assert (link->lacc == lacc);
587 if (!lacc->first_lhs_link)
589 gcc_assert (!lacc->last_lhs_link);
590 lacc->first_lhs_link = link;
592 else
593 lacc->last_lhs_link->next_lhs = link;
595 lacc->last_lhs_link = link;
596 link->next_lhs = NULL;
599 /* Move all link structures in their linked list in OLD_ACC to the linked list
600 in NEW_ACC. */
601 static void
602 relink_to_new_repr (struct access *new_acc, struct access *old_acc)
604 if (old_acc->first_rhs_link)
607 if (new_acc->first_rhs_link)
609 gcc_assert (!new_acc->last_rhs_link->next_rhs);
610 gcc_assert (!old_acc->last_rhs_link
611 || !old_acc->last_rhs_link->next_rhs);
613 new_acc->last_rhs_link->next_rhs = old_acc->first_rhs_link;
614 new_acc->last_rhs_link = old_acc->last_rhs_link;
616 else
618 gcc_assert (!new_acc->last_rhs_link);
620 new_acc->first_rhs_link = old_acc->first_rhs_link;
621 new_acc->last_rhs_link = old_acc->last_rhs_link;
623 old_acc->first_rhs_link = old_acc->last_rhs_link = NULL;
625 else
626 gcc_assert (!old_acc->last_rhs_link);
628 if (old_acc->first_lhs_link)
631 if (new_acc->first_lhs_link)
633 gcc_assert (!new_acc->last_lhs_link->next_lhs);
634 gcc_assert (!old_acc->last_lhs_link
635 || !old_acc->last_lhs_link->next_lhs);
637 new_acc->last_lhs_link->next_lhs = old_acc->first_lhs_link;
638 new_acc->last_lhs_link = old_acc->last_lhs_link;
640 else
642 gcc_assert (!new_acc->last_lhs_link);
644 new_acc->first_lhs_link = old_acc->first_lhs_link;
645 new_acc->last_lhs_link = old_acc->last_lhs_link;
647 old_acc->first_lhs_link = old_acc->last_lhs_link = NULL;
649 else
650 gcc_assert (!old_acc->last_lhs_link);
654 /* Add ACCESS to the work queue for propagation of subaccesses from RHS to
655 LHS (which is actually a stack). */
657 static void
658 add_access_to_rhs_work_queue (struct access *access)
660 if (access->first_rhs_link && !access->grp_rhs_queued)
662 gcc_assert (!access->next_rhs_queued);
663 access->next_rhs_queued = rhs_work_queue_head;
664 access->grp_rhs_queued = 1;
665 rhs_work_queue_head = access;
669 /* Add ACCESS to the work queue for propagation of subaccesses from LHS to
670 RHS (which is actually a stack). */
672 static void
673 add_access_to_lhs_work_queue (struct access *access)
675 if (access->first_lhs_link && !access->grp_lhs_queued)
677 gcc_assert (!access->next_lhs_queued);
678 access->next_lhs_queued = lhs_work_queue_head;
679 access->grp_lhs_queued = 1;
680 lhs_work_queue_head = access;
684 /* Pop an access from the work queue for propagating from RHS to LHS, and
685 return it, assuming there is one. */
687 static struct access *
688 pop_access_from_rhs_work_queue (void)
690 struct access *access = rhs_work_queue_head;
692 rhs_work_queue_head = access->next_rhs_queued;
693 access->next_rhs_queued = NULL;
694 access->grp_rhs_queued = 0;
695 return access;
698 /* Pop an access from the work queue for propagating from LHS to RHS, and
699 return it, assuming there is one. */
701 static struct access *
702 pop_access_from_lhs_work_queue (void)
704 struct access *access = lhs_work_queue_head;
706 lhs_work_queue_head = access->next_lhs_queued;
707 access->next_lhs_queued = NULL;
708 access->grp_lhs_queued = 0;
709 return access;
712 /* Allocate necessary structures. */
714 static void
715 sra_initialize (void)
717 candidate_bitmap = BITMAP_ALLOC (NULL);
718 candidates = new hash_table<uid_decl_hasher>
719 (vec_safe_length (cfun->local_decls) / 2);
720 should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
721 cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
722 disqualified_constants = BITMAP_ALLOC (NULL);
723 passed_by_ref_in_call = BITMAP_ALLOC (NULL);
724 gcc_obstack_init (&name_obstack);
725 base_access_vec = new hash_map<tree, auto_vec<access_p> >;
726 memset (&sra_stats, 0, sizeof (sra_stats));
729 /* Deallocate all general structures. */
731 static void
732 sra_deinitialize (void)
734 BITMAP_FREE (candidate_bitmap);
735 delete candidates;
736 candidates = NULL;
737 BITMAP_FREE (should_scalarize_away_bitmap);
738 BITMAP_FREE (cannot_scalarize_away_bitmap);
739 BITMAP_FREE (disqualified_constants);
740 BITMAP_FREE (passed_by_ref_in_call);
741 access_pool.release ();
742 assign_link_pool.release ();
743 obstack_free (&name_obstack, NULL);
745 delete base_access_vec;
748 /* Return true if DECL is a VAR_DECL in the constant pool, false otherwise. */
750 static bool constant_decl_p (tree decl)
752 return VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl);
755 /* Remove DECL from candidates for SRA and write REASON to the dump file if
756 there is one. */
758 static void
759 disqualify_candidate (tree decl, const char *reason)
761 if (bitmap_clear_bit (candidate_bitmap, DECL_UID (decl)))
762 candidates->remove_elt_with_hash (decl, DECL_UID (decl));
763 if (constant_decl_p (decl))
764 bitmap_set_bit (disqualified_constants, DECL_UID (decl));
766 if (dump_file && (dump_flags & TDF_DETAILS))
768 fprintf (dump_file, "! Disqualifying ");
769 print_generic_expr (dump_file, decl);
770 fprintf (dump_file, " - %s\n", reason);
774 /* Return true iff the type contains a field or an element which does not allow
775 scalarization. Use VISITED_TYPES to avoid re-checking already checked
776 (sub-)types. */
778 static bool
779 type_internals_preclude_sra_p_1 (tree type, const char **msg,
780 hash_set<tree> *visited_types)
782 tree fld;
783 tree et;
785 if (visited_types->contains (type))
786 return false;
787 visited_types->add (type);
789 switch (TREE_CODE (type))
791 case RECORD_TYPE:
792 case UNION_TYPE:
793 case QUAL_UNION_TYPE:
794 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
795 if (TREE_CODE (fld) == FIELD_DECL)
797 if (TREE_CODE (fld) == FUNCTION_DECL)
798 continue;
799 tree ft = TREE_TYPE (fld);
801 if (TREE_THIS_VOLATILE (fld))
803 *msg = "volatile structure field";
804 return true;
806 if (!DECL_FIELD_OFFSET (fld))
808 *msg = "no structure field offset";
809 return true;
811 if (!DECL_SIZE (fld))
813 *msg = "zero structure field size";
814 return true;
816 if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
818 *msg = "structure field offset not fixed";
819 return true;
821 if (!tree_fits_uhwi_p (DECL_SIZE (fld)))
823 *msg = "structure field size not fixed";
824 return true;
826 if (!tree_fits_shwi_p (bit_position (fld)))
828 *msg = "structure field size too big";
829 return true;
831 if (AGGREGATE_TYPE_P (ft)
832 && int_bit_position (fld) % BITS_PER_UNIT != 0)
834 *msg = "structure field is bit field";
835 return true;
838 if (AGGREGATE_TYPE_P (ft)
839 && type_internals_preclude_sra_p_1 (ft, msg, visited_types))
840 return true;
843 return false;
845 case ARRAY_TYPE:
846 et = TREE_TYPE (type);
848 if (TYPE_VOLATILE (et))
850 *msg = "element type is volatile";
851 return true;
854 if (AGGREGATE_TYPE_P (et)
855 && type_internals_preclude_sra_p_1 (et, msg, visited_types))
856 return true;
858 return false;
860 default:
861 return false;
865 /* Return true iff the type contains a field or an element which does not allow
866 scalarization. */
868 bool
869 type_internals_preclude_sra_p (tree type, const char **msg)
871 hash_set<tree> visited_types;
872 return type_internals_preclude_sra_p_1 (type, msg, &visited_types);
876 /* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
877 the three fields. Also add it to the vector of accesses corresponding to
878 the base. Finally, return the new access. */
880 static struct access *
881 create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
883 struct access *access = access_pool.allocate ();
885 memset (access, 0, sizeof (struct access));
886 access->base = base;
887 access->offset = offset;
888 access->size = size;
890 base_access_vec->get_or_insert (base).safe_push (access);
892 return access;
895 static bool maybe_add_sra_candidate (tree);
897 /* Create and insert access for EXPR. Return created access, or NULL if it is
898 not possible. Also scan for uses of constant pool as we go along and add
899 to candidates. */
901 static struct access *
902 create_access (tree expr, gimple *stmt, bool write)
904 struct access *access;
905 poly_int64 poffset, psize, pmax_size;
906 tree base = expr;
907 bool reverse, unscalarizable_region = false;
909 base = get_ref_base_and_extent (expr, &poffset, &psize, &pmax_size,
910 &reverse);
912 /* For constant-pool entries, check we can substitute the constant value. */
913 if (constant_decl_p (base)
914 && !bitmap_bit_p (disqualified_constants, DECL_UID (base)))
916 if (expr != base
917 && !is_gimple_reg_type (TREE_TYPE (expr))
918 && dump_file && (dump_flags & TDF_DETAILS))
920 /* This occurs in Ada with accesses to ARRAY_RANGE_REFs,
921 and elements of multidimensional arrays (which are
922 multi-element arrays in their own right). */
923 fprintf (dump_file, "Allowing non-reg-type load of part"
924 " of constant-pool entry: ");
925 print_generic_expr (dump_file, expr);
927 maybe_add_sra_candidate (base);
930 if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
931 return NULL;
933 if (write && TREE_READONLY (base))
935 disqualify_candidate (base, "Encountered a store to a read-only decl.");
936 return NULL;
939 HOST_WIDE_INT offset, size, max_size;
940 if (!poffset.is_constant (&offset)
941 || !psize.is_constant (&size)
942 || !pmax_size.is_constant (&max_size))
944 disqualify_candidate (base, "Encountered a polynomial-sized access.");
945 return NULL;
948 if (size != max_size)
950 size = max_size;
951 unscalarizable_region = true;
953 if (size == 0)
954 return NULL;
955 if (offset < 0)
957 disqualify_candidate (base, "Encountered a negative offset access.");
958 return NULL;
960 if (size < 0)
962 disqualify_candidate (base, "Encountered an unconstrained access.");
963 return NULL;
965 if (offset + size > tree_to_shwi (DECL_SIZE (base)))
967 disqualify_candidate (base, "Encountered an access beyond the base.");
968 return NULL;
971 access = create_access_1 (base, offset, size);
972 access->expr = expr;
973 access->type = TREE_TYPE (expr);
974 access->write = write;
975 access->grp_unscalarizable_region = unscalarizable_region;
976 access->stmt = stmt;
977 access->reverse = reverse;
979 return access;
983 /* Return true iff TYPE is scalarizable - i.e. a RECORD_TYPE or fixed-length
984 ARRAY_TYPE with fields that are either of gimple register types (excluding
985 bit-fields) or (recursively) scalarizable types. CONST_DECL must be true if
986 we are considering a decl from constant pool. If it is false, char arrays
987 will be refused. */
989 static bool
990 scalarizable_type_p (tree type, bool const_decl)
992 if (is_gimple_reg_type (type))
993 return true;
994 if (type_contains_placeholder_p (type))
995 return false;
997 bool have_predecessor_field = false;
998 HOST_WIDE_INT prev_pos = 0;
1000 switch (TREE_CODE (type))
1002 case RECORD_TYPE:
1003 for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1004 if (TREE_CODE (fld) == FIELD_DECL)
1006 tree ft = TREE_TYPE (fld);
1008 if (zerop (DECL_SIZE (fld)))
1009 continue;
1011 HOST_WIDE_INT pos = int_bit_position (fld);
1012 if (have_predecessor_field
1013 && pos <= prev_pos)
1014 return false;
1016 have_predecessor_field = true;
1017 prev_pos = pos;
1019 if (DECL_BIT_FIELD (fld))
1020 return false;
1022 if (!scalarizable_type_p (ft, const_decl))
1023 return false;
1026 return true;
1028 case ARRAY_TYPE:
1030 HOST_WIDE_INT min_elem_size;
1031 if (const_decl)
1032 min_elem_size = 0;
1033 else
1034 min_elem_size = BITS_PER_UNIT;
1036 if (TYPE_DOMAIN (type) == NULL_TREE
1037 || !tree_fits_shwi_p (TYPE_SIZE (type))
1038 || !tree_fits_shwi_p (TYPE_SIZE (TREE_TYPE (type)))
1039 || (tree_to_shwi (TYPE_SIZE (TREE_TYPE (type))) <= min_elem_size)
1040 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
1041 return false;
1042 if (tree_to_shwi (TYPE_SIZE (type)) == 0
1043 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE)
1044 /* Zero-element array, should not prevent scalarization. */
1046 else if ((tree_to_shwi (TYPE_SIZE (type)) <= 0)
1047 || !tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
1048 /* Variable-length array, do not allow scalarization. */
1049 return false;
1051 tree elem = TREE_TYPE (type);
1052 if (!scalarizable_type_p (elem, const_decl))
1053 return false;
1054 return true;
1056 default:
1057 return false;
1061 /* Return true if REF has a VIEW_CONVERT_EXPR somewhere in it. */
1063 static inline bool
1064 contains_view_convert_expr_p (const_tree ref)
1066 while (handled_component_p (ref))
1068 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
1069 return true;
1070 ref = TREE_OPERAND (ref, 0);
1073 return false;
1076 /* Return true if REF contains a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
1077    bit-field field declaration.  If TYPE_CHANGING_P is non-NULL, the bool
1078 it points to will be set if REF contains any of the above or a MEM_REF
1079 expression that effectively performs type conversion. */
1081 static bool
1082 contains_vce_or_bfcref_p (const_tree ref, bool *type_changing_p = NULL)
1084 while (handled_component_p (ref))
1086 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
1087 || (TREE_CODE (ref) == COMPONENT_REF
1088 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
1090 if (type_changing_p)
1091 *type_changing_p = true;
1092 return true;
1094 ref = TREE_OPERAND (ref, 0);
1097 if (!type_changing_p
1098 || TREE_CODE (ref) != MEM_REF
1099 || TREE_CODE (TREE_OPERAND (ref, 0)) != ADDR_EXPR)
1100 return false;
1102 tree mem = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
1103 if (TYPE_MAIN_VARIANT (TREE_TYPE (ref))
1104 != TYPE_MAIN_VARIANT (TREE_TYPE (mem)))
1105 *type_changing_p = true;
1107 return false;
1110 /* Search the given tree for a declaration by skipping handled components and
1111 exclude it from the candidates. */
1113 static void
1114 disqualify_base_of_expr (tree t, const char *reason)
1116 t = get_base_address (t);
1117 if (t && DECL_P (t))
1118 disqualify_candidate (t, reason);
1121 /* Return true if the BIT_FIELD_REF read EXPR is handled by SRA. */
1123 static bool
1124 sra_handled_bf_read_p (tree expr)
1126 uint64_t size, offset;
1127 if (bit_field_size (expr).is_constant (&size)
1128 && bit_field_offset (expr).is_constant (&offset)
1129 && size % BITS_PER_UNIT == 0
1130 && offset % BITS_PER_UNIT == 0
1131 && pow2p_hwi (size))
1132 return true;
1133 return false;
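/* For illustration, a BIT_FIELD_REF read of 32 bits at bit offset 64 satisfies
   the conditions above (byte-aligned size and offset, power-of-two size),
   whereas a 24-bit read or one starting at bit offset 3 does not.  These
   numbers are only examples of the checks, not an exhaustive list.  */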
1136 /* Scan expression EXPR and create access structures for all accesses to
1137 candidates for scalarization. Return the created access or NULL if none is
1138 created. */
1140 static struct access *
1141 build_access_from_expr_1 (tree expr, gimple *stmt, bool write)
1143 /* We only allow ADDR_EXPRs in arguments of function calls and those must
1144 have been dealt with in build_access_from_call_arg. Any other address
1145 taking should have been caught by scan_visit_addr. */
1146 if (TREE_CODE (expr) == ADDR_EXPR)
1148 tree base = get_base_address (TREE_OPERAND (expr, 0));
1149 gcc_assert (!DECL_P (base)
1150 || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)));
1151 return NULL;
1154 struct access *ret = NULL;
1155 bool partial_ref;
1157 if ((TREE_CODE (expr) == BIT_FIELD_REF
1158 && (write || !sra_handled_bf_read_p (expr)))
1159 || TREE_CODE (expr) == IMAGPART_EXPR
1160 || TREE_CODE (expr) == REALPART_EXPR)
1162 expr = TREE_OPERAND (expr, 0);
1163 partial_ref = true;
1165 else
1166 partial_ref = false;
1168 if (storage_order_barrier_p (expr))
1170 disqualify_base_of_expr (expr, "storage order barrier.");
1171 return NULL;
1174 /* We need to dive through V_C_Es in order to get the size of its parameter
1175 and not the result type. Ada produces such statements. We are also
1176 capable of handling the topmost V_C_E but not any of those buried in other
1177 handled components. */
1178 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
1179 expr = TREE_OPERAND (expr, 0);
1181 if (contains_view_convert_expr_p (expr))
1183 disqualify_base_of_expr (expr, "V_C_E under a different handled "
1184 "component.");
1185 return NULL;
1187 if (TREE_THIS_VOLATILE (expr))
1189 disqualify_base_of_expr (expr, "part of a volatile reference.");
1190 return NULL;
1193 switch (TREE_CODE (expr))
1195 case MEM_REF:
1196 if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR)
1197 return NULL;
1198 /* fall through */
1199 case VAR_DECL:
1200 case PARM_DECL:
1201 case RESULT_DECL:
1202 case COMPONENT_REF:
1203 case ARRAY_REF:
1204 case ARRAY_RANGE_REF:
1205 case BIT_FIELD_REF:
1206 ret = create_access (expr, stmt, write);
1207 break;
1209 default:
1210 break;
1213 if (write && partial_ref && ret)
1214 ret->grp_partial_lhs = 1;
1216 return ret;
1219 /* Scan expression EXPR and create access structures for all accesses to
1220 candidates for scalarization. Return true if any access has been inserted.
1221 STMT must be the statement from which the expression is taken, WRITE must be
1222 true if the expression is a store and false otherwise. */
1224 static bool
1225 build_access_from_expr (tree expr, gimple *stmt, bool write)
1227 struct access *access;
1229 access = build_access_from_expr_1 (expr, stmt, write);
1230 if (access)
1232       /* This means the aggregate is accessed as a whole in a way other than an
1233 assign statement and thus cannot be removed even if we had a scalar
1234 replacement for everything. */
1235 if (cannot_scalarize_away_bitmap)
1236 bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
1237 return true;
1239 return false;
1242 enum out_edge_check { SRA_OUTGOING_EDGES_UNCHECKED, SRA_OUTGOING_EDGES_OK,
1243 SRA_OUTGOING_EDGES_FAIL };
1245 /* Return true if STMT terminates BB and there is an abnormal edge going out of
1246 the BB and remember the decision in OE_CHECK. */
1248 static bool
1249 abnormal_edge_after_stmt_p (gimple *stmt, enum out_edge_check *oe_check)
1251 if (*oe_check == SRA_OUTGOING_EDGES_OK)
1252 return false;
1253 if (*oe_check == SRA_OUTGOING_EDGES_FAIL)
1254 return true;
1255 if (stmt_ends_bb_p (stmt))
1257 edge e;
1258 edge_iterator ei;
1259 FOR_EACH_EDGE (e, ei, gimple_bb (stmt)->succs)
1260 if (e->flags & EDGE_ABNORMAL)
1262 *oe_check = SRA_OUTGOING_EDGES_FAIL;
1263 return true;
1266 *oe_check = SRA_OUTGOING_EDGES_OK;
1267 return false;
1270 /* Scan expression EXPR which is an argument of a call and create access
1271 structures for all accesses to candidates for scalarization. Return true
1272 if any access has been inserted. STMT must be the statement from which the
1273 expression is taken. CAN_BE_RETURNED must be true if call argument flags
1274 do not rule out that the argument is directly returned. OE_CHECK is used
1275 to remember result of a test for abnormal outgoing edges after this
1276 statement. */
1278 static bool
1279 build_access_from_call_arg (tree expr, gimple *stmt, bool can_be_returned,
1280 enum out_edge_check *oe_check)
1282 if (TREE_CODE (expr) == ADDR_EXPR)
1284 tree base = get_base_address (TREE_OPERAND (expr, 0));
1286 if (can_be_returned)
1288 disqualify_base_of_expr (base, "Address possibly returned, "
1289 "leading to an alis SRA may not know.");
1290 return false;
1292 if (abnormal_edge_after_stmt_p (stmt, oe_check))
1294 disqualify_base_of_expr (base, "May lead to need to add statements "
1295 "to abnormal edge.");
1296 return false;
1299 bool read = build_access_from_expr (base, stmt, false);
1300 bool write = build_access_from_expr (base, stmt, true);
1301 if (read || write)
1303 if (dump_file && (dump_flags & TDF_DETAILS))
1305 fprintf (dump_file, "Allowed ADDR_EXPR of ");
1306 print_generic_expr (dump_file, base);
1307 fprintf (dump_file, " because of ");
1308 print_gimple_stmt (dump_file, stmt, 0);
1309 fprintf (dump_file, "\n");
1311 bitmap_set_bit (passed_by_ref_in_call, DECL_UID (base));
1312 return true;
1314 else
1315 return false;
1318 return build_access_from_expr (expr, stmt, false);
1322 /* Return the single non-EH successor edge of BB or NULL if there is none or
1323 more than one. */
1325 static edge
1326 single_non_eh_succ (basic_block bb)
1328 edge e, res = NULL;
1329 edge_iterator ei;
1331 FOR_EACH_EDGE (e, ei, bb->succs)
1332 if (!(e->flags & EDGE_EH))
1334 if (res)
1335 return NULL;
1336 res = e;
1339 return res;
1342 /* Disqualify LHS and RHS for scalarization if STMT has to terminate its BB and
1343    there is no alternative spot to put statements SRA might need to
1344 generate after it. The spot we are looking for is an edge leading to a
1345 single non-EH successor, if it exists and is indeed single. RHS may be
1346 NULL, in that case ignore it. */
1348 static bool
1349 disqualify_if_bad_bb_terminating_stmt (gimple *stmt, tree lhs, tree rhs)
1351 if (stmt_ends_bb_p (stmt))
1353 if (single_non_eh_succ (gimple_bb (stmt)))
1354 return false;
1356 disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
1357 if (rhs)
1358 disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
1359 return true;
1361 return false;
1364 /* Return true if the nature of BASE is such that it contains data even if
1365 there is no write to it in the function. */
1367 static bool
1368 comes_initialized_p (tree base)
1370 return TREE_CODE (base) == PARM_DECL || constant_decl_p (base);
1373 /* Scan expressions occurring in STMT, create access structures for all accesses
1374 to candidates for scalarization and remove those candidates which occur in
1375 statements or expressions that prevent them from being split apart. Return
1376 true if any access has been inserted. */
1378 static bool
1379 build_accesses_from_assign (gimple *stmt)
1381 tree lhs, rhs;
1382 struct access *lacc, *racc;
1384 if (!gimple_assign_single_p (stmt)
1385 /* Scope clobbers don't influence scalarization. */
1386 || gimple_clobber_p (stmt))
1387 return false;
1389 lhs = gimple_assign_lhs (stmt);
1390 rhs = gimple_assign_rhs1 (stmt);
1392 if (disqualify_if_bad_bb_terminating_stmt (stmt, lhs, rhs))
1393 return false;
1395 racc = build_access_from_expr_1 (rhs, stmt, false);
1396 lacc = build_access_from_expr_1 (lhs, stmt, true);
1398 if (lacc)
1400 lacc->grp_assignment_write = 1;
1401 if (storage_order_barrier_p (rhs))
1402 lacc->grp_unscalarizable_region = 1;
1404 if (should_scalarize_away_bitmap && !is_gimple_reg_type (lacc->type))
1406 bool type_changing_p = false;
1407 contains_vce_or_bfcref_p (lhs, &type_changing_p);
1408 if (type_changing_p)
1409 bitmap_set_bit (cannot_scalarize_away_bitmap,
1410 DECL_UID (lacc->base));
1414 if (racc)
1416 racc->grp_assignment_read = 1;
1417 if (should_scalarize_away_bitmap && !is_gimple_reg_type (racc->type))
1419 bool type_changing_p = false;
1420 contains_vce_or_bfcref_p (rhs, &type_changing_p);
1422 if (type_changing_p || gimple_has_volatile_ops (stmt))
1423 bitmap_set_bit (cannot_scalarize_away_bitmap,
1424 DECL_UID (racc->base));
1425 else
1426 bitmap_set_bit (should_scalarize_away_bitmap,
1427 DECL_UID (racc->base));
1429 if (storage_order_barrier_p (lhs))
1430 racc->grp_unscalarizable_region = 1;
1433 if (lacc && racc
1434 && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1435 && !lacc->grp_unscalarizable_region
1436 && !racc->grp_unscalarizable_region
1437 && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
1438 && lacc->size == racc->size
1439 && useless_type_conversion_p (lacc->type, racc->type))
1441 struct assign_link *link;
1443 link = assign_link_pool.allocate ();
1444 memset (link, 0, sizeof (struct assign_link));
1446 link->lacc = lacc;
1447 link->racc = racc;
1448 add_link_to_rhs (racc, link);
1449 add_link_to_lhs (lacc, link);
1450 add_access_to_rhs_work_queue (racc);
1451 add_access_to_lhs_work_queue (lacc);
1453 /* Let's delay marking the areas as written until propagation of accesses
1454 across link, unless the nature of rhs tells us that its data comes
1455 from elsewhere. */
1456 if (!comes_initialized_p (racc->base))
1457 lacc->write = false;
1460 return lacc || racc;
1463 /* Callback of walk_stmt_load_store_addr_ops visit_addr used to detect taking
1464    addresses of candidates at places which are not call arguments.  Such
1465    candidates are disqualified from SRA.  This also applies to GIMPLE_ASM
1466    operands with memory constraints which cannot be scalarized.  */
1468 static bool
1469 scan_visit_addr (gimple *, tree op, tree, void *)
1471 op = get_base_address (op);
1472 if (op
1473 && DECL_P (op))
1474 disqualify_candidate (op, "Address taken in a non-call-argument context.");
1476 return false;
1479 /* Scan function and look for interesting expressions and create access
1480 structures for them. Return true iff any access is created. */
1482 static bool
1483 scan_function (void)
1485 basic_block bb;
1486 bool ret = false;
1488 FOR_EACH_BB_FN (bb, cfun)
1490 gimple_stmt_iterator gsi;
1491 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1492 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), NULL, NULL, NULL,
1493 scan_visit_addr);
1495 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1497 gimple *stmt = gsi_stmt (gsi);
1498 tree t;
1499 unsigned i;
1501 if (gimple_code (stmt) != GIMPLE_CALL)
1502 walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
1503 scan_visit_addr);
1505 switch (gimple_code (stmt))
1507 case GIMPLE_RETURN:
1508 t = gimple_return_retval (as_a <greturn *> (stmt));
1509 if (t != NULL_TREE)
1510 ret |= build_access_from_expr (t, stmt, false);
1511 break;
1513 case GIMPLE_ASSIGN:
1514 ret |= build_accesses_from_assign (stmt);
1515 break;
1517 case GIMPLE_CALL:
1519 enum out_edge_check oe_check = SRA_OUTGOING_EDGES_UNCHECKED;
1520 gcall *call = as_a <gcall *> (stmt);
1521 for (i = 0; i < gimple_call_num_args (call); i++)
1523 bool can_be_returned;
1524 if (gimple_call_lhs (call))
1526 int af = gimple_call_arg_flags (call, i);
1527 can_be_returned = !(af & EAF_NOT_RETURNED_DIRECTLY);
1529 else
1530 can_be_returned = false;
1531	    ret |= build_access_from_call_arg (gimple_call_arg (call, i),
1533 stmt, can_be_returned,
1534 &oe_check);
1536 if (gimple_call_chain(stmt))
1537 ret |= build_access_from_call_arg (gimple_call_chain(call),
1538 stmt, false, &oe_check);
1541 t = gimple_call_lhs (stmt);
1542 if (t && !disqualify_if_bad_bb_terminating_stmt (stmt, t, NULL))
1544 /* If the STMT is a call to DEFERRED_INIT, avoid setting
1545 cannot_scalarize_away_bitmap. */
1546 if (gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
1547 ret |= !!build_access_from_expr_1 (t, stmt, true);
1548 else
1549 ret |= build_access_from_expr (t, stmt, true);
1551 break;
1553 case GIMPLE_ASM:
1555 gasm *asm_stmt = as_a <gasm *> (stmt);
1556 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
1558 t = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
1559 ret |= build_access_from_expr (t, asm_stmt, false);
1561 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
1563 t = TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
1564 ret |= build_access_from_expr (t, asm_stmt, true);
1567 break;
1569 default:
1570 break;
1575 return ret;
1578 /* Helper of QSORT function. There are pointers to accesses in the array. An
1579 access is considered smaller than another if it has smaller offset or if the
1580    offsets are the same but its size is bigger. */
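/* For instance (a hypothetical illustration of the rule above), accesses with
   (offset, size) pairs (0, 64), (32, 32) and (0, 32) would be ordered as
   (0, 64), (0, 32), (32, 32): smaller offsets first and, for equal offsets,
   the bigger access first.  */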
1582 static int
1583 compare_access_positions (const void *a, const void *b)
1585 const access_p *fp1 = (const access_p *) a;
1586 const access_p *fp2 = (const access_p *) b;
1587 const access_p f1 = *fp1;
1588 const access_p f2 = *fp2;
1590 if (f1->offset != f2->offset)
1591 return f1->offset < f2->offset ? -1 : 1;
1593 if (f1->size == f2->size)
1595 if (f1->type == f2->type)
1596 return 0;
1597 /* Put any non-aggregate type before any aggregate type. */
1598 else if (!is_gimple_reg_type (f1->type)
1599 && is_gimple_reg_type (f2->type))
1600 return 1;
1601 else if (is_gimple_reg_type (f1->type)
1602 && !is_gimple_reg_type (f2->type))
1603 return -1;
1604 /* Put any complex or vector type before any other scalar type. */
1605 else if (TREE_CODE (f1->type) != COMPLEX_TYPE
1606 && TREE_CODE (f1->type) != VECTOR_TYPE
1607 && (TREE_CODE (f2->type) == COMPLEX_TYPE
1608 || VECTOR_TYPE_P (f2->type)))
1609 return 1;
1610 else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
1611 || VECTOR_TYPE_P (f1->type))
1612 && TREE_CODE (f2->type) != COMPLEX_TYPE
1613 && TREE_CODE (f2->type) != VECTOR_TYPE)
1614 return -1;
1615 /* Put any integral type before any non-integral type. When splicing, we
1616 make sure that those with insufficient precision and occupying the
1617 same space are not scalarized. */
1618 else if (INTEGRAL_TYPE_P (f1->type)
1619 && !INTEGRAL_TYPE_P (f2->type))
1620 return -1;
1621 else if (!INTEGRAL_TYPE_P (f1->type)
1622 && INTEGRAL_TYPE_P (f2->type))
1623 return 1;
1624 /* Put the integral type with the bigger precision first. */
1625 else if (INTEGRAL_TYPE_P (f1->type)
1626 && INTEGRAL_TYPE_P (f2->type)
1627 && (TYPE_PRECISION (f2->type) != TYPE_PRECISION (f1->type)))
1628 return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
1629 /* Stabilize the sort. */
1630 return TYPE_UID (f1->type) - TYPE_UID (f2->type);
1633 /* We want the bigger accesses first, thus the opposite operator in the next
1634 line: */
1635 return f1->size > f2->size ? -1 : 1;
1639 /* Append a name of the declaration to the name obstack. A helper function for
1640 make_fancy_name. */
1642 static void
1643 make_fancy_decl_name (tree decl)
1645 char buffer[32];
1647 tree name = DECL_NAME (decl);
1648 if (name)
1649 obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
1650 IDENTIFIER_LENGTH (name));
1651 else
1653 sprintf (buffer, "D%u", DECL_UID (decl));
1654 obstack_grow (&name_obstack, buffer, strlen (buffer));
1658 /* Helper for make_fancy_name. */
1660 static void
1661 make_fancy_name_1 (tree expr)
1663 char buffer[32];
1664 tree index;
1666 if (DECL_P (expr))
1668 make_fancy_decl_name (expr);
1669 return;
1672 switch (TREE_CODE (expr))
1674 case COMPONENT_REF:
1675 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1676 obstack_1grow (&name_obstack, '$');
1677 make_fancy_decl_name (TREE_OPERAND (expr, 1));
1678 break;
1680 case ARRAY_REF:
1681 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1682 obstack_1grow (&name_obstack, '$');
1683 /* Arrays with only one element may not have a constant as their
1684 index. */
1685 index = TREE_OPERAND (expr, 1);
1686 if (TREE_CODE (index) != INTEGER_CST)
1687 break;
1688 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
1689 obstack_grow (&name_obstack, buffer, strlen (buffer));
1690 break;
1692 case BIT_FIELD_REF:
1693 case ADDR_EXPR:
1694 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1695 break;
1697 case MEM_REF:
1698 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1699 if (!integer_zerop (TREE_OPERAND (expr, 1)))
1701 obstack_1grow (&name_obstack, '$');
1702 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
1703 TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
1704 obstack_grow (&name_obstack, buffer, strlen (buffer));
1706 break;
1708 case REALPART_EXPR:
1709 case IMAGPART_EXPR:
1710 gcc_unreachable (); /* we treat these as scalars. */
1711 break;
1712 default:
1713 break;
1717 /* Create a human-readable name for the replacement variable of ACCESS. */
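/* For example, for an expression like s.in[2].x the name built below would be
   roughly "s$in$2$x"; this is only an illustration, the exact spelling depends
   on which components appear in the expression.  */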
1719 static char *
1720 make_fancy_name (tree expr)
1722 make_fancy_name_1 (expr);
1723 obstack_1grow (&name_obstack, '\0');
1724 return XOBFINISH (&name_obstack, char *);
1727 /* Construct a MEM_REF that would reference a part of aggregate BASE of type
1728 EXP_TYPE at the given OFFSET and with storage order REVERSE. If BASE is
1729 something for which get_addr_base_and_unit_offset returns NULL, gsi must
1730 be non-NULL and is used to insert new statements either before or below
1731 the current one as specified by INSERT_AFTER. This function is not capable
1732 of handling bitfields. */
1734 tree
1735 build_ref_for_offset (location_t loc, tree base, poly_int64 offset,
1736 bool reverse, tree exp_type, gimple_stmt_iterator *gsi,
1737 bool insert_after)
1739 tree prev_base = base;
1740 tree off;
1741 tree mem_ref;
1742 poly_int64 base_offset;
1743 unsigned HOST_WIDE_INT misalign;
1744 unsigned int align;
1746 /* Preserve address-space information. */
1747 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1748 if (as != TYPE_ADDR_SPACE (exp_type))
1749 exp_type = build_qualified_type (exp_type,
1750 TYPE_QUALS (exp_type)
1751 | ENCODE_QUAL_ADDR_SPACE (as));
1753 poly_int64 byte_offset = exact_div (offset, BITS_PER_UNIT);
1754 get_object_alignment_1 (base, &align, &misalign);
1755 base = get_addr_base_and_unit_offset (base, &base_offset);
1757 /* get_addr_base_and_unit_offset returns NULL for references with a variable
1758 offset such as array[var_index]. */
1759 if (!base)
1761 gassign *stmt;
1762 tree tmp, addr;
1764 gcc_checking_assert (gsi);
1765 tmp = make_ssa_name (build_pointer_type (TREE_TYPE (prev_base)));
1766 addr = build_fold_addr_expr (unshare_expr (prev_base));
1767 STRIP_USELESS_TYPE_CONVERSION (addr);
1768 stmt = gimple_build_assign (tmp, addr);
1769 gimple_set_location (stmt, loc);
1770 if (insert_after)
1771 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
1772 else
1773 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1775 off = build_int_cst (reference_alias_ptr_type (prev_base), byte_offset);
1776 base = tmp;
1778 else if (TREE_CODE (base) == MEM_REF)
1780 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1781 base_offset + byte_offset);
1782 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1783 base = unshare_expr (TREE_OPERAND (base, 0));
1785 else
1787 off = build_int_cst (reference_alias_ptr_type (prev_base),
1788 base_offset + byte_offset);
1789 base = build_fold_addr_expr (unshare_expr (base));
1792 unsigned int align_bound = known_alignment (misalign + offset);
1793 if (align_bound != 0)
1794 align = MIN (align, align_bound);
1795 if (align != TYPE_ALIGN (exp_type))
1796 exp_type = build_aligned_type (exp_type, align);
1798 mem_ref = fold_build2_loc (loc, MEM_REF, exp_type, base, off);
1799 REF_REVERSE_STORAGE_ORDER (mem_ref) = reverse;
1800 if (TREE_THIS_VOLATILE (prev_base))
1801 TREE_THIS_VOLATILE (mem_ref) = 1;
1802 if (TREE_SIDE_EFFECTS (prev_base))
1803 TREE_SIDE_EFFECTS (mem_ref) = 1;
1804 return mem_ref;
1807 /* Construct and return a memory reference that is equal to a portion of
1808 MODEL->expr but is based on BASE. If this cannot be done, return NULL. */
1810 static tree
1811 build_reconstructed_reference (location_t, tree base, struct access *model)
1813 tree expr = model->expr;
1814 /* We have to make sure to start just below the outermost union. */
1815 tree start_expr = expr;
1816 while (handled_component_p (expr))
1818 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == UNION_TYPE)
1819 start_expr = expr;
1820 expr = TREE_OPERAND (expr, 0);
1823 expr = start_expr;
1824 tree prev_expr = NULL_TREE;
1825 while (!types_compatible_p (TREE_TYPE (expr), TREE_TYPE (base)))
1827 if (!handled_component_p (expr))
1828 return NULL_TREE;
1829 prev_expr = expr;
1830 expr = TREE_OPERAND (expr, 0);
1833 /* Guard against broken VIEW_CONVERT_EXPRs... */
1834 if (!prev_expr)
1835 return NULL_TREE;
1837 TREE_OPERAND (prev_expr, 0) = base;
1838 tree ref = unshare_expr (model->expr);
1839 TREE_OPERAND (prev_expr, 0) = expr;
1840 return ref;
1843 /* Construct a memory reference to a part of an aggregate BASE at the given
1844 OFFSET and of the same type as MODEL. In case this is a reference to a
1845 bit-field, the function will replicate the last component_ref of model's
1846 expr to access it. INSERT_AFTER and GSI have the same meaning as in
1847 build_ref_for_offset, furthermore, when GSI is NULL, the function expects
1848 that it re-builds the entire reference from a DECL to the final access and
1849 so will create a MEM_REF when OFFSET does not exactly match offset of
1850 MODEL. */
1852 static tree
1853 build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1854 struct access *model, gimple_stmt_iterator *gsi,
1855 bool insert_after)
1857 gcc_assert (offset >= 0);
1858 if (TREE_CODE (model->expr) == COMPONENT_REF
1859 && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
1861 /* This access represents a bit-field. */
1862 tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);
1864 offset -= int_bit_position (fld);
1865 exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
1866 t = build_ref_for_offset (loc, base, offset, model->reverse, exp_type,
1867 gsi, insert_after);
1868 /* The flag will be set on the record type. */
1869 REF_REVERSE_STORAGE_ORDER (t) = 0;
1870 return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
1871 NULL_TREE);
1873 else
1875 tree res;
1876 if (model->grp_same_access_path
1877 && !TREE_THIS_VOLATILE (base)
1878 && (TYPE_ADDR_SPACE (TREE_TYPE (base))
1879 == TYPE_ADDR_SPACE (TREE_TYPE (model->expr)))
1880 && (offset == model->offset
1881 || (gsi && offset <= model->offset))
1882 /* build_reconstructed_reference can still fail if we have already
1883 massaged BASE because of another type incompatibility. */
1884 && (res = build_reconstructed_reference (loc, base, model)))
1885 return res;
1886 else
1887 return build_ref_for_offset (loc, base, offset, model->reverse,
1888 model->type, gsi, insert_after);
1892 /* Attempt to build a memory reference that we could put into a gimple
1893 debug_bind statement. Similar to build_ref_for_model but punts if it has to
1894 create statements and returns NULL instead. This function also ignores
1895 alignment issues and so its results should never end up in non-debug
1896 statements. */
1898 static tree
1899 build_debug_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1900 struct access *model)
1902 poly_int64 base_offset;
1903 tree off;
1905 if (TREE_CODE (model->expr) == COMPONENT_REF
1906 && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
1907 return NULL_TREE;
1909 base = get_addr_base_and_unit_offset (base, &base_offset);
1910 if (!base)
1911 return NULL_TREE;
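/* OFFSET is in bits while MEM_REF offsets are in bytes, hence the divisions
   by BITS_PER_UNIT below. */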
1912 if (TREE_CODE (base) == MEM_REF)
1914 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1915 base_offset + offset / BITS_PER_UNIT);
1916 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1917 base = unshare_expr (TREE_OPERAND (base, 0));
1919 else
1921 off = build_int_cst (reference_alias_ptr_type (base),
1922 base_offset + offset / BITS_PER_UNIT);
1923 base = build_fold_addr_expr (unshare_expr (base));
1926 return fold_build2_loc (loc, MEM_REF, model->type, base, off);
1929 /* Construct a memory reference consisting of component_refs and array_refs to
1930 a part of an aggregate *RES (which is of type TYPE). The requested part
1931 should have type EXP_TYPE and be at the given OFFSET. This function might not
1932 succeed, it returns true when it does and only then *RES points to something
1933 meaningful. This function should be used only to build expressions that we
1934 might need to present to the user (e.g. in warnings). In all other situations,
1935 build_ref_for_model or build_ref_for_offset should be used instead. */
1937 static bool
1938 build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
1939 tree exp_type)
1941 while (1)
1943 tree fld;
1944 tree tr_size, index, minidx;
1945 HOST_WIDE_INT el_size;
1947 if (offset == 0 && exp_type
1948 && types_compatible_p (exp_type, type))
1949 return true;
1951 switch (TREE_CODE (type))
1953 case UNION_TYPE:
1954 case QUAL_UNION_TYPE:
1955 case RECORD_TYPE:
1956 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1958 HOST_WIDE_INT pos, size;
1959 tree tr_pos, expr, *expr_ptr;
1961 if (TREE_CODE (fld) != FIELD_DECL)
1962 continue;
1964 tr_pos = bit_position (fld);
1965 if (!tr_pos || !tree_fits_uhwi_p (tr_pos))
1966 continue;
1967 pos = tree_to_uhwi (tr_pos);
1968 gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
1969 tr_size = DECL_SIZE (fld);
1970 if (!tr_size || !tree_fits_uhwi_p (tr_size))
1971 continue;
1972 size = tree_to_uhwi (tr_size);
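/* A zero-sized field can only match OFFSET exactly; any other field must
   actually contain OFFSET. */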
1973 if (size == 0)
1975 if (pos != offset)
1976 continue;
1978 else if (pos > offset || (pos + size) <= offset)
1979 continue;
1981 expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
1982 NULL_TREE);
1983 expr_ptr = &expr;
1984 if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
1985 offset - pos, exp_type))
1987 *res = expr;
1988 return true;
1991 return false;
1993 case ARRAY_TYPE:
1994 tr_size = TYPE_SIZE (TREE_TYPE (type));
1995 if (!tr_size || !tree_fits_uhwi_p (tr_size))
1996 return false;
1997 el_size = tree_to_uhwi (tr_size);
1999 minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
2000 if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
2001 return false;
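/* The element index is OFFSET divided by the element size, shifted by the
   lower bound of the array domain when that bound is not zero. */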
2002 index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
2003 if (!integer_zerop (minidx))
2004 index = int_const_binop (PLUS_EXPR, index, minidx);
2005 *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
2006 NULL_TREE, NULL_TREE);
2007 offset = offset % el_size;
2008 type = TREE_TYPE (type);
2009 break;
2011 default:
2012 if (offset != 0)
2013 return false;
2015 if (exp_type)
2016 return false;
2017 else
2018 return true;
2023 /* Print a message to the dump file explaining why a variable was rejected. */
2025 static void
2026 reject (tree var, const char *msg)
2028 if (dump_file && (dump_flags & TDF_DETAILS))
2030 fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
2031 print_generic_expr (dump_file, var);
2032 fprintf (dump_file, "\n");
2036 /* Add VAR to the SRA candidates if it is suitable and return true if so. */
2038 static bool
2039 maybe_add_sra_candidate (tree var)
2041 tree type = TREE_TYPE (var);
2042 const char *msg;
2043 tree_node **slot;
2045 if (!AGGREGATE_TYPE_P (type))
2047 reject (var, "not aggregate");
2048 return false;
2051 if ((is_global_var (var)
2052 /* There are cases where non-addressable variables fail the
2053 pt_solutions_check test, e.g. in gcc.dg/uninit-40.c. */
2054 || (TREE_ADDRESSABLE (var)
2055 && pt_solution_includes (&cfun->gimple_df->escaped_return, var))
2056 || (TREE_CODE (var) == RESULT_DECL
2057 && !DECL_BY_REFERENCE (var)
2058 && aggregate_value_p (var, current_function_decl)))
2059 /* Allow constant-pool entries that "need to live in memory". */
2060 && !constant_decl_p (var))
2062 reject (var, "needs to live in memory and escapes or global");
2063 return false;
2065 if (TREE_THIS_VOLATILE (var))
2067 reject (var, "is volatile");
2068 return false;
2070 if (!COMPLETE_TYPE_P (type))
2072 reject (var, "has incomplete type");
2073 return false;
2075 if (!tree_fits_shwi_p (TYPE_SIZE (type)))
2077 reject (var, "type size not fixed");
2078 return false;
2080 if (tree_to_shwi (TYPE_SIZE (type)) == 0)
2082 reject (var, "type size is zero");
2083 return false;
2085 if (type_internals_preclude_sra_p (type, &msg))
2087 reject (var, msg);
2088 return false;
2090 if (/* Fix for PR 41089. tree-stdarg.cc needs to have va_lists intact but
2091 we also want to schedule it rather late. Thus we ignore it in
2092 the early pass. */
2093 (sra_mode == SRA_MODE_EARLY_INTRA
2094 && is_va_list_type (type)))
2096 reject (var, "is va_list");
2097 return false;
2100 bitmap_set_bit (candidate_bitmap, DECL_UID (var));
2101 slot = candidates->find_slot_with_hash (var, DECL_UID (var), INSERT);
2102 *slot = var;
2104 if (dump_file && (dump_flags & TDF_DETAILS))
2106 fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
2107 print_generic_expr (dump_file, var);
2108 fprintf (dump_file, "\n");
2111 return true;
2114 /* The very first phase of intraprocedural SRA. It marks in candidate_bitmap
2115 those declarations whose type is suitable for scalarization. */
2117 static bool
2118 find_var_candidates (void)
2120 tree var, parm;
2121 unsigned int i;
2122 bool ret = false;
2124 for (parm = DECL_ARGUMENTS (current_function_decl);
2125 parm;
2126 parm = DECL_CHAIN (parm))
2127 ret |= maybe_add_sra_candidate (parm);
2129 FOR_EACH_LOCAL_DECL (cfun, i, var)
2131 if (!VAR_P (var))
2132 continue;
2134 ret |= maybe_add_sra_candidate (var);
2137 return ret;
2140 /* Return true if EXPR is a reference chain of COMPONENT_REFs and ARRAY_REFs
2141 ending either with a DECL or a MEM_REF with zero offset. */
2143 static bool
2144 path_comparable_for_same_access (tree expr)
2146 while (handled_component_p (expr))
2148 if (TREE_CODE (expr) == ARRAY_REF)
2150 /* SSA name indices can occur here too when the array is of size one.
2151 But we cannot just re-use array_refs with SSA names elsewhere in
2152 the function, so disallow non-constant indices. TODO: Remove this
2153 limitation after teaching build_reconstructed_reference to replace
2154 the index with the index type lower bound. */
2155 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST)
2156 return false;
2158 expr = TREE_OPERAND (expr, 0);
2161 if (TREE_CODE (expr) == MEM_REF)
2163 if (!zerop (TREE_OPERAND (expr, 1)))
2164 return false;
2166 else
2167 gcc_assert (DECL_P (expr));
2169 return true;
2172 /* Assuming that EXP1 consists of only COMPONENT_REFs and ARRAY_REFs, return
2173 true if the chain of these handled components is exactly the same as that of EXP2
2174 and the expression under them is the same DECL or an equivalent MEM_REF.
2175 The reference picked by compare_access_positions must go to EXP1. */
2177 static bool
2178 same_access_path_p (tree exp1, tree exp2)
2180 if (TREE_CODE (exp1) != TREE_CODE (exp2))
2182 /* Special case single-field structures loaded sometimes as the field
2183 and sometimes as the structure. If the field is of a scalar type,
2184 compare_access_positions will put it into exp1.
2186 TODO: The gimple register type condition can be removed if we teach
2187 compare_access_positions to put inner types first. */
2188 if (is_gimple_reg_type (TREE_TYPE (exp1))
2189 && TREE_CODE (exp1) == COMPONENT_REF
2190 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (exp1, 0)))
2191 == TYPE_MAIN_VARIANT (TREE_TYPE (exp2))))
2192 exp1 = TREE_OPERAND (exp1, 0);
2193 else
2194 return false;
2197 if (!operand_equal_p (exp1, exp2, OEP_ADDRESS_OF))
2198 return false;
2200 return true;
2203 /* Sort all accesses for the given variable, check for partial overlaps and
2204 return NULL if there are any. If there are none, pick a representative for
2205 each combination of offset and size and create a linked list out of them.
2206 Return the pointer to the first representative and make sure it is the first
2207 one in the vector of accesses. */
2209 static struct access *
2210 sort_and_splice_var_accesses (tree var)
2212 int i, j, access_count;
2213 struct access *res, **prev_acc_ptr = &res;
2214 vec<access_p> *access_vec;
2215 bool first = true;
2216 HOST_WIDE_INT low = -1, high = 0;
2218 access_vec = get_base_access_vector (var);
2219 if (!access_vec)
2220 return NULL;
2221 access_count = access_vec->length ();
2223 /* Sort by <OFFSET, SIZE>. */
2224 access_vec->qsort (compare_access_positions);
2226 i = 0;
2227 while (i < access_count)
2229 struct access *access = (*access_vec)[i];
2230 bool grp_write = access->write;
2231 bool grp_read = !access->write;
2232 bool grp_scalar_write = access->write
2233 && is_gimple_reg_type (access->type);
2234 bool grp_scalar_read = !access->write
2235 && is_gimple_reg_type (access->type);
2236 bool grp_assignment_read = access->grp_assignment_read;
2237 bool grp_assignment_write = access->grp_assignment_write;
2238 bool multiple_scalar_reads = false;
2239 bool grp_partial_lhs = access->grp_partial_lhs;
2240 bool first_scalar = is_gimple_reg_type (access->type);
2241 bool unscalarizable_region = access->grp_unscalarizable_region;
2242 bool grp_same_access_path = true;
2243 bool bf_non_full_precision
2244 = (INTEGRAL_TYPE_P (access->type)
2245 && TYPE_PRECISION (access->type) != access->size
2246 && TREE_CODE (access->expr) == COMPONENT_REF
2247 && DECL_BIT_FIELD (TREE_OPERAND (access->expr, 1)));
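/* Accesses are sorted by offset and size, so LOW and HIGH delimit the group
   being built: a new access either starts a new group (offset >= HIGH), lies
   entirely within the current one, or partially overlaps it, which
   disqualifies the whole variable. */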
2249 if (first || access->offset >= high)
2251 first = false;
2252 low = access->offset;
2253 high = access->offset + access->size;
2255 else if (access->offset > low && access->offset + access->size > high)
2256 return NULL;
2257 else
2258 gcc_assert (access->offset >= low
2259 && access->offset + access->size <= high);
2261 if (INTEGRAL_TYPE_P (access->type)
2262 && TYPE_PRECISION (access->type) != access->size
2263 && bitmap_bit_p (passed_by_ref_in_call, DECL_UID (access->base)))
2265 /* This can lead to performance regressions because we can generate
2266 excessive zero extensions. */
2267 if (dump_file && (dump_flags & TDF_DETAILS))
2269 fprintf (dump_file, "Won't scalarize ");
2270 print_generic_expr (dump_file, access->base);
2271 fprintf (dump_file, "(%d), it is passed by reference to a call "
2272 "and there are accesses with precision not covering "
2273 "their type size.", DECL_UID (access->base));
2275 return NULL;
2278 grp_same_access_path = path_comparable_for_same_access (access->expr);
2280 j = i + 1;
2281 while (j < access_count)
2283 struct access *ac2 = (*access_vec)[j];
2284 if (ac2->offset != access->offset || ac2->size != access->size)
2285 break;
2286 if (ac2->write)
2288 grp_write = true;
2289 grp_scalar_write = (grp_scalar_write
2290 || is_gimple_reg_type (ac2->type));
2292 else
2294 grp_read = true;
2295 if (is_gimple_reg_type (ac2->type))
2297 if (grp_scalar_read)
2298 multiple_scalar_reads = true;
2299 else
2300 grp_scalar_read = true;
2303 grp_assignment_read |= ac2->grp_assignment_read;
2304 grp_assignment_write |= ac2->grp_assignment_write;
2305 grp_partial_lhs |= ac2->grp_partial_lhs;
2306 unscalarizable_region |= ac2->grp_unscalarizable_region;
2307 relink_to_new_repr (access, ac2);
2309 /* If there are both aggregate-type and scalar-type accesses with
2310 this combination of size and offset, the comparison function
2311 should have put the scalars first. */
2312 gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
2313 /* It also prefers integral types to non-integral. However, when the
2314 precision of the selected type does not span the entire area and
2315 should also be used for a non-integer (i.e. float), we must not
2316 let that happen. Normally analyze_access_subtree expands the type
2317 to cover the entire area but for bit-fields it doesn't. */
2318 if (bf_non_full_precision && !INTEGRAL_TYPE_P (ac2->type))
2320 if (dump_file && (dump_flags & TDF_DETAILS))
2322 fprintf (dump_file, "Cannot scalarize the following access "
2323 "because insufficient precision integer type was "
2324 "selected.\n ");
2325 dump_access (dump_file, access, false);
2327 unscalarizable_region = true;
2330 if (grp_same_access_path
2331 && !same_access_path_p (access->expr, ac2->expr))
2332 grp_same_access_path = false;
2334 ac2->group_representative = access;
2335 j++;
2338 i = j;
2340 access->group_representative = access;
2341 access->grp_write = grp_write;
2342 access->grp_read = grp_read;
2343 access->grp_scalar_read = grp_scalar_read;
2344 access->grp_scalar_write = grp_scalar_write;
2345 access->grp_assignment_read = grp_assignment_read;
2346 access->grp_assignment_write = grp_assignment_write;
2347 access->grp_hint = multiple_scalar_reads && !constant_decl_p (var);
2348 access->grp_partial_lhs = grp_partial_lhs;
2349 access->grp_unscalarizable_region = unscalarizable_region;
2350 access->grp_same_access_path = grp_same_access_path;
2352 *prev_acc_ptr = access;
2353 prev_acc_ptr = &access->next_grp;
2356 gcc_assert (res == (*access_vec)[0]);
2357 return res;
2360 /* Create a variable for the given ACCESS which determines the type, name and a
2361 few other properties. Return the variable declaration and store it also to
2362 ACCESS->replacement. REG_TREE is used when creating a declaration to base a
2363 default-definition SSA name on in order to facilitate an uninitialized
2364 warning. It is used instead of the actual ACCESS type if that is not of a
2365 gimple register type. */
2367 static tree
2368 create_access_replacement (struct access *access, tree reg_type = NULL_TREE)
2370 tree repl;
2372 tree type = access->type;
2373 if (reg_type && !is_gimple_reg_type (type))
2374 type = reg_type;
2376 if (access->grp_to_be_debug_replaced)
2378 repl = create_tmp_var_raw (access->type);
2379 DECL_CONTEXT (repl) = current_function_decl;
2381 else
2382 /* Drop any special alignment on the type if it's not on the main
2383 variant. This avoids issues with weirdo ABIs like AAPCS. */
2384 repl = create_tmp_var (build_qualified_type (TYPE_MAIN_VARIANT (type),
2385 TYPE_QUALS (type)), "SR");
2386 if (access->grp_partial_lhs
2387 && is_gimple_reg_type (type))
2388 DECL_NOT_GIMPLE_REG_P (repl) = 1;
2390 DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
2391 DECL_ARTIFICIAL (repl) = 1;
2392 DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
2394 if (DECL_NAME (access->base)
2395 && !DECL_IGNORED_P (access->base)
2396 && !DECL_ARTIFICIAL (access->base))
2398 char *pretty_name = make_fancy_name (access->expr);
2399 tree debug_expr = unshare_expr_without_location (access->expr), d;
2400 bool fail = false;
2402 DECL_NAME (repl) = get_identifier (pretty_name);
2403 DECL_NAMELESS (repl) = 1;
2404 obstack_free (&name_obstack, pretty_name);
2406 /* Get rid of any SSA_NAMEs embedded in debug_expr,
2407 as DECL_DEBUG_EXPR isn't considered when looking for still
2408 used SSA_NAMEs and thus they could be freed. All debug info
2409 generation cares about is whether something is constant or variable
2410 and that get_ref_base_and_extent works properly on the
2411 expression. It cannot handle accesses at a non-constant offset
2412 though, so just give up in those cases. */
2413 for (d = debug_expr;
2414 !fail && (handled_component_p (d) || TREE_CODE (d) == MEM_REF);
2415 d = TREE_OPERAND (d, 0))
2416 switch (TREE_CODE (d))
2418 case ARRAY_REF:
2419 case ARRAY_RANGE_REF:
2420 if (TREE_OPERAND (d, 1)
2421 && TREE_CODE (TREE_OPERAND (d, 1)) != INTEGER_CST)
2422 fail = true;
2423 if (TREE_OPERAND (d, 3)
2424 && TREE_CODE (TREE_OPERAND (d, 3)) != INTEGER_CST)
2425 fail = true;
2426 /* FALLTHRU */
2427 case COMPONENT_REF:
2428 if (TREE_OPERAND (d, 2)
2429 && TREE_CODE (TREE_OPERAND (d, 2)) != INTEGER_CST)
2430 fail = true;
2431 break;
2432 case MEM_REF:
2433 if (TREE_CODE (TREE_OPERAND (d, 0)) != ADDR_EXPR)
2434 fail = true;
2435 else
2436 d = TREE_OPERAND (d, 0);
2437 break;
2438 default:
2439 break;
2441 if (!fail)
2443 SET_DECL_DEBUG_EXPR (repl, debug_expr);
2444 DECL_HAS_DEBUG_EXPR_P (repl) = 1;
2446 if (access->grp_no_warning)
2447 suppress_warning (repl /* Be more selective! */);
2448 else
2449 copy_warning (repl, access->base);
2451 else
2452 suppress_warning (repl /* Be more selective! */);
2454 if (dump_file)
2456 if (access->grp_to_be_debug_replaced)
2458 fprintf (dump_file, "Created a debug-only replacement for ");
2459 print_generic_expr (dump_file, access->base);
2460 fprintf (dump_file, " offset: %u, size: %u\n",
2461 (unsigned) access->offset, (unsigned) access->size);
2463 else
2465 fprintf (dump_file, "Created a replacement for ");
2466 print_generic_expr (dump_file, access->base);
2467 fprintf (dump_file, " offset: %u, size: %u: ",
2468 (unsigned) access->offset, (unsigned) access->size);
2469 print_generic_expr (dump_file, repl, TDF_UID);
2470 fprintf (dump_file, "\n");
2473 sra_stats.replacements++;
2475 return repl;
2478 /* Return ACCESS scalar replacement, which must exist. */
2480 static inline tree
2481 get_access_replacement (struct access *access)
2483 gcc_checking_assert (access->replacement_decl);
2484 return access->replacement_decl;
2488 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
2489 linked list along the way. Stop when *ACCESS is NULL or the access pointed
2490 to by it is not "within" the root. Return false iff some accesses partially
2491 overlap. */
2493 static bool
2494 build_access_subtree (struct access **access)
2496 struct access *root = *access, *last_child = NULL;
2497 HOST_WIDE_INT limit = root->offset + root->size;
2499 *access = (*access)->next_grp;
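/* Consume consecutive accesses that fit within the root, building their own
   subtrees recursively; the recursion advances *ACCESS past everything it
   consumed. */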
2500 while (*access && (*access)->offset + (*access)->size <= limit)
2502 if (!last_child)
2503 root->first_child = *access;
2504 else
2505 last_child->next_sibling = *access;
2506 last_child = *access;
2507 (*access)->parent = root;
2508 (*access)->grp_write |= root->grp_write;
2510 if (!build_access_subtree (access))
2511 return false;
2514 if (*access && (*access)->offset < limit)
2515 return false;
2517 return true;
2520 /* Build a tree of access representatives, ACCESS is the pointer to the first
2521 one, others are linked in a list by the next_grp field. Return false iff
2522 some accesses partially overlap. */
2524 static bool
2525 build_access_trees (struct access *access)
2527 while (access)
2529 struct access *root = access;
2531 if (!build_access_subtree (&access))
2532 return false;
2533 root->next_grp = access;
2535 return true;
2538 /* Traverse the access forest where ROOT is the first root and verify that
2539 various important invariants hold true. */
2541 DEBUG_FUNCTION void
2542 verify_sra_access_forest (struct access *root)
2544 struct access *access = root;
2545 tree first_base = root->base;
2546 gcc_assert (DECL_P (first_base));
2549 gcc_assert (access->base == first_base);
2550 if (access->parent)
2551 gcc_assert (access->offset >= access->parent->offset
2552 && access->size <= access->parent->size);
2553 if (access->next_sibling)
2554 gcc_assert (access->next_sibling->offset
2555 >= access->offset + access->size);
2557 poly_int64 poffset, psize, pmax_size;
2558 bool reverse;
2559 tree base = get_ref_base_and_extent (access->expr, &poffset, &psize,
2560 &pmax_size, &reverse);
2561 HOST_WIDE_INT offset, size, max_size;
2562 if (!poffset.is_constant (&offset)
2563 || !psize.is_constant (&size)
2564 || !pmax_size.is_constant (&max_size))
2565 gcc_unreachable ();
2566 gcc_assert (base == first_base);
2567 gcc_assert (offset == access->offset);
2568 gcc_assert (access->grp_unscalarizable_region
2569 || access->grp_total_scalarization
2570 || size == max_size);
2571 gcc_assert (access->grp_unscalarizable_region
2572 || !is_gimple_reg_type (access->type)
2573 || size == access->size);
2574 gcc_assert (reverse == access->reverse);
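/* Depth-first walk of the forest: descend to the first child, otherwise go
   to the next sibling, otherwise climb up until a sibling is found; once a
   root is exhausted, continue with the next one in the next_grp chain. */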
2576 if (access->first_child)
2578 gcc_assert (access->first_child->parent == access);
2579 access = access->first_child;
2581 else if (access->next_sibling)
2583 gcc_assert (access->next_sibling->parent == access->parent);
2584 access = access->next_sibling;
2586 else
2588 while (access->parent && !access->next_sibling)
2589 access = access->parent;
2590 if (access->next_sibling)
2591 access = access->next_sibling;
2592 else
2594 gcc_assert (access == root);
2595 root = root->next_grp;
2596 access = root;
2600 while (access);
2603 /* Verify access forests of all candidates with accesses by calling
2604 verify_sra_access_forest on each of them. */
2606 DEBUG_FUNCTION void
2607 verify_all_sra_access_forests (void)
2609 bitmap_iterator bi;
2610 unsigned i;
2611 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2613 tree var = candidate (i);
2614 struct access *access = get_first_repr_for_decl (var);
2615 if (access)
2617 gcc_assert (access->base == var);
2618 verify_sra_access_forest (access);
2623 /* Return true if EXPR contains some ARRAY_REFs into a variable bounded
2624 array. */
2626 static bool
2627 expr_with_var_bounded_array_refs_p (tree expr)
2629 while (handled_component_p (expr))
2631 if (TREE_CODE (expr) == ARRAY_REF
2632 && !tree_fits_shwi_p (array_ref_low_bound (expr)))
2633 return true;
2634 expr = TREE_OPERAND (expr, 0);
2636 return false;
2639 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
2640 both seeming beneficial and when ALLOW_REPLACEMENTS allows it. If TOTALLY
2641 is set, we are totally scalarizing the aggregate. Also set all sorts of
2642 access flags appropriately along the way, notably propagate grp_read,
2643 grp_assignment_read, grp_write and grp_assignment_write from PARENT to
2644 ROOT.
2646 Creating a replacement for a scalar access is considered beneficial if its
2647 grp_hint or TOTALLY is set (this means either that there is more than one
2648 direct read access or that we are attempting total scalarization) or
2649 according to the following table:
2651 Access written to through a scalar type (once or more times)
2653 | Written to in an assignment statement
2655 | | Access read as scalar _once_
2656 | | |
2657 | | | Read in an assignment statement
2658 | | | |
2659 | | | | Scalarize Comment
2660 -----------------------------------------------------------------------------
2661 0 0 0 0 No access for the scalar
2662 0 0 0 1 No access for the scalar
2663 0 0 1 0 No Single read - won't help
2664 0 0 1 1 No The same case
2665 0 1 0 0 No access for the scalar
2666 0 1 0 1 No access for the scalar
2667 0 1 1 0 Yes s = *g; return s.i;
2668 0 1 1 1 Yes The same case as above
2669 1 0 0 0 No Won't help
2670 1 0 0 1 Yes s.i = 1; *g = s;
2671 1 0 1 0 Yes s.i = 5; g = s.i;
2672 1 0 1 1 Yes The same case as above
2673 1 1 0 0 No Won't help.
2674 1 1 0 1 Yes s.i = 1; *g = s;
2675 1 1 1 0 Yes s = *g; return s.i;
2676 1 1 1 1 Yes Any of the above yeses */
2678 static bool
2679 analyze_access_subtree (struct access *root, struct access *parent,
2680 bool allow_replacements, bool totally)
2682 struct access *child;
2683 HOST_WIDE_INT limit = root->offset + root->size;
2684 HOST_WIDE_INT covered_to = root->offset;
2685 bool scalar = is_gimple_reg_type (root->type);
2686 bool hole = false, sth_created = false;
2688 if (parent)
2690 if (parent->grp_read)
2691 root->grp_read = 1;
2692 if (parent->grp_assignment_read)
2693 root->grp_assignment_read = 1;
2694 if (parent->grp_write)
2695 root->grp_write = 1;
2696 if (parent->grp_assignment_write)
2697 root->grp_assignment_write = 1;
2698 if (!parent->grp_same_access_path)
2699 root->grp_same_access_path = 0;
2702 if (root->grp_unscalarizable_region)
2703 allow_replacements = false;
2705 if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
2706 allow_replacements = false;
2708 if (!totally && root->grp_result_of_prop_from_lhs)
2709 allow_replacements = false;
2711 for (child = root->first_child; child; child = child->next_sibling)
2713 hole |= covered_to < child->offset;
2714 sth_created |= analyze_access_subtree (child, root,
2715 allow_replacements && !scalar,
2716 totally);
2718 root->grp_unscalarized_data |= child->grp_unscalarized_data;
2719 if (child->grp_covered)
2720 covered_to += child->size;
2721 else
2722 hole = true;
2725 if (allow_replacements && scalar && !root->first_child
2726 && (totally || !root->grp_total_scalarization)
2727 && (totally
2728 || root->grp_hint
2729 || ((root->grp_scalar_read || root->grp_assignment_read)
2730 && (root->grp_scalar_write || root->grp_assignment_write))))
2732 /* Always create access replacements that cover the whole access.
2733 For integral types this means the precision has to match.
2734 Avoid assumptions based on the integral type kind, too. */
2735 if (INTEGRAL_TYPE_P (root->type)
2736 && ((TREE_CODE (root->type) != INTEGER_TYPE
2737 && TREE_CODE (root->type) != BITINT_TYPE)
2738 || TYPE_PRECISION (root->type) != root->size)
2739 /* But leave bitfield accesses alone. */
2740 && (TREE_CODE (root->expr) != COMPONENT_REF
2741 || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
2743 tree rt = root->type;
2744 gcc_assert ((root->offset % BITS_PER_UNIT) == 0
2745 && (root->size % BITS_PER_UNIT) == 0);
2746 if (TREE_CODE (root->type) == BITINT_TYPE)
2747 root->type = build_bitint_type (root->size, TYPE_UNSIGNED (rt));
2748 else
2749 root->type = build_nonstandard_integer_type (root->size,
2750 TYPE_UNSIGNED (rt));
2751 root->expr = build_ref_for_offset (UNKNOWN_LOCATION, root->base,
2752 root->offset, root->reverse,
2753 root->type, NULL, false);
2755 if (dump_file && (dump_flags & TDF_DETAILS))
2757 fprintf (dump_file, "Changing the type of a replacement for ");
2758 print_generic_expr (dump_file, root->base);
2759 fprintf (dump_file, " offset: %u, size: %u ",
2760 (unsigned) root->offset, (unsigned) root->size);
2761 fprintf (dump_file, " to an integer.\n");
2765 root->grp_to_be_replaced = 1;
2766 root->replacement_decl = create_access_replacement (root);
2767 sth_created = true;
2768 hole = false;
2770 else
2772 if (allow_replacements
2773 && scalar && !root->first_child
2774 && !root->grp_total_scalarization
2775 && (root->grp_scalar_write || root->grp_assignment_write)
2776 && !bitmap_bit_p (cannot_scalarize_away_bitmap,
2777 DECL_UID (root->base)))
2779 gcc_checking_assert (!root->grp_scalar_read
2780 && !root->grp_assignment_read);
2781 sth_created = true;
2782 if (MAY_HAVE_DEBUG_BIND_STMTS)
2784 root->grp_to_be_debug_replaced = 1;
2785 root->replacement_decl = create_access_replacement (root);
2789 if (covered_to < limit)
2790 hole = true;
2791 if (scalar || !allow_replacements)
2792 root->grp_total_scalarization = 0;
2795 if (!hole || totally)
2796 root->grp_covered = 1;
2797 else if (root->grp_write || comes_initialized_p (root->base))
2798 root->grp_unscalarized_data = 1; /* not covered and written to */
2799 return sth_created;
2802 /* Analyze all access trees linked by next_grp by the means of
2803 analyze_access_subtree. */
2804 static bool
2805 analyze_access_trees (struct access *access)
2807 bool ret = false;
2809 while (access)
2811 if (analyze_access_subtree (access, NULL, true,
2812 access->grp_total_scalarization))
2813 ret = true;
2814 access = access->next_grp;
2817 return ret;
2820 /* Return true iff a potential new child of ACC at offset OFFSET and with size
2821 SIZE would conflict with an already existing one. If exactly such a child
2822 already exists in ACC, store a pointer to it in EXACT_MATCH. */
2824 static bool
2825 child_would_conflict_in_acc (struct access *acc, HOST_WIDE_INT norm_offset,
2826 HOST_WIDE_INT size, struct access **exact_match)
2828 struct access *child;
2830 for (child = acc->first_child; child; child = child->next_sibling)
2832 if (child->offset == norm_offset && child->size == size)
2834 *exact_match = child;
2835 return true;
2838 if (child->offset < norm_offset + size
2839 && child->offset + child->size > norm_offset)
2840 return true;
2843 return false;
2846 /* Create a new child access of PARENT, with all properties just like MODEL
2847 except for its offset and its grp_read and grp_write flags, which are set
2848 according to SET_GRP_READ and SET_GRP_WRITE, respectively.
2849 Return the new access or NULL if it cannot be created. Note that this
2850 access is created long after all splicing and sorting, it's not located in
2851 any access vector and is automatically a representative of its group. */
2853 static struct access *
2854 create_artificial_child_access (struct access *parent, struct access *model,
2855 HOST_WIDE_INT new_offset,
2856 bool set_grp_read, bool set_grp_write)
2858 struct access **child;
2859 tree expr = parent->base;
2861 gcc_assert (!model->grp_unscalarizable_region);
2863 struct access *access = access_pool.allocate ();
2864 memset (access, 0, sizeof (struct access));
2865 if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2866 model->type))
2868 access->grp_no_warning = true;
2869 expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2870 new_offset, model, NULL, false);
2873 access->base = parent->base;
2874 access->expr = expr;
2875 access->offset = new_offset;
2876 access->size = model->size;
2877 access->type = model->type;
2878 access->parent = parent;
2879 access->grp_read = set_grp_read;
2880 access->grp_write = set_grp_write;
2881 access->reverse = model->reverse;
2883 child = &parent->first_child;
2884 while (*child && (*child)->offset < new_offset)
2885 child = &(*child)->next_sibling;
2887 access->next_sibling = *child;
2888 *child = access;
2890 return access;
2894 /* Beginning with ACCESS, traverse its whole access subtree and mark all
2895 sub-trees as written to. If any of them has not been marked so previously
2896 and has assignment links leading from it, re-enqueue it. */
2898 static void
2899 subtree_mark_written_and_rhs_enqueue (struct access *access)
2901 if (access->grp_write)
2902 return;
2903 access->grp_write = true;
2904 add_access_to_rhs_work_queue (access);
2906 struct access *child;
2907 for (child = access->first_child; child; child = child->next_sibling)
2908 subtree_mark_written_and_rhs_enqueue (child);
2911 /* If there is still budget to create a propagation access for DECL, return
2912 true and decrement the budget. Otherwise return false. */
2914 static bool
2915 budget_for_propagation_access (tree decl)
2917 unsigned b, *p = propagation_budget->get (decl);
2918 if (p)
2919 b = *p;
2920 else
2921 b = param_sra_max_propagations;
2923 if (b == 0)
2924 return false;
2925 b--;
2927 if (b == 0 && dump_file && (dump_flags & TDF_DETAILS))
2929 fprintf (dump_file, "The propagation budget of ");
2930 print_generic_expr (dump_file, decl);
2931 fprintf (dump_file, " (UID: %u) has been exhausted.\n", DECL_UID (decl));
2933 propagation_budget->put (decl, b);
2934 return true;
2937 /* Return true if ACC or any of its subaccesses has grp_write set. */
2939 static bool
2940 access_or_its_child_written (struct access *acc)
2942 if (acc->grp_write)
2943 return true;
2944 for (struct access *sub = acc->first_child; sub; sub = sub->next_sibling)
2945 if (access_or_its_child_written (sub))
2946 return true;
2947 return false;
2950 /* Propagate subaccesses and grp_write flags of RACC across an assignment link
2951 to LACC. Enqueue sub-accesses as necessary so that the write flag is
2952 propagated transitively. Return true if anything changed. Additionally, if
2953 RACC is a scalar access but LACC is not, change the type of the latter, if
2954 possible. */
2956 static bool
2957 propagate_subaccesses_from_rhs (struct access *lacc, struct access *racc)
2959 struct access *rchild;
2960 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2961 bool ret = false;
2963 /* If the LHS is still not marked as being written to, we only need to do so
2964 if the RHS at this level actually was. */
2965 if (!lacc->grp_write)
2967 gcc_checking_assert (!comes_initialized_p (racc->base));
2968 if (racc->grp_write)
2970 subtree_mark_written_and_rhs_enqueue (lacc);
2971 ret = true;
2975 if (is_gimple_reg_type (lacc->type)
2976 || lacc->grp_unscalarizable_region
2977 || racc->grp_unscalarizable_region)
2979 if (!lacc->grp_write)
2981 ret = true;
2982 subtree_mark_written_and_rhs_enqueue (lacc);
2984 return ret;
2987 if (is_gimple_reg_type (racc->type))
2989 if (!lacc->grp_write)
2991 ret = true;
2992 subtree_mark_written_and_rhs_enqueue (lacc);
2994 if (!lacc->first_child && !racc->first_child)
2996 /* We are about to change the access type from aggregate to scalar,
2997 so we need to put the reverse flag onto the access, if any. */
2998 const bool reverse
2999 = TYPE_REVERSE_STORAGE_ORDER (lacc->type)
3000 && !POINTER_TYPE_P (racc->type)
3001 && !VECTOR_TYPE_P (racc->type);
3002 tree t = lacc->base;
3004 lacc->type = racc->type;
3005 if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
3006 lacc->offset, racc->type))
3008 lacc->expr = t;
3009 lacc->grp_same_access_path = true;
3011 else
3013 lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
3014 lacc->base, lacc->offset,
3015 racc, NULL, false);
3016 if (TREE_CODE (lacc->expr) == MEM_REF)
3017 REF_REVERSE_STORAGE_ORDER (lacc->expr) = reverse;
3018 lacc->grp_no_warning = true;
3019 lacc->grp_same_access_path = false;
3021 lacc->reverse = reverse;
3023 return ret;
3026 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
3028 struct access *new_acc = NULL;
3029 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
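/* NORM_OFFSET is the offset the RHS child would have once re-based onto the
   LHS access. */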
3031 if (child_would_conflict_in_acc (lacc, norm_offset, rchild->size,
3032 &new_acc))
3034 if (new_acc)
3036 if (!new_acc->grp_write && rchild->grp_write)
3038 gcc_assert (!lacc->grp_write);
3039 subtree_mark_written_and_rhs_enqueue (new_acc);
3040 ret = true;
3043 rchild->grp_hint = 1;
3044 new_acc->grp_hint |= new_acc->grp_read;
3045 if (rchild->first_child
3046 && propagate_subaccesses_from_rhs (new_acc, rchild))
3048 ret = true;
3049 add_access_to_rhs_work_queue (new_acc);
3052 else
3054 if (!lacc->grp_write)
3056 ret = true;
3057 subtree_mark_written_and_rhs_enqueue (lacc);
3060 continue;
3063 if (rchild->grp_unscalarizable_region
3064 || !budget_for_propagation_access (lacc->base))
3066 if (!lacc->grp_write && access_or_its_child_written (rchild))
3068 ret = true;
3069 subtree_mark_written_and_rhs_enqueue (lacc);
3071 continue;
3074 rchild->grp_hint = 1;
3075 /* Because get_ref_base_and_extent always includes padding in size for
3076 accesses to DECLs but not necessarily for COMPONENT_REFs of the same
3077 type, we might actually be attempting here to create a child of the
3078 same type as the parent. */
3079 if (!types_compatible_p (lacc->type, rchild->type))
3080 new_acc = create_artificial_child_access (lacc, rchild, norm_offset,
3081 false,
3082 (lacc->grp_write
3083 || rchild->grp_write));
3084 else
3085 new_acc = lacc;
3086 gcc_checking_assert (new_acc);
3087 if (racc->first_child)
3088 propagate_subaccesses_from_rhs (new_acc, rchild);
3090 add_access_to_rhs_work_queue (lacc);
3091 ret = true;
3094 return ret;
3097 /* Propagate subaccesses of LACC across an assignment link to RACC if they
3098 should inhibit total scalarization of the corresponding area. No flags are
3099 being propagated in the process. Return true if anything changed. */
3101 static bool
3102 propagate_subaccesses_from_lhs (struct access *lacc, struct access *racc)
3104 if (is_gimple_reg_type (racc->type)
3105 || lacc->grp_unscalarizable_region
3106 || racc->grp_unscalarizable_region)
3107 return false;
3109 /* TODO: Do we want to set some new racc flag to stop potential total
3110 scalarization if lacc is a scalar access (and neither of the two has
3111 children)? */
3113 bool ret = false;
3114 HOST_WIDE_INT norm_delta = racc->offset - lacc->offset;
3115 for (struct access *lchild = lacc->first_child;
3116 lchild;
3117 lchild = lchild->next_sibling)
3119 struct access *matching_acc = NULL;
3120 HOST_WIDE_INT norm_offset = lchild->offset + norm_delta;
3122 if (lchild->grp_unscalarizable_region
3123 || child_would_conflict_in_acc (racc, norm_offset, lchild->size,
3124 &matching_acc)
3125 || !budget_for_propagation_access (racc->base))
3127 if (matching_acc
3128 && propagate_subaccesses_from_lhs (lchild, matching_acc))
3129 add_access_to_lhs_work_queue (matching_acc);
3130 continue;
3133 /* Because get_ref_base_and_extent always includes padding in size for
3134 accesses to DECLs but not necessarily for COMPONENT_REFs of the same
3135 type, we might actually be attempting here to create a child of the
3136 same type as the parent. */
3137 if (!types_compatible_p (racc->type, lchild->type))
3139 struct access *new_acc
3140 = create_artificial_child_access (racc, lchild, norm_offset,
3141 true, false);
3142 new_acc->grp_result_of_prop_from_lhs = 1;
3143 propagate_subaccesses_from_lhs (lchild, new_acc);
3145 else
3146 propagate_subaccesses_from_lhs (lchild, racc);
3147 ret = true;
3149 return ret;
3152 /* Propagate all subaccesses across assignment links. */
3154 static void
3155 propagate_all_subaccesses (void)
3157 propagation_budget = new hash_map<tree, unsigned>;
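/* First drain the RHS work queue, propagating subaccesses and written flags
   from right-hand sides of assignments to left-hand sides and re-enqueuing
   the affected LHS accesses and their parents. */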
3158 while (rhs_work_queue_head)
3160 struct access *racc = pop_access_from_rhs_work_queue ();
3161 struct assign_link *link;
3163 if (racc->group_representative)
3164 racc = racc->group_representative;
3165 gcc_assert (racc->first_rhs_link);
3167 for (link = racc->first_rhs_link; link; link = link->next_rhs)
3169 struct access *lacc = link->lacc;
3171 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
3172 continue;
3173 lacc = lacc->group_representative;
3175 bool reque_parents = false;
3176 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (racc->base)))
3178 if (!lacc->grp_write)
3180 subtree_mark_written_and_rhs_enqueue (lacc);
3181 reque_parents = true;
3184 else if (propagate_subaccesses_from_rhs (lacc, racc))
3185 reque_parents = true;
3187 if (reque_parents)
3190 add_access_to_rhs_work_queue (lacc);
3191 lacc = lacc->parent;
3193 while (lacc);
3197 while (lhs_work_queue_head)
3199 struct access *lacc = pop_access_from_lhs_work_queue ();
3200 struct assign_link *link;
3202 if (lacc->group_representative)
3203 lacc = lacc->group_representative;
3204 gcc_assert (lacc->first_lhs_link);
3206 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
3207 continue;
3209 for (link = lacc->first_lhs_link; link; link = link->next_lhs)
3211 struct access *racc = link->racc;
3213 if (racc->group_representative)
3214 racc = racc->group_representative;
3215 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (racc->base)))
3216 continue;
3217 if (propagate_subaccesses_from_lhs (lacc, racc))
3218 add_access_to_lhs_work_queue (racc);
3221 delete propagation_budget;
3224 /* Return true if the forest beginning with ROOT does not contain
3225 unscalarizable regions or non-byte aligned accesses. */
3227 static bool
3228 can_totally_scalarize_forest_p (struct access *root)
3230 struct access *access = root;
3233 if (access->grp_unscalarizable_region
3234 || (access->offset % BITS_PER_UNIT) != 0
3235 || (access->size % BITS_PER_UNIT) != 0
3236 || (is_gimple_reg_type (access->type)
3237 && access->first_child))
3238 return false;
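/* Walk the forest depth-first, in the same manner as
   verify_sra_access_forest. */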
3240 if (access->first_child)
3241 access = access->first_child;
3242 else if (access->next_sibling)
3243 access = access->next_sibling;
3244 else
3246 while (access->parent && !access->next_sibling)
3247 access = access->parent;
3248 if (access->next_sibling)
3249 access = access->next_sibling;
3250 else
3252 gcc_assert (access == root);
3253 root = root->next_grp;
3254 access = root;
3258 while (access);
3259 return true;
3262 /* Create and return an ACCESS in PARENT spanning from POS with SIZE, TYPE and
3263 reference EXPR for total scalarization purposes and mark it as such. Within
3264 the children of PARENT, link it in between PTR and NEXT_SIBLING. */
3266 static struct access *
3267 create_total_scalarization_access (struct access *parent, HOST_WIDE_INT pos,
3268 HOST_WIDE_INT size, tree type, tree expr,
3269 struct access **ptr,
3270 struct access *next_sibling)
3272 struct access *access = access_pool.allocate ();
3273 memset (access, 0, sizeof (struct access));
3274 access->base = parent->base;
3275 access->offset = pos;
3276 access->size = size;
3277 access->expr = expr;
3278 access->type = type;
3279 access->parent = parent;
3280 access->grp_write = parent->grp_write;
3281 access->grp_total_scalarization = 1;
3282 access->grp_hint = 1;
3283 access->grp_same_access_path = path_comparable_for_same_access (expr);
3284 access->reverse = reverse_storage_order_for_component_p (expr);
3286 access->next_sibling = next_sibling;
3287 *ptr = access;
3288 return access;
3291 /* Create and return an ACCESS in PARENT spanning from POS with SIZE, TYPE and
3292 reference EXPR for total scalarization purposes and mark it as such, link it
3293 at *PTR and reshape the tree so that those elements at *PTR and their
3294 siblings which fall within the part described by POS and SIZE are moved to
3295 be children of the new access. If a partial overlap is detected, return
3296 NULL. */
3298 static struct access *
3299 create_total_access_and_reshape (struct access *parent, HOST_WIDE_INT pos,
3300 HOST_WIDE_INT size, tree type, tree expr,
3301 struct access **ptr)
3303 struct access **p = ptr;
3305 while (*p && (*p)->offset < pos + size)
3307 if ((*p)->offset + (*p)->size > pos + size)
3308 return NULL;
3309 p = &(*p)->next_sibling;
3312 struct access *next_child = *ptr;
3313 struct access *new_acc
3314 = create_total_scalarization_access (parent, pos, size, type, expr,
3315 ptr, *p);
3316 if (p != ptr)
3318 new_acc->first_child = next_child;
3319 *p = NULL;
3320 for (struct access *a = next_child; a; a = a->next_sibling)
3321 a->parent = new_acc;
3323 return new_acc;
3326 static bool totally_scalarize_subtree (struct access *root);
3328 /* Return true if INNER is either the same type as OUTER or if it is the type
3329 of a record field in OUTER at offset zero, possibly in nested
3330 sub-records. */
3332 static bool
3333 access_and_field_type_match_p (tree outer, tree inner)
3335 if (TYPE_MAIN_VARIANT (outer) == TYPE_MAIN_VARIANT (inner))
3336 return true;
3337 if (TREE_CODE (outer) != RECORD_TYPE)
3338 return false;
3339 tree fld = TYPE_FIELDS (outer);
3340 while (fld)
3342 if (TREE_CODE (fld) == FIELD_DECL)
3344 if (!zerop (DECL_FIELD_OFFSET (fld)))
3345 return false;
3346 if (TYPE_MAIN_VARIANT (TREE_TYPE (fld)) == inner)
3347 return true;
3348 if (TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE)
3349 fld = TYPE_FIELDS (TREE_TYPE (fld));
3350 else
3351 return false;
3353 else
3354 fld = DECL_CHAIN (fld);
3356 return false;
3359 /* Return type of total_should_skip_creating_access indicating whether a total
3360 scalarization access for a field/element should be created, whether it
3361 already exists or whether the entire total scalarization has to fail. */
3363 enum total_sra_field_state {TOTAL_FLD_CREATE, TOTAL_FLD_DONE, TOTAL_FLD_FAILED};
3365 /* Do all the necessary steps in total scalarization when a member of the given
3366 aggregate with type TYPE at POS and with the given SIZE should be put into PARENT and
3367 when we have processed all its siblings with smaller offsets up until and
3368 including LAST_SEEN_SIBLING (which can be NULL).
3370 If some further siblings are to be skipped, set *LAST_SEEN_SIBLING as
3371 appropriate. Return TOTAL_FLD_CREATE if the caller should carry on with
3372 creating a new access, TOTAL_FLD_DONE if access or accesses capable of
3373 representing the described part of the aggregate for the purposes of total
3374 scalarization already exist or TOTAL_FLD_FAILED if there is a problem which
3375 prevents total scalarization from happening at all. */
3377 static enum total_sra_field_state
3378 total_should_skip_creating_access (struct access *parent,
3379 struct access **last_seen_sibling,
3380 tree type, HOST_WIDE_INT pos,
3381 HOST_WIDE_INT size)
3383 struct access *next_child;
3384 if (!*last_seen_sibling)
3385 next_child = parent->first_child;
3386 else
3387 next_child = (*last_seen_sibling)->next_sibling;
3389 /* First, traverse the chain of siblings until it points to an access with
3390 offset at least equal to POS. Check all skipped accesses whether they
3391 span the POS boundary and if so, return with a failure. */
3392 while (next_child && next_child->offset < pos)
3394 if (next_child->offset + next_child->size > pos)
3395 return TOTAL_FLD_FAILED;
3396 *last_seen_sibling = next_child;
3397 next_child = next_child->next_sibling;
3400 /* Now check whether next_child has exactly the right POS and SIZE and if so,
3401 whether it can represent what we need and can be totally scalarized
3402 itself. */
3403 if (next_child && next_child->offset == pos
3404 && next_child->size == size)
3406 if (!is_gimple_reg_type (next_child->type)
3407 && (!access_and_field_type_match_p (type, next_child->type)
3408 || !totally_scalarize_subtree (next_child)))
3409 return TOTAL_FLD_FAILED;
3411 *last_seen_sibling = next_child;
3412 return TOTAL_FLD_DONE;
3415 /* If the child we're looking at would partially overlap, we just cannot
3416 totally scalarize. */
3417 if (next_child
3418 && next_child->offset < pos + size
3419 && next_child->offset + next_child->size > pos + size)
3420 return TOTAL_FLD_FAILED;
3422 if (is_gimple_reg_type (type))
3424 /* We don't scalarize accesses that are children of other scalar type
3425 accesses, so if we go on and create an access for a register type,
3426 there should not be any pre-existing children. There are rare cases
3427 where the requested type is a vector but we already have register
3428 accesses for all its elements which is equally good. Detect that
3429 situation or whether we need to bail out. */
3431 HOST_WIDE_INT covered = pos;
3432 bool skipping = false;
3433 while (next_child
3434 && next_child->offset + next_child->size <= pos + size)
3436 if (next_child->offset != covered
3437 || !is_gimple_reg_type (next_child->type))
3438 return TOTAL_FLD_FAILED;
3440 covered += next_child->size;
3441 *last_seen_sibling = next_child;
3442 next_child = next_child->next_sibling;
3443 skipping = true;
3446 if (skipping)
3448 if (covered != pos + size)
3449 return TOTAL_FLD_FAILED;
3450 else
3451 return TOTAL_FLD_DONE;
3455 return TOTAL_FLD_CREATE;
3458 /* Go over sub-tree rooted in ROOT and attempt to create scalar accesses
3459 spanning all uncovered areas covered by ROOT, return false if the attempt
3460 failed. All created accesses will have grp_total_scalarization set (and
3461 should be ignored if the function returns false). */
3463 static bool
3464 totally_scalarize_subtree (struct access *root)
3466 gcc_checking_assert (!root->grp_unscalarizable_region);
3467 gcc_checking_assert (!is_gimple_reg_type (root->type));
3469 struct access *last_seen_sibling = NULL;
3471 switch (TREE_CODE (root->type))
3473 case RECORD_TYPE:
3474 for (tree fld = TYPE_FIELDS (root->type); fld; fld = DECL_CHAIN (fld))
3475 if (TREE_CODE (fld) == FIELD_DECL)
3477 tree ft = TREE_TYPE (fld);
3478 HOST_WIDE_INT fsize = tree_to_uhwi (DECL_SIZE (fld));
3479 if (!fsize)
3480 continue;
3482 HOST_WIDE_INT pos = root->offset + int_bit_position (fld);
3483 if (pos + fsize > root->offset + root->size)
3484 return false;
3485 enum total_sra_field_state
3486 state = total_should_skip_creating_access (root,
3487 &last_seen_sibling,
3488 ft, pos, fsize);
3489 switch (state)
3491 case TOTAL_FLD_FAILED:
3492 return false;
3493 case TOTAL_FLD_DONE:
3494 continue;
3495 case TOTAL_FLD_CREATE:
3496 break;
3497 default:
3498 gcc_unreachable ();
3501 struct access **p = (last_seen_sibling
3502 ? &last_seen_sibling->next_sibling
3503 : &root->first_child);
3504 tree nref = build3 (COMPONENT_REF, ft, root->expr, fld, NULL_TREE);
3505 struct access *new_child
3506 = create_total_access_and_reshape (root, pos, fsize, ft, nref, p);
3507 if (!new_child)
3508 return false;
3510 if (!is_gimple_reg_type (ft)
3511 && !totally_scalarize_subtree (new_child))
3512 return false;
3513 last_seen_sibling = new_child;
3515 break;
3516 case ARRAY_TYPE:
3518 tree elemtype = TREE_TYPE (root->type);
3519 tree elem_size = TYPE_SIZE (elemtype);
3520 gcc_assert (elem_size && tree_fits_shwi_p (elem_size));
3521 HOST_WIDE_INT el_size = tree_to_shwi (elem_size);
3522 gcc_assert (el_size > 0);
3524 tree minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (root->type));
3525 gcc_assert (TREE_CODE (minidx) == INTEGER_CST);
3526 tree maxidx = TYPE_MAX_VALUE (TYPE_DOMAIN (root->type));
3527 /* Skip (some) zero-length arrays; others have MAXIDX == MINIDX - 1. */
3528 if (!maxidx)
3529 goto out;
3530 gcc_assert (TREE_CODE (maxidx) == INTEGER_CST);
3531 tree domain = TYPE_DOMAIN (root->type);
3532 /* MINIDX and MAXIDX are inclusive, and must be interpreted in
3533 DOMAIN (e.g. signed int, whereas min/max may be size_int). */
3534 offset_int idx = wi::to_offset (minidx);
3535 offset_int max = wi::to_offset (maxidx);
3536 if (!TYPE_UNSIGNED (domain))
3538 idx = wi::sext (idx, TYPE_PRECISION (domain));
3539 max = wi::sext (max, TYPE_PRECISION (domain));
3541 for (HOST_WIDE_INT pos = root->offset;
3542 idx <= max;
3543 pos += el_size, ++idx)
3545 enum total_sra_field_state
3546 state = total_should_skip_creating_access (root,
3547 &last_seen_sibling,
3548 elemtype, pos,
3549 el_size);
3550 switch (state)
3552 case TOTAL_FLD_FAILED:
3553 return false;
3554 case TOTAL_FLD_DONE:
3555 continue;
3556 case TOTAL_FLD_CREATE:
3557 break;
3558 default:
3559 gcc_unreachable ();
3562 struct access **p = (last_seen_sibling
3563 ? &last_seen_sibling->next_sibling
3564 : &root->first_child);
3565 tree nref = build4 (ARRAY_REF, elemtype, root->expr,
3566 wide_int_to_tree (domain, idx),
3567 NULL_TREE, NULL_TREE);
3568 struct access *new_child
3569 = create_total_access_and_reshape (root, pos, el_size, elemtype,
3570 nref, p);
3571 if (!new_child)
3572 return false;
3574 if (!is_gimple_reg_type (elemtype)
3575 && !totally_scalarize_subtree (new_child))
3576 return false;
3577 last_seen_sibling = new_child;
3580 break;
3581 default:
3582 gcc_unreachable ();
3585 out:
3586 return true;
3589 /* Go through all accesses collected throughout the (intraprocedural) analysis
3590 stage, exclude overlapping ones, identify representatives and build trees
3591 out of them, making decisions about scalarization on the way. Return true
3592 iff there are any to-be-scalarized variables after this stage. */
3594 static bool
3595 analyze_all_variable_accesses (void)
3597 int res = 0;
3598 bitmap tmp = BITMAP_ALLOC (NULL);
3599 bitmap_iterator bi;
3600 unsigned i;
3602 bitmap_copy (tmp, candidate_bitmap);
3603 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
3605 tree var = candidate (i);
3606 struct access *access;
3608 access = sort_and_splice_var_accesses (var);
3609 if (!access || !build_access_trees (access))
3610 disqualify_candidate (var,
3611 "No or inhibitingly overlapping accesses.");
3614 propagate_all_subaccesses ();
3616 bool optimize_speed_p = !optimize_function_for_size_p (cfun);
3617 /* If the user didn't set PARAM_SRA_MAX_SCALARIZATION_SIZE_<...>,
3618 fall back to a target default. */
3619 unsigned HOST_WIDE_INT max_scalarization_size
3620 = get_move_ratio (optimize_speed_p) * UNITS_PER_WORD;
3622 if (optimize_speed_p)
3624 if (OPTION_SET_P (param_sra_max_scalarization_size_speed))
3625 max_scalarization_size = param_sra_max_scalarization_size_speed;
3627 else
3629 if (OPTION_SET_P (param_sra_max_scalarization_size_size))
3630 max_scalarization_size = param_sra_max_scalarization_size_size;
3632 max_scalarization_size *= BITS_PER_UNIT;
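/* Now attempt total scalarization of every candidate that is marked in
   should_scalarize_away_bitmap but not in cannot_scalarize_away_bitmap. */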
3634 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
3635 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
3636 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
3638 tree var = candidate (i);
3639 if (!VAR_P (var))
3640 continue;
3642 if (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var))) > max_scalarization_size)
3644 if (dump_file && (dump_flags & TDF_DETAILS))
3646 fprintf (dump_file, "Too big to totally scalarize: ");
3647 print_generic_expr (dump_file, var);
3648 fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
3650 continue;
3653 bool all_types_ok = true;
3654 for (struct access *access = get_first_repr_for_decl (var);
3655 access;
3656 access = access->next_grp)
3657 if (!can_totally_scalarize_forest_p (access)
3658 || !scalarizable_type_p (access->type, constant_decl_p (var)))
3660 all_types_ok = false;
3661 break;
3663 if (!all_types_ok)
3664 continue;
3666 if (dump_file && (dump_flags & TDF_DETAILS))
3668 fprintf (dump_file, "Will attempt to totally scalarize ");
3669 print_generic_expr (dump_file, var);
3670 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
3672 bool scalarized = true;
3673 for (struct access *access = get_first_repr_for_decl (var);
3674 access;
3675 access = access->next_grp)
3676 if (!is_gimple_reg_type (access->type)
3677 && !totally_scalarize_subtree (access))
3679 scalarized = false;
3680 break;
3683 if (scalarized)
3684 for (struct access *access = get_first_repr_for_decl (var);
3685 access;
3686 access = access->next_grp)
3687 access->grp_total_scalarization = true;
3690 if (flag_checking)
3691 verify_all_sra_access_forests ();
3693 bitmap_copy (tmp, candidate_bitmap);
3694 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
3696 tree var = candidate (i);
3697 struct access *access = get_first_repr_for_decl (var);
3699 if (analyze_access_trees (access))
3701 res++;
3702 if (dump_file && (dump_flags & TDF_DETAILS))
3704 fprintf (dump_file, "\nAccess trees for ");
3705 print_generic_expr (dump_file, var);
3706 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
3707 dump_access_tree (dump_file, access);
3708 fprintf (dump_file, "\n");
3711 else
3712 disqualify_candidate (var, "No scalar replacements to be created.");
3715 BITMAP_FREE (tmp);
3717 if (res)
3719 statistics_counter_event (cfun, "Scalarized aggregates", res);
3720 return true;
3722 else
3723 return false;
3726 /* Generate statements copying scalar replacements of accesses within a subtree
3727 into or out of AGG. ACCESS, all its children, siblings and their children
3728 are to be processed. AGG is an aggregate type expression (can be a
3729 declaration but does not have to be, it can for example also be a mem_ref or
3730 a series of handled components). TOP_OFFSET is the offset of the processed
3731 subtree which has to be subtracted from offsets of individual accesses to
3732 get corresponding offsets for AGG. If CHUNK_SIZE is non-zero, copy only
3733 replacements in the interval <start_offset, start_offset + chunk_size>,
3734 otherwise copy all. GSI is a statement iterator used to place the new
3735 statements. WRITE should be true when the statements should write from AGG
3736 to the replacement and false in the opposite case. If INSERT_AFTER is true, new
3737 statements will be added after the current statement in GSI, they will be
3738 added before the statement otherwise. */
3740 static void
3741 generate_subtree_copies (struct access *access, tree agg,
3742 HOST_WIDE_INT top_offset,
3743 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
3744 gimple_stmt_iterator *gsi, bool write,
3745 bool insert_after, location_t loc)
3747 /* Never write anything into constant pool decls. See PR70602. */
3748 if (!write && constant_decl_p (agg))
3749 return;
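/* Walk ACCESS and all of its siblings, descending into children as we go;
   stop as soon as an access starts at or beyond the end of the requested
   chunk. */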
3752 if (chunk_size && access->offset >= start_offset + chunk_size)
3753 return;
3755 if (access->grp_to_be_replaced
3756 && (chunk_size == 0
3757 || access->offset + access->size > start_offset))
3759 tree expr, repl = get_access_replacement (access);
3760 gassign *stmt;
3762 expr = build_ref_for_model (loc, agg, access->offset - top_offset,
3763 access, gsi, insert_after);
3765 if (write)
3767 if (access->grp_partial_lhs)
3768 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
3769 !insert_after,
3770 insert_after ? GSI_NEW_STMT
3771 : GSI_SAME_STMT);
3772 stmt = gimple_build_assign (repl, expr);
3774 else
3776 suppress_warning (repl /* Be more selective! */);
3777 if (access->grp_partial_lhs)
3778 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
3779 !insert_after,
3780 insert_after ? GSI_NEW_STMT
3781 : GSI_SAME_STMT);
3782 stmt = gimple_build_assign (expr, repl);
3784 gimple_set_location (stmt, loc);
3786 if (insert_after)
3787 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
3788 else
3789 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3790 update_stmt (stmt);
3791 sra_stats.subtree_copies++;
3793 else if (write
3794 && access->grp_to_be_debug_replaced
3795 && (chunk_size == 0
3796 || access->offset + access->size > start_offset))
3798 gdebug *ds;
3799 tree drhs = build_debug_ref_for_model (loc, agg,
3800 access->offset - top_offset,
3801 access);
3802 ds = gimple_build_debug_bind (get_access_replacement (access),
3803 drhs, gsi_stmt (*gsi));
3804 if (insert_after)
3805 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
3806 else
3807 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3810 if (access->first_child)
3811 generate_subtree_copies (access->first_child, agg, top_offset,
3812 start_offset, chunk_size, gsi,
3813 write, insert_after, loc);
3815 access = access->next_sibling;
3817 while (access);
3820 /* Assign zero to all scalar replacements in an access subtree. ACCESS is the
3821 root of the subtree to be processed. GSI is the statement iterator used
3822 for inserting statements which are added after the current statement if
3823 INSERT_AFTER is true or before it otherwise. */
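/* Illustrative sketch only (the replacement names are made up): for an
   access tree over 'struct S { int i; float f; } s' with replacements SR_i
   and SR_f, this would emit

     SR_i = 0;
     SR_f = 0.0;

   next to the statement GSI points to.  */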
3825 static void
3826 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
3827 bool insert_after, location_t loc)
3830 struct access *child;
3832 if (access->grp_to_be_replaced)
3834 gassign *stmt;
3836 stmt = gimple_build_assign (get_access_replacement (access),
3837 build_zero_cst (access->type));
3838 if (insert_after)
3839 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
3840 else
3841 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3842 update_stmt (stmt);
3843 gimple_set_location (stmt, loc);
3845 else if (access->grp_to_be_debug_replaced)
3847 gdebug *ds
3848 = gimple_build_debug_bind (get_access_replacement (access),
3849 build_zero_cst (access->type),
3850 gsi_stmt (*gsi));
3851 if (insert_after)
3852 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
3853 else
3854 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3857 for (child = access->first_child; child; child = child->next_sibling)
3858 init_subtree_with_zero (child, gsi, insert_after, loc);
3861 /* Clobber all scalar replacements in an access subtree. ACCESS is the
3862 root of the subtree to be processed. GSI is the statement iterator used
3863 for inserting statements which are added after the current statement if
3864 INSERT_AFTER is true or before it otherwise. */
3866 static void
3867 clobber_subtree (struct access *access, gimple_stmt_iterator *gsi,
3868 bool insert_after, location_t loc)
3871 struct access *child;
3873 if (access->grp_to_be_replaced)
3875 tree rep = get_access_replacement (access);
3876 tree clobber = build_clobber (access->type);
3877 gimple *stmt = gimple_build_assign (rep, clobber);
3879 if (insert_after)
3880 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
3881 else
3882 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3883 update_stmt (stmt);
3884 gimple_set_location (stmt, loc);
3887 for (child = access->first_child; child; child = child->next_sibling)
3888 clobber_subtree (child, gsi, insert_after, loc);
3891 /* Search for an access representative for the given expression EXPR and
3892 return it or NULL if it cannot be found. */
3894 static struct access *
3895 get_access_for_expr (tree expr)
3897 poly_int64 poffset, psize, pmax_size;
3898 HOST_WIDE_INT offset, max_size;
3899 tree base;
3900 bool reverse;
3902 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
3903 a different size than the size of its argument and we need the latter
3904 one. */
3905 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3906 expr = TREE_OPERAND (expr, 0);
3908 base = get_ref_base_and_extent (expr, &poffset, &psize, &pmax_size,
3909 &reverse);
3910 if (!known_size_p (pmax_size)
3911 || !pmax_size.is_constant (&max_size)
3912 || !poffset.is_constant (&offset)
3913 || !DECL_P (base))
3914 return NULL;
3916 if (tree basesize = DECL_SIZE (base))
3918 poly_int64 sz;
3919 if (offset < 0
3920 || !poly_int_tree_p (basesize, &sz)
3921 || known_le (sz, offset))
3922 return NULL;
3925 if (max_size == 0
3926 || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
3927 return NULL;
3929 return get_var_base_offset_size_access (base, offset, max_size);
3932 /* Replace the expression EXPR with a scalar replacement if there is one and
3933 generate other statements to do type conversion or subtree copying if
3934 necessary. WRITE is true if the expression is being written to (it is on a
3935 LHS of a statement or output in an assembly statement). STMT_GSI is used to
3936 place newly created statements before the processed statement, REFRESH_GSI
3937 is used to place them afterwards - unless the processed statement must end a
3938 BB, in which case they are placed on the outgoing non-EH edge. REFRESH_GSI
3939 is then used to continue iteration over the BB. If sra_modify_expr is
3940 called only once with WRITE equal to true on a given statement, both
3941 iterator parameters can point to the same one. */
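/* A hedged illustrative sketch (the SSA and replacement names are made up):
   if s.i has the scalar replacement SR_i and the types match, a read such as

     _1 = s.i;

   is rewritten to

     _1 = SR_i;

   When the expression type and the replacement type differ, an extra load or
   store through a reference built by build_ref_for_model is emitted instead
   of a direct substitution.  */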
3943 static bool
3944 sra_modify_expr (tree *expr, bool write, gimple_stmt_iterator *stmt_gsi,
3945 gimple_stmt_iterator *refresh_gsi)
3947 location_t loc;
3948 struct access *access;
3949 tree type, bfr, orig_expr;
3950 bool partial_cplx_access = false;
3952 if (TREE_CODE (*expr) == BIT_FIELD_REF
3953 && (write || !sra_handled_bf_read_p (*expr)))
3955 bfr = *expr;
3956 expr = &TREE_OPERAND (*expr, 0);
3958 else
3959 bfr = NULL_TREE;
3961 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
3963 expr = &TREE_OPERAND (*expr, 0);
3964 partial_cplx_access = true;
3966 access = get_access_for_expr (*expr);
3967 if (!access)
3968 return false;
3969 type = TREE_TYPE (*expr);
3970 orig_expr = *expr;
3972 loc = gimple_location (gsi_stmt (*stmt_gsi));
3973 gimple_stmt_iterator alt_gsi = gsi_none ();
3974 if (write && stmt_ends_bb_p (gsi_stmt (*stmt_gsi)))
3976 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*stmt_gsi)));
3977 refresh_gsi = &alt_gsi;
3980 if (access->grp_to_be_replaced)
3982 tree repl = get_access_replacement (access);
3983 /* If we replace a non-register typed access, simply use the original
3984 access expression to extract the scalar component afterwards.
3985 This happens if scalarizing a function return value or parameter
3986 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
3987 gcc.c-torture/compile/20011217-1.c.
3989 We also want to use this when accessing a complex or vector which can
3990 be accessed as a different type too, potentially creating a need for
3991 type conversion (see PR42196) and when scalarized unions are involved
3992 in assembler statements (see PR42398). */
3993 if (!bfr && !useless_type_conversion_p (type, access->type))
3995 tree ref;
3997 ref = build_ref_for_model (loc, orig_expr, 0, access, stmt_gsi,
3998 false);
4000 if (partial_cplx_access)
4002 /* VIEW_CONVERT_EXPRs in partial complex access are always fine in
4003 the case of a write because in such case the replacement cannot
4004 be a gimple register. In the case of a load, we have to
4005 differentiate between a register and a non-register
4006 replacement. */
4007 tree t = build1 (VIEW_CONVERT_EXPR, type, repl);
4008 gcc_checking_assert (!write || access->grp_partial_lhs);
4009 if (!access->grp_partial_lhs)
4011 tree tmp = make_ssa_name (type);
4012 gassign *stmt = gimple_build_assign (tmp, t);
4013 /* This is always a read. */
4014 gsi_insert_before (stmt_gsi, stmt, GSI_SAME_STMT);
4015 t = tmp;
4017 *expr = t;
4019 else if (write)
4021 gassign *stmt;
4023 if (access->grp_partial_lhs)
4024 ref = force_gimple_operand_gsi (refresh_gsi, ref, true,
4025 NULL_TREE, false, GSI_NEW_STMT);
4026 stmt = gimple_build_assign (repl, ref);
4027 gimple_set_location (stmt, loc);
4028 gsi_insert_after (refresh_gsi, stmt, GSI_NEW_STMT);
4030 else
4032 gassign *stmt;
4034 if (access->grp_partial_lhs)
4035 repl = force_gimple_operand_gsi (stmt_gsi, repl, true,
4036 NULL_TREE, true,
4037 GSI_SAME_STMT);
4038 stmt = gimple_build_assign (ref, repl);
4039 gimple_set_location (stmt, loc);
4040 gsi_insert_before (stmt_gsi, stmt, GSI_SAME_STMT);
4043 else
4045 /* If we are going to replace a scalar field in a structure with
4046 reverse storage order by a stand-alone scalar, we are going to
4047 effectively byte-swap the scalar and we also need to byte-swap
4048 the portion of it represented by the bit-field. */
4049 if (bfr && REF_REVERSE_STORAGE_ORDER (bfr))
4051 REF_REVERSE_STORAGE_ORDER (bfr) = 0;
4052 TREE_OPERAND (bfr, 2)
4053 = size_binop (MINUS_EXPR, TYPE_SIZE (TREE_TYPE (repl)),
4054 size_binop (PLUS_EXPR, TREE_OPERAND (bfr, 1),
4055 TREE_OPERAND (bfr, 2)));
4058 *expr = repl;
4061 sra_stats.exprs++;
4063 else if (write && access->grp_to_be_debug_replaced)
4065 gdebug *ds = gimple_build_debug_bind (get_access_replacement (access),
4066 NULL_TREE,
4067 gsi_stmt (*stmt_gsi));
4068 gsi_insert_after (stmt_gsi, ds, GSI_NEW_STMT);
4071 if (access->first_child && !TREE_READONLY (access->base))
4073 HOST_WIDE_INT start_offset, chunk_size;
4074 if (bfr
4075 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 1))
4076 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 2)))
4078 chunk_size = tree_to_uhwi (TREE_OPERAND (bfr, 1));
4079 start_offset = access->offset
4080 + tree_to_uhwi (TREE_OPERAND (bfr, 2));
4082 else
4083 start_offset = chunk_size = 0;
4085 generate_subtree_copies (access->first_child, orig_expr, access->offset,
4086 start_offset, chunk_size,
4087 write ? refresh_gsi : stmt_gsi,
4088 write, write, loc);
4090 return true;
4093 /* If EXPR, which must be a call argument, is an ADDR_EXPR, generate statements
4094 copying scalar replacements into its base before the call statement given in
4095 CALL_GSI and back out of it afterwards, and return true if any copying took
4096 place. Otherwise call sra_modify_expr on EXPR and return its value. FLAGS
4097 is what gimple_call_arg_flags returns for the given parameter. */
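/* Illustrative sketch only (the names are made up): for a call 'foo (&s)'
   where s.i has the replacement SR_i, the replacement is flushed into the
   aggregate before the call and, unless the argument is known not to be
   clobbered (EAF_NO_DIRECT_CLOBBER), re-loaded afterwards:

     s.i = SR_i;
     foo (&s);
     SR_i = s.i;  */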
4099 static bool
4100 sra_modify_call_arg (tree *expr, gimple_stmt_iterator *call_gsi,
4101 gimple_stmt_iterator *refresh_gsi, int flags)
4103 if (TREE_CODE (*expr) != ADDR_EXPR)
4104 return sra_modify_expr (expr, false, call_gsi, refresh_gsi);
4106 if (flags & EAF_UNUSED)
4107 return false;
4109 tree base = get_base_address (TREE_OPERAND (*expr, 0));
4110 if (!DECL_P (base))
4111 return false;
4112 struct access *access = get_access_for_expr (base);
4113 if (!access)
4114 return false;
4116 gimple *stmt = gsi_stmt (*call_gsi);
4117 location_t loc = gimple_location (stmt);
4118 generate_subtree_copies (access, base, 0, 0, 0, call_gsi, false, false,
4119 loc);
4121 if (flags & EAF_NO_DIRECT_CLOBBER)
4122 return true;
4124 if (!stmt_ends_bb_p (stmt))
4125 generate_subtree_copies (access, base, 0, 0, 0, refresh_gsi, true,
4126 true, loc);
4127 else
4129 edge e;
4130 edge_iterator ei;
4131 FOR_EACH_EDGE (e, ei, gsi_bb (*call_gsi)->succs)
4133 gimple_stmt_iterator alt_gsi = gsi_start_edge (e);
4134 generate_subtree_copies (access, base, 0, 0, 0, &alt_gsi, true,
4135 true, loc);
4138 return true;
4141 /* Where scalar replacements of the RHS have been written to when a replacement
4142 of the LHS of an assignment cannot be directly loaded from a replacement of
4143 the RHS. */
4144 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
4145 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
4146 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
4148 struct subreplacement_assignment_data
4150 /* Offset of the access representing the lhs of the assignment. */
4151 HOST_WIDE_INT left_offset;
4153 /* LHS and RHS of the original assignment. */
4154 tree assignment_lhs, assignment_rhs;
4156 /* Access representing the rhs of the whole assignment. */
4157 struct access *top_racc;
4159 /* Stmt iterator used for statement insertions after the original assignment.
4160 It points to the main GSI used to traverse a BB during function body
4161 modification. */
4162 gimple_stmt_iterator *new_gsi;
4164 /* Stmt iterator used for statement insertions before the original
4165 assignment. Keeps on pointing to the original statement. */
4166 gimple_stmt_iterator old_gsi;
4168 /* Location of the assignment. */
4169 location_t loc;
4171 /* Keeps the information whether we have needed to refresh replacements of
4172 the LHS and from which side of the assignments this takes place. */
4173 enum unscalarized_data_handling refreshed;
4176 /* Store all replacements in the access tree rooted in SAD->top_racc either to
4177 their base aggregate if there are unscalarized data or directly to the LHS of
4178 the assignment described by SAD otherwise. */
4180 static void
4181 handle_unscalarized_data_in_subtree (struct subreplacement_assignment_data *sad)
4183 tree src;
4184 /* If the RHS is a load from a constant, we do not need to (and must not)
4185 flush replacements to it and can use it directly as if we did. */
4186 if (TREE_READONLY (sad->top_racc->base))
4188 sad->refreshed = SRA_UDH_RIGHT;
4189 return;
4191 if (sad->top_racc->grp_unscalarized_data)
4193 src = sad->assignment_rhs;
4194 sad->refreshed = SRA_UDH_RIGHT;
4196 else
4198 src = sad->assignment_lhs;
4199 sad->refreshed = SRA_UDH_LEFT;
4201 generate_subtree_copies (sad->top_racc->first_child, src,
4202 sad->top_racc->offset, 0, 0,
4203 &sad->old_gsi, false, false, sad->loc);
4206 /* Try to generate statements to load all sub-replacements in an access subtree
4207 formed by children of LACC from scalar replacements in the SAD->top_racc
4208 subtree. If that is not possible, refresh the SAD->top_racc base aggregate
4209 and load the accesses from it. */
4211 static void
4212 load_assign_lhs_subreplacements (struct access *lacc,
4213 struct subreplacement_assignment_data *sad)
4215 for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
4217 HOST_WIDE_INT offset;
4218 offset = lacc->offset - sad->left_offset + sad->top_racc->offset;
4220 if (lacc->grp_to_be_replaced)
4222 struct access *racc;
4223 gassign *stmt;
4224 tree rhs;
4226 racc = find_access_in_subtree (sad->top_racc, offset, lacc->size);
4227 if (racc && racc->grp_to_be_replaced)
4229 rhs = get_access_replacement (racc);
4230 bool vce = false;
4231 if (!useless_type_conversion_p (lacc->type, racc->type))
4233 rhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
4234 lacc->type, rhs);
4235 vce = true;
4238 if (lacc->grp_partial_lhs && (vce || racc->grp_partial_lhs))
4239 rhs = force_gimple_operand_gsi (&sad->old_gsi, rhs, true,
4240 NULL_TREE, true, GSI_SAME_STMT);
4242 else
4244 /* No suitable access on the right hand side, need to load from
4245 the aggregate. See if we have to update it first... */
4246 if (sad->refreshed == SRA_UDH_NONE)
4247 handle_unscalarized_data_in_subtree (sad);
4249 if (sad->refreshed == SRA_UDH_LEFT)
4250 rhs = build_ref_for_model (sad->loc, sad->assignment_lhs,
4251 lacc->offset - sad->left_offset,
4252 lacc, sad->new_gsi, true);
4253 else
4254 rhs = build_ref_for_model (sad->loc, sad->assignment_rhs,
4255 lacc->offset - sad->left_offset,
4256 lacc, sad->new_gsi, true);
4257 if (lacc->grp_partial_lhs)
4258 rhs = force_gimple_operand_gsi (sad->new_gsi,
4259 rhs, true, NULL_TREE,
4260 false, GSI_NEW_STMT);
4263 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
4264 gsi_insert_after (sad->new_gsi, stmt, GSI_NEW_STMT);
4265 gimple_set_location (stmt, sad->loc);
4266 update_stmt (stmt);
4267 sra_stats.subreplacements++;
4269 else
4271 if (sad->refreshed == SRA_UDH_NONE
4272 && lacc->grp_read && !lacc->grp_covered)
4273 handle_unscalarized_data_in_subtree (sad);
4275 if (lacc && lacc->grp_to_be_debug_replaced)
4277 gdebug *ds;
4278 tree drhs;
4279 struct access *racc = find_access_in_subtree (sad->top_racc,
4280 offset,
4281 lacc->size);
4283 if (racc && racc->grp_to_be_replaced)
4285 if (racc->grp_write || constant_decl_p (racc->base))
4286 drhs = get_access_replacement (racc);
4287 else
4288 drhs = NULL;
4290 else if (sad->refreshed == SRA_UDH_LEFT)
4291 drhs = build_debug_ref_for_model (sad->loc, lacc->base,
4292 lacc->offset, lacc);
4293 else if (sad->refreshed == SRA_UDH_RIGHT)
4294 drhs = build_debug_ref_for_model (sad->loc, sad->top_racc->base,
4295 offset, lacc);
4296 else
4297 drhs = NULL_TREE;
4298 if (drhs
4299 && !useless_type_conversion_p (lacc->type, TREE_TYPE (drhs)))
4300 drhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
4301 lacc->type, drhs);
4302 ds = gimple_build_debug_bind (get_access_replacement (lacc),
4303 drhs, gsi_stmt (sad->old_gsi));
4304 gsi_insert_after (sad->new_gsi, ds, GSI_NEW_STMT);
4308 if (lacc->first_child)
4309 load_assign_lhs_subreplacements (lacc, sad);
4313 /* Result code for SRA assignment modification. */
4314 enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
4315 SRA_AM_MODIFIED, /* stmt changed but not
4316 removed */
4317 SRA_AM_REMOVED }; /* stmt eliminated */
4319 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT is the assignment
4320 statement and GSI is the statement iterator pointing at it. Returns
4321 the same values as sra_modify_assign. */
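/* Illustrative sketch only (the names are made up): an empty-CONSTRUCTOR
   assignment 's = {}' over a fully covered access tree with replacements
   SR_i and SR_f would become

     SR_i = 0;
     SR_f = 0.0;

   and the original statement would be removed; clobber statements are handled
   analogously, emitting clobbers of the replacements instead of zeros.  */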
4323 static enum assignment_mod_result
4324 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
4326 tree lhs = gimple_assign_lhs (stmt);
4327 struct access *acc = get_access_for_expr (lhs);
4328 if (!acc)
4329 return SRA_AM_NONE;
4330 location_t loc = gimple_location (stmt);
4332 if (gimple_clobber_p (stmt))
4334 /* Clobber the replacement variable. */
4335 clobber_subtree (acc, gsi, !acc->grp_covered, loc);
4336 /* Remove clobbers of fully scalarized variables, they are dead. */
4337 if (acc->grp_covered)
4339 unlink_stmt_vdef (stmt);
4340 gsi_remove (gsi, true);
4341 release_defs (stmt);
4342 return SRA_AM_REMOVED;
4344 else
4345 return SRA_AM_MODIFIED;
4348 if (CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt)) > 0)
4350 /* I have never seen this code path trigger but if it can happen the
4351 following should handle it gracefully. */
4352 if (access_has_children_p (acc))
4353 generate_subtree_copies (acc->first_child, lhs, acc->offset, 0, 0, gsi,
4354 true, true, loc);
4355 return SRA_AM_MODIFIED;
4358 if (acc->grp_covered)
4360 init_subtree_with_zero (acc, gsi, false, loc);
4361 unlink_stmt_vdef (stmt);
4362 gsi_remove (gsi, true);
4363 release_defs (stmt);
4364 return SRA_AM_REMOVED;
4366 else
4368 init_subtree_with_zero (acc, gsi, true, loc);
4369 return SRA_AM_MODIFIED;
4373 /* Create and return a new suitable default definition SSA_NAME for RACC which
4374 is an access describing an uninitialized part of an aggregate that is being
4375 loaded. REG_TYPE is used instead of the actual RACC type if that is not of
4376 a gimple register type. */
4378 static tree
4379 get_repl_default_def_ssa_name (struct access *racc, tree reg_type)
4381 gcc_checking_assert (!racc->grp_to_be_replaced
4382 && !racc->grp_to_be_debug_replaced);
4383 if (!racc->replacement_decl)
4384 racc->replacement_decl = create_access_replacement (racc, reg_type);
4385 return get_or_create_ssa_default_def (cfun, racc->replacement_decl);
4389 /* Generate statements to call .DEFERRED_INIT to initialize scalar replacements
4390 of accesses within a subtree ACCESS; all its children, siblings and their
4391 children are to be processed.
4392 GSI is a statement iterator used to place the new statements. */
4393 static void
4394 generate_subtree_deferred_init (struct access *access,
4395 tree init_type,
4396 tree decl_name,
4397 gimple_stmt_iterator *gsi,
4398 location_t loc)
4402 if (access->grp_to_be_replaced)
4404 tree repl = get_access_replacement (access);
4405 gimple *call
4406 = gimple_build_call_internal (IFN_DEFERRED_INIT, 3,
4407 TYPE_SIZE_UNIT (TREE_TYPE (repl)),
4408 init_type, decl_name);
4409 gimple_call_set_lhs (call, repl);
4410 gsi_insert_before (gsi, call, GSI_SAME_STMT);
4411 update_stmt (call);
4412 gimple_set_location (call, loc);
4413 sra_stats.subtree_deferred_init++;
4415 if (access->first_child)
4416 generate_subtree_deferred_init (access->first_child, init_type,
4417 decl_name, gsi, loc);
4419 access = access->next_sibling;
4421 while (access);
4424 /* For a call to .DEFERRED_INIT:
4425 var = .DEFERRED_INIT (size_of_var, init_type, name_of_var);
4426 examine the LHS variable VAR and replace it with a scalar replacement if
4427 there is one, also replace the RHS call to a call to .DEFERRED_INIT of
4428 the corresponding scalar replacement variable. Examine the subtree and
4429 do the scalar replacements in the subtree too. STMT is the call, GSI is
4430 the statement iterator used to place newly created statements. */
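/* Illustrative sketch only (the sizes, the init-type constant and the names
   are made up): a call such as

     s = .DEFERRED_INIT (8, 2, &"s");

   over an access tree with replacements SR_i and SR_f would roughly become

     SR_i = .DEFERRED_INIT (4, 2, &"s");
     SR_f = .DEFERRED_INIT (4, 2, &"s");

   with the original call removed once the tree covers the whole variable.  */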
4432 static enum assignment_mod_result
4433 sra_modify_deferred_init (gimple *stmt, gimple_stmt_iterator *gsi)
4435 tree lhs = gimple_call_lhs (stmt);
4436 tree init_type = gimple_call_arg (stmt, 1);
4437 tree decl_name = gimple_call_arg (stmt, 2);
4439 struct access *lhs_access = get_access_for_expr (lhs);
4440 if (!lhs_access)
4441 return SRA_AM_NONE;
4443 location_t loc = gimple_location (stmt);
4445 if (lhs_access->grp_to_be_replaced)
4447 tree lhs_repl = get_access_replacement (lhs_access);
4448 gimple_call_set_lhs (stmt, lhs_repl);
4449 tree arg0_repl = TYPE_SIZE_UNIT (TREE_TYPE (lhs_repl));
4450 gimple_call_set_arg (stmt, 0, arg0_repl);
4451 sra_stats.deferred_init++;
4452 gcc_assert (!lhs_access->first_child);
4453 return SRA_AM_MODIFIED;
4456 if (lhs_access->first_child)
4457 generate_subtree_deferred_init (lhs_access->first_child,
4458 init_type, decl_name, gsi, loc);
4459 if (lhs_access->grp_covered)
4461 unlink_stmt_vdef (stmt);
4462 gsi_remove (gsi, true);
4463 release_defs (stmt);
4464 return SRA_AM_REMOVED;
4467 return SRA_AM_MODIFIED;
4470 /* Examine both sides of the assignment statement pointed to by STMT, replace
4471 them with a scalar replacement if there is one and generate copying of
4472 replacements if scalarized aggregates have been used in the assignment. GSI
4473 is used to hold generated statements for type conversions and subtree
4474 copying. */
4476 static enum assignment_mod_result
4477 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
4479 struct access *lacc, *racc;
4480 tree lhs, rhs;
4481 bool modify_this_stmt = false;
4482 bool force_gimple_rhs = false;
4483 location_t loc;
4484 gimple_stmt_iterator orig_gsi = *gsi;
4486 if (!gimple_assign_single_p (stmt))
4487 return SRA_AM_NONE;
4488 lhs = gimple_assign_lhs (stmt);
4489 rhs = gimple_assign_rhs1 (stmt);
4491 if (TREE_CODE (rhs) == CONSTRUCTOR)
4492 return sra_modify_constructor_assign (stmt, gsi);
4494 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
4495 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
4496 || (TREE_CODE (rhs) == BIT_FIELD_REF && !sra_handled_bf_read_p (rhs))
4497 || TREE_CODE (lhs) == BIT_FIELD_REF)
4499 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (stmt),
4500 false, gsi, gsi);
4501 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (stmt),
4502 true, gsi, gsi);
4503 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
4506 lacc = get_access_for_expr (lhs);
4507 racc = get_access_for_expr (rhs);
4508 if (!lacc && !racc)
4509 return SRA_AM_NONE;
4510 /* Avoid modifying initializations of constant-pool replacements. */
4511 if (racc && (racc->replacement_decl == lhs))
4512 return SRA_AM_NONE;
4514 loc = gimple_location (stmt);
4515 if (lacc && lacc->grp_to_be_replaced)
4517 lhs = get_access_replacement (lacc);
4518 gimple_assign_set_lhs (stmt, lhs);
4519 modify_this_stmt = true;
4520 if (lacc->grp_partial_lhs)
4521 force_gimple_rhs = true;
4522 sra_stats.exprs++;
4525 if (racc && racc->grp_to_be_replaced)
4527 rhs = get_access_replacement (racc);
4528 modify_this_stmt = true;
4529 if (racc->grp_partial_lhs)
4530 force_gimple_rhs = true;
4531 sra_stats.exprs++;
4533 else if (racc
4534 && !racc->grp_unscalarized_data
4535 && !racc->grp_unscalarizable_region
4536 && TREE_CODE (lhs) == SSA_NAME
4537 && !access_has_replacements_p (racc))
4539 rhs = get_repl_default_def_ssa_name (racc, TREE_TYPE (lhs));
4540 modify_this_stmt = true;
4541 sra_stats.exprs++;
4544 if (modify_this_stmt
4545 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
4547 /* If we can avoid creating a VIEW_CONVERT_EXPR, then do so.
4548 ??? This should move to fold_stmt which we simply should
4549 call after building a VIEW_CONVERT_EXPR here. */
4550 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
4551 && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (lhs)) == racc->reverse
4552 && !contains_bitfld_component_ref_p (lhs))
4554 lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
4555 gimple_assign_set_lhs (stmt, lhs);
4557 else if (lacc
4558 && AGGREGATE_TYPE_P (TREE_TYPE (rhs))
4559 && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (rhs)) == lacc->reverse
4560 && !contains_vce_or_bfcref_p (rhs))
4561 rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
4563 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
4565 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
4566 if (is_gimple_reg_type (TREE_TYPE (lhs))
4567 && TREE_CODE (lhs) != SSA_NAME)
4568 force_gimple_rhs = true;
4572 if (lacc && lacc->grp_to_be_debug_replaced)
4574 tree dlhs = get_access_replacement (lacc);
4575 tree drhs = unshare_expr (rhs);
4576 if (!useless_type_conversion_p (TREE_TYPE (dlhs), TREE_TYPE (drhs)))
4578 if (AGGREGATE_TYPE_P (TREE_TYPE (drhs))
4579 && !contains_vce_or_bfcref_p (drhs))
4580 drhs = build_debug_ref_for_model (loc, drhs, 0, lacc);
4581 if (drhs
4582 && !useless_type_conversion_p (TREE_TYPE (dlhs),
4583 TREE_TYPE (drhs)))
4584 drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
4585 TREE_TYPE (dlhs), drhs);
4587 gdebug *ds = gimple_build_debug_bind (dlhs, drhs, stmt);
4588 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
4591 /* From this point on, the function deals with assignments in between
4592 aggregates when at least one has scalar reductions of some of its
4593 components. There are three possible scenarios: 1) both the LHS and RHS have
4594 to-be-scalarized components, 2) only the RHS has or 3) only the LHS has.
4596 In the first case, we would like to load the LHS components from RHS
4597 components whenever possible. If that is not possible, we would like to
4598 read it directly from the RHS (after updating it by storing in it its own
4599 components). If there are some necessary unscalarized data in the LHS,
4600 those will be loaded by the original assignment too. If neither of these
4601 cases happen, the original statement can be removed. Most of this is done
4602 by load_assign_lhs_subreplacements.
4604 In the second case, we would like to store all RHS scalarized components
4605 directly into LHS and if they cover the aggregate completely, remove the
4606 statement too. In the third case, we want the LHS components to be loaded
4607 directly from the RHS (DSE will remove the original statement if it
4608 becomes redundant).
4610 This is a bit complex but manageable when types match and when unions do
4611 not cause confusion in a way that we cannot really load a component of LHS
4612 from the RHS or vice versa (the access representing this level can have
4613 subaccesses that are accessible only through a different union field at a
4614 higher level - different from the one used in the examined expression).
4615 Unions are fun.
4617 Therefore, I specially handle a fourth case, happening when there is a
4618 specific type cast or it is impossible to locate a scalarized subaccess on
4619 the other side of the expression. If that happens, I simply "refresh" the
4620 RHS by storing in it its scalarized components, leave the original statement
4621 there to do the copying and then load the scalar replacements of the LHS.
4622 This is what the first branch does. */
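/* A hedged illustrative sketch of the first scenario (the names are made up):
   for an aggregate copy 'd = s;' where both sides are fully scalarized with
   replacements SR_d_i, SR_d_f and SR_s_i, SR_s_f, the loads

     SR_d_i = SR_s_i;
     SR_d_f = SR_s_f;

   are generated by load_assign_lhs_subreplacements and the original aggregate
   copy is removed when nothing had to be refreshed from the right-hand
   side.  */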
4624 if (modify_this_stmt
4625 || gimple_has_volatile_ops (stmt)
4626 || contains_vce_or_bfcref_p (rhs)
4627 || contains_vce_or_bfcref_p (lhs)
4628 || stmt_ends_bb_p (stmt))
4630 /* No need to copy into a constant, it comes pre-initialized. */
4631 if (access_has_children_p (racc) && !TREE_READONLY (racc->base))
4632 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
4633 gsi, false, false, loc);
4634 if (access_has_children_p (lacc))
4636 gimple_stmt_iterator alt_gsi = gsi_none ();
4637 if (stmt_ends_bb_p (stmt))
4639 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
4640 gsi = &alt_gsi;
4642 generate_subtree_copies (lacc->first_child, lhs, lacc->offset, 0, 0,
4643 gsi, true, true, loc);
4645 sra_stats.separate_lhs_rhs_handling++;
4647 /* This gimplification must be done after generate_subtree_copies,
4648 lest we insert the subtree copies in the middle of the gimplified
4649 sequence. */
4650 if (force_gimple_rhs)
4651 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
4652 true, GSI_SAME_STMT);
4653 if (gimple_assign_rhs1 (stmt) != rhs)
4655 modify_this_stmt = true;
4656 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
4657 gcc_assert (stmt == gsi_stmt (orig_gsi));
4660 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
4662 else
4664 if (access_has_children_p (lacc)
4665 && access_has_children_p (racc)
4666 /* When an access represents an unscalarizable region, it usually
4667 represents accesses with variable offset and thus must not be used
4668 to generate new memory accesses. */
4669 && !lacc->grp_unscalarizable_region
4670 && !racc->grp_unscalarizable_region)
4672 struct subreplacement_assignment_data sad;
4674 sad.left_offset = lacc->offset;
4675 sad.assignment_lhs = lhs;
4676 sad.assignment_rhs = rhs;
4677 sad.top_racc = racc;
4678 sad.old_gsi = *gsi;
4679 sad.new_gsi = gsi;
4680 sad.loc = gimple_location (stmt);
4681 sad.refreshed = SRA_UDH_NONE;
4683 if (lacc->grp_read && !lacc->grp_covered)
4684 handle_unscalarized_data_in_subtree (&sad);
4686 load_assign_lhs_subreplacements (lacc, &sad);
4687 if (sad.refreshed != SRA_UDH_RIGHT)
4689 gsi_next (gsi);
4690 unlink_stmt_vdef (stmt);
4691 gsi_remove (&sad.old_gsi, true);
4692 release_defs (stmt);
4693 sra_stats.deleted++;
4694 return SRA_AM_REMOVED;
4697 else
4699 if (access_has_children_p (racc)
4700 && !racc->grp_unscalarized_data
4701 && TREE_CODE (lhs) != SSA_NAME)
4703 if (dump_file)
4705 fprintf (dump_file, "Removing load: ");
4706 print_gimple_stmt (dump_file, stmt, 0);
4708 generate_subtree_copies (racc->first_child, lhs,
4709 racc->offset, 0, 0, gsi,
4710 false, false, loc);
4711 gcc_assert (stmt == gsi_stmt (*gsi));
4712 unlink_stmt_vdef (stmt);
4713 gsi_remove (gsi, true);
4714 release_defs (stmt);
4715 sra_stats.deleted++;
4716 return SRA_AM_REMOVED;
4718 /* Restore the aggregate RHS from its components so the
4719 prevailing aggregate copy does the right thing. */
4720 if (access_has_children_p (racc) && !TREE_READONLY (racc->base))
4721 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
4722 gsi, false, false, loc);
4723 /* Re-load the components of the aggregate copy destination.
4724 But use the RHS aggregate to load from to expose more
4725 optimization opportunities. */
4726 if (access_has_children_p (lacc))
4727 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
4728 0, 0, gsi, true, true, loc);
4731 return SRA_AM_NONE;
4735 /* Set any scalar replacements of values in the constant pool to the initial
4736 value of the constant. (Constant-pool decls like *.LC0 have effectively
4737 been initialized before the program starts; we must do the same for their
4738 replacements.) Thus, we output statements like 'SR.1 = *.LC0[0];' into
4739 the function's entry block. */
4741 static void
4742 initialize_constant_pool_replacements (void)
4744 gimple_seq seq = NULL;
4745 gimple_stmt_iterator gsi = gsi_start (seq);
4746 bitmap_iterator bi;
4747 unsigned i;
4749 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
4751 tree var = candidate (i);
4752 if (!constant_decl_p (var))
4753 continue;
4755 struct access *access = get_first_repr_for_decl (var);
4757 while (access)
4759 if (access->replacement_decl)
4761 gassign *stmt
4762 = gimple_build_assign (get_access_replacement (access),
4763 unshare_expr (access->expr));
4764 if (dump_file && (dump_flags & TDF_DETAILS))
4766 fprintf (dump_file, "Generating constant initializer: ");
4767 print_gimple_stmt (dump_file, stmt, 0);
4768 fprintf (dump_file, "\n");
4770 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
4771 update_stmt (stmt);
4774 if (access->first_child)
4775 access = access->first_child;
4776 else if (access->next_sibling)
4777 access = access->next_sibling;
4778 else
4780 while (access->parent && !access->next_sibling)
4781 access = access->parent;
4782 if (access->next_sibling)
4783 access = access->next_sibling;
4784 else
4785 access = access->next_grp;
4790 seq = gsi_seq (gsi);
4791 if (seq)
4792 gsi_insert_seq_on_edge_immediate (
4793 single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
4796 /* Traverse the function body and perform all modifications as decided in
4797 analyze_all_variable_accesses. Return true iff the CFG has been
4798 changed. */
4800 static bool
4801 sra_modify_function_body (void)
4803 bool cfg_changed = false;
4804 basic_block bb;
4806 initialize_constant_pool_replacements ();
4808 FOR_EACH_BB_FN (bb, cfun)
4810 gimple_stmt_iterator gsi = gsi_start_bb (bb);
4811 while (!gsi_end_p (gsi))
4813 gimple *stmt = gsi_stmt (gsi);
4814 enum assignment_mod_result assign_result;
4815 bool modified = false, deleted = false;
4816 tree *t;
4817 unsigned i;
4819 switch (gimple_code (stmt))
4821 case GIMPLE_RETURN:
4822 t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
4823 if (*t != NULL_TREE)
4824 modified |= sra_modify_expr (t, false, &gsi, &gsi);
4825 break;
4827 case GIMPLE_ASSIGN:
4828 assign_result = sra_modify_assign (stmt, &gsi);
4829 modified |= assign_result == SRA_AM_MODIFIED;
4830 deleted = assign_result == SRA_AM_REMOVED;
4831 break;
4833 case GIMPLE_CALL:
4834 /* Handle calls to .DEFERRED_INIT specially. */
4835 if (gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
4837 assign_result = sra_modify_deferred_init (stmt, &gsi);
4838 modified |= assign_result == SRA_AM_MODIFIED;
4839 deleted = assign_result == SRA_AM_REMOVED;
4841 else
4843 gcall *call = as_a <gcall *> (stmt);
4844 gimple_stmt_iterator call_gsi = gsi;
4846 /* Operands must be processed before the lhs. */
4847 for (i = 0; i < gimple_call_num_args (call); i++)
4849 int flags = gimple_call_arg_flags (call, i);
4850 t = gimple_call_arg_ptr (call, i);
4851 modified |= sra_modify_call_arg (t, &call_gsi, &gsi, flags);
4853 if (gimple_call_chain (call))
4855 t = gimple_call_chain_ptr (call);
4856 int flags = gimple_call_static_chain_flags (call);
4857 modified |= sra_modify_call_arg (t, &call_gsi, &gsi,
4858 flags);
4860 if (gimple_call_lhs (call))
4862 t = gimple_call_lhs_ptr (call);
4863 modified |= sra_modify_expr (t, true, &call_gsi, &gsi);
4866 break;
4868 case GIMPLE_ASM:
4870 gimple_stmt_iterator stmt_gsi = gsi;
4871 gasm *asm_stmt = as_a <gasm *> (stmt);
4872 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
4874 t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
4875 modified |= sra_modify_expr (t, false, &stmt_gsi, &gsi);
4877 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
4879 t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
4880 modified |= sra_modify_expr (t, true, &stmt_gsi, &gsi);
4883 break;
4885 default:
4886 break;
4889 if (modified)
4891 update_stmt (stmt);
4892 if (maybe_clean_eh_stmt (stmt)
4893 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4894 cfg_changed = true;
4896 if (!deleted)
4897 gsi_next (&gsi);
4901 gsi_commit_edge_inserts ();
4902 return cfg_changed;
4905 /* Generate statements initializing scalar replacements of parts of function
4906 parameters. */
4908 static void
4909 initialize_parameter_reductions (void)
4911 gimple_stmt_iterator gsi;
4912 gimple_seq seq = NULL;
4913 tree parm;
4915 gsi = gsi_start (seq);
4916 for (parm = DECL_ARGUMENTS (current_function_decl);
4917 parm;
4918 parm = DECL_CHAIN (parm))
4920 vec<access_p> *access_vec;
4921 struct access *access;
4923 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4924 continue;
4925 access_vec = get_base_access_vector (parm);
4926 if (!access_vec)
4927 continue;
4929 for (access = (*access_vec)[0];
4930 access;
4931 access = access->next_grp)
4932 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
4933 EXPR_LOCATION (parm));
4936 seq = gsi_seq (gsi);
4937 if (seq)
4938 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
4941 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
4942 it reveals there are components of some aggregates to be scalarized, it runs
4943 the required transformations. */
4944 static unsigned int
4945 perform_intra_sra (void)
4947 int ret = 0;
4948 sra_initialize ();
4950 if (!find_var_candidates ())
4951 goto out;
4953 if (!scan_function ())
4954 goto out;
4956 if (!analyze_all_variable_accesses ())
4957 goto out;
4959 if (sra_modify_function_body ())
4960 ret = TODO_update_ssa | TODO_cleanup_cfg;
4961 else
4962 ret = TODO_update_ssa;
4963 initialize_parameter_reductions ();
4965 statistics_counter_event (cfun, "Scalar replacements created",
4966 sra_stats.replacements);
4967 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
4968 statistics_counter_event (cfun, "Subtree copy stmts",
4969 sra_stats.subtree_copies);
4970 statistics_counter_event (cfun, "Subreplacement stmts",
4971 sra_stats.subreplacements);
4972 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
4973 statistics_counter_event (cfun, "Separate LHS and RHS handling",
4974 sra_stats.separate_lhs_rhs_handling);
4976 out:
4977 sra_deinitialize ();
4978 return ret;
4981 /* Perform early intraprocedural SRA. */
4982 static unsigned int
4983 early_intra_sra (void)
4985 sra_mode = SRA_MODE_EARLY_INTRA;
4986 return perform_intra_sra ();
4989 /* Perform "late" intraprocedural SRA. */
4990 static unsigned int
4991 late_intra_sra (void)
4993 sra_mode = SRA_MODE_INTRA;
4994 return perform_intra_sra ();
4998 static bool
4999 gate_intra_sra (void)
5001 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
5005 namespace {
5007 const pass_data pass_data_sra_early =
5009 GIMPLE_PASS, /* type */
5010 "esra", /* name */
5011 OPTGROUP_NONE, /* optinfo_flags */
5012 TV_TREE_SRA, /* tv_id */
5013 ( PROP_cfg | PROP_ssa ), /* properties_required */
5014 0, /* properties_provided */
5015 0, /* properties_destroyed */
5016 0, /* todo_flags_start */
5017 TODO_update_ssa, /* todo_flags_finish */
5020 class pass_sra_early : public gimple_opt_pass
5022 public:
5023 pass_sra_early (gcc::context *ctxt)
5024 : gimple_opt_pass (pass_data_sra_early, ctxt)
5027 /* opt_pass methods: */
5028 bool gate (function *) final override { return gate_intra_sra (); }
5029 unsigned int execute (function *) final override
5031 return early_intra_sra ();
5034 }; // class pass_sra_early
5036 } // anon namespace
5038 gimple_opt_pass *
5039 make_pass_sra_early (gcc::context *ctxt)
5041 return new pass_sra_early (ctxt);
5044 namespace {
5046 const pass_data pass_data_sra =
5048 GIMPLE_PASS, /* type */
5049 "sra", /* name */
5050 OPTGROUP_NONE, /* optinfo_flags */
5051 TV_TREE_SRA, /* tv_id */
5052 ( PROP_cfg | PROP_ssa ), /* properties_required */
5053 0, /* properties_provided */
5054 0, /* properties_destroyed */
5055 TODO_update_address_taken, /* todo_flags_start */
5056 TODO_update_ssa, /* todo_flags_finish */
5059 class pass_sra : public gimple_opt_pass
5061 public:
5062 pass_sra (gcc::context *ctxt)
5063 : gimple_opt_pass (pass_data_sra, ctxt)
5066 /* opt_pass methods: */
5067 bool gate (function *) final override { return gate_intra_sra (); }
5068 unsigned int execute (function *) final override { return late_intra_sra (); }
5070 }; // class pass_sra
5072 } // anon namespace
5074 gimple_opt_pass *
5075 make_pass_sra (gcc::context *ctxt)
5077 return new pass_sra (ctxt);