gcc/tree-sra.cc
1 /* Scalar Replacement of Aggregates (SRA) converts some structure
2 references into scalar references, exposing them to the scalar
3 optimizers.
4 Copyright (C) 2008-2024 Free Software Foundation, Inc.
5 Contributed by Martin Jambor <mjambor@suse.cz>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* This file implements Scalar Replacement of Aggregates (SRA). SRA is run
24 twice, once in the early stages of compilation (early SRA) and once in the
25 late stages (late SRA). The aim of both is to turn references to scalar
26 parts of aggregates into uses of independent scalar variables.
28 The two passes are nearly identical; the only difference is that early SRA
29 does not scalarize unions which are used as the result in a GIMPLE_RETURN
30 statement because together with inlining this can lead to weird type
31 conversions.
33 Both passes operate in four stages:
35 1. The declarations that have properties which make them candidates for
36 scalarization are identified in function find_var_candidates(). The
37 candidates are stored in candidate_bitmap.
39 2. The function body is scanned. In the process, declarations which are
40 used in a manner that prevents their scalarization are removed from the
41 candidate bitmap. More importantly, for every access into an aggregate,
42 an access structure (struct access) is created by create_access() and
43 stored in a vector associated with the aggregate. Among other
44 information, the aggregate declaration, the offset and size of the access
45 and its type are stored in the structure.
47 On a related note, assign_link structures are created for every assign
48 statement between candidate aggregates and attached to the related
49 accesses.
51 3. The vectors of accesses are analyzed. They are first sorted according to
52 their offset and size and then scanned for partially overlapping accesses
53 (i.e. those which overlap but one is not entirely within another). Such
54 an access disqualifies the whole aggregate from being scalarized.
56 If there is no such inhibiting overlap, a representative access structure
57 is chosen for every unique combination of offset and size. Afterwards,
58 the pass builds a set of trees from these structures, in which children
59 of an access are within their parent (in terms of offset and size).
61 Then accesses are propagated whenever possible (i.e. in cases when it
62 does not create a partially overlapping access) across assign_links from
63 the right hand side to the left hand side.
65 Then the set of trees for each declaration is traversed again and those
66 accesses which should be replaced by a scalar are identified.
68 4. The function is traversed again, and for every reference into an
69 aggregate that has some component which is about to be scalarized,
70 statements are amended and new statements are created as necessary.
71 Finally, if a parameter got scalarized, the scalar replacements are
72 initialized with values from respective parameter aggregates. */
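/* As a rough illustrative sketch of the overall effect (simplified, with
   hypothetical replacement names that merely suggest what the pass creates),
   a function such as

     struct pair { int a; int b; };

     int
     sum (void)
     {
       struct pair p;
       p.a = 1;
       p.b = 2;
       return p.a + p.b;
     }

   is rewritten so that the two fields live in independent scalar variables,
   roughly

     int
     sum (void)
     {
       int p$a;
       int p$b;
       p$a = 1;
       p$b = 2;
       return p$a + p$b;
     }

   after which the ordinary scalar optimizers can simplify the function.  */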
74 #include "config.h"
75 #include "system.h"
76 #include "coretypes.h"
77 #include "backend.h"
78 #include "target.h"
79 #include "rtl.h"
80 #include "tree.h"
81 #include "gimple.h"
82 #include "predict.h"
83 #include "alloc-pool.h"
84 #include "tree-pass.h"
85 #include "ssa.h"
86 #include "cgraph.h"
87 #include "gimple-pretty-print.h"
88 #include "alias.h"
89 #include "fold-const.h"
90 #include "tree-eh.h"
91 #include "stor-layout.h"
92 #include "gimplify.h"
93 #include "gimple-iterator.h"
94 #include "gimplify-me.h"
95 #include "gimple-walk.h"
96 #include "tree-cfg.h"
97 #include "tree-dfa.h"
98 #include "tree-ssa.h"
99 #include "dbgcnt.h"
100 #include "builtins.h"
101 #include "tree-sra.h"
102 #include "opts.h"
104 /* Enumeration of all aggregate reductions we can do. */
105 enum sra_mode { SRA_MODE_EARLY_IPA, /* early call regularization */
106 SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
107 SRA_MODE_INTRA }; /* late intraprocedural SRA */
109 /* Global variable describing which aggregate reduction we are performing at
110 the moment. */
111 static enum sra_mode sra_mode;
113 struct assign_link;
115 /* ACCESS represents each access to an aggregate variable (as a whole or a
116 part). It can also represent a group of accesses that refer to exactly the
117 same fragment of an aggregate (i.e. those that have exactly the same offset
118 and size). Such representatives for a single aggregate, once determined,
119 are linked in a linked list and have the group fields set.
121 Moreover, when doing intraprocedural SRA, a tree is built from those
122 representatives (by the means of first_child and next_sibling pointers), in
123 which all items in a subtree are "within" the root, i.e. their offset is
124 greater or equal to offset of the root and offset+size is smaller or equal
125 to offset+size of the root. Children of an access are sorted by offset.
127 Note that accesses to parts of vector and complex number types are always
128 represented by an access to the whole complex number or a vector. It is a
129 duty of the modifying functions to replace them appropriately. */
131 struct access
133 /* Values returned by `get_ref_base_and_extent' for each component reference.
134 If EXPR isn't a component reference just set `BASE = EXPR', `OFFSET = 0',
135 `SIZE = TREE_SIZE (TREE_TYPE (expr))'. */
136 HOST_WIDE_INT offset;
137 HOST_WIDE_INT size;
138 tree base;
140 /* Expression. It is context dependent so do not use it to create new
141 expressions to access the original aggregate. See PR 42154 for a
142 testcase. */
143 tree expr;
144 /* Type. */
145 tree type;
147 /* The statement this access belongs to. */
148 gimple *stmt;
150 /* Next group representative for this aggregate. */
151 struct access *next_grp;
153 /* Pointer to the group representative. Pointer to itself if the struct is
154 the representative. */
155 struct access *group_representative;
157 /* After access tree has been constructed, this points to the parent of the
158 current access, if there is one. NULL for roots. */
159 struct access *parent;
161 /* If this access has any children (in terms of the definition above), this
162 points to the first one. */
163 struct access *first_child;
165 /* In intraprocedural SRA, pointer to the next sibling in the access tree as
166 described above. */
167 struct access *next_sibling;
169 /* Pointers to the first and last element in the linked list of assign
170 links for propagation from RHS to LHS. */
171 struct assign_link *first_rhs_link, *last_rhs_link;
173 /* Pointers to the first and last element in the linked list of assign
174 links for propagation from LHS to RHS. */
175 struct assign_link *first_lhs_link, *last_lhs_link;
177 /* Pointer to the next access in the work queues. */
178 struct access *next_rhs_queued, *next_lhs_queued;
180 /* Replacement variable for this access "region." Never to be accessed
181 directly, always only by the means of get_access_replacement() and only
182 when grp_to_be_replaced flag is set. */
183 tree replacement_decl;
185 /* Is this access made in reverse storage order? */
186 unsigned reverse : 1;
189 /* Is this particular access a write access? */
189 unsigned write : 1;
191 /* Is this access currently in the rhs work queue? */
192 unsigned grp_rhs_queued : 1;
194 /* Is this access currently in the lhs work queue? */
195 unsigned grp_lhs_queued : 1;
197 /* Does this group contain a write access? This flag is propagated down the
198 access tree. */
199 unsigned grp_write : 1;
201 /* Does this group contain a read access? This flag is propagated down the
202 access tree. */
203 unsigned grp_read : 1;
205 /* Does this group contain a read access that comes from an assignment
206 statement? This flag is propagated down the access tree. */
207 unsigned grp_assignment_read : 1;
209 /* Does this group contain a write access that comes from an assignment
210 statement? This flag is propagated down the access tree. */
211 unsigned grp_assignment_write : 1;
213 /* Does this group contain a read access through a scalar type? This flag is
214 not propagated in the access tree in any direction. */
215 unsigned grp_scalar_read : 1;
217 /* Does this group contain a write access through a scalar type? This flag
218 is not propagated in the access tree in any direction. */
219 unsigned grp_scalar_write : 1;
221 /* In a root of an access tree, true means that the entire tree should be
222 totally scalarized - that all scalar leaves should be scalarized and
223 non-root grp_total_scalarization accesses should be honored. Otherwise,
224 non-root accesses with grp_total_scalarization should never get scalar
225 replacements. */
226 unsigned grp_total_scalarization : 1;
228 /* Other passes of the analysis use this bit to make function
229 analyze_access_subtree create scalar replacements for this group if
230 possible. */
231 unsigned grp_hint : 1;
233 /* Is the subtree rooted in this access fully covered by scalar
234 replacements? */
235 unsigned grp_covered : 1;
237 /* If set to true, this access and all below it in an access tree must not be
238 scalarized. */
239 unsigned grp_unscalarizable_region : 1;
241 /* Whether data have been written to parts of the aggregate covered by this
242 access which is not to be scalarized. This flag is propagated up in the
243 access tree. */
244 unsigned grp_unscalarized_data : 1;
246 /* Set if all accesses in the group consist of the same chain of
247 COMPONENT_REFs and ARRAY_REFs. */
248 unsigned grp_same_access_path : 1;
250 /* Does this access and/or group contain a write access through a
251 BIT_FIELD_REF? */
252 unsigned grp_partial_lhs : 1;
254 /* Set when a scalar replacement should be created for this variable. */
255 unsigned grp_to_be_replaced : 1;
257 /* Set when we want a replacement for the sole purpose of having it in
258 generated debug statements. */
259 unsigned grp_to_be_debug_replaced : 1;
261 /* Should TREE_NO_WARNING of a replacement be set? */
262 unsigned grp_no_warning : 1;
264 /* Result of propagation across a link from LHS to RHS. */
265 unsigned grp_result_of_prop_from_lhs : 1;
268 typedef struct access *access_p;
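/* As an illustrative sketch of the access trees described above (offsets and
   sizes are in bits and assume a 32-bit int; the example is hypothetical),
   for a variable declared as

     struct s { int i; struct { int j; int k; } in; } v;

   in which the function accesses v.i, v.in and v.in.k, the group
   representatives would form the trees

     v.i       offset 0,  size 32
     v.in      offset 32, size 64
       v.in.k  offset 64, size 32

   where v.in.k is a child of v.in because it lies entirely within it, while
   v.i and v.in are roots of their own trees, linked through next_grp.  */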
271 /* Alloc pool for allocating access structures. */
272 static object_allocator<struct access> access_pool ("SRA accesses");
274 /* A structure linking lhs and rhs accesses from an aggregate assignment. They
275 are used to propagate subaccesses from rhs to lhs and vice versa as long as
276 they don't conflict with what is already there. In the RHS->LHS direction,
277 we also propagate grp_write flag to lazily mark that the access contains any
278 meaningful data. */
279 struct assign_link
281 struct access *lacc, *racc;
282 struct assign_link *next_rhs, *next_lhs;
285 /* Alloc pool for allocating assign link structures. */
286 static object_allocator<assign_link> assign_link_pool ("SRA links");
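/* For illustration (a simplified, hypothetical example), given two candidate
   aggregates dst and src and the assignment

     dst = src;

   build_accesses_from_assign creates one assign_link whose lacc is the access
   for dst and whose racc is the access for src.  If src later turns out to
   have a subaccess, say src.f, propagation across this link creates a
   corresponding artificial subaccess of dst (and similarly in the LHS->RHS
   direction), as long as doing so does not create a partial overlap.  */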
288 /* Base (tree) -> Vector (vec<access_p> *) map. */
289 static hash_map<tree, auto_vec<access_p> > *base_access_vec;
291 /* Hash to limit creation of artificial accesses */
292 static hash_map<tree, unsigned> *propagation_budget;
294 /* Candidate hash table helpers. */
296 struct uid_decl_hasher : nofree_ptr_hash <tree_node>
298 static inline hashval_t hash (const tree_node *);
299 static inline bool equal (const tree_node *, const tree_node *);
302 /* Hash a tree in a uid_decl_map. */
304 inline hashval_t
305 uid_decl_hasher::hash (const tree_node *item)
307 return item->decl_minimal.uid;
310 /* Return true if the DECL_UIDs of both trees are equal. */
312 inline bool
313 uid_decl_hasher::equal (const tree_node *a, const tree_node *b)
315 return (a->decl_minimal.uid == b->decl_minimal.uid);
318 /* Set of candidates. */
319 static bitmap candidate_bitmap;
320 static hash_table<uid_decl_hasher> *candidates;
322 /* For a candidate UID return the candidate's decl. */
324 static inline tree
325 candidate (unsigned uid)
327 tree_node t;
328 t.decl_minimal.uid = uid;
329 return candidates->find_with_hash (&t, static_cast <hashval_t> (uid));
332 /* Bitmap of candidates which we should try to entirely scalarize away and
333 those which cannot be (because they are, and need to be, used as a whole). */
334 static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;
336 /* Bitmap of candidates in the constant pool, which cannot be scalarized
337 because this would produce non-constant expressions (e.g. Ada). */
338 static bitmap disqualified_constants;
340 /* Bitmap of candidates which are passed by reference in call arguments. */
341 static bitmap passed_by_ref_in_call;
343 /* Obstack for creation of fancy names. */
344 static struct obstack name_obstack;
346 /* Heads of linked lists of accesses that need to have their subaccesses
347 propagated to their assignment counterparts. */
348 static struct access *rhs_work_queue_head, *lhs_work_queue_head;
350 /* Statistics about the transformations performed by the pass. */
354 static struct
356 /* Number of processed aggregates is readily available in
357 analyze_all_variable_accesses and so is not stored here. */
359 /* Number of created scalar replacements. */
360 int replacements;
362 /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
363 expression. */
364 int exprs;
366 /* Number of statements created by generate_subtree_copies. */
367 int subtree_copies;
369 /* Number of statements created by load_assign_lhs_subreplacements. */
370 int subreplacements;
372 /* Number of times sra_modify_assign has deleted a statement. */
373 int deleted;
375 /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
376 RHS separately due to type conversions or nonexistent matching
377 references. */
378 int separate_lhs_rhs_handling;
380 /* Number of parameters that were removed because they were unused. */
381 int deleted_unused_parameters;
383 /* Number of scalars passed as parameters by reference that have been
384 converted to be passed by value. */
385 int scalar_by_ref_to_by_val;
387 /* Number of aggregate parameters that were replaced by one or more of their
388 components. */
389 int aggregate_params_reduced;
391 /* Number of components created when splitting aggregate parameters. */
392 int param_reductions_created;
394 /* Number of deferred_init calls that are modified. */
395 int deferred_init;
397 /* Number of deferred_init calls that are created by
398 generate_subtree_deferred_init. */
399 int subtree_deferred_init;
400 } sra_stats;
/* Dump contents of ACCESS to file F in a human friendly way. If GRP is true,
representative fields are dumped, otherwise those which only describe the
individual access are. */

402 static void
403 dump_access (FILE *f, struct access *access, bool grp)
405 fprintf (f, "access { ");
406 fprintf (f, "base = (%d)'", DECL_UID (access->base));
407 print_generic_expr (f, access->base);
408 fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
409 fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
410 fprintf (f, ", expr = ");
411 print_generic_expr (f, access->expr);
412 fprintf (f, ", type = ");
413 print_generic_expr (f, access->type);
414 fprintf (f, ", reverse = %d", access->reverse);
415 if (grp)
416 fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
417 "grp_assignment_write = %d, grp_scalar_read = %d, "
418 "grp_scalar_write = %d, grp_total_scalarization = %d, "
419 "grp_hint = %d, grp_covered = %d, "
420 "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
421 "grp_same_access_path = %d, grp_partial_lhs = %d, "
422 "grp_to_be_replaced = %d, grp_to_be_debug_replaced = %d}\n",
423 access->grp_read, access->grp_write, access->grp_assignment_read,
424 access->grp_assignment_write, access->grp_scalar_read,
425 access->grp_scalar_write, access->grp_total_scalarization,
426 access->grp_hint, access->grp_covered,
427 access->grp_unscalarizable_region, access->grp_unscalarized_data,
428 access->grp_same_access_path, access->grp_partial_lhs,
429 access->grp_to_be_replaced, access->grp_to_be_debug_replaced);
430 else
431 fprintf (f, ", write = %d, grp_total_scalarization = %d, "
432 "grp_partial_lhs = %d}\n",
433 access->write, access->grp_total_scalarization,
434 access->grp_partial_lhs);
437 /* Dump a subtree rooted in ACCESS to file F, indent by LEVEL. */
439 static void
440 dump_access_tree_1 (FILE *f, struct access *access, int level)
444 int i;
446 for (i = 0; i < level; i++)
447 fputs ("* ", f);
449 dump_access (f, access, true);
451 if (access->first_child)
452 dump_access_tree_1 (f, access->first_child, level + 1);
454 access = access->next_sibling;
456 while (access);
459 /* Dump all access trees for a variable, given the pointer to the first root in
460 ACCESS. */
462 static void
463 dump_access_tree (FILE *f, struct access *access)
465 for (; access; access = access->next_grp)
466 dump_access_tree_1 (f, access, 0);
469 /* Return true iff ACC is non-NULL and has subaccesses. */
471 static inline bool
472 access_has_children_p (struct access *acc)
474 return acc && acc->first_child;
477 /* Return true iff ACC is (partly) covered by at least one replacement. */
479 static bool
480 access_has_replacements_p (struct access *acc)
482 struct access *child;
483 if (acc->grp_to_be_replaced)
484 return true;
485 for (child = acc->first_child; child; child = child->next_sibling)
486 if (access_has_replacements_p (child))
487 return true;
488 return false;
491 /* Return a vector of pointers to accesses for the variable given in BASE or
492 NULL if there is none. */
494 static vec<access_p> *
495 get_base_access_vector (tree base)
497 return base_access_vec->get (base);
500 /* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
501 in ACCESS. Return NULL if it cannot be found. */
503 static struct access *
504 find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
505 HOST_WIDE_INT size)
507 while (access && (access->offset != offset || access->size != size))
509 struct access *child = access->first_child;
511 while (child && (child->offset + child->size <= offset))
512 child = child->next_sibling;
513 access = child;
516 /* Total scalarization does not replace single field structures with their
517 single field but rather creates an access for them underneath. Look for
518 it. */
519 if (access)
520 while (access->first_child
521 && access->first_child->offset == offset
522 && access->first_child->size == size)
523 access = access->first_child;
525 return access;
528 /* Return the first group representative for DECL or NULL if none exists. */
530 static struct access *
531 get_first_repr_for_decl (tree base)
533 vec<access_p> *access_vec;
535 access_vec = get_base_access_vector (base);
536 if (!access_vec)
537 return NULL;
539 return (*access_vec)[0];
542 /* Find an access representative for the variable BASE and given OFFSET and
543 SIZE. Requires that access trees have already been built. Return NULL if
544 it cannot be found. */
546 static struct access *
547 get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
548 HOST_WIDE_INT size)
550 struct access *access;
552 access = get_first_repr_for_decl (base);
553 while (access && (access->offset + access->size <= offset))
554 access = access->next_grp;
555 if (!access)
556 return NULL;
558 return find_access_in_subtree (access, offset, size);
561 /* Add LINK to the linked list of assign links of RACC. */
563 static void
564 add_link_to_rhs (struct access *racc, struct assign_link *link)
566 gcc_assert (link->racc == racc);
568 if (!racc->first_rhs_link)
570 gcc_assert (!racc->last_rhs_link);
571 racc->first_rhs_link = link;
573 else
574 racc->last_rhs_link->next_rhs = link;
576 racc->last_rhs_link = link;
577 link->next_rhs = NULL;
580 /* Add LINK to the linked list of lhs assign links of LACC. */
582 static void
583 add_link_to_lhs (struct access *lacc, struct assign_link *link)
585 gcc_assert (link->lacc == lacc);
587 if (!lacc->first_lhs_link)
589 gcc_assert (!lacc->last_lhs_link);
590 lacc->first_lhs_link = link;
592 else
593 lacc->last_lhs_link->next_lhs = link;
595 lacc->last_lhs_link = link;
596 link->next_lhs = NULL;
599 /* Move all link structures in their linked list in OLD_ACC to the linked list
600 in NEW_ACC. */
601 static void
602 relink_to_new_repr (struct access *new_acc, struct access *old_acc)
604 if (old_acc->first_rhs_link)
607 if (new_acc->first_rhs_link)
609 gcc_assert (!new_acc->last_rhs_link->next_rhs);
610 gcc_assert (!old_acc->last_rhs_link
611 || !old_acc->last_rhs_link->next_rhs);
613 new_acc->last_rhs_link->next_rhs = old_acc->first_rhs_link;
614 new_acc->last_rhs_link = old_acc->last_rhs_link;
616 else
618 gcc_assert (!new_acc->last_rhs_link);
620 new_acc->first_rhs_link = old_acc->first_rhs_link;
621 new_acc->last_rhs_link = old_acc->last_rhs_link;
623 old_acc->first_rhs_link = old_acc->last_rhs_link = NULL;
625 else
626 gcc_assert (!old_acc->last_rhs_link);
628 if (old_acc->first_lhs_link)
631 if (new_acc->first_lhs_link)
633 gcc_assert (!new_acc->last_lhs_link->next_lhs);
634 gcc_assert (!old_acc->last_lhs_link
635 || !old_acc->last_lhs_link->next_lhs);
637 new_acc->last_lhs_link->next_lhs = old_acc->first_lhs_link;
638 new_acc->last_lhs_link = old_acc->last_lhs_link;
640 else
642 gcc_assert (!new_acc->last_lhs_link);
644 new_acc->first_lhs_link = old_acc->first_lhs_link;
645 new_acc->last_lhs_link = old_acc->last_lhs_link;
647 old_acc->first_lhs_link = old_acc->last_lhs_link = NULL;
649 else
650 gcc_assert (!old_acc->last_lhs_link);
654 /* Add ACCESS to the work queue for propagation of subaccesses from RHS to
655 LHS (which is actually a stack). */
657 static void
658 add_access_to_rhs_work_queue (struct access *access)
660 if (access->first_rhs_link && !access->grp_rhs_queued)
662 gcc_assert (!access->next_rhs_queued);
663 access->next_rhs_queued = rhs_work_queue_head;
664 access->grp_rhs_queued = 1;
665 rhs_work_queue_head = access;
669 /* Add ACCESS to the work queue for propagation of subaccesses from LHS to
670 RHS (which is actually a stack). */
672 static void
673 add_access_to_lhs_work_queue (struct access *access)
675 if (access->first_lhs_link && !access->grp_lhs_queued)
677 gcc_assert (!access->next_lhs_queued);
678 access->next_lhs_queued = lhs_work_queue_head;
679 access->grp_lhs_queued = 1;
680 lhs_work_queue_head = access;
684 /* Pop an access from the work queue for propagating from RHS to LHS, and
685 return it, assuming there is one. */
687 static struct access *
688 pop_access_from_rhs_work_queue (void)
690 struct access *access = rhs_work_queue_head;
692 rhs_work_queue_head = access->next_rhs_queued;
693 access->next_rhs_queued = NULL;
694 access->grp_rhs_queued = 0;
695 return access;
698 /* Pop an access from the work queue for propagating from LHS to RHS, and
699 return it, assuming there is one. */
701 static struct access *
702 pop_access_from_lhs_work_queue (void)
704 struct access *access = lhs_work_queue_head;
706 lhs_work_queue_head = access->next_lhs_queued;
707 access->next_lhs_queued = NULL;
708 access->grp_lhs_queued = 0;
709 return access;
712 /* Allocate necessary structures. */
714 static void
715 sra_initialize (void)
717 candidate_bitmap = BITMAP_ALLOC (NULL);
718 candidates = new hash_table<uid_decl_hasher>
719 (vec_safe_length (cfun->local_decls) / 2);
720 should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
721 cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
722 disqualified_constants = BITMAP_ALLOC (NULL);
723 passed_by_ref_in_call = BITMAP_ALLOC (NULL);
724 gcc_obstack_init (&name_obstack);
725 base_access_vec = new hash_map<tree, auto_vec<access_p> >;
726 memset (&sra_stats, 0, sizeof (sra_stats));
729 /* Deallocate all general structures. */
731 static void
732 sra_deinitialize (void)
734 BITMAP_FREE (candidate_bitmap);
735 delete candidates;
736 candidates = NULL;
737 BITMAP_FREE (should_scalarize_away_bitmap);
738 BITMAP_FREE (cannot_scalarize_away_bitmap);
739 BITMAP_FREE (disqualified_constants);
740 BITMAP_FREE (passed_by_ref_in_call);
741 access_pool.release ();
742 assign_link_pool.release ();
743 obstack_free (&name_obstack, NULL);
745 delete base_access_vec;
748 /* Return true if DECL is a VAR_DECL in the constant pool, false otherwise. */
750 static bool constant_decl_p (tree decl)
752 return VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl);
755 /* Remove DECL from candidates for SRA and write REASON to the dump file if
756 there is one. */
758 static void
759 disqualify_candidate (tree decl, const char *reason)
761 if (bitmap_clear_bit (candidate_bitmap, DECL_UID (decl)))
762 candidates->remove_elt_with_hash (decl, DECL_UID (decl));
763 if (constant_decl_p (decl))
764 bitmap_set_bit (disqualified_constants, DECL_UID (decl));
766 if (dump_file && (dump_flags & TDF_DETAILS))
768 fprintf (dump_file, "! Disqualifying ");
769 print_generic_expr (dump_file, decl);
770 fprintf (dump_file, " - %s\n", reason);
774 /* Return true iff the type contains a field or an element which does not allow
775 scalarization. Use VISITED_TYPES to avoid re-checking already checked
776 (sub-)types. */
778 static bool
779 type_internals_preclude_sra_p_1 (tree type, const char **msg,
780 hash_set<tree> *visited_types)
782 tree fld;
783 tree et;
785 if (visited_types->contains (type))
786 return false;
787 visited_types->add (type);
789 switch (TREE_CODE (type))
791 case RECORD_TYPE:
792 case UNION_TYPE:
793 case QUAL_UNION_TYPE:
794 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
795 if (TREE_CODE (fld) == FIELD_DECL)
797 if (TREE_CODE (fld) == FUNCTION_DECL)
798 continue;
799 tree ft = TREE_TYPE (fld);
801 if (TREE_THIS_VOLATILE (fld))
803 *msg = "volatile structure field";
804 return true;
806 if (!DECL_FIELD_OFFSET (fld))
808 *msg = "no structure field offset";
809 return true;
811 if (!DECL_SIZE (fld))
813 *msg = "zero structure field size";
814 return true;
816 if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
818 *msg = "structure field offset not fixed";
819 return true;
821 if (!tree_fits_uhwi_p (DECL_SIZE (fld)))
823 *msg = "structure field size not fixed";
824 return true;
826 if (!tree_fits_shwi_p (bit_position (fld)))
828 *msg = "structure field size too big";
829 return true;
831 if (AGGREGATE_TYPE_P (ft)
832 && int_bit_position (fld) % BITS_PER_UNIT != 0)
834 *msg = "structure field is bit field";
835 return true;
838 if (AGGREGATE_TYPE_P (ft)
839 && type_internals_preclude_sra_p_1 (ft, msg, visited_types))
840 return true;
843 return false;
845 case ARRAY_TYPE:
846 et = TREE_TYPE (type);
848 if (TYPE_VOLATILE (et))
850 *msg = "element type is volatile";
851 return true;
854 if (AGGREGATE_TYPE_P (et)
855 && type_internals_preclude_sra_p_1 (et, msg, visited_types))
856 return true;
858 return false;
860 default:
861 return false;
865 /* Return true iff the type contains a field or an element which does not allow
866 scalarization. */
868 bool
869 type_internals_preclude_sra_p (tree type, const char **msg)
871 hash_set<tree> visited_types;
872 return type_internals_preclude_sra_p_1 (type, msg, &visited_types);
876 /* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
877 the three fields. Also add it to the vector of accesses corresponding to
878 the base. Finally, return the new access. */
880 static struct access *
881 create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
883 struct access *access = access_pool.allocate ();
885 memset (access, 0, sizeof (struct access));
886 access->base = base;
887 access->offset = offset;
888 access->size = size;
890 base_access_vec->get_or_insert (base).safe_push (access);
892 return access;
895 static bool maybe_add_sra_candidate (tree);
897 /* Create and insert access for EXPR. Return created access, or NULL if it is
898 not possible. Also scan for uses of constant pool as we go along and add
899 to candidates. */
901 static struct access *
902 create_access (tree expr, gimple *stmt, bool write)
904 struct access *access;
905 poly_int64 poffset, psize, pmax_size;
906 tree base = expr;
907 bool reverse, unscalarizable_region = false;
909 base = get_ref_base_and_extent (expr, &poffset, &psize, &pmax_size,
910 &reverse);
912 /* For constant-pool entries, check we can substitute the constant value. */
913 if (constant_decl_p (base)
914 && !bitmap_bit_p (disqualified_constants, DECL_UID (base)))
916 if (expr != base
917 && !is_gimple_reg_type (TREE_TYPE (expr))
918 && dump_file && (dump_flags & TDF_DETAILS))
920 /* This occurs in Ada with accesses to ARRAY_RANGE_REFs,
921 and elements of multidimensional arrays (which are
922 multi-element arrays in their own right). */
923 fprintf (dump_file, "Allowing non-reg-type load of part"
924 " of constant-pool entry: ");
925 print_generic_expr (dump_file, expr);
927 maybe_add_sra_candidate (base);
930 if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
931 return NULL;
933 if (write && TREE_READONLY (base))
935 disqualify_candidate (base, "Encountered a store to a read-only decl.");
936 return NULL;
939 HOST_WIDE_INT offset, size, max_size;
940 if (!poffset.is_constant (&offset)
941 || !psize.is_constant (&size)
942 || !pmax_size.is_constant (&max_size))
944 disqualify_candidate (base, "Encountered a polynomial-sized access.");
945 return NULL;
948 if (size != max_size)
950 size = max_size;
951 unscalarizable_region = true;
953 if (size == 0)
954 return NULL;
955 if (offset < 0)
957 disqualify_candidate (base, "Encountered a negative offset access.");
958 return NULL;
960 if (size < 0)
962 disqualify_candidate (base, "Encountered an unconstrained access.");
963 return NULL;
965 if (offset + size > tree_to_shwi (DECL_SIZE (base)))
967 disqualify_candidate (base, "Encountered an access beyond the base.");
968 return NULL;
970 if (TREE_CODE (TREE_TYPE (expr)) == BITINT_TYPE
971 && size > WIDE_INT_MAX_PRECISION - 1)
973 disqualify_candidate (base, "Encountered too large _BitInt access.");
974 return NULL;
977 access = create_access_1 (base, offset, size);
978 access->expr = expr;
979 access->type = TREE_TYPE (expr);
980 access->write = write;
981 access->grp_unscalarizable_region = unscalarizable_region;
982 access->stmt = stmt;
983 access->reverse = reverse;
985 return access;
989 /* Return true iff TYPE is scalarizable - i.e. a RECORD_TYPE or fixed-length
990 ARRAY_TYPE with fields that are either of gimple register types (excluding
991 bit-fields) or (recursively) scalarizable types. CONST_DECL must be true if
992 we are considering a decl from constant pool. If it is false, char arrays
993 will be refused. */
995 static bool
996 scalarizable_type_p (tree type, bool const_decl)
998 if (is_gimple_reg_type (type))
999 return true;
1000 if (type_contains_placeholder_p (type))
1001 return false;
1003 bool have_predecessor_field = false;
1004 HOST_WIDE_INT prev_pos = 0;
1006 switch (TREE_CODE (type))
1008 case RECORD_TYPE:
1009 for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1010 if (TREE_CODE (fld) == FIELD_DECL)
1012 tree ft = TREE_TYPE (fld);
1014 if (zerop (DECL_SIZE (fld)))
1015 continue;
1017 HOST_WIDE_INT pos = int_bit_position (fld);
1018 if (have_predecessor_field
1019 && pos <= prev_pos)
1020 return false;
1022 have_predecessor_field = true;
1023 prev_pos = pos;
1025 if (DECL_BIT_FIELD (fld))
1026 return false;
1028 if (!scalarizable_type_p (ft, const_decl))
1029 return false;
1032 return true;
1034 case ARRAY_TYPE:
1036 HOST_WIDE_INT min_elem_size;
1037 if (const_decl)
1038 min_elem_size = 0;
1039 else
1040 min_elem_size = BITS_PER_UNIT;
1042 if (TYPE_DOMAIN (type) == NULL_TREE
1043 || !tree_fits_shwi_p (TYPE_SIZE (type))
1044 || !tree_fits_shwi_p (TYPE_SIZE (TREE_TYPE (type)))
1045 || (tree_to_shwi (TYPE_SIZE (TREE_TYPE (type))) <= min_elem_size)
1046 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
1047 return false;
1048 if (tree_to_shwi (TYPE_SIZE (type)) == 0
1049 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE)
1050 /* Zero-element array, should not prevent scalarization. */
1052 else if ((tree_to_shwi (TYPE_SIZE (type)) <= 0)
1053 || !tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
1054 /* Variable-length array, do not allow scalarization. */
1055 return false;
1057 tree elem = TREE_TYPE (type);
1058 if (!scalarizable_type_p (elem, const_decl))
1059 return false;
1060 return true;
1062 default:
1063 return false;
1067 /* Return true if REF has a VIEW_CONVERT_EXPR somewhere in it. */
1069 static inline bool
1070 contains_view_convert_expr_p (const_tree ref)
1072 while (handled_component_p (ref))
1074 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
1075 return true;
1076 ref = TREE_OPERAND (ref, 0);
1079 return false;
1082 /* Return true if REF contains a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
1083 bit-field field declaration. If TYPE_CHANGING_P is non-NULL, the bool
1084 it points to will be set if REF contains any of the above or a MEM_REF
1085 expression that effectively performs type conversion. */
1087 static bool
1088 contains_vce_or_bfcref_p (const_tree ref, bool *type_changing_p = NULL)
1090 while (handled_component_p (ref))
1092 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
1093 || (TREE_CODE (ref) == COMPONENT_REF
1094 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
1096 if (type_changing_p)
1097 *type_changing_p = true;
1098 return true;
1100 ref = TREE_OPERAND (ref, 0);
1103 if (!type_changing_p
1104 || TREE_CODE (ref) != MEM_REF
1105 || TREE_CODE (TREE_OPERAND (ref, 0)) != ADDR_EXPR)
1106 return false;
1108 tree mem = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
1109 if (TYPE_MAIN_VARIANT (TREE_TYPE (ref))
1110 != TYPE_MAIN_VARIANT (TREE_TYPE (mem)))
1111 *type_changing_p = true;
1113 return false;
1116 /* Search the given tree for a declaration by skipping handled components and
1117 exclude it from the candidates. */
1119 static void
1120 disqualify_base_of_expr (tree t, const char *reason)
1122 t = get_base_address (t);
1123 if (t && DECL_P (t))
1124 disqualify_candidate (t, reason);
1127 /* Return true if the BIT_FIELD_REF read EXPR is handled by SRA. */
1129 static bool
1130 sra_handled_bf_read_p (tree expr)
1132 uint64_t size, offset;
1133 if (bit_field_size (expr).is_constant (&size)
1134 && bit_field_offset (expr).is_constant (&offset)
1135 && size % BITS_PER_UNIT == 0
1136 && offset % BITS_PER_UNIT == 0
1137 && pow2p_hwi (size))
1138 return true;
1139 return false;
1142 /* Scan expression EXPR and create access structures for all accesses to
1143 candidates for scalarization. Return the created access or NULL if none is
1144 created. */
1146 static struct access *
1147 build_access_from_expr_1 (tree expr, gimple *stmt, bool write)
1149 /* We only allow ADDR_EXPRs in arguments of function calls and those must
1150 have been dealt with in build_access_from_call_arg. Any other address
1151 taking should have been caught by scan_visit_addr. */
1152 if (TREE_CODE (expr) == ADDR_EXPR)
1154 tree base = get_base_address (TREE_OPERAND (expr, 0));
1155 gcc_assert (!DECL_P (base)
1156 || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)));
1157 return NULL;
1160 struct access *ret = NULL;
1161 bool partial_ref;
1163 if ((TREE_CODE (expr) == BIT_FIELD_REF
1164 && (write || !sra_handled_bf_read_p (expr)))
1165 || TREE_CODE (expr) == IMAGPART_EXPR
1166 || TREE_CODE (expr) == REALPART_EXPR)
1168 expr = TREE_OPERAND (expr, 0);
1169 partial_ref = true;
1171 else
1172 partial_ref = false;
1174 if (storage_order_barrier_p (expr))
1176 disqualify_base_of_expr (expr, "storage order barrier.");
1177 return NULL;
1180 /* We need to dive through V_C_Es in order to get the size of its parameter
1181 and not the result type. Ada produces such statements. We are also
1182 capable of handling the topmost V_C_E but not any of those buried in other
1183 handled components. */
1184 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
1185 expr = TREE_OPERAND (expr, 0);
1187 if (contains_view_convert_expr_p (expr))
1189 disqualify_base_of_expr (expr, "V_C_E under a different handled "
1190 "component.");
1191 return NULL;
1193 if (TREE_THIS_VOLATILE (expr))
1195 disqualify_base_of_expr (expr, "part of a volatile reference.");
1196 return NULL;
1199 switch (TREE_CODE (expr))
1201 case MEM_REF:
1202 if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR)
1203 return NULL;
1204 /* fall through */
1205 case VAR_DECL:
1206 case PARM_DECL:
1207 case RESULT_DECL:
1208 case COMPONENT_REF:
1209 case ARRAY_REF:
1210 case ARRAY_RANGE_REF:
1211 case BIT_FIELD_REF:
1212 ret = create_access (expr, stmt, write);
1213 break;
1215 default:
1216 break;
1219 if (write && partial_ref && ret)
1220 ret->grp_partial_lhs = 1;
1222 return ret;
1225 /* Scan expression EXPR and create access structures for all accesses to
1226 candidates for scalarization. Return true if any access has been inserted.
1227 STMT must be the statement from which the expression is taken, WRITE must be
1228 true if the expression is a store and false otherwise. */
1230 static bool
1231 build_access_from_expr (tree expr, gimple *stmt, bool write)
1233 struct access *access;
1235 access = build_access_from_expr_1 (expr, stmt, write);
1236 if (access)
1238 /* This means the aggregate is accessed as a whole in a way other than an
1239 assign statement and thus cannot be removed even if we had a scalar
1240 replacement for everything. */
1241 if (cannot_scalarize_away_bitmap)
1242 bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
1243 return true;
1245 return false;
1248 enum out_edge_check { SRA_OUTGOING_EDGES_UNCHECKED, SRA_OUTGOING_EDGES_OK,
1249 SRA_OUTGOING_EDGES_FAIL };
1251 /* Return true if STMT terminates BB and there is an abnormal edge going out of
1252 the BB and remember the decision in OE_CHECK. */
1254 static bool
1255 abnormal_edge_after_stmt_p (gimple *stmt, enum out_edge_check *oe_check)
1257 if (*oe_check == SRA_OUTGOING_EDGES_OK)
1258 return false;
1259 if (*oe_check == SRA_OUTGOING_EDGES_FAIL)
1260 return true;
1261 if (stmt_ends_bb_p (stmt))
1263 edge e;
1264 edge_iterator ei;
1265 FOR_EACH_EDGE (e, ei, gimple_bb (stmt)->succs)
1266 if (e->flags & EDGE_ABNORMAL)
1268 *oe_check = SRA_OUTGOING_EDGES_FAIL;
1269 return true;
1272 *oe_check = SRA_OUTGOING_EDGES_OK;
1273 return false;
1276 /* Scan expression EXPR which is an argument of a call and create access
1277 structures for all accesses to candidates for scalarization. Return true
1278 if any access has been inserted. STMT must be the statement from which the
1279 expression is taken. CAN_BE_RETURNED must be true if call argument flags
1280 do not rule out that the argument is directly returned. OE_CHECK is used
1281 to remember result of a test for abnormal outgoing edges after this
1282 statement. */
1284 static bool
1285 build_access_from_call_arg (tree expr, gimple *stmt, bool can_be_returned,
1286 enum out_edge_check *oe_check)
1288 if (TREE_CODE (expr) == ADDR_EXPR)
1290 tree base = get_base_address (TREE_OPERAND (expr, 0));
1292 if (can_be_returned)
1294 disqualify_base_of_expr (base, "Address possibly returned, "
1295 "leading to an alis SRA may not know.");
1296 return false;
1298 if (abnormal_edge_after_stmt_p (stmt, oe_check))
1300 disqualify_base_of_expr (base, "May lead to need to add statements "
1301 "to abnormal edge.");
1302 return false;
1305 bool read = build_access_from_expr (base, stmt, false);
1306 bool write = build_access_from_expr (base, stmt, true);
1307 if (read || write)
1309 if (dump_file && (dump_flags & TDF_DETAILS))
1311 fprintf (dump_file, "Allowed ADDR_EXPR of ");
1312 print_generic_expr (dump_file, base);
1313 fprintf (dump_file, " because of ");
1314 print_gimple_stmt (dump_file, stmt, 0);
1315 fprintf (dump_file, "\n");
1317 bitmap_set_bit (passed_by_ref_in_call, DECL_UID (base));
1318 return true;
1320 else
1321 return false;
1324 return build_access_from_expr (expr, stmt, false);
1328 /* Return the single non-EH successor edge of BB or NULL if there is none or
1329 more than one. */
1331 static edge
1332 single_non_eh_succ (basic_block bb)
1334 edge e, res = NULL;
1335 edge_iterator ei;
1337 FOR_EACH_EDGE (e, ei, bb->succs)
1338 if (!(e->flags & EDGE_EH))
1340 if (res)
1341 return NULL;
1342 res = e;
1345 return res;
1348 /* Disqualify LHS and RHS for scalarization if STMT has to terminate its BB and
1349 there is no alternative spot in which to put statements SRA might need to
1350 generate after it. The spot we are looking for is an edge leading to a
1351 single non-EH successor, if it exists and is indeed single. RHS may be
1352 NULL, in that case ignore it. */
1354 static bool
1355 disqualify_if_bad_bb_terminating_stmt (gimple *stmt, tree lhs, tree rhs)
1357 if (stmt_ends_bb_p (stmt))
1359 if (single_non_eh_succ (gimple_bb (stmt)))
1360 return false;
1362 disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
1363 if (rhs)
1364 disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
1365 return true;
1367 return false;
1370 /* Return true if the nature of BASE is such that it contains data even if
1371 there is no write to it in the function. */
1373 static bool
1374 comes_initialized_p (tree base)
1376 return TREE_CODE (base) == PARM_DECL || constant_decl_p (base);
1379 /* Scan expressions occurring in STMT, create access structures for all accesses
1380 to candidates for scalarization and remove those candidates which occur in
1381 statements or expressions that prevent them from being split apart. Return
1382 true if any access has been inserted. */
1384 static bool
1385 build_accesses_from_assign (gimple *stmt)
1387 tree lhs, rhs;
1388 struct access *lacc, *racc;
1390 if (!gimple_assign_single_p (stmt)
1391 /* Scope clobbers don't influence scalarization. */
1392 || gimple_clobber_p (stmt))
1393 return false;
1395 lhs = gimple_assign_lhs (stmt);
1396 rhs = gimple_assign_rhs1 (stmt);
1398 if (disqualify_if_bad_bb_terminating_stmt (stmt, lhs, rhs))
1399 return false;
1401 racc = build_access_from_expr_1 (rhs, stmt, false);
1402 lacc = build_access_from_expr_1 (lhs, stmt, true);
1404 if (lacc)
1406 lacc->grp_assignment_write = 1;
1407 if (storage_order_barrier_p (rhs))
1408 lacc->grp_unscalarizable_region = 1;
1410 if (should_scalarize_away_bitmap && !is_gimple_reg_type (lacc->type))
1412 bool type_changing_p = false;
1413 contains_vce_or_bfcref_p (lhs, &type_changing_p);
1414 if (type_changing_p)
1415 bitmap_set_bit (cannot_scalarize_away_bitmap,
1416 DECL_UID (lacc->base));
1420 if (racc)
1422 racc->grp_assignment_read = 1;
1423 if (should_scalarize_away_bitmap && !is_gimple_reg_type (racc->type))
1425 bool type_changing_p = false;
1426 contains_vce_or_bfcref_p (rhs, &type_changing_p);
1428 if (type_changing_p || gimple_has_volatile_ops (stmt))
1429 bitmap_set_bit (cannot_scalarize_away_bitmap,
1430 DECL_UID (racc->base));
1431 else
1432 bitmap_set_bit (should_scalarize_away_bitmap,
1433 DECL_UID (racc->base));
1435 if (storage_order_barrier_p (lhs))
1436 racc->grp_unscalarizable_region = 1;
1439 if (lacc && racc
1440 && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1441 && !lacc->grp_unscalarizable_region
1442 && !racc->grp_unscalarizable_region
1443 && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
1444 && lacc->size == racc->size
1445 && useless_type_conversion_p (lacc->type, racc->type))
1447 struct assign_link *link;
1449 link = assign_link_pool.allocate ();
1450 memset (link, 0, sizeof (struct assign_link));
1452 link->lacc = lacc;
1453 link->racc = racc;
1454 add_link_to_rhs (racc, link);
1455 add_link_to_lhs (lacc, link);
1456 add_access_to_rhs_work_queue (racc);
1457 add_access_to_lhs_work_queue (lacc);
1459 /* Let's delay marking the areas as written until propagation of accesses
1460 across link, unless the nature of rhs tells us that its data comes
1461 from elsewhere. */
1462 if (!comes_initialized_p (racc->base))
1463 lacc->write = false;
1466 return lacc || racc;
1469 /* Callback of walk_stmt_load_store_addr_ops visit_addr used to detect taking
1470 addresses of candidates at places which are not call arguments. Such
1471 candidates are disqualified from SRA. This also applies to GIMPLE_ASM
1472 operands with memory constraints which cannot be scalarized. */
1474 static bool
1475 scan_visit_addr (gimple *, tree op, tree, void *)
1477 op = get_base_address (op);
1478 if (op
1479 && DECL_P (op))
1480 disqualify_candidate (op, "Address taken in a non-call-argument context.");
1482 return false;
1485 /* Scan function and look for interesting expressions and create access
1486 structures for them. Return true iff any access is created. */
1488 static bool
1489 scan_function (void)
1491 basic_block bb;
1492 bool ret = false;
1494 FOR_EACH_BB_FN (bb, cfun)
1496 gimple_stmt_iterator gsi;
1497 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1498 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), NULL, NULL, NULL,
1499 scan_visit_addr);
1501 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1503 gimple *stmt = gsi_stmt (gsi);
1504 tree t;
1505 unsigned i;
1507 if (gimple_code (stmt) != GIMPLE_CALL)
1508 walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
1509 scan_visit_addr);
1511 switch (gimple_code (stmt))
1513 case GIMPLE_RETURN:
1514 t = gimple_return_retval (as_a <greturn *> (stmt));
1515 if (t != NULL_TREE)
1516 ret |= build_access_from_expr (t, stmt, false);
1517 break;
1519 case GIMPLE_ASSIGN:
1520 ret |= build_accesses_from_assign (stmt);
1521 break;
1523 case GIMPLE_CALL:
1525 enum out_edge_check oe_check = SRA_OUTGOING_EDGES_UNCHECKED;
1526 gcall *call = as_a <gcall *> (stmt);
1527 for (i = 0; i < gimple_call_num_args (call); i++)
1529 bool can_be_returned;
1530 if (gimple_call_lhs (call))
1532 int af = gimple_call_arg_flags (call, i);
1533 can_be_returned = !(af & EAF_NOT_RETURNED_DIRECTLY);
1535 else
1536 can_be_returned = false;
1537 ret |= build_access_from_call_arg (gimple_call_arg (call,
1538 i),
1539 stmt, can_be_returned,
1540 &oe_check);
1542 if (gimple_call_chain (stmt))
1543 ret |= build_access_from_call_arg (gimple_call_chain (call),
1544 stmt, false, &oe_check);
1547 t = gimple_call_lhs (stmt);
1548 if (t && !disqualify_if_bad_bb_terminating_stmt (stmt, t, NULL))
1550 /* If the STMT is a call to DEFERRED_INIT, avoid setting
1551 cannot_scalarize_away_bitmap. */
1552 if (gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
1553 ret |= !!build_access_from_expr_1 (t, stmt, true);
1554 else
1555 ret |= build_access_from_expr (t, stmt, true);
1557 break;
1559 case GIMPLE_ASM:
1561 gasm *asm_stmt = as_a <gasm *> (stmt);
1562 if (stmt_ends_bb_p (asm_stmt)
1563 && !single_succ_p (gimple_bb (asm_stmt)))
1565 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
1567 t = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
1568 disqualify_base_of_expr (t, "OP of asm goto.");
1570 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
1572 t = TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
1573 disqualify_base_of_expr (t, "OP of asm goto.");
1576 else
1578 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
1580 t = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
1581 ret |= build_access_from_expr (t, asm_stmt, false);
1583 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
1585 t = TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
1586 ret |= build_access_from_expr (t, asm_stmt, true);
1590 break;
1592 default:
1593 break;
1598 return ret;
1601 /* Helper of QSORT function. There are pointers to accesses in the array. An
1602 access is considered smaller than another if it has smaller offset or if the
1603 offsets are the same but its size is bigger. */
1605 static int
1606 compare_access_positions (const void *a, const void *b)
1608 const access_p *fp1 = (const access_p *) a;
1609 const access_p *fp2 = (const access_p *) b;
1610 const access_p f1 = *fp1;
1611 const access_p f2 = *fp2;
1613 if (f1->offset != f2->offset)
1614 return f1->offset < f2->offset ? -1 : 1;
1616 if (f1->size == f2->size)
1618 if (f1->type == f2->type)
1619 return 0;
1620 /* Put any non-aggregate type before any aggregate type. */
1621 else if (!is_gimple_reg_type (f1->type)
1622 && is_gimple_reg_type (f2->type))
1623 return 1;
1624 else if (is_gimple_reg_type (f1->type)
1625 && !is_gimple_reg_type (f2->type))
1626 return -1;
1627 /* Put any complex or vector type before any other scalar type. */
1628 else if (TREE_CODE (f1->type) != COMPLEX_TYPE
1629 && TREE_CODE (f1->type) != VECTOR_TYPE
1630 && (TREE_CODE (f2->type) == COMPLEX_TYPE
1631 || VECTOR_TYPE_P (f2->type)))
1632 return 1;
1633 else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
1634 || VECTOR_TYPE_P (f1->type))
1635 && TREE_CODE (f2->type) != COMPLEX_TYPE
1636 && TREE_CODE (f2->type) != VECTOR_TYPE)
1637 return -1;
1638 /* Put any integral type before any non-integral type. When splicing, we
1639 make sure that those with insufficient precision and occupying the
1640 same space are not scalarized. */
1641 else if (INTEGRAL_TYPE_P (f1->type)
1642 && !INTEGRAL_TYPE_P (f2->type))
1643 return -1;
1644 else if (!INTEGRAL_TYPE_P (f1->type)
1645 && INTEGRAL_TYPE_P (f2->type))
1646 return 1;
1647 /* Put the integral type with the bigger precision first. */
1648 else if (INTEGRAL_TYPE_P (f1->type)
1649 && INTEGRAL_TYPE_P (f2->type)
1650 && (TYPE_PRECISION (f2->type) != TYPE_PRECISION (f1->type)))
1651 return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
1652 /* Stabilize the sort. */
1653 return TYPE_UID (f1->type) - TYPE_UID (f2->type);
1656 /* We want the bigger accesses first, thus the opposite operator in the next
1657 line: */
1658 return f1->size > f2->size ? -1 : 1;
1662 /* Append a name of the declaration to the name obstack. A helper function for
1663 make_fancy_name. */
1665 static void
1666 make_fancy_decl_name (tree decl)
1668 char buffer[32];
1670 tree name = DECL_NAME (decl);
1671 if (name)
1672 obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
1673 IDENTIFIER_LENGTH (name));
1674 else
1676 sprintf (buffer, "D%u", DECL_UID (decl));
1677 obstack_grow (&name_obstack, buffer, strlen (buffer));
1681 /* Helper for make_fancy_name. */
1683 static void
1684 make_fancy_name_1 (tree expr)
1686 char buffer[32];
1687 tree index;
1689 if (DECL_P (expr))
1691 make_fancy_decl_name (expr);
1692 return;
1695 switch (TREE_CODE (expr))
1697 case COMPONENT_REF:
1698 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1699 obstack_1grow (&name_obstack, '$');
1700 make_fancy_decl_name (TREE_OPERAND (expr, 1));
1701 break;
1703 case ARRAY_REF:
1704 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1705 obstack_1grow (&name_obstack, '$');
1706 /* Arrays with only one element may not have a constant as their
1707 index. */
1708 index = TREE_OPERAND (expr, 1);
1709 if (TREE_CODE (index) != INTEGER_CST)
1710 break;
1711 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
1712 obstack_grow (&name_obstack, buffer, strlen (buffer));
1713 break;
1715 case BIT_FIELD_REF:
1716 case ADDR_EXPR:
1717 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1718 break;
1720 case MEM_REF:
1721 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1722 if (!integer_zerop (TREE_OPERAND (expr, 1)))
1724 obstack_1grow (&name_obstack, '$');
1725 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
1726 TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
1727 obstack_grow (&name_obstack, buffer, strlen (buffer));
1729 break;
1731 case REALPART_EXPR:
1732 case IMAGPART_EXPR:
1733 gcc_unreachable (); /* we treat these as scalars. */
1734 break;
1735 default:
1736 break;
1740 /* Create a human readable name for replacement variable of ACCESS. */
1742 static char *
1743 make_fancy_name (tree expr)
1745 make_fancy_name_1 (expr);
1746 obstack_1grow (&name_obstack, '\0');
1747 return XOBFINISH (&name_obstack, char *);
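/* For illustration (hypothetical examples): for an access expression such as
   v.in.k the functions above produce the name "v$in$k", and for arr[3].fld
   something like "arr$3$fld".  These names only serve to make the scalar
   replacements easier to recognize in dumps and in debug information.  */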
1750 /* Construct a MEM_REF that would reference a part of aggregate BASE of type
1751 EXP_TYPE at the given OFFSET and with storage order REVERSE. If BASE is
1752 something for which get_addr_base_and_unit_offset returns NULL, gsi must
1753 be non-NULL and is used to insert new statements either before or below
1754 the current one as specified by INSERT_AFTER. This function is not capable
1755 of handling bitfields. */
1757 tree
1758 build_ref_for_offset (location_t loc, tree base, poly_int64 offset,
1759 bool reverse, tree exp_type, gimple_stmt_iterator *gsi,
1760 bool insert_after)
1762 tree prev_base = base;
1763 tree off;
1764 tree mem_ref;
1765 poly_int64 base_offset;
1766 unsigned HOST_WIDE_INT misalign;
1767 unsigned int align;
1769 /* Preserve address-space information. */
1770 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1771 if (as != TYPE_ADDR_SPACE (exp_type))
1772 exp_type = build_qualified_type (exp_type,
1773 TYPE_QUALS (exp_type)
1774 | ENCODE_QUAL_ADDR_SPACE (as));
1776 poly_int64 byte_offset = exact_div (offset, BITS_PER_UNIT);
1777 get_object_alignment_1 (base, &align, &misalign);
1778 base = get_addr_base_and_unit_offset (base, &base_offset);
1780 /* get_addr_base_and_unit_offset returns NULL for references with a variable
1781 offset such as array[var_index]. */
1782 if (!base)
1784 gassign *stmt;
1785 tree tmp, addr;
1787 gcc_checking_assert (gsi);
1788 tmp = make_ssa_name (build_pointer_type (TREE_TYPE (prev_base)));
1789 addr = build_fold_addr_expr (unshare_expr (prev_base));
1790 STRIP_USELESS_TYPE_CONVERSION (addr);
1791 stmt = gimple_build_assign (tmp, addr);
1792 gimple_set_location (stmt, loc);
1793 if (insert_after)
1794 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
1795 else
1796 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1798 off = build_int_cst (reference_alias_ptr_type (prev_base), byte_offset);
1799 base = tmp;
1801 else if (TREE_CODE (base) == MEM_REF)
1803 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1804 base_offset + byte_offset);
1805 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1806 base = unshare_expr (TREE_OPERAND (base, 0));
1808 else
1810 off = build_int_cst (reference_alias_ptr_type (prev_base),
1811 base_offset + byte_offset);
1812 base = build_fold_addr_expr (unshare_expr (base));
1815 unsigned int align_bound = known_alignment (misalign + offset);
1816 if (align_bound != 0)
1817 align = MIN (align, align_bound);
1818 if (align != TYPE_ALIGN (exp_type))
1819 exp_type = build_aligned_type (exp_type, align);
1821 mem_ref = fold_build2_loc (loc, MEM_REF, exp_type, base, off);
1822 REF_REVERSE_STORAGE_ORDER (mem_ref) = reverse;
1823 if (TREE_THIS_VOLATILE (prev_base))
1824 TREE_THIS_VOLATILE (mem_ref) = 1;
1825 if (TREE_SIDE_EFFECTS (prev_base))
1826 TREE_SIDE_EFFECTS (mem_ref) = 1;
1827 return mem_ref;
1830 /* Construct and return a memory reference that is equal to a portion of
1831 MODEL->expr but is based on BASE. If this cannot be done, return NULL. */
1833 static tree
1834 build_reconstructed_reference (location_t, tree base, struct access *model)
1836 tree expr = model->expr;
1837 /* We have to make sure to start just below the outermost union. */
1838 tree start_expr = expr;
1839 while (handled_component_p (expr))
1841 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == UNION_TYPE)
1842 start_expr = expr;
1843 expr = TREE_OPERAND (expr, 0);
1846 expr = start_expr;
1847 tree prev_expr = NULL_TREE;
1848 while (!types_compatible_p (TREE_TYPE (expr), TREE_TYPE (base)))
1850 if (!handled_component_p (expr))
1851 return NULL_TREE;
1852 prev_expr = expr;
1853 expr = TREE_OPERAND (expr, 0);
1856 /* Guard against broken VIEW_CONVERT_EXPRs... */
1857 if (!prev_expr)
1858 return NULL_TREE;
1860 TREE_OPERAND (prev_expr, 0) = base;
1861 tree ref = unshare_expr (model->expr);
1862 TREE_OPERAND (prev_expr, 0) = expr;
1863 return ref;
1866 /* Construct a memory reference to a part of an aggregate BASE at the given
1867 OFFSET and of the same type as MODEL. In case this is a reference to a
1868 bit-field, the function will replicate the last component_ref of model's
1869 expr to access it. INSERT_AFTER and GSI have the same meaning as in
1870 build_ref_for_offset; furthermore, when GSI is NULL, the function expects
1871 that it re-builds the entire reference from a DECL to the final access and
1872 so will create a MEM_REF when OFFSET does not exactly match offset of
1873 MODEL. */
1875 static tree
1876 build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1877 struct access *model, gimple_stmt_iterator *gsi,
1878 bool insert_after)
1880 gcc_assert (offset >= 0);
1881 if (TREE_CODE (model->expr) == COMPONENT_REF
1882 && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
1884 /* This access represents a bit-field. */
1885 tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);
1887 offset -= int_bit_position (fld);
1888 exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
1889 t = build_ref_for_offset (loc, base, offset, model->reverse, exp_type,
1890 gsi, insert_after);
1891 /* The flag will be set on the record type. */
1892 REF_REVERSE_STORAGE_ORDER (t) = 0;
1893 return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
1894 NULL_TREE);
1896 else
1898 tree res;
1899 if (model->grp_same_access_path
1900 && !TREE_THIS_VOLATILE (base)
1901 && (TYPE_ADDR_SPACE (TREE_TYPE (base))
1902 == TYPE_ADDR_SPACE (TREE_TYPE (model->expr)))
1903 && (offset == model->offset
1904 || (gsi && offset <= model->offset))
1905 /* build_reconstructed_reference can still fail if we have already
1906 massaged BASE because of another type incompatibility. */
1907 && (res = build_reconstructed_reference (loc, base, model)))
1908 return res;
1909 else
1910 return build_ref_for_offset (loc, base, offset, model->reverse,
1911 model->type, gsi, insert_after);
1915 /* Attempt to build a memory reference that we could put into a gimple
1916 debug_bind statement. Similar to build_ref_for_model but punts if it has to
1917 create statements and returns NULL instead. This function also ignores
1918 alignment issues and so its results should never end up in non-debug
1919 statements. */
1921 static tree
1922 build_debug_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1923 struct access *model)
1925 poly_int64 base_offset;
1926 tree off;
1928 if (TREE_CODE (model->expr) == COMPONENT_REF
1929 && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
1930 return NULL_TREE;
1932 base = get_addr_base_and_unit_offset (base, &base_offset);
1933 if (!base)
1934 return NULL_TREE;
1935 if (TREE_CODE (base) == MEM_REF)
1937 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1938 base_offset + offset / BITS_PER_UNIT);
1939 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1940 base = unshare_expr (TREE_OPERAND (base, 0));
1942 else
1944 off = build_int_cst (reference_alias_ptr_type (base),
1945 base_offset + offset / BITS_PER_UNIT);
1946 base = build_fold_addr_expr (unshare_expr (base));
1949 return fold_build2_loc (loc, MEM_REF, model->type, base, off);
1952 /* Construct a memory reference consisting of component_refs and array_refs to
1953 a part of an aggregate *RES (which is of type TYPE). The requested part
1954 should have type EXP_TYPE and be at the given OFFSET. This function might not
1955 succeed; it returns true when it does and only then *RES points to something
1956 meaningful. This function should be used only to build expressions that we
1957 might need to present to the user (e.g. in warnings). In all other situations,
1958 build_ref_for_model or build_ref_for_offset should be used instead. */
1960 static bool
1961 build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
1962 tree exp_type)
1964 while (1)
1966 tree fld;
1967 tree tr_size, index, minidx;
1968 HOST_WIDE_INT el_size;
1970 if (offset == 0 && exp_type
1971 && types_compatible_p (exp_type, type))
1972 return true;
1974 switch (TREE_CODE (type))
1976 case UNION_TYPE:
1977 case QUAL_UNION_TYPE:
1978 case RECORD_TYPE:
1979 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1981 HOST_WIDE_INT pos, size;
1982 tree tr_pos, expr, *expr_ptr;
1984 if (TREE_CODE (fld) != FIELD_DECL)
1985 continue;
1987 tr_pos = bit_position (fld);
1988 if (!tr_pos || !tree_fits_uhwi_p (tr_pos))
1989 continue;
1990 pos = tree_to_uhwi (tr_pos);
1991 gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
1992 tr_size = DECL_SIZE (fld);
1993 if (!tr_size || !tree_fits_uhwi_p (tr_size))
1994 continue;
1995 size = tree_to_uhwi (tr_size);
1996 if (size == 0)
1998 if (pos != offset)
1999 continue;
2001 else if (pos > offset || (pos + size) <= offset)
2002 continue;
2004 expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
2005 NULL_TREE);
2006 expr_ptr = &expr;
2007 if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
2008 offset - pos, exp_type))
2010 *res = expr;
2011 return true;
2014 return false;
2016 case ARRAY_TYPE:
2017 tr_size = TYPE_SIZE (TREE_TYPE (type));
2018 if (!tr_size || !tree_fits_uhwi_p (tr_size))
2019 return false;
2020 el_size = tree_to_uhwi (tr_size);
2022 minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
2023 if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
2024 return false;
2025 index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
2026 if (!integer_zerop (minidx))
2027 index = int_const_binop (PLUS_EXPR, index, minidx);
2028 *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
2029 NULL_TREE, NULL_TREE);
2030 offset = offset % el_size;
2031 type = TREE_TYPE (type);
2032 break;
2034 default:
2035 if (offset != 0)
2036 return false;
2038 if (exp_type)
2039 return false;
2040 else
2041 return true;
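/* For illustration only (types and names below are made up):

     struct inner { int a; float b; };
     struct outer { struct inner in; } o;

   calling the function above with *RES == o, TYPE == struct outer,
   OFFSET == 32 (bits) and EXP_TYPE == float descends through the two
   RECORD_TYPEs and produces the user-friendly reference o.in.b instead of a
   MEM_REF at a byte offset.  */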
2046 /* Print a message to the dump file saying why a variable was rejected. */
2048 static void
2049 reject (tree var, const char *msg)
2051 if (dump_file && (dump_flags & TDF_DETAILS))
2053 fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
2054 print_generic_expr (dump_file, var);
2055 fprintf (dump_file, "\n");
2059 /* If VAR is a suitable candidate for SRA, add it to the candidate set and return true; otherwise return false. */
2061 static bool
2062 maybe_add_sra_candidate (tree var)
2064 tree type = TREE_TYPE (var);
2065 const char *msg;
2066 tree_node **slot;
2068 if (!AGGREGATE_TYPE_P (type))
2070 reject (var, "not aggregate");
2071 return false;
2074 if ((is_global_var (var)
2075 /* There are cases where non-addressable variables fail the
2076 pt_solutions_check test, e.g. in gcc.dg/uninit-40.c. */
2077 || (TREE_ADDRESSABLE (var)
2078 && pt_solution_includes (&cfun->gimple_df->escaped_return, var))
2079 || (TREE_CODE (var) == RESULT_DECL
2080 && !DECL_BY_REFERENCE (var)
2081 && aggregate_value_p (var, current_function_decl)))
2082 /* Allow constant-pool entries that "need to live in memory". */
2083 && !constant_decl_p (var))
2085 reject (var, "needs to live in memory and escapes or global");
2086 return false;
2088 if (TREE_THIS_VOLATILE (var))
2090 reject (var, "is volatile");
2091 return false;
2093 if (!COMPLETE_TYPE_P (type))
2095 reject (var, "has incomplete type");
2096 return false;
2098 if (!tree_fits_shwi_p (TYPE_SIZE (type)))
2100 reject (var, "type size not fixed");
2101 return false;
2103 if (tree_to_shwi (TYPE_SIZE (type)) == 0)
2105 reject (var, "type size is zero");
2106 return false;
2108 if (type_internals_preclude_sra_p (type, &msg))
2110 reject (var, msg);
2111 return false;
2113 if (/* Fix for PR 41089. tree-stdarg.cc needs to have va_lists intact but
2114 we also want to schedule it rather late. Thus we ignore it in
2115 the early pass. */
2116 (sra_mode == SRA_MODE_EARLY_INTRA
2117 && is_va_list_type (type)))
2119 reject (var, "is va_list");
2120 return false;
2123 bitmap_set_bit (candidate_bitmap, DECL_UID (var));
2124 slot = candidates->find_slot_with_hash (var, DECL_UID (var), INSERT);
2125 *slot = var;
2127 if (dump_file && (dump_flags & TDF_DETAILS))
2129 fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
2130 print_generic_expr (dump_file, var);
2131 fprintf (dump_file, "\n");
2134 return true;
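/* For illustration only, a few made-up local declarations and how the checks
   above treat them:

     void
     f (void)
     {
       struct { int x, y; } p;         // accepted: complete, non-volatile local aggregate
       volatile struct { int x; } v;   // rejected: "is volatile"
       static struct { int x; } g;     // rejected: is_global_var, needs to live in memory
       struct incomplete *q;           // rejected: pointer, "not aggregate"
     }

   Only "p" ends up in candidate_bitmap and in the candidates hash table.  */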
2137 /* The very first phase of intraprocedural SRA. It marks in candidate_bitmap
2138 those declarations whose type is suitable for scalarization. */
2140 static bool
2141 find_var_candidates (void)
2143 tree var, parm;
2144 unsigned int i;
2145 bool ret = false;
2147 for (parm = DECL_ARGUMENTS (current_function_decl);
2148 parm;
2149 parm = DECL_CHAIN (parm))
2150 ret |= maybe_add_sra_candidate (parm);
2152 FOR_EACH_LOCAL_DECL (cfun, i, var)
2154 if (!VAR_P (var))
2155 continue;
2157 ret |= maybe_add_sra_candidate (var);
2160 return ret;
2163 /* Return true if EXPR is a reference chain of COMPONENT_REFs and ARRAY_REFs
2164 ending either with a DECL or a MEM_REF with zero offset. */
2166 static bool
2167 path_comparable_for_same_access (tree expr)
2169 while (handled_component_p (expr))
2171 if (TREE_CODE (expr) == ARRAY_REF)
2173 /* SSA name indices can occur here too when the array is of size one.
2174 But we cannot just re-use array_refs with SSA names elsewhere in
2175 the function, so disallow non-constant indices. TODO: Remove this
2176 limitation after teaching build_reconstructed_reference to replace
2177 the index with the index type lower bound. */
2178 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST)
2179 return false;
2181 expr = TREE_OPERAND (expr, 0);
2184 if (TREE_CODE (expr) == MEM_REF)
2186 if (!zerop (TREE_OPERAND (expr, 1)))
2187 return false;
2189 else
2190 gcc_assert (DECL_P (expr));
2192 return true;
2195 /* Assuming that EXP1 consists of only COMPONENT_REFs and ARRAY_REFs, return
2196 true if the chain of these handled components is exactly the same as in EXP2
2197 and the expression under them is the same DECL or an equivalent MEM_REF.
2198 The reference picked by compare_access_positions must go to EXP1. */
2200 static bool
2201 same_access_path_p (tree exp1, tree exp2)
2203 if (TREE_CODE (exp1) != TREE_CODE (exp2))
2205 /* Special case single-field structures loaded sometimes as the field
2206 and sometimes as the structure. If the field is of a scalar type,
2207 compare_access_positions will put it into exp1.
2209 TODO: The gimple register type condition can be removed if we teach
2210 compare_access_positions to put inner types first. */
2211 if (is_gimple_reg_type (TREE_TYPE (exp1))
2212 && TREE_CODE (exp1) == COMPONENT_REF
2213 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (exp1, 0)))
2214 == TYPE_MAIN_VARIANT (TREE_TYPE (exp2))))
2215 exp1 = TREE_OPERAND (exp1, 0);
2216 else
2217 return false;
2220 if (!operand_equal_p (exp1, exp2, OEP_ADDRESS_OF))
2221 return false;
2223 return true;
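/* For illustration only (declarations made up):

     union { struct { int i; } s; int j; } u;   // u.s.i and u.j cover the same bits
     struct wrap { int v; } w;                   // accessed as w.v and as the whole w

   The accesses u.s.i and u.j go through different component chains, so the
   function above returns false and grp_same_access_path is later cleared in
   sort_and_splice_var_accesses.  The accesses w.v and w hit the single-field
   special case: w.v is peeled down to w and the two paths compare equal.  */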
2226 /* Sort all accesses for the given variable, check for partial overlaps and
2227 return NULL if there are any. If there are none, pick a representative for
2228 each combination of offset and size and create a linked list out of them.
2229 Return the pointer to the first representative and make sure it is the first
2230 one in the vector of accesses. */
2232 static struct access *
2233 sort_and_splice_var_accesses (tree var)
2235 int i, j, access_count;
2236 struct access *res, **prev_acc_ptr = &res;
2237 vec<access_p> *access_vec;
2238 bool first = true;
2239 HOST_WIDE_INT low = -1, high = 0;
2241 access_vec = get_base_access_vector (var);
2242 if (!access_vec)
2243 return NULL;
2244 access_count = access_vec->length ();
2246 /* Sort by <OFFSET, SIZE>. */
2247 access_vec->qsort (compare_access_positions);
2249 i = 0;
2250 while (i < access_count)
2252 struct access *access = (*access_vec)[i];
2253 bool grp_write = access->write;
2254 bool grp_read = !access->write;
2255 bool grp_scalar_write = access->write
2256 && is_gimple_reg_type (access->type);
2257 bool grp_scalar_read = !access->write
2258 && is_gimple_reg_type (access->type);
2259 bool grp_assignment_read = access->grp_assignment_read;
2260 bool grp_assignment_write = access->grp_assignment_write;
2261 bool multiple_scalar_reads = false;
2262 bool grp_partial_lhs = access->grp_partial_lhs;
2263 bool first_scalar = is_gimple_reg_type (access->type);
2264 bool unscalarizable_region = access->grp_unscalarizable_region;
2265 bool grp_same_access_path = true;
2266 bool bf_non_full_precision
2267 = (INTEGRAL_TYPE_P (access->type)
2268 && TYPE_PRECISION (access->type) != access->size
2269 && TREE_CODE (access->expr) == COMPONENT_REF
2270 && DECL_BIT_FIELD (TREE_OPERAND (access->expr, 1)));
2272 if (first || access->offset >= high)
2274 first = false;
2275 low = access->offset;
2276 high = access->offset + access->size;
2278 else if (access->offset > low && access->offset + access->size > high)
2279 return NULL;
2280 else
2281 gcc_assert (access->offset >= low
2282 && access->offset + access->size <= high);
2284 if (INTEGRAL_TYPE_P (access->type)
2285 && TYPE_PRECISION (access->type) != access->size
2286 && bitmap_bit_p (passed_by_ref_in_call, DECL_UID (access->base)))
2288 /* This can lead to performance regressions because we can generate
2289 excessive zero extensions. */
2290 if (dump_file && (dump_flags & TDF_DETAILS))
2292 fprintf (dump_file, "Won't scalarize ");
2293 print_generic_expr (dump_file, access->base);
2294 fprintf (dump_file, "(%d), it is passed by reference to a call "
2295 "and there are accesses with precision not covering "
2296 "their type size.", DECL_UID (access->base));
2298 return NULL;
2301 grp_same_access_path = path_comparable_for_same_access (access->expr);
2303 j = i + 1;
2304 while (j < access_count)
2306 struct access *ac2 = (*access_vec)[j];
2307 if (ac2->offset != access->offset || ac2->size != access->size)
2308 break;
2309 if (ac2->write)
2311 grp_write = true;
2312 grp_scalar_write = (grp_scalar_write
2313 || is_gimple_reg_type (ac2->type));
2315 else
2317 grp_read = true;
2318 if (is_gimple_reg_type (ac2->type))
2320 if (grp_scalar_read)
2321 multiple_scalar_reads = true;
2322 else
2323 grp_scalar_read = true;
2326 grp_assignment_read |= ac2->grp_assignment_read;
2327 grp_assignment_write |= ac2->grp_assignment_write;
2328 grp_partial_lhs |= ac2->grp_partial_lhs;
2329 unscalarizable_region |= ac2->grp_unscalarizable_region;
2330 relink_to_new_repr (access, ac2);
2332 /* If there are both aggregate-type and scalar-type accesses with
2333 this combination of size and offset, the comparison function
2334 should have put the scalars first. */
2335 gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
2336 /* It also prefers integral types to non-integral. However, when the
2337 precision of the selected type does not span the entire area and
2338 should also be used for a non-integer (i.e. float), we must not
2339 let that happen. Normally analyze_access_subtree expands the type
2340 to cover the entire area but for bit-fields it doesn't. */
2341 if (bf_non_full_precision && !INTEGRAL_TYPE_P (ac2->type))
2343 if (dump_file && (dump_flags & TDF_DETAILS))
2345 fprintf (dump_file, "Cannot scalarize the following access "
2346 "because insufficient precision integer type was "
2347 "selected.\n ");
2348 dump_access (dump_file, access, false);
2350 unscalarizable_region = true;
2353 if (grp_same_access_path
2354 && !same_access_path_p (access->expr, ac2->expr))
2355 grp_same_access_path = false;
2357 ac2->group_representative = access;
2358 j++;
2361 i = j;
2363 access->group_representative = access;
2364 access->grp_write = grp_write;
2365 access->grp_read = grp_read;
2366 access->grp_scalar_read = grp_scalar_read;
2367 access->grp_scalar_write = grp_scalar_write;
2368 access->grp_assignment_read = grp_assignment_read;
2369 access->grp_assignment_write = grp_assignment_write;
2370 access->grp_hint = multiple_scalar_reads && !constant_decl_p (var);
2371 access->grp_partial_lhs = grp_partial_lhs;
2372 access->grp_unscalarizable_region = unscalarizable_region;
2373 access->grp_same_access_path = grp_same_access_path;
2375 *prev_acc_ptr = access;
2376 prev_acc_ptr = &access->next_grp;
2379 gcc_assert (res == (*access_vec)[0]);
2380 return res;
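/* For illustration only (code made up):

     struct pt { int x; int y; } p;
     p.x = 1;        // write, offset 0,  size 32
     tmp = p.x;      // read,  offset 0,  size 32
     tmp2 = p.y;     // read,  offset 32, size 32

   The function above groups the two accesses at <0, 32> under a single
   representative and links it with the representative for <32, 32> through
   next_grp.  If the vector also contained an access spanning, say, bits 16
   to 47, it would partially overlap both groups and the whole of "p" would
   be disqualified (NULL returned).  */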
2383 /* Create a variable for the given ACCESS which determines the type, name and a
2384 few other properties. Return the variable declaration and store it also to
2385 ACCESS->replacement_decl. REG_TYPE is used when creating a declaration to base a
2386 default-definition SSA name on in order to facilitate an uninitialized
2387 warning. It is used instead of the actual ACCESS type if that is not of a
2388 gimple register type. */
2390 static tree
2391 create_access_replacement (struct access *access, tree reg_type = NULL_TREE)
2393 tree repl;
2395 tree type = access->type;
2396 if (reg_type && !is_gimple_reg_type (type))
2397 type = reg_type;
2399 if (access->grp_to_be_debug_replaced)
2401 repl = create_tmp_var_raw (access->type);
2402 DECL_CONTEXT (repl) = current_function_decl;
2404 else
2405 /* Drop any special alignment on the type if it's not on the main
2406 variant. This avoids issues with weirdo ABIs like AAPCS. */
2407 repl = create_tmp_var (build_qualified_type (TYPE_MAIN_VARIANT (type),
2408 TYPE_QUALS (type)), "SR");
2409 if (access->grp_partial_lhs
2410 && is_gimple_reg_type (type))
2411 DECL_NOT_GIMPLE_REG_P (repl) = 1;
2413 DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
2414 DECL_ARTIFICIAL (repl) = 1;
2415 DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
2417 if (DECL_NAME (access->base)
2418 && !DECL_IGNORED_P (access->base)
2419 && !DECL_ARTIFICIAL (access->base))
2421 char *pretty_name = make_fancy_name (access->expr);
2422 tree debug_expr = unshare_expr_without_location (access->expr), d;
2423 bool fail = false;
2425 DECL_NAME (repl) = get_identifier (pretty_name);
2426 DECL_NAMELESS (repl) = 1;
2427 obstack_free (&name_obstack, pretty_name);
2429 /* Get rid of any SSA_NAMEs embedded in debug_expr,
2430 as DECL_DEBUG_EXPR isn't considered when looking for still
2431 used SSA_NAMEs and thus they could be freed. All debug info
2432 generation cares is whether something is constant or variable
2433 and that get_ref_base_and_extent works properly on the
2434 expression. It cannot handle accesses at a non-constant offset
2435 though, so just give up in those cases. */
2436 for (d = debug_expr;
2437 !fail && (handled_component_p (d) || TREE_CODE (d) == MEM_REF);
2438 d = TREE_OPERAND (d, 0))
2439 switch (TREE_CODE (d))
2441 case ARRAY_REF:
2442 case ARRAY_RANGE_REF:
2443 if (TREE_OPERAND (d, 1)
2444 && TREE_CODE (TREE_OPERAND (d, 1)) != INTEGER_CST)
2445 fail = true;
2446 if (TREE_OPERAND (d, 3)
2447 && TREE_CODE (TREE_OPERAND (d, 3)) != INTEGER_CST)
2448 fail = true;
2449 /* FALLTHRU */
2450 case COMPONENT_REF:
2451 if (TREE_OPERAND (d, 2)
2452 && TREE_CODE (TREE_OPERAND (d, 2)) != INTEGER_CST)
2453 fail = true;
2454 break;
2455 case MEM_REF:
2456 if (TREE_CODE (TREE_OPERAND (d, 0)) != ADDR_EXPR)
2457 fail = true;
2458 else
2459 d = TREE_OPERAND (d, 0);
2460 break;
2461 default:
2462 break;
2464 if (!fail)
2466 SET_DECL_DEBUG_EXPR (repl, debug_expr);
2467 DECL_HAS_DEBUG_EXPR_P (repl) = 1;
2469 if (access->grp_no_warning)
2470 suppress_warning (repl /* Be more selective! */);
2471 else
2472 copy_warning (repl, access->base);
2474 else
2475 suppress_warning (repl /* Be more selective! */);
2477 if (dump_file)
2479 if (access->grp_to_be_debug_replaced)
2481 fprintf (dump_file, "Created a debug-only replacement for ");
2482 print_generic_expr (dump_file, access->base);
2483 fprintf (dump_file, " offset: %u, size: %u\n",
2484 (unsigned) access->offset, (unsigned) access->size);
2486 else
2488 fprintf (dump_file, "Created a replacement for ");
2489 print_generic_expr (dump_file, access->base);
2490 fprintf (dump_file, " offset: %u, size: %u: ",
2491 (unsigned) access->offset, (unsigned) access->size);
2492 print_generic_expr (dump_file, repl, TDF_UID);
2493 fprintf (dump_file, "\n");
2496 sra_stats.replacements++;
2498 return repl;
2501 /* Return ACCESS scalar replacement, which must exist. */
2503 static inline tree
2504 get_access_replacement (struct access *access)
2506 gcc_checking_assert (access->replacement_decl);
2507 return access->replacement_decl;
2511 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
2512 linked list along the way. Stop when *ACCESS is NULL or the access pointed
2513 to by it is not "within" the root. Return false iff some accesses partially
2514 overlap. */
2516 static bool
2517 build_access_subtree (struct access **access)
2519 struct access *root = *access, *last_child = NULL;
2520 HOST_WIDE_INT limit = root->offset + root->size;
2522 *access = (*access)->next_grp;
2523 while (*access && (*access)->offset + (*access)->size <= limit)
2525 if (!last_child)
2526 root->first_child = *access;
2527 else
2528 last_child->next_sibling = *access;
2529 last_child = *access;
2530 (*access)->parent = root;
2531 (*access)->grp_write |= root->grp_write;
2533 if (!build_access_subtree (access))
2534 return false;
2537 if (*access && (*access)->offset < limit)
2538 return false;
2540 return true;
2543 /* Build a tree of access representatives, ACCESS is the pointer to the first
2544 one, others are linked in a list by the next_grp field. Return false iff
2545 some accesses partially overlap. */
2547 static bool
2548 build_access_trees (struct access *access)
2550 while (access)
2552 struct access *root = access;
2554 if (!build_access_subtree (&access))
2555 return false;
2556 root->next_grp = access;
2558 return true;
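/* For illustration only (code made up): if the representatives for

     struct outer { struct inner { int a; int b; } in; int c; } o;

   are <0, 96> for the whole of "o", <0, 64> for o.in and <0, 32> for
   o.in.a, the function above links them into a single tree: the access for
   "o" becomes the root, o.in its first_child, and o.in.a in turn the first
   child of o.in, because each child lies entirely within its parent.  */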
2561 /* Traverse the access forest where ROOT is the first root and verify that
2562 various important invariants hold true. */
2564 DEBUG_FUNCTION void
2565 verify_sra_access_forest (struct access *root)
2567 struct access *access = root;
2568 tree first_base = root->base;
2569 gcc_assert (DECL_P (first_base));
2572 gcc_assert (access->base == first_base);
2573 if (access->parent)
2574 gcc_assert (access->offset >= access->parent->offset
2575 && access->size <= access->parent->size);
2576 if (access->next_sibling)
2577 gcc_assert (access->next_sibling->offset
2578 >= access->offset + access->size);
2580 poly_int64 poffset, psize, pmax_size;
2581 bool reverse;
2582 tree base = get_ref_base_and_extent (access->expr, &poffset, &psize,
2583 &pmax_size, &reverse);
2584 HOST_WIDE_INT offset, size, max_size;
2585 if (!poffset.is_constant (&offset)
2586 || !psize.is_constant (&size)
2587 || !pmax_size.is_constant (&max_size))
2588 gcc_unreachable ();
2589 gcc_assert (base == first_base);
2590 gcc_assert (offset == access->offset);
2591 gcc_assert (access->grp_unscalarizable_region
2592 || access->grp_total_scalarization
2593 || size == max_size);
2594 gcc_assert (access->grp_unscalarizable_region
2595 || !is_gimple_reg_type (access->type)
2596 || size == access->size);
2597 gcc_assert (reverse == access->reverse);
2599 if (access->first_child)
2601 gcc_assert (access->first_child->parent == access);
2602 access = access->first_child;
2604 else if (access->next_sibling)
2606 gcc_assert (access->next_sibling->parent == access->parent);
2607 access = access->next_sibling;
2609 else
2611 while (access->parent && !access->next_sibling)
2612 access = access->parent;
2613 if (access->next_sibling)
2614 access = access->next_sibling;
2615 else
2617 gcc_assert (access == root);
2618 root = root->next_grp;
2619 access = root;
2623 while (access);
2626 /* Verify access forests of all candidates with accesses by calling
2627 verify_sra_access_forest on each of them. */
2629 DEBUG_FUNCTION void
2630 verify_all_sra_access_forests (void)
2632 bitmap_iterator bi;
2633 unsigned i;
2634 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2636 tree var = candidate (i);
2637 struct access *access = get_first_repr_for_decl (var);
2638 if (access)
2640 gcc_assert (access->base == var);
2641 verify_sra_access_forest (access);
2646 /* Return true if EXPR contains some ARRAY_REFs into a variable bounded
2647 array. */
2649 static bool
2650 expr_with_var_bounded_array_refs_p (tree expr)
2652 while (handled_component_p (expr))
2654 if (TREE_CODE (expr) == ARRAY_REF
2655 && !tree_fits_shwi_p (array_ref_low_bound (expr)))
2656 return true;
2657 expr = TREE_OPERAND (expr, 0);
2659 return false;
2662 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
2663 both seeming beneficial and when ALLOW_REPLACEMENTS allows it. If TOTALLY
2664 is set, we are totally scalarizing the aggregate. Also set all sorts of
2665 access flags appropriately along the way, notably propagate grp_read,
2666 grp_assignment_read, grp_write and grp_assignment_write from PARENT to
2667 ROOT.
2669 Creating a replacement for a scalar access is considered beneficial if its
2670 grp_hint or TOTALLY is set (this means either that there is more than one
2671 direct read access or that we are attempting total scalarization) or
2672 according to the following table:
2674 Access written to through a scalar type (once or more times)
2676 | Written to in an assignment statement
2678 | | Access read as scalar _once_
2679 | | |
2680 | | | Read in an assignment statement
2681 | | | |
2682 | | | | Scalarize Comment
2683 -----------------------------------------------------------------------------
2684 0 0 0 0 No access for the scalar
2685 0 0 0 1 No access for the scalar
2686 0 0 1 0 No Single read - won't help
2687 0 0 1 1 No The same case
2688 0 1 0 0 No access for the scalar
2689 0 1 0 1 No access for the scalar
2690 0 1 1 0 Yes s = *g; return s.i;
2691 0 1 1 1 Yes The same case as above
2692 1 0 0 0 No Won't help
2693 1 0 0 1 Yes s.i = 1; *g = s;
2694 1 0 1 0 Yes s.i = 5; g = s.i;
2695 1 0 1 1 Yes The same case as above
2696 1 1 0 0 No Won't help.
2697 1 1 0 1 Yes s.i = 1; *g = s;
2698 1 1 1 0 Yes s = *g; return s.i;
2699 1 1 1 1 Yes Any of the above yeses */
2701 static bool
2702 analyze_access_subtree (struct access *root, struct access *parent,
2703 bool allow_replacements, bool totally)
2705 struct access *child;
2706 HOST_WIDE_INT limit = root->offset + root->size;
2707 HOST_WIDE_INT covered_to = root->offset;
2708 bool scalar = is_gimple_reg_type (root->type);
2709 bool hole = false, sth_created = false;
2711 if (parent)
2713 if (parent->grp_read)
2714 root->grp_read = 1;
2715 if (parent->grp_assignment_read)
2716 root->grp_assignment_read = 1;
2717 if (parent->grp_write)
2718 root->grp_write = 1;
2719 if (parent->grp_assignment_write)
2720 root->grp_assignment_write = 1;
2721 if (!parent->grp_same_access_path)
2722 root->grp_same_access_path = 0;
2725 if (root->grp_unscalarizable_region)
2726 allow_replacements = false;
2728 if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
2729 allow_replacements = false;
2731 if (!totally && root->grp_result_of_prop_from_lhs)
2732 allow_replacements = false;
2734 for (child = root->first_child; child; child = child->next_sibling)
2736 hole |= covered_to < child->offset;
2737 sth_created |= analyze_access_subtree (child, root,
2738 allow_replacements && !scalar,
2739 totally);
2741 root->grp_unscalarized_data |= child->grp_unscalarized_data;
2742 if (child->grp_covered)
2743 covered_to += child->size;
2744 else
2745 hole = true;
2748 if (allow_replacements && scalar && !root->first_child
2749 && (totally || !root->grp_total_scalarization)
2750 && (totally
2751 || root->grp_hint
2752 || ((root->grp_scalar_read || root->grp_assignment_read)
2753 && (root->grp_scalar_write || root->grp_assignment_write))))
2755 /* Always create access replacements that cover the whole access.
2756 For integral types this means the precision has to match.
2757 Avoid assumptions based on the integral type kind, too. */
2758 if (INTEGRAL_TYPE_P (root->type)
2759 && ((TREE_CODE (root->type) != INTEGER_TYPE
2760 && TREE_CODE (root->type) != BITINT_TYPE)
2761 || TYPE_PRECISION (root->type) != root->size)
2762 /* But leave bitfield accesses alone. */
2763 && (TREE_CODE (root->expr) != COMPONENT_REF
2764 || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
2766 tree rt = root->type;
2767 gcc_assert ((root->offset % BITS_PER_UNIT) == 0
2768 && (root->size % BITS_PER_UNIT) == 0);
2769 if (TREE_CODE (root->type) == BITINT_TYPE)
2770 root->type = build_bitint_type (root->size, TYPE_UNSIGNED (rt));
2771 else
2772 root->type = build_nonstandard_integer_type (root->size,
2773 TYPE_UNSIGNED (rt));
2774 root->expr = build_ref_for_offset (UNKNOWN_LOCATION, root->base,
2775 root->offset, root->reverse,
2776 root->type, NULL, false);
2778 if (dump_file && (dump_flags & TDF_DETAILS))
2780 fprintf (dump_file, "Changing the type of a replacement for ");
2781 print_generic_expr (dump_file, root->base);
2782 fprintf (dump_file, " offset: %u, size: %u ",
2783 (unsigned) root->offset, (unsigned) root->size);
2784 fprintf (dump_file, " to an integer.\n");
2788 root->grp_to_be_replaced = 1;
2789 root->replacement_decl = create_access_replacement (root);
2790 sth_created = true;
2791 hole = false;
2793 else
2795 if (allow_replacements
2796 && scalar && !root->first_child
2797 && !root->grp_total_scalarization
2798 && (root->grp_scalar_write || root->grp_assignment_write)
2799 && !bitmap_bit_p (cannot_scalarize_away_bitmap,
2800 DECL_UID (root->base)))
2802 gcc_checking_assert (!root->grp_scalar_read
2803 && !root->grp_assignment_read);
2804 sth_created = true;
2805 if (MAY_HAVE_DEBUG_BIND_STMTS)
2807 root->grp_to_be_debug_replaced = 1;
2808 root->replacement_decl = create_access_replacement (root);
2812 if (covered_to < limit)
2813 hole = true;
2814 if (scalar || !allow_replacements)
2815 root->grp_total_scalarization = 0;
2818 if (!hole || totally)
2819 root->grp_covered = 1;
2820 else if (root->grp_write || comes_initialized_p (root->base))
2821 root->grp_unscalarized_data = 1; /* not covered and written to */
2822 return sth_created;
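/* For illustration only, the "1 0 0 1" row of the table above spelled out in
   full (names made up):

     struct S { int i; };
     void
     f (struct S *g)
     {
       struct S s;
       s.i = 1;     // written through a scalar type
       *g = s;      // read in an aggregate assignment
     }

   Here s.i is written as a scalar and read as part of an assignment, so a
   replacement is considered beneficial and grp_to_be_replaced is set for the
   access representing s.i.  */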
2825 /* Analyze all access trees linked by next_grp by the means of
2826 analyze_access_subtree. */
2827 static bool
2828 analyze_access_trees (struct access *access)
2830 bool ret = false;
2832 while (access)
2834 if (analyze_access_subtree (access, NULL, true,
2835 access->grp_total_scalarization))
2836 ret = true;
2837 access = access->next_grp;
2840 return ret;
2843 /* Return true iff a potential new child of ACC at offset OFFSET and with size
2844 SIZE would conflict with an already existing one. If exactly such a child
2845 already exists in ACC, store a pointer to it in EXACT_MATCH. */
2847 static bool
2848 child_would_conflict_in_acc (struct access *acc, HOST_WIDE_INT norm_offset,
2849 HOST_WIDE_INT size, struct access **exact_match)
2851 struct access *child;
2853 for (child = acc->first_child; child; child = child->next_sibling)
2855 if (child->offset == norm_offset && child->size == size)
2857 *exact_match = child;
2858 return true;
2861 if (child->offset < norm_offset + size
2862 && child->offset + child->size > norm_offset)
2863 return true;
2866 return false;
2869 /* Create a new child access of PARENT, with all properties just like MODEL
2870 except for its offset and with its grp_read and grp_write flags set
2871 according to SET_GRP_READ and SET_GRP_WRITE. Return the new access. Note
2872 that this access is created long after all splicing and sorting, it's not
2873 located in any access vector and is automatically a representative of its
2874 group. */
2876 static struct access *
2877 create_artificial_child_access (struct access *parent, struct access *model,
2878 HOST_WIDE_INT new_offset,
2879 bool set_grp_read, bool set_grp_write)
2881 struct access **child;
2882 tree expr = parent->base;
2884 gcc_assert (!model->grp_unscalarizable_region);
2886 struct access *access = access_pool.allocate ();
2887 memset (access, 0, sizeof (struct access));
2888 if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2889 model->type))
2891 access->grp_no_warning = true;
2892 expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2893 new_offset, model, NULL, false);
2896 access->base = parent->base;
2897 access->expr = expr;
2898 access->offset = new_offset;
2899 access->size = model->size;
2900 access->type = model->type;
2901 access->parent = parent;
2902 access->grp_read = set_grp_read;
2903 access->grp_write = set_grp_write;
2904 access->reverse = model->reverse;
2906 child = &parent->first_child;
2907 while (*child && (*child)->offset < new_offset)
2908 child = &(*child)->next_sibling;
2910 access->next_sibling = *child;
2911 *child = access;
2913 return access;
2917 /* Beginning with ACCESS, traverse its whole access subtree and mark all
2918 sub-trees as written to. If any of them has not been marked so previously
2919 and has assignment links leading from it, re-enqueue it. */
2921 static void
2922 subtree_mark_written_and_rhs_enqueue (struct access *access)
2924 if (access->grp_write)
2925 return;
2926 access->grp_write = true;
2927 add_access_to_rhs_work_queue (access);
2929 struct access *child;
2930 for (child = access->first_child; child; child = child->next_sibling)
2931 subtree_mark_written_and_rhs_enqueue (child);
2934 /* If there is still budget to create a propagation access for DECL, return
2935 true and decrement the budget. Otherwise return false. */
2937 static bool
2938 budget_for_propagation_access (tree decl)
2940 unsigned b, *p = propagation_budget->get (decl);
2941 if (p)
2942 b = *p;
2943 else
2944 b = param_sra_max_propagations;
2946 if (b == 0)
2947 return false;
2948 b--;
2950 if (b == 0 && dump_file && (dump_flags & TDF_DETAILS))
2952 fprintf (dump_file, "The propagation budget of ");
2953 print_generic_expr (dump_file, decl);
2954 fprintf (dump_file, " (UID: %u) has been exhausted.\n", DECL_UID (decl));
2956 propagation_budget->put (decl, b);
2957 return true;
2960 /* Return true if ACC or any of its subaccesses has grp_write set. */
2962 static bool
2963 access_or_its_child_written (struct access *acc)
2965 if (acc->grp_write)
2966 return true;
2967 for (struct access *sub = acc->first_child; sub; sub = sub->next_sibling)
2968 if (access_or_its_child_written (sub))
2969 return true;
2970 return false;
2973 /* Propagate subaccesses and grp_write flags of RACC across an assignment link
2974 to LACC. Enqueue sub-accesses as necessary so that the write flag is
2975 propagated transitively. Return true if anything changed. Additionally, if
2976 RACC is a scalar access but LACC is not, change the type of the latter, if
2977 possible. */
2979 static bool
2980 propagate_subaccesses_from_rhs (struct access *lacc, struct access *racc)
2982 struct access *rchild;
2983 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2984 bool ret = false;
2986 /* If the LHS is still not marked as being written to, we only need to do so
2987 if the RHS at this level actually was. */
2988 if (!lacc->grp_write)
2990 gcc_checking_assert (!comes_initialized_p (racc->base));
2991 if (racc->grp_write)
2993 subtree_mark_written_and_rhs_enqueue (lacc);
2994 ret = true;
2998 if (is_gimple_reg_type (lacc->type)
2999 || lacc->grp_unscalarizable_region
3000 || racc->grp_unscalarizable_region)
3002 if (!lacc->grp_write)
3004 ret = true;
3005 subtree_mark_written_and_rhs_enqueue (lacc);
3007 return ret;
3010 if (is_gimple_reg_type (racc->type))
3012 if (!lacc->grp_write)
3014 ret = true;
3015 subtree_mark_written_and_rhs_enqueue (lacc);
3017 if (!lacc->first_child && !racc->first_child)
3019 /* We are about to change the access type from aggregate to scalar,
3020 so we need to put the reverse flag onto the access, if any. */
3021 const bool reverse
3022 = TYPE_REVERSE_STORAGE_ORDER (lacc->type)
3023 && !POINTER_TYPE_P (racc->type)
3024 && !VECTOR_TYPE_P (racc->type);
3025 tree t = lacc->base;
3027 lacc->type = racc->type;
3028 if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
3029 lacc->offset, racc->type))
3031 lacc->expr = t;
3032 lacc->grp_same_access_path = true;
3034 else
3036 lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
3037 lacc->base, lacc->offset,
3038 racc, NULL, false);
3039 if (TREE_CODE (lacc->expr) == MEM_REF)
3040 REF_REVERSE_STORAGE_ORDER (lacc->expr) = reverse;
3041 lacc->grp_no_warning = true;
3042 lacc->grp_same_access_path = false;
3044 lacc->reverse = reverse;
3046 return ret;
3049 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
3051 struct access *new_acc = NULL;
3052 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
3054 if (child_would_conflict_in_acc (lacc, norm_offset, rchild->size,
3055 &new_acc))
3057 if (new_acc)
3059 if (!new_acc->grp_write && rchild->grp_write)
3061 gcc_assert (!lacc->grp_write);
3062 subtree_mark_written_and_rhs_enqueue (new_acc);
3063 ret = true;
3066 rchild->grp_hint = 1;
3067 new_acc->grp_hint |= new_acc->grp_read;
3068 if (rchild->first_child
3069 && propagate_subaccesses_from_rhs (new_acc, rchild))
3071 ret = 1;
3072 add_access_to_rhs_work_queue (new_acc);
3075 else
3077 if (!lacc->grp_write)
3079 ret = true;
3080 subtree_mark_written_and_rhs_enqueue (lacc);
3083 continue;
3086 if (rchild->grp_unscalarizable_region
3087 || !budget_for_propagation_access (lacc->base))
3089 if (!lacc->grp_write && access_or_its_child_written (rchild))
3091 ret = true;
3092 subtree_mark_written_and_rhs_enqueue (lacc);
3094 continue;
3097 rchild->grp_hint = 1;
3098 /* Because get_ref_base_and_extent always includes padding in size for
3099 accesses to DECLs but not necessarily for COMPONENT_REFs of the same
3100 type, we might actually be attempting here to create a child of the
3101 same type as the parent. */
3102 if (!types_compatible_p (lacc->type, rchild->type))
3103 new_acc = create_artificial_child_access (lacc, rchild, norm_offset,
3104 false,
3105 (lacc->grp_write
3106 || rchild->grp_write));
3107 else
3108 new_acc = lacc;
3109 gcc_checking_assert (new_acc);
3110 if (racc->first_child)
3111 propagate_subaccesses_from_rhs (new_acc, rchild);
3113 add_access_to_rhs_work_queue (lacc);
3114 ret = true;
3117 return ret;
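/* For illustration only (code made up): given

     struct pt { int x; int y; } a, b;
     a.x = foo ();
     b = a;

   the access to a.x gives "a" (the RHS of the aggregate assignment) a child
   at <0, 32>.  The function above walks that child across the assign link
   and, because "b" has no conflicting access there, creates an artificial
   child access of "b" with the same offset and type, so that the aggregate
   copy can later be turned into copies of the scalar replacements.  */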
3120 /* Propagate subaccesses of LACC across an assignment link to RACC if they
3121 should inhibit total scalarization of the corresponding area. No flags are
3122 being propagated in the process. Return true if anything changed. */
3124 static bool
3125 propagate_subaccesses_from_lhs (struct access *lacc, struct access *racc)
3127 if (is_gimple_reg_type (racc->type)
3128 || lacc->grp_unscalarizable_region
3129 || racc->grp_unscalarizable_region)
3130 return false;
3132 /* TODO: Do we want to set some new racc flag to stop potential total
3133 scalarization if lacc is a scalar access (and neither of the two has
3134 children)? */
3136 bool ret = false;
3137 HOST_WIDE_INT norm_delta = racc->offset - lacc->offset;
3138 for (struct access *lchild = lacc->first_child;
3139 lchild;
3140 lchild = lchild->next_sibling)
3142 struct access *matching_acc = NULL;
3143 HOST_WIDE_INT norm_offset = lchild->offset + norm_delta;
3145 if (lchild->grp_unscalarizable_region
3146 || child_would_conflict_in_acc (racc, norm_offset, lchild->size,
3147 &matching_acc)
3148 || !budget_for_propagation_access (racc->base))
3150 if (matching_acc
3151 && propagate_subaccesses_from_lhs (lchild, matching_acc))
3152 add_access_to_lhs_work_queue (matching_acc);
3153 continue;
3156 /* Because get_ref_base_and_extent always includes padding in size for
3157 accesses to DECLs but not necessarily for COMPONENT_REFs of the same
3158 type, we might actually be attempting here to create a child of the
3159 same type as the parent. */
3160 if (!types_compatible_p (racc->type, lchild->type))
3162 struct access *new_acc
3163 = create_artificial_child_access (racc, lchild, norm_offset,
3164 true, false);
3165 new_acc->grp_result_of_prop_from_lhs = 1;
3166 propagate_subaccesses_from_lhs (lchild, new_acc);
3168 else
3169 propagate_subaccesses_from_lhs (lchild, racc);
3170 ret = true;
3172 return ret;
3175 /* Propagate all subaccesses across assignment links. */
3177 static void
3178 propagate_all_subaccesses (void)
3180 propagation_budget = new hash_map<tree, unsigned>;
3181 while (rhs_work_queue_head)
3183 struct access *racc = pop_access_from_rhs_work_queue ();
3184 struct assign_link *link;
3186 if (racc->group_representative)
3187 racc = racc->group_representative;
3188 gcc_assert (racc->first_rhs_link);
3190 for (link = racc->first_rhs_link; link; link = link->next_rhs)
3192 struct access *lacc = link->lacc;
3194 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
3195 continue;
3196 lacc = lacc->group_representative;
3198 bool reque_parents = false;
3199 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (racc->base)))
3201 if (!lacc->grp_write)
3203 subtree_mark_written_and_rhs_enqueue (lacc);
3204 reque_parents = true;
3207 else if (propagate_subaccesses_from_rhs (lacc, racc))
3208 reque_parents = true;
3210 if (reque_parents)
3213 add_access_to_rhs_work_queue (lacc);
3214 lacc = lacc->parent;
3216 while (lacc);
3220 while (lhs_work_queue_head)
3222 struct access *lacc = pop_access_from_lhs_work_queue ();
3223 struct assign_link *link;
3225 if (lacc->group_representative)
3226 lacc = lacc->group_representative;
3227 gcc_assert (lacc->first_lhs_link);
3229 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
3230 continue;
3232 for (link = lacc->first_lhs_link; link; link = link->next_lhs)
3234 struct access *racc = link->racc;
3236 if (racc->group_representative)
3237 racc = racc->group_representative;
3238 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (racc->base)))
3239 continue;
3240 if (propagate_subaccesses_from_lhs (lacc, racc))
3241 add_access_to_lhs_work_queue (racc);
3244 delete propagation_budget;
3247 /* Return true if the forest beginning with ROOT does not contain
3248 unscalarizable regions or non-byte aligned accesses. */
3250 static bool
3251 can_totally_scalarize_forest_p (struct access *root)
3253 struct access *access = root;
3256 if (access->grp_unscalarizable_region
3257 || (access->offset % BITS_PER_UNIT) != 0
3258 || (access->size % BITS_PER_UNIT) != 0
3259 || (is_gimple_reg_type (access->type)
3260 && access->first_child))
3261 return false;
3263 if (access->first_child)
3264 access = access->first_child;
3265 else if (access->next_sibling)
3266 access = access->next_sibling;
3267 else
3269 while (access->parent && !access->next_sibling)
3270 access = access->parent;
3271 if (access->next_sibling)
3272 access = access->next_sibling;
3273 else
3275 gcc_assert (access == root);
3276 root = root->next_grp;
3277 access = root;
3281 while (access);
3282 return true;
3285 /* Create and return an ACCESS in PARENT spanning from POS with SIZE, TYPE and
3286 reference EXPR for total scalarization purposes and mark it as such. Within
3287 the children of PARENT, link it in between PTR and NEXT_SIBLING. */
3289 static struct access *
3290 create_total_scalarization_access (struct access *parent, HOST_WIDE_INT pos,
3291 HOST_WIDE_INT size, tree type, tree expr,
3292 struct access **ptr,
3293 struct access *next_sibling)
3295 struct access *access = access_pool.allocate ();
3296 memset (access, 0, sizeof (struct access));
3297 access->base = parent->base;
3298 access->offset = pos;
3299 access->size = size;
3300 access->expr = expr;
3301 access->type = type;
3302 access->parent = parent;
3303 access->grp_write = parent->grp_write;
3304 access->grp_total_scalarization = 1;
3305 access->grp_hint = 1;
3306 access->grp_same_access_path = path_comparable_for_same_access (expr);
3307 access->reverse = reverse_storage_order_for_component_p (expr);
3309 access->next_sibling = next_sibling;
3310 *ptr = access;
3311 return access;
3314 /* Create and return an ACCESS in PARENT spanning from POS with SIZE, TYPE and
3315 reference EXPR for total scalarization purposes and mark it as such, link it
3316 at *PTR and reshape the tree so that those elements at *PTR and their
3317 siblings which fall within the part described by POS and SIZE are moved to
3318 be children of the new access. If a partial overlap is detected, return
3319 NULL. */
3321 static struct access *
3322 create_total_access_and_reshape (struct access *parent, HOST_WIDE_INT pos,
3323 HOST_WIDE_INT size, tree type, tree expr,
3324 struct access **ptr)
3326 struct access **p = ptr;
3328 while (*p && (*p)->offset < pos + size)
3330 if ((*p)->offset + (*p)->size > pos + size)
3331 return NULL;
3332 p = &(*p)->next_sibling;
3335 struct access *next_child = *ptr;
3336 struct access *new_acc
3337 = create_total_scalarization_access (parent, pos, size, type, expr,
3338 ptr, *p);
3339 if (p != ptr)
3341 new_acc->first_child = next_child;
3342 *p = NULL;
3343 for (struct access *a = next_child; a; a = a->next_sibling)
3344 a->parent = new_acc;
3346 return new_acc;
3349 static bool totally_scalarize_subtree (struct access *root);
3351 /* Return true if INNER is either the same type as OUTER or if it is the type
3352 of a record field in OUTER at offset zero, possibly in nested
3353 sub-records. */
3355 static bool
3356 access_and_field_type_match_p (tree outer, tree inner)
3358 if (TYPE_MAIN_VARIANT (outer) == TYPE_MAIN_VARIANT (inner))
3359 return true;
3360 if (TREE_CODE (outer) != RECORD_TYPE)
3361 return false;
3362 tree fld = TYPE_FIELDS (outer);
3363 while (fld)
3365 if (TREE_CODE (fld) == FIELD_DECL)
3367 if (!zerop (DECL_FIELD_OFFSET (fld)))
3368 return false;
3369 if (TYPE_MAIN_VARIANT (TREE_TYPE (fld)) == inner)
3370 return true;
3371 if (TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE)
3372 fld = TYPE_FIELDS (TREE_TYPE (fld));
3373 else
3374 return false;
3376 else
3377 fld = DECL_CHAIN (fld);
3379 return false;
3382 /* Return type of total_should_skip_creating_access indicating whether a total
3383 scalarization access for a field/element should be created, whether it
3384 already exists or whether the entire total scalarization has to fail. */
3386 enum total_sra_field_state {TOTAL_FLD_CREATE, TOTAL_FLD_DONE, TOTAL_FLD_FAILED};
3388 /* Do all the necessary steps in total scalarization when a part of the given
3389 aggregate with type TYPE at POS and with the given SIZE should be put into PARENT and
3390 when we have processed all its siblings with smaller offsets up until and
3391 including LAST_SEEN_SIBLING (which can be NULL).
3393 If some further siblings are to be skipped, set *LAST_SEEN_SIBLING as
3394 appropriate. Return TOTAL_FLD_CREATE if the caller should carry on with
3395 creating a new access, TOTAL_FLD_DONE if access or accesses capable of
3396 representing the described part of the aggregate for the purposes of total
3397 scalarization already exist or TOTAL_FLD_FAILED if there is a problem which
3398 prevents total scalarization from happening at all. */
3400 static enum total_sra_field_state
3401 total_should_skip_creating_access (struct access *parent,
3402 struct access **last_seen_sibling,
3403 tree type, HOST_WIDE_INT pos,
3404 HOST_WIDE_INT size)
3406 struct access *next_child;
3407 if (!*last_seen_sibling)
3408 next_child = parent->first_child;
3409 else
3410 next_child = (*last_seen_sibling)->next_sibling;
3412 /* First, traverse the chain of siblings until it points to an access with
3413 offset at least equal to POS. Check whether any of the skipped accesses
3414 spans the POS boundary and if so, return with a failure. */
3415 while (next_child && next_child->offset < pos)
3417 if (next_child->offset + next_child->size > pos)
3418 return TOTAL_FLD_FAILED;
3419 *last_seen_sibling = next_child;
3420 next_child = next_child->next_sibling;
3423 /* Now check whether next_child has exactly the right POS and SIZE and if so,
3424 whether it can represent what we need and can be totally scalarized
3425 itself. */
3426 if (next_child && next_child->offset == pos
3427 && next_child->size == size)
3429 if (!is_gimple_reg_type (next_child->type)
3430 && (!access_and_field_type_match_p (type, next_child->type)
3431 || !totally_scalarize_subtree (next_child)))
3432 return TOTAL_FLD_FAILED;
3434 *last_seen_sibling = next_child;
3435 return TOTAL_FLD_DONE;
3438 /* If the child we're looking at would partially overlap, we just cannot
3439 totally scalarize. */
3440 if (next_child
3441 && next_child->offset < pos + size
3442 && next_child->offset + next_child->size > pos + size)
3443 return TOTAL_FLD_FAILED;
3445 if (is_gimple_reg_type (type))
3447 /* We don't scalarize accesses that are children of other scalar type
3448 accesses, so if we go on and create an access for a register type,
3449 there should not be any pre-existing children. There are rare cases
3450 where the requested type is a vector but we already have register
3451 accesses for all its elements which is equally good. Detect that
3452 situation or whether we need to bail out. */
3454 HOST_WIDE_INT covered = pos;
3455 bool skipping = false;
3456 while (next_child
3457 && next_child->offset + next_child->size <= pos + size)
3459 if (next_child->offset != covered
3460 || !is_gimple_reg_type (next_child->type))
3461 return TOTAL_FLD_FAILED;
3463 covered += next_child->size;
3464 *last_seen_sibling = next_child;
3465 next_child = next_child->next_sibling;
3466 skipping = true;
3469 if (skipping)
3471 if (covered != pos + size)
3472 return TOTAL_FLD_FAILED;
3473 else
3474 return TOTAL_FLD_DONE;
3478 return TOTAL_FLD_CREATE;
3481 /* Go over sub-tree rooted in ROOT and attempt to create scalar accesses
3482 spanning all uncovered areas covered by ROOT, return false if the attempt
3483 failed. All created accesses will have grp_total_scalarization set (and
3484 should be ignored if the function returns false). */
3486 static bool
3487 totally_scalarize_subtree (struct access *root)
3489 gcc_checking_assert (!root->grp_unscalarizable_region);
3490 gcc_checking_assert (!is_gimple_reg_type (root->type));
3492 struct access *last_seen_sibling = NULL;
3494 switch (TREE_CODE (root->type))
3496 case RECORD_TYPE:
3497 for (tree fld = TYPE_FIELDS (root->type); fld; fld = DECL_CHAIN (fld))
3498 if (TREE_CODE (fld) == FIELD_DECL)
3500 tree ft = TREE_TYPE (fld);
3501 HOST_WIDE_INT fsize = tree_to_uhwi (DECL_SIZE (fld));
3502 if (!fsize)
3503 continue;
3505 HOST_WIDE_INT pos = root->offset + int_bit_position (fld);
3506 if (pos + fsize > root->offset + root->size)
3507 return false;
3508 enum total_sra_field_state
3509 state = total_should_skip_creating_access (root,
3510 &last_seen_sibling,
3511 ft, pos, fsize);
3512 switch (state)
3514 case TOTAL_FLD_FAILED:
3515 return false;
3516 case TOTAL_FLD_DONE:
3517 continue;
3518 case TOTAL_FLD_CREATE:
3519 break;
3520 default:
3521 gcc_unreachable ();
3524 struct access **p = (last_seen_sibling
3525 ? &last_seen_sibling->next_sibling
3526 : &root->first_child);
3527 tree nref = build3 (COMPONENT_REF, ft, root->expr, fld, NULL_TREE);
3528 struct access *new_child
3529 = create_total_access_and_reshape (root, pos, fsize, ft, nref, p);
3530 if (!new_child)
3531 return false;
3533 if (!is_gimple_reg_type (ft)
3534 && !totally_scalarize_subtree (new_child))
3535 return false;
3536 last_seen_sibling = new_child;
3538 break;
3539 case ARRAY_TYPE:
3541 tree elemtype = TREE_TYPE (root->type);
3542 tree elem_size = TYPE_SIZE (elemtype);
3543 gcc_assert (elem_size && tree_fits_shwi_p (elem_size));
3544 HOST_WIDE_INT el_size = tree_to_shwi (elem_size);
3545 gcc_assert (el_size > 0);
3547 tree minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (root->type));
3548 gcc_assert (TREE_CODE (minidx) == INTEGER_CST);
3549 tree maxidx = TYPE_MAX_VALUE (TYPE_DOMAIN (root->type));
3550 /* Skip (some) zero-length arrays; others have MAXIDX == MINIDX - 1. */
3551 if (!maxidx)
3552 goto out;
3553 gcc_assert (TREE_CODE (maxidx) == INTEGER_CST);
3554 tree domain = TYPE_DOMAIN (root->type);
3555 /* MINIDX and MAXIDX are inclusive, and must be interpreted in
3556 DOMAIN (e.g. signed int, whereas min/max may be size_int). */
3557 offset_int idx = wi::to_offset (minidx);
3558 offset_int max = wi::to_offset (maxidx);
3559 if (!TYPE_UNSIGNED (domain))
3561 idx = wi::sext (idx, TYPE_PRECISION (domain));
3562 max = wi::sext (max, TYPE_PRECISION (domain));
3564 for (HOST_WIDE_INT pos = root->offset;
3565 idx <= max;
3566 pos += el_size, ++idx)
3568 enum total_sra_field_state
3569 state = total_should_skip_creating_access (root,
3570 &last_seen_sibling,
3571 elemtype, pos,
3572 el_size);
3573 switch (state)
3575 case TOTAL_FLD_FAILED:
3576 return false;
3577 case TOTAL_FLD_DONE:
3578 continue;
3579 case TOTAL_FLD_CREATE:
3580 break;
3581 default:
3582 gcc_unreachable ();
3585 struct access **p = (last_seen_sibling
3586 ? &last_seen_sibling->next_sibling
3587 : &root->first_child);
3588 tree nref = build4 (ARRAY_REF, elemtype, root->expr,
3589 wide_int_to_tree (domain, idx),
3590 NULL_TREE, NULL_TREE);
3591 struct access *new_child
3592 = create_total_access_and_reshape (root, pos, el_size, elemtype,
3593 nref, p);
3594 if (!new_child)
3595 return false;
3597 if (!is_gimple_reg_type (elemtype)
3598 && !totally_scalarize_subtree (new_child))
3599 return false;
3600 last_seen_sibling = new_child;
3603 break;
3604 default:
3605 gcc_unreachable ();
3608 out:
3609 return true;
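/* For illustration only (code made up): when a small aggregate is only ever
   copied around as a whole, e.g.

     struct rgb { unsigned char r, g, b; } pix;
     pix = *src;
     *dst = pix;

   there are no individual field accesses to build a forest from, so total
   scalarization walks the type instead and the function above creates
   accesses for pix.r, pix.g and pix.b (each with grp_total_scalarization
   set), allowing "pix" to be replaced by three scalars.  */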
3612 /* Go through all accesses collected throughout the (intraprocedural) analysis
3613 stage, exclude overlapping ones, identify representatives and build trees
3614 out of them, making decisions about scalarization on the way. Return true
3615 iff there are any to-be-scalarized variables after this stage. */
3617 static bool
3618 analyze_all_variable_accesses (void)
3620 int res = 0;
3621 bitmap tmp = BITMAP_ALLOC (NULL);
3622 bitmap_iterator bi;
3623 unsigned i;
3625 bitmap_copy (tmp, candidate_bitmap);
3626 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
3628 tree var = candidate (i);
3629 struct access *access;
3631 access = sort_and_splice_var_accesses (var);
3632 if (!access || !build_access_trees (access))
3633 disqualify_candidate (var,
3634 "No or inhibitingly overlapping accesses.");
3637 propagate_all_subaccesses ();
3639 bool optimize_speed_p = !optimize_function_for_size_p (cfun);
3640 /* If the user didn't set PARAM_SRA_MAX_SCALARIZATION_SIZE_<...>,
3641 fall back to a target default. */
3642 unsigned HOST_WIDE_INT max_scalarization_size
3643 = get_move_ratio (optimize_speed_p) * UNITS_PER_WORD;
3645 if (optimize_speed_p)
3647 if (OPTION_SET_P (param_sra_max_scalarization_size_speed))
3648 max_scalarization_size = param_sra_max_scalarization_size_speed;
3650 else
3652 if (OPTION_SET_P (param_sra_max_scalarization_size_size))
3653 max_scalarization_size = param_sra_max_scalarization_size_size;
3655 max_scalarization_size *= BITS_PER_UNIT;
3657 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
3658 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
3659 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
3661 tree var = candidate (i);
3662 if (!VAR_P (var))
3663 continue;
3665 if (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var))) > max_scalarization_size)
3667 if (dump_file && (dump_flags & TDF_DETAILS))
3669 fprintf (dump_file, "Too big to totally scalarize: ");
3670 print_generic_expr (dump_file, var);
3671 fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
3673 continue;
3676 bool all_types_ok = true;
3677 for (struct access *access = get_first_repr_for_decl (var);
3678 access;
3679 access = access->next_grp)
3680 if (!can_totally_scalarize_forest_p (access)
3681 || !scalarizable_type_p (access->type, constant_decl_p (var)))
3683 all_types_ok = false;
3684 break;
3686 if (!all_types_ok)
3687 continue;
3689 if (dump_file && (dump_flags & TDF_DETAILS))
3691 fprintf (dump_file, "Will attempt to totally scalarize ");
3692 print_generic_expr (dump_file, var);
3693 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
3695 bool scalarized = true;
3696 for (struct access *access = get_first_repr_for_decl (var);
3697 access;
3698 access = access->next_grp)
3699 if (!is_gimple_reg_type (access->type)
3700 && !totally_scalarize_subtree (access))
3702 scalarized = false;
3703 break;
3706 if (scalarized)
3707 for (struct access *access = get_first_repr_for_decl (var);
3708 access;
3709 access = access->next_grp)
3710 access->grp_total_scalarization = true;
3713 if (flag_checking)
3714 verify_all_sra_access_forests ();
3716 bitmap_copy (tmp, candidate_bitmap);
3717 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
3719 tree var = candidate (i);
3720 struct access *access = get_first_repr_for_decl (var);
3722 if (analyze_access_trees (access))
3724 res++;
3725 if (dump_file && (dump_flags & TDF_DETAILS))
3727 fprintf (dump_file, "\nAccess trees for ");
3728 print_generic_expr (dump_file, var);
3729 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
3730 dump_access_tree (dump_file, access);
3731 fprintf (dump_file, "\n");
3734 else
3735 disqualify_candidate (var, "No scalar replacements to be created.");
3738 BITMAP_FREE (tmp);
3740 if (res)
3742 statistics_counter_event (cfun, "Scalarized aggregates", res);
3743 return true;
3745 else
3746 return false;
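/* For illustration only: the total scalarization cap computed above defaults
   to get_move_ratio (speed_p) words.  On a hypothetical target where
   get_move_ratio returns 4 and UNITS_PER_WORD is 8, aggregates larger than
   32 bytes (256 bits after the BITS_PER_UNIT scaling) are reported as "Too
   big to totally scalarize", unless the user overrides the limit with
   --param sra-max-scalarization-size-Ospeed or
   --param sra-max-scalarization-size-Osize.  */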
3749 /* Generate statements copying scalar replacements of accesses within a subtree
3750 into or out of AGG. ACCESS, all its children, siblings and their children
3751 are to be processed. AGG is an aggregate type expression (can be a
3752 declaration but does not have to be, it can for example also be a mem_ref or
3753 a series of handled components). TOP_OFFSET is the offset of the processed
3754 subtree which has to be subtracted from offsets of individual accesses to
3755 get corresponding offsets for AGG. If CHUNK_SIZE is non-null, copy only
3756 replacements in the interval <start_offset, start_offset + chunk_size>,
3757 otherwise copy all. GSI is a statement iterator used to place the new
3758 statements. WRITE should be true when the statements should write from AGG
3759 to the replacement and false if vice versa. If INSERT_AFTER is true, new
3760 statements will be added after the current statement in GSI, they will be
3761 added before the statement otherwise. */
3763 static void
3764 generate_subtree_copies (struct access *access, tree agg,
3765 HOST_WIDE_INT top_offset,
3766 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
3767 gimple_stmt_iterator *gsi, bool write,
3768 bool insert_after, location_t loc)
3770 /* Never write anything into constant pool decls. See PR70602. */
3771 if (!write && constant_decl_p (agg))
3772 return;
3775 if (chunk_size && access->offset >= start_offset + chunk_size)
3776 return;
3778 if (access->grp_to_be_replaced
3779 && (chunk_size == 0
3780 || access->offset + access->size > start_offset))
3782 tree expr, repl = get_access_replacement (access);
3783 gassign *stmt;
3785 expr = build_ref_for_model (loc, agg, access->offset - top_offset,
3786 access, gsi, insert_after);
3788 if (write)
3790 if (access->grp_partial_lhs)
3791 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
3792 !insert_after,
3793 insert_after ? GSI_NEW_STMT
3794 : GSI_SAME_STMT);
3795 stmt = gimple_build_assign (repl, expr);
3797 else
3799 suppress_warning (repl /* Be more selective! */);
3800 if (access->grp_partial_lhs)
3801 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
3802 !insert_after,
3803 insert_after ? GSI_NEW_STMT
3804 : GSI_SAME_STMT);
3805 stmt = gimple_build_assign (expr, repl);
3807 gimple_set_location (stmt, loc);
3809 if (insert_after)
3810 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
3811 else
3812 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3813 update_stmt (stmt);
3814 sra_stats.subtree_copies++;
3816 else if (write
3817 && access->grp_to_be_debug_replaced
3818 && (chunk_size == 0
3819 || access->offset + access->size > start_offset))
3821 gdebug *ds;
3822 tree drhs = build_debug_ref_for_model (loc, agg,
3823 access->offset - top_offset,
3824 access);
3825 ds = gimple_build_debug_bind (get_access_replacement (access),
3826 drhs, gsi_stmt (*gsi));
3827 if (insert_after)
3828 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
3829 else
3830 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3833 if (access->first_child)
3834 generate_subtree_copies (access->first_child, agg, top_offset,
3835 start_offset, chunk_size, gsi,
3836 write, insert_after, loc);
3838 access = access->next_sibling;
3840 while (access);
3843 /* Assign zero to all scalar replacements in an access subtree. ACCESS is the
3844 root of the subtree to be processed. GSI is the statement iterator used
3845 for inserting statements which are added after the current statement if
3846 INSERT_AFTER is true or before it otherwise. */
3848 static void
3849 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
3850 bool insert_after, location_t loc)
3853 struct access *child;
3855 if (access->grp_to_be_replaced)
3857 gassign *stmt;
3859 stmt = gimple_build_assign (get_access_replacement (access),
3860 build_zero_cst (access->type));
3861 if (insert_after)
3862 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
3863 else
3864 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3865 update_stmt (stmt);
3866 gimple_set_location (stmt, loc);
3868 else if (access->grp_to_be_debug_replaced)
3870 gdebug *ds
3871 = gimple_build_debug_bind (get_access_replacement (access),
3872 build_zero_cst (access->type),
3873 gsi_stmt (*gsi));
3874 if (insert_after)
3875 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
3876 else
3877 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3880 for (child = access->first_child; child; child = child->next_sibling)
3881 init_subtree_with_zero (child, gsi, insert_after, loc);
3884 /* Clobber all scalar replacements in an access subtree. ACCESS is the
3885 root of the subtree to be processed. GSI is the statement iterator used
3886 for inserting statements which are added after the current statement if
3887 INSERT_AFTER is true or before it otherwise. */
3889 static void
3890 clobber_subtree (struct access *access, gimple_stmt_iterator *gsi,
3891 bool insert_after, location_t loc)
3894 struct access *child;
3896 if (access->grp_to_be_replaced)
3898 tree rep = get_access_replacement (access);
3899 tree clobber = build_clobber (access->type);
3900 gimple *stmt = gimple_build_assign (rep, clobber);
3902 if (insert_after)
3903 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
3904 else
3905 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3906 update_stmt (stmt);
3907 gimple_set_location (stmt, loc);
3910 for (child = access->first_child; child; child = child->next_sibling)
3911 clobber_subtree (child, gsi, insert_after, loc);
3914 /* Search for an access representative for the given expression EXPR and
3915 return it or NULL if it cannot be found. */
3917 static struct access *
3918 get_access_for_expr (tree expr)
3920 poly_int64 poffset, psize, pmax_size;
3921 HOST_WIDE_INT offset, max_size;
3922 tree base;
3923 bool reverse;
3925 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
3926 a different size than the size of its argument and we need the latter
3927 one. */
3928 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3929 expr = TREE_OPERAND (expr, 0);
3931 base = get_ref_base_and_extent (expr, &poffset, &psize, &pmax_size,
3932 &reverse);
3933 if (!known_size_p (pmax_size)
3934 || !pmax_size.is_constant (&max_size)
3935 || !poffset.is_constant (&offset)
3936 || !DECL_P (base))
3937 return NULL;
3939 if (tree basesize = DECL_SIZE (base))
3941 poly_int64 sz;
3942 if (offset < 0
3943 || !poly_int_tree_p (basesize, &sz)
3944 || known_le (sz, offset))
3945 return NULL;
3948 if (max_size == 0
3949 || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
3950 return NULL;
3952 return get_var_base_offset_size_access (base, offset, max_size);
3955 /* Replace the expression EXPR with a scalar replacement if there is one and
3956 generate other statements to do type conversion or subtree copying if
3957 necessary. WRITE is true if the expression is being written to (it is on a
3958 LHS of a statement or output in an assembly statement). STMT_GSI is used to
3959 place newly created statements before the processed statement, REFRESH_GSI
3960 is used to place them afterwards - unless the processed statement must end a
3961 BB in which case it is placed on the outgoing non-EH edge. REFRESH_GSI
3962 is then used to continue iteration over the BB. If sra_modify_expr is
3963 called only once with WRITE equal to true on a given statement, both
3964 iterator parameters can point to the same one. */
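/* Illustrative sketch (names invented, not from the original sources): when
   called on an expression such as s.i whose access has grp_to_be_replaced
   set, *EXPR is rewritten to the chosen replacement, e.g.

     x_1 = s.i;      becomes      x_1 = SR_i;

   If the type of the expression does not match the type of the replacement,
   the value is additionally routed through a VIEW_CONVERT_EXPR or a reference
   built by build_ref_for_model, as the code below does.  */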
3966 static bool
3967 sra_modify_expr (tree *expr, bool write, gimple_stmt_iterator *stmt_gsi,
3968 gimple_stmt_iterator *refresh_gsi)
3970 location_t loc;
3971 struct access *access;
3972 tree type, bfr, orig_expr;
3973 bool partial_cplx_access = false;
3975 if (TREE_CODE (*expr) == BIT_FIELD_REF
3976 && (write || !sra_handled_bf_read_p (*expr)))
3978 bfr = *expr;
3979 expr = &TREE_OPERAND (*expr, 0);
3981 else
3982 bfr = NULL_TREE;
3984 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
3986 expr = &TREE_OPERAND (*expr, 0);
3987 partial_cplx_access = true;
3989 access = get_access_for_expr (*expr);
3990 if (!access)
3991 return false;
3992 type = TREE_TYPE (*expr);
3993 orig_expr = *expr;
3995 loc = gimple_location (gsi_stmt (*stmt_gsi));
3996 gimple_stmt_iterator alt_gsi = gsi_none ();
3997 if (write && stmt_ends_bb_p (gsi_stmt (*stmt_gsi)))
3999 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*stmt_gsi)));
4000 refresh_gsi = &alt_gsi;
4003 if (access->grp_to_be_replaced)
4005 tree repl = get_access_replacement (access);
4006 /* If we replace a non-register typed access simply use the original
4007 access expression to extract the scalar component afterwards.
4008 This happens if scalarizing a function return value or parameter
4009 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
4010 gcc.c-torture/compile/20011217-1.c.
4012 We also want to use this when accessing a complex or vector which can
4013 be accessed as a different type too, potentially creating a need for
4014 type conversion (see PR42196) and when scalarized unions are involved
4015 in assembler statements (see PR42398). */
4016 if (!bfr && !useless_type_conversion_p (type, access->type))
4018 tree ref;
4020 ref = build_ref_for_model (loc, orig_expr, 0, access, stmt_gsi,
4021 false);
4023 if (partial_cplx_access)
4025 /* VIEW_CONVERT_EXPRs in partial complex access are always fine in
4026 the case of a write because in such case the replacement cannot
4027 be a gimple register. In the case of a load, we have to
4028 differentiate between a register and a non-register
4029 replacement. */
4030 tree t = build1 (VIEW_CONVERT_EXPR, type, repl);
4031 gcc_checking_assert (!write || access->grp_partial_lhs);
4032 if (!access->grp_partial_lhs)
4034 tree tmp = make_ssa_name (type);
4035 gassign *stmt = gimple_build_assign (tmp, t);
4036 /* This is always a read. */
4037 gsi_insert_before (stmt_gsi, stmt, GSI_SAME_STMT);
4038 t = tmp;
4040 *expr = t;
4042 else if (write)
4044 gassign *stmt;
4046 if (access->grp_partial_lhs)
4047 ref = force_gimple_operand_gsi (refresh_gsi, ref, true,
4048 NULL_TREE, false, GSI_NEW_STMT);
4049 stmt = gimple_build_assign (repl, ref);
4050 gimple_set_location (stmt, loc);
4051 gsi_insert_after (refresh_gsi, stmt, GSI_NEW_STMT);
4053 else
4055 gassign *stmt;
4057 if (access->grp_partial_lhs)
4058 repl = force_gimple_operand_gsi (stmt_gsi, repl, true,
4059 NULL_TREE, true,
4060 GSI_SAME_STMT);
4061 stmt = gimple_build_assign (ref, repl);
4062 gimple_set_location (stmt, loc);
4063 gsi_insert_before (stmt_gsi, stmt, GSI_SAME_STMT);
4066 else
4068 /* If we are going to replace a scalar field in a structure with
4069 reverse storage order by a stand-alone scalar, we are going to
4070 effectively byte-swap the scalar and we also need to byte-swap
4071 the portion of it represented by the bit-field. */
4072 if (bfr && REF_REVERSE_STORAGE_ORDER (bfr))
4074 REF_REVERSE_STORAGE_ORDER (bfr) = 0;
4075 TREE_OPERAND (bfr, 2)
4076 = size_binop (MINUS_EXPR, TYPE_SIZE (TREE_TYPE (repl)),
4077 size_binop (PLUS_EXPR, TREE_OPERAND (bfr, 1),
4078 TREE_OPERAND (bfr, 2)));
4081 *expr = repl;
4084 sra_stats.exprs++;
4086 else if (write && access->grp_to_be_debug_replaced)
4088 gdebug *ds = gimple_build_debug_bind (get_access_replacement (access),
4089 NULL_TREE,
4090 gsi_stmt (*stmt_gsi));
4091 gsi_insert_after (stmt_gsi, ds, GSI_NEW_STMT);
4094 if (access->first_child && !TREE_READONLY (access->base))
4096 HOST_WIDE_INT start_offset, chunk_size;
4097 if (bfr
4098 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 1))
4099 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 2)))
4101 chunk_size = tree_to_uhwi (TREE_OPERAND (bfr, 1));
4102 start_offset = access->offset
4103 + tree_to_uhwi (TREE_OPERAND (bfr, 2));
4105 else
4106 start_offset = chunk_size = 0;
4108 generate_subtree_copies (access->first_child, orig_expr, access->offset,
4109 start_offset, chunk_size,
4110 write ? refresh_gsi : stmt_gsi,
4111 write, write, loc);
4113 return true;
4116 /* If EXPR, which must be a call argument, is an ADDR_EXPR, generate writes and
4117 reads from its base before and after the call statement given in CALL_GSI
4118 and return true if any copying took place. Otherwise call sra_modify_expr
4119 on EXPR and return its value. FLAGS is what gimple_call_arg_flags
4120 returns for the given parameter. */
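/* A hedged example (names invented for illustration): for a call

     foo (&s);

   where s is a scalarized candidate with replacements SR_i and SR_f, the
   replacements are flushed back into s before the call and re-loaded
   afterwards, unless EAF_NO_DIRECT_CLOBBER guarantees the callee does not
   modify the pointed-to data:

     s.i = SR_i;
     s.f = SR_f;
     foo (&s);
     SR_i = s.i;
     SR_f = s.f;  */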
4122 static bool
4123 sra_modify_call_arg (tree *expr, gimple_stmt_iterator *call_gsi,
4124 gimple_stmt_iterator *refresh_gsi, int flags)
4126 if (TREE_CODE (*expr) != ADDR_EXPR)
4127 return sra_modify_expr (expr, false, call_gsi, refresh_gsi);
4129 if (flags & EAF_UNUSED)
4130 return false;
4132 tree base = get_base_address (TREE_OPERAND (*expr, 0));
4133 if (!DECL_P (base))
4134 return false;
4135 struct access *access = get_access_for_expr (base);
4136 if (!access)
4137 return false;
4139 gimple *stmt = gsi_stmt (*call_gsi);
4140 location_t loc = gimple_location (stmt);
4141 generate_subtree_copies (access, base, 0, 0, 0, call_gsi, false, false,
4142 loc);
4144 if (flags & EAF_NO_DIRECT_CLOBBER)
4145 return true;
4147 if (!stmt_ends_bb_p (stmt))
4148 generate_subtree_copies (access, base, 0, 0, 0, refresh_gsi, true,
4149 true, loc);
4150 else
4152 edge e;
4153 edge_iterator ei;
4154 FOR_EACH_EDGE (e, ei, gsi_bb (*call_gsi)->succs)
4156 gimple_stmt_iterator alt_gsi = gsi_start_edge (e);
4157 generate_subtree_copies (access, base, 0, 0, 0, &alt_gsi, true,
4158 true, loc);
4161 return true;
4164 /* Where scalar replacements of the RHS have been written to when a replacement
4165 of an LHS of an assignment cannot be directly loaded from a replacement of
4166 the RHS. */
4167 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
4168 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
4169 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
4171 struct subreplacement_assignment_data
4173 /* Offset of the access representing the lhs of the assignment. */
4174 HOST_WIDE_INT left_offset;
4176 /* LHS and RHS of the original assignment. */
4177 tree assignment_lhs, assignment_rhs;
4179 /* Access representing the rhs of the whole assignment. */
4180 struct access *top_racc;
4182 /* Stmt iterator used for statement insertions after the original assignment.
4183 It points to the main GSI used to traverse a BB during function body
4184 modification. */
4185 gimple_stmt_iterator *new_gsi;
4187 /* Stmt iterator used for statement insertions before the original
4188 assignment. Keeps on pointing to the original statement. */
4189 gimple_stmt_iterator old_gsi;
4191 /* Location of the assignment. */
4192 location_t loc;
4194 /* Keeps the information whether we have needed to refresh replacements of
4195 the LHS and from which side of the assignment this takes place. */
4196 enum unscalarized_data_handling refreshed;
4199 /* Store all replacements in the access tree rooted in TOP_RACC either to their
4200 base aggregate if there are unscalarized data or directly to the LHS of the
4201 statement that is pointed to by GSI otherwise. */
4203 static void
4204 handle_unscalarized_data_in_subtree (struct subreplacement_assignment_data *sad)
4206 tree src;
4207 /* If the RHS is a load from a constant, we do not need to (and must not)
4208 flush replacements to it and can use it directly as if we did. */
4209 if (TREE_READONLY (sad->top_racc->base))
4211 sad->refreshed = SRA_UDH_RIGHT;
4212 return;
4214 if (sad->top_racc->grp_unscalarized_data)
4216 src = sad->assignment_rhs;
4217 sad->refreshed = SRA_UDH_RIGHT;
4219 else
4221 src = sad->assignment_lhs;
4222 sad->refreshed = SRA_UDH_LEFT;
4224 generate_subtree_copies (sad->top_racc->first_child, src,
4225 sad->top_racc->offset, 0, 0,
4226 &sad->old_gsi, false, false, sad->loc);
4229 /* Try to generate statements to load all sub-replacements in an access subtree
4230 formed by children of LACC from scalar replacements in the SAD->top_racc
4231 subtree. If that is not possible, refresh the SAD->top_racc base aggregate
4232 and load the accesses from it. */
4234 static void
4235 load_assign_lhs_subreplacements (struct access *lacc,
4236 struct subreplacement_assignment_data *sad)
4238 for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
4240 HOST_WIDE_INT offset;
4241 offset = lacc->offset - sad->left_offset + sad->top_racc->offset;
4243 if (lacc->grp_to_be_replaced)
4245 struct access *racc;
4246 gassign *stmt;
4247 tree rhs;
4249 racc = find_access_in_subtree (sad->top_racc, offset, lacc->size);
4250 if (racc && racc->grp_to_be_replaced)
4252 rhs = get_access_replacement (racc);
4253 bool vce = false;
4254 if (!useless_type_conversion_p (lacc->type, racc->type))
4256 rhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
4257 lacc->type, rhs);
4258 vce = true;
4261 if (lacc->grp_partial_lhs && (vce || racc->grp_partial_lhs))
4262 rhs = force_gimple_operand_gsi (&sad->old_gsi, rhs, true,
4263 NULL_TREE, true, GSI_SAME_STMT);
4265 else
4267 /* No suitable access on the right hand side, need to load from
4268 the aggregate. See if we have to update it first... */
4269 if (sad->refreshed == SRA_UDH_NONE)
4270 handle_unscalarized_data_in_subtree (sad);
4272 if (sad->refreshed == SRA_UDH_LEFT)
4273 rhs = build_ref_for_model (sad->loc, sad->assignment_lhs,
4274 lacc->offset - sad->left_offset,
4275 lacc, sad->new_gsi, true);
4276 else
4277 rhs = build_ref_for_model (sad->loc, sad->assignment_rhs,
4278 lacc->offset - sad->left_offset,
4279 lacc, sad->new_gsi, true);
4280 if (lacc->grp_partial_lhs)
4281 rhs = force_gimple_operand_gsi (sad->new_gsi,
4282 rhs, true, NULL_TREE,
4283 false, GSI_NEW_STMT);
4286 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
4287 gsi_insert_after (sad->new_gsi, stmt, GSI_NEW_STMT);
4288 gimple_set_location (stmt, sad->loc);
4289 update_stmt (stmt);
4290 sra_stats.subreplacements++;
4292 else
4294 if (sad->refreshed == SRA_UDH_NONE
4295 && lacc->grp_read && !lacc->grp_covered)
4296 handle_unscalarized_data_in_subtree (sad);
4298 if (lacc && lacc->grp_to_be_debug_replaced)
4300 gdebug *ds;
4301 tree drhs;
4302 struct access *racc = find_access_in_subtree (sad->top_racc,
4303 offset,
4304 lacc->size);
4306 if (racc && racc->grp_to_be_replaced)
4308 if (racc->grp_write || constant_decl_p (racc->base))
4309 drhs = get_access_replacement (racc);
4310 else
4311 drhs = NULL;
4313 else if (sad->refreshed == SRA_UDH_LEFT)
4314 drhs = build_debug_ref_for_model (sad->loc, lacc->base,
4315 lacc->offset, lacc);
4316 else if (sad->refreshed == SRA_UDH_RIGHT)
4317 drhs = build_debug_ref_for_model (sad->loc, sad->top_racc->base,
4318 offset, lacc);
4319 else
4320 drhs = NULL_TREE;
4321 if (drhs
4322 && !useless_type_conversion_p (lacc->type, TREE_TYPE (drhs)))
4323 drhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
4324 lacc->type, drhs);
4325 ds = gimple_build_debug_bind (get_access_replacement (lacc),
4326 drhs, gsi_stmt (sad->old_gsi));
4327 gsi_insert_after (sad->new_gsi, ds, GSI_NEW_STMT);
4331 if (lacc->first_child)
4332 load_assign_lhs_subreplacements (lacc, sad);
4336 /* Result code for SRA assignment modification. */
4337 enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
4338 SRA_AM_MODIFIED, /* stmt changed but not
4339 removed */
4340 SRA_AM_REMOVED }; /* stmt eliminated */
4342 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT contains a pointer
4343 to the assignment and GSI is the statement iterator pointing at it. Returns
4344 the same values as sra_modify_assign. */
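/* For illustration only (a sketch with invented names): when the access tree
   of the LHS is fully covered by replacements, an assignment from an empty
   CONSTRUCTOR such as

     s = {};

   is rewritten to zero-initialization of the replacements and the original
   statement is removed:

     SR_i = 0;
     SR_f = 0.0;

   Clobbers of fully scalarized variables are likewise deleted once the
   replacements themselves have been clobbered.  */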
4346 static enum assignment_mod_result
4347 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
4349 tree lhs = gimple_assign_lhs (stmt);
4350 struct access *acc = get_access_for_expr (lhs);
4351 if (!acc)
4352 return SRA_AM_NONE;
4353 location_t loc = gimple_location (stmt);
4355 if (gimple_clobber_p (stmt))
4357 /* Clobber the replacement variable. */
4358 clobber_subtree (acc, gsi, !acc->grp_covered, loc);
4359 /* Remove clobbers of fully scalarized variables, they are dead. */
4360 if (acc->grp_covered)
4362 unlink_stmt_vdef (stmt);
4363 gsi_remove (gsi, true);
4364 release_defs (stmt);
4365 return SRA_AM_REMOVED;
4367 else
4368 return SRA_AM_MODIFIED;
4371 if (CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt)) > 0)
4373 /* I have never seen this code path trigger but if it can happen the
4374 following should handle it gracefully. */
4375 if (access_has_children_p (acc))
4376 generate_subtree_copies (acc->first_child, lhs, acc->offset, 0, 0, gsi,
4377 true, true, loc);
4378 return SRA_AM_MODIFIED;
4381 if (acc->grp_covered)
4383 init_subtree_with_zero (acc, gsi, false, loc);
4384 unlink_stmt_vdef (stmt);
4385 gsi_remove (gsi, true);
4386 release_defs (stmt);
4387 return SRA_AM_REMOVED;
4389 else
4391 init_subtree_with_zero (acc, gsi, true, loc);
4392 return SRA_AM_MODIFIED;
4396 /* Create and return a new suitable default definition SSA_NAME for RACC which
4397 is an access describing an uninitialized part of an aggregate that is being
4398 loaded. REG_TREE is used instead of the actual RACC type if that is not of
4399 a gimple register type. */
4401 static tree
4402 get_repl_default_def_ssa_name (struct access *racc, tree reg_type)
4404 gcc_checking_assert (!racc->grp_to_be_replaced
4405 && !racc->grp_to_be_debug_replaced);
4406 if (!racc->replacement_decl)
4407 racc->replacement_decl = create_access_replacement (racc, reg_type);
4408 return get_or_create_ssa_default_def (cfun, racc->replacement_decl);
4412 /* Generate statements to call .DEFERRED_INIT to initialize scalar replacements
4413 of accesses within a subtree ACCESS; all its children, siblings and their
4414 children are to be processed.
4415 GSI is a statement iterator used to place the new statements. */
4416 static void
4417 generate_subtree_deferred_init (struct access *access,
4418 tree init_type,
4419 tree decl_name,
4420 gimple_stmt_iterator *gsi,
4421 location_t loc)
4425 if (access->grp_to_be_replaced)
4427 tree repl = get_access_replacement (access);
4428 gimple *call
4429 = gimple_build_call_internal (IFN_DEFERRED_INIT, 3,
4430 TYPE_SIZE_UNIT (TREE_TYPE (repl)),
4431 init_type, decl_name);
4432 gimple_call_set_lhs (call, repl);
4433 gsi_insert_before (gsi, call, GSI_SAME_STMT);
4434 update_stmt (call);
4435 gimple_set_location (call, loc);
4436 sra_stats.subtree_deferred_init++;
4438 if (access->first_child)
4439 generate_subtree_deferred_init (access->first_child, init_type,
4440 decl_name, gsi, loc);
4442 access = access->next_sibling;
4444 while (access);
4447 /* For a call to .DEFERRED_INIT:
4448 var = .DEFERRED_INIT (size_of_var, init_type, name_of_var);
4449 examine the LHS variable VAR and replace it with a scalar replacement if
4450 there is one, also replace the RHS call to a call to .DEFERRED_INIT of
4451 the corresponding scalar replacement variable. Examine the subtree and
4452 do the scalar replacements in the subtree too. STMT is the call, GSI is
4453 the statement iterator used to place newly created statements. */
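/* An illustrative sketch (invented names, assuming -ftrivial-auto-var-init is
   in effect): a statement of the form

     s = .DEFERRED_INIT (8, 2, &"s"[0]);

   either has its LHS and size argument replaced when the whole of s is
   replaced by a single scalar, or, when s has scalarized components, each
   to-be-replaced child receives its own call, e.g.

     SR_i = .DEFERRED_INIT (4, 2, &"s"[0]);
     SR_f = .DEFERRED_INIT (4, 2, &"s"[0]);

   and the original statement is removed if the access tree covers the whole
   variable.  */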
4455 static enum assignment_mod_result
4456 sra_modify_deferred_init (gimple *stmt, gimple_stmt_iterator *gsi)
4458 tree lhs = gimple_call_lhs (stmt);
4459 tree init_type = gimple_call_arg (stmt, 1);
4460 tree decl_name = gimple_call_arg (stmt, 2);
4462 struct access *lhs_access = get_access_for_expr (lhs);
4463 if (!lhs_access)
4464 return SRA_AM_NONE;
4466 location_t loc = gimple_location (stmt);
4468 if (lhs_access->grp_to_be_replaced)
4470 tree lhs_repl = get_access_replacement (lhs_access);
4471 gimple_call_set_lhs (stmt, lhs_repl);
4472 tree arg0_repl = TYPE_SIZE_UNIT (TREE_TYPE (lhs_repl));
4473 gimple_call_set_arg (stmt, 0, arg0_repl);
4474 sra_stats.deferred_init++;
4475 gcc_assert (!lhs_access->first_child);
4476 return SRA_AM_MODIFIED;
4479 if (lhs_access->first_child)
4480 generate_subtree_deferred_init (lhs_access->first_child,
4481 init_type, decl_name, gsi, loc);
4482 if (lhs_access->grp_covered)
4484 unlink_stmt_vdef (stmt);
4485 gsi_remove (gsi, true);
4486 release_defs (stmt);
4487 return SRA_AM_REMOVED;
4490 return SRA_AM_MODIFIED;
4493 /* Examine both sides of the assignment statement pointed to by STMT, replace
4494 them with a scalar replacement if there is one and generate copying of
4495 replacements if scalarized aggregates have been used in the assignment. GSI
4496 is used to hold generated statements for type conversions and subtree
4497 copying. */
4499 static enum assignment_mod_result
4500 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
4502 struct access *lacc, *racc;
4503 tree lhs, rhs;
4504 bool modify_this_stmt = false;
4505 bool force_gimple_rhs = false;
4506 location_t loc;
4507 gimple_stmt_iterator orig_gsi = *gsi;
4509 if (!gimple_assign_single_p (stmt))
4510 return SRA_AM_NONE;
4511 lhs = gimple_assign_lhs (stmt);
4512 rhs = gimple_assign_rhs1 (stmt);
4514 if (TREE_CODE (rhs) == CONSTRUCTOR)
4515 return sra_modify_constructor_assign (stmt, gsi);
4517 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
4518 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
4519 || (TREE_CODE (rhs) == BIT_FIELD_REF && !sra_handled_bf_read_p (rhs))
4520 || TREE_CODE (lhs) == BIT_FIELD_REF)
4522 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (stmt),
4523 false, gsi, gsi);
4524 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (stmt),
4525 true, gsi, gsi);
4526 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
4529 lacc = get_access_for_expr (lhs);
4530 racc = get_access_for_expr (rhs);
4531 if (!lacc && !racc)
4532 return SRA_AM_NONE;
4533 /* Avoid modifying initializations of constant-pool replacements. */
4534 if (racc && (racc->replacement_decl == lhs))
4535 return SRA_AM_NONE;
4537 loc = gimple_location (stmt);
4538 if (lacc && lacc->grp_to_be_replaced)
4540 lhs = get_access_replacement (lacc);
4541 gimple_assign_set_lhs (stmt, lhs);
4542 modify_this_stmt = true;
4543 if (lacc->grp_partial_lhs)
4544 force_gimple_rhs = true;
4545 sra_stats.exprs++;
4548 if (racc && racc->grp_to_be_replaced)
4550 rhs = get_access_replacement (racc);
4551 modify_this_stmt = true;
4552 if (racc->grp_partial_lhs)
4553 force_gimple_rhs = true;
4554 sra_stats.exprs++;
4556 else if (racc
4557 && !racc->grp_unscalarized_data
4558 && !racc->grp_unscalarizable_region
4559 && TREE_CODE (lhs) == SSA_NAME
4560 && !access_has_replacements_p (racc))
4562 rhs = get_repl_default_def_ssa_name (racc, TREE_TYPE (lhs));
4563 modify_this_stmt = true;
4564 sra_stats.exprs++;
4567 if (modify_this_stmt
4568 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
4570 /* If we can avoid creating a VIEW_CONVERT_EXPR, then do so.
4571 ??? This should move to fold_stmt which we simply should
4572 call after building a VIEW_CONVERT_EXPR here. */
4573 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
4574 && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (lhs)) == racc->reverse
4575 && !contains_bitfld_component_ref_p (lhs))
4577 lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
4578 gimple_assign_set_lhs (stmt, lhs);
4580 else if (lacc
4581 && AGGREGATE_TYPE_P (TREE_TYPE (rhs))
4582 && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (rhs)) == lacc->reverse
4583 && !contains_vce_or_bfcref_p (rhs))
4584 rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
4586 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
4588 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
4589 if (is_gimple_reg_type (TREE_TYPE (lhs))
4590 && TREE_CODE (lhs) != SSA_NAME)
4591 force_gimple_rhs = true;
4595 if (lacc && lacc->grp_to_be_debug_replaced)
4597 tree dlhs = get_access_replacement (lacc);
4598 tree drhs = unshare_expr (rhs);
4599 if (!useless_type_conversion_p (TREE_TYPE (dlhs), TREE_TYPE (drhs)))
4601 if (AGGREGATE_TYPE_P (TREE_TYPE (drhs))
4602 && !contains_vce_or_bfcref_p (drhs))
4603 drhs = build_debug_ref_for_model (loc, drhs, 0, lacc);
4604 if (drhs
4605 && !useless_type_conversion_p (TREE_TYPE (dlhs),
4606 TREE_TYPE (drhs)))
4607 drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
4608 TREE_TYPE (dlhs), drhs);
4610 gdebug *ds = gimple_build_debug_bind (dlhs, drhs, stmt);
4611 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
4614 /* From this point on, the function deals with assignments in between
4615 aggregates when at least one has scalar reductions of some of its
4616 components. There are three possible scenarios: 1) both the LHS and RHS have
4617 to-be-scalarized components, 2) only the RHS has, or 3) only the LHS has.
4619 In the first case, we would like to load the LHS components from RHS
4620 components whenever possible. If that is not possible, we would like to
4621 read it directly from the RHS (after updating it by storing in it its own
4622 components). If there are some necessary unscalarized data in the LHS,
4623 those will be loaded by the original assignment too. If neither of these
4624 cases happen, the original statement can be removed. Most of this is done
4625 by load_assign_lhs_subreplacements.
4627 In the second case, we would like to store all RHS scalarized components
4628 directly into LHS and if they cover the aggregate completely, remove the
4629 statement too. In the third case, we want the LHS components to be loaded
4630 directly from the RHS (DSE will remove the original statement if it
4631 becomes redundant).
4633 This is a bit complex but manageable when types match and when unions do
4634 not cause confusion in a way that we cannot really load a component of LHS
4635 from the RHS or vice versa (the access representing this level can have
4636 subaccesses that are accessible only through a different union field at a
4637 higher level - different from the one used in the examined expression).
4638 Unions are fun.
4640 Therefore, I specially handle a fourth case, happening when there is a
4641 specific type cast or it is impossible to locate a scalarized subaccess on
4642 the other side of the expression. If that happens, I simply "refresh" the
4643 RHS by storing in it its scalarized components, leave the original statement
4644 there to do the copying and then load the scalar replacements of the LHS.
4645 This is what the first branch does. */
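/* A small worked example of the first case (invented names, sketch only):
   for an aggregate copy

     d = s;

   where both d and s have been scalarized into { SR_d_i, SR_d_f } and
   { SR_s_i, SR_s_f }, load_assign_lhs_subreplacements produces

     SR_d_i = SR_s_i;
     SR_d_f = SR_s_f;

   and, since nothing had to be refreshed from the RHS aggregate, the original
   aggregate copy is removed.  */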
4647 if (modify_this_stmt
4648 || gimple_has_volatile_ops (stmt)
4649 || contains_vce_or_bfcref_p (rhs)
4650 || contains_vce_or_bfcref_p (lhs)
4651 || stmt_ends_bb_p (stmt))
4653 /* No need to copy into a constant, it comes pre-initialized. */
4654 if (access_has_children_p (racc) && !TREE_READONLY (racc->base))
4655 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
4656 gsi, false, false, loc);
4657 if (access_has_children_p (lacc))
4659 gimple_stmt_iterator alt_gsi = gsi_none ();
4660 if (stmt_ends_bb_p (stmt))
4662 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
4663 gsi = &alt_gsi;
4665 generate_subtree_copies (lacc->first_child, lhs, lacc->offset, 0, 0,
4666 gsi, true, true, loc);
4668 sra_stats.separate_lhs_rhs_handling++;
4670 /* This gimplification must be done after generate_subtree_copies,
4671 lest we insert the subtree copies in the middle of the gimplified
4672 sequence. */
4673 if (force_gimple_rhs)
4674 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
4675 true, GSI_SAME_STMT);
4676 if (gimple_assign_rhs1 (stmt) != rhs)
4678 modify_this_stmt = true;
4679 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
4680 gcc_assert (stmt == gsi_stmt (orig_gsi));
4683 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
4685 else
4687 if (access_has_children_p (lacc)
4688 && access_has_children_p (racc)
4689 /* When an access represents an unscalarizable region, it usually
4690 represents accesses with variable offset and thus must not be used
4691 to generate new memory accesses. */
4692 && !lacc->grp_unscalarizable_region
4693 && !racc->grp_unscalarizable_region)
4695 struct subreplacement_assignment_data sad;
4697 sad.left_offset = lacc->offset;
4698 sad.assignment_lhs = lhs;
4699 sad.assignment_rhs = rhs;
4700 sad.top_racc = racc;
4701 sad.old_gsi = *gsi;
4702 sad.new_gsi = gsi;
4703 sad.loc = gimple_location (stmt);
4704 sad.refreshed = SRA_UDH_NONE;
4706 if (lacc->grp_read && !lacc->grp_covered)
4707 handle_unscalarized_data_in_subtree (&sad);
4709 load_assign_lhs_subreplacements (lacc, &sad);
4710 if (sad.refreshed != SRA_UDH_RIGHT)
4712 gsi_next (gsi);
4713 unlink_stmt_vdef (stmt);
4714 gsi_remove (&sad.old_gsi, true);
4715 release_defs (stmt);
4716 sra_stats.deleted++;
4717 return SRA_AM_REMOVED;
4720 else
4722 if (access_has_children_p (racc)
4723 && !racc->grp_unscalarized_data
4724 && TREE_CODE (lhs) != SSA_NAME)
4726 if (dump_file)
4728 fprintf (dump_file, "Removing load: ");
4729 print_gimple_stmt (dump_file, stmt, 0);
4731 generate_subtree_copies (racc->first_child, lhs,
4732 racc->offset, 0, 0, gsi,
4733 false, false, loc);
4734 gcc_assert (stmt == gsi_stmt (*gsi));
4735 unlink_stmt_vdef (stmt);
4736 gsi_remove (gsi, true);
4737 release_defs (stmt);
4738 sra_stats.deleted++;
4739 return SRA_AM_REMOVED;
4741 /* Restore the aggregate RHS from its components so the
4742 prevailing aggregate copy does the right thing. */
4743 if (access_has_children_p (racc) && !TREE_READONLY (racc->base))
4744 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
4745 gsi, false, false, loc);
4746 /* Re-load the components of the aggregate copy destination.
4747 But use the RHS aggregate to load from to expose more
4748 optimization opportunities. */
4749 if (access_has_children_p (lacc))
4750 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
4751 0, 0, gsi, true, true, loc);
4754 return SRA_AM_NONE;
4758 /* Set any scalar replacements of values in the constant pool to the initial
4759 value of the constant. (Constant-pool decls like *.LC0 have effectively
4760 been initialized before the program starts, we must do the same for their
4761 replacements.) Thus, we output statements like 'SR.1 = *.LC0[0];' into
4762 the function's entry block. */
4764 static void
4765 initialize_constant_pool_replacements (void)
4767 gimple_seq seq = NULL;
4768 gimple_stmt_iterator gsi = gsi_start (seq);
4769 bitmap_iterator bi;
4770 unsigned i;
4772 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
4774 tree var = candidate (i);
4775 if (!constant_decl_p (var))
4776 continue;
4778 struct access *access = get_first_repr_for_decl (var);
4780 while (access)
4782 if (access->replacement_decl)
4784 gassign *stmt
4785 = gimple_build_assign (get_access_replacement (access),
4786 unshare_expr (access->expr));
4787 if (dump_file && (dump_flags & TDF_DETAILS))
4789 fprintf (dump_file, "Generating constant initializer: ");
4790 print_gimple_stmt (dump_file, stmt, 0);
4791 fprintf (dump_file, "\n");
4793 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
4794 update_stmt (stmt);
4797 if (access->first_child)
4798 access = access->first_child;
4799 else if (access->next_sibling)
4800 access = access->next_sibling;
4801 else
4803 while (access->parent && !access->next_sibling)
4804 access = access->parent;
4805 if (access->next_sibling)
4806 access = access->next_sibling;
4807 else
4808 access = access->next_grp;
4813 seq = gsi_seq (gsi);
4814 if (seq)
4815 gsi_insert_seq_on_edge_immediate (
4816 single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
4819 /* Traverse the function body and perform all modifications as decided in
4820 analyze_all_variable_accesses. Return true iff the CFG has been
4821 changed. */
4823 static bool
4824 sra_modify_function_body (void)
4826 bool cfg_changed = false;
4827 basic_block bb;
4829 initialize_constant_pool_replacements ();
4831 FOR_EACH_BB_FN (bb, cfun)
4833 gimple_stmt_iterator gsi = gsi_start_bb (bb);
4834 while (!gsi_end_p (gsi))
4836 gimple *stmt = gsi_stmt (gsi);
4837 enum assignment_mod_result assign_result;
4838 bool modified = false, deleted = false;
4839 tree *t;
4840 unsigned i;
4842 switch (gimple_code (stmt))
4844 case GIMPLE_RETURN:
4845 t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
4846 if (*t != NULL_TREE)
4847 modified |= sra_modify_expr (t, false, &gsi, &gsi);
4848 break;
4850 case GIMPLE_ASSIGN:
4851 assign_result = sra_modify_assign (stmt, &gsi);
4852 modified |= assign_result == SRA_AM_MODIFIED;
4853 deleted = assign_result == SRA_AM_REMOVED;
4854 break;
4856 case GIMPLE_CALL:
4857 /* Handle calls to .DEFERRED_INIT specially. */
4858 if (gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
4860 assign_result = sra_modify_deferred_init (stmt, &gsi);
4861 modified |= assign_result == SRA_AM_MODIFIED;
4862 deleted = assign_result == SRA_AM_REMOVED;
4864 else
4866 gcall *call = as_a <gcall *> (stmt);
4867 gimple_stmt_iterator call_gsi = gsi;
4869 /* Operands must be processed before the lhs. */
4870 for (i = 0; i < gimple_call_num_args (call); i++)
4872 int flags = gimple_call_arg_flags (call, i);
4873 t = gimple_call_arg_ptr (call, i);
4874 modified |= sra_modify_call_arg (t, &call_gsi, &gsi, flags);
4876 if (gimple_call_chain (call))
4878 t = gimple_call_chain_ptr (call);
4879 int flags = gimple_call_static_chain_flags (call);
4880 modified |= sra_modify_call_arg (t, &call_gsi, &gsi,
4881 flags);
4883 if (gimple_call_lhs (call))
4885 t = gimple_call_lhs_ptr (call);
4886 modified |= sra_modify_expr (t, true, &call_gsi, &gsi);
4889 break;
4891 case GIMPLE_ASM:
4893 gimple_stmt_iterator stmt_gsi = gsi;
4894 gasm *asm_stmt = as_a <gasm *> (stmt);
4895 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
4897 t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
4898 modified |= sra_modify_expr (t, false, &stmt_gsi, &gsi);
4900 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
4902 t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
4903 modified |= sra_modify_expr (t, true, &stmt_gsi, &gsi);
4906 break;
4908 default:
4909 break;
4912 if (modified)
4914 update_stmt (stmt);
4915 if (maybe_clean_eh_stmt (stmt)
4916 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4917 cfg_changed = true;
4919 if (!deleted)
4920 gsi_next (&gsi);
4924 gsi_commit_edge_inserts ();
4925 return cfg_changed;
4928 /* Generate statements initializing scalar replacements of parts of function
4929 parameters. */
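/* Sketch of the effect (invented names): for

     void foo (struct S p) { ... }

   where p has been scalarized, statements equivalent to

     SR_p_i = p.i;
     SR_p_f = p.f;

   are emitted on the single successor edge of the entry block, so the
   replacements start out holding the incoming parameter values.  */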
4931 static void
4932 initialize_parameter_reductions (void)
4934 gimple_stmt_iterator gsi;
4935 gimple_seq seq = NULL;
4936 tree parm;
4938 gsi = gsi_start (seq);
4939 for (parm = DECL_ARGUMENTS (current_function_decl);
4940 parm;
4941 parm = DECL_CHAIN (parm))
4943 vec<access_p> *access_vec;
4944 struct access *access;
4946 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4947 continue;
4948 access_vec = get_base_access_vector (parm);
4949 if (!access_vec)
4950 continue;
4952 for (access = (*access_vec)[0];
4953 access;
4954 access = access->next_grp)
4955 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
4956 EXPR_LOCATION (parm));
4959 seq = gsi_seq (gsi);
4960 if (seq)
4961 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
4964 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
4965 it reveals there are components of some aggregates to be scalarized, it runs
4966 the required transformations. */
4967 static unsigned int
4968 perform_intra_sra (void)
4970 int ret = 0;
4971 sra_initialize ();
4973 if (!find_var_candidates ())
4974 goto out;
4976 if (!scan_function ())
4977 goto out;
4979 if (!analyze_all_variable_accesses ())
4980 goto out;
4982 if (sra_modify_function_body ())
4983 ret = TODO_update_ssa | TODO_cleanup_cfg;
4984 else
4985 ret = TODO_update_ssa;
4986 initialize_parameter_reductions ();
4988 statistics_counter_event (cfun, "Scalar replacements created",
4989 sra_stats.replacements);
4990 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
4991 statistics_counter_event (cfun, "Subtree copy stmts",
4992 sra_stats.subtree_copies);
4993 statistics_counter_event (cfun, "Subreplacement stmts",
4994 sra_stats.subreplacements);
4995 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
4996 statistics_counter_event (cfun, "Separate LHS and RHS handling",
4997 sra_stats.separate_lhs_rhs_handling);
4999 out:
5000 sra_deinitialize ();
5001 return ret;
5004 /* Perform early intraprocedural SRA. */
5005 static unsigned int
5006 early_intra_sra (void)
5008 sra_mode = SRA_MODE_EARLY_INTRA;
5009 return perform_intra_sra ();
5012 /* Perform "late" intraprocedural SRA. */
5013 static unsigned int
5014 late_intra_sra (void)
5016 sra_mode = SRA_MODE_INTRA;
5017 return perform_intra_sra ();
5021 static bool
5022 gate_intra_sra (void)
5024 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
5028 namespace {
5030 const pass_data pass_data_sra_early =
5032 GIMPLE_PASS, /* type */
5033 "esra", /* name */
5034 OPTGROUP_NONE, /* optinfo_flags */
5035 TV_TREE_SRA, /* tv_id */
5036 ( PROP_cfg | PROP_ssa ), /* properties_required */
5037 0, /* properties_provided */
5038 0, /* properties_destroyed */
5039 0, /* todo_flags_start */
5040 TODO_update_ssa, /* todo_flags_finish */
5043 class pass_sra_early : public gimple_opt_pass
5045 public:
5046 pass_sra_early (gcc::context *ctxt)
5047 : gimple_opt_pass (pass_data_sra_early, ctxt)
5050 /* opt_pass methods: */
5051 bool gate (function *) final override { return gate_intra_sra (); }
5052 unsigned int execute (function *) final override
5054 return early_intra_sra ();
5057 }; // class pass_sra_early
5059 } // anon namespace
5061 gimple_opt_pass *
5062 make_pass_sra_early (gcc::context *ctxt)
5064 return new pass_sra_early (ctxt);
5067 namespace {
5069 const pass_data pass_data_sra =
5071 GIMPLE_PASS, /* type */
5072 "sra", /* name */
5073 OPTGROUP_NONE, /* optinfo_flags */
5074 TV_TREE_SRA, /* tv_id */
5075 ( PROP_cfg | PROP_ssa ), /* properties_required */
5076 0, /* properties_provided */
5077 0, /* properties_destroyed */
5078 TODO_update_address_taken, /* todo_flags_start */
5079 TODO_update_ssa, /* todo_flags_finish */
5082 class pass_sra : public gimple_opt_pass
5084 public:
5085 pass_sra (gcc::context *ctxt)
5086 : gimple_opt_pass (pass_data_sra, ctxt)
5089 /* opt_pass methods: */
5090 bool gate (function *) final override { return gate_intra_sra (); }
5091 unsigned int execute (function *) final override { return late_intra_sra (); }
5093 }; // class pass_sra
5095 } // anon namespace
5097 gimple_opt_pass *
5098 make_pass_sra (gcc::context *ctxt)
5100 return new pass_sra (ctxt);