gcc/tree-sra.c
1 /* Scalar Replacement of Aggregates (SRA) converts some structure
2 references into scalar references, exposing them to the scalar
3 optimizers.
4 Copyright (C) 2008-2020 Free Software Foundation, Inc.
5 Contributed by Martin Jambor <mjambor@suse.cz>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
/* This file implements Scalar Replacement of Aggregates (SRA).  SRA is run
   twice, once in the early stages of compilation (early SRA) and once in the
   late stages (late SRA).  The aim of both is to turn references to scalar
   parts of aggregates into uses of independent scalar variables.
28 The two passes are nearly identical, the only difference is that early SRA
29 does not scalarize unions which are used as the result in a GIMPLE_RETURN
30 statement because together with inlining this can lead to weird type
31 conversions.
33 Both passes operate in four stages:
35 1. The declarations that have properties which make them candidates for
36 scalarization are identified in function find_var_candidates(). The
37 candidates are stored in candidate_bitmap.
   2. The function body is scanned.  In the process, declarations which are
   used in a manner that prevents their scalarization are removed from the
41 candidate bitmap. More importantly, for every access into an aggregate,
42 an access structure (struct access) is created by create_access() and
43 stored in a vector associated with the aggregate. Among other
44 information, the aggregate declaration, the offset and size of the access
45 and its type are stored in the structure.
47 On a related note, assign_link structures are created for every assign
48 statement between candidate aggregates and attached to the related
49 accesses.
51 3. The vectors of accesses are analyzed. They are first sorted according to
52 their offset and size and then scanned for partially overlapping accesses
53 (i.e. those which overlap but one is not entirely within another). Such
54 an access disqualifies the whole aggregate from being scalarized.
56 If there is no such inhibiting overlap, a representative access structure
57 is chosen for every unique combination of offset and size. Afterwards,
58 the pass builds a set of trees from these structures, in which children
59 of an access are within their parent (in terms of offset and size).
61 Then accesses are propagated whenever possible (i.e. in cases when it
62 does not create a partially overlapping access) across assign_links from
63 the right hand side to the left hand side.
65 Then the set of trees for each declaration is traversed again and those
66 accesses which should be replaced by a scalar are identified.
68 4. The function is traversed again, and for every reference into an
69 aggregate that has some component which is about to be scalarized,
70 statements are amended and new statements are created as necessary.
71 Finally, if a parameter got scalarized, the scalar replacements are
72 initialized with values from respective parameter aggregates. */
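/* As a minimal illustration (an example written for this commentary, not
   code taken from the pass), consider:

     struct S { int i; float f; };

     int
     foo (struct S s)
     {
       s.i = s.i + 1;
       return s.i;
     }

   Assuming both references to s.i are found scalarizable, SRA creates a
   scalar replacement, say s$i, initializes it from the incoming parameter
   and rewrites the statements roughly to:

     s$i = s.i;
     s$i = s$i + 1;
     return s$i;

   after which the aggregate s itself may become unused.  */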
74 #include "config.h"
75 #include "system.h"
76 #include "coretypes.h"
77 #include "backend.h"
78 #include "target.h"
79 #include "rtl.h"
80 #include "tree.h"
81 #include "gimple.h"
82 #include "predict.h"
83 #include "alloc-pool.h"
84 #include "tree-pass.h"
85 #include "ssa.h"
86 #include "cgraph.h"
87 #include "gimple-pretty-print.h"
88 #include "alias.h"
89 #include "fold-const.h"
90 #include "tree-eh.h"
91 #include "stor-layout.h"
92 #include "gimplify.h"
93 #include "gimple-iterator.h"
94 #include "gimplify-me.h"
95 #include "gimple-walk.h"
96 #include "tree-cfg.h"
97 #include "tree-dfa.h"
98 #include "tree-ssa.h"
99 #include "dbgcnt.h"
100 #include "builtins.h"
101 #include "tree-sra.h"
104 /* Enumeration of all aggregate reductions we can do. */
105 enum sra_mode { SRA_MODE_EARLY_IPA, /* early call regularization */
106 SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
107 SRA_MODE_INTRA }; /* late intraprocedural SRA */
109 /* Global variable describing which aggregate reduction we are performing at
110 the moment. */
111 static enum sra_mode sra_mode;
113 struct assign_link;
115 /* ACCESS represents each access to an aggregate variable (as a whole or a
116 part). It can also represent a group of accesses that refer to exactly the
117 same fragment of an aggregate (i.e. those that have exactly the same offset
118 and size). Such representatives for a single aggregate, once determined,
119 are linked in a linked list and have the group fields set.
121 Moreover, when doing intraprocedural SRA, a tree is built from those
122 representatives (by the means of first_child and next_sibling pointers), in
123 which all items in a subtree are "within" the root, i.e. their offset is
124 greater or equal to offset of the root and offset+size is smaller or equal
125 to offset+size of the root. Children of an access are sorted by offset.
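   As an illustration (made up for this comment, not pass output), for

     struct S { int a; struct { int b; int c; } in; } s;

   with accesses to s, s.in and s.in.c and a 32-bit int, the representatives
   could form a tree in which the access to s (offset 0, size 96) is the
   root, the access to s.in (offset 32, size 64) is its child, and the
   access to s.in.c (offset 64, size 32) is in turn a child of that.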
   Note that accesses to parts of vector and complex number types are always
   represented by an access to the whole vector or complex number.  It is a
   duty of the modifying functions to replace them appropriately.  */
131 struct access
  /* Values returned by `get_ref_base_and_extent' for each component reference.
     If EXPR isn't a component reference, just set `BASE = EXPR', `OFFSET = 0'
     and `SIZE = TREE_SIZE (TREE_TYPE (expr))'.  */
136 HOST_WIDE_INT offset;
137 HOST_WIDE_INT size;
138 tree base;
140 /* Expression. It is context dependent so do not use it to create new
141 expressions to access the original aggregate. See PR 42154 for a
142 testcase. */
143 tree expr;
144 /* Type. */
145 tree type;
147 /* The statement this access belongs to. */
148 gimple *stmt;
150 /* Next group representative for this aggregate. */
151 struct access *next_grp;
153 /* Pointer to the group representative. Pointer to itself if the struct is
154 the representative. */
155 struct access *group_representative;
157 /* After access tree has been constructed, this points to the parent of the
158 current access, if there is one. NULL for roots. */
159 struct access *parent;
161 /* If this access has any children (in terms of the definition above), this
162 points to the first one. */
163 struct access *first_child;
165 /* In intraprocedural SRA, pointer to the next sibling in the access tree as
166 described above. */
167 struct access *next_sibling;
  /* Pointers to the first and last element in the linked list of assign
     links for propagation from RHS to LHS.  */
  struct assign_link *first_rhs_link, *last_rhs_link;
173 /* Pointers to the first and last element in the linked list of assign
174 links for propagation from LHS to RHS. */
175 struct assign_link *first_lhs_link, *last_lhs_link;
177 /* Pointer to the next access in the work queues. */
178 struct access *next_rhs_queued, *next_lhs_queued;
180 /* Replacement variable for this access "region." Never to be accessed
181 directly, always only by the means of get_access_replacement() and only
182 when grp_to_be_replaced flag is set. */
183 tree replacement_decl;
185 /* Is this access made in reverse storage order? */
186 unsigned reverse : 1;
  /* Is this particular access a write access?  */
189 unsigned write : 1;
191 /* Is this access currently in the rhs work queue? */
192 unsigned grp_rhs_queued : 1;
194 /* Is this access currently in the lhs work queue? */
195 unsigned grp_lhs_queued : 1;
197 /* Does this group contain a write access? This flag is propagated down the
198 access tree. */
199 unsigned grp_write : 1;
201 /* Does this group contain a read access? This flag is propagated down the
202 access tree. */
203 unsigned grp_read : 1;
205 /* Does this group contain a read access that comes from an assignment
206 statement? This flag is propagated down the access tree. */
207 unsigned grp_assignment_read : 1;
209 /* Does this group contain a write access that comes from an assignment
210 statement? This flag is propagated down the access tree. */
211 unsigned grp_assignment_write : 1;
213 /* Does this group contain a read access through a scalar type? This flag is
214 not propagated in the access tree in any direction. */
215 unsigned grp_scalar_read : 1;
217 /* Does this group contain a write access through a scalar type? This flag
218 is not propagated in the access tree in any direction. */
219 unsigned grp_scalar_write : 1;
221 /* In a root of an access tree, true means that the entire tree should be
     totally scalarized - that all scalar leaves should be scalarized and
223 non-root grp_total_scalarization accesses should be honored. Otherwise,
224 non-root accesses with grp_total_scalarization should never get scalar
225 replacements. */
226 unsigned grp_total_scalarization : 1;
228 /* Other passes of the analysis use this bit to make function
229 analyze_access_subtree create scalar replacements for this group if
230 possible. */
231 unsigned grp_hint : 1;
233 /* Is the subtree rooted in this access fully covered by scalar
234 replacements? */
235 unsigned grp_covered : 1;
237 /* If set to true, this access and all below it in an access tree must not be
238 scalarized. */
239 unsigned grp_unscalarizable_region : 1;
  /* Whether data have been written to parts of the aggregate covered by this
     access which are not to be scalarized.  This flag is propagated up in the
     access tree.  */
244 unsigned grp_unscalarized_data : 1;
246 /* Set if all accesses in the group consist of the same chain of
247 COMPONENT_REFs and ARRAY_REFs. */
248 unsigned grp_same_access_path : 1;
250 /* Does this access and/or group contain a write access through a
251 BIT_FIELD_REF? */
252 unsigned grp_partial_lhs : 1;
254 /* Set when a scalar replacement should be created for this variable. */
255 unsigned grp_to_be_replaced : 1;
257 /* Set when we want a replacement for the sole purpose of having it in
258 generated debug statements. */
259 unsigned grp_to_be_debug_replaced : 1;
261 /* Should TREE_NO_WARNING of a replacement be set? */
262 unsigned grp_no_warning : 1;
265 typedef struct access *access_p;
268 /* Alloc pool for allocating access structures. */
269 static object_allocator<struct access> access_pool ("SRA accesses");
271 /* A structure linking lhs and rhs accesses from an aggregate assignment. They
272 are used to propagate subaccesses from rhs to lhs and vice versa as long as
273 they don't conflict with what is already there. In the RHS->LHS direction,
274 we also propagate grp_write flag to lazily mark that the access contains any
275 meaningful data. */
276 struct assign_link
278 struct access *lacc, *racc;
279 struct assign_link *next_rhs, *next_lhs;
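/* A sketch of the idea (not actual pass output): for an aggregate assignment
   a = b, one assign_link with LACC pointing to the access for a and RACC to
   the access for b is created and put on the lists of both accesses.  If,
   say, b.f also has an access of its own, propagation across the link can
   create a matching subaccess of a, so that both sides of the assignment can
   later be rewritten in terms of scalar replacements.  */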
282 /* Alloc pool for allocating assign link structures. */
283 static object_allocator<assign_link> assign_link_pool ("SRA links");
285 /* Base (tree) -> Vector (vec<access_p> *) map. */
286 static hash_map<tree, auto_vec<access_p> > *base_access_vec;
/* Hash to limit creation of artificial accesses.  */
289 static hash_map<tree, unsigned> *propagation_budget;
291 /* Candidate hash table helpers. */
293 struct uid_decl_hasher : nofree_ptr_hash <tree_node>
295 static inline hashval_t hash (const tree_node *);
296 static inline bool equal (const tree_node *, const tree_node *);
299 /* Hash a tree in a uid_decl_map. */
301 inline hashval_t
302 uid_decl_hasher::hash (const tree_node *item)
304 return item->decl_minimal.uid;
307 /* Return true if the DECL_UID in both trees are equal. */
309 inline bool
310 uid_decl_hasher::equal (const tree_node *a, const tree_node *b)
312 return (a->decl_minimal.uid == b->decl_minimal.uid);
315 /* Set of candidates. */
316 static bitmap candidate_bitmap;
317 static hash_table<uid_decl_hasher> *candidates;
/* For a candidate UID, return the candidate's decl.  */
321 static inline tree
322 candidate (unsigned uid)
324 tree_node t;
325 t.decl_minimal.uid = uid;
326 return candidates->find_with_hash (&t, static_cast <hashval_t> (uid));
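/* Note the trick above: a throw-away tree_node on the stack with only
   decl_minimal.uid filled in is a sufficient lookup key, because
   uid_decl_hasher only ever consults the DECL_UID.  Illustrative use:
   candidate (DECL_UID (var)) yields VAR while it is still a candidate and
   NULL afterwards.  */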
/* Bitmap of candidates which we should try to entirely scalarize away and
   those which cannot be (because they are used, and need to be used, as a
   whole).  */
331 static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;
333 /* Bitmap of candidates in the constant pool, which cannot be scalarized
334 because this would produce non-constant expressions (e.g. Ada). */
335 static bitmap disqualified_constants;
337 /* Obstack for creation of fancy names. */
338 static struct obstack name_obstack;
/* Heads of linked lists of accesses that need to have their subaccesses
   propagated to their assignment counterparts.  */
342 static struct access *rhs_work_queue_head, *lhs_work_queue_head;
/* Statistics counters of the SRA pass.  */
348 static struct
350 /* Number of processed aggregates is readily available in
351 analyze_all_variable_accesses and so is not stored here. */
353 /* Number of created scalar replacements. */
354 int replacements;
356 /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
357 expression. */
358 int exprs;
360 /* Number of statements created by generate_subtree_copies. */
361 int subtree_copies;
363 /* Number of statements created by load_assign_lhs_subreplacements. */
364 int subreplacements;
366 /* Number of times sra_modify_assign has deleted a statement. */
367 int deleted;
  /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
     RHS separately due to type conversions or nonexistent matching
     references.  */
372 int separate_lhs_rhs_handling;
374 /* Number of parameters that were removed because they were unused. */
375 int deleted_unused_parameters;
377 /* Number of scalars passed as parameters by reference that have been
378 converted to be passed by value. */
379 int scalar_by_ref_to_by_val;
381 /* Number of aggregate parameters that were replaced by one or more of their
382 components. */
383 int aggregate_params_reduced;
  /* Number of components created when splitting aggregate parameters.  */
386 int param_reductions_created;
387 } sra_stats;
/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */

static void
dump_access (FILE *f, struct access *access, bool grp)
392 fprintf (f, "access { ");
393 fprintf (f, "base = (%d)'", DECL_UID (access->base));
394 print_generic_expr (f, access->base);
395 fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
396 fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
397 fprintf (f, ", expr = ");
398 print_generic_expr (f, access->expr);
399 fprintf (f, ", type = ");
400 print_generic_expr (f, access->type);
401 fprintf (f, ", reverse = %d", access->reverse);
402 if (grp)
403 fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
404 "grp_assignment_write = %d, grp_scalar_read = %d, "
405 "grp_scalar_write = %d, grp_total_scalarization = %d, "
406 "grp_hint = %d, grp_covered = %d, "
407 "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
408 "grp_same_access_path = %d, grp_partial_lhs = %d, "
409 "grp_to_be_replaced = %d, grp_to_be_debug_replaced = %d}\n",
410 access->grp_read, access->grp_write, access->grp_assignment_read,
411 access->grp_assignment_write, access->grp_scalar_read,
412 access->grp_scalar_write, access->grp_total_scalarization,
413 access->grp_hint, access->grp_covered,
414 access->grp_unscalarizable_region, access->grp_unscalarized_data,
415 access->grp_same_access_path, access->grp_partial_lhs,
416 access->grp_to_be_replaced, access->grp_to_be_debug_replaced);
417 else
418 fprintf (f, ", write = %d, grp_total_scalarization = %d, "
419 "grp_partial_lhs = %d}\n",
420 access->write, access->grp_total_scalarization,
421 access->grp_partial_lhs);
424 /* Dump a subtree rooted in ACCESS to file F, indent by LEVEL. */
426 static void
427 dump_access_tree_1 (FILE *f, struct access *access, int level)
431 int i;
433 for (i = 0; i < level; i++)
434 fputs ("* ", f);
436 dump_access (f, access, true);
438 if (access->first_child)
439 dump_access_tree_1 (f, access->first_child, level + 1);
441 access = access->next_sibling;
443 while (access);
446 /* Dump all access trees for a variable, given the pointer to the first root in
447 ACCESS. */
449 static void
450 dump_access_tree (FILE *f, struct access *access)
452 for (; access; access = access->next_grp)
453 dump_access_tree_1 (f, access, 0);
456 /* Return true iff ACC is non-NULL and has subaccesses. */
458 static inline bool
459 access_has_children_p (struct access *acc)
461 return acc && acc->first_child;
464 /* Return true iff ACC is (partly) covered by at least one replacement. */
466 static bool
467 access_has_replacements_p (struct access *acc)
469 struct access *child;
470 if (acc->grp_to_be_replaced)
471 return true;
472 for (child = acc->first_child; child; child = child->next_sibling)
473 if (access_has_replacements_p (child))
474 return true;
475 return false;
478 /* Return a vector of pointers to accesses for the variable given in BASE or
479 NULL if there is none. */
481 static vec<access_p> *
482 get_base_access_vector (tree base)
484 return base_access_vec->get (base);
487 /* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
488 in ACCESS. Return NULL if it cannot be found. */
490 static struct access *
491 find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
492 HOST_WIDE_INT size)
494 while (access && (access->offset != offset || access->size != size))
496 struct access *child = access->first_child;
498 while (child && (child->offset + child->size <= offset))
499 child = child->next_sibling;
500 access = child;
503 /* Total scalarization does not replace single field structures with their
504 single field but rather creates an access for them underneath. Look for
505 it. */
506 if (access)
507 while (access->first_child
508 && access->first_child->offset == offset
509 && access->first_child->size == size)
510 access = access->first_child;
512 return access;
515 /* Return the first group representative for DECL or NULL if none exists. */
517 static struct access *
518 get_first_repr_for_decl (tree base)
520 vec<access_p> *access_vec;
522 access_vec = get_base_access_vector (base);
523 if (!access_vec)
524 return NULL;
526 return (*access_vec)[0];
529 /* Find an access representative for the variable BASE and given OFFSET and
530 SIZE. Requires that access trees have already been built. Return NULL if
531 it cannot be found. */
533 static struct access *
534 get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
535 HOST_WIDE_INT size)
537 struct access *access;
539 access = get_first_repr_for_decl (base);
540 while (access && (access->offset + access->size <= offset))
541 access = access->next_grp;
542 if (!access)
543 return NULL;
545 return find_access_in_subtree (access, offset, size);
548 /* Add LINK to the linked list of assign links of RACC. */
550 static void
551 add_link_to_rhs (struct access *racc, struct assign_link *link)
553 gcc_assert (link->racc == racc);
555 if (!racc->first_rhs_link)
557 gcc_assert (!racc->last_rhs_link);
558 racc->first_rhs_link = link;
560 else
561 racc->last_rhs_link->next_rhs = link;
563 racc->last_rhs_link = link;
564 link->next_rhs = NULL;
567 /* Add LINK to the linked list of lhs assign links of LACC. */
569 static void
570 add_link_to_lhs (struct access *lacc, struct assign_link *link)
572 gcc_assert (link->lacc == lacc);
574 if (!lacc->first_lhs_link)
576 gcc_assert (!lacc->last_lhs_link);
577 lacc->first_lhs_link = link;
579 else
580 lacc->last_lhs_link->next_lhs = link;
582 lacc->last_lhs_link = link;
583 link->next_lhs = NULL;
586 /* Move all link structures in their linked list in OLD_ACC to the linked list
587 in NEW_ACC. */
588 static void
589 relink_to_new_repr (struct access *new_acc, struct access *old_acc)
591 if (old_acc->first_rhs_link)
594 if (new_acc->first_rhs_link)
596 gcc_assert (!new_acc->last_rhs_link->next_rhs);
597 gcc_assert (!old_acc->last_rhs_link
598 || !old_acc->last_rhs_link->next_rhs);
600 new_acc->last_rhs_link->next_rhs = old_acc->first_rhs_link;
601 new_acc->last_rhs_link = old_acc->last_rhs_link;
603 else
605 gcc_assert (!new_acc->last_rhs_link);
607 new_acc->first_rhs_link = old_acc->first_rhs_link;
608 new_acc->last_rhs_link = old_acc->last_rhs_link;
610 old_acc->first_rhs_link = old_acc->last_rhs_link = NULL;
612 else
613 gcc_assert (!old_acc->last_rhs_link);
615 if (old_acc->first_lhs_link)
618 if (new_acc->first_lhs_link)
620 gcc_assert (!new_acc->last_lhs_link->next_lhs);
621 gcc_assert (!old_acc->last_lhs_link
622 || !old_acc->last_lhs_link->next_lhs);
624 new_acc->last_lhs_link->next_lhs = old_acc->first_lhs_link;
625 new_acc->last_lhs_link = old_acc->last_lhs_link;
627 else
629 gcc_assert (!new_acc->last_lhs_link);
631 new_acc->first_lhs_link = old_acc->first_lhs_link;
632 new_acc->last_lhs_link = old_acc->last_lhs_link;
634 old_acc->first_lhs_link = old_acc->last_lhs_link = NULL;
636 else
637 gcc_assert (!old_acc->last_lhs_link);
/* Add ACCESS to the work queue for propagation of subaccesses from RHS to
   LHS (which is actually a stack).  */
644 static void
645 add_access_to_rhs_work_queue (struct access *access)
647 if (access->first_rhs_link && !access->grp_rhs_queued)
649 gcc_assert (!access->next_rhs_queued);
650 access->next_rhs_queued = rhs_work_queue_head;
651 access->grp_rhs_queued = 1;
652 rhs_work_queue_head = access;
/* Add ACCESS to the work queue for propagation of subaccesses from LHS to
   RHS (which is actually a stack).  */
659 static void
660 add_access_to_lhs_work_queue (struct access *access)
662 if (access->first_lhs_link && !access->grp_lhs_queued)
664 gcc_assert (!access->next_lhs_queued);
665 access->next_lhs_queued = lhs_work_queue_head;
666 access->grp_lhs_queued = 1;
667 lhs_work_queue_head = access;
671 /* Pop an access from the work queue for propagating from RHS to LHS, and
672 return it, assuming there is one. */
674 static struct access *
675 pop_access_from_rhs_work_queue (void)
677 struct access *access = rhs_work_queue_head;
679 rhs_work_queue_head = access->next_rhs_queued;
680 access->next_rhs_queued = NULL;
681 access->grp_rhs_queued = 0;
682 return access;
685 /* Pop an access from the work queue for propagating from LHS to RHS, and
686 return it, assuming there is one. */
688 static struct access *
689 pop_access_from_lhs_work_queue (void)
691 struct access *access = lhs_work_queue_head;
693 lhs_work_queue_head = access->next_lhs_queued;
694 access->next_lhs_queued = NULL;
695 access->grp_lhs_queued = 0;
696 return access;
699 /* Allocate necessary structures. */
701 static void
702 sra_initialize (void)
704 candidate_bitmap = BITMAP_ALLOC (NULL);
705 candidates = new hash_table<uid_decl_hasher>
706 (vec_safe_length (cfun->local_decls) / 2);
707 should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
708 cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
709 disqualified_constants = BITMAP_ALLOC (NULL);
710 gcc_obstack_init (&name_obstack);
711 base_access_vec = new hash_map<tree, auto_vec<access_p> >;
712 memset (&sra_stats, 0, sizeof (sra_stats));
715 /* Deallocate all general structures. */
717 static void
718 sra_deinitialize (void)
720 BITMAP_FREE (candidate_bitmap);
721 delete candidates;
722 candidates = NULL;
723 BITMAP_FREE (should_scalarize_away_bitmap);
724 BITMAP_FREE (cannot_scalarize_away_bitmap);
725 BITMAP_FREE (disqualified_constants);
726 access_pool.release ();
727 assign_link_pool.release ();
728 obstack_free (&name_obstack, NULL);
730 delete base_access_vec;
733 /* Return true if DECL is a VAR_DECL in the constant pool, false otherwise. */
735 static bool constant_decl_p (tree decl)
737 return VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl);
740 /* Remove DECL from candidates for SRA and write REASON to the dump file if
741 there is one. */
743 static void
744 disqualify_candidate (tree decl, const char *reason)
746 if (bitmap_clear_bit (candidate_bitmap, DECL_UID (decl)))
747 candidates->remove_elt_with_hash (decl, DECL_UID (decl));
748 if (constant_decl_p (decl))
749 bitmap_set_bit (disqualified_constants, DECL_UID (decl));
751 if (dump_file && (dump_flags & TDF_DETAILS))
753 fprintf (dump_file, "! Disqualifying ");
754 print_generic_expr (dump_file, decl);
755 fprintf (dump_file, " - %s\n", reason);
759 /* Return true iff the type contains a field or an element which does not allow
760 scalarization. Use VISITED_TYPES to avoid re-checking already checked
761 (sub-)types. */
763 static bool
764 type_internals_preclude_sra_p_1 (tree type, const char **msg,
765 hash_set<tree> *visited_types)
767 tree fld;
768 tree et;
770 if (visited_types->contains (type))
771 return false;
772 visited_types->add (type);
774 switch (TREE_CODE (type))
776 case RECORD_TYPE:
777 case UNION_TYPE:
778 case QUAL_UNION_TYPE:
779 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
780 if (TREE_CODE (fld) == FIELD_DECL)
782 if (TREE_CODE (fld) == FUNCTION_DECL)
783 continue;
784 tree ft = TREE_TYPE (fld);
786 if (TREE_THIS_VOLATILE (fld))
788 *msg = "volatile structure field";
789 return true;
791 if (!DECL_FIELD_OFFSET (fld))
793 *msg = "no structure field offset";
794 return true;
796 if (!DECL_SIZE (fld))
798 *msg = "zero structure field size";
799 return true;
801 if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
803 *msg = "structure field offset not fixed";
804 return true;
806 if (!tree_fits_uhwi_p (DECL_SIZE (fld)))
808 *msg = "structure field size not fixed";
809 return true;
811 if (!tree_fits_shwi_p (bit_position (fld)))
813 *msg = "structure field size too big";
814 return true;
816 if (AGGREGATE_TYPE_P (ft)
817 && int_bit_position (fld) % BITS_PER_UNIT != 0)
819 *msg = "structure field is bit field";
820 return true;
823 if (AGGREGATE_TYPE_P (ft)
824 && type_internals_preclude_sra_p_1 (ft, msg, visited_types))
825 return true;
828 return false;
830 case ARRAY_TYPE:
831 et = TREE_TYPE (type);
833 if (TYPE_VOLATILE (et))
835 *msg = "element type is volatile";
836 return true;
839 if (AGGREGATE_TYPE_P (et)
840 && type_internals_preclude_sra_p_1 (et, msg, visited_types))
841 return true;
843 return false;
845 default:
846 return false;
850 /* Return true iff the type contains a field or an element which does not allow
851 scalarization. */
853 bool
854 type_internals_preclude_sra_p (tree type, const char **msg)
856 hash_set<tree> visited_types;
857 return type_internals_preclude_sra_p_1 (type, msg, &visited_types);
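/* For instance, a type such as struct { volatile int v; } is rejected by the
   checks above with the "volatile structure field" message, and a structure
   containing a field whose size is not representable as a fixed number of
   bits is rejected as "structure field size not fixed" (illustrations only,
   not an exhaustive list of the conditions).  */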
861 /* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
862 the three fields. Also add it to the vector of accesses corresponding to
863 the base. Finally, return the new access. */
865 static struct access *
866 create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
868 struct access *access = access_pool.allocate ();
870 memset (access, 0, sizeof (struct access));
871 access->base = base;
872 access->offset = offset;
873 access->size = size;
875 base_access_vec->get_or_insert (base).safe_push (access);
877 return access;
880 static bool maybe_add_sra_candidate (tree);
882 /* Create and insert access for EXPR. Return created access, or NULL if it is
883 not possible. Also scan for uses of constant pool as we go along and add
884 to candidates. */
886 static struct access *
887 create_access (tree expr, gimple *stmt, bool write)
889 struct access *access;
890 poly_int64 poffset, psize, pmax_size;
891 tree base = expr;
892 bool reverse, unscalarizable_region = false;
894 base = get_ref_base_and_extent (expr, &poffset, &psize, &pmax_size,
895 &reverse);
897 /* For constant-pool entries, check we can substitute the constant value. */
898 if (constant_decl_p (base))
900 gcc_assert (!bitmap_bit_p (disqualified_constants, DECL_UID (base)));
901 if (expr != base
902 && !is_gimple_reg_type (TREE_TYPE (expr))
903 && dump_file && (dump_flags & TDF_DETAILS))
905 /* This occurs in Ada with accesses to ARRAY_RANGE_REFs,
906 and elements of multidimensional arrays (which are
907 multi-element arrays in their own right). */
908 fprintf (dump_file, "Allowing non-reg-type load of part"
909 " of constant-pool entry: ");
910 print_generic_expr (dump_file, expr);
912 maybe_add_sra_candidate (base);
915 if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
916 return NULL;
918 HOST_WIDE_INT offset, size, max_size;
919 if (!poffset.is_constant (&offset)
920 || !psize.is_constant (&size)
921 || !pmax_size.is_constant (&max_size))
923 disqualify_candidate (base, "Encountered a polynomial-sized access.");
924 return NULL;
927 if (size != max_size)
929 size = max_size;
930 unscalarizable_region = true;
932 if (size == 0)
933 return NULL;
934 if (size < 0)
936 disqualify_candidate (base, "Encountered an unconstrained access.");
937 return NULL;
940 access = create_access_1 (base, offset, size);
941 access->expr = expr;
942 access->type = TREE_TYPE (expr);
943 access->write = write;
944 access->grp_unscalarizable_region = unscalarizable_region;
945 access->stmt = stmt;
946 access->reverse = reverse;
948 return access;
952 /* Return true iff TYPE is scalarizable - i.e. a RECORD_TYPE or fixed-length
953 ARRAY_TYPE with fields that are either of gimple register types (excluding
954 bit-fields) or (recursively) scalarizable types. CONST_DECL must be true if
955 we are considering a decl from constant pool. If it is false, char arrays
956 will be refused. */
958 static bool
959 scalarizable_type_p (tree type, bool const_decl)
961 if (is_gimple_reg_type (type))
962 return true;
963 if (type_contains_placeholder_p (type))
964 return false;
966 bool have_predecessor_field = false;
967 HOST_WIDE_INT prev_pos = 0;
969 switch (TREE_CODE (type))
971 case RECORD_TYPE:
972 for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
973 if (TREE_CODE (fld) == FIELD_DECL)
975 tree ft = TREE_TYPE (fld);
977 if (zerop (DECL_SIZE (fld)))
978 continue;
980 HOST_WIDE_INT pos = int_bit_position (fld);
981 if (have_predecessor_field
982 && pos <= prev_pos)
983 return false;
985 have_predecessor_field = true;
986 prev_pos = pos;
988 if (DECL_BIT_FIELD (fld))
989 return false;
991 if (!scalarizable_type_p (ft, const_decl))
992 return false;
995 return true;
997 case ARRAY_TYPE:
999 HOST_WIDE_INT min_elem_size;
1000 if (const_decl)
1001 min_elem_size = 0;
1002 else
1003 min_elem_size = BITS_PER_UNIT;
1005 if (TYPE_DOMAIN (type) == NULL_TREE
1006 || !tree_fits_shwi_p (TYPE_SIZE (type))
1007 || !tree_fits_shwi_p (TYPE_SIZE (TREE_TYPE (type)))
1008 || (tree_to_shwi (TYPE_SIZE (TREE_TYPE (type))) <= min_elem_size)
1009 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
1010 return false;
1011 if (tree_to_shwi (TYPE_SIZE (type)) == 0
1012 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE)
1013 /* Zero-element array, should not prevent scalarization. */
1015 else if ((tree_to_shwi (TYPE_SIZE (type)) <= 0)
1016 || !tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
1017 /* Variable-length array, do not allow scalarization. */
1018 return false;
1020 tree elem = TREE_TYPE (type);
1021 if (!scalarizable_type_p (elem, const_decl))
1022 return false;
1023 return true;
1025 default:
1026 return false;
/* Return true if REF has a VIEW_CONVERT_EXPR somewhere in it.  */
1032 static inline bool
1033 contains_view_convert_expr_p (const_tree ref)
1035 while (handled_component_p (ref))
1037 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
1038 return true;
1039 ref = TREE_OPERAND (ref, 0);
1042 return false;
/* Return true if REF contains a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
   bit-field field declaration.  If TYPE_CHANGING_P is non-NULL, the bool it
   points to will be set if REF contains any of the above or a MEM_REF
   expression that effectively performs type conversion.  */
1050 static bool
1051 contains_vce_or_bfcref_p (const_tree ref, bool *type_changing_p = NULL)
1053 while (handled_component_p (ref))
1055 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
1056 || (TREE_CODE (ref) == COMPONENT_REF
1057 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
1059 if (type_changing_p)
1060 *type_changing_p = true;
1061 return true;
1063 ref = TREE_OPERAND (ref, 0);
1066 if (!type_changing_p
1067 || TREE_CODE (ref) != MEM_REF
1068 || TREE_CODE (TREE_OPERAND (ref, 0)) != ADDR_EXPR)
1069 return false;
1071 tree mem = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
1072 if (TYPE_MAIN_VARIANT (TREE_TYPE (ref))
1073 != TYPE_MAIN_VARIANT (TREE_TYPE (mem)))
1074 *type_changing_p = true;
1076 return false;
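/* For example (assuming the types have distinct main variants), a load such
   as MEM[(struct T *)&u] from a variable u declared with some other type
   makes the function above return false but set *TYPE_CHANGING_P, because
   the MEM_REF re-interprets the object in a different type.  */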
1079 /* Search the given tree for a declaration by skipping handled components and
1080 exclude it from the candidates. */
1082 static void
1083 disqualify_base_of_expr (tree t, const char *reason)
1085 t = get_base_address (t);
1086 if (t && DECL_P (t))
1087 disqualify_candidate (t, reason);
1090 /* Scan expression EXPR and create access structures for all accesses to
1091 candidates for scalarization. Return the created access or NULL if none is
1092 created. */
1094 static struct access *
1095 build_access_from_expr_1 (tree expr, gimple *stmt, bool write)
1097 struct access *ret = NULL;
1098 bool partial_ref;
1100 if (TREE_CODE (expr) == BIT_FIELD_REF
1101 || TREE_CODE (expr) == IMAGPART_EXPR
1102 || TREE_CODE (expr) == REALPART_EXPR)
1104 expr = TREE_OPERAND (expr, 0);
1105 partial_ref = true;
1107 else
1108 partial_ref = false;
1110 if (storage_order_barrier_p (expr))
1112 disqualify_base_of_expr (expr, "storage order barrier.");
1113 return NULL;
  /* We need to dive through V_C_Es in order to get the size of their operand
     and not the result type.  Ada produces such statements.  We are also
     capable of handling the topmost V_C_E but not any of those buried in other
     handled components.  */
1120 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
1121 expr = TREE_OPERAND (expr, 0);
1123 if (contains_view_convert_expr_p (expr))
1125 disqualify_base_of_expr (expr, "V_C_E under a different handled "
1126 "component.");
1127 return NULL;
1129 if (TREE_THIS_VOLATILE (expr))
1131 disqualify_base_of_expr (expr, "part of a volatile reference.");
1132 return NULL;
1135 switch (TREE_CODE (expr))
1137 case MEM_REF:
1138 if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR)
1139 return NULL;
1140 /* fall through */
1141 case VAR_DECL:
1142 case PARM_DECL:
1143 case RESULT_DECL:
1144 case COMPONENT_REF:
1145 case ARRAY_REF:
1146 case ARRAY_RANGE_REF:
1147 ret = create_access (expr, stmt, write);
1148 break;
1150 default:
1151 break;
1154 if (write && partial_ref && ret)
1155 ret->grp_partial_lhs = 1;
1157 return ret;
1160 /* Scan expression EXPR and create access structures for all accesses to
1161 candidates for scalarization. Return true if any access has been inserted.
1162 STMT must be the statement from which the expression is taken, WRITE must be
1163 true if the expression is a store and false otherwise. */
1165 static bool
1166 build_access_from_expr (tree expr, gimple *stmt, bool write)
1168 struct access *access;
1170 access = build_access_from_expr_1 (expr, stmt, write);
1171 if (access)
      /* This means the aggregate is accessed as a whole in a way other than an
	 assign statement and thus cannot be removed even if we had a scalar
	 replacement for everything.  */
1176 if (cannot_scalarize_away_bitmap)
1177 bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
1178 return true;
1180 return false;
1183 /* Return the single non-EH successor edge of BB or NULL if there is none or
1184 more than one. */
1186 static edge
1187 single_non_eh_succ (basic_block bb)
1189 edge e, res = NULL;
1190 edge_iterator ei;
1192 FOR_EACH_EDGE (e, ei, bb->succs)
1193 if (!(e->flags & EDGE_EH))
1195 if (res)
1196 return NULL;
1197 res = e;
1200 return res;
1203 /* Disqualify LHS and RHS for scalarization if STMT has to terminate its BB and
1204 there is no alternative spot where to put statements SRA might need to
1205 generate after it. The spot we are looking for is an edge leading to a
1206 single non-EH successor, if it exists and is indeed single. RHS may be
1207 NULL, in that case ignore it. */
1209 static bool
1210 disqualify_if_bad_bb_terminating_stmt (gimple *stmt, tree lhs, tree rhs)
1212 if (stmt_ends_bb_p (stmt))
1214 if (single_non_eh_succ (gimple_bb (stmt)))
1215 return false;
1217 disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
1218 if (rhs)
1219 disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
1220 return true;
1222 return false;
1225 /* Return true if the nature of BASE is such that it contains data even if
1226 there is no write to it in the function. */
1228 static bool
1229 comes_initialized_p (tree base)
1231 return TREE_CODE (base) == PARM_DECL || constant_decl_p (base);
1234 /* Scan expressions occurring in STMT, create access structures for all accesses
1235 to candidates for scalarization and remove those candidates which occur in
1236 statements or expressions that prevent them from being split apart. Return
1237 true if any access has been inserted. */
1239 static bool
1240 build_accesses_from_assign (gimple *stmt)
1242 tree lhs, rhs;
1243 struct access *lacc, *racc;
1245 if (!gimple_assign_single_p (stmt)
1246 /* Scope clobbers don't influence scalarization. */
1247 || gimple_clobber_p (stmt))
1248 return false;
1250 lhs = gimple_assign_lhs (stmt);
1251 rhs = gimple_assign_rhs1 (stmt);
1253 if (disqualify_if_bad_bb_terminating_stmt (stmt, lhs, rhs))
1254 return false;
1256 racc = build_access_from_expr_1 (rhs, stmt, false);
1257 lacc = build_access_from_expr_1 (lhs, stmt, true);
1259 if (lacc)
1261 lacc->grp_assignment_write = 1;
1262 if (storage_order_barrier_p (rhs))
1263 lacc->grp_unscalarizable_region = 1;
1265 if (should_scalarize_away_bitmap && !is_gimple_reg_type (lacc->type))
1267 bool type_changing_p = false;
1268 contains_vce_or_bfcref_p (lhs, &type_changing_p);
1269 if (type_changing_p)
1270 bitmap_set_bit (cannot_scalarize_away_bitmap,
1271 DECL_UID (lacc->base));
1275 if (racc)
1277 racc->grp_assignment_read = 1;
1278 if (should_scalarize_away_bitmap && !is_gimple_reg_type (racc->type))
1280 bool type_changing_p = false;
1281 contains_vce_or_bfcref_p (rhs, &type_changing_p);
1283 if (type_changing_p || gimple_has_volatile_ops (stmt))
1284 bitmap_set_bit (cannot_scalarize_away_bitmap,
1285 DECL_UID (racc->base));
1286 else
1287 bitmap_set_bit (should_scalarize_away_bitmap,
1288 DECL_UID (racc->base));
1290 if (storage_order_barrier_p (lhs))
1291 racc->grp_unscalarizable_region = 1;
1294 if (lacc && racc
1295 && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1296 && !lacc->grp_unscalarizable_region
1297 && !racc->grp_unscalarizable_region
1298 && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
1299 && lacc->size == racc->size
1300 && useless_type_conversion_p (lacc->type, racc->type))
1302 struct assign_link *link;
1304 link = assign_link_pool.allocate ();
1305 memset (link, 0, sizeof (struct assign_link));
1307 link->lacc = lacc;
1308 link->racc = racc;
1309 add_link_to_rhs (racc, link);
1310 add_link_to_lhs (lacc, link);
1311 add_access_to_rhs_work_queue (racc);
1312 add_access_to_lhs_work_queue (lacc);
      /* Let's delay marking the areas as written until propagation of accesses
	 across the link, unless the nature of the rhs tells us that its data
	 comes from elsewhere.  */
1317 if (!comes_initialized_p (racc->base))
1318 lacc->write = false;
1321 return lacc || racc;
/* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
   GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */
1327 static bool
1328 asm_visit_addr (gimple *, tree op, tree, void *)
1330 op = get_base_address (op);
1331 if (op
1332 && DECL_P (op))
1333 disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");
1335 return false;
1338 /* Scan function and look for interesting expressions and create access
1339 structures for them. Return true iff any access is created. */
1341 static bool
1342 scan_function (void)
1344 basic_block bb;
1345 bool ret = false;
1347 FOR_EACH_BB_FN (bb, cfun)
1349 gimple_stmt_iterator gsi;
1350 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1352 gimple *stmt = gsi_stmt (gsi);
1353 tree t;
1354 unsigned i;
1356 switch (gimple_code (stmt))
1358 case GIMPLE_RETURN:
1359 t = gimple_return_retval (as_a <greturn *> (stmt));
1360 if (t != NULL_TREE)
1361 ret |= build_access_from_expr (t, stmt, false);
1362 break;
1364 case GIMPLE_ASSIGN:
1365 ret |= build_accesses_from_assign (stmt);
1366 break;
1368 case GIMPLE_CALL:
1369 for (i = 0; i < gimple_call_num_args (stmt); i++)
1370 ret |= build_access_from_expr (gimple_call_arg (stmt, i),
1371 stmt, false);
1373 t = gimple_call_lhs (stmt);
1374 if (t && !disqualify_if_bad_bb_terminating_stmt (stmt, t, NULL))
1375 ret |= build_access_from_expr (t, stmt, true);
1376 break;
1378 case GIMPLE_ASM:
1380 gasm *asm_stmt = as_a <gasm *> (stmt);
1381 walk_stmt_load_store_addr_ops (asm_stmt, NULL, NULL, NULL,
1382 asm_visit_addr);
1383 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
1385 t = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
1386 ret |= build_access_from_expr (t, asm_stmt, false);
1388 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
1390 t = TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
1391 ret |= build_access_from_expr (t, asm_stmt, true);
1394 break;
1396 default:
1397 break;
1402 return ret;
/* Helper of QSORT function.  There are pointers to accesses in the array.  An
   access is considered smaller than another if it has a smaller offset or if
   the offsets are the same but its size is bigger.  */
1409 static int
1410 compare_access_positions (const void *a, const void *b)
1412 const access_p *fp1 = (const access_p *) a;
1413 const access_p *fp2 = (const access_p *) b;
1414 const access_p f1 = *fp1;
1415 const access_p f2 = *fp2;
1417 if (f1->offset != f2->offset)
1418 return f1->offset < f2->offset ? -1 : 1;
1420 if (f1->size == f2->size)
1422 if (f1->type == f2->type)
1423 return 0;
1424 /* Put any non-aggregate type before any aggregate type. */
1425 else if (!is_gimple_reg_type (f1->type)
1426 && is_gimple_reg_type (f2->type))
1427 return 1;
1428 else if (is_gimple_reg_type (f1->type)
1429 && !is_gimple_reg_type (f2->type))
1430 return -1;
1431 /* Put any complex or vector type before any other scalar type. */
1432 else if (TREE_CODE (f1->type) != COMPLEX_TYPE
1433 && TREE_CODE (f1->type) != VECTOR_TYPE
1434 && (TREE_CODE (f2->type) == COMPLEX_TYPE
1435 || TREE_CODE (f2->type) == VECTOR_TYPE))
1436 return 1;
1437 else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
1438 || TREE_CODE (f1->type) == VECTOR_TYPE)
1439 && TREE_CODE (f2->type) != COMPLEX_TYPE
1440 && TREE_CODE (f2->type) != VECTOR_TYPE)
1441 return -1;
1442 /* Put any integral type before any non-integral type. When splicing, we
1443 make sure that those with insufficient precision and occupying the
1444 same space are not scalarized. */
1445 else if (INTEGRAL_TYPE_P (f1->type)
1446 && !INTEGRAL_TYPE_P (f2->type))
1447 return -1;
1448 else if (!INTEGRAL_TYPE_P (f1->type)
1449 && INTEGRAL_TYPE_P (f2->type))
1450 return 1;
1451 /* Put the integral type with the bigger precision first. */
1452 else if (INTEGRAL_TYPE_P (f1->type)
1453 && INTEGRAL_TYPE_P (f2->type)
1454 && (TYPE_PRECISION (f2->type) != TYPE_PRECISION (f1->type)))
1455 return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
1456 /* Stabilize the sort. */
1457 return TYPE_UID (f1->type) - TYPE_UID (f2->type);
1460 /* We want the bigger accesses first, thus the opposite operator in the next
1461 line: */
1462 return f1->size > f2->size ? -1 : 1;
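/* To illustrate the resulting order (hypothetical accesses, not pass data):
   among accesses at the same offset, one of size 64 sorts before one of size
   32; among equally sized scalar accesses, a complex or vector type sorts
   before an int, and an int sorts before a float, because integral types
   precede non-integral ones.  */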
1466 /* Append a name of the declaration to the name obstack. A helper function for
1467 make_fancy_name. */
1469 static void
1470 make_fancy_decl_name (tree decl)
1472 char buffer[32];
1474 tree name = DECL_NAME (decl);
1475 if (name)
1476 obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
1477 IDENTIFIER_LENGTH (name));
1478 else
1480 sprintf (buffer, "D%u", DECL_UID (decl));
1481 obstack_grow (&name_obstack, buffer, strlen (buffer));
1485 /* Helper for make_fancy_name. */
1487 static void
1488 make_fancy_name_1 (tree expr)
1490 char buffer[32];
1491 tree index;
1493 if (DECL_P (expr))
1495 make_fancy_decl_name (expr);
1496 return;
1499 switch (TREE_CODE (expr))
1501 case COMPONENT_REF:
1502 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1503 obstack_1grow (&name_obstack, '$');
1504 make_fancy_decl_name (TREE_OPERAND (expr, 1));
1505 break;
1507 case ARRAY_REF:
1508 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1509 obstack_1grow (&name_obstack, '$');
1510 /* Arrays with only one element may not have a constant as their
1511 index. */
1512 index = TREE_OPERAND (expr, 1);
1513 if (TREE_CODE (index) != INTEGER_CST)
1514 break;
1515 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
1516 obstack_grow (&name_obstack, buffer, strlen (buffer));
1517 break;
1519 case ADDR_EXPR:
1520 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1521 break;
1523 case MEM_REF:
1524 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1525 if (!integer_zerop (TREE_OPERAND (expr, 1)))
1527 obstack_1grow (&name_obstack, '$');
1528 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
1529 TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
1530 obstack_grow (&name_obstack, buffer, strlen (buffer));
1532 break;
1534 case BIT_FIELD_REF:
1535 case REALPART_EXPR:
1536 case IMAGPART_EXPR:
1537 gcc_unreachable (); /* we treat these as scalars. */
1538 break;
1539 default:
1540 break;
/* Create a human readable name for a replacement variable of access
   expression EXPR.  */
1546 static char *
1547 make_fancy_name (tree expr)
1549 make_fancy_name_1 (expr);
1550 obstack_1grow (&name_obstack, '\0');
1551 return XOBFINISH (&name_obstack, char *);
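/* For example, for an expression like s.in.c[3] the functions above produce
   the name "s$in$c$3", assuming all the components have declared names and
   the array index is an INTEGER_CST.  */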
1554 /* Construct a MEM_REF that would reference a part of aggregate BASE of type
1555 EXP_TYPE at the given OFFSET and with storage order REVERSE. If BASE is
1556 something for which get_addr_base_and_unit_offset returns NULL, gsi must
1557 be non-NULL and is used to insert new statements either before or below
1558 the current one as specified by INSERT_AFTER. This function is not capable
1559 of handling bitfields. */
1561 tree
1562 build_ref_for_offset (location_t loc, tree base, poly_int64 offset,
1563 bool reverse, tree exp_type, gimple_stmt_iterator *gsi,
1564 bool insert_after)
1566 tree prev_base = base;
1567 tree off;
1568 tree mem_ref;
1569 poly_int64 base_offset;
1570 unsigned HOST_WIDE_INT misalign;
1571 unsigned int align;
1573 /* Preserve address-space information. */
1574 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1575 if (as != TYPE_ADDR_SPACE (exp_type))
1576 exp_type = build_qualified_type (exp_type,
1577 TYPE_QUALS (exp_type)
1578 | ENCODE_QUAL_ADDR_SPACE (as));
1580 poly_int64 byte_offset = exact_div (offset, BITS_PER_UNIT);
1581 get_object_alignment_1 (base, &align, &misalign);
1582 base = get_addr_base_and_unit_offset (base, &base_offset);
1584 /* get_addr_base_and_unit_offset returns NULL for references with a variable
1585 offset such as array[var_index]. */
1586 if (!base)
1588 gassign *stmt;
1589 tree tmp, addr;
1591 gcc_checking_assert (gsi);
1592 tmp = make_ssa_name (build_pointer_type (TREE_TYPE (prev_base)));
1593 addr = build_fold_addr_expr (unshare_expr (prev_base));
1594 STRIP_USELESS_TYPE_CONVERSION (addr);
1595 stmt = gimple_build_assign (tmp, addr);
1596 gimple_set_location (stmt, loc);
1597 if (insert_after)
1598 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
1599 else
1600 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1602 off = build_int_cst (reference_alias_ptr_type (prev_base), byte_offset);
1603 base = tmp;
1605 else if (TREE_CODE (base) == MEM_REF)
1607 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1608 base_offset + byte_offset);
1609 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1610 base = unshare_expr (TREE_OPERAND (base, 0));
1612 else
1614 off = build_int_cst (reference_alias_ptr_type (prev_base),
1615 base_offset + byte_offset);
1616 base = build_fold_addr_expr (unshare_expr (base));
1619 unsigned int align_bound = known_alignment (misalign + offset);
1620 if (align_bound != 0)
1621 align = MIN (align, align_bound);
1622 if (align != TYPE_ALIGN (exp_type))
1623 exp_type = build_aligned_type (exp_type, align);
1625 mem_ref = fold_build2_loc (loc, MEM_REF, exp_type, base, off);
1626 REF_REVERSE_STORAGE_ORDER (mem_ref) = reverse;
1627 if (TREE_THIS_VOLATILE (prev_base))
1628 TREE_THIS_VOLATILE (mem_ref) = 1;
1629 if (TREE_SIDE_EFFECTS (prev_base))
1630 TREE_SIDE_EFFECTS (mem_ref) = 1;
1631 return mem_ref;
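/* As an illustration with made-up values: for BASE being a plain VAR_DECL v,
   OFFSET 32 and EXP_TYPE int, the reference built above would dump as
   something like MEM[(int *)&v + 4B], with the type's alignment lowered
   beforehand if the known alignment of the location requires it.  */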
1634 /* Construct and return a memory reference that is equal to a portion of
1635 MODEL->expr but is based on BASE. If this cannot be done, return NULL. */
1637 static tree
1638 build_reconstructed_reference (location_t, tree base, struct access *model)
1640 tree expr = model->expr, prev_expr = NULL;
1641 while (!types_compatible_p (TREE_TYPE (expr), TREE_TYPE (base)))
1643 if (!handled_component_p (expr))
1644 return NULL_TREE;
1645 prev_expr = expr;
1646 expr = TREE_OPERAND (expr, 0);
1649 /* Guard against broken VIEW_CONVERT_EXPRs... */
1650 if (!prev_expr)
1651 return NULL_TREE;
1653 TREE_OPERAND (prev_expr, 0) = base;
1654 tree ref = unshare_expr (model->expr);
1655 TREE_OPERAND (prev_expr, 0) = expr;
1656 return ref;
1659 /* Construct a memory reference to a part of an aggregate BASE at the given
1660 OFFSET and of the same type as MODEL. In case this is a reference to a
1661 bit-field, the function will replicate the last component_ref of model's
1662 expr to access it. GSI and INSERT_AFTER have the same meaning as in
1663 build_ref_for_offset. */
1665 static tree
1666 build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1667 struct access *model, gimple_stmt_iterator *gsi,
1668 bool insert_after)
1670 if (TREE_CODE (model->expr) == COMPONENT_REF
1671 && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
1673 /* This access represents a bit-field. */
1674 tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);
1676 offset -= int_bit_position (fld);
1677 exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
1678 t = build_ref_for_offset (loc, base, offset, model->reverse, exp_type,
1679 gsi, insert_after);
1680 /* The flag will be set on the record type. */
1681 REF_REVERSE_STORAGE_ORDER (t) = 0;
1682 return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
1683 NULL_TREE);
1685 else
1687 tree res;
1688 if (model->grp_same_access_path
1689 && !TREE_THIS_VOLATILE (base)
1690 && (TYPE_ADDR_SPACE (TREE_TYPE (base))
1691 == TYPE_ADDR_SPACE (TREE_TYPE (model->expr)))
1692 && offset <= model->offset
1693 /* build_reconstructed_reference can still fail if we have already
1694 massaged BASE because of another type incompatibility. */
1695 && (res = build_reconstructed_reference (loc, base, model)))
1696 return res;
1697 else
1698 return build_ref_for_offset (loc, base, offset, model->reverse,
1699 model->type, gsi, insert_after);
/* Attempt to build a memory reference that we could put into a gimple
   debug_bind statement.  Similar to build_ref_for_model but punts if it has to
   create statements and returns NULL instead.  This function also ignores
   alignment issues and so its results should never end up in non-debug
   statements.  */
1709 static tree
1710 build_debug_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1711 struct access *model)
1713 poly_int64 base_offset;
1714 tree off;
1716 if (TREE_CODE (model->expr) == COMPONENT_REF
1717 && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
1718 return NULL_TREE;
1720 base = get_addr_base_and_unit_offset (base, &base_offset);
1721 if (!base)
1722 return NULL_TREE;
1723 if (TREE_CODE (base) == MEM_REF)
1725 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1726 base_offset + offset / BITS_PER_UNIT);
1727 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1728 base = unshare_expr (TREE_OPERAND (base, 0));
1730 else
1732 off = build_int_cst (reference_alias_ptr_type (base),
1733 base_offset + offset / BITS_PER_UNIT);
1734 base = build_fold_addr_expr (unshare_expr (base));
1737 return fold_build2_loc (loc, MEM_REF, model->type, base, off);
/* Construct a memory reference consisting of component_refs and array_refs to
   a part of an aggregate *RES (which is of type TYPE).  The requested part
   should have type EXP_TYPE and be at the given OFFSET.  This function might
   not succeed, it returns true when it does and only then *RES points to
   something meaningful.  This function should be used only to build
   expressions that we might need to present to the user (e.g. in warnings).
   In all other situations, build_ref_for_model or build_ref_for_offset should
   be used instead.  */
1748 static bool
1749 build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
1750 tree exp_type)
1752 while (1)
1754 tree fld;
1755 tree tr_size, index, minidx;
1756 HOST_WIDE_INT el_size;
1758 if (offset == 0 && exp_type
1759 && types_compatible_p (exp_type, type))
1760 return true;
1762 switch (TREE_CODE (type))
1764 case UNION_TYPE:
1765 case QUAL_UNION_TYPE:
1766 case RECORD_TYPE:
1767 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1769 HOST_WIDE_INT pos, size;
1770 tree tr_pos, expr, *expr_ptr;
1772 if (TREE_CODE (fld) != FIELD_DECL)
1773 continue;
1775 tr_pos = bit_position (fld);
1776 if (!tr_pos || !tree_fits_uhwi_p (tr_pos))
1777 continue;
1778 pos = tree_to_uhwi (tr_pos);
1779 gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
1780 tr_size = DECL_SIZE (fld);
1781 if (!tr_size || !tree_fits_uhwi_p (tr_size))
1782 continue;
1783 size = tree_to_uhwi (tr_size);
1784 if (size == 0)
1786 if (pos != offset)
1787 continue;
1789 else if (pos > offset || (pos + size) <= offset)
1790 continue;
1792 expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
1793 NULL_TREE);
1794 expr_ptr = &expr;
1795 if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
1796 offset - pos, exp_type))
1798 *res = expr;
1799 return true;
1802 return false;
1804 case ARRAY_TYPE:
1805 tr_size = TYPE_SIZE (TREE_TYPE (type));
1806 if (!tr_size || !tree_fits_uhwi_p (tr_size))
1807 return false;
1808 el_size = tree_to_uhwi (tr_size);
1810 minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
1811 if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
1812 return false;
1813 index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
1814 if (!integer_zerop (minidx))
1815 index = int_const_binop (PLUS_EXPR, index, minidx);
1816 *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
1817 NULL_TREE, NULL_TREE);
1818 offset = offset % el_size;
1819 type = TREE_TYPE (type);
1820 break;
1822 default:
1823 if (offset != 0)
1824 return false;
1826 if (exp_type)
1827 return false;
1828 else
1829 return true;
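/* For example, for *RES denoting a variable of TYPE struct { int a; int b; },
   OFFSET 32 and EXP_TYPE int (assuming a 32-bit int), the loop above wraps
   *RES in a COMPONENT_REF for field b and returns true.  */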
1834 /* Print message to dump file why a variable was rejected. */
1836 static void
1837 reject (tree var, const char *msg)
1839 if (dump_file && (dump_flags & TDF_DETAILS))
1841 fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
1842 print_generic_expr (dump_file, var);
1843 fprintf (dump_file, "\n");
1847 /* Return true if VAR is a candidate for SRA. */
1849 static bool
1850 maybe_add_sra_candidate (tree var)
1852 tree type = TREE_TYPE (var);
1853 const char *msg;
1854 tree_node **slot;
1856 if (!AGGREGATE_TYPE_P (type))
1858 reject (var, "not aggregate");
1859 return false;
1861 /* Allow constant-pool entries that "need to live in memory". */
1862 if (needs_to_live_in_memory (var) && !constant_decl_p (var))
1864 reject (var, "needs to live in memory");
1865 return false;
1867 if (TREE_THIS_VOLATILE (var))
1869 reject (var, "is volatile");
1870 return false;
1872 if (!COMPLETE_TYPE_P (type))
1874 reject (var, "has incomplete type");
1875 return false;
1877 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
1879 reject (var, "type size not fixed");
1880 return false;
1882 if (tree_to_uhwi (TYPE_SIZE (type)) == 0)
1884 reject (var, "type size is zero");
1885 return false;
1887 if (type_internals_preclude_sra_p (type, &msg))
1889 reject (var, msg);
1890 return false;
1892 if (/* Fix for PR 41089. tree-stdarg.c needs to have va_lists intact but
1893 we also want to schedule it rather late. Thus we ignore it in
1894 the early pass. */
1895 (sra_mode == SRA_MODE_EARLY_INTRA
1896 && is_va_list_type (type)))
1898 reject (var, "is va_list");
1899 return false;
1902 bitmap_set_bit (candidate_bitmap, DECL_UID (var));
1903 slot = candidates->find_slot_with_hash (var, DECL_UID (var), INSERT);
1904 *slot = var;
1906 if (dump_file && (dump_flags & TDF_DETAILS))
1908 fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
1909 print_generic_expr (dump_file, var);
1910 fprintf (dump_file, "\n");
1913 return true;
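/* As an illustration of the checks above, in a function containing

     volatile struct S vs;  /+ rejected: is volatile +/
     struct S *p;           /+ rejected: not an aggregate type +/
     struct S s;            /+ accepted, assuming nothing forces it into
                               memory and its type passes the checks +/

   only s would be entered into candidate_bitmap and the candidates hash
   table.  (Purely an illustration of this function's behavior.)  */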
1916 /* The very first phase of intraprocedural SRA. It marks in candidate_bitmap
1917 those declarations whose type is suitable for scalarization. */
1919 static bool
1920 find_var_candidates (void)
1922 tree var, parm;
1923 unsigned int i;
1924 bool ret = false;
1926 for (parm = DECL_ARGUMENTS (current_function_decl);
1927 parm;
1928 parm = DECL_CHAIN (parm))
1929 ret |= maybe_add_sra_candidate (parm);
1931 FOR_EACH_LOCAL_DECL (cfun, i, var)
1933 if (!VAR_P (var))
1934 continue;
1936 ret |= maybe_add_sra_candidate (var);
1939 return ret;
1942 /* Return true if EXP is a reference chain of COMPONENT_REFs and ARRAY_REFs
1943 ending either with a DECL or a MEM_REF with zero offset. */
1945 static bool
1946 path_comparable_for_same_access (tree expr)
1948 while (handled_component_p (expr))
1950 if (TREE_CODE (expr) == ARRAY_REF)
1952 /* SSA name indices can occur here too when the array is of size one.
1953 But we cannot just re-use array_refs with SSA names elsewhere in
1954 the function, so disallow non-constant indices. TODO: Remove this
1955 limitation after teaching build_reconstructed_reference to replace
1956 the index with the index type lower bound. */
1957 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST)
1958 return false;
1960 expr = TREE_OPERAND (expr, 0);
1963 if (TREE_CODE (expr) == MEM_REF)
1965 if (!zerop (TREE_OPERAND (expr, 1)))
1966 return false;
1968 else
1969 gcc_assert (DECL_P (expr));
1971 return true;
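/* For instance (purely illustrative), s.fld[2].x is comparable for the
   purposes of same_access_path_p below, whereas s.fld[i].x with a
   non-constant i is not, and neither is a chain ending in a MEM_REF with a
   non-zero offset.  */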
1974 /* Assuming that EXP1 consists of only COMPONENT_REFs and ARRAY_REFs, return
1975 true if the chain of these handled components are exactly the same as EXP2
1976 and the expression under them is the same DECL or an equivalent MEM_REF.
1977 The reference picked by compare_access_positions must go to EXP1. */
1979 static bool
1980 same_access_path_p (tree exp1, tree exp2)
1982 if (TREE_CODE (exp1) != TREE_CODE (exp2))
1984 /* Special case single-field structures loaded sometimes as the field
1985 and sometimes as the structure. If the field is of a scalar type,
1986 compare_access_positions will put it into exp1.
1988 TODO: The gimple register type condition can be removed if we teach
1989 compare_access_positions to put inner types first. */
1990 if (is_gimple_reg_type (TREE_TYPE (exp1))
1991 && TREE_CODE (exp1) == COMPONENT_REF
1992 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (exp1, 0)))
1993 == TYPE_MAIN_VARIANT (TREE_TYPE (exp2))))
1994 exp1 = TREE_OPERAND (exp1, 0);
1995 else
1996 return false;
1999 if (!operand_equal_p (exp1, exp2, OEP_ADDRESS_OF))
2000 return false;
2002 return true;
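/* E.g. for a hypothetical

     struct W { int x; } w;

   a scalar access to w.x and an aggregate access to the whole of w compare
   as having the same path here: the special case above peels the
   COMPONENT_REF w.x down to w before the operand comparison.  */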
2005 /* Sort all accesses for the given variable, check for partial overlaps and
2006 return NULL if there are any. If there are none, pick a representative for
2007 each combination of offset and size and create a linked list out of them.
2008 Return the pointer to the first representative and make sure it is the first
2009 one in the vector of accesses. */
2011 static struct access *
2012 sort_and_splice_var_accesses (tree var)
2014 int i, j, access_count;
2015 struct access *res, **prev_acc_ptr = &res;
2016 vec<access_p> *access_vec;
2017 bool first = true;
2018 HOST_WIDE_INT low = -1, high = 0;
2020 access_vec = get_base_access_vector (var);
2021 if (!access_vec)
2022 return NULL;
2023 access_count = access_vec->length ();
2025 /* Sort by <OFFSET, SIZE>. */
2026 access_vec->qsort (compare_access_positions);
2028 i = 0;
2029 while (i < access_count)
2031 struct access *access = (*access_vec)[i];
2032 bool grp_write = access->write;
2033 bool grp_read = !access->write;
2034 bool grp_scalar_write = access->write
2035 && is_gimple_reg_type (access->type);
2036 bool grp_scalar_read = !access->write
2037 && is_gimple_reg_type (access->type);
2038 bool grp_assignment_read = access->grp_assignment_read;
2039 bool grp_assignment_write = access->grp_assignment_write;
2040 bool multiple_scalar_reads = false;
2041 bool grp_partial_lhs = access->grp_partial_lhs;
2042 bool first_scalar = is_gimple_reg_type (access->type);
2043 bool unscalarizable_region = access->grp_unscalarizable_region;
2044 bool grp_same_access_path = true;
2045 bool bf_non_full_precision
2046 = (INTEGRAL_TYPE_P (access->type)
2047 && TYPE_PRECISION (access->type) != access->size
2048 && TREE_CODE (access->expr) == COMPONENT_REF
2049 && DECL_BIT_FIELD (TREE_OPERAND (access->expr, 1)));
2051 if (first || access->offset >= high)
2053 first = false;
2054 low = access->offset;
2055 high = access->offset + access->size;
2057 else if (access->offset > low && access->offset + access->size > high)
2058 return NULL;
2059 else
2060 gcc_assert (access->offset >= low
2061 && access->offset + access->size <= high);
2063 grp_same_access_path = path_comparable_for_same_access (access->expr);
2065 j = i + 1;
2066 while (j < access_count)
2068 struct access *ac2 = (*access_vec)[j];
2069 if (ac2->offset != access->offset || ac2->size != access->size)
2070 break;
2071 if (ac2->write)
2073 grp_write = true;
2074 grp_scalar_write = (grp_scalar_write
2075 || is_gimple_reg_type (ac2->type));
2077 else
2079 grp_read = true;
2080 if (is_gimple_reg_type (ac2->type))
2082 if (grp_scalar_read)
2083 multiple_scalar_reads = true;
2084 else
2085 grp_scalar_read = true;
2088 grp_assignment_read |= ac2->grp_assignment_read;
2089 grp_assignment_write |= ac2->grp_assignment_write;
2090 grp_partial_lhs |= ac2->grp_partial_lhs;
2091 unscalarizable_region |= ac2->grp_unscalarizable_region;
2092 relink_to_new_repr (access, ac2);
2094 /* If there are both aggregate-type and scalar-type accesses with
2095 this combination of size and offset, the comparison function
2096 should have put the scalars first. */
2097 gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
2098 /* It also prefers integral types to non-integral. However, when the
2099 precision of the selected type does not span the entire area and
2100 should also be used for a non-integer (i.e. float), we must not
2101 let that happen. Normally analyze_access_subtree expands the type
2102 to cover the entire area but for bit-fields it doesn't. */
2103 if (bf_non_full_precision && !INTEGRAL_TYPE_P (ac2->type))
2105 if (dump_file && (dump_flags & TDF_DETAILS))
2107 fprintf (dump_file, "Cannot scalarize the following access "
2108 "because insufficient precision integer type was "
2109 "selected.\n ");
2110 dump_access (dump_file, access, false);
2112 unscalarizable_region = true;
2115 if (grp_same_access_path
2116 && !same_access_path_p (access->expr, ac2->expr))
2117 grp_same_access_path = false;
2119 ac2->group_representative = access;
2120 j++;
2123 i = j;
2125 access->group_representative = access;
2126 access->grp_write = grp_write;
2127 access->grp_read = grp_read;
2128 access->grp_scalar_read = grp_scalar_read;
2129 access->grp_scalar_write = grp_scalar_write;
2130 access->grp_assignment_read = grp_assignment_read;
2131 access->grp_assignment_write = grp_assignment_write;
2132 access->grp_hint = multiple_scalar_reads && !constant_decl_p (var);
2133 access->grp_partial_lhs = grp_partial_lhs;
2134 access->grp_unscalarizable_region = unscalarizable_region;
2135 access->grp_same_access_path = grp_same_access_path;
2137 *prev_acc_ptr = access;
2138 prev_acc_ptr = &access->next_grp;
2141 gcc_assert (res == (*access_vec)[0]);
2142 return res;
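/* A small illustrative example: accesses at <offset, size> pairs <0, 32>,
   <0, 32> and <32, 32> yield two representatives, the two <0, 32> accesses
   having been spliced into a single group.  An additional access at
   <16, 32> would partially overlap both groups and make this function
   return NULL.  */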
2145 /* Create a variable for the given ACCESS which determines the type, name and a
2146 few other properties. Return the variable declaration and store it also to
2147 ACCESS->replacement. REG_TREE is used when creating a declaration to base a
2148 default-definition SSA name on in order to facilitate an uninitialized
2149 warning. It is used instead of the actual ACCESS type if that is not of a
2150 gimple register type. */
2152 static tree
2153 create_access_replacement (struct access *access, tree reg_type = NULL_TREE)
2155 tree repl;
2157 tree type = access->type;
2158 if (reg_type && !is_gimple_reg_type (type))
2159 type = reg_type;
2161 if (access->grp_to_be_debug_replaced)
2163 repl = create_tmp_var_raw (access->type);
2164 DECL_CONTEXT (repl) = current_function_decl;
2166 else
2167 /* Drop any special alignment on the type if it's not on the main
2168 variant. This avoids issues with weirdo ABIs like AAPCS. */
2169 repl = create_tmp_var (build_qualified_type (TYPE_MAIN_VARIANT (type),
2170 TYPE_QUALS (type)), "SR");
2171 if (TREE_CODE (type) == COMPLEX_TYPE
2172 || TREE_CODE (type) == VECTOR_TYPE)
2174 if (!access->grp_partial_lhs)
2175 DECL_GIMPLE_REG_P (repl) = 1;
2177 else if (access->grp_partial_lhs
2178 && is_gimple_reg_type (type))
2179 TREE_ADDRESSABLE (repl) = 1;
2181 DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
2182 DECL_ARTIFICIAL (repl) = 1;
2183 DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
2185 if (DECL_NAME (access->base)
2186 && !DECL_IGNORED_P (access->base)
2187 && !DECL_ARTIFICIAL (access->base))
2189 char *pretty_name = make_fancy_name (access->expr);
2190 tree debug_expr = unshare_expr_without_location (access->expr), d;
2191 bool fail = false;
2193 DECL_NAME (repl) = get_identifier (pretty_name);
2194 DECL_NAMELESS (repl) = 1;
2195 obstack_free (&name_obstack, pretty_name);
2197 /* Get rid of any SSA_NAMEs embedded in debug_expr,
2198 as DECL_DEBUG_EXPR isn't considered when looking for still
2199 used SSA_NAMEs and thus they could be freed. All debug info
2200 generation cares about is whether something is constant or variable
2201 and that get_ref_base_and_extent works properly on the
2202 expression. It cannot handle accesses at a non-constant offset
2203 though, so just give up in those cases. */
2204 for (d = debug_expr;
2205 !fail && (handled_component_p (d) || TREE_CODE (d) == MEM_REF);
2206 d = TREE_OPERAND (d, 0))
2207 switch (TREE_CODE (d))
2209 case ARRAY_REF:
2210 case ARRAY_RANGE_REF:
2211 if (TREE_OPERAND (d, 1)
2212 && TREE_CODE (TREE_OPERAND (d, 1)) != INTEGER_CST)
2213 fail = true;
2214 if (TREE_OPERAND (d, 3)
2215 && TREE_CODE (TREE_OPERAND (d, 3)) != INTEGER_CST)
2216 fail = true;
2217 /* FALLTHRU */
2218 case COMPONENT_REF:
2219 if (TREE_OPERAND (d, 2)
2220 && TREE_CODE (TREE_OPERAND (d, 2)) != INTEGER_CST)
2221 fail = true;
2222 break;
2223 case MEM_REF:
2224 if (TREE_CODE (TREE_OPERAND (d, 0)) != ADDR_EXPR)
2225 fail = true;
2226 else
2227 d = TREE_OPERAND (d, 0);
2228 break;
2229 default:
2230 break;
2232 if (!fail)
2234 SET_DECL_DEBUG_EXPR (repl, debug_expr);
2235 DECL_HAS_DEBUG_EXPR_P (repl) = 1;
2237 if (access->grp_no_warning)
2238 TREE_NO_WARNING (repl) = 1;
2239 else
2240 TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
2242 else
2243 TREE_NO_WARNING (repl) = 1;
2245 if (dump_file)
2247 if (access->grp_to_be_debug_replaced)
2249 fprintf (dump_file, "Created a debug-only replacement for ");
2250 print_generic_expr (dump_file, access->base);
2251 fprintf (dump_file, " offset: %u, size: %u\n",
2252 (unsigned) access->offset, (unsigned) access->size);
2254 else
2256 fprintf (dump_file, "Created a replacement for ");
2257 print_generic_expr (dump_file, access->base);
2258 fprintf (dump_file, " offset: %u, size: %u: ",
2259 (unsigned) access->offset, (unsigned) access->size);
2260 print_generic_expr (dump_file, repl, TDF_UID);
2261 fprintf (dump_file, "\n");
2264 sra_stats.replacements++;
2266 return repl;
2269 /* Return ACCESS scalar replacement, which must exist. */
2271 static inline tree
2272 get_access_replacement (struct access *access)
2274 gcc_checking_assert (access->replacement_decl);
2275 return access->replacement_decl;
2279 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
2280 linked list along the way. Stop when *ACCESS is NULL or the access pointed
2281 to by it is not "within" the root. Return false iff some accesses partially
2282 overlap. */
2284 static bool
2285 build_access_subtree (struct access **access)
2287 struct access *root = *access, *last_child = NULL;
2288 HOST_WIDE_INT limit = root->offset + root->size;
2290 *access = (*access)->next_grp;
2291 while (*access && (*access)->offset + (*access)->size <= limit)
2293 if (!last_child)
2294 root->first_child = *access;
2295 else
2296 last_child->next_sibling = *access;
2297 last_child = *access;
2298 (*access)->parent = root;
2299 (*access)->grp_write |= root->grp_write;
2301 if (!build_access_subtree (access))
2302 return false;
2305 if (*access && (*access)->offset < limit)
2306 return false;
2308 return true;
2311 /* Build a tree of access representatives, ACCESS is the pointer to the first
2312 one, others are linked in a list by the next_grp field. Return false iff
2313 some accesses partially overlap. */
2315 static bool
2316 build_access_trees (struct access *access)
2318 while (access)
2320 struct access *root = access;
2322 if (!build_access_subtree (&access))
2323 return false;
2324 root->next_grp = access;
2326 return true;
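/* For example, representatives at <0, 64>, <0, 32> and <32, 32> (already
   in this order, as produced by sort_and_splice_var_accesses) form a tree
   in which <0, 32> and <32, 32> become children of <0, 64>.  */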
2329 /* Traverse the access forest where ROOT is the first root and verify that
2330 various important invariants hold true. */
2332 DEBUG_FUNCTION void
2333 verify_sra_access_forest (struct access *root)
2335 struct access *access = root;
2336 tree first_base = root->base;
2337 gcc_assert (DECL_P (first_base));
2340 gcc_assert (access->base == first_base);
2341 if (access->parent)
2342 gcc_assert (access->offset >= access->parent->offset
2343 && access->size <= access->parent->size);
2344 if (access->next_sibling)
2345 gcc_assert (access->next_sibling->offset
2346 >= access->offset + access->size);
2348 poly_int64 poffset, psize, pmax_size;
2349 bool reverse;
2350 tree base = get_ref_base_and_extent (access->expr, &poffset, &psize,
2351 &pmax_size, &reverse);
2352 HOST_WIDE_INT offset, size, max_size;
2353 if (!poffset.is_constant (&offset)
2354 || !psize.is_constant (&size)
2355 || !pmax_size.is_constant (&max_size))
2356 gcc_unreachable ();
2357 gcc_assert (base == first_base);
2358 gcc_assert (offset == access->offset);
2359 gcc_assert (access->grp_unscalarizable_region
2360 || access->grp_total_scalarization
2361 || size == max_size);
2362 gcc_assert (access->grp_unscalarizable_region
2363 || !is_gimple_reg_type (access->type)
2364 || size == access->size);
2365 gcc_assert (reverse == access->reverse);
2367 if (access->first_child)
2369 gcc_assert (access->first_child->parent == access);
2370 access = access->first_child;
2372 else if (access->next_sibling)
2374 gcc_assert (access->next_sibling->parent == access->parent);
2375 access = access->next_sibling;
2377 else
2379 while (access->parent && !access->next_sibling)
2380 access = access->parent;
2381 if (access->next_sibling)
2382 access = access->next_sibling;
2383 else
2385 gcc_assert (access == root);
2386 root = root->next_grp;
2387 access = root;
2391 while (access);
2394 /* Verify access forests of all candidates with accesses by calling
2395 verify_sra_access_forest on each of them. */
2397 DEBUG_FUNCTION void
2398 verify_all_sra_access_forests (void)
2400 bitmap_iterator bi;
2401 unsigned i;
2402 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2404 tree var = candidate (i);
2405 struct access *access = get_first_repr_for_decl (var);
2406 if (access)
2408 gcc_assert (access->base == var);
2409 verify_sra_access_forest (access);
2414 /* Return true if EXPR contains some ARRAY_REFs into a variable-bounded
2415 array. */
2417 static bool
2418 expr_with_var_bounded_array_refs_p (tree expr)
2420 while (handled_component_p (expr))
2422 if (TREE_CODE (expr) == ARRAY_REF
2423 && !tree_fits_shwi_p (array_ref_low_bound (expr)))
2424 return true;
2425 expr = TREE_OPERAND (expr, 0);
2427 return false;
2430 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
2431 both seeming beneficial and when ALLOW_REPLACEMENTS allows it. If TOTALLY
2432 is set, we are totally scalarizing the aggregate. Also set all sorts of
2433 access flags appropriately along the way, notably propagating grp_read,
2434 grp_assignment_read, grp_write, grp_assignment_write and
2435 grp_same_access_path from the parent to the children.
2437 Creating a replacement for a scalar access is considered beneficial if its
2438 grp_hint or TOTALLY is set (this means either that there is more than one
2439 direct read access or that we are attempting total scalarization) or
2440 according to the following table:
2442 Access written to through a scalar type (once or more times)
2444 | Written to in an assignment statement
2446 | | Access read as scalar _once_
2447 | | |
2448 | | | Read in an assignment statement
2449 | | | |
2450 | | | | Scalarize Comment
2451 -----------------------------------------------------------------------------
2452 0 0 0 0 No access for the scalar
2453 0 0 0 1 No access for the scalar
2454 0 0 1 0 No Single read - won't help
2455 0 0 1 1 No The same case
2456 0 1 0 0 No access for the scalar
2457 0 1 0 1 No access for the scalar
2458 0 1 1 0 Yes s = *g; return s.i;
2459 0 1 1 1 Yes The same case as above
2460 1 0 0 0 No Won't help
2461 1 0 0 1 Yes s.i = 1; *g = s;
2462 1 0 1 0 Yes s.i = 5; g = s.i;
2463 1 0 1 1 Yes The same case as above
2464 1 1 0 0 No Won't help.
2465 1 1 0 1 Yes s.i = 1; *g = s;
2466 1 1 1 0 Yes s = *g; return s.i;
2467 1 1 1 1 Yes Any of the above yeses */
2469 static bool
2470 analyze_access_subtree (struct access *root, struct access *parent,
2471 bool allow_replacements, bool totally)
2473 struct access *child;
2474 HOST_WIDE_INT limit = root->offset + root->size;
2475 HOST_WIDE_INT covered_to = root->offset;
2476 bool scalar = is_gimple_reg_type (root->type);
2477 bool hole = false, sth_created = false;
2479 if (parent)
2481 if (parent->grp_read)
2482 root->grp_read = 1;
2483 if (parent->grp_assignment_read)
2484 root->grp_assignment_read = 1;
2485 if (parent->grp_write)
2486 root->grp_write = 1;
2487 if (parent->grp_assignment_write)
2488 root->grp_assignment_write = 1;
2489 if (!parent->grp_same_access_path)
2490 root->grp_same_access_path = 0;
2493 if (root->grp_unscalarizable_region)
2494 allow_replacements = false;
2496 if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
2497 allow_replacements = false;
2499 for (child = root->first_child; child; child = child->next_sibling)
2501 hole |= covered_to < child->offset;
2502 sth_created |= analyze_access_subtree (child, root,
2503 allow_replacements && !scalar,
2504 totally);
2506 root->grp_unscalarized_data |= child->grp_unscalarized_data;
2507 if (child->grp_covered)
2508 covered_to += child->size;
2509 else
2510 hole = true;
2513 if (allow_replacements && scalar && !root->first_child
2514 && (totally || !root->grp_total_scalarization)
2515 && (totally
2516 || root->grp_hint
2517 || ((root->grp_scalar_read || root->grp_assignment_read)
2518 && (root->grp_scalar_write || root->grp_assignment_write))))
2520 /* Always create access replacements that cover the whole access.
2521 For integral types this means the precision has to match.
2522 Avoid assumptions based on the integral type kind, too. */
2523 if (INTEGRAL_TYPE_P (root->type)
2524 && (TREE_CODE (root->type) != INTEGER_TYPE
2525 || TYPE_PRECISION (root->type) != root->size)
2526 /* But leave bitfield accesses alone. */
2527 && (TREE_CODE (root->expr) != COMPONENT_REF
2528 || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
2530 tree rt = root->type;
2531 gcc_assert ((root->offset % BITS_PER_UNIT) == 0
2532 && (root->size % BITS_PER_UNIT) == 0);
2533 root->type = build_nonstandard_integer_type (root->size,
2534 TYPE_UNSIGNED (rt));
2535 root->expr = build_ref_for_offset (UNKNOWN_LOCATION, root->base,
2536 root->offset, root->reverse,
2537 root->type, NULL, false);
2539 if (dump_file && (dump_flags & TDF_DETAILS))
2541 fprintf (dump_file, "Changing the type of a replacement for ");
2542 print_generic_expr (dump_file, root->base);
2543 fprintf (dump_file, " offset: %u, size: %u ",
2544 (unsigned) root->offset, (unsigned) root->size);
2545 fprintf (dump_file, " to an integer.\n");
2549 root->grp_to_be_replaced = 1;
2550 root->replacement_decl = create_access_replacement (root);
2551 sth_created = true;
2552 hole = false;
2554 else
2556 if (allow_replacements
2557 && scalar && !root->first_child
2558 && !root->grp_total_scalarization
2559 && (root->grp_scalar_write || root->grp_assignment_write)
2560 && !bitmap_bit_p (cannot_scalarize_away_bitmap,
2561 DECL_UID (root->base)))
2563 gcc_checking_assert (!root->grp_scalar_read
2564 && !root->grp_assignment_read);
2565 sth_created = true;
2566 if (MAY_HAVE_DEBUG_BIND_STMTS)
2568 root->grp_to_be_debug_replaced = 1;
2569 root->replacement_decl = create_access_replacement (root);
2573 if (covered_to < limit)
2574 hole = true;
2575 if (scalar || !allow_replacements)
2576 root->grp_total_scalarization = 0;
2579 if (!hole || totally)
2580 root->grp_covered = 1;
2581 else if (root->grp_write || comes_initialized_p (root->base))
2582 root->grp_unscalarized_data = 1; /* not covered and written to */
2583 return sth_created;
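/* As an illustration of the type adjustment above: an access of type bool
   (precision 1 on typical targets) spanning 8 bits is given an 8-bit
   nonstandard integer type here, so that the replacement covers the whole
   accessed area.  */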
2586 /* Analyze all access trees linked by next_grp by the means of
2587 analyze_access_subtree. */
2588 static bool
2589 analyze_access_trees (struct access *access)
2591 bool ret = false;
2593 while (access)
2595 if (analyze_access_subtree (access, NULL, true,
2596 access->grp_total_scalarization))
2597 ret = true;
2598 access = access->next_grp;
2601 return ret;
2604 /* Return true iff a potential new child of ACC at offset OFFSET and with size
2605 SIZE would conflict with an already existing one. If exactly such a child
2606 already exists in ACC, store a pointer to it in EXACT_MATCH. */
2608 static bool
2609 child_would_conflict_in_acc (struct access *acc, HOST_WIDE_INT norm_offset,
2610 HOST_WIDE_INT size, struct access **exact_match)
2612 struct access *child;
2614 for (child = acc->first_child; child; child = child->next_sibling)
2616 if (child->offset == norm_offset && child->size == size)
2618 *exact_match = child;
2619 return true;
2622 if (child->offset < norm_offset + size
2623 && child->offset + child->size > norm_offset)
2624 return true;
2627 return false;
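/* E.g. with existing children at <0, 32> and <32, 32>, a potential new
   child at <32, 32> is reported as an exact match, one at <16, 32> as a
   conflict (it overlaps both), and one at <64, 32> as neither.
   (Illustrative values only.)  */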
2630 /* Create a new child access of PARENT, with all properties just like MODEL
2631 except for its offset. Return the new access or NULL if it cannot be
2632 created. Note that this access is created long after all splicing and
2633 sorting, it's not located in any access vector and is automatically a
2634 representative of its group. Set the grp_read and grp_write flags of the
2635 new access according to SET_GRP_READ and SET_GRP_WRITE. */
2637 static struct access *
2638 create_artificial_child_access (struct access *parent, struct access *model,
2639 HOST_WIDE_INT new_offset,
2640 bool set_grp_read, bool set_grp_write)
2642 struct access **child;
2643 tree expr = parent->base;
2645 gcc_assert (!model->grp_unscalarizable_region);
2647 struct access *access = access_pool.allocate ();
2648 memset (access, 0, sizeof (struct access));
2649 if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2650 model->type))
2652 access->grp_no_warning = true;
2653 expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2654 new_offset, model, NULL, false);
2657 access->base = parent->base;
2658 access->expr = expr;
2659 access->offset = new_offset;
2660 access->size = model->size;
2661 access->type = model->type;
2662 access->parent = parent;
2663 access->grp_read = set_grp_read;
2664 access->grp_write = set_grp_write;
2665 access->reverse = model->reverse;
2667 child = &parent->first_child;
2668 while (*child && (*child)->offset < new_offset)
2669 child = &(*child)->next_sibling;
2671 access->next_sibling = *child;
2672 *child = access;
2674 return access;
2678 /* Beginning with ACCESS, traverse its whole access subtree and mark all
2679 sub-trees as written to. If any of them has not been marked so previously
2680 and has assignment links leading from it, re-enqueue it. */
2682 static void
2683 subtree_mark_written_and_rhs_enqueue (struct access *access)
2685 if (access->grp_write)
2686 return;
2687 access->grp_write = true;
2688 add_access_to_rhs_work_queue (access);
2690 struct access *child;
2691 for (child = access->first_child; child; child = child->next_sibling)
2692 subtree_mark_written_and_rhs_enqueue (child);
2695 /* If there is still budget to create a propagation access for DECL, return
2696 true and decrement the budget. Otherwise return false. */
2698 static bool
2699 budget_for_propagation_access (tree decl)
2701 unsigned b, *p = propagation_budget->get (decl);
2702 if (p)
2703 b = *p;
2704 else
2705 b = param_sra_max_propagations;
2707 if (b == 0)
2708 return false;
2709 b--;
2711 if (b == 0 && dump_file && (dump_flags & TDF_DETAILS))
2713 fprintf (dump_file, "The propagation budget of ");
2714 print_generic_expr (dump_file, decl);
2715 fprintf (dump_file, " (UID: %u) has been exhausted.\n", DECL_UID (decl));
2717 propagation_budget->put (decl, b);
2718 return true;
2721 /* Propagate subaccesses and grp_write flags of RACC across an assignment link
2722 to LACC. Enqueue sub-accesses as necessary so that the write flag is
2723 propagated transitively. Return true if anything changed. Additionally, if
2724 RACC is a scalar access but LACC is not, change the type of the latter, if
2725 possible. */
2727 static bool
2728 propagate_subaccesses_from_rhs (struct access *lacc, struct access *racc)
2730 struct access *rchild;
2731 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2732 bool ret = false;
2734 /* If the LHS is still not marked as being written to, we only need to do so
2735 if the RHS at this level actually was. */
2736 if (!lacc->grp_write)
2738 gcc_checking_assert (!comes_initialized_p (racc->base));
2739 if (racc->grp_write)
2741 subtree_mark_written_and_rhs_enqueue (lacc);
2742 ret = true;
2746 if (is_gimple_reg_type (lacc->type)
2747 || lacc->grp_unscalarizable_region
2748 || racc->grp_unscalarizable_region)
2750 if (!lacc->grp_write)
2752 ret = true;
2753 subtree_mark_written_and_rhs_enqueue (lacc);
2755 return ret;
2758 if (is_gimple_reg_type (racc->type))
2760 if (!lacc->grp_write)
2762 ret = true;
2763 subtree_mark_written_and_rhs_enqueue (lacc);
2765 if (!lacc->first_child && !racc->first_child)
2767 tree t = lacc->base;
2769 lacc->type = racc->type;
2770 if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
2771 lacc->offset, racc->type))
2773 lacc->expr = t;
2774 lacc->grp_same_access_path = true;
2776 else
2778 lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2779 lacc->base, lacc->offset,
2780 racc, NULL, false);
2781 lacc->grp_no_warning = true;
2782 lacc->grp_same_access_path = false;
2785 return ret;
2788 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2790 struct access *new_acc = NULL;
2791 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2793 if (child_would_conflict_in_acc (lacc, norm_offset, rchild->size,
2794 &new_acc))
2796 if (new_acc)
2798 if (!new_acc->grp_write && rchild->grp_write)
2800 gcc_assert (!lacc->grp_write);
2801 subtree_mark_written_and_rhs_enqueue (new_acc);
2802 ret = true;
2805 rchild->grp_hint = 1;
2806 new_acc->grp_hint |= new_acc->grp_read;
2807 if (rchild->first_child
2808 && propagate_subaccesses_from_rhs (new_acc, rchild))
2810 ret = 1;
2811 add_access_to_rhs_work_queue (new_acc);
2814 else
2816 if (!lacc->grp_write)
2818 ret = true;
2819 subtree_mark_written_and_rhs_enqueue (lacc);
2822 continue;
2825 if (rchild->grp_unscalarizable_region
2826 || !budget_for_propagation_access (lacc->base))
2828 if (rchild->grp_write && !lacc->grp_write)
2830 ret = true;
2831 subtree_mark_written_and_rhs_enqueue (lacc);
2833 continue;
2836 rchild->grp_hint = 1;
2837 /* Because get_ref_base_and_extent always includes padding in size for
2838 accesses to DECLs but not necessarily for COMPONENT_REFs of the same
2839 type, we might actually be attempting here to create a child of the
2840 same type as the parent. */
2841 if (!types_compatible_p (lacc->type, rchild->type))
2842 new_acc = create_artificial_child_access (lacc, rchild, norm_offset,
2843 false,
2844 (lacc->grp_write
2845 || rchild->grp_write));
2846 else
2847 new_acc = lacc;
2848 gcc_checking_assert (new_acc);
2849 if (racc->first_child)
2850 propagate_subaccesses_from_rhs (new_acc, rchild);
2852 add_access_to_rhs_work_queue (lacc);
2853 ret = true;
2856 return ret;
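/* For illustration (a rough sketch only): given

     struct S { int i; int j; } a, b;
     b.i = x;
     b.j = y;
     a = b;

   the child accesses b.i and b.j are propagated across the assign link of
   'a = b' to a, creating artificial children a.i and a.j.  This later
   allows the aggregate assignment to be rewritten as copies between the
   individual scalar replacements.  */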
2859 /* Propagate subaccesses of LACC across an assignment link to RACC if they
2860 should inhibit total scalarization of the corresponding area. No flags are
2861 being propagated in the process. Return true if anything changed. */
2863 static bool
2864 propagate_subaccesses_from_lhs (struct access *lacc, struct access *racc)
2866 if (is_gimple_reg_type (racc->type)
2867 || lacc->grp_unscalarizable_region
2868 || racc->grp_unscalarizable_region)
2869 return false;
2871 /* TODO: Do we want to set some new racc flag to stop potential total
2872 scalarization if lacc is a scalar access (and neither of the two has
2873 children)? */
2875 bool ret = false;
2876 HOST_WIDE_INT norm_delta = racc->offset - lacc->offset;
2877 for (struct access *lchild = lacc->first_child;
2878 lchild;
2879 lchild = lchild->next_sibling)
2881 struct access *matching_acc = NULL;
2882 HOST_WIDE_INT norm_offset = lchild->offset + norm_delta;
2884 if (lchild->grp_unscalarizable_region
2885 || child_would_conflict_in_acc (racc, norm_offset, lchild->size,
2886 &matching_acc)
2887 || !budget_for_propagation_access (racc->base))
2889 if (matching_acc
2890 && propagate_subaccesses_from_lhs (lchild, matching_acc))
2891 add_access_to_lhs_work_queue (matching_acc);
2892 continue;
2895 /* Because get_ref_base_and_extent always includes padding in size for
2896 accesses to DECLs but not necessarily for COMPONENT_REFs of the same
2897 type, we might actually be attempting here to create a child of the
2898 same type as the parent. */
2899 if (!types_compatible_p (racc->type, lchild->type))
2901 struct access *new_acc
2902 = create_artificial_child_access (racc, lchild, norm_offset,
2903 true, false);
2904 propagate_subaccesses_from_lhs (lchild, new_acc);
2906 else
2907 propagate_subaccesses_from_lhs (lchild, racc);
2908 ret = true;
2910 return ret;
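/* Conversely to the previous function: if, in a sketch like the one above,
   a.i were used after 'a = b', an artificial read-only child access b.i may
   be created here so that decisions about totally scalarizing b take the
   shape of the accesses to a into account.  (Illustrative sketch only.)  */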
2913 /* Propagate all subaccesses across assignment links. */
2915 static void
2916 propagate_all_subaccesses (void)
2918 propagation_budget = new hash_map<tree, unsigned>;
2919 while (rhs_work_queue_head)
2921 struct access *racc = pop_access_from_rhs_work_queue ();
2922 struct assign_link *link;
2924 if (racc->group_representative)
2925 racc = racc->group_representative;
2926 gcc_assert (racc->first_rhs_link);
2928 for (link = racc->first_rhs_link; link; link = link->next_rhs)
2930 struct access *lacc = link->lacc;
2932 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2933 continue;
2934 lacc = lacc->group_representative;
2936 bool requeue_parents = false;
2937 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (racc->base)))
2939 if (!lacc->grp_write)
2941 subtree_mark_written_and_rhs_enqueue (lacc);
2942 requeue_parents = true;
2945 else if (propagate_subaccesses_from_rhs (lacc, racc))
2946 requeue_parents = true;
2948 if (requeue_parents)
2951 add_access_to_rhs_work_queue (lacc);
2952 lacc = lacc->parent;
2954 while (lacc);
2958 while (lhs_work_queue_head)
2960 struct access *lacc = pop_access_from_lhs_work_queue ();
2961 struct assign_link *link;
2963 if (lacc->group_representative)
2964 lacc = lacc->group_representative;
2965 gcc_assert (lacc->first_lhs_link);
2967 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2968 continue;
2970 for (link = lacc->first_lhs_link; link; link = link->next_lhs)
2972 struct access *racc = link->racc;
2974 if (racc->group_representative)
2975 racc = racc->group_representative;
2976 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (racc->base)))
2977 continue;
2978 if (propagate_subaccesses_from_lhs (lacc, racc))
2979 add_access_to_lhs_work_queue (racc);
2982 delete propagation_budget;
2985 /* Return true if the forest beginning with ROOT does not contain
2986 unscalarizable regions or non-byte aligned accesses. */
2988 static bool
2989 can_totally_scalarize_forest_p (struct access *root)
2991 struct access *access = root;
2994 if (access->grp_unscalarizable_region
2995 || (access->offset % BITS_PER_UNIT) != 0
2996 || (access->size % BITS_PER_UNIT) != 0
2997 || (is_gimple_reg_type (access->type)
2998 && access->first_child))
2999 return false;
3001 if (access->first_child)
3002 access = access->first_child;
3003 else if (access->next_sibling)
3004 access = access->next_sibling;
3005 else
3007 while (access->parent && !access->next_sibling)
3008 access = access->parent;
3009 if (access->next_sibling)
3010 access = access->next_sibling;
3011 else
3013 gcc_assert (access == root);
3014 root = root->next_grp;
3015 access = root;
3019 while (access);
3020 return true;
3023 /* Create and return an ACCESS in PARENT spanning from POS with SIZE, TYPE and
3024 reference EXPR for total scalarization purposes and mark it as such. Within
3025 the children of PARENT, link it in at *PTR, immediately before NEXT_SIBLING. */
3027 static struct access *
3028 create_total_scalarization_access (struct access *parent, HOST_WIDE_INT pos,
3029 HOST_WIDE_INT size, tree type, tree expr,
3030 struct access **ptr,
3031 struct access *next_sibling)
3033 struct access *access = access_pool.allocate ();
3034 memset (access, 0, sizeof (struct access));
3035 access->base = parent->base;
3036 access->offset = pos;
3037 access->size = size;
3038 access->expr = expr;
3039 access->type = type;
3040 access->parent = parent;
3041 access->grp_write = parent->grp_write;
3042 access->grp_total_scalarization = 1;
3043 access->grp_hint = 1;
3044 access->grp_same_access_path = path_comparable_for_same_access (expr);
3045 access->reverse = reverse_storage_order_for_component_p (expr);
3047 access->next_sibling = next_sibling;
3048 *ptr = access;
3049 return access;
3052 /* Create and return an ACCESS in PARENT spanning from POS with SIZE, TYPE and
3053 reference EXPR for total scalarization purposes and mark it as such, link it
3054 at *PTR and reshape the tree so that those elements at *PTR and their
3055 siblings which fall within the part described by POS and SIZE are moved to
3056 be children of the new access. If a partial overlap is detected, return
3057 NULL. */
3059 static struct access *
3060 create_total_access_and_reshape (struct access *parent, HOST_WIDE_INT pos,
3061 HOST_WIDE_INT size, tree type, tree expr,
3062 struct access **ptr)
3064 struct access **p = ptr;
3066 while (*p && (*p)->offset < pos + size)
3068 if ((*p)->offset + (*p)->size > pos + size)
3069 return NULL;
3070 p = &(*p)->next_sibling;
3073 struct access *next_child = *ptr;
3074 struct access *new_acc
3075 = create_total_scalarization_access (parent, pos, size, type, expr,
3076 ptr, *p);
3077 if (p != ptr)
3079 new_acc->first_child = next_child;
3080 *p = NULL;
3081 for (struct access *a = next_child; a; a = a->next_sibling)
3082 a->parent = new_acc;
3084 return new_acc;
3087 static bool totally_scalarize_subtree (struct access *root);
3089 /* Return true if INNER is either the same type as OUTER or if it is the type
3090 of a record field in OUTER at offset zero, possibly in nested
3091 sub-records. */
3093 static bool
3094 access_and_field_type_match_p (tree outer, tree inner)
3096 if (TYPE_MAIN_VARIANT (outer) == TYPE_MAIN_VARIANT (inner))
3097 return true;
3098 if (TREE_CODE (outer) != RECORD_TYPE)
3099 return false;
3100 tree fld = TYPE_FIELDS (outer);
3101 while (fld)
3103 if (TREE_CODE (fld) == FIELD_DECL)
3105 if (!zerop (DECL_FIELD_OFFSET (fld)))
3106 return false;
3107 if (TYPE_MAIN_VARIANT (TREE_TYPE (fld)) == inner)
3108 return true;
3109 if (TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE)
3110 fld = TYPE_FIELDS (TREE_TYPE (fld));
3111 else
3112 return false;
3114 else
3115 fld = DECL_CHAIN (fld);
3117 return false;
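/* E.g. for a hypothetical

     struct A { struct B { int i; } b; };

   both struct B and int match struct A here, because in each case the
   (possibly nested) field sits at offset zero.  */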
3120 /* Return type of total_should_skip_creating_access indicating whether a total
3121 scalarization access for a field/element should be created, whether it
3122 already exists or whether the entire total scalarization has to fail. */
3124 enum total_sra_field_state {TOTAL_FLD_CREATE, TOTAL_FLD_DONE, TOTAL_FLD_FAILED};
3126 /* Do all the necessary steps in total scalarization when a part of the given
3127 aggregate with the given TYPE at POS and of the given SIZE should be put
3128 into PARENT and when we have processed all its siblings with smaller
3129 offsets up until and including LAST_SEEN_SIBLING (which can be NULL).
3131 If some further siblings are to be skipped, set *LAST_SEEN_SIBLING as
3132 appropriate. Return TOTAL_FLD_CREATE if the caller should carry on with
3133 creating a new access, TOTAL_FLD_DONE if access or accesses capable of
3134 representing the described part of the aggregate for the purposes of total
3135 scalarization already exist or TOTAL_FLD_FAILED if there is a problem which
3136 prevents total scalarization from happening at all. */
3138 static enum total_sra_field_state
3139 total_should_skip_creating_access (struct access *parent,
3140 struct access **last_seen_sibling,
3141 tree type, HOST_WIDE_INT pos,
3142 HOST_WIDE_INT size)
3144 struct access *next_child;
3145 if (!*last_seen_sibling)
3146 next_child = parent->first_child;
3147 else
3148 next_child = (*last_seen_sibling)->next_sibling;
3150 /* First, traverse the chain of siblings until it points to an access with
3151 offset at least equal to POS. Check all skipped accesses whether they
3152 span the POS boundary and if so, return with a failure. */
3153 while (next_child && next_child->offset < pos)
3155 if (next_child->offset + next_child->size > pos)
3156 return TOTAL_FLD_FAILED;
3157 *last_seen_sibling = next_child;
3158 next_child = next_child->next_sibling;
3161 /* Now check whether next_child has exactly the right POS and SIZE and if so,
3162 whether it can represent what we need and can be totally scalarized
3163 itself. */
3164 if (next_child && next_child->offset == pos
3165 && next_child->size == size)
3167 if (!is_gimple_reg_type (next_child->type)
3168 && (!access_and_field_type_match_p (type, next_child->type)
3169 || !totally_scalarize_subtree (next_child)))
3170 return TOTAL_FLD_FAILED;
3172 *last_seen_sibling = next_child;
3173 return TOTAL_FLD_DONE;
3176 /* If the child we're looking at would partially overlap, we just cannot
3177 totally scalarize. */
3178 if (next_child
3179 && next_child->offset < pos + size
3180 && next_child->offset + next_child->size > pos + size)
3181 return TOTAL_FLD_FAILED;
3183 if (is_gimple_reg_type (type))
3185 /* We don't scalarize accesses that are children of other scalar type
3186 accesses, so if we go on and create an access for a register type,
3187 there should not be any pre-existing children. There are rare cases
3188 where the requested type is a vector but we already have register
3189 accesses for all its elements which is equally good. Detect that
3190 situation or whether we need to bail out. */
3192 HOST_WIDE_INT covered = pos;
3193 bool skipping = false;
3194 while (next_child
3195 && next_child->offset + next_child->size <= pos + size)
3197 if (next_child->offset != covered
3198 || !is_gimple_reg_type (next_child->type))
3199 return TOTAL_FLD_FAILED;
3201 covered += next_child->size;
3202 *last_seen_sibling = next_child;
3203 next_child = next_child->next_sibling;
3204 skipping = true;
3207 if (skipping)
3209 if (covered != pos + size)
3210 return TOTAL_FLD_FAILED;
3211 else
3212 return TOTAL_FLD_DONE;
3216 return TOTAL_FLD_CREATE;
3219 /* Go over sub-tree rooted in ROOT and attempt to create scalar accesses
3220 spanning all uncovered areas covered by ROOT, return false if the attempt
3221 failed. All created accesses will have grp_total_scalarization set (and
3222 should be ignored if the function returns false). */
3224 static bool
3225 totally_scalarize_subtree (struct access *root)
3227 gcc_checking_assert (!root->grp_unscalarizable_region);
3228 gcc_checking_assert (!is_gimple_reg_type (root->type));
3230 struct access *last_seen_sibling = NULL;
3232 switch (TREE_CODE (root->type))
3234 case RECORD_TYPE:
3235 for (tree fld = TYPE_FIELDS (root->type); fld; fld = DECL_CHAIN (fld))
3236 if (TREE_CODE (fld) == FIELD_DECL)
3238 tree ft = TREE_TYPE (fld);
3239 HOST_WIDE_INT fsize = tree_to_uhwi (DECL_SIZE (fld));
3240 if (!fsize)
3241 continue;
3243 HOST_WIDE_INT pos = root->offset + int_bit_position (fld);
3244 enum total_sra_field_state
3245 state = total_should_skip_creating_access (root,
3246 &last_seen_sibling,
3247 ft, pos, fsize);
3248 switch (state)
3250 case TOTAL_FLD_FAILED:
3251 return false;
3252 case TOTAL_FLD_DONE:
3253 continue;
3254 case TOTAL_FLD_CREATE:
3255 break;
3256 default:
3257 gcc_unreachable ();
3260 struct access **p = (last_seen_sibling
3261 ? &last_seen_sibling->next_sibling
3262 : &root->first_child);
3263 tree nref = build3 (COMPONENT_REF, ft, root->expr, fld, NULL_TREE);
3264 struct access *new_child
3265 = create_total_access_and_reshape (root, pos, fsize, ft, nref, p);
3266 if (!new_child)
3267 return false;
3269 if (!is_gimple_reg_type (ft)
3270 && !totally_scalarize_subtree (new_child))
3271 return false;
3272 last_seen_sibling = new_child;
3274 break;
3275 case ARRAY_TYPE:
3277 tree elemtype = TREE_TYPE (root->type);
3278 tree elem_size = TYPE_SIZE (elemtype);
3279 gcc_assert (elem_size && tree_fits_shwi_p (elem_size));
3280 HOST_WIDE_INT el_size = tree_to_shwi (elem_size);
3281 gcc_assert (el_size > 0);
3283 tree minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (root->type));
3284 gcc_assert (TREE_CODE (minidx) == INTEGER_CST);
3285 tree maxidx = TYPE_MAX_VALUE (TYPE_DOMAIN (root->type));
3286 /* Skip (some) zero-length arrays; others have MAXIDX == MINIDX - 1. */
3287 if (!maxidx)
3288 goto out;
3289 gcc_assert (TREE_CODE (maxidx) == INTEGER_CST);
3290 tree domain = TYPE_DOMAIN (root->type);
3291 /* MINIDX and MAXIDX are inclusive, and must be interpreted in
3292 DOMAIN (e.g. signed int, whereas min/max may be size_int). */
3293 offset_int idx = wi::to_offset (minidx);
3294 offset_int max = wi::to_offset (maxidx);
3295 if (!TYPE_UNSIGNED (domain))
3297 idx = wi::sext (idx, TYPE_PRECISION (domain));
3298 max = wi::sext (max, TYPE_PRECISION (domain));
3300 for (HOST_WIDE_INT pos = root->offset;
3301 idx <= max;
3302 pos += el_size, ++idx)
3304 enum total_sra_field_state
3305 state = total_should_skip_creating_access (root,
3306 &last_seen_sibling,
3307 elemtype, pos,
3308 el_size);
3309 switch (state)
3311 case TOTAL_FLD_FAILED:
3312 return false;
3313 case TOTAL_FLD_DONE:
3314 continue;
3315 case TOTAL_FLD_CREATE:
3316 break;
3317 default:
3318 gcc_unreachable ();
3321 struct access **p = (last_seen_sibling
3322 ? &last_seen_sibling->next_sibling
3323 : &root->first_child);
3324 tree nref = build4 (ARRAY_REF, elemtype, root->expr,
3325 wide_int_to_tree (domain, idx),
3326 NULL_TREE, NULL_TREE);
3327 struct access *new_child
3328 = create_total_access_and_reshape (root, pos, el_size, elemtype,
3329 nref, p);
3330 if (!new_child)
3331 return false;
3333 if (!is_gimple_reg_type (elemtype)
3334 && !totally_scalarize_subtree (new_child))
3335 return false;
3336 last_seen_sibling = new_child;
3339 break;
3340 default:
3341 gcc_unreachable ();
3344 out:
3345 return true;
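/* For example, totally scalarizing a hypothetical

     struct S { int i; float f[2]; } s;

   creates (or reuses, as decided by total_should_skip_creating_access)
   accesses for s.i and s.f and, under the latter, for s.f[0] and
   s.f[1].  */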
3348 /* Go through all accesses collected throughout the (intraprocedural) analysis
3349 stage, exclude overlapping ones, identify representatives and build trees
3350 out of them, making decisions about scalarization on the way. Return true
3351 iff there are any to-be-scalarized variables after this stage. */
3353 static bool
3354 analyze_all_variable_accesses (void)
3356 int res = 0;
3357 bitmap tmp = BITMAP_ALLOC (NULL);
3358 bitmap_iterator bi;
3359 unsigned i;
3361 bitmap_copy (tmp, candidate_bitmap);
3362 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
3364 tree var = candidate (i);
3365 struct access *access;
3367 access = sort_and_splice_var_accesses (var);
3368 if (!access || !build_access_trees (access))
3369 disqualify_candidate (var,
3370 "No or inhibitingly overlapping accesses.");
3373 propagate_all_subaccesses ();
3375 bool optimize_speed_p = !optimize_function_for_size_p (cfun);
3376 /* If the user didn't set PARAM_SRA_MAX_SCALARIZATION_SIZE_<...>,
3377 fall back to a target default. */
3378 unsigned HOST_WIDE_INT max_scalarization_size
3379 = get_move_ratio (optimize_speed_p) * UNITS_PER_WORD;
3381 if (optimize_speed_p)
3383 if (global_options_set.x_param_sra_max_scalarization_size_speed)
3384 max_scalarization_size = param_sra_max_scalarization_size_speed;
3386 else
3388 if (global_options_set.x_param_sra_max_scalarization_size_size)
3389 max_scalarization_size = param_sra_max_scalarization_size_size;
3391 max_scalarization_size *= BITS_PER_UNIT;
3393 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
3394 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
3395 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
3397 tree var = candidate (i);
3398 if (!VAR_P (var))
3399 continue;
3401 if (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var))) > max_scalarization_size)
3403 if (dump_file && (dump_flags & TDF_DETAILS))
3405 fprintf (dump_file, "Too big to totally scalarize: ");
3406 print_generic_expr (dump_file, var);
3407 fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
3409 continue;
3412 bool all_types_ok = true;
3413 for (struct access *access = get_first_repr_for_decl (var);
3414 access;
3415 access = access->next_grp)
3416 if (!can_totally_scalarize_forest_p (access)
3417 || !scalarizable_type_p (access->type, constant_decl_p (var)))
3419 all_types_ok = false;
3420 break;
3422 if (!all_types_ok)
3423 continue;
3425 if (dump_file && (dump_flags & TDF_DETAILS))
3427 fprintf (dump_file, "Will attempt to totally scalarize ");
3428 print_generic_expr (dump_file, var);
3429 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
3431 bool scalarized = true;
3432 for (struct access *access = get_first_repr_for_decl (var);
3433 access;
3434 access = access->next_grp)
3435 if (!is_gimple_reg_type (access->type)
3436 && !totally_scalarize_subtree (access))
3438 scalarized = false;
3439 break;
3442 if (scalarized)
3443 for (struct access *access = get_first_repr_for_decl (var);
3444 access;
3445 access = access->next_grp)
3446 access->grp_total_scalarization = true;
3449 if (flag_checking)
3450 verify_all_sra_access_forests ();
3452 bitmap_copy (tmp, candidate_bitmap);
3453 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
3455 tree var = candidate (i);
3456 struct access *access = get_first_repr_for_decl (var);
3458 if (analyze_access_trees (access))
3460 res++;
3461 if (dump_file && (dump_flags & TDF_DETAILS))
3463 fprintf (dump_file, "\nAccess trees for ");
3464 print_generic_expr (dump_file, var);
3465 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
3466 dump_access_tree (dump_file, access);
3467 fprintf (dump_file, "\n");
3470 else
3471 disqualify_candidate (var, "No scalar replacements to be created.");
3474 BITMAP_FREE (tmp);
3476 if (res)
3478 statistics_counter_event (cfun, "Scalarized aggregates", res);
3479 return true;
3481 else
3482 return false;
3485 /* Generate statements copying scalar replacements of accesses within a subtree
3486 into or out of AGG. ACCESS, all its children, siblings and their children
3487 are to be processed. AGG is an aggregate type expression (can be a
3488 declaration but does not have to be, it can for example also be a mem_ref or
3489 a series of handled components). TOP_OFFSET is the offset of the processed
3490 subtree which has to be subtracted from offsets of individual accesses to
3491 get corresponding offsets for AGG. If CHUNK_SIZE is non-null, copy only
3492 replacements in the interval <start_offset, start_offset + chunk_size>,
3493 otherwise copy all. GSI is a statement iterator used to place the new
3494 statements. WRITE should be true when the statements should write from AGG
3495 to the replacement and false if vice versa. If INSERT_AFTER is true, new
3496 statements will be added after the current statement in GSI, they will be
3497 added before the statement otherwise. */
3499 static void
3500 generate_subtree_copies (struct access *access, tree agg,
3501 HOST_WIDE_INT top_offset,
3502 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
3503 gimple_stmt_iterator *gsi, bool write,
3504 bool insert_after, location_t loc)
3506 /* Never write anything into constant pool decls. See PR70602. */
3507 if (!write && constant_decl_p (agg))
3508 return;
3511 if (chunk_size && access->offset >= start_offset + chunk_size)
3512 return;
3514 if (access->grp_to_be_replaced
3515 && (chunk_size == 0
3516 || access->offset + access->size > start_offset))
3518 tree expr, repl = get_access_replacement (access);
3519 gassign *stmt;
3521 expr = build_ref_for_model (loc, agg, access->offset - top_offset,
3522 access, gsi, insert_after);
3524 if (write)
3526 if (access->grp_partial_lhs)
3527 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
3528 !insert_after,
3529 insert_after ? GSI_NEW_STMT
3530 : GSI_SAME_STMT);
3531 stmt = gimple_build_assign (repl, expr);
3533 else
3535 TREE_NO_WARNING (repl) = 1;
3536 if (access->grp_partial_lhs)
3537 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
3538 !insert_after,
3539 insert_after ? GSI_NEW_STMT
3540 : GSI_SAME_STMT);
3541 stmt = gimple_build_assign (expr, repl);
3543 gimple_set_location (stmt, loc);
3545 if (insert_after)
3546 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
3547 else
3548 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3549 update_stmt (stmt);
3550 sra_stats.subtree_copies++;
3552 else if (write
3553 && access->grp_to_be_debug_replaced
3554 && (chunk_size == 0
3555 || access->offset + access->size > start_offset))
3557 gdebug *ds;
3558 tree drhs = build_debug_ref_for_model (loc, agg,
3559 access->offset - top_offset,
3560 access);
3561 ds = gimple_build_debug_bind (get_access_replacement (access),
3562 drhs, gsi_stmt (*gsi));
3563 if (insert_after)
3564 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
3565 else
3566 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3569 if (access->first_child)
3570 generate_subtree_copies (access->first_child, agg, top_offset,
3571 start_offset, chunk_size, gsi,
3572 write, insert_after, loc);
3574 access = access->next_sibling;
3576 while (access);
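/* E.g. when WRITE is true and the subtree contains replacements for s.i and
   s.f, statements along the lines of

     SR.1 = s.i;
     SR.2 = s.f;

   are emitted, and the mirror-image assignments when WRITE is false.  (The
   SR.* names are illustrative; real names come from
   create_access_replacement.)  */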
3579 /* Assign zero to all scalar replacements in an access subtree. ACCESS is the
3580 root of the subtree to be processed. GSI is the statement iterator used
3581 for inserting statements which are added after the current statement if
3582 INSERT_AFTER is true or before it otherwise. */
3584 static void
3585 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
3586 bool insert_after, location_t loc)
3589 struct access *child;
3591 if (access->grp_to_be_replaced)
3593 gassign *stmt;
3595 stmt = gimple_build_assign (get_access_replacement (access),
3596 build_zero_cst (access->type));
3597 if (insert_after)
3598 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
3599 else
3600 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3601 update_stmt (stmt);
3602 gimple_set_location (stmt, loc);
3604 else if (access->grp_to_be_debug_replaced)
3606 gdebug *ds
3607 = gimple_build_debug_bind (get_access_replacement (access),
3608 build_zero_cst (access->type),
3609 gsi_stmt (*gsi));
3610 if (insert_after)
3611 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
3612 else
3613 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3616 for (child = access->first_child; child; child = child->next_sibling)
3617 init_subtree_with_zero (child, gsi, insert_after, loc);
3620 /* Clobber all scalar replacements in an access subtree. ACCESS is the
3621 root of the subtree to be processed. GSI is the statement iterator used
3622 for inserting statements which are added after the current statement if
3623 INSERT_AFTER is true or before it otherwise. */
3625 static void
3626 clobber_subtree (struct access *access, gimple_stmt_iterator *gsi,
3627 bool insert_after, location_t loc)
3630 struct access *child;
3632 if (access->grp_to_be_replaced)
3634 tree rep = get_access_replacement (access);
3635 tree clobber = build_clobber (access->type);
3636 gimple *stmt = gimple_build_assign (rep, clobber);
3638 if (insert_after)
3639 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
3640 else
3641 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3642 update_stmt (stmt);
3643 gimple_set_location (stmt, loc);
3646 for (child = access->first_child; child; child = child->next_sibling)
3647 clobber_subtree (child, gsi, insert_after, loc);
3650 /* Search for an access representative for the given expression EXPR and
3651 return it or NULL if it cannot be found. */
3653 static struct access *
3654 get_access_for_expr (tree expr)
3656 poly_int64 poffset, psize, pmax_size;
3657 HOST_WIDE_INT offset, max_size;
3658 tree base;
3659 bool reverse;
3661 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
3662 a different size than the size of its argument and we need the latter
3663 one. */
3664 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3665 expr = TREE_OPERAND (expr, 0);
3667 base = get_ref_base_and_extent (expr, &poffset, &psize, &pmax_size,
3668 &reverse);
3669 if (!known_size_p (pmax_size)
3670 || !pmax_size.is_constant (&max_size)
3671 || !poffset.is_constant (&offset)
3672 || !DECL_P (base))
3673 return NULL;
3675 if (tree basesize = DECL_SIZE (base))
3677 poly_int64 sz;
3678 if (offset < 0
3679 || !poly_int_tree_p (basesize, &sz)
3680 || known_le (sz, offset))
3681 return NULL;
3684 if (max_size == 0
3685 || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
3686 return NULL;
3688 return get_var_base_offset_size_access (base, offset, max_size);
3691 /* Replace the expression EXPR with a scalar replacement if there is one and
3692 generate other statements to do type conversion or subtree copying if
3693 necessary. GSI is used to place newly created statements, WRITE is true if
3694 the expression is being written to (it is on a LHS of a statement or output
3695 in an assembly statement). */
3697 static bool
3698 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
3700 location_t loc;
3701 struct access *access;
3702 tree type, bfr, orig_expr;
3703 bool partial_cplx_access = false;
3705 if (TREE_CODE (*expr) == BIT_FIELD_REF)
3707 bfr = *expr;
3708 expr = &TREE_OPERAND (*expr, 0);
3710 else
3711 bfr = NULL_TREE;
3713 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
3715 expr = &TREE_OPERAND (*expr, 0);
3716 partial_cplx_access = true;
3718 access = get_access_for_expr (*expr);
3719 if (!access)
3720 return false;
3721 type = TREE_TYPE (*expr);
3722 orig_expr = *expr;
3724 loc = gimple_location (gsi_stmt (*gsi));
3725 gimple_stmt_iterator alt_gsi = gsi_none ();
3726 if (write && stmt_ends_bb_p (gsi_stmt (*gsi)))
3728 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
3729 gsi = &alt_gsi;
3732 if (access->grp_to_be_replaced)
3734 tree repl = get_access_replacement (access);
3735 /* If we replace a non-register typed access simply use the original
3736 access expression to extract the scalar component afterwards.
3737 This happens if scalarizing a function return value or parameter
3738 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
3739 gcc.c-torture/compile/20011217-1.c.
3741 We also want to use this when accessing a complex or vector which can
3742 be accessed as a different type too, potentially creating a need for
3743 type conversion (see PR42196) and when scalarized unions are involved
3744 in assembler statements (see PR42398). */
3745 if (!bfr && !useless_type_conversion_p (type, access->type))
3747 tree ref;
3749 ref = build_ref_for_model (loc, orig_expr, 0, access, gsi, false);
3751 if (partial_cplx_access)
3753 /* VIEW_CONVERT_EXPRs in partial complex access are always fine in
3754 the case of a write because in such a case the replacement cannot
3755 be a gimple register. In the case of a load, we have to
3756 differentiate between a register and a non-register
3757 replacement. */
3758 tree t = build1 (VIEW_CONVERT_EXPR, type, repl);
3759 gcc_checking_assert (!write || access->grp_partial_lhs);
3760 if (!access->grp_partial_lhs)
3762 tree tmp = make_ssa_name (type);
3763 gassign *stmt = gimple_build_assign (tmp, t);
3764 /* This is always a read. */
3765 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3766 t = tmp;
3768 *expr = t;
3770 else if (write)
3772 gassign *stmt;
3774 if (access->grp_partial_lhs)
3775 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
3776 false, GSI_NEW_STMT);
3777 stmt = gimple_build_assign (repl, ref);
3778 gimple_set_location (stmt, loc);
3779 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
3781 else
3783 gassign *stmt;
3785 if (access->grp_partial_lhs)
3786 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
3787 true, GSI_SAME_STMT);
3788 stmt = gimple_build_assign (ref, repl);
3789 gimple_set_location (stmt, loc);
3790 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3793 else
3794 *expr = repl;
3795 sra_stats.exprs++;
3797 else if (write && access->grp_to_be_debug_replaced)
3799 gdebug *ds = gimple_build_debug_bind (get_access_replacement (access),
3800 NULL_TREE,
3801 gsi_stmt (*gsi));
3802 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
3805 if (access->first_child)
3807 HOST_WIDE_INT start_offset, chunk_size;
3808 if (bfr
3809 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 1))
3810 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 2)))
3812 chunk_size = tree_to_uhwi (TREE_OPERAND (bfr, 1));
3813 start_offset = access->offset
3814 + tree_to_uhwi (TREE_OPERAND (bfr, 2));
3816 else
3817 start_offset = chunk_size = 0;
3819 generate_subtree_copies (access->first_child, orig_expr, access->offset,
3820 start_offset, chunk_size, gsi, write, write,
3821 loc);
3823 return true;
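/* A sketch of the effect of sra_modify_expr, with invented replacement
   names: if the complex field 'a.c' has the scalar replacement a$c, a
   partial read such as '__real a.c' is rewritten to use the replacement,
   and when the accessed type differs from the replacement type a
   VIEW_CONVERT_EXPR (through a new temporary for a load) is inserted, e.g.

     _1 = VIEW_CONVERT_EXPR<_Complex float>(a$c);
     ... __real _1 ...                                                   */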
3826 /* Where scalar replacements of the RHS have been written to when a replacement
3827 of a LHS of an assignment cannot be directly loaded from a replacement of
3828 the RHS. */
3829 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
3830 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
3831 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
3833 struct subreplacement_assignment_data
3835 /* Offset of the access representing the lhs of the assignment. */
3836 HOST_WIDE_INT left_offset;
3838 /* LHS and RHS of the original assignment. */
3839 tree assignment_lhs, assignment_rhs;
3841 /* Access representing the rhs of the whole assignment. */
3842 struct access *top_racc;
3844 /* Stmt iterator used for statement insertions after the original assignment.
3845 It points to the main GSI used to traverse a BB during function body
3846 modification. */
3847 gimple_stmt_iterator *new_gsi;
3849 /* Stmt iterator used for statement insertions before the original
3850 assignment. Keeps on pointing to the original statement. */
3851 gimple_stmt_iterator old_gsi;
3853 /* Location of the assignment. */
3854 location_t loc;
3856 /* Keeps the information whether we have needed to refresh replacements of
3857 the LHS and from which side of the assignments this takes place. */
3858 enum unscalarized_data_handling refreshed;
3861 /* Store all replacements in the access tree rooted in SAD->top_racc either to
3862 their base aggregate if there are unscalarized data or directly to the LHS
3863 of the statement that SAD->old_gsi points to otherwise. */
3865 static void
3866 handle_unscalarized_data_in_subtree (struct subreplacement_assignment_data *sad)
3868 tree src;
3869 if (sad->top_racc->grp_unscalarized_data)
3871 src = sad->assignment_rhs;
3872 sad->refreshed = SRA_UDH_RIGHT;
3874 else
3876 src = sad->assignment_lhs;
3877 sad->refreshed = SRA_UDH_LEFT;
3879 generate_subtree_copies (sad->top_racc->first_child, src,
3880 sad->top_racc->offset, 0, 0,
3881 &sad->old_gsi, false, false, sad->loc);
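/* For illustration (a sketch): given an aggregate copy 'd = s' in which
   's' still contains data not covered by any replacement, the function
   above stores the existing replacements of 's' back into 's' itself
   (SRA_UDH_RIGHT), so that the preserved aggregate copy transfers
   up-to-date values; when 's' is fully covered, the replacements are
   instead stored directly into 'd' (SRA_UDH_LEFT).  */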
3884 /* Try to generate statements to load all sub-replacements in an access subtree
3885 formed by children of LACC from scalar replacements in the SAD->top_racc
3886 subtree. If that is not possible, refresh the SAD->top_racc base aggregate
3887 and load the accesses from it. */
3889 static void
3890 load_assign_lhs_subreplacements (struct access *lacc,
3891 struct subreplacement_assignment_data *sad)
3893 for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
3895 HOST_WIDE_INT offset;
3896 offset = lacc->offset - sad->left_offset + sad->top_racc->offset;
3898 if (lacc->grp_to_be_replaced)
3900 struct access *racc;
3901 gassign *stmt;
3902 tree rhs;
3904 racc = find_access_in_subtree (sad->top_racc, offset, lacc->size);
3905 if (racc && racc->grp_to_be_replaced)
3907 rhs = get_access_replacement (racc);
3908 if (!useless_type_conversion_p (lacc->type, racc->type))
3909 rhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
3910 lacc->type, rhs);
3912 if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
3913 rhs = force_gimple_operand_gsi (&sad->old_gsi, rhs, true,
3914 NULL_TREE, true, GSI_SAME_STMT);
3916 else
3918 /* No suitable access on the right hand side, need to load from
3919 the aggregate. See if we have to update it first... */
3920 if (sad->refreshed == SRA_UDH_NONE)
3921 handle_unscalarized_data_in_subtree (sad);
3923 if (sad->refreshed == SRA_UDH_LEFT)
3924 rhs = build_ref_for_model (sad->loc, sad->assignment_lhs,
3925 lacc->offset - sad->left_offset,
3926 lacc, sad->new_gsi, true);
3927 else
3928 rhs = build_ref_for_model (sad->loc, sad->assignment_rhs,
3929 lacc->offset - sad->left_offset,
3930 lacc, sad->new_gsi, true);
3931 if (lacc->grp_partial_lhs)
3932 rhs = force_gimple_operand_gsi (sad->new_gsi,
3933 rhs, true, NULL_TREE,
3934 false, GSI_NEW_STMT);
3937 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
3938 gsi_insert_after (sad->new_gsi, stmt, GSI_NEW_STMT);
3939 gimple_set_location (stmt, sad->loc);
3940 update_stmt (stmt);
3941 sra_stats.subreplacements++;
3943 else
3945 if (sad->refreshed == SRA_UDH_NONE
3946 && lacc->grp_read && !lacc->grp_covered)
3947 handle_unscalarized_data_in_subtree (sad);
3949 if (lacc && lacc->grp_to_be_debug_replaced)
3951 gdebug *ds;
3952 tree drhs;
3953 struct access *racc = find_access_in_subtree (sad->top_racc,
3954 offset,
3955 lacc->size);
3957 if (racc && racc->grp_to_be_replaced)
3959 if (racc->grp_write || constant_decl_p (racc->base))
3960 drhs = get_access_replacement (racc);
3961 else
3962 drhs = NULL;
3964 else if (sad->refreshed == SRA_UDH_LEFT)
3965 drhs = build_debug_ref_for_model (sad->loc, lacc->base,
3966 lacc->offset, lacc);
3967 else if (sad->refreshed == SRA_UDH_RIGHT)
3968 drhs = build_debug_ref_for_model (sad->loc, sad->top_racc->base,
3969 offset, lacc);
3970 else
3971 drhs = NULL_TREE;
3972 if (drhs
3973 && !useless_type_conversion_p (lacc->type, TREE_TYPE (drhs)))
3974 drhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
3975 lacc->type, drhs);
3976 ds = gimple_build_debug_bind (get_access_replacement (lacc),
3977 drhs, gsi_stmt (sad->old_gsi));
3978 gsi_insert_after (sad->new_gsi, ds, GSI_NEW_STMT);
3982 if (lacc->first_child)
3983 load_assign_lhs_subreplacements (lacc, sad);
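/* The net effect, sketched with invented names: for 'd = s' where both
   sides are scalarized into d$i, d$f and s$i, s$f, the recursion above
   emits

     d$i = s$i;
     d$f = s$f;

   loading every LHS sub-replacement from the matching RHS one; only when
   no matching RHS replacement exists does it fall back to reading from
   the (possibly refreshed) aggregate.  */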
3987 /* Result code for SRA assignment modification. */
3988 enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
3989 SRA_AM_MODIFIED, /* stmt changed but not
3990 removed */
3991 SRA_AM_REMOVED }; /* stmt eliminated */
3993 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT points to the
3994 assignment and GSI is the statement iterator pointing at it. Returns
3995 the same values as sra_modify_assign. */
3997 static enum assignment_mod_result
3998 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
4000 tree lhs = gimple_assign_lhs (stmt);
4001 struct access *acc = get_access_for_expr (lhs);
4002 if (!acc)
4003 return SRA_AM_NONE;
4004 location_t loc = gimple_location (stmt);
4006 if (gimple_clobber_p (stmt))
4008 /* Clobber the replacement variable. */
4009 clobber_subtree (acc, gsi, !acc->grp_covered, loc);
4010 /* Remove clobbers of fully scalarized variables, they are dead. */
4011 if (acc->grp_covered)
4013 unlink_stmt_vdef (stmt);
4014 gsi_remove (gsi, true);
4015 release_defs (stmt);
4016 return SRA_AM_REMOVED;
4018 else
4019 return SRA_AM_MODIFIED;
4022 if (CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt)) > 0)
4024 /* I have never seen this code path trigger, but if it can happen the
4025 following should handle it gracefully. */
4026 if (access_has_children_p (acc))
4027 generate_subtree_copies (acc->first_child, lhs, acc->offset, 0, 0, gsi,
4028 true, true, loc);
4029 return SRA_AM_MODIFIED;
4032 if (acc->grp_covered)
4034 init_subtree_with_zero (acc, gsi, false, loc);
4035 unlink_stmt_vdef (stmt);
4036 gsi_remove (gsi, true);
4037 release_defs (stmt);
4038 return SRA_AM_REMOVED;
4040 else
4042 init_subtree_with_zero (acc, gsi, true, loc);
4043 return SRA_AM_MODIFIED;
4047 /* Create and return a new suitable default definition SSA_NAME for RACC which
4048 is an access describing an uninitialized part of an aggregate that is being
4049 loaded. REG_TYPE is used instead of the actual RACC type if that is not of
4050 a gimple register type. */
4052 static tree
4053 get_repl_default_def_ssa_name (struct access *racc, tree reg_type)
4055 gcc_checking_assert (!racc->grp_to_be_replaced
4056 && !racc->grp_to_be_debug_replaced);
4057 if (!racc->replacement_decl)
4058 racc->replacement_decl = create_access_replacement (racc, reg_type);
4059 return get_or_create_ssa_default_def (cfun, racc->replacement_decl);
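/* E.g. (a sketch): when a load reads a part of an aggregate that is never
   written, 'x_1 = s.i' can become 'x_1 = s$i_2(D)', i.e. a copy from a
   default-definition SSA_NAME, which makes the undefined value explicit
   instead of keeping the memory load.  */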
4062 /* Examine both sides of the assignment statement pointed to by STMT, replace
4063 them with a scalar replacement if there is one and generate copying of
4064 replacements if scalarized aggregates have been used in the assignment. GSI
4065 is used to hold generated statements for type conversions and subtree
4066 copying. */
4068 static enum assignment_mod_result
4069 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
4071 struct access *lacc, *racc;
4072 tree lhs, rhs;
4073 bool modify_this_stmt = false;
4074 bool force_gimple_rhs = false;
4075 location_t loc;
4076 gimple_stmt_iterator orig_gsi = *gsi;
4078 if (!gimple_assign_single_p (stmt))
4079 return SRA_AM_NONE;
4080 lhs = gimple_assign_lhs (stmt);
4081 rhs = gimple_assign_rhs1 (stmt);
4083 if (TREE_CODE (rhs) == CONSTRUCTOR)
4084 return sra_modify_constructor_assign (stmt, gsi);
4086 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
4087 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
4088 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
4090 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (stmt),
4091 gsi, false);
4092 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (stmt),
4093 gsi, true);
4094 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
4097 lacc = get_access_for_expr (lhs);
4098 racc = get_access_for_expr (rhs);
4099 if (!lacc && !racc)
4100 return SRA_AM_NONE;
4101 /* Avoid modifying initializations of constant-pool replacements. */
4102 if (racc && (racc->replacement_decl == lhs))
4103 return SRA_AM_NONE;
4105 loc = gimple_location (stmt);
4106 if (lacc && lacc->grp_to_be_replaced)
4108 lhs = get_access_replacement (lacc);
4109 gimple_assign_set_lhs (stmt, lhs);
4110 modify_this_stmt = true;
4111 if (lacc->grp_partial_lhs)
4112 force_gimple_rhs = true;
4113 sra_stats.exprs++;
4116 if (racc && racc->grp_to_be_replaced)
4118 rhs = get_access_replacement (racc);
4119 modify_this_stmt = true;
4120 if (racc->grp_partial_lhs)
4121 force_gimple_rhs = true;
4122 sra_stats.exprs++;
4124 else if (racc
4125 && !racc->grp_unscalarized_data
4126 && !racc->grp_unscalarizable_region
4127 && TREE_CODE (lhs) == SSA_NAME
4128 && !access_has_replacements_p (racc))
4130 rhs = get_repl_default_def_ssa_name (racc, TREE_TYPE (lhs));
4131 modify_this_stmt = true;
4132 sra_stats.exprs++;
4135 if (modify_this_stmt)
4137 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
4139 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
4140 ??? This should move to fold_stmt which we simply should
4141 call after building a VIEW_CONVERT_EXPR here. */
4142 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
4143 && !contains_bitfld_component_ref_p (lhs))
4145 lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
4146 gimple_assign_set_lhs (stmt, lhs);
4148 else if (lacc
4149 && AGGREGATE_TYPE_P (TREE_TYPE (rhs))
4150 && !contains_vce_or_bfcref_p (rhs))
4151 rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
4153 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
4155 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
4156 rhs);
4157 if (is_gimple_reg_type (TREE_TYPE (lhs))
4158 && TREE_CODE (lhs) != SSA_NAME)
4159 force_gimple_rhs = true;
4164 if (lacc && lacc->grp_to_be_debug_replaced)
4166 tree dlhs = get_access_replacement (lacc);
4167 tree drhs = unshare_expr (rhs);
4168 if (!useless_type_conversion_p (TREE_TYPE (dlhs), TREE_TYPE (drhs)))
4170 if (AGGREGATE_TYPE_P (TREE_TYPE (drhs))
4171 && !contains_vce_or_bfcref_p (drhs))
4172 drhs = build_debug_ref_for_model (loc, drhs, 0, lacc);
4173 if (drhs
4174 && !useless_type_conversion_p (TREE_TYPE (dlhs),
4175 TREE_TYPE (drhs)))
4176 drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
4177 TREE_TYPE (dlhs), drhs);
4179 gdebug *ds = gimple_build_debug_bind (dlhs, drhs, stmt);
4180 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
4183 /* From this point on, the function deals with assignments between
4184 aggregates when at least one has scalar reductions of some of its
4185 components. There are three possible scenarios: 1) both the LHS and RHS have
4186 to-be-scalarized components, 2) only the RHS has or 3) only the LHS has.
4188 In the first case, we would like to load the LHS components from RHS
4189 components whenever possible. If that is not possible, we would like to
4190 read it directly from the RHS (after updating it by storing in it its own
4191 components). If there are some necessary unscalarized data in the LHS,
4192 those will be loaded by the original assignment too. If neither of these
4193 cases happen, the original statement can be removed. Most of this is done
4194 by load_assign_lhs_subreplacements.
4196 In the second case, we would like to store all RHS scalarized components
4197 directly into LHS and if they cover the aggregate completely, remove the
4198 statement too. In the third case, we want the LHS components to be loaded
4199 directly from the RHS (DSE will remove the original statement if it
4200 becomes redundant).
4202 This is a bit complex but manageable when types match and when unions do
4203 not cause confusion in a way that we cannot really load a component of LHS
4204 from the RHS or vice versa (the access representing this level can have
4205 subaccesses that are accessible only through a different union field at a
4206 higher level - different from the one used in the examined expression).
4207 Unions are fun.
4209 Therefore, I specially handle a fourth case, happening when there is a
4210 specific type cast or it is impossible to locate a scalarized subaccess on
4211 the other side of the expression. If that happens, I simply "refresh" the
4212 RHS by storing its scalarized components in it, leave the original statement
4213 there to do the copying and then load the scalar replacements of the LHS.
4214 This is what the first branch does. */
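/* A sketch of scenario 2) above (invented replacement names): for
   'd = s' where only 's' is scalarized into s$i and s$f, the code below
   emits

     d.i = s$i;
     d.f = s$f;

   and, when the replacements cover all of 's', the original aggregate
   copy can be removed.  */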
4216 if (modify_this_stmt
4217 || gimple_has_volatile_ops (stmt)
4218 || contains_vce_or_bfcref_p (rhs)
4219 || contains_vce_or_bfcref_p (lhs)
4220 || stmt_ends_bb_p (stmt))
4222 /* No need to copy into a constant-pool, it comes pre-initialized. */
4223 if (access_has_children_p (racc) && !constant_decl_p (racc->base))
4224 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
4225 gsi, false, false, loc);
4226 if (access_has_children_p (lacc))
4228 gimple_stmt_iterator alt_gsi = gsi_none ();
4229 if (stmt_ends_bb_p (stmt))
4231 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
4232 gsi = &alt_gsi;
4234 generate_subtree_copies (lacc->first_child, lhs, lacc->offset, 0, 0,
4235 gsi, true, true, loc);
4237 sra_stats.separate_lhs_rhs_handling++;
4239 /* This gimplification must be done after generate_subtree_copies,
4240 lest we insert the subtree copies in the middle of the gimplified
4241 sequence. */
4242 if (force_gimple_rhs)
4243 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
4244 true, GSI_SAME_STMT);
4245 if (gimple_assign_rhs1 (stmt) != rhs)
4247 modify_this_stmt = true;
4248 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
4249 gcc_assert (stmt == gsi_stmt (orig_gsi));
4252 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
4254 else
4256 if (access_has_children_p (lacc)
4257 && access_has_children_p (racc)
4258 /* When an access represents an unscalarizable region, it usually
4259 represents accesses with variable offset and thus must not be used
4260 to generate new memory accesses. */
4261 && !lacc->grp_unscalarizable_region
4262 && !racc->grp_unscalarizable_region)
4264 struct subreplacement_assignment_data sad;
4266 sad.left_offset = lacc->offset;
4267 sad.assignment_lhs = lhs;
4268 sad.assignment_rhs = rhs;
4269 sad.top_racc = racc;
4270 sad.old_gsi = *gsi;
4271 sad.new_gsi = gsi;
4272 sad.loc = gimple_location (stmt);
4273 sad.refreshed = SRA_UDH_NONE;
4275 if (lacc->grp_read && !lacc->grp_covered)
4276 handle_unscalarized_data_in_subtree (&sad);
4278 load_assign_lhs_subreplacements (lacc, &sad);
4279 if (sad.refreshed != SRA_UDH_RIGHT)
4281 gsi_next (gsi);
4282 unlink_stmt_vdef (stmt);
4283 gsi_remove (&sad.old_gsi, true);
4284 release_defs (stmt);
4285 sra_stats.deleted++;
4286 return SRA_AM_REMOVED;
4289 else
4291 if (access_has_children_p (racc)
4292 && !racc->grp_unscalarized_data
4293 && TREE_CODE (lhs) != SSA_NAME)
4295 if (dump_file)
4297 fprintf (dump_file, "Removing load: ");
4298 print_gimple_stmt (dump_file, stmt, 0);
4300 generate_subtree_copies (racc->first_child, lhs,
4301 racc->offset, 0, 0, gsi,
4302 false, false, loc);
4303 gcc_assert (stmt == gsi_stmt (*gsi));
4304 unlink_stmt_vdef (stmt);
4305 gsi_remove (gsi, true);
4306 release_defs (stmt);
4307 sra_stats.deleted++;
4308 return SRA_AM_REMOVED;
4310 /* Restore the aggregate RHS from its components so the
4311 prevailing aggregate copy does the right thing. */
4312 if (access_has_children_p (racc))
4313 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
4314 gsi, false, false, loc);
4315 /* Re-load the components of the aggregate copy destination.
4316 But use the RHS aggregate to load from to expose more
4317 optimization opportunities. */
4318 if (access_has_children_p (lacc))
4319 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
4320 0, 0, gsi, true, true, loc);
4323 return SRA_AM_NONE;
4327 /* Set any scalar replacements of values in the constant pool to the initial
4328 value of the constant. (Constant-pool decls like *.LC0 have effectively
4329 been initialized before the program starts, we must do the same for their
4330 replacements.) Thus, we output statements like 'SR.1 = *.LC0[0];' into
4331 the function's entry block. */
4333 static void
4334 initialize_constant_pool_replacements (void)
4336 gimple_seq seq = NULL;
4337 gimple_stmt_iterator gsi = gsi_start (seq);
4338 bitmap_iterator bi;
4339 unsigned i;
4341 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
4343 tree var = candidate (i);
4344 if (!constant_decl_p (var))
4345 continue;
4347 struct access *access = get_first_repr_for_decl (var);
4349 while (access)
4351 if (access->replacement_decl)
4353 gassign *stmt
4354 = gimple_build_assign (get_access_replacement (access),
4355 unshare_expr (access->expr));
4356 if (dump_file && (dump_flags & TDF_DETAILS))
4358 fprintf (dump_file, "Generating constant initializer: ");
4359 print_gimple_stmt (dump_file, stmt, 0);
4360 fprintf (dump_file, "\n");
4362 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
4363 update_stmt (stmt);
4366 if (access->first_child)
4367 access = access->first_child;
4368 else if (access->next_sibling)
4369 access = access->next_sibling;
4370 else
4372 while (access->parent && !access->next_sibling)
4373 access = access->parent;
4374 if (access->next_sibling)
4375 access = access->next_sibling;
4376 else
4377 access = access->next_grp;
4382 seq = gsi_seq (gsi);
4383 if (seq)
4384 gsi_insert_seq_on_edge_immediate (
4385 single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
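/* A sketch of the generated sequence (SR.* names as in the comment
   above): for a scalarized constant-pool decl *.LC0, the inner loop
   walks the access tree in preorder (first_child, then next_sibling,
   climbing through parent otherwise) and the entry edge receives

     SR.1 = *.LC0[0];
     SR.2 = *.LC0[1];                                                   */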
4388 /* Traverse the function body and make all modifications as decided in
4389 analyze_all_variable_accesses. Return true iff the CFG has been
4390 changed. */
4392 static bool
4393 sra_modify_function_body (void)
4395 bool cfg_changed = false;
4396 basic_block bb;
4398 initialize_constant_pool_replacements ();
4400 FOR_EACH_BB_FN (bb, cfun)
4402 gimple_stmt_iterator gsi = gsi_start_bb (bb);
4403 while (!gsi_end_p (gsi))
4405 gimple *stmt = gsi_stmt (gsi);
4406 enum assignment_mod_result assign_result;
4407 bool modified = false, deleted = false;
4408 tree *t;
4409 unsigned i;
4411 switch (gimple_code (stmt))
4413 case GIMPLE_RETURN:
4414 t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
4415 if (*t != NULL_TREE)
4416 modified |= sra_modify_expr (t, &gsi, false);
4417 break;
4419 case GIMPLE_ASSIGN:
4420 assign_result = sra_modify_assign (stmt, &gsi);
4421 modified |= assign_result == SRA_AM_MODIFIED;
4422 deleted = assign_result == SRA_AM_REMOVED;
4423 break;
4425 case GIMPLE_CALL:
4426 /* Operands must be processed before the lhs. */
4427 for (i = 0; i < gimple_call_num_args (stmt); i++)
4429 t = gimple_call_arg_ptr (stmt, i);
4430 modified |= sra_modify_expr (t, &gsi, false);
4433 if (gimple_call_lhs (stmt))
4435 t = gimple_call_lhs_ptr (stmt);
4436 modified |= sra_modify_expr (t, &gsi, true);
4438 break;
4440 case GIMPLE_ASM:
4442 gasm *asm_stmt = as_a <gasm *> (stmt);
4443 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
4445 t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
4446 modified |= sra_modify_expr (t, &gsi, false);
4448 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
4450 t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
4451 modified |= sra_modify_expr (t, &gsi, true);
4454 break;
4456 default:
4457 break;
4460 if (modified)
4462 update_stmt (stmt);
4463 if (maybe_clean_eh_stmt (stmt)
4464 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4465 cfg_changed = true;
4467 if (!deleted)
4468 gsi_next (&gsi);
4472 gsi_commit_edge_inserts ();
4473 return cfg_changed;
4476 /* Generate statements initializing scalar replacements of parts of function
4477 parameters. */
4479 static void
4480 initialize_parameter_reductions (void)
4482 gimple_stmt_iterator gsi;
4483 gimple_seq seq = NULL;
4484 tree parm;
4486 gsi = gsi_start (seq);
4487 for (parm = DECL_ARGUMENTS (current_function_decl);
4488 parm;
4489 parm = DECL_CHAIN (parm))
4491 vec<access_p> *access_vec;
4492 struct access *access;
4494 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4495 continue;
4496 access_vec = get_base_access_vector (parm);
4497 if (!access_vec)
4498 continue;
4500 for (access = (*access_vec)[0];
4501 access;
4502 access = access->next_grp)
4503 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
4504 EXPR_LOCATION (parm));
4507 seq = gsi_seq (gsi);
4508 if (seq)
4509 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
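/* For illustration (a sketch with an invented replacement name): for

     int foo (struct S p) { return p.i; }

   with 'p' scalarized, the sequence placed on the entry edge loads the
   replacement from the incoming parameter:

     p$i = p.i;

   after which uses of 'p.i' in the body refer to p$i.  */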
4512 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
4513 it reveals there are components of some aggregates to be scalarized, it runs
4514 the required transformations. */
4515 static unsigned int
4516 perform_intra_sra (void)
4518 int ret = 0;
4519 sra_initialize ();
4521 if (!find_var_candidates ())
4522 goto out;
4524 if (!scan_function ())
4525 goto out;
4527 if (!analyze_all_variable_accesses ())
4528 goto out;
4530 if (sra_modify_function_body ())
4531 ret = TODO_update_ssa | TODO_cleanup_cfg;
4532 else
4533 ret = TODO_update_ssa;
4534 initialize_parameter_reductions ();
4536 statistics_counter_event (cfun, "Scalar replacements created",
4537 sra_stats.replacements);
4538 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
4539 statistics_counter_event (cfun, "Subtree copy stmts",
4540 sra_stats.subtree_copies);
4541 statistics_counter_event (cfun, "Subreplacement stmts",
4542 sra_stats.subreplacements);
4543 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
4544 statistics_counter_event (cfun, "Separate LHS and RHS handling",
4545 sra_stats.separate_lhs_rhs_handling);
4547 out:
4548 sra_deinitialize ();
4549 return ret;
4552 /* Perform early intraprocedural SRA. */
4553 static unsigned int
4554 early_intra_sra (void)
4556 sra_mode = SRA_MODE_EARLY_INTRA;
4557 return perform_intra_sra ();
4560 /* Perform "late" intraprocedural SRA. */
4561 static unsigned int
4562 late_intra_sra (void)
4564 sra_mode = SRA_MODE_INTRA;
4565 return perform_intra_sra ();
4569 static bool
4570 gate_intra_sra (void)
4572 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
4576 namespace {
4578 const pass_data pass_data_sra_early =
4580 GIMPLE_PASS, /* type */
4581 "esra", /* name */
4582 OPTGROUP_NONE, /* optinfo_flags */
4583 TV_TREE_SRA, /* tv_id */
4584 ( PROP_cfg | PROP_ssa ), /* properties_required */
4585 0, /* properties_provided */
4586 0, /* properties_destroyed */
4587 0, /* todo_flags_start */
4588 TODO_update_ssa, /* todo_flags_finish */
4591 class pass_sra_early : public gimple_opt_pass
4593 public:
4594 pass_sra_early (gcc::context *ctxt)
4595 : gimple_opt_pass (pass_data_sra_early, ctxt)
4598 /* opt_pass methods: */
4599 virtual bool gate (function *) { return gate_intra_sra (); }
4600 virtual unsigned int execute (function *) { return early_intra_sra (); }
4602 }; // class pass_sra_early
4604 } // anon namespace
4606 gimple_opt_pass *
4607 make_pass_sra_early (gcc::context *ctxt)
4609 return new pass_sra_early (ctxt);
4612 namespace {
4614 const pass_data pass_data_sra =
4616 GIMPLE_PASS, /* type */
4617 "sra", /* name */
4618 OPTGROUP_NONE, /* optinfo_flags */
4619 TV_TREE_SRA, /* tv_id */
4620 ( PROP_cfg | PROP_ssa ), /* properties_required */
4621 0, /* properties_provided */
4622 0, /* properties_destroyed */
4623 TODO_update_address_taken, /* todo_flags_start */
4624 TODO_update_ssa, /* todo_flags_finish */
4627 class pass_sra : public gimple_opt_pass
4629 public:
4630 pass_sra (gcc::context *ctxt)
4631 : gimple_opt_pass (pass_data_sra, ctxt)
4634 /* opt_pass methods: */
4635 virtual bool gate (function *) { return gate_intra_sra (); }
4636 virtual unsigned int execute (function *) { return late_intra_sra (); }
4638 }; // class pass_sra
4640 } // anon namespace
4642 gimple_opt_pass *
4643 make_pass_sra (gcc::context *ctxt)
4645 return new pass_sra (ctxt);