/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2008-2022 Free Software Foundation, Inc.
   Contributed by Martin Jambor <mjambor@suse.cz>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file implements Scalar Replacement of Aggregates (SRA).  SRA is run
   twice, once in the early stages of compilation (early SRA) and once in the
   late stages (late SRA).  The aim of both is to turn references to scalar
   parts of aggregates into uses of independent scalar variables.

   The two passes are nearly identical, the only difference is that early SRA
   does not scalarize unions which are used as the result in a GIMPLE_RETURN
   statement because together with inlining this can lead to weird type
   conversions.

   Both passes operate in four stages:

   1. The declarations that have properties which make them candidates for
      scalarization are identified in function find_var_candidates().  The
      candidates are stored in candidate_bitmap.

   2. The function body is scanned.  In the process, declarations which are
      used in a manner that prevents their scalarization are removed from the
      candidate bitmap.  More importantly, for every access into an aggregate,
      an access structure (struct access) is created by create_access() and
      stored in a vector associated with the aggregate.  Among other
      information, the aggregate declaration, the offset and size of the access
      and its type are stored in the structure.

      On a related note, assign_link structures are created for every assign
      statement between candidate aggregates and attached to the related
      accesses.

   3. The vectors of accesses are analyzed.  They are first sorted according to
      their offset and size and then scanned for partially overlapping accesses
      (i.e. those which overlap but one is not entirely within another).  Such
      an access disqualifies the whole aggregate from being scalarized.

      If there is no such inhibiting overlap, a representative access structure
      is chosen for every unique combination of offset and size.  Afterwards,
      the pass builds a set of trees from these structures, in which children
      of an access are within their parent (in terms of offset and size).

      Then accesses are propagated whenever possible (i.e. in cases when it
      does not create a partially overlapping access) across assign_links from
      the right hand side to the left hand side.

      Then the set of trees for each declaration is traversed again and those
      accesses which should be replaced by a scalar are identified.

   4. The function is traversed again, and for every reference into an
      aggregate that has some component which is about to be scalarized,
      statements are amended and new statements are created as necessary.
      Finally, if a parameter got scalarized, the scalar replacements are
      initialized with values from respective parameter aggregates.  */
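/* As a rough illustration of the transformation described above (a sketch
   with hypothetical variable names), given

     struct S { int i; float f; };

     int foo (struct S s)
     {
       s.i += 1;
       return s.i;
     }

   the pass may end up replacing the uses of s.i and s.f with independent
   scalar variables (e.g. named s$i and s$f by make_fancy_name below),
   initializing them from the incoming parameter aggregate, so that the
   aggregate itself no longer needs to live in memory.  */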
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "builtins.h"
#include "tree-sra.h"
/* Enumeration of all aggregate reductions we can do.  */
enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
		SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
		SRA_MODE_INTRA };     /* late intraprocedural SRA */

/* Global variable describing which aggregate reduction we are performing at
   the moment.  */
static enum sra_mode sra_mode;
/* ACCESS represents each access to an aggregate variable (as a whole or a
   part).  It can also represent a group of accesses that refer to exactly the
   same fragment of an aggregate (i.e. those that have exactly the same offset
   and size).  Such representatives for a single aggregate, once determined,
   are linked in a linked list and have the group fields set.

   Moreover, when doing intraprocedural SRA, a tree is built from those
   representatives (by the means of first_child and next_sibling pointers), in
   which all items in a subtree are "within" the root, i.e. their offset is
   greater or equal to offset of the root and offset+size is smaller or equal
   to offset+size of the root.  Children of an access are sorted by offset.

   Note that accesses to parts of vector and complex number types are always
   represented by an access to the whole complex number or a vector.  It is a
   duty of the modifying functions to replace them appropriately.  */
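/* A rough sketch of such a tree: assuming 32-bit ints, for an aggregate like

     struct { struct { int a; int b; } inner; int c; } s;

   accessed both as whole sub-objects and as individual fields, the group
   representatives could be linked as

     s.inner      offset 0,  size 64
       s.inner.a  offset 0,  size 32
       s.inner.b  offset 32, size 32
     s.c          offset 64, size 32

   where every child lies within its parent and siblings are ordered by
   increasing offset.  */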
struct access
{
  /* Values returned by `get_ref_base_and_extent' for each component reference
     If EXPR isn't a component reference just set `BASE = EXPR', `OFFSET = 0',
     `SIZE = TREE_SIZE (TREE_TYPE (expr))'.  */
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
  tree base;

  /* Expression.  It is context dependent so do not use it to create new
     expressions to access the original aggregate.  See PR 42154 for a
     testcase.  */
  tree expr;
  /* Type.  */
  tree type;

  /* The statement this access belongs to.  */
  gimple *stmt;

  /* Next group representative for this aggregate.  */
  struct access *next_grp;

  /* Pointer to the group representative.  Pointer to itself if the struct is
     the representative.  */
  struct access *group_representative;

  /* After access tree has been constructed, this points to the parent of the
     current access, if there is one.  NULL for roots.  */
  struct access *parent;

  /* If this access has any children (in terms of the definition above), this
     points to the first one.  */
  struct access *first_child;

  /* In intraprocedural SRA, pointer to the next sibling in the access tree as
     described above.  */
  struct access *next_sibling;

  /* Pointers to the first and last element in the linked list of assign
     links for propagation from RHS to LHS.  */
  struct assign_link *first_rhs_link, *last_rhs_link;

  /* Pointers to the first and last element in the linked list of assign
     links for propagation from LHS to RHS.  */
  struct assign_link *first_lhs_link, *last_lhs_link;

  /* Pointer to the next access in the work queues.  */
  struct access *next_rhs_queued, *next_lhs_queued;

  /* Replacement variable for this access "region."  Never to be accessed
     directly, always only by the means of get_access_replacement() and only
     when grp_to_be_replaced flag is set.  */
  tree replacement_decl;

  /* Is this access made in reverse storage order? */
  unsigned reverse : 1;

  /* Is this particular access write access? */
  unsigned write : 1;

  /* Is this access currently in the rhs work queue?  */
  unsigned grp_rhs_queued : 1;

  /* Is this access currently in the lhs work queue?  */
  unsigned grp_lhs_queued : 1;

  /* Does this group contain a write access?  This flag is propagated down the
     access tree.  */
  unsigned grp_write : 1;

  /* Does this group contain a read access?  This flag is propagated down the
     access tree.  */
  unsigned grp_read : 1;

  /* Does this group contain a read access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_read : 1;

  /* Does this group contain a write access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_write : 1;

  /* Does this group contain a read access through a scalar type?  This flag is
     not propagated in the access tree in any direction.  */
  unsigned grp_scalar_read : 1;

  /* Does this group contain a write access through a scalar type?  This flag
     is not propagated in the access tree in any direction.  */
  unsigned grp_scalar_write : 1;

  /* In a root of an access tree, true means that the entire tree should be
     totally scalarized - that all scalar leafs should be scalarized and
     non-root grp_total_scalarization accesses should be honored.  Otherwise,
     non-root accesses with grp_total_scalarization should never get scalar
     replacements.  */
  unsigned grp_total_scalarization : 1;

  /* Other passes of the analysis use this bit to make function
     analyze_access_subtree create scalar replacements for this group if
     possible.  */
  unsigned grp_hint : 1;

  /* Is the subtree rooted in this access fully covered by scalar
     replacements?  */
  unsigned grp_covered : 1;

  /* If set to true, this access and all below it in an access tree must not be
     scalarized.  */
  unsigned grp_unscalarizable_region : 1;

  /* Whether data have been written to parts of the aggregate covered by this
     access which is not to be scalarized.  This flag is propagated up in the
     access tree.  */
  unsigned grp_unscalarized_data : 1;

  /* Set if all accesses in the group consist of the same chain of
     COMPONENT_REFs and ARRAY_REFs.  */
  unsigned grp_same_access_path : 1;

  /* Does this access and/or group contain a write access through a
     BIT_FIELD_REF?  */
  unsigned grp_partial_lhs : 1;

  /* Set when a scalar replacement should be created for this variable.  */
  unsigned grp_to_be_replaced : 1;

  /* Set when we want a replacement for the sole purpose of having it in
     generated debug statements.  */
  unsigned grp_to_be_debug_replaced : 1;

  /* Should TREE_NO_WARNING of a replacement be set?  */
  unsigned grp_no_warning : 1;
};

typedef struct access *access_p;
/* Alloc pool for allocating access structures.  */
static object_allocator<struct access> access_pool ("SRA accesses");
/* A structure linking lhs and rhs accesses from an aggregate assignment.  They
   are used to propagate subaccesses from rhs to lhs and vice versa as long as
   they don't conflict with what is already there.  In the RHS->LHS direction,
   we also propagate grp_write flag to lazily mark that the access contains any
   meaningful data.  */
struct assign_link
{
  struct access *lacc, *racc;
  struct assign_link *next_rhs, *next_lhs;
};

/* Alloc pool for allocating assign link structures.  */
static object_allocator<assign_link> assign_link_pool ("SRA links");
/* Base (tree) -> Vector (vec<access_p> *) map.  */
static hash_map<tree, auto_vec<access_p> > *base_access_vec;

/* Hash to limit creation of artificial accesses.  */
static hash_map<tree, unsigned> *propagation_budget;
/* Candidate hash table helpers.  */

struct uid_decl_hasher : nofree_ptr_hash <tree_node>
{
  static inline hashval_t hash (const tree_node *);
  static inline bool equal (const tree_node *, const tree_node *);
};

/* Hash a tree in a uid_decl_map.  */

inline hashval_t
uid_decl_hasher::hash (const tree_node *item)
{
  return item->decl_minimal.uid;
}

/* Return true if the DECL_UID in both trees are equal.  */

inline bool
uid_decl_hasher::equal (const tree_node *a, const tree_node *b)
{
  return (a->decl_minimal.uid == b->decl_minimal.uid);
}

/* Set of candidates.  */
static bitmap candidate_bitmap;
static hash_table<uid_decl_hasher> *candidates;
/* For a candidate UID return the candidates decl.  */

static tree
candidate (unsigned uid)
{
  tree_node t;
  t.decl_minimal.uid = uid;
  return candidates->find_with_hash (&t, static_cast <hashval_t> (uid));
}
/* Bitmap of candidates which we should try to entirely scalarize away and
   those which cannot be (because they are and need be used as a whole).  */
static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;

/* Bitmap of candidates in the constant pool, which cannot be scalarized
   because this would produce non-constant expressions (e.g. Ada).  */
static bitmap disqualified_constants;

/* Obstack for creation of fancy names.  */
static struct obstack name_obstack;

/* Head of a linked list of accesses that need to have its subaccesses
   propagated to their assignment counterparts.  */
static struct access *rhs_work_queue_head, *lhs_work_queue_head;
/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */

static struct
{
  /* Number of processed aggregates is readily available in
     analyze_all_variable_accesses and so is not stored here.  */

  /* Number of created scalar replacements.  */
  int replacements;

  /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
     expression.  */
  int exprs;

  /* Number of statements created by generate_subtree_copies.  */
  int subtree_copies;

  /* Number of statements created by load_assign_lhs_subreplacements.  */
  int subreplacements;

  /* Number of times sra_modify_assign has deleted a statement.  */
  int deleted;

  /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
     RHS separately due to type conversions or nonexistent matching
     references.  */
  int separate_lhs_rhs_handling;

  /* Number of parameters that were removed because they were unused.  */
  int deleted_unused_parameters;

  /* Number of scalars passed as parameters by reference that have been
     converted to be passed by value.  */
  int scalar_by_ref_to_by_val;

  /* Number of aggregate parameters that were replaced by one or more of their
     components.  */
  int aggregate_params_reduced;

  /* Number of components created when splitting aggregate parameters.  */
  int param_reductions_created;

  /* Number of deferred_init calls that are modified.  */
  int deferred_init;

  /* Number of deferred_init calls that are created by
     generate_subtree_deferred_init.  */
  int subtree_deferred_init;
} sra_stats;
static void
dump_access (FILE *f, struct access *access, bool grp)
{
  fprintf (f, "access { ");
  fprintf (f, "base = (%d)'", DECL_UID (access->base));
  print_generic_expr (f, access->base);
  fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
  fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
  fprintf (f, ", expr = ");
  print_generic_expr (f, access->expr);
  fprintf (f, ", type = ");
  print_generic_expr (f, access->type);
  fprintf (f, ", reverse = %d", access->reverse);
  if (grp)
    fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
	     "grp_assignment_write = %d, grp_scalar_read = %d, "
	     "grp_scalar_write = %d, grp_total_scalarization = %d, "
	     "grp_hint = %d, grp_covered = %d, "
	     "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
	     "grp_same_access_path = %d, grp_partial_lhs = %d, "
	     "grp_to_be_replaced = %d, grp_to_be_debug_replaced = %d}\n",
	     access->grp_read, access->grp_write, access->grp_assignment_read,
	     access->grp_assignment_write, access->grp_scalar_read,
	     access->grp_scalar_write, access->grp_total_scalarization,
	     access->grp_hint, access->grp_covered,
	     access->grp_unscalarizable_region, access->grp_unscalarized_data,
	     access->grp_same_access_path, access->grp_partial_lhs,
	     access->grp_to_be_replaced, access->grp_to_be_debug_replaced);
  else
    fprintf (f, ", write = %d, grp_total_scalarization = %d, "
	     "grp_partial_lhs = %d}\n",
	     access->write, access->grp_total_scalarization,
	     access->grp_partial_lhs);
}
/* Dump a subtree rooted in ACCESS to file F, indent by LEVEL.  */

static void
dump_access_tree_1 (FILE *f, struct access *access, int level)
{
  do
    {
      int i;

      for (i = 0; i < level; i++)
	fputs ("* ", f);

      dump_access (f, access, true);

      if (access->first_child)
	dump_access_tree_1 (f, access->first_child, level + 1);

      access = access->next_sibling;
    }
  while (access);
}
/* Dump all access trees for a variable, given the pointer to the first root in
   ACCESS.  */

static void
dump_access_tree (FILE *f, struct access *access)
{
  for (; access; access = access->next_grp)
    dump_access_tree_1 (f, access, 0);
}
/* Return true iff ACC is non-NULL and has subaccesses.  */

static inline bool
access_has_children_p (struct access *acc)
{
  return acc && acc->first_child;
}
/* Return true iff ACC is (partly) covered by at least one replacement.  */

static bool
access_has_replacements_p (struct access *acc)
{
  struct access *child;
  if (acc->grp_to_be_replaced)
    return true;
  for (child = acc->first_child; child; child = child->next_sibling)
    if (access_has_replacements_p (child))
      return true;
  return false;
}
/* Return a vector of pointers to accesses for the variable given in BASE or
   NULL if there is none.  */

static vec<access_p> *
get_base_access_vector (tree base)
{
  return base_access_vec->get (base);
}
/* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
   in ACCESS.  Return NULL if it cannot be found.  */

static struct access *
find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
			HOST_WIDE_INT size)
{
  while (access && (access->offset != offset || access->size != size))
    {
      struct access *child = access->first_child;

      while (child && (child->offset + child->size <= offset))
	child = child->next_sibling;
      access = child;
    }

  /* Total scalarization does not replace single field structures with their
     single field but rather creates an access for them underneath.  Look for
     it.  */
  if (access)
    while (access->first_child
	   && access->first_child->offset == offset
	   && access->first_child->size == size)
      access = access->first_child;

  return access;
}
/* Return the first group representative for DECL or NULL if none exists.  */

static struct access *
get_first_repr_for_decl (tree base)
{
  vec<access_p> *access_vec;

  access_vec = get_base_access_vector (base);
  if (!access_vec)
    return NULL;

  return (*access_vec)[0];
}
/* Find an access representative for the variable BASE and given OFFSET and
   SIZE.  Requires that access trees have already been built.  Return NULL if
   it cannot be found.  */

static struct access *
get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
				 HOST_WIDE_INT size)
{
  struct access *access;

  access = get_first_repr_for_decl (base);
  while (access && (access->offset + access->size <= offset))
    access = access->next_grp;
  if (!access)
    return NULL;

  return find_access_in_subtree (access, offset, size);
}
/* Add LINK to the linked list of assign links of RACC.  */

static void
add_link_to_rhs (struct access *racc, struct assign_link *link)
{
  gcc_assert (link->racc == racc);

  if (!racc->first_rhs_link)
    {
      gcc_assert (!racc->last_rhs_link);
      racc->first_rhs_link = link;
    }
  else
    racc->last_rhs_link->next_rhs = link;

  racc->last_rhs_link = link;
  link->next_rhs = NULL;
}
/* Add LINK to the linked list of lhs assign links of LACC.  */

static void
add_link_to_lhs (struct access *lacc, struct assign_link *link)
{
  gcc_assert (link->lacc == lacc);

  if (!lacc->first_lhs_link)
    {
      gcc_assert (!lacc->last_lhs_link);
      lacc->first_lhs_link = link;
    }
  else
    lacc->last_lhs_link->next_lhs = link;

  lacc->last_lhs_link = link;
  link->next_lhs = NULL;
}
/* Move all link structures in their linked list in OLD_ACC to the linked list
   in NEW_ACC.  */

static void
relink_to_new_repr (struct access *new_acc, struct access *old_acc)
{
  if (old_acc->first_rhs_link)
    {
      if (new_acc->first_rhs_link)
	{
	  gcc_assert (!new_acc->last_rhs_link->next_rhs);
	  gcc_assert (!old_acc->last_rhs_link
		      || !old_acc->last_rhs_link->next_rhs);

	  new_acc->last_rhs_link->next_rhs = old_acc->first_rhs_link;
	  new_acc->last_rhs_link = old_acc->last_rhs_link;
	}
      else
	{
	  gcc_assert (!new_acc->last_rhs_link);

	  new_acc->first_rhs_link = old_acc->first_rhs_link;
	  new_acc->last_rhs_link = old_acc->last_rhs_link;
	}
      old_acc->first_rhs_link = old_acc->last_rhs_link = NULL;
    }
  else
    gcc_assert (!old_acc->last_rhs_link);

  if (old_acc->first_lhs_link)
    {
      if (new_acc->first_lhs_link)
	{
	  gcc_assert (!new_acc->last_lhs_link->next_lhs);
	  gcc_assert (!old_acc->last_lhs_link
		      || !old_acc->last_lhs_link->next_lhs);

	  new_acc->last_lhs_link->next_lhs = old_acc->first_lhs_link;
	  new_acc->last_lhs_link = old_acc->last_lhs_link;
	}
      else
	{
	  gcc_assert (!new_acc->last_lhs_link);

	  new_acc->first_lhs_link = old_acc->first_lhs_link;
	  new_acc->last_lhs_link = old_acc->last_lhs_link;
	}
      old_acc->first_lhs_link = old_acc->last_lhs_link = NULL;
    }
  else
    gcc_assert (!old_acc->last_lhs_link);
}
/* Add ACCESS to the work queue for propagation of subaccesses from RHS to
   LHS (which is actually a stack).  */

static void
add_access_to_rhs_work_queue (struct access *access)
{
  if (access->first_rhs_link && !access->grp_rhs_queued)
    {
      gcc_assert (!access->next_rhs_queued);
      access->next_rhs_queued = rhs_work_queue_head;
      access->grp_rhs_queued = 1;
      rhs_work_queue_head = access;
    }
}
/* Add ACCESS to the work queue for propagation of subaccesses from LHS to
   RHS (which is actually a stack).  */

static void
add_access_to_lhs_work_queue (struct access *access)
{
  if (access->first_lhs_link && !access->grp_lhs_queued)
    {
      gcc_assert (!access->next_lhs_queued);
      access->next_lhs_queued = lhs_work_queue_head;
      access->grp_lhs_queued = 1;
      lhs_work_queue_head = access;
    }
}
/* Pop an access from the work queue for propagating from RHS to LHS, and
   return it, assuming there is one.  */

static struct access *
pop_access_from_rhs_work_queue (void)
{
  struct access *access = rhs_work_queue_head;

  rhs_work_queue_head = access->next_rhs_queued;
  access->next_rhs_queued = NULL;
  access->grp_rhs_queued = 0;
  return access;
}
/* Pop an access from the work queue for propagating from LHS to RHS, and
   return it, assuming there is one.  */

static struct access *
pop_access_from_lhs_work_queue (void)
{
  struct access *access = lhs_work_queue_head;

  lhs_work_queue_head = access->next_lhs_queued;
  access->next_lhs_queued = NULL;
  access->grp_lhs_queued = 0;
  return access;
}
/* Allocate necessary structures.  */

static void
sra_initialize (void)
{
  candidate_bitmap = BITMAP_ALLOC (NULL);
  candidates = new hash_table<uid_decl_hasher>
    (vec_safe_length (cfun->local_decls) / 2);
  should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  disqualified_constants = BITMAP_ALLOC (NULL);
  gcc_obstack_init (&name_obstack);
  base_access_vec = new hash_map<tree, auto_vec<access_p> >;
  memset (&sra_stats, 0, sizeof (sra_stats));
}
/* Deallocate all general structures.  */

static void
sra_deinitialize (void)
{
  BITMAP_FREE (candidate_bitmap);
  delete candidates;
  candidates = NULL;
  BITMAP_FREE (should_scalarize_away_bitmap);
  BITMAP_FREE (cannot_scalarize_away_bitmap);
  BITMAP_FREE (disqualified_constants);
  access_pool.release ();
  assign_link_pool.release ();
  obstack_free (&name_obstack, NULL);

  delete base_access_vec;
}
/* Return true if DECL is a VAR_DECL in the constant pool, false otherwise.  */

static bool constant_decl_p (tree decl)
{
  return VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl);
}
/* Remove DECL from candidates for SRA and write REASON to the dump file if
   there is one.  */

static void
disqualify_candidate (tree decl, const char *reason)
{
  if (bitmap_clear_bit (candidate_bitmap, DECL_UID (decl)))
    candidates->remove_elt_with_hash (decl, DECL_UID (decl));
  if (constant_decl_p (decl))
    bitmap_set_bit (disqualified_constants, DECL_UID (decl));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "! Disqualifying ");
      print_generic_expr (dump_file, decl);
      fprintf (dump_file, " - %s\n", reason);
    }
}
/* Return true iff the type contains a field or an element which does not allow
   scalarization.  Use VISITED_TYPES to avoid re-checking already checked
   types.  */

static bool
type_internals_preclude_sra_p_1 (tree type, const char **msg,
				 hash_set<tree> *visited_types)
{
  tree fld;
  tree et;

  if (visited_types->contains (type))
    return false;
  visited_types->add (type);

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	if (TREE_CODE (fld) == FIELD_DECL)
	  {
	    if (TREE_CODE (fld) == FUNCTION_DECL)
	      continue;
	    tree ft = TREE_TYPE (fld);

	    if (TREE_THIS_VOLATILE (fld))
	      {
		*msg = "volatile structure field";
		return true;
	      }
	    if (!DECL_FIELD_OFFSET (fld))
	      {
		*msg = "no structure field offset";
		return true;
	      }
	    if (!DECL_SIZE (fld))
	      {
		*msg = "zero structure field size";
		return true;
	      }
	    if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
	      {
		*msg = "structure field offset not fixed";
		return true;
	      }
	    if (!tree_fits_uhwi_p (DECL_SIZE (fld)))
	      {
		*msg = "structure field size not fixed";
		return true;
	      }
	    if (!tree_fits_shwi_p (bit_position (fld)))
	      {
		*msg = "structure field size too big";
		return true;
	      }
	    if (AGGREGATE_TYPE_P (ft)
		&& int_bit_position (fld) % BITS_PER_UNIT != 0)
	      {
		*msg = "structure field is bit field";
		return true;
	      }

	    if (AGGREGATE_TYPE_P (ft)
		&& type_internals_preclude_sra_p_1 (ft, msg, visited_types))
	      return true;
	  }

      return false;

    case ARRAY_TYPE:
      et = TREE_TYPE (type);

      if (TYPE_VOLATILE (et))
	{
	  *msg = "element type is volatile";
	  return true;
	}

      if (AGGREGATE_TYPE_P (et)
	  && type_internals_preclude_sra_p_1 (et, msg, visited_types))
	return true;

      return false;

    default:
      return false;
    }
}

/* Return true iff the type contains a field or an element which does not allow
   scalarization.  */

bool
type_internals_preclude_sra_p (tree type, const char **msg)
{
  hash_set<tree> visited_types;
  return type_internals_preclude_sra_p_1 (type, msg, &visited_types);
}
/* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
   the three fields.  Also add it to the vector of accesses corresponding to
   the base.  Finally, return the new access.  */

static struct access *
create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
{
  struct access *access = access_pool.allocate ();

  memset (access, 0, sizeof (struct access));
  access->base = base;
  access->offset = offset;
  access->size = size;

  base_access_vec->get_or_insert (base).safe_push (access);

  return access;
}

static bool maybe_add_sra_candidate (tree);
/* Create and insert access for EXPR. Return created access, or NULL if it is
   not possible.  Also scan for uses of constant pool as we go along and add
   to candidates.  */

static struct access *
create_access (tree expr, gimple *stmt, bool write)
{
  struct access *access;
  poly_int64 poffset, psize, pmax_size;
  tree base;
  bool reverse, unscalarizable_region = false;

  base = get_ref_base_and_extent (expr, &poffset, &psize, &pmax_size,
				  &reverse);

  /* For constant-pool entries, check we can substitute the constant value.  */
  if (constant_decl_p (base))
    {
      gcc_assert (!bitmap_bit_p (disqualified_constants, DECL_UID (base)));
      if (expr != base
	  && !is_gimple_reg_type (TREE_TYPE (expr))
	  && dump_file && (dump_flags & TDF_DETAILS))
	{
	  /* This occurs in Ada with accesses to ARRAY_RANGE_REFs,
	     and elements of multidimensional arrays (which are
	     multi-element arrays in their own right).  */
	  fprintf (dump_file, "Allowing non-reg-type load of part"
		   " of constant-pool entry: ");
	  print_generic_expr (dump_file, expr);
	}
      maybe_add_sra_candidate (base);
    }

  if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  if (write && TREE_READONLY (base))
    {
      disqualify_candidate (base, "Encountered a store to a read-only decl.");
      return NULL;
    }

  HOST_WIDE_INT offset, size, max_size;
  if (!poffset.is_constant (&offset)
      || !psize.is_constant (&size)
      || !pmax_size.is_constant (&max_size))
    {
      disqualify_candidate (base, "Encountered a polynomial-sized access.");
      return NULL;
    }

  if (size != max_size)
    {
      size = max_size;
      unscalarizable_region = true;
    }
  if (size == 0)
    return NULL;
  if (offset < 0)
    {
      disqualify_candidate (base, "Encountered a negative offset access.");
      return NULL;
    }
  if (size < 0)
    {
      disqualify_candidate (base, "Encountered an unconstrained access.");
      return NULL;
    }
  if (offset + size > tree_to_shwi (DECL_SIZE (base)))
    {
      disqualify_candidate (base, "Encountered an access beyond the base.");
      return NULL;
    }

  access = create_access_1 (base, offset, size);
  access->expr = expr;
  access->type = TREE_TYPE (expr);
  access->write = write;
  access->grp_unscalarizable_region = unscalarizable_region;
  access->stmt = stmt;
  access->reverse = reverse;

  return access;
}
/* Return true iff TYPE is scalarizable - i.e. a RECORD_TYPE or fixed-length
   ARRAY_TYPE with fields that are either of gimple register types (excluding
   bit-fields) or (recursively) scalarizable types.  CONST_DECL must be true if
   we are considering a decl from constant pool.  If it is false, char arrays
   will be refused.  */

static bool
scalarizable_type_p (tree type, bool const_decl)
{
  if (is_gimple_reg_type (type))
    return true;
  if (type_contains_placeholder_p (type))
    return false;

  bool have_predecessor_field = false;
  HOST_WIDE_INT prev_pos = 0;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	if (TREE_CODE (fld) == FIELD_DECL)
	  {
	    tree ft = TREE_TYPE (fld);

	    if (zerop (DECL_SIZE (fld)))
	      continue;

	    HOST_WIDE_INT pos = int_bit_position (fld);
	    if (have_predecessor_field
		&& pos <= prev_pos)
	      return false;

	    prev_pos = pos;
	    have_predecessor_field = true;

	    if (DECL_BIT_FIELD (fld))
	      return false;

	    if (!scalarizable_type_p (ft, const_decl))
	      return false;
	  }

      return true;

    case ARRAY_TYPE:
      {
	HOST_WIDE_INT min_elem_size;
	if (const_decl)
	  min_elem_size = 0;
	else
	  min_elem_size = BITS_PER_UNIT;

	if (TYPE_DOMAIN (type) == NULL_TREE
	    || !tree_fits_shwi_p (TYPE_SIZE (type))
	    || !tree_fits_shwi_p (TYPE_SIZE (TREE_TYPE (type)))
	    || (tree_to_shwi (TYPE_SIZE (TREE_TYPE (type))) <= min_elem_size)
	    || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
	  return false;
	if (tree_to_shwi (TYPE_SIZE (type)) == 0
	    && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE)
	  /* Zero-element array, should not prevent scalarization.  */
	  ;
	else if ((tree_to_shwi (TYPE_SIZE (type)) <= 0)
		 || !tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
	  /* Variable-length array, do not allow scalarization.  */
	  return false;

	tree elem = TREE_TYPE (type);
	if (!scalarizable_type_p (elem, const_decl))
	  return false;
	return true;
      }
    default:
      return false;
    }
}
1056 contains_view_convert_expr_p (const_tree ref
)
1058 while (handled_component_p (ref
))
1060 if (TREE_CODE (ref
) == VIEW_CONVERT_EXPR
)
1062 ref
= TREE_OPERAND (ref
, 0);
/* Return true if REF contains a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
   bit-field field declaration.  If TYPE_CHANGING_P is non-NULL, the bool it
   points to will be set if REF contains any of the above or a MEM_REF
   expression that effectively performs type conversion.  */

static bool
contains_vce_or_bfcref_p (const_tree ref, bool *type_changing_p = NULL)
{
  while (handled_component_p (ref))
    {
      if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
	  || (TREE_CODE (ref) == COMPONENT_REF
	      && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
	{
	  if (type_changing_p)
	    *type_changing_p = true;
	  return true;
	}
      ref = TREE_OPERAND (ref, 0);
    }

  if (!type_changing_p
      || TREE_CODE (ref) != MEM_REF
      || TREE_CODE (TREE_OPERAND (ref, 0)) != ADDR_EXPR)
    return false;

  tree mem = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
  if (TYPE_MAIN_VARIANT (TREE_TYPE (ref))
      != TYPE_MAIN_VARIANT (TREE_TYPE (mem)))
    *type_changing_p = true;

  return false;
}
/* Search the given tree for a declaration by skipping handled components and
   exclude it from the candidates.  */

static void
disqualify_base_of_expr (tree t, const char *reason)
{
  t = get_base_address (t);
  if (t && DECL_P (t))
    disqualify_candidate (t, reason);
}
/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return the created access or NULL if none is
   created.  */

static struct access *
build_access_from_expr_1 (tree expr, gimple *stmt, bool write)
{
  struct access *ret = NULL;
  bool partial_ref;

  if (TREE_CODE (expr) == BIT_FIELD_REF
      || TREE_CODE (expr) == IMAGPART_EXPR
      || TREE_CODE (expr) == REALPART_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      partial_ref = true;
    }
  else
    partial_ref = false;

  if (storage_order_barrier_p (expr))
    {
      disqualify_base_of_expr (expr, "storage order barrier.");
      return NULL;
    }

  /* We need to dive through V_C_Es in order to get the size of its parameter
     and not the result type.  Ada produces such statements.  We are also
     capable of handling the topmost V_C_E but not any of those buried in other
     handled components.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    expr = TREE_OPERAND (expr, 0);

  if (contains_view_convert_expr_p (expr))
    {
      disqualify_base_of_expr (expr, "V_C_E under a different handled "
			       "component.");
      return NULL;
    }
  if (TREE_THIS_VOLATILE (expr))
    {
      disqualify_base_of_expr (expr, "part of a volatile reference.");
      return NULL;
    }

  switch (TREE_CODE (expr))
    {
    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR)
	return NULL;
      /* FALLTHRU */
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      ret = create_access (expr, stmt, write);
      break;

    default:
      break;
    }

  if (write && partial_ref && ret)
    ret->grp_partial_lhs = 1;

  return ret;
}
/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return true if any access has been inserted.
   STMT must be the statement from which the expression is taken, WRITE must be
   true if the expression is a store and false otherwise.  */

static bool
build_access_from_expr (tree expr, gimple *stmt, bool write)
{
  struct access *access;

  access = build_access_from_expr_1 (expr, stmt, write);
  if (access)
    {
      /* This means the aggregate is accessed as a whole in a way other than an
	 assign statement and thus cannot be removed even if we had a scalar
	 replacement for everything.  */
      if (cannot_scalarize_away_bitmap)
	bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
      return true;
    }
  return false;
}
/* Return the single non-EH successor edge of BB or NULL if there is none or
   more than one.  */

static edge
single_non_eh_succ (basic_block bb)
{
  edge e, res = NULL;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & EDGE_EH))
      {
	if (res)
	  return NULL;
	res = e;
      }

  return res;
}
/* Disqualify LHS and RHS for scalarization if STMT has to terminate its BB and
   there is no alternative spot where to put statements SRA might need to
   generate after it.  The spot we are looking for is an edge leading to a
   single non-EH successor, if it exists and is indeed single.  RHS may be
   NULL, in that case ignore it.  */

static bool
disqualify_if_bad_bb_terminating_stmt (gimple *stmt, tree lhs, tree rhs)
{
  if (stmt_ends_bb_p (stmt))
    {
      if (single_non_eh_succ (gimple_bb (stmt)))
	return false;

      disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
      if (rhs)
	disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
      return true;
    }
  return false;
}
1252 comes_initialized_p (tree base
)
1254 return TREE_CODE (base
) == PARM_DECL
|| constant_decl_p (base
);
/* Scan expressions occurring in STMT, create access structures for all accesses
   to candidates for scalarization and remove those candidates which occur in
   statements or expressions that prevent them from being split apart.  Return
   true if any access has been inserted.  */

static bool
build_accesses_from_assign (gimple *stmt)
{
  tree lhs, rhs;
  struct access *lacc, *racc;

  if (!gimple_assign_single_p (stmt)
      /* Scope clobbers don't influence scalarization.  */
      || gimple_clobber_p (stmt))
    return false;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);

  if (disqualify_if_bad_bb_terminating_stmt (stmt, lhs, rhs))
    return false;

  racc = build_access_from_expr_1 (rhs, stmt, false);
  lacc = build_access_from_expr_1 (lhs, stmt, true);

  if (lacc)
    {
      lacc->grp_assignment_write = 1;
      if (storage_order_barrier_p (rhs))
	lacc->grp_unscalarizable_region = 1;

      if (should_scalarize_away_bitmap && !is_gimple_reg_type (lacc->type))
	{
	  bool type_changing_p = false;
	  contains_vce_or_bfcref_p (lhs, &type_changing_p);
	  if (type_changing_p)
	    bitmap_set_bit (cannot_scalarize_away_bitmap,
			    DECL_UID (lacc->base));
	}
    }

  if (racc)
    {
      racc->grp_assignment_read = 1;
      if (should_scalarize_away_bitmap && !is_gimple_reg_type (racc->type))
	{
	  bool type_changing_p = false;
	  contains_vce_or_bfcref_p (rhs, &type_changing_p);

	  if (type_changing_p || gimple_has_volatile_ops (stmt))
	    bitmap_set_bit (cannot_scalarize_away_bitmap,
			    DECL_UID (racc->base));
	  else
	    bitmap_set_bit (should_scalarize_away_bitmap,
			    DECL_UID (racc->base));
	}
      if (storage_order_barrier_p (lhs))
	racc->grp_unscalarizable_region = 1;
    }

  if (lacc && racc
      && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && !lacc->grp_unscalarizable_region
      && !racc->grp_unscalarizable_region
      && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
      && lacc->size == racc->size
      && useless_type_conversion_p (lacc->type, racc->type))
    {
      struct assign_link *link;

      link = assign_link_pool.allocate ();
      memset (link, 0, sizeof (struct assign_link));

      link->lacc = lacc;
      link->racc = racc;
      add_link_to_rhs (racc, link);
      add_link_to_lhs (lacc, link);
      add_access_to_rhs_work_queue (racc);
      add_access_to_lhs_work_queue (lacc);

      /* Let's delay marking the areas as written until propagation of accesses
	 across link, unless the nature of rhs tells us that its data comes
	 from elsewhere.  */
      if (!comes_initialized_p (racc->base))
	lacc->write = false;
    }

  return lacc || racc;
}
/* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
   GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */

static bool
asm_visit_addr (gimple *, tree op, tree, void *)
{
  op = get_base_address (op);
  if (op
      && DECL_P (op))
    disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");

  return false;
}
/* Scan function and look for interesting expressions and create access
   structures for them.  Return true iff any access is created.  */

static bool
scan_function (void)
{
  basic_block bb;
  bool ret = false;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  tree t;
	  unsigned i;

	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_RETURN:
	      t = gimple_return_retval (as_a <greturn *> (stmt));
	      if (t != NULL_TREE)
		ret |= build_access_from_expr (t, stmt, false);
	      break;

	    case GIMPLE_ASSIGN:
	      ret |= build_accesses_from_assign (stmt);
	      break;

	    case GIMPLE_CALL:
	      for (i = 0; i < gimple_call_num_args (stmt); i++)
		ret |= build_access_from_expr (gimple_call_arg (stmt, i),
					       stmt, false);

	      t = gimple_call_lhs (stmt);
	      if (t && !disqualify_if_bad_bb_terminating_stmt (stmt, t, NULL))
		{
		  /* If the STMT is a call to DEFERRED_INIT, avoid setting
		     cannot_scalarize_away_bitmap.  */
		  if (gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
		    ret |= !!build_access_from_expr_1 (t, stmt, true);
		  else
		    ret |= build_access_from_expr (t, stmt, true);
		}
	      break;

	    case GIMPLE_ASM:
	      {
		gasm *asm_stmt = as_a <gasm *> (stmt);
		walk_stmt_load_store_addr_ops (asm_stmt, NULL, NULL, NULL,
					       asm_visit_addr);
		for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
		  {
		    t = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
		    ret |= build_access_from_expr (t, asm_stmt, false);
		  }
		for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
		  {
		    t = TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
		    ret |= build_access_from_expr (t, asm_stmt, true);
		  }
	      }
	      break;

	    default:
	      break;
	    }
	}
    }

  return ret;
}
/* Helper of QSORT function. There are pointers to accesses in the array.  An
   access is considered smaller than another if it has smaller offset or if the
   offsets are the same but its size is bigger.  */

static int
compare_access_positions (const void *a, const void *b)
{
  const access_p *fp1 = (const access_p *) a;
  const access_p *fp2 = (const access_p *) b;
  const access_p f1 = *fp1;
  const access_p f2 = *fp2;

  if (f1->offset != f2->offset)
    return f1->offset < f2->offset ? -1 : 1;

  if (f1->size == f2->size)
    {
      if (f1->type == f2->type)
	return 0;
      /* Put any non-aggregate type before any aggregate type.  */
      else if (!is_gimple_reg_type (f1->type)
	       && is_gimple_reg_type (f2->type))
	return 1;
      else if (is_gimple_reg_type (f1->type)
	       && !is_gimple_reg_type (f2->type))
	return -1;
      /* Put any complex or vector type before any other scalar type.  */
      else if (TREE_CODE (f1->type) != COMPLEX_TYPE
	       && TREE_CODE (f1->type) != VECTOR_TYPE
	       && (TREE_CODE (f2->type) == COMPLEX_TYPE
		   || TREE_CODE (f2->type) == VECTOR_TYPE))
	return 1;
      else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
		|| TREE_CODE (f1->type) == VECTOR_TYPE)
	       && TREE_CODE (f2->type) != COMPLEX_TYPE
	       && TREE_CODE (f2->type) != VECTOR_TYPE)
	return -1;
      /* Put any integral type before any non-integral type.  When splicing, we
	 make sure that those with insufficient precision and occupying the
	 same space are not scalarized.  */
      else if (INTEGRAL_TYPE_P (f1->type)
	       && !INTEGRAL_TYPE_P (f2->type))
	return -1;
      else if (!INTEGRAL_TYPE_P (f1->type)
	       && INTEGRAL_TYPE_P (f2->type))
	return 1;
      /* Put the integral type with the bigger precision first.  */
      else if (INTEGRAL_TYPE_P (f1->type)
	       && INTEGRAL_TYPE_P (f2->type)
	       && (TYPE_PRECISION (f2->type) != TYPE_PRECISION (f1->type)))
	return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
      /* Stabilize the sort.  */
      return TYPE_UID (f1->type) - TYPE_UID (f2->type);
    }

  /* We want the bigger accesses first, thus the opposite operator in the next
     line:  */
  return f1->size > f2->size ? -1 : 1;
}
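/* For example, among accesses with equal offset and size, an access through
   the scalar int field of a single-field structure sorts before an access to
   the structure itself, so after sorting it is the scalar access that
   sort_and_splice_var_accesses below picks up first and makes the group
   representative.  */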
/* Append a name of the declaration to the name obstack.  A helper function for
   make_fancy_name.  */

static void
make_fancy_decl_name (tree decl)
{
  char buffer[32];

  tree name = DECL_NAME (decl);
  if (name)
    obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
		  IDENTIFIER_LENGTH (name));
  else
    {
      sprintf (buffer, "D%u", DECL_UID (decl));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
    }
}
/* Helper for make_fancy_name.  */

static void
make_fancy_name_1 (tree expr)
{
  char buffer[32];
  tree index;

  if (DECL_P (expr))
    {
      make_fancy_decl_name (expr);
      return;
    }

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      make_fancy_decl_name (TREE_OPERAND (expr, 1));
      break;

    case ARRAY_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      /* Arrays with only one element may not have a constant as their
	 index.  */
      index = TREE_OPERAND (expr, 1);
      if (TREE_CODE (index) != INTEGER_CST)
	break;
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
      break;

    case ADDR_EXPR:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      break;

    case MEM_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      if (!integer_zerop (TREE_OPERAND (expr, 1)))
	{
	  obstack_1grow (&name_obstack, '$');
	  sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
		   TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
	  obstack_grow (&name_obstack, buffer, strlen (buffer));
	}
      break;

    case BIT_FIELD_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      gcc_unreachable (); 	/* we treat these as scalars.  */
      break;
    default:
      break;
    }
}
/* Create a human readable name for replacement variable of ACCESS.  */

static char *
make_fancy_name (tree expr)
{
  make_fancy_name_1 (expr);
  obstack_1grow (&name_obstack, '\0');
  return XOBFINISH (&name_obstack, char *);
}
/* Construct a MEM_REF that would reference a part of aggregate BASE of type
   EXP_TYPE at the given OFFSET and with storage order REVERSE.  If BASE is
   something for which get_addr_base_and_unit_offset returns NULL, gsi must
   be non-NULL and is used to insert new statements either before or below
   the current one as specified by INSERT_AFTER.  This function is not capable
   of handling bitfields.  */
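/* As a rough example of the kind of reference built below: for BASE being a
   declaration `s' of some struct type, OFFSET equal to 32 bits and EXP_TYPE
   being int, the result would typically be equivalent to

     MEM[(int *)&s + 4B]

   i.e. a MEM_REF of the requested type at the corresponding byte offset.  */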
static tree
build_ref_for_offset (location_t loc, tree base, poly_int64 offset,
		      bool reverse, tree exp_type, gimple_stmt_iterator *gsi,
		      bool insert_after)
{
  tree prev_base = base;
  tree off;
  tree mem_ref;
  poly_int64 base_offset;
  unsigned HOST_WIDE_INT misalign;
  unsigned int align;

  /* Preserve address-space information.  */
  addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (base));
  if (as != TYPE_ADDR_SPACE (exp_type))
    exp_type = build_qualified_type (exp_type,
				     TYPE_QUALS (exp_type)
				     | ENCODE_QUAL_ADDR_SPACE (as));

  poly_int64 byte_offset = exact_div (offset, BITS_PER_UNIT);
  get_object_alignment_1 (base, &align, &misalign);
  base = get_addr_base_and_unit_offset (base, &base_offset);

  /* get_addr_base_and_unit_offset returns NULL for references with a variable
     offset such as array[var_index].  */
  if (!base)
    {
      gassign *stmt;
      tree tmp, addr;

      gcc_checking_assert (gsi);
      tmp = make_ssa_name (build_pointer_type (TREE_TYPE (prev_base)));
      addr = build_fold_addr_expr (unshare_expr (prev_base));
      STRIP_USELESS_TYPE_CONVERSION (addr);
      stmt = gimple_build_assign (tmp, addr);
      gimple_set_location (stmt, loc);
      if (insert_after)
	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

      off = build_int_cst (reference_alias_ptr_type (prev_base), byte_offset);
      base = tmp;
    }
  else if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
			   base_offset + byte_offset);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (prev_base),
			   base_offset + byte_offset);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  unsigned int align_bound = known_alignment (misalign + offset);
  if (align_bound != 0)
    align = MIN (align, align_bound);
  if (align != TYPE_ALIGN (exp_type))
    exp_type = build_aligned_type (exp_type, align);

  mem_ref = fold_build2_loc (loc, MEM_REF, exp_type, base, off);
  REF_REVERSE_STORAGE_ORDER (mem_ref) = reverse;
  if (TREE_THIS_VOLATILE (prev_base))
    TREE_THIS_VOLATILE (mem_ref) = 1;
  if (TREE_SIDE_EFFECTS (prev_base))
    TREE_SIDE_EFFECTS (mem_ref) = 1;
  return mem_ref;
}
/* Construct and return a memory reference that is equal to a portion of
   MODEL->expr but is based on BASE.  If this cannot be done, return NULL.  */

static tree
build_reconstructed_reference (location_t, tree base, struct access *model)
{
  tree expr = model->expr, prev_expr = NULL;
  while (!types_compatible_p (TREE_TYPE (expr), TREE_TYPE (base)))
    {
      if (!handled_component_p (expr))
	return NULL_TREE;
      prev_expr = expr;
      expr = TREE_OPERAND (expr, 0);
    }

  /* Guard against broken VIEW_CONVERT_EXPRs...  */
  if (!prev_expr)
    return NULL_TREE;

  TREE_OPERAND (prev_expr, 0) = base;
  tree ref = unshare_expr (model->expr);
  TREE_OPERAND (prev_expr, 0) = expr;
  return ref;
}
/* Construct a memory reference to a part of an aggregate BASE at the given
   OFFSET and of the same type as MODEL.  In case this is a reference to a
   bit-field, the function will replicate the last component_ref of model's
   expr to access it.  GSI and INSERT_AFTER have the same meaning as in
   build_ref_for_offset.  */

static tree
build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
		     struct access *model, gimple_stmt_iterator *gsi,
		     bool insert_after)
{
  gcc_assert (offset >= 0);
  if (TREE_CODE (model->expr) == COMPONENT_REF
      && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
    {
      /* This access represents a bit-field.  */
      tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);

      offset -= int_bit_position (fld);
      exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
      t = build_ref_for_offset (loc, base, offset, model->reverse, exp_type,
				gsi, insert_after);
      /* The flag will be set on the record type.  */
      REF_REVERSE_STORAGE_ORDER (t) = 0;
      return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
			      NULL_TREE);
    }
  else
    {
      tree res;
      if (model->grp_same_access_path
	  && !TREE_THIS_VOLATILE (base)
	  && (TYPE_ADDR_SPACE (TREE_TYPE (base))
	      == TYPE_ADDR_SPACE (TREE_TYPE (model->expr)))
	  && offset <= model->offset
	  /* build_reconstructed_reference can still fail if we have already
	     massaged BASE because of another type incompatibility.  */
	  && (res = build_reconstructed_reference (loc, base, model)))
	return res;
      else
	return build_ref_for_offset (loc, base, offset, model->reverse,
				     model->type, gsi, insert_after);
    }
}
/* Attempt to build a memory reference that we could put into a gimple
   debug_bind statement.  Similar to build_ref_for_model but punts if it has to
   create statements and returns NULL instead.  This function also ignores
   alignment issues and so its results should never end up in non-debug
   statements.  */

static tree
build_debug_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
			   struct access *model)
{
  poly_int64 base_offset;
  tree off;

  if (TREE_CODE (model->expr) == COMPONENT_REF
      && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
    return NULL_TREE;

  base = get_addr_base_and_unit_offset (base, &base_offset);
  if (!base)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
			   base_offset + offset / BITS_PER_UNIT);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (base),
			   base_offset + offset / BITS_PER_UNIT);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  return fold_build2_loc (loc, MEM_REF, model->type, base, off);
}
/* Construct a memory reference consisting of component_refs and array_refs to
   a part of an aggregate *RES (which is of type TYPE).  The requested part
   should have type EXP_TYPE at the given OFFSET.  This function might not
   succeed, it returns true when it does and only then *RES points to something
   meaningful.  This function should be used only to build expressions that we
   might need to present to user (e.g. in warnings).  In all other situations,
   build_ref_for_model or build_ref_for_offset should be used instead.  */

static bool
build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
				    tree exp_type)
{
  while (1)
    {
      tree fld;
      tree tr_size, index, minidx;
      HOST_WIDE_INT el_size;

      if (offset == 0 && exp_type
	  && types_compatible_p (exp_type, type))
	return true;

      switch (TREE_CODE (type))
	{
	case UNION_TYPE:
	case QUAL_UNION_TYPE:
	case RECORD_TYPE:
	  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	    {
	      HOST_WIDE_INT pos, size;
	      tree tr_pos, expr, *expr_ptr;

	      if (TREE_CODE (fld) != FIELD_DECL)
		continue;

	      tr_pos = bit_position (fld);
	      if (!tr_pos || !tree_fits_uhwi_p (tr_pos))
		continue;
	      pos = tree_to_uhwi (tr_pos);
	      gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
	      tr_size = DECL_SIZE (fld);
	      if (!tr_size || !tree_fits_uhwi_p (tr_size))
		continue;
	      size = tree_to_uhwi (tr_size);
	      if (size == 0)
		{
		  if (pos != offset)
		    continue;
		}
	      else if (pos > offset || (pos + size) <= offset)
		continue;

	      expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
			     NULL_TREE);
	      expr_ptr = &expr;
	      if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
						      offset - pos, exp_type))
		{
		  *res = expr;
		  return true;
		}
	    }
	  return false;

	case ARRAY_TYPE:
	  tr_size = TYPE_SIZE (TREE_TYPE (type));
	  if (!tr_size || !tree_fits_uhwi_p (tr_size))
	    return false;
	  el_size = tree_to_uhwi (tr_size);

	  minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	  if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
	    return false;
	  index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
	  if (!integer_zerop (minidx))
	    index = int_const_binop (PLUS_EXPR, index, minidx);
	  *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
			 NULL_TREE, NULL_TREE);
	  offset = offset % el_size;
	  type = TREE_TYPE (type);
	  break;

	default:
	  if (offset != 0)
	    return false;

	  if (exp_type)
	    return false;
	  else
	    return true;
	}
    }
}
/* Print message to dump file why a variable was rejected. */

static void
reject (tree var, const char *msg)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
      print_generic_expr (dump_file, var);
      fprintf (dump_file, "\n");
    }
}
/* Return true if VAR is a candidate for SRA.  */

static bool
maybe_add_sra_candidate (tree var)
{
  tree type = TREE_TYPE (var);
  const char *msg;
  tree_node **slot;

  if (!AGGREGATE_TYPE_P (type))
    {
      reject (var, "not aggregate");
      return false;
    }
  /* Allow constant-pool entries that "need to live in memory".  */
  if (needs_to_live_in_memory (var) && !constant_decl_p (var))
    {
      reject (var, "needs to live in memory");
      return false;
    }
  if (TREE_THIS_VOLATILE (var))
    {
      reject (var, "is volatile");
      return false;
    }
  if (!COMPLETE_TYPE_P (type))
    {
      reject (var, "has incomplete type");
      return false;
    }
  if (!tree_fits_shwi_p (TYPE_SIZE (type)))
    {
      reject (var, "type size not fixed");
      return false;
    }
  if (tree_to_shwi (TYPE_SIZE (type)) == 0)
    {
      reject (var, "type size is zero");
      return false;
    }
  if (type_internals_preclude_sra_p (type, &msg))
    {
      reject (var, msg);
      return false;
    }
  if (/* Fix for PR 41089.  tree-stdarg.cc needs to have va_lists intact but
	 we also want to schedule it rather late.  Thus we ignore it in
	 the early pass. */
      (sra_mode == SRA_MODE_EARLY_INTRA
       && is_va_list_type (type)))
    {
      reject (var, "is va_list");
      return false;
    }

  bitmap_set_bit (candidate_bitmap, DECL_UID (var));
  slot = candidates->find_slot_with_hash (var, DECL_UID (var), INSERT);
  *slot = var;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
      print_generic_expr (dump_file, var);
      fprintf (dump_file, "\n");
    }

  return true;
}
/* The very first phase of intraprocedural SRA.  It marks in candidate_bitmap
   those with type which is suitable for scalarization.  */

static bool
find_var_candidates (void)
{
  tree var, parm;
  unsigned int i;
  bool ret = false;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    ret |= maybe_add_sra_candidate (parm);

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (!VAR_P (var))
	continue;

      ret |= maybe_add_sra_candidate (var);
    }

  return ret;
}
/* Return true if EXP is a reference chain of COMPONENT_REFs and ARRAY_REFs
   ending either with a DECL or a MEM_REF with zero offset.  */

static bool
path_comparable_for_same_access (tree expr)
{
  while (handled_component_p (expr))
    {
      if (TREE_CODE (expr) == ARRAY_REF)
	{
	  /* SSA name indices can occur here too when the array is of size one.
	     But we cannot just re-use array_refs with SSA names elsewhere in
	     the function, so disallow non-constant indices.  TODO: Remove this
	     limitation after teaching build_reconstructed_reference to replace
	     the index with the index type lower bound.  */
	  if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST)
	    return false;
	}
      expr = TREE_OPERAND (expr, 0);
    }

  if (TREE_CODE (expr) == MEM_REF)
    {
      if (!zerop (TREE_OPERAND (expr, 1)))
	return false;
    }
  else
    gcc_assert (DECL_P (expr));

  return true;
}
/* Assuming that EXP1 consists of only COMPONENT_REFs and ARRAY_REFs, return
   true if the chain of these handled components are exactly the same as EXP2
   and the expression under them is the same DECL or an equivalent MEM_REF.
   The reference picked by compare_access_positions must go to EXP1.  */

static bool
same_access_path_p (tree exp1, tree exp2)
{
  if (TREE_CODE (exp1) != TREE_CODE (exp2))
    {
      /* Special case single-field structures loaded sometimes as the field
	 and sometimes as the structure.  If the field is of a scalar type,
	 compare_access_positions will put it into exp1.

	 TODO: The gimple register type condition can be removed if we teach
	 compare_access_positions to put inner types first.  */
      if (is_gimple_reg_type (TREE_TYPE (exp1))
	  && TREE_CODE (exp1) == COMPONENT_REF
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (exp1, 0)))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (exp2))))
	exp1 = TREE_OPERAND (exp1, 0);
      else
	return false;
    }

  if (!operand_equal_p (exp1, exp2, OEP_ADDRESS_OF))
    return false;

  return true;
}
/* Sort all accesses for the given variable, check for partial overlaps and
   return NULL if there are any.  If there are none, pick a representative for
   each combination of offset and size and create a linked list out of them.
   Return the pointer to the first representative and make sure it is the first
   one in the vector of accesses.  */

static struct access *
sort_and_splice_var_accesses (tree var)
{
  int i, j, access_count;
  struct access *res, **prev_acc_ptr = &res;
  vec<access_p> *access_vec;
  bool first = true;
  HOST_WIDE_INT low = -1, high = 0;

  access_vec = get_base_access_vector (var);
  if (!access_vec)
    return NULL;
  access_count = access_vec->length ();

  /* Sort by <OFFSET, SIZE>.  */
  access_vec->qsort (compare_access_positions);

  i = 0;
  while (i < access_count)
    {
      struct access *access = (*access_vec)[i];
      bool grp_write = access->write;
      bool grp_read = !access->write;
      bool grp_scalar_write = access->write
	&& is_gimple_reg_type (access->type);
      bool grp_scalar_read = !access->write
	&& is_gimple_reg_type (access->type);
      bool grp_assignment_read = access->grp_assignment_read;
      bool grp_assignment_write = access->grp_assignment_write;
      bool multiple_scalar_reads = false;
      bool grp_partial_lhs = access->grp_partial_lhs;
      bool first_scalar = is_gimple_reg_type (access->type);
      bool unscalarizable_region = access->grp_unscalarizable_region;
      bool grp_same_access_path = true;
      bool bf_non_full_precision
	= (INTEGRAL_TYPE_P (access->type)
	   && TYPE_PRECISION (access->type) != access->size
	   && TREE_CODE (access->expr) == COMPONENT_REF
	   && DECL_BIT_FIELD (TREE_OPERAND (access->expr, 1)));

      if (first || access->offset >= high)
	{
	  first = false;
	  low = access->offset;
	  high = access->offset + access->size;
	}
      else if (access->offset > low && access->offset + access->size > high)
	return NULL;
      else
	gcc_assert (access->offset >= low
		    && access->offset + access->size <= high);

      grp_same_access_path = path_comparable_for_same_access (access->expr);

      j = i + 1;
      while (j < access_count)
	{
	  struct access *ac2 = (*access_vec)[j];
	  if (ac2->offset != access->offset || ac2->size != access->size)
	    break;
	  if (ac2->write)
	    {
	      grp_write = true;
	      grp_scalar_write = (grp_scalar_write
				  || is_gimple_reg_type (ac2->type));
	    }
	  else
	    {
	      grp_read = true;
	      if (is_gimple_reg_type (ac2->type))
		{
		  if (grp_scalar_read)
		    multiple_scalar_reads = true;
		  else
		    grp_scalar_read = true;
		}
	    }
	  grp_assignment_read |= ac2->grp_assignment_read;
	  grp_assignment_write |= ac2->grp_assignment_write;
	  grp_partial_lhs |= ac2->grp_partial_lhs;
	  unscalarizable_region |= ac2->grp_unscalarizable_region;
	  relink_to_new_repr (access, ac2);

	  /* If there are both aggregate-type and scalar-type accesses with
	     this combination of size and offset, the comparison function
	     should have put the scalars first.  */
	  gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
	  /* It also prefers integral types to non-integral.  However, when the
	     precision of the selected type does not span the entire area and
	     should also be used for a non-integer (i.e. float), we must not
	     let that happen.  Normally analyze_access_subtree expands the type
	     to cover the entire area but for bit-fields it doesn't.  */
	  if (bf_non_full_precision && !INTEGRAL_TYPE_P (ac2->type))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "Cannot scalarize the following access "
			   "because insufficient precision integer type was "
			   "selected.\n  ");
		  dump_access (dump_file, access, false);
		}
	      unscalarizable_region = true;
	    }

	  if (grp_same_access_path
	      && !same_access_path_p (access->expr, ac2->expr))
	    grp_same_access_path = false;

	  ac2->group_representative = access;
	  j++;
	}

      i = j;

      access->group_representative = access;
      access->grp_write = grp_write;
      access->grp_read = grp_read;
      access->grp_scalar_read = grp_scalar_read;
      access->grp_scalar_write = grp_scalar_write;
      access->grp_assignment_read = grp_assignment_read;
      access->grp_assignment_write = grp_assignment_write;
      access->grp_hint = multiple_scalar_reads && !constant_decl_p (var);
      access->grp_partial_lhs = grp_partial_lhs;
      access->grp_unscalarizable_region = unscalarizable_region;
      access->grp_same_access_path = grp_same_access_path;

      *prev_acc_ptr = access;
      prev_acc_ptr = &access->next_grp;
    }

  gcc_assert (res == (*access_vec)[0]);
  return res;
}
/* Create a variable for the given ACCESS which determines the type, name and a
   few other properties.  Return the variable declaration and store it also to
   ACCESS->replacement.  REG_TYPE is used when creating a declaration to base a
   default-definition SSA name on in order to facilitate an uninitialized
   warning.  It is used instead of the actual ACCESS type if that is not of a
   gimple register type.  */

static tree
create_access_replacement (struct access *access, tree reg_type = NULL_TREE)
{
2188 tree type
= access
->type
;
2189 if (reg_type
&& !is_gimple_reg_type (type
))
2192 if (access
->grp_to_be_debug_replaced
)
2194 repl
= create_tmp_var_raw (access
->type
);
2195 DECL_CONTEXT (repl
) = current_function_decl
;
2198 /* Drop any special alignment on the type if it's not on the main
2199 variant. This avoids issues with weirdo ABIs like AAPCS. */
2200 repl
= create_tmp_var (build_qualified_type (TYPE_MAIN_VARIANT (type
),
2201 TYPE_QUALS (type
)), "SR");
2202 if (access
->grp_partial_lhs
2203 && is_gimple_reg_type (type
))
2204 DECL_NOT_GIMPLE_REG_P (repl
) = 1;
2206 DECL_SOURCE_LOCATION (repl
) = DECL_SOURCE_LOCATION (access
->base
);
2207 DECL_ARTIFICIAL (repl
) = 1;
2208 DECL_IGNORED_P (repl
) = DECL_IGNORED_P (access
->base
);
2210 if (DECL_NAME (access
->base
)
2211 && !DECL_IGNORED_P (access
->base
)
2212 && !DECL_ARTIFICIAL (access
->base
))
2214 char *pretty_name
= make_fancy_name (access
->expr
);
2215 tree debug_expr
= unshare_expr_without_location (access
->expr
), d
;
2218 DECL_NAME (repl
) = get_identifier (pretty_name
);
2219 DECL_NAMELESS (repl
) = 1;
2220 obstack_free (&name_obstack
, pretty_name
);
2222 /* Get rid of any SSA_NAMEs embedded in debug_expr,
2223 as DECL_DEBUG_EXPR isn't considered when looking for still
2224 used SSA_NAMEs and thus they could be freed. All debug info
2225 generation cares is whether something is constant or variable
2226 and that get_ref_base_and_extent works properly on the
2227 expression. It cannot handle accesses at a non-constant offset
2228 though, so just give up in those cases. */
2229 for (d
= debug_expr
;
2230 !fail
&& (handled_component_p (d
) || TREE_CODE (d
) == MEM_REF
);
2231 d
= TREE_OPERAND (d
, 0))
2232 switch (TREE_CODE (d
))
2235 case ARRAY_RANGE_REF
:
2236 if (TREE_OPERAND (d
, 1)
2237 && TREE_CODE (TREE_OPERAND (d
, 1)) != INTEGER_CST
)
2239 if (TREE_OPERAND (d
, 3)
2240 && TREE_CODE (TREE_OPERAND (d
, 3)) != INTEGER_CST
)
2244 if (TREE_OPERAND (d
, 2)
2245 && TREE_CODE (TREE_OPERAND (d
, 2)) != INTEGER_CST
)
2249 if (TREE_CODE (TREE_OPERAND (d
, 0)) != ADDR_EXPR
)
2252 d
= TREE_OPERAND (d
, 0);
2259 SET_DECL_DEBUG_EXPR (repl
, debug_expr
);
2260 DECL_HAS_DEBUG_EXPR_P (repl
) = 1;
2262 if (access
->grp_no_warning
)
2263 suppress_warning (repl
/* Be more selective! */);
2265 copy_warning (repl
, access
->base
);
2268 suppress_warning (repl
/* Be more selective! */);
2272 if (access
->grp_to_be_debug_replaced
)
2274 fprintf (dump_file
, "Created a debug-only replacement for ");
2275 print_generic_expr (dump_file
, access
->base
);
2276 fprintf (dump_file
, " offset: %u, size: %u\n",
2277 (unsigned) access
->offset
, (unsigned) access
->size
);
2281 fprintf (dump_file
, "Created a replacement for ");
2282 print_generic_expr (dump_file
, access
->base
);
2283 fprintf (dump_file
, " offset: %u, size: %u: ",
2284 (unsigned) access
->offset
, (unsigned) access
->size
);
2285 print_generic_expr (dump_file
, repl
, TDF_UID
);
2286 fprintf (dump_file
, "\n");
2289 sra_stats
.replacements
++;
/* Return the scalar replacement of ACCESS, which must exist.  */

static inline tree
get_access_replacement (struct access *access)
{
  gcc_checking_assert (access->replacement_decl);
  return access->replacement_decl;
}
/* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
   linked list along the way.  Stop when *ACCESS is NULL or the access pointed
   to by it is not "within" the root.  Return false iff some accesses
   partially overlap.  */

static bool
build_access_subtree (struct access **access)
{
  struct access *root = *access, *last_child = NULL;
  HOST_WIDE_INT limit = root->offset + root->size;

  *access = (*access)->next_grp;
  while (*access && (*access)->offset + (*access)->size <= limit)
    {
      if (!last_child)
        root->first_child = *access;
      else
        last_child->next_sibling = *access;
      last_child = *access;
      (*access)->parent = root;
      (*access)->grp_write |= root->grp_write;

      if (!build_access_subtree (access))
        return false;
    }

  if (*access && (*access)->offset < limit)
    return false;

  return true;
}
/* Build a tree of access representatives, ACCESS is the pointer to the first
   one, others are linked in a list by the next_grp field.  Return false iff
   some accesses partially overlap.  */

static bool
build_access_trees (struct access *access)
{
  while (access)
    {
      struct access *root = access;

      if (!build_access_subtree (&access))
        return false;
      root->next_grp = access;
    }
  return true;
}
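/* Illustrative note (not part of the pass logic): for an aggregate such as

     struct pt { int x; int y; };
     struct box { struct pt lo; struct pt hi; };

   representatives for accesses to b.lo, b.lo.x and b.hi.y of a variable b of
   type struct box get linked so that the access for b.lo is the parent of the
   one for b.lo.x, while the access for b.hi.y heads a separate tree in the
   same forest; siblings are ordered by increasing offset.  This only sketches
   the shape the two functions above produce, it adds no new rules.  */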
/* Traverse the access forest where ROOT is the first root and verify that
   various important invariants hold true.  */
2358 verify_sra_access_forest (struct access
*root
)
2360 struct access
*access
= root
;
2361 tree first_base
= root
->base
;
2362 gcc_assert (DECL_P (first_base
));
2365 gcc_assert (access
->base
== first_base
);
2367 gcc_assert (access
->offset
>= access
->parent
->offset
2368 && access
->size
<= access
->parent
->size
);
2369 if (access
->next_sibling
)
2370 gcc_assert (access
->next_sibling
->offset
2371 >= access
->offset
+ access
->size
);
2373 poly_int64 poffset
, psize
, pmax_size
;
2375 tree base
= get_ref_base_and_extent (access
->expr
, &poffset
, &psize
,
2376 &pmax_size
, &reverse
);
2377 HOST_WIDE_INT offset
, size
, max_size
;
2378 if (!poffset
.is_constant (&offset
)
2379 || !psize
.is_constant (&size
)
2380 || !pmax_size
.is_constant (&max_size
))
2382 gcc_assert (base
== first_base
);
2383 gcc_assert (offset
== access
->offset
);
2384 gcc_assert (access
->grp_unscalarizable_region
2385 || access
->grp_total_scalarization
2386 || size
== max_size
);
2387 gcc_assert (access
->grp_unscalarizable_region
2388 || !is_gimple_reg_type (access
->type
)
2389 || size
== access
->size
);
2390 gcc_assert (reverse
== access
->reverse
);
2392 if (access
->first_child
)
2394 gcc_assert (access
->first_child
->parent
== access
);
2395 access
= access
->first_child
;
2397 else if (access
->next_sibling
)
2399 gcc_assert (access
->next_sibling
->parent
== access
->parent
);
2400 access
= access
->next_sibling
;
2404 while (access
->parent
&& !access
->next_sibling
)
2405 access
= access
->parent
;
2406 if (access
->next_sibling
)
2407 access
= access
->next_sibling
;
2410 gcc_assert (access
== root
);
2411 root
= root
->next_grp
;
/* Verify access forests of all candidates with accesses by calling
   verify_sra_access_forest on each of them.  */

static void
verify_all_sra_access_forests (void)
{
  bitmap_iterator bi;
  unsigned i;
  EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
    {
      tree var = candidate (i);
      struct access *access = get_first_repr_for_decl (var);
      if (!access)
        continue;
      gcc_assert (access->base == var);
      verify_sra_access_forest (access);
    }
}
/* Return true if EXPR contains some ARRAY_REFs into a variable bounded
   array.  */

static bool
expr_with_var_bounded_array_refs_p (tree expr)
{
  while (handled_component_p (expr))
    {
      if (TREE_CODE (expr) == ARRAY_REF
          && !tree_fits_shwi_p (array_ref_low_bound (expr)))
        return true;
      expr = TREE_OPERAND (expr, 0);
    }
  return false;
}
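/* Illustrative note (not part of the pass logic), referring to the decision
   table in the comment below: for

     struct S { int i; int j; };

     int
     foo (struct S *g)
     {
       struct S s = *g;
       return s.i;
     }

   s is read in an assignment statement and s.i is read as a scalar once,
   which corresponds to a "Yes" row of the table, so a scalar replacement for
   s.i is considered worthwhile.  The exact GIMPLE produced depends on the
   target and options, so this is only a sketch of the intent.  */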
/* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
   both seeming beneficial and when ALLOW_REPLACEMENTS allows it.  If TOTALLY
   is set, we are totally scalarizing the aggregate.  Also set all sorts of
   access flags appropriately along the way, notably always set grp_read and
   grp_assign_read according to MARK_READ and grp_write when MARK_WRITE is
   true.

   Creating a replacement for a scalar access is considered beneficial if its
   grp_hint or TOTALLY is set (this means either that there is more than one
   direct read access or that we are attempting total scalarization) or
   according to the following table:

   Access written to through a scalar type (once or more times)
   |
   |     Written to in an assignment statement
   |     |
   |     |     Access read as scalar _once_
   |     |     |
   |     |     |     Read in an assignment statement
   |     |     |     |
   |     |     |     |     Scalarize       Comment
-----------------------------------------------------------------------------
   0     0     0     0                     No access for the scalar
   0     0     0     1                     No access for the scalar
   0     0     1     0     No              Single read - won't help
   0     0     1     1     No              The same case
   0     1     0     0                     No access for the scalar
   0     1     0     1                     No access for the scalar
   0     1     1     0     Yes             s = *g; return s.i;
   0     1     1     1     Yes             The same case as above
   1     0     0     0     No              Won't help
   1     0     0     1     Yes             s.i = 1; *g = s;
   1     0     1     0     Yes             s.i = 5; g = s.i;
   1     0     1     1     Yes             The same case as above
   1     1     0     0     No              Won't help.
   1     1     0     1     Yes             s.i = 1; *g = s;
   1     1     1     0     Yes             s = *g; return s.i;
   1     1     1     1     Yes             Any of the above yeses  */

static bool
analyze_access_subtree (struct access *root, struct access *parent,
                        bool allow_replacements, bool totally)
{
2498 struct access
*child
;
2499 HOST_WIDE_INT limit
= root
->offset
+ root
->size
;
2500 HOST_WIDE_INT covered_to
= root
->offset
;
2501 bool scalar
= is_gimple_reg_type (root
->type
);
2502 bool hole
= false, sth_created
= false;
2506 if (parent
->grp_read
)
2508 if (parent
->grp_assignment_read
)
2509 root
->grp_assignment_read
= 1;
2510 if (parent
->grp_write
)
2511 root
->grp_write
= 1;
2512 if (parent
->grp_assignment_write
)
2513 root
->grp_assignment_write
= 1;
2514 if (!parent
->grp_same_access_path
)
2515 root
->grp_same_access_path
= 0;
2518 if (root
->grp_unscalarizable_region
)
2519 allow_replacements
= false;
2521 if (allow_replacements
&& expr_with_var_bounded_array_refs_p (root
->expr
))
2522 allow_replacements
= false;
2524 for (child
= root
->first_child
; child
; child
= child
->next_sibling
)
2526 hole
|= covered_to
< child
->offset
;
2527 sth_created
|= analyze_access_subtree (child
, root
,
2528 allow_replacements
&& !scalar
,
2531 root
->grp_unscalarized_data
|= child
->grp_unscalarized_data
;
2532 if (child
->grp_covered
)
2533 covered_to
+= child
->size
;
2538 if (allow_replacements
&& scalar
&& !root
->first_child
2539 && (totally
|| !root
->grp_total_scalarization
)
2542 || ((root
->grp_scalar_read
|| root
->grp_assignment_read
)
2543 && (root
->grp_scalar_write
|| root
->grp_assignment_write
))))
2545 /* Always create access replacements that cover the whole access.
2546 For integral types this means the precision has to match.
2547 Avoid assumptions based on the integral type kind, too. */
2548 if (INTEGRAL_TYPE_P (root
->type
)
2549 && (TREE_CODE (root
->type
) != INTEGER_TYPE
2550 || TYPE_PRECISION (root
->type
) != root
->size
)
2551 /* But leave bitfield accesses alone. */
2552 && (TREE_CODE (root
->expr
) != COMPONENT_REF
2553 || !DECL_BIT_FIELD (TREE_OPERAND (root
->expr
, 1))))
2555 tree rt
= root
->type
;
2556 gcc_assert ((root
->offset
% BITS_PER_UNIT
) == 0
2557 && (root
->size
% BITS_PER_UNIT
) == 0);
2558 root
->type
= build_nonstandard_integer_type (root
->size
,
2559 TYPE_UNSIGNED (rt
));
2560 root
->expr
= build_ref_for_offset (UNKNOWN_LOCATION
, root
->base
,
2561 root
->offset
, root
->reverse
,
2562 root
->type
, NULL
, false);
2564 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2566 fprintf (dump_file
, "Changing the type of a replacement for ");
2567 print_generic_expr (dump_file
, root
->base
);
2568 fprintf (dump_file
, " offset: %u, size: %u ",
2569 (unsigned) root
->offset
, (unsigned) root
->size
);
2570 fprintf (dump_file
, " to an integer.\n");
2574 root
->grp_to_be_replaced
= 1;
2575 root
->replacement_decl
= create_access_replacement (root
);
2581 if (allow_replacements
2582 && scalar
&& !root
->first_child
2583 && !root
->grp_total_scalarization
2584 && (root
->grp_scalar_write
|| root
->grp_assignment_write
)
2585 && !bitmap_bit_p (cannot_scalarize_away_bitmap
,
2586 DECL_UID (root
->base
)))
2588 gcc_checking_assert (!root
->grp_scalar_read
2589 && !root
->grp_assignment_read
);
2591 if (MAY_HAVE_DEBUG_BIND_STMTS
)
2593 root
->grp_to_be_debug_replaced
= 1;
2594 root
->replacement_decl
= create_access_replacement (root
);
2598 if (covered_to
< limit
)
2600 if (scalar
|| !allow_replacements
)
2601 root
->grp_total_scalarization
= 0;
2604 if (!hole
|| totally
)
2605 root
->grp_covered
= 1;
2606 else if (root
->grp_write
|| comes_initialized_p (root
->base
))
2607 root
->grp_unscalarized_data
= 1; /* not covered and written to */
/* Analyze all access trees linked by next_grp by the means of
   analyze_access_subtree.  */

static bool
analyze_access_trees (struct access *access)
{
  bool ret = false;

  while (access)
    {
      if (analyze_access_subtree (access, NULL, true,
                                  access->grp_total_scalarization))
        ret = true;
      access = access->next_grp;
    }

  return ret;
}
/* Return true iff a potential new child of ACC at offset NORM_OFFSET and with
   size SIZE would conflict with an already existing one.  If exactly such a
   child already exists in ACC, store a pointer to it in EXACT_MATCH.  */

static bool
child_would_conflict_in_acc (struct access *acc, HOST_WIDE_INT norm_offset,
                             HOST_WIDE_INT size, struct access **exact_match)
{
  struct access *child;

  for (child = acc->first_child; child; child = child->next_sibling)
    {
      if (child->offset == norm_offset && child->size == size)
        {
          *exact_match = child;
          return true;
        }

      if (child->offset < norm_offset + size
          && child->offset + child->size > norm_offset)
        return true;
    }

  return false;
}
/* Create a new child access of PARENT, with all properties just like MODEL
   except for its offset.  Return the new access or NULL if it cannot be
   created.  Note that this access is created long after all splicing and
   sorting, it's not located in any access vector and is automatically a
   representative of its group.  Set the grp_read and grp_write flags of the
   new access according to SET_GRP_READ and SET_GRP_WRITE.  */

static struct access *
create_artificial_child_access (struct access *parent, struct access *model,
                                HOST_WIDE_INT new_offset,
                                bool set_grp_read, bool set_grp_write)
{
2667 struct access
**child
;
2668 tree expr
= parent
->base
;
2670 gcc_assert (!model
->grp_unscalarizable_region
);
2672 struct access
*access
= access_pool
.allocate ();
2673 memset (access
, 0, sizeof (struct access
));
2674 if (!build_user_friendly_ref_for_offset (&expr
, TREE_TYPE (expr
), new_offset
,
2677 access
->grp_no_warning
= true;
2678 expr
= build_ref_for_model (EXPR_LOCATION (parent
->base
), parent
->base
,
2679 new_offset
, model
, NULL
, false);
2682 access
->base
= parent
->base
;
2683 access
->expr
= expr
;
2684 access
->offset
= new_offset
;
2685 access
->size
= model
->size
;
2686 access
->type
= model
->type
;
2687 access
->parent
= parent
;
2688 access
->grp_read
= set_grp_read
;
2689 access
->grp_write
= set_grp_write
;
2690 access
->reverse
= model
->reverse
;
2692 child
= &parent
->first_child
;
2693 while (*child
&& (*child
)->offset
< new_offset
)
2694 child
= &(*child
)->next_sibling
;
2696 access
->next_sibling
= *child
;
/* Beginning with ACCESS, traverse its whole access subtree and mark all
   sub-trees as written to.  If any of them has not been marked so previously
   and has assignment links leading from it, re-enqueue it.  */

static void
subtree_mark_written_and_rhs_enqueue (struct access *access)
{
  if (access->grp_write)
    return;
  access->grp_write = true;
  add_access_to_rhs_work_queue (access);

  struct access *child;
  for (child = access->first_child; child; child = child->next_sibling)
    subtree_mark_written_and_rhs_enqueue (child);
}
/* If there is still budget to create a propagation access for DECL, return
   true and decrement the budget.  Otherwise return false.  */

static bool
budget_for_propagation_access (tree decl)
{
  unsigned b, *p = propagation_budget->get (decl);
  if (p)
    b = *p;
  else
    b = param_sra_max_propagations;

  if (b == 0)
    return false;
  b--;

  if (b == 0 && dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "The propagation budget of ");
      print_generic_expr (dump_file, decl);
      fprintf (dump_file, " (UID: %u) has been exhausted.\n", DECL_UID (decl));
    }
  propagation_budget->put (decl, b);
  return true;
}
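/* Illustrative note: param_sra_max_propagations corresponds to the
   --param=sra-max-propagations=<N> command-line knob, e.g.

     gcc -O2 --param=sra-max-propagations=64 test.c

   which bounds how many artificial propagation accesses may be created per
   base declaration.  The authoritative spelling and default value live in
   params.opt, so consult that file rather than this sketch.  */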
/* Return true if ACC or any of its subaccesses has grp_write set.  */

static bool
access_or_its_child_written (struct access *acc)
{
  if (acc->grp_write)
    return true;
  for (struct access *sub = acc->first_child; sub; sub = sub->next_sibling)
    if (access_or_its_child_written (sub))
      return true;
  return false;
}
/* Propagate subaccesses and grp_write flags of RACC across an assignment link
   to LACC.  Enqueue sub-accesses as necessary so that the write flag is
   propagated transitively.  Return true if anything changed.  Additionally, if
   RACC is a scalar access but LACC is not, change the type of the latter, if
   possible.  */

static bool
propagate_subaccesses_from_rhs (struct access *lacc, struct access *racc)
{
2768 struct access
*rchild
;
2769 HOST_WIDE_INT norm_delta
= lacc
->offset
- racc
->offset
;
2772 /* IF the LHS is still not marked as being written to, we only need to do so
2773 if the RHS at this level actually was. */
2774 if (!lacc
->grp_write
)
2776 gcc_checking_assert (!comes_initialized_p (racc
->base
));
2777 if (racc
->grp_write
)
2779 subtree_mark_written_and_rhs_enqueue (lacc
);
2784 if (is_gimple_reg_type (lacc
->type
)
2785 || lacc
->grp_unscalarizable_region
2786 || racc
->grp_unscalarizable_region
)
2788 if (!lacc
->grp_write
)
2791 subtree_mark_written_and_rhs_enqueue (lacc
);
2796 if (is_gimple_reg_type (racc
->type
))
2798 if (!lacc
->grp_write
)
2801 subtree_mark_written_and_rhs_enqueue (lacc
);
2803 if (!lacc
->first_child
&& !racc
->first_child
)
2805 /* We are about to change the access type from aggregate to scalar,
2806 so we need to put the reverse flag onto the access, if any. */
2808 = TYPE_REVERSE_STORAGE_ORDER (lacc
->type
)
2809 && !POINTER_TYPE_P (racc
->type
)
2810 && !VECTOR_TYPE_P (racc
->type
);
2811 tree t
= lacc
->base
;
2813 lacc
->type
= racc
->type
;
2814 if (build_user_friendly_ref_for_offset (&t
, TREE_TYPE (t
),
2815 lacc
->offset
, racc
->type
))
2818 lacc
->grp_same_access_path
= true;
2822 lacc
->expr
= build_ref_for_model (EXPR_LOCATION (lacc
->base
),
2823 lacc
->base
, lacc
->offset
,
2825 if (TREE_CODE (lacc
->expr
) == MEM_REF
)
2826 REF_REVERSE_STORAGE_ORDER (lacc
->expr
) = reverse
;
2827 lacc
->grp_no_warning
= true;
2828 lacc
->grp_same_access_path
= false;
2830 lacc
->reverse
= reverse
;
2835 for (rchild
= racc
->first_child
; rchild
; rchild
= rchild
->next_sibling
)
2837 struct access
*new_acc
= NULL
;
2838 HOST_WIDE_INT norm_offset
= rchild
->offset
+ norm_delta
;
2840 if (child_would_conflict_in_acc (lacc
, norm_offset
, rchild
->size
,
2845 if (!new_acc
->grp_write
&& rchild
->grp_write
)
2847 gcc_assert (!lacc
->grp_write
);
2848 subtree_mark_written_and_rhs_enqueue (new_acc
);
2852 rchild
->grp_hint
= 1;
2853 new_acc
->grp_hint
|= new_acc
->grp_read
;
2854 if (rchild
->first_child
2855 && propagate_subaccesses_from_rhs (new_acc
, rchild
))
2858 add_access_to_rhs_work_queue (new_acc
);
2863 if (!lacc
->grp_write
)
2866 subtree_mark_written_and_rhs_enqueue (lacc
);
2872 if (rchild
->grp_unscalarizable_region
2873 || !budget_for_propagation_access (lacc
->base
))
2875 if (!lacc
->grp_write
&& access_or_its_child_written (rchild
))
2878 subtree_mark_written_and_rhs_enqueue (lacc
);
2883 rchild
->grp_hint
= 1;
2884 /* Because get_ref_base_and_extent always includes padding in size for
2885 accesses to DECLs but not necessarily for COMPONENT_REFs of the same
2886 type, we might be actually attempting to here to create a child of the
2887 same type as the parent. */
2888 if (!types_compatible_p (lacc
->type
, rchild
->type
))
2889 new_acc
= create_artificial_child_access (lacc
, rchild
, norm_offset
,
2892 || rchild
->grp_write
));
2895 gcc_checking_assert (new_acc
);
2896 if (racc
->first_child
)
2897 propagate_subaccesses_from_rhs (new_acc
, rchild
);
2899 add_access_to_rhs_work_queue (lacc
);
/* Propagate subaccesses of LACC across an assignment link to RACC if they
   should inhibit total scalarization of the corresponding area.  No flags are
   being propagated in the process.  Return true if anything changed.  */

static bool
propagate_subaccesses_from_lhs (struct access *lacc, struct access *racc)
{
2913 if (is_gimple_reg_type (racc
->type
)
2914 || lacc
->grp_unscalarizable_region
2915 || racc
->grp_unscalarizable_region
)
2918 /* TODO: Do we want set some new racc flag to stop potential total
2919 scalarization if lacc is a scalar access (and none fo the two have
2923 HOST_WIDE_INT norm_delta
= racc
->offset
- lacc
->offset
;
2924 for (struct access
*lchild
= lacc
->first_child
;
2926 lchild
= lchild
->next_sibling
)
2928 struct access
*matching_acc
= NULL
;
2929 HOST_WIDE_INT norm_offset
= lchild
->offset
+ norm_delta
;
2931 if (lchild
->grp_unscalarizable_region
2932 || child_would_conflict_in_acc (racc
, norm_offset
, lchild
->size
,
2934 || !budget_for_propagation_access (racc
->base
))
2937 && propagate_subaccesses_from_lhs (lchild
, matching_acc
))
2938 add_access_to_lhs_work_queue (matching_acc
);
2942 /* Because get_ref_base_and_extent always includes padding in size for
2943 accesses to DECLs but not necessarily for COMPONENT_REFs of the same
2944 type, we might be actually attempting to here to create a child of the
2945 same type as the parent. */
2946 if (!types_compatible_p (racc
->type
, lchild
->type
))
2948 struct access
*new_acc
2949 = create_artificial_child_access (racc
, lchild
, norm_offset
,
2951 propagate_subaccesses_from_lhs (lchild
, new_acc
);
2954 propagate_subaccesses_from_lhs (lchild
, racc
);
/* Propagate all subaccesses across assignment links.  */

static void
propagate_all_subaccesses (void)
{
  propagation_budget = new hash_map<tree, unsigned>;
2966 while (rhs_work_queue_head
)
2968 struct access
*racc
= pop_access_from_rhs_work_queue ();
2969 struct assign_link
*link
;
2971 if (racc
->group_representative
)
2972 racc
= racc
->group_representative
;
2973 gcc_assert (racc
->first_rhs_link
);
2975 for (link
= racc
->first_rhs_link
; link
; link
= link
->next_rhs
)
2977 struct access
*lacc
= link
->lacc
;
2979 if (!bitmap_bit_p (candidate_bitmap
, DECL_UID (lacc
->base
)))
2981 lacc
= lacc
->group_representative
;
2983 bool reque_parents
= false;
2984 if (!bitmap_bit_p (candidate_bitmap
, DECL_UID (racc
->base
)))
2986 if (!lacc
->grp_write
)
2988 subtree_mark_written_and_rhs_enqueue (lacc
);
2989 reque_parents
= true;
2992 else if (propagate_subaccesses_from_rhs (lacc
, racc
))
2993 reque_parents
= true;
2998 add_access_to_rhs_work_queue (lacc
);
2999 lacc
= lacc
->parent
;
3005 while (lhs_work_queue_head
)
3007 struct access
*lacc
= pop_access_from_lhs_work_queue ();
3008 struct assign_link
*link
;
3010 if (lacc
->group_representative
)
3011 lacc
= lacc
->group_representative
;
3012 gcc_assert (lacc
->first_lhs_link
);
3014 if (!bitmap_bit_p (candidate_bitmap
, DECL_UID (lacc
->base
)))
3017 for (link
= lacc
->first_lhs_link
; link
; link
= link
->next_lhs
)
3019 struct access
*racc
= link
->racc
;
3021 if (racc
->group_representative
)
3022 racc
= racc
->group_representative
;
3023 if (!bitmap_bit_p (candidate_bitmap
, DECL_UID (racc
->base
)))
3025 if (propagate_subaccesses_from_lhs (lacc
, racc
))
3026 add_access_to_lhs_work_queue (racc
);
3029 delete propagation_budget
;
/* Return true if the forest beginning with ROOT does not contain
   unscalarizable regions or non-byte aligned accesses.  */

static bool
can_totally_scalarize_forest_p (struct access *root)
{
3038 struct access
*access
= root
;
3041 if (access
->grp_unscalarizable_region
3042 || (access
->offset
% BITS_PER_UNIT
) != 0
3043 || (access
->size
% BITS_PER_UNIT
) != 0
3044 || (is_gimple_reg_type (access
->type
)
3045 && access
->first_child
))
3048 if (access
->first_child
)
3049 access
= access
->first_child
;
3050 else if (access
->next_sibling
)
3051 access
= access
->next_sibling
;
3054 while (access
->parent
&& !access
->next_sibling
)
3055 access
= access
->parent
;
3056 if (access
->next_sibling
)
3057 access
= access
->next_sibling
;
3060 gcc_assert (access
== root
);
3061 root
= root
->next_grp
;
/* Create and return an ACCESS in PARENT spanning from POS with SIZE, TYPE and
   reference EXPR for total scalarization purposes and mark it as such.  Within
   the children of PARENT, link it in between PTR and NEXT_SIBLING.  */

static struct access *
create_total_scalarization_access (struct access *parent, HOST_WIDE_INT pos,
                                   HOST_WIDE_INT size, tree type, tree expr,
                                   struct access **ptr,
                                   struct access *next_sibling)
{
  struct access *access = access_pool.allocate ();
  memset (access, 0, sizeof (struct access));
  access->base = parent->base;
  access->offset = pos;
  access->size = size;
  access->expr = expr;
  access->type = type;
  access->parent = parent;
  access->grp_write = parent->grp_write;
  access->grp_total_scalarization = 1;
  access->grp_hint = 1;
  access->grp_same_access_path = path_comparable_for_same_access (expr);
  access->reverse = reverse_storage_order_for_component_p (expr);

  access->next_sibling = next_sibling;
  *ptr = access;
  return access;
}
/* Create and return an ACCESS in PARENT spanning from POS with SIZE, TYPE and
   reference EXPR for total scalarization purposes and mark it as such, link it
   at *PTR and reshape the tree so that those elements at *PTR and their
   siblings which fall within the part described by POS and SIZE are moved to
   be children of the new access.  If a partial overlap is detected, return
   NULL.  */

static struct access *
create_total_access_and_reshape (struct access *parent, HOST_WIDE_INT pos,
                                 HOST_WIDE_INT size, tree type, tree expr,
                                 struct access **ptr)
{
  struct access **p = ptr;

  while (*p && (*p)->offset < pos + size)
    {
      if ((*p)->offset + (*p)->size > pos + size)
        return NULL;
      p = &(*p)->next_sibling;
    }

  struct access *next_child = *ptr;
  struct access *new_acc
    = create_total_scalarization_access (parent, pos, size, type, expr,
                                         ptr, *p);
  if (p != ptr)
    {
      new_acc->first_child = next_child;
      *p = NULL;
      for (struct access *a = next_child; a; a = a->next_sibling)
        a->parent = new_acc;
    }
  return new_acc;
}
static bool totally_scalarize_subtree (struct access *root);
/* Return true if INNER is either the same type as OUTER or if it is the type
   of a record field in OUTER at offset zero, possibly in nested
   sub-records.  */

static bool
access_and_field_type_match_p (tree outer, tree inner)
{
  if (TYPE_MAIN_VARIANT (outer) == TYPE_MAIN_VARIANT (inner))
    return true;
  if (TREE_CODE (outer) != RECORD_TYPE)
    return false;
  tree fld = TYPE_FIELDS (outer);
  while (fld)
    {
      if (TREE_CODE (fld) == FIELD_DECL)
        {
          if (!zerop (DECL_FIELD_OFFSET (fld)))
            return false;
          if (TYPE_MAIN_VARIANT (TREE_TYPE (fld)) == inner)
            return true;
          if (TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE)
            fld = TYPE_FIELDS (TREE_TYPE (fld));
          else
            return false;
        }
      else
        fld = DECL_CHAIN (fld);
    }
  return false;
}
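/* Illustrative note (not part of the pass logic): loosely speaking, given

     struct B { int i; };
     struct A { struct B b; long j; };

   struct B matches struct A for the purposes of the function above because it
   is the type of a record field at offset zero, and int matches too via the
   nested field b.i, while long does not because j is not at offset zero.  */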
/* Return type of total_should_skip_creating_access indicating whether a total
   scalarization access for a field/element should be created, whether it
   already exists or whether the entire total scalarization has to fail.  */

enum total_sra_field_state {TOTAL_FLD_CREATE, TOTAL_FLD_DONE, TOTAL_FLD_FAILED};
/* Do all the necessary steps in total scalarization when the given aggregate
   type has a TYPE at POS with the given SIZE which should be put into PARENT
   and when we have processed all its siblings with smaller offsets up until
   and including LAST_SEEN_SIBLING (which can be NULL).

   If some further siblings are to be skipped, set *LAST_SEEN_SIBLING as
   appropriate.  Return TOTAL_FLD_CREATE if the caller should carry on with
   creating a new access, TOTAL_FLD_DONE if access or accesses capable of
   representing the described part of the aggregate for the purposes of total
   scalarization already exist or TOTAL_FLD_FAILED if there is a problem which
   prevents total scalarization from happening at all.  */

static enum total_sra_field_state
total_should_skip_creating_access (struct access *parent,
                                   struct access **last_seen_sibling,
                                   tree type, HOST_WIDE_INT pos,
                                   HOST_WIDE_INT size)
{
3191 struct access
*next_child
;
3192 if (!*last_seen_sibling
)
3193 next_child
= parent
->first_child
;
3195 next_child
= (*last_seen_sibling
)->next_sibling
;
3197 /* First, traverse the chain of siblings until it points to an access with
3198 offset at least equal to POS. Check all skipped accesses whether they
3199 span the POS boundary and if so, return with a failure. */
3200 while (next_child
&& next_child
->offset
< pos
)
3202 if (next_child
->offset
+ next_child
->size
> pos
)
3203 return TOTAL_FLD_FAILED
;
3204 *last_seen_sibling
= next_child
;
3205 next_child
= next_child
->next_sibling
;
3208 /* Now check whether next_child has exactly the right POS and SIZE and if so,
3209 whether it can represent what we need and can be totally scalarized
3211 if (next_child
&& next_child
->offset
== pos
3212 && next_child
->size
== size
)
3214 if (!is_gimple_reg_type (next_child
->type
)
3215 && (!access_and_field_type_match_p (type
, next_child
->type
)
3216 || !totally_scalarize_subtree (next_child
)))
3217 return TOTAL_FLD_FAILED
;
3219 *last_seen_sibling
= next_child
;
3220 return TOTAL_FLD_DONE
;
3223 /* If the child we're looking at would partially overlap, we just cannot
3224 totally scalarize. */
3226 && next_child
->offset
< pos
+ size
3227 && next_child
->offset
+ next_child
->size
> pos
+ size
)
3228 return TOTAL_FLD_FAILED
;
3230 if (is_gimple_reg_type (type
))
3232 /* We don't scalarize accesses that are children of other scalar type
3233 accesses, so if we go on and create an access for a register type,
3234 there should not be any pre-existing children. There are rare cases
3235 where the requested type is a vector but we already have register
3236 accesses for all its elements which is equally good. Detect that
3237 situation or whether we need to bail out. */
3239 HOST_WIDE_INT covered
= pos
;
3240 bool skipping
= false;
3242 && next_child
->offset
+ next_child
->size
<= pos
+ size
)
3244 if (next_child
->offset
!= covered
3245 || !is_gimple_reg_type (next_child
->type
))
3246 return TOTAL_FLD_FAILED
;
3248 covered
+= next_child
->size
;
3249 *last_seen_sibling
= next_child
;
3250 next_child
= next_child
->next_sibling
;
3256 if (covered
!= pos
+ size
)
3257 return TOTAL_FLD_FAILED
;
3259 return TOTAL_FLD_DONE
;
3263 return TOTAL_FLD_CREATE
;
/* Go over sub-tree rooted in ROOT and attempt to create scalar accesses
   spanning all uncovered areas covered by ROOT, return false if the attempt
   failed.  All created accesses will have grp_unscalarizable_region set (and
   should be ignored if the function returns false).  */

static bool
totally_scalarize_subtree (struct access *root)
{
3274 gcc_checking_assert (!root
->grp_unscalarizable_region
);
3275 gcc_checking_assert (!is_gimple_reg_type (root
->type
));
3277 struct access
*last_seen_sibling
= NULL
;
3279 switch (TREE_CODE (root
->type
))
3282 for (tree fld
= TYPE_FIELDS (root
->type
); fld
; fld
= DECL_CHAIN (fld
))
3283 if (TREE_CODE (fld
) == FIELD_DECL
)
3285 tree ft
= TREE_TYPE (fld
);
3286 HOST_WIDE_INT fsize
= tree_to_uhwi (DECL_SIZE (fld
));
3290 HOST_WIDE_INT pos
= root
->offset
+ int_bit_position (fld
);
3291 if (pos
+ fsize
> root
->offset
+ root
->size
)
3293 enum total_sra_field_state
3294 state
= total_should_skip_creating_access (root
,
3299 case TOTAL_FLD_FAILED
:
3301 case TOTAL_FLD_DONE
:
3303 case TOTAL_FLD_CREATE
:
3309 struct access
**p
= (last_seen_sibling
3310 ? &last_seen_sibling
->next_sibling
3311 : &root
->first_child
);
3312 tree nref
= build3 (COMPONENT_REF
, ft
, root
->expr
, fld
, NULL_TREE
);
3313 struct access
*new_child
3314 = create_total_access_and_reshape (root
, pos
, fsize
, ft
, nref
, p
);
3318 if (!is_gimple_reg_type (ft
)
3319 && !totally_scalarize_subtree (new_child
))
3321 last_seen_sibling
= new_child
;
3326 tree elemtype
= TREE_TYPE (root
->type
);
3327 tree elem_size
= TYPE_SIZE (elemtype
);
3328 gcc_assert (elem_size
&& tree_fits_shwi_p (elem_size
));
3329 HOST_WIDE_INT el_size
= tree_to_shwi (elem_size
);
3330 gcc_assert (el_size
> 0);
3332 tree minidx
= TYPE_MIN_VALUE (TYPE_DOMAIN (root
->type
));
3333 gcc_assert (TREE_CODE (minidx
) == INTEGER_CST
);
3334 tree maxidx
= TYPE_MAX_VALUE (TYPE_DOMAIN (root
->type
));
3335 /* Skip (some) zero-length arrays; others have MAXIDX == MINIDX - 1. */
3338 gcc_assert (TREE_CODE (maxidx
) == INTEGER_CST
);
3339 tree domain
= TYPE_DOMAIN (root
->type
);
3340 /* MINIDX and MAXIDX are inclusive, and must be interpreted in
3341 DOMAIN (e.g. signed int, whereas min/max may be size_int). */
3342 offset_int idx
= wi::to_offset (minidx
);
3343 offset_int max
= wi::to_offset (maxidx
);
3344 if (!TYPE_UNSIGNED (domain
))
3346 idx
= wi::sext (idx
, TYPE_PRECISION (domain
));
3347 max
= wi::sext (max
, TYPE_PRECISION (domain
));
3349 for (HOST_WIDE_INT pos
= root
->offset
;
3351 pos
+= el_size
, ++idx
)
3353 enum total_sra_field_state
3354 state
= total_should_skip_creating_access (root
,
3360 case TOTAL_FLD_FAILED
:
3362 case TOTAL_FLD_DONE
:
3364 case TOTAL_FLD_CREATE
:
3370 struct access
**p
= (last_seen_sibling
3371 ? &last_seen_sibling
->next_sibling
3372 : &root
->first_child
);
3373 tree nref
= build4 (ARRAY_REF
, elemtype
, root
->expr
,
3374 wide_int_to_tree (domain
, idx
),
3375 NULL_TREE
, NULL_TREE
);
3376 struct access
*new_child
3377 = create_total_access_and_reshape (root
, pos
, el_size
, elemtype
,
3382 if (!is_gimple_reg_type (elemtype
)
3383 && !totally_scalarize_subtree (new_child
))
3385 last_seen_sibling
= new_child
;
/* Go through all accesses collected throughout the (intraprocedural) analysis
   stage, exclude overlapping ones, identify representatives and build trees
   out of them, making decisions about scalarization on the way.  Return true
   iff there are any to-be-scalarized variables after this stage.  */

static bool
analyze_all_variable_accesses (void)
{
  int res = 0;
  bitmap tmp = BITMAP_ALLOC (NULL);
  bitmap_iterator bi;
  unsigned i;
3410 bitmap_copy (tmp
, candidate_bitmap
);
3411 EXECUTE_IF_SET_IN_BITMAP (tmp
, 0, i
, bi
)
3413 tree var
= candidate (i
);
3414 struct access
*access
;
3416 access
= sort_and_splice_var_accesses (var
);
3417 if (!access
|| !build_access_trees (access
))
3418 disqualify_candidate (var
,
3419 "No or inhibitingly overlapping accesses.");
3422 propagate_all_subaccesses ();
3424 bool optimize_speed_p
= !optimize_function_for_size_p (cfun
);
3425 /* If the user didn't set PARAM_SRA_MAX_SCALARIZATION_SIZE_<...>,
3426 fall back to a target default. */
3427 unsigned HOST_WIDE_INT max_scalarization_size
3428 = get_move_ratio (optimize_speed_p
) * UNITS_PER_WORD
;
3430 if (optimize_speed_p
)
3432 if (OPTION_SET_P (param_sra_max_scalarization_size_speed
))
3433 max_scalarization_size
= param_sra_max_scalarization_size_speed
;
3437 if (OPTION_SET_P (param_sra_max_scalarization_size_size
))
3438 max_scalarization_size
= param_sra_max_scalarization_size_size
;
3440 max_scalarization_size
*= BITS_PER_UNIT
;
3442 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap
, 0, i
, bi
)
3443 if (bitmap_bit_p (should_scalarize_away_bitmap
, i
)
3444 && !bitmap_bit_p (cannot_scalarize_away_bitmap
, i
))
3446 tree var
= candidate (i
);
3450 if (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var
))) > max_scalarization_size
)
3452 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3454 fprintf (dump_file
, "Too big to totally scalarize: ");
3455 print_generic_expr (dump_file
, var
);
3456 fprintf (dump_file
, " (UID: %u)\n", DECL_UID (var
));
3461 bool all_types_ok
= true;
3462 for (struct access
*access
= get_first_repr_for_decl (var
);
3464 access
= access
->next_grp
)
3465 if (!can_totally_scalarize_forest_p (access
)
3466 || !scalarizable_type_p (access
->type
, constant_decl_p (var
)))
3468 all_types_ok
= false;
3474 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3476 fprintf (dump_file
, "Will attempt to totally scalarize ");
3477 print_generic_expr (dump_file
, var
);
3478 fprintf (dump_file
, " (UID: %u): \n", DECL_UID (var
));
3480 bool scalarized
= true;
3481 for (struct access
*access
= get_first_repr_for_decl (var
);
3483 access
= access
->next_grp
)
3484 if (!is_gimple_reg_type (access
->type
)
3485 && !totally_scalarize_subtree (access
))
3492 for (struct access
*access
= get_first_repr_for_decl (var
);
3494 access
= access
->next_grp
)
3495 access
->grp_total_scalarization
= true;
3499 verify_all_sra_access_forests ();
3501 bitmap_copy (tmp
, candidate_bitmap
);
3502 EXECUTE_IF_SET_IN_BITMAP (tmp
, 0, i
, bi
)
3504 tree var
= candidate (i
);
3505 struct access
*access
= get_first_repr_for_decl (var
);
3507 if (analyze_access_trees (access
))
3510 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3512 fprintf (dump_file
, "\nAccess trees for ");
3513 print_generic_expr (dump_file
, var
);
3514 fprintf (dump_file
, " (UID: %u): \n", DECL_UID (var
));
3515 dump_access_tree (dump_file
, access
);
3516 fprintf (dump_file
, "\n");
3520 disqualify_candidate (var
, "No scalar replacements to be created.");
3527 statistics_counter_event (cfun
, "Scalarized aggregates", res
);
/* Generate statements copying scalar replacements of accesses within a subtree
   into or out of AGG.  ACCESS, all its children, siblings and their children
   are to be processed.  AGG is an aggregate type expression (can be a
   declaration but does not have to be, it can for example also be a mem_ref or
   a series of handled components).  TOP_OFFSET is the offset of the processed
   subtree which has to be subtracted from offsets of individual accesses to
   get corresponding offsets for AGG.  If CHUNK_SIZE is non-null, copy only
   replacements in the interval <start_offset, start_offset + chunk_size>,
   otherwise copy all.  GSI is a statement iterator used to place the new
   statements.  WRITE should be true when the statements should write from AGG
   to the replacement and false if vice versa.  If INSERT_AFTER is true, new
   statements will be added after the current statement in GSI, they will be
   added before the statement otherwise.  */

static void
generate_subtree_copies (struct access *access, tree agg,
                         HOST_WIDE_INT top_offset,
                         HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
                         gimple_stmt_iterator *gsi, bool write,
                         bool insert_after, location_t loc)
{
3555 /* Never write anything into constant pool decls. See PR70602. */
3556 if (!write
&& constant_decl_p (agg
))
3560 if (chunk_size
&& access
->offset
>= start_offset
+ chunk_size
)
3563 if (access
->grp_to_be_replaced
3565 || access
->offset
+ access
->size
> start_offset
))
3567 tree expr
, repl
= get_access_replacement (access
);
3570 expr
= build_ref_for_model (loc
, agg
, access
->offset
- top_offset
,
3571 access
, gsi
, insert_after
);
3575 if (access
->grp_partial_lhs
)
3576 expr
= force_gimple_operand_gsi (gsi
, expr
, true, NULL_TREE
,
3578 insert_after
? GSI_NEW_STMT
3580 stmt
= gimple_build_assign (repl
, expr
);
3584 suppress_warning (repl
/* Be more selective! */);
3585 if (access
->grp_partial_lhs
)
3586 repl
= force_gimple_operand_gsi (gsi
, repl
, true, NULL_TREE
,
3588 insert_after
? GSI_NEW_STMT
3590 stmt
= gimple_build_assign (expr
, repl
);
3592 gimple_set_location (stmt
, loc
);
3595 gsi_insert_after (gsi
, stmt
, GSI_NEW_STMT
);
3597 gsi_insert_before (gsi
, stmt
, GSI_SAME_STMT
);
3599 sra_stats
.subtree_copies
++;
3602 && access
->grp_to_be_debug_replaced
3604 || access
->offset
+ access
->size
> start_offset
))
3607 tree drhs
= build_debug_ref_for_model (loc
, agg
,
3608 access
->offset
- top_offset
,
3610 ds
= gimple_build_debug_bind (get_access_replacement (access
),
3611 drhs
, gsi_stmt (*gsi
));
3613 gsi_insert_after (gsi
, ds
, GSI_NEW_STMT
);
3615 gsi_insert_before (gsi
, ds
, GSI_SAME_STMT
);
3618 if (access
->first_child
)
3619 generate_subtree_copies (access
->first_child
, agg
, top_offset
,
3620 start_offset
, chunk_size
, gsi
,
3621 write
, insert_after
, loc
);
3623 access
= access
->next_sibling
;
/* Assign zero to all scalar replacements in an access subtree.  ACCESS is the
   root of the subtree to be processed.  GSI is the statement iterator used
   for inserting statements which are added after the current statement if
   INSERT_AFTER is true or before it otherwise.  */

static void
init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
                        bool insert_after, location_t loc)
{
  struct access *child;

  if (access->grp_to_be_replaced)
    {
      gassign *stmt;

      stmt = gimple_build_assign (get_access_replacement (access),
                                  build_zero_cst (access->type));
      if (insert_after)
        gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
        gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
      update_stmt (stmt);
      gimple_set_location (stmt, loc);
    }
  else if (access->grp_to_be_debug_replaced)
    {
      gdebug *ds
        = gimple_build_debug_bind (get_access_replacement (access),
                                   build_zero_cst (access->type),
                                   gsi_stmt (*gsi));
      if (insert_after)
        gsi_insert_after (gsi, ds, GSI_NEW_STMT);
      else
        gsi_insert_before (gsi, ds, GSI_SAME_STMT);
    }

  for (child = access->first_child; child; child = child->next_sibling)
    init_subtree_with_zero (child, gsi, insert_after, loc);
}
/* Clobber all scalar replacements in an access subtree.  ACCESS is the
   root of the subtree to be processed.  GSI is the statement iterator used
   for inserting statements which are added after the current statement if
   INSERT_AFTER is true or before it otherwise.  */

static void
clobber_subtree (struct access *access, gimple_stmt_iterator *gsi,
                 bool insert_after, location_t loc)
{
  struct access *child;

  if (access->grp_to_be_replaced)
    {
      tree rep = get_access_replacement (access);
      tree clobber = build_clobber (access->type);
      gimple *stmt = gimple_build_assign (rep, clobber);

      if (insert_after)
        gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
        gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
      update_stmt (stmt);
      gimple_set_location (stmt, loc);
    }

  for (child = access->first_child; child; child = child->next_sibling)
    clobber_subtree (child, gsi, insert_after, loc);
}
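/* Illustrative note (not part of the pass logic): in the GIMPLE dumps a
   clobber generated above shows up roughly as

     SR.5 ={v} {CLOBBER};

   marking the scalar replacement SR.5 as dead from that point on; the exact
   spelling comes from build_clobber and the pretty-printer, so treat this
   line only as a sketch.  */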
/* Search for an access representative for the given expression EXPR and
   return it or NULL if it cannot be found.  */

static struct access *
get_access_for_expr (tree expr)
{
  poly_int64 poffset, psize, pmax_size;
  HOST_WIDE_INT offset, max_size;
  tree base;
  bool reverse;

  /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
     a different size than the size of its argument and we need the latter
     one.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    expr = TREE_OPERAND (expr, 0);

  base = get_ref_base_and_extent (expr, &poffset, &psize, &pmax_size,
                                  &reverse);
  if (!known_size_p (pmax_size)
      || !pmax_size.is_constant (&max_size)
      || !poffset.is_constant (&offset)
      || !DECL_P (base))
    return NULL;

  if (tree basesize = DECL_SIZE (base))
    {
      poly_int64 sz;
      if (offset < 0
          || !poly_int_tree_p (basesize, &sz)
          || known_le (sz, offset))
        return NULL;
    }

  if (max_size == 0
      || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  return get_var_base_offset_size_access (base, offset, max_size);
}
/* Replace the expression EXPR with a scalar replacement if there is one and
   generate other statements to do type conversion or subtree copying if
   necessary.  GSI is used to place newly created statements, WRITE is true if
   the expression is being written to (it is on a LHS of a statement or output
   in an assembly statement).  */

static bool
sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
{
3750 struct access
*access
;
3751 tree type
, bfr
, orig_expr
;
3752 bool partial_cplx_access
= false;
3754 if (TREE_CODE (*expr
) == BIT_FIELD_REF
)
3757 expr
= &TREE_OPERAND (*expr
, 0);
3762 if (TREE_CODE (*expr
) == REALPART_EXPR
|| TREE_CODE (*expr
) == IMAGPART_EXPR
)
3764 expr
= &TREE_OPERAND (*expr
, 0);
3765 partial_cplx_access
= true;
3767 access
= get_access_for_expr (*expr
);
3770 type
= TREE_TYPE (*expr
);
3773 loc
= gimple_location (gsi_stmt (*gsi
));
3774 gimple_stmt_iterator alt_gsi
= gsi_none ();
3775 if (write
&& stmt_ends_bb_p (gsi_stmt (*gsi
)))
3777 alt_gsi
= gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi
)));
3781 if (access
->grp_to_be_replaced
)
3783 tree repl
= get_access_replacement (access
);
3784 /* If we replace a non-register typed access simply use the original
3785 access expression to extract the scalar component afterwards.
3786 This happens if scalarizing a function return value or parameter
3787 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
3788 gcc.c-torture/compile/20011217-1.c.
3790 We also want to use this when accessing a complex or vector which can
3791 be accessed as a different type too, potentially creating a need for
3792 type conversion (see PR42196) and when scalarized unions are involved
3793 in assembler statements (see PR42398). */
3794 if (!bfr
&& !useless_type_conversion_p (type
, access
->type
))
3798 ref
= build_ref_for_model (loc
, orig_expr
, 0, access
, gsi
, false);
3800 if (partial_cplx_access
)
3802 /* VIEW_CONVERT_EXPRs in partial complex access are always fine in
3803 the case of a write because in such case the replacement cannot
3804 be a gimple register. In the case of a load, we have to
3805 differentiate in between a register an non-register
3807 tree t
= build1 (VIEW_CONVERT_EXPR
, type
, repl
);
3808 gcc_checking_assert (!write
|| access
->grp_partial_lhs
);
3809 if (!access
->grp_partial_lhs
)
3811 tree tmp
= make_ssa_name (type
);
3812 gassign
*stmt
= gimple_build_assign (tmp
, t
);
3813 /* This is always a read. */
3814 gsi_insert_before (gsi
, stmt
, GSI_SAME_STMT
);
3823 if (access
->grp_partial_lhs
)
3824 ref
= force_gimple_operand_gsi (gsi
, ref
, true, NULL_TREE
,
3825 false, GSI_NEW_STMT
);
3826 stmt
= gimple_build_assign (repl
, ref
);
3827 gimple_set_location (stmt
, loc
);
3828 gsi_insert_after (gsi
, stmt
, GSI_NEW_STMT
);
3834 if (access
->grp_partial_lhs
)
3835 repl
= force_gimple_operand_gsi (gsi
, repl
, true, NULL_TREE
,
3836 true, GSI_SAME_STMT
);
3837 stmt
= gimple_build_assign (ref
, repl
);
3838 gimple_set_location (stmt
, loc
);
3839 gsi_insert_before (gsi
, stmt
, GSI_SAME_STMT
);
3846 else if (write
&& access
->grp_to_be_debug_replaced
)
3848 gdebug
*ds
= gimple_build_debug_bind (get_access_replacement (access
),
3851 gsi_insert_after (gsi
, ds
, GSI_NEW_STMT
);
3854 if (access
->first_child
&& !TREE_READONLY (access
->base
))
3856 HOST_WIDE_INT start_offset
, chunk_size
;
3858 && tree_fits_uhwi_p (TREE_OPERAND (bfr
, 1))
3859 && tree_fits_uhwi_p (TREE_OPERAND (bfr
, 2)))
3861 chunk_size
= tree_to_uhwi (TREE_OPERAND (bfr
, 1));
3862 start_offset
= access
->offset
3863 + tree_to_uhwi (TREE_OPERAND (bfr
, 2));
3866 start_offset
= chunk_size
= 0;
3868 generate_subtree_copies (access
->first_child
, orig_expr
, access
->offset
,
3869 start_offset
, chunk_size
, gsi
, write
, write
,
/* Where scalar replacements of the RHS have been written to when a replacement
   of a LHS of an assignment cannot be directly loaded from a replacement of
   the RHS.  */
enum unscalarized_data_handling { SRA_UDH_NONE,  /* Nothing done so far.  */
                                  SRA_UDH_RIGHT, /* Data flushed to the RHS.  */
                                  SRA_UDH_LEFT }; /* Data flushed to the LHS.  */

struct subreplacement_assignment_data
{
  /* Offset of the access representing the lhs of the assignment.  */
  HOST_WIDE_INT left_offset;

  /* LHS and RHS of the original assignment.  */
  tree assignment_lhs, assignment_rhs;

  /* Access representing the rhs of the whole assignment.  */
  struct access *top_racc;

  /* Stmt iterator used for statement insertions after the original assignment.
     It points to the main GSI used to traverse a BB during function body
     modification.  */
  gimple_stmt_iterator *new_gsi;

  /* Stmt iterator used for statement insertions before the original
     assignment.  Keeps on pointing to the original statement.  */
  gimple_stmt_iterator old_gsi;

  /* Location of the assignment.  */
  location_t loc;

  /* Keeps the information whether we have needed to refresh replacements of
     the LHS and from which side of the assignments this takes place.  */
  enum unscalarized_data_handling refreshed;
};
/* Store all replacements in the access tree rooted in TOP_RACC either to their
   base aggregate if there are unscalarized data or directly to LHS of the
   statement that is pointed to by GSI otherwise.  */

static void
handle_unscalarized_data_in_subtree (struct subreplacement_assignment_data *sad)
{
  tree src;

  /* If the RHS is a load from a constant, we do not need to (and must not)
     flush replacements to it and can use it directly as if we did.  */
  if (TREE_READONLY (sad->top_racc->base))
    {
      sad->refreshed = SRA_UDH_RIGHT;
      return;
    }

  if (sad->top_racc->grp_unscalarized_data)
    {
      src = sad->assignment_rhs;
      sad->refreshed = SRA_UDH_RIGHT;
    }
  else
    {
      src = sad->assignment_lhs;
      sad->refreshed = SRA_UDH_LEFT;
    }
  generate_subtree_copies (sad->top_racc->first_child, src,
                           sad->top_racc->offset, 0, 0,
                           &sad->old_gsi, false, false, sad->loc);
}
/* Try to generate statements to load all sub-replacements in an access subtree
   formed by children of LACC from scalar replacements in the SAD->top_racc
   subtree.  If that is not possible, refresh the SAD->top_racc base aggregate
   and load the accesses from it.  */

static void
load_assign_lhs_subreplacements (struct access *lacc,
                                 struct subreplacement_assignment_data *sad)
{
3949 for (lacc
= lacc
->first_child
; lacc
; lacc
= lacc
->next_sibling
)
3951 HOST_WIDE_INT offset
;
3952 offset
= lacc
->offset
- sad
->left_offset
+ sad
->top_racc
->offset
;
3954 if (lacc
->grp_to_be_replaced
)
3956 struct access
*racc
;
3960 racc
= find_access_in_subtree (sad
->top_racc
, offset
, lacc
->size
);
3961 if (racc
&& racc
->grp_to_be_replaced
)
3963 rhs
= get_access_replacement (racc
);
3964 if (!useless_type_conversion_p (lacc
->type
, racc
->type
))
3965 rhs
= fold_build1_loc (sad
->loc
, VIEW_CONVERT_EXPR
,
3968 if (racc
->grp_partial_lhs
&& lacc
->grp_partial_lhs
)
3969 rhs
= force_gimple_operand_gsi (&sad
->old_gsi
, rhs
, true,
3970 NULL_TREE
, true, GSI_SAME_STMT
);
3974 /* No suitable access on the right hand side, need to load from
3975 the aggregate. See if we have to update it first... */
3976 if (sad
->refreshed
== SRA_UDH_NONE
)
3977 handle_unscalarized_data_in_subtree (sad
);
3979 if (sad
->refreshed
== SRA_UDH_LEFT
)
3980 rhs
= build_ref_for_model (sad
->loc
, sad
->assignment_lhs
,
3981 lacc
->offset
- sad
->left_offset
,
3982 lacc
, sad
->new_gsi
, true);
3984 rhs
= build_ref_for_model (sad
->loc
, sad
->assignment_rhs
,
3985 lacc
->offset
- sad
->left_offset
,
3986 lacc
, sad
->new_gsi
, true);
3987 if (lacc
->grp_partial_lhs
)
3988 rhs
= force_gimple_operand_gsi (sad
->new_gsi
,
3989 rhs
, true, NULL_TREE
,
3990 false, GSI_NEW_STMT
);
3993 stmt
= gimple_build_assign (get_access_replacement (lacc
), rhs
);
3994 gsi_insert_after (sad
->new_gsi
, stmt
, GSI_NEW_STMT
);
3995 gimple_set_location (stmt
, sad
->loc
);
3997 sra_stats
.subreplacements
++;
4001 if (sad
->refreshed
== SRA_UDH_NONE
4002 && lacc
->grp_read
&& !lacc
->grp_covered
)
4003 handle_unscalarized_data_in_subtree (sad
);
4005 if (lacc
&& lacc
->grp_to_be_debug_replaced
)
4009 struct access
*racc
= find_access_in_subtree (sad
->top_racc
,
4013 if (racc
&& racc
->grp_to_be_replaced
)
4015 if (racc
->grp_write
|| constant_decl_p (racc
->base
))
4016 drhs
= get_access_replacement (racc
);
4020 else if (sad
->refreshed
== SRA_UDH_LEFT
)
4021 drhs
= build_debug_ref_for_model (sad
->loc
, lacc
->base
,
4022 lacc
->offset
, lacc
);
4023 else if (sad
->refreshed
== SRA_UDH_RIGHT
)
4024 drhs
= build_debug_ref_for_model (sad
->loc
, sad
->top_racc
->base
,
4029 && !useless_type_conversion_p (lacc
->type
, TREE_TYPE (drhs
)))
4030 drhs
= fold_build1_loc (sad
->loc
, VIEW_CONVERT_EXPR
,
4032 ds
= gimple_build_debug_bind (get_access_replacement (lacc
),
4033 drhs
, gsi_stmt (sad
->old_gsi
));
4034 gsi_insert_after (sad
->new_gsi
, ds
, GSI_NEW_STMT
);
4038 if (lacc
->first_child
)
4039 load_assign_lhs_subreplacements (lacc
, sad
);
/* Result code for SRA assignment modification.  */
enum assignment_mod_result { SRA_AM_NONE,      /* nothing done for the stmt */
                             SRA_AM_MODIFIED,  /* stmt changed but not
                                                  removed */
                             SRA_AM_REMOVED }; /* stmt eliminated */
/* Modify assignments with a CONSTRUCTOR on their RHS.  STMT contains a pointer
   to the assignment and GSI is the statement iterator pointing at it.  Returns
   the same values as sra_modify_assign.  */

static enum assignment_mod_result
sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
{
  tree lhs = gimple_assign_lhs (stmt);
  struct access *acc = get_access_for_expr (lhs);
  if (!acc)
    return SRA_AM_NONE;
  location_t loc = gimple_location (stmt);

  if (gimple_clobber_p (stmt))
    {
      /* Clobber the replacement variable.  */
      clobber_subtree (acc, gsi, !acc->grp_covered, loc);
      /* Remove clobbers of fully scalarized variables, they are dead.  */
      if (acc->grp_covered)
        {
          unlink_stmt_vdef (stmt);
          gsi_remove (gsi, true);
          release_defs (stmt);
          return SRA_AM_REMOVED;
        }
      else
        return SRA_AM_MODIFIED;
    }

  if (CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt)) > 0)
    {
      /* I have never seen this code path trigger but if it can happen the
         following should handle it gracefully.  */
      if (access_has_children_p (acc))
        generate_subtree_copies (acc->first_child, lhs, acc->offset, 0, 0, gsi,
                                 true, true, loc);
      return SRA_AM_MODIFIED;
    }

  if (acc->grp_covered)
    {
      init_subtree_with_zero (acc, gsi, false, loc);
      unlink_stmt_vdef (stmt);
      gsi_remove (gsi, true);
      release_defs (stmt);
      return SRA_AM_REMOVED;
    }
  else
    {
      init_subtree_with_zero (acc, gsi, true, loc);
      return SRA_AM_MODIFIED;
    }
}
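/* Illustrative note (not part of the pass logic): for source like

     struct S { int i; long l; };
     struct S s = {};

   the empty-CONSTRUCTOR store to s is handled above by assigning zero to the
   scalar replacements of s.i and s.l (init_subtree_with_zero) and, when every
   part of s is covered by replacements, by deleting the original aggregate
   assignment altogether.  Which replacements exist depends on the analysis
   phase, so this is only a sketch.  */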
/* Create and return a new suitable default definition SSA_NAME for RACC which
   is an access describing an uninitialized part of an aggregate that is being
   loaded.  REG_TYPE is used instead of the actual RACC type if that is not of
   a gimple register type.  */

static tree
get_repl_default_def_ssa_name (struct access *racc, tree reg_type)
{
  gcc_checking_assert (!racc->grp_to_be_replaced
                       && !racc->grp_to_be_debug_replaced);
  if (!racc->replacement_decl)
    racc->replacement_decl = create_access_replacement (racc, reg_type);
  return get_or_create_ssa_default_def (cfun, racc->replacement_decl);
}
/* Generate statements to call .DEFERRED_INIT to initialize scalar replacements
   of accesses within a subtree ACCESS; all its children, siblings and their
   children are to be processed.
   GSI is a statement iterator used to place the new statements.  */

static void
generate_subtree_deferred_init (struct access *access,
                                tree init_type,
                                tree decl_name,
                                gimple_stmt_iterator *gsi,
                                location_t loc)
{
  do
    {
      if (access->grp_to_be_replaced)
        {
          tree repl = get_access_replacement (access);
          gimple *call
            = gimple_build_call_internal (IFN_DEFERRED_INIT, 3,
                                          TYPE_SIZE_UNIT (TREE_TYPE (repl)),
                                          init_type, decl_name);
          gimple_call_set_lhs (call, repl);
          gsi_insert_before (gsi, call, GSI_SAME_STMT);
          update_stmt (call);
          gimple_set_location (call, loc);
          sra_stats.subtree_deferred_init++;
        }
      if (access->first_child)
        generate_subtree_deferred_init (access->first_child, init_type,
                                        decl_name, gsi, loc);

      access = access->next_sibling;
    }
  while (access);
}
/* For a call to .DEFERRED_INIT:
   var = .DEFERRED_INIT (size_of_var, init_type, name_of_var);
   examine the LHS variable VAR and replace it with a scalar replacement if
   there is one, also replace the RHS call to a call to .DEFERRED_INIT of
   the corresponding scalar replacement variable.  Examine the subtree and
   do the scalar replacements in the subtree too.  STMT is the call, GSI is
   the statement iterator to place newly created statements.  */
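
/* A made-up example of the intended effect (not from the GCC sources): with
   -ftrivial-auto-var-init, an automatic aggregate "s" whose only field has
   the scalar replacement SR.s_f might start out as

     s = .DEFERRED_INIT (8, 2, &"s");

   and, once "s" is completely scalarized, end up as a call per replacement

     SR.s_f = .DEFERRED_INIT (4, 2, &"s");

   with the original call removed.  The sizes, the init_type constant and the
   replacement name are illustrative only.  */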
static enum assignment_mod_result
sra_modify_deferred_init (gimple *stmt, gimple_stmt_iterator *gsi)
{
  tree lhs = gimple_call_lhs (stmt);
  tree init_type = gimple_call_arg (stmt, 1);
  tree decl_name = gimple_call_arg (stmt, 2);

  struct access *lhs_access = get_access_for_expr (lhs);
  if (!lhs_access)
    return SRA_AM_NONE;

  location_t loc = gimple_location (stmt);

  if (lhs_access->grp_to_be_replaced)
    {
      tree lhs_repl = get_access_replacement (lhs_access);
      gimple_call_set_lhs (stmt, lhs_repl);
      tree arg0_repl = TYPE_SIZE_UNIT (TREE_TYPE (lhs_repl));
      gimple_call_set_arg (stmt, 0, arg0_repl);
      sra_stats.deferred_init++;
      gcc_assert (!lhs_access->first_child);
      return SRA_AM_MODIFIED;
    }

  if (lhs_access->first_child)
    generate_subtree_deferred_init (lhs_access->first_child,
                                    init_type, decl_name, gsi, loc);
  if (lhs_access->grp_covered)
    {
      unlink_stmt_vdef (stmt);
      gsi_remove (gsi, true);
      release_defs (stmt);
      return SRA_AM_REMOVED;
    }

  return SRA_AM_MODIFIED;
}
/* Examine both sides of the assignment statement pointed to by STMT, replace
   them with a scalar replacement if there is one and generate copying of
   replacements if scalarized aggregates have been used in the assignment.  GSI
   is used to hold generated statements for type conversions and subtree
   copying.  */

static enum assignment_mod_result
sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
{
  struct access *lacc, *racc;
  tree lhs, rhs;
  bool modify_this_stmt = false;
  bool force_gimple_rhs = false;
  location_t loc;
  gimple_stmt_iterator orig_gsi = *gsi;

  if (!gimple_assign_single_p (stmt))
    return SRA_AM_NONE;
  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (rhs) == CONSTRUCTOR)
    return sra_modify_constructor_assign (stmt, gsi);

  if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
      || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
      || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
    {
      modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (stmt),
                                          gsi, false);
      modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (stmt),
                                           gsi, true);
      return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
    }

  lacc = get_access_for_expr (lhs);
  racc = get_access_for_expr (rhs);
  if (!lacc && !racc)
    return SRA_AM_NONE;
  /* Avoid modifying initializations of constant-pool replacements.  */
  if (racc && (racc->replacement_decl == lhs))
    return SRA_AM_NONE;

  loc = gimple_location (stmt);
  if (lacc && lacc->grp_to_be_replaced)
    {
      lhs = get_access_replacement (lacc);
      gimple_assign_set_lhs (stmt, lhs);
      modify_this_stmt = true;
      if (lacc->grp_partial_lhs)
        force_gimple_rhs = true;
    }

  if (racc && racc->grp_to_be_replaced)
    {
      rhs = get_access_replacement (racc);
      modify_this_stmt = true;
      if (racc->grp_partial_lhs)
        force_gimple_rhs = true;
    }
  else if (racc
           && !racc->grp_unscalarized_data
           && !racc->grp_unscalarizable_region
           && TREE_CODE (lhs) == SSA_NAME
           && !access_has_replacements_p (racc))
    {
      rhs = get_repl_default_def_ssa_name (racc, TREE_TYPE (lhs));
      modify_this_stmt = true;
    }

  if (modify_this_stmt)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
        {
          /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
             ???  This should move to fold_stmt which we simply should
             call after building a VIEW_CONVERT_EXPR here.  */
          if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
              && !contains_bitfld_component_ref_p (lhs))
            {
              lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
              gimple_assign_set_lhs (stmt, lhs);
            }
          else if (lacc
                   && AGGREGATE_TYPE_P (TREE_TYPE (rhs))
                   && !contains_vce_or_bfcref_p (rhs))
            rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);

          if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
            {
              rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
                                     rhs);
              if (is_gimple_reg_type (TREE_TYPE (lhs))
                  && TREE_CODE (lhs) != SSA_NAME)
                force_gimple_rhs = true;
            }
        }
    }

  if (lacc && lacc->grp_to_be_debug_replaced)
    {
      tree dlhs = get_access_replacement (lacc);
      tree drhs = unshare_expr (rhs);
      if (!useless_type_conversion_p (TREE_TYPE (dlhs), TREE_TYPE (drhs)))
        {
          if (AGGREGATE_TYPE_P (TREE_TYPE (drhs))
              && !contains_vce_or_bfcref_p (drhs))
            drhs = build_debug_ref_for_model (loc, drhs, 0, lacc);
          if (drhs
              && !useless_type_conversion_p (TREE_TYPE (dlhs),
                                             TREE_TYPE (drhs)))
            drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
                                    TREE_TYPE (dlhs), drhs);
        }
      gdebug *ds = gimple_build_debug_bind (dlhs, drhs, stmt);
      gsi_insert_before (gsi, ds, GSI_SAME_STMT);
    }
  /* From this point on, the function deals with assignments in between
     aggregates when at least one has scalar reductions of some of its
     components.  There are three possible scenarios: 1) both the LHS and the
     RHS have to-be-scalarized components, 2) only the RHS has or 3) only the
     LHS has.

     In the first case, we would like to load the LHS components from the RHS
     components whenever possible.  If that is not possible, we would like to
     read it directly from the RHS (after updating it by storing in it its own
     components).  If there is some necessary unscalarized data in the LHS,
     it will be loaded by the original assignment too.  If neither of these
     cases happens, the original statement can be removed.  Most of this is
     done by load_assign_lhs_subreplacements.

     In the second case, we would like to store all the RHS scalarized
     components directly into the LHS and, if they cover the aggregate
     completely, remove the statement too.  In the third case, we want the LHS
     components to be loaded directly from the RHS (DSE will remove the
     original statement if it becomes redundant).

     This is a bit complex but manageable when types match and when unions do
     not cause confusion in a way that we cannot really load a component of the
     LHS from the RHS or vice versa (the access representing this level can
     have subaccesses that are accessible only through a different union field
     at a higher level - different from the one used in the examined
     expression).

     Therefore, I specially handle a fourth case, happening when there is a
     specific type cast or it is impossible to locate a scalarized subaccess on
     the other side of the expression.  If that happens, I simply "refresh" the
     RHS by storing in it its scalarized components, leave the original
     statement there to do the copying and then load the scalar replacements of
     the LHS.  This is what the first branch does.  */
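
  /* A made-up example of the second scenario (not from the GCC sources):
     given

       struct S { int a; int b; } d, s;
       ...
       d = s;

     where only "s" has the scalar replacements SR.s_a and SR.s_b, the pass
     emits component stores along the lines of

       d.a = SR.s_a;
       d.b = SR.s_b;

     and, because the replacements cover "s" completely, the original
     aggregate copy can be removed.  The replacement names are illustrative;
     the real ones are created by create_access_replacement.  */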
  if (modify_this_stmt
      || gimple_has_volatile_ops (stmt)
      || contains_vce_or_bfcref_p (rhs)
      || contains_vce_or_bfcref_p (lhs)
      || stmt_ends_bb_p (stmt))
    {
      /* No need to copy into a constant, it comes pre-initialized.  */
      if (access_has_children_p (racc) && !TREE_READONLY (racc->base))
        generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
                                 gsi, false, false, loc);
      if (access_has_children_p (lacc))
        {
          gimple_stmt_iterator alt_gsi = gsi_none ();
          if (stmt_ends_bb_p (stmt))
            {
              alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
              gsi = &alt_gsi;
            }
          generate_subtree_copies (lacc->first_child, lhs, lacc->offset, 0, 0,
                                   gsi, true, true, loc);
        }
      sra_stats.separate_lhs_rhs_handling++;

      /* This gimplification must be done after generate_subtree_copies,
         lest we insert the subtree copies in the middle of the gimplified
         sequence.  */
      if (force_gimple_rhs)
        rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
                                        true, GSI_SAME_STMT);
      if (gimple_assign_rhs1 (stmt) != rhs)
        {
          modify_this_stmt = true;
          gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
          gcc_assert (stmt == gsi_stmt (orig_gsi));
        }

      return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
    }
  else
    {
      if (access_has_children_p (lacc)
          && access_has_children_p (racc)
          /* When an access represents an unscalarizable region, it usually
             represents accesses with variable offset and thus must not be used
             to generate new memory accesses.  */
          && !lacc->grp_unscalarizable_region
          && !racc->grp_unscalarizable_region)
        {
          struct subreplacement_assignment_data sad;

          sad.left_offset = lacc->offset;
          sad.assignment_lhs = lhs;
          sad.assignment_rhs = rhs;
          sad.top_racc = racc;
          sad.old_gsi = *gsi;
          sad.new_gsi = gsi;
          sad.loc = gimple_location (stmt);
          sad.refreshed = SRA_UDH_NONE;

          if (lacc->grp_read && !lacc->grp_covered)
            handle_unscalarized_data_in_subtree (&sad);

          load_assign_lhs_subreplacements (lacc, &sad);
          if (sad.refreshed != SRA_UDH_RIGHT)
            {
              unlink_stmt_vdef (stmt);
              gsi_remove (&sad.old_gsi, true);
              release_defs (stmt);
              sra_stats.deleted++;
              return SRA_AM_REMOVED;
            }
        }
      else
        {
          if (access_has_children_p (racc)
              && !racc->grp_unscalarized_data
              && TREE_CODE (lhs) != SSA_NAME)
            {
              if (dump_file)
                {
                  fprintf (dump_file, "Removing load: ");
                  print_gimple_stmt (dump_file, stmt, 0);
                }
              generate_subtree_copies (racc->first_child, lhs,
                                       racc->offset, 0, 0, gsi,
                                       false, false, loc);
              gcc_assert (stmt == gsi_stmt (*gsi));
              unlink_stmt_vdef (stmt);
              gsi_remove (gsi, true);
              release_defs (stmt);
              sra_stats.deleted++;
              return SRA_AM_REMOVED;
            }
          /* Restore the aggregate RHS from its components so the
             prevailing aggregate copy does the right thing.  */
          if (access_has_children_p (racc) && !TREE_READONLY (racc->base))
            generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
                                     gsi, false, false, loc);
          /* Re-load the components of the aggregate copy destination.
             But use the RHS aggregate to load from to expose more
             optimization opportunities.  */
          if (access_has_children_p (lacc))
            generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
                                     0, 0, gsi, true, true, loc);
        }
    }

  return SRA_AM_NONE;
}
/* Set any scalar replacements of values in the constant pool to the initial
   value of the constant.  (Constant-pool decls like *.LC0 have effectively
   been initialized before the program starts, we must do the same for their
   replacements.)  Thus, we output statements like 'SR.1 = *.LC0[0];' into
   the function's entry block.  */

static void
initialize_constant_pool_replacements (void)
{
  gimple_seq seq = NULL;
  gimple_stmt_iterator gsi = gsi_start (seq);
  bitmap_iterator bi;
  unsigned i;

  EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
    {
      tree var = candidate (i);
      if (!constant_decl_p (var))
        continue;

      struct access *access = get_first_repr_for_decl (var);

      while (access)
        {
          if (access->replacement_decl)
            {
              gassign *stmt
                = gimple_build_assign (get_access_replacement (access),
                                       unshare_expr (access->expr));
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, "Generating constant initializer: ");
                  print_gimple_stmt (dump_file, stmt, 0);
                  fprintf (dump_file, "\n");
                }
              gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
            }

          if (access->first_child)
            access = access->first_child;
          else if (access->next_sibling)
            access = access->next_sibling;
          else
            {
              while (access->parent && !access->next_sibling)
                access = access->parent;
              if (access->next_sibling)
                access = access->next_sibling;
              else
                access = access->next_grp;
            }
        }
    }

  seq = gsi_seq (gsi);
  if (seq)
    gsi_insert_seq_on_edge_immediate (
      single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
}
/* Traverse the function body and make all modifications as decided in
   analyze_all_variable_accesses.  Return true iff the CFG has been
   changed.  */

static bool
sra_modify_function_body (void)
{
  bool cfg_changed = false;
  basic_block bb;

  initialize_constant_pool_replacements ();

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (bb);
      while (!gsi_end_p (gsi))
        {
          gimple *stmt = gsi_stmt (gsi);
          enum assignment_mod_result assign_result;
          bool modified = false, deleted = false;
          tree *t;
          unsigned i;

          switch (gimple_code (stmt))
            {
            case GIMPLE_RETURN:
              t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
              if (*t != NULL_TREE)
                modified |= sra_modify_expr (t, &gsi, false);
              break;

            case GIMPLE_ASSIGN:
              assign_result = sra_modify_assign (stmt, &gsi);
              modified |= assign_result == SRA_AM_MODIFIED;
              deleted = assign_result == SRA_AM_REMOVED;
              break;

            case GIMPLE_CALL:
              /* Handle calls to .DEFERRED_INIT specially.  */
              if (gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
                {
                  assign_result = sra_modify_deferred_init (stmt, &gsi);
                  modified |= assign_result == SRA_AM_MODIFIED;
                  deleted = assign_result == SRA_AM_REMOVED;
                }
              else
                {
                  /* Operands must be processed before the lhs.  */
                  for (i = 0; i < gimple_call_num_args (stmt); i++)
                    {
                      t = gimple_call_arg_ptr (stmt, i);
                      modified |= sra_modify_expr (t, &gsi, false);
                    }

                  if (gimple_call_lhs (stmt))
                    {
                      t = gimple_call_lhs_ptr (stmt);
                      modified |= sra_modify_expr (t, &gsi, true);
                    }
                }
              break;

            case GIMPLE_ASM:
              {
                gasm *asm_stmt = as_a <gasm *> (stmt);
                for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
                  {
                    t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
                    modified |= sra_modify_expr (t, &gsi, false);
                  }
                for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
                  {
                    t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
                    modified |= sra_modify_expr (t, &gsi, true);
                  }
              }
              break;

            default:
              break;
            }

          if (modified)
            {
              update_stmt (stmt);
              if (maybe_clean_eh_stmt (stmt)
                  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
                cfg_changed = true;
            }
          if (!deleted)
            gsi_next (&gsi);
        }
    }

  gsi_commit_edge_inserts ();
  return cfg_changed;
}
/* Generate statements initializing scalar replacements of parts of function
   parameters.  */
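
/* For illustration only (not from the GCC sources): if a parameter "p" of
   type struct S { int a; int b; } is scalarized, the entry edge receives
   copies along the lines of

     SR.p_a = p.a;
     SR.p_b = p.b;

   so that the scalar replacements start out with the values the caller passed
   in.  The replacement names are made up for the example.  */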
static void
initialize_parameter_reductions (void)
{
  gimple_stmt_iterator gsi;
  gimple_seq seq = NULL;
  tree parm;

  gsi = gsi_start (seq);
  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    {
      vec<access_p> *access_vec;
      struct access *access;

      if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
        continue;
      access_vec = get_base_access_vector (parm);
      if (!access_vec)
        continue;

      for (access = (*access_vec)[0];
           access;
           access = access->next_grp)
        generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
                                 EXPR_LOCATION (parm));
    }

  seq = gsi_seq (gsi);
  if (seq)
    gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
}
/* The "main" function of intraprocedural SRA passes.  Runs the analysis and if
   it reveals there are components of some aggregates to be scalarized, it runs
   the required transformations.  */

static unsigned int
perform_intra_sra (void)
{
  int ret = 0;
  sra_initialize ();

  if (!find_var_candidates ())
    goto out;

  if (!scan_function ())
    goto out;

  if (!analyze_all_variable_accesses ())
    goto out;

  if (sra_modify_function_body ())
    ret = TODO_update_ssa | TODO_cleanup_cfg;
  else
    ret = TODO_update_ssa;
  initialize_parameter_reductions ();

  statistics_counter_event (cfun, "Scalar replacements created",
                            sra_stats.replacements);
  statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
  statistics_counter_event (cfun, "Subtree copy stmts",
                            sra_stats.subtree_copies);
  statistics_counter_event (cfun, "Subreplacement stmts",
                            sra_stats.subreplacements);
  statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
  statistics_counter_event (cfun, "Separate LHS and RHS handling",
                            sra_stats.separate_lhs_rhs_handling);

 out:
  sra_deinitialize ();
  return ret;
}
/* Perform early intraprocedural SRA.  */
static unsigned int
early_intra_sra (void)
{
  sra_mode = SRA_MODE_EARLY_INTRA;
  return perform_intra_sra ();
}

/* Perform "late" intraprocedural SRA.  */
static unsigned int
late_intra_sra (void)
{
  sra_mode = SRA_MODE_INTRA;
  return perform_intra_sra ();
}

static bool
gate_intra_sra (void)
{
  return flag_tree_sra != 0 && dbg_cnt (tree_sra);
}
namespace {

const pass_data pass_data_sra_early =
{
  GIMPLE_PASS, /* type */
  "esra", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_SRA, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_sra_early : public gimple_opt_pass
{
public:
  pass_sra_early (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_sra_early, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return gate_intra_sra (); }
  virtual unsigned int execute (function *) { return early_intra_sra (); }

}; // class pass_sra_early

} // anon namespace

gimple_opt_pass *
make_pass_sra_early (gcc::context *ctxt)
{
  return new pass_sra_early (ctxt);
}

namespace {

const pass_data pass_data_sra =
{
  GIMPLE_PASS, /* type */
  "sra", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_SRA, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  TODO_update_address_taken, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_sra : public gimple_opt_pass
{
public:
  pass_sra (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_sra, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return gate_intra_sra (); }
  virtual unsigned int execute (function *) { return late_intra_sra (); }

}; // class pass_sra

} // anon namespace

gimple_opt_pass *
make_pass_sra (gcc::context *ctxt)
{
  return new pass_sra (ctxt);
}