/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "ggc.h"
#include "tree.h"

/* These RTL headers are needed for basic-block.h.  */
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "langhooks.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "tree-gimple.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "timevar.h"
#include "flags.h"
#include "bitmap.h"
#include "obstack.h"
#include "target.h"
/* expr.h is needed for MOVE_RATIO.  */
#include "expr.h"
#include "params.h"
/* The object of this pass is to replace a non-addressable aggregate with a
   set of independent variables.  Most of the time, all of these variables
   will be scalars.  But a secondary objective is to break up larger
   aggregates into smaller aggregates.  In the process we may find that some
   bits of the larger aggregate can be deleted as unreferenced.

   This substitution is done globally.  More localized substitutions would
   be the purview of a load-store motion pass.

   The optimization proceeds in phases:

     (1) Identify variables that have types that are candidates for
	 decomposition.

     (2) Scan the function looking for the ways these variables are used.
	 In particular we're interested in the number of times a variable
	 (or member) is needed as a complete unit, and the number of times
	 a variable (or member) is copied.

     (3) Based on the usage profile, instantiate substitution variables.

     (4) Scan the function making replacements.  */
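/* As an illustrative sketch (not part of the pass itself), given a
   candidate like

       struct point { int x; int y; };

       struct point p;
       p.x = 3;
       p.y = 4;
       return p.x + p.y;

   phases (1) through (4) conceptually rewrite the references to

       int p$x, p$y;
       p$x = 3;
       p$y = 4;
       return p$x + p$y;

   after which constant propagation can fold the return value to 7.  */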
/* The set of todo flags to return from tree_sra.  */
static unsigned int todoflags;

/* The set of aggregate variables that are candidates for scalarization.  */
static bitmap sra_candidates;

/* Set of scalarizable PARM_DECLs that need copy-in operations at the
   beginning of the function.  */
static bitmap needs_copy_in;

/* Sets of bit pairs that cache type decomposition and instantiation.  */
static bitmap sra_type_decomp_cache;
static bitmap sra_type_inst_cache;
/* One of these structures is created for each candidate aggregate and
   each (accessed) member or group of members of such an aggregate.  */
struct sra_elt
{
  /* A tree of the elements.  Used when we want to traverse everything.  */
  struct sra_elt *parent;
  struct sra_elt *groups;
  struct sra_elt *children;
  struct sra_elt *sibling;

  /* If this element is a root, then this is the VAR_DECL.  If this is
     a sub-element, this is some token used to identify the reference.
     In the case of COMPONENT_REF, this is the FIELD_DECL.  In the case
     of an ARRAY_REF, this is the (constant) index.  In the case of an
     ARRAY_RANGE_REF, this is the (constant) RANGE_EXPR.  In the case
     of a complex number, this is a zero or one.  */
  tree element;

  /* The type of the element.  */
  tree type;

  /* A VAR_DECL, for any sub-element we've decided to replace.  */
  tree replacement;

  /* The number of times the element is referenced as a whole.  I.e.
     given "a.b.c", this would be incremented for C, but not for A or B.  */
  unsigned int n_uses;

  /* The number of times the element is copied to or from another
     scalarizable element.  */
  unsigned int n_copies;

  /* True if TYPE is scalar.  */
  bool is_scalar;

  /* True if this element is a group of members of its parent.  */
  bool is_group;

  /* True if we saw something about this element that prevents scalarization,
     such as non-constant indexing.  */
  bool cannot_scalarize;

  /* True if we've decided that structure-to-structure assignment
     should happen via memcpy and not per-element.  */
  bool use_block_copy;

  /* True if everything under this element has been marked TREE_NO_WARNING.  */
  bool all_no_warning;

  /* A flag for use with/after random access traversals.  */
  bool visited;
};

#define IS_ELEMENT_FOR_GROUP(ELEMENT) (TREE_CODE (ELEMENT) == RANGE_EXPR)

#define FOR_EACH_ACTUAL_CHILD(CHILD, ELT)			\
  for ((CHILD) = (ELT)->is_group				\
		 ? next_child_for_group (NULL, (ELT))		\
		 : (ELT)->children;				\
       (CHILD);							\
       (CHILD) = (ELT)->is_group				\
		 ? next_child_for_group ((CHILD), (ELT))	\
		 : (CHILD)->sibling)
/* Helper function for above macro.  Return next child in group.  */

static struct sra_elt *
next_child_for_group (struct sra_elt *child, struct sra_elt *group)
{
  gcc_assert (group->is_group);

  /* Find the next child in the parent.  */
  if (child)
    child = child->sibling;
  else
    child = group->parent->children;

  /* Skip siblings that do not belong to the group.  */
  while (child)
    {
      tree g_elt = group->element;
      if (TREE_CODE (g_elt) == RANGE_EXPR)
	{
	  if (!tree_int_cst_lt (child->element, TREE_OPERAND (g_elt, 0))
	      && !tree_int_cst_lt (TREE_OPERAND (g_elt, 1), child->element))
	    break;
	}
      else
	gcc_unreachable ();

      child = child->sibling;
    }

  return child;
}
/* Random access to the child of a parent is performed by hashing.
   This prevents quadratic behavior, and allows SRA to function
   reasonably on larger records.  */
static htab_t sra_map;

/* All structures are allocated out of the following obstack.  */
static struct obstack sra_obstack;

/* Debugging functions.  */
static void dump_sra_elt_name (FILE *, struct sra_elt *);
extern void debug_sra_elt_name (struct sra_elt *);

/* Forward declarations.  */
static tree generate_element_ref (struct sra_elt *);
/* Return true if DECL is an SRA candidate.  */

static bool
is_sra_candidate_decl (tree decl)
{
  return DECL_P (decl) && bitmap_bit_p (sra_candidates, DECL_UID (decl));
}

/* Return true if TYPE is a scalar type.  */

static bool
is_sra_scalar_type (tree type)
{
  enum tree_code code = TREE_CODE (type);
  return (code == INTEGER_TYPE || code == REAL_TYPE || code == VECTOR_TYPE
	  || code == ENUMERAL_TYPE || code == BOOLEAN_TYPE
	  || code == POINTER_TYPE || code == OFFSET_TYPE
	  || code == REFERENCE_TYPE);
}
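/* For example, given "struct s { int i; float f; int *p; int a[2]; };",
   the types of the fields I, F and P all satisfy is_sra_scalar_type,
   while the ARRAY_TYPE of A does not; A is instead a candidate for
   further decomposition into its two elements.  */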
/* Return true if TYPE can be decomposed into a set of independent variables.

   Note that this doesn't imply that all elements of TYPE can be
   instantiated, just that if we decide to break up the type into
   separate pieces that it can be done.  */

bool
sra_type_can_be_decomposed_p (tree type)
{
  unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
  tree t;

  /* Avoid searching the same type twice.  */
  if (bitmap_bit_p (sra_type_decomp_cache, cache+0))
    return true;
  if (bitmap_bit_p (sra_type_decomp_cache, cache+1))
    return false;

  /* The type must have a definite nonzero size.  */
  if (TYPE_SIZE (type) == NULL || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
      || integer_zerop (TYPE_SIZE (type)))
    goto fail;

  /* The type must be a non-union aggregate.  */
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
	bool saw_one_field = false;

	for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
	  if (TREE_CODE (t) == FIELD_DECL)
	    {
	      /* Reject incorrectly represented bit fields.  */
	      if (DECL_BIT_FIELD (t)
		  && (tree_low_cst (DECL_SIZE (t), 1)
		      != TYPE_PRECISION (TREE_TYPE (t))))
		goto fail;

	      saw_one_field = true;
	    }

	/* Record types must have at least one field.  */
	if (!saw_one_field)
	  goto fail;
      }
      break;

    case ARRAY_TYPE:
      /* Array types must have a fixed lower and upper bound.  */
      t = TYPE_DOMAIN (type);
      if (t == NULL)
	goto fail;
      if (TYPE_MIN_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MIN_VALUE (t)))
	goto fail;
      if (TYPE_MAX_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MAX_VALUE (t)))
	goto fail;
      break;

    case COMPLEX_TYPE:
      break;

    default:
      goto fail;
    }

  bitmap_set_bit (sra_type_decomp_cache, cache+0);
  return true;

 fail:
  bitmap_set_bit (sra_type_decomp_cache, cache+1);
  return false;
}
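/* As an illustration: "struct s { int i; double d; }", "int[4]" and
   "_Complex double" can all be decomposed.  A union type, a
   variable-length array such as "int[n]", and an incomplete array type
   such as "int[]" all take the FAIL path above.  */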
/* Return true if DECL can be decomposed into a set of independent
   (though not necessarily scalar) variables.  */

static bool
decl_can_be_decomposed_p (tree var)
{
  /* Early out for scalars.  */
  if (is_sra_scalar_type (TREE_TYPE (var)))
    return false;

  /* The variable must not be aliased.  */
  if (!is_gimple_non_addressable (var))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Cannot scalarize variable ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, " because it must live in memory\n");
	}
      return false;
    }

  /* The variable must not be volatile.  */
  if (TREE_THIS_VOLATILE (var))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Cannot scalarize variable ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, " because it is declared volatile\n");
	}
      return false;
    }

  /* We must be able to decompose the variable's type.  */
  if (!sra_type_can_be_decomposed_p (TREE_TYPE (var)))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Cannot scalarize variable ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, " because its type cannot be decomposed\n");
	}
      return false;
    }

  return true;
}

/* Return true if TYPE can be *completely* decomposed into scalars.  */

static bool
type_can_instantiate_all_elements (tree type)
{
  if (is_sra_scalar_type (type))
    return true;
  if (!sra_type_can_be_decomposed_p (type))
    return false;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
	unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
	tree f;

	if (bitmap_bit_p (sra_type_inst_cache, cache+0))
	  return true;
	if (bitmap_bit_p (sra_type_inst_cache, cache+1))
	  return false;

	for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      if (!type_can_instantiate_all_elements (TREE_TYPE (f)))
		{
		  bitmap_set_bit (sra_type_inst_cache, cache+1);
		  return false;
		}
	    }

	bitmap_set_bit (sra_type_inst_cache, cache+0);
	return true;
      }

    case ARRAY_TYPE:
      return type_can_instantiate_all_elements (TREE_TYPE (type));

    case COMPLEX_TYPE:
      return true;

    default:
      gcc_unreachable ();
    }
}
/* Test whether ELT or some sub-element cannot be scalarized.  */

static bool
can_completely_scalarize_p (struct sra_elt *elt)
{
  struct sra_elt *c;

  if (elt->cannot_scalarize)
    return false;

  for (c = elt->children; c; c = c->sibling)
    if (!can_completely_scalarize_p (c))
      return false;

  for (c = elt->groups; c; c = c->sibling)
    if (!can_completely_scalarize_p (c))
      return false;

  return true;
}
/* A simplified tree hashing algorithm that only handles the types of
   trees we expect to find in sra_elt->element.  */

static hashval_t
sra_hash_tree (tree t)
{
  hashval_t h;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      h = DECL_UID (t);
      break;

    case INTEGER_CST:
      h = TREE_INT_CST_LOW (t) ^ TREE_INT_CST_HIGH (t);
      break;

    case RANGE_EXPR:
      h = iterative_hash_expr (TREE_OPERAND (t, 0), 0);
      h = iterative_hash_expr (TREE_OPERAND (t, 1), h);
      break;

    case FIELD_DECL:
      /* We can have types that are compatible, but have different member
	 lists, so we can't hash fields by ID.  Use offsets instead.  */
      h = iterative_hash_expr (DECL_FIELD_OFFSET (t), 0);
      h = iterative_hash_expr (DECL_FIELD_BIT_OFFSET (t), h);
      break;

    default:
      gcc_unreachable ();
    }

  return h;
}

/* Hash function for type SRA_PAIR.  */

static hashval_t
sra_elt_hash (const void *x)
{
  const struct sra_elt *e = x;
  const struct sra_elt *p;
  hashval_t h;

  h = sra_hash_tree (e->element);

  /* Take into account everything back up the chain.  Given that chain
     lengths are rarely very long, this should be acceptable.  If we
     truly identify this as a performance problem, it should work to
     hash the pointer value "e->parent".  */
  for (p = e->parent; p ; p = p->parent)
    h = (h * 65521) ^ sra_hash_tree (p->element);

  return h;
}

/* Equality function for type SRA_PAIR.  */

static int
sra_elt_eq (const void *x, const void *y)
{
  const struct sra_elt *a = x;
  const struct sra_elt *b = y;
  tree ae, be;

  if (a->parent != b->parent)
    return false;

  ae = a->element;
  be = b->element;

  if (ae == be)
    return true;
  if (TREE_CODE (ae) != TREE_CODE (be))
    return false;

  switch (TREE_CODE (ae))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* These are all pointer unique.  */
      return false;

    case INTEGER_CST:
      /* Integers are not pointer unique, so compare their values.  */
      return tree_int_cst_equal (ae, be);

    case RANGE_EXPR:
      return
	tree_int_cst_equal (TREE_OPERAND (ae, 0), TREE_OPERAND (be, 0))
	&& tree_int_cst_equal (TREE_OPERAND (ae, 1), TREE_OPERAND (be, 1));

    case FIELD_DECL:
      /* Fields are unique within a record, but not between
	 compatible records.  */
      if (DECL_FIELD_CONTEXT (ae) == DECL_FIELD_CONTEXT (be))
	return false;
      return fields_compatible_p (ae, be);

    default:
      gcc_unreachable ();
    }
}
/* Create or return the SRA_ELT structure for CHILD in PARENT.  PARENT
   may be null, in which case CHILD must be a DECL.  */

static struct sra_elt *
lookup_element (struct sra_elt *parent, tree child, tree type,
		enum insert_option insert)
{
  struct sra_elt dummy;
  struct sra_elt **slot;
  struct sra_elt *elt;

  if (parent)
    dummy.parent = parent->is_group ? parent->parent : parent;
  else
    dummy.parent = NULL;
  dummy.element = child;

  slot = (struct sra_elt **) htab_find_slot (sra_map, &dummy, insert);
  if (!slot && insert == NO_INSERT)
    return NULL;

  elt = *slot;
  if (!elt && insert == INSERT)
    {
      *slot = elt = obstack_alloc (&sra_obstack, sizeof (*elt));
      memset (elt, 0, sizeof (*elt));

      elt->parent = parent;
      elt->element = child;
      elt->type = type;
      elt->is_scalar = is_sra_scalar_type (type);

      if (parent)
	{
	  if (IS_ELEMENT_FOR_GROUP (elt->element))
	    {
	      elt->is_group = true;
	      elt->sibling = parent->groups;
	      parent->groups = elt;
	    }
	  else
	    {
	      elt->sibling = parent->children;
	      parent->children = elt;
	    }
	}

      /* If this is a parameter, then if we want to scalarize, we have
	 one copy from the true function parameter.  Count it now.  */
      if (TREE_CODE (child) == PARM_DECL)
	{
	  elt->n_copies = 1;
	  bitmap_set_bit (needs_copy_in, DECL_UID (child));
	}
    }

  return elt;
}
/* Create or return the SRA_ELT structure for EXPR if the expression
   refers to a scalarizable variable.  */

static struct sra_elt *
maybe_lookup_element_for_expr (tree expr)
{
  struct sra_elt *elt;
  tree child;

  switch (TREE_CODE (expr))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      if (is_sra_candidate_decl (expr))
	return lookup_element (NULL, expr, TREE_TYPE (expr), INSERT);
      return NULL;

    case ARRAY_REF:
      /* We can't scalarize variable array indices.  */
      if (in_array_bounds_p (expr))
	child = TREE_OPERAND (expr, 1);
      else
	return NULL;
      break;

    case ARRAY_RANGE_REF:
      /* We can't scalarize variable array indices.  */
      if (range_in_array_bounds_p (expr))
	{
	  tree domain = TYPE_DOMAIN (TREE_TYPE (expr));
	  child = build2 (RANGE_EXPR, integer_type_node,
			  TYPE_MIN_VALUE (domain), TYPE_MAX_VALUE (domain));
	}
      else
	return NULL;
      break;

    case COMPONENT_REF:
      /* Don't look through unions.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) != RECORD_TYPE)
	return NULL;
      child = TREE_OPERAND (expr, 1);
      break;

    case REALPART_EXPR:
      child = integer_zero_node;
      break;
    case IMAGPART_EXPR:
      child = integer_one_node;
      break;

    default:
      return NULL;
    }

  elt = maybe_lookup_element_for_expr (TREE_OPERAND (expr, 0));
  if (elt)
    return lookup_element (elt, child, TREE_TYPE (expr), INSERT);
  else
    return NULL;
}
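/* For instance, given a candidate "struct { int i; _Complex float c; } x",
   the expression "__real__ x.c" maps to a chain of three elements: the
   root sra_elt for X, a child keyed by the FIELD_DECL for C, and below
   that a child keyed by integer_zero_node for the real part.  */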
/* Functions to walk just enough of the tree to see all scalarizable
   references, and categorize them.  */

/* A set of callbacks for phases 2 and 4.  They'll be invoked for the
   various kinds of references seen.  In all cases, *BSI is an iterator
   pointing to the statement being processed.  */
struct sra_walk_fns
{
  /* Invoked when ELT is required as a unit.  Note that ELT might refer to
     a leaf node, in which case this is a simple scalar reference.  *EXPR_P
     points to the location of the expression.  IS_OUTPUT is true if this
     is a left-hand-side reference.  USE_ALL is true if we saw something we
     couldn't quite identify and had to force the use of the entire object.  */
  void (*use) (struct sra_elt *elt, tree *expr_p,
	       block_stmt_iterator *bsi, bool is_output, bool use_all);

  /* Invoked when we have a copy between two scalarizable references.  */
  void (*copy) (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
		block_stmt_iterator *bsi);

  /* Invoked when ELT is initialized from a constant.  VALUE may be NULL,
     in which case it should be treated as an empty CONSTRUCTOR.  */
  void (*init) (struct sra_elt *elt, tree value, block_stmt_iterator *bsi);

  /* Invoked when we have a copy between one scalarizable reference ELT
     and one non-scalarizable reference OTHER without side-effects.
     IS_OUTPUT is true if ELT is on the left-hand side.  */
  void (*ldst) (struct sra_elt *elt, tree other,
		block_stmt_iterator *bsi, bool is_output);

  /* True during phase 2, false during phase 4.  */
  /* ??? This is a hack.  */
  bool initial_scan;
};

#ifdef ENABLE_CHECKING
/* Invoked via walk_tree, if *TP contains a candidate decl, return it.  */

static tree
sra_find_candidate_decl (tree *tp, int *walk_subtrees,
			 void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
    {
      *walk_subtrees = 0;
      if (is_sra_candidate_decl (t))
	return t;
    }

  return NULL;
}
#endif
/* Walk most expressions looking for a scalarizable aggregate.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_expr (tree *expr_p, block_stmt_iterator *bsi, bool is_output,
	       const struct sra_walk_fns *fns)
{
  tree expr = *expr_p;
  tree inner = expr;
  bool disable_scalarization = false;
  bool use_all_p = false;

  /* We're looking to collect a reference expression between EXPR and INNER,
     such that INNER is a scalarizable decl and all other nodes through EXPR
     are references that we can scalarize.  If we come across something that
     we can't scalarize, we reset EXPR.  This has the effect of making it
     appear that we're referring to the larger expression as a whole.  */

  while (1)
    switch (TREE_CODE (inner))
      {
      case VAR_DECL:
      case PARM_DECL:
      case RESULT_DECL:
	/* If there is a scalarizable decl at the bottom, then process it.  */
	if (is_sra_candidate_decl (inner))
	  {
	    struct sra_elt *elt = maybe_lookup_element_for_expr (expr);
	    if (disable_scalarization)
	      elt->cannot_scalarize = true;
	    else
	      fns->use (elt, expr_p, bsi, is_output, use_all_p);
	  }
	return;

      case ARRAY_REF:
	/* Non-constant index means any member may be accessed.  Prevent the
	   expression from being scalarized.  If we were to treat this as a
	   reference to the whole array, we can wind up with a single dynamic
	   index reference inside a loop being overridden by several constant
	   index references during loop setup.  It's possible that this could
	   be avoided by using dynamic usage counts based on BB trip counts
	   (based on loop analysis or profiling), but that hardly seems worth
	   the effort.  */
	/* ??? Hack.  Figure out how to push this into the scan routines
	   without duplicating too much code.  */
	if (!in_array_bounds_p (inner))
	  {
	    disable_scalarization = true;
	    goto use_all;
	  }
	/* ??? Are we assured that non-constant bounds and stride will have
	   the same value everywhere?  I don't think Fortran will...  */
	if (TREE_OPERAND (inner, 2) || TREE_OPERAND (inner, 3))
	  goto use_all;
	inner = TREE_OPERAND (inner, 0);
	break;

      case ARRAY_RANGE_REF:
	if (!range_in_array_bounds_p (inner))
	  {
	    disable_scalarization = true;
	    goto use_all;
	  }
	/* ??? See above re non-constant bounds and stride.  */
	if (TREE_OPERAND (inner, 2) || TREE_OPERAND (inner, 3))
	  goto use_all;
	inner = TREE_OPERAND (inner, 0);
	break;

      case COMPONENT_REF:
	/* A reference to a union member constitutes a reference to the
	   entire union.  */
	if (TREE_CODE (TREE_TYPE (TREE_OPERAND (inner, 0))) != RECORD_TYPE)
	  goto use_all;
	/* ??? See above re non-constant stride.  */
	if (TREE_OPERAND (inner, 2))
	  goto use_all;
	inner = TREE_OPERAND (inner, 0);
	break;

      case REALPART_EXPR:
      case IMAGPART_EXPR:
	inner = TREE_OPERAND (inner, 0);
	break;

      case BIT_FIELD_REF:
	/* A bit field reference (access to *multiple* fields simultaneously)
	   is not currently scalarized.  Consider this an access to the
	   complete outer element, to which walk_tree will bring us next.  */
	goto use_all;

      case VIEW_CONVERT_EXPR:
      case NOP_EXPR:
	/* Similarly, a view/nop explicitly wants to look at an object in a
	   type other than the one we've scalarized.  */
	goto use_all;

      case WITH_SIZE_EXPR:
	/* This is a transparent wrapper.  The entire inner expression really
	   is being used.  */
	goto use_all;

      use_all:
	expr_p = &TREE_OPERAND (inner, 0);
	inner = expr = *expr_p;
	use_all_p = true;
	break;

      default:
#ifdef ENABLE_CHECKING
	/* Validate that we're not missing any references.  */
	gcc_assert (!walk_tree (&inner, sra_find_candidate_decl, NULL, NULL));
#endif
	return;
      }
}
/* Walk a TREE_LIST of values looking for scalarizable aggregates.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_tree_list (tree list, block_stmt_iterator *bsi, bool is_output,
		    const struct sra_walk_fns *fns)
{
  tree op;
  for (op = list; op ; op = TREE_CHAIN (op))
    sra_walk_expr (&TREE_VALUE (op), bsi, is_output, fns);
}

/* Walk the arguments of a CALL_EXPR looking for scalarizable aggregates.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_call_expr (tree expr, block_stmt_iterator *bsi,
		    const struct sra_walk_fns *fns)
{
  sra_walk_tree_list (TREE_OPERAND (expr, 1), bsi, false, fns);
}

/* Walk the inputs and outputs of an ASM_EXPR looking for scalarizable
   aggregates.  If we find one, invoke FNS->USE.  */

static void
sra_walk_asm_expr (tree expr, block_stmt_iterator *bsi,
		   const struct sra_walk_fns *fns)
{
  sra_walk_tree_list (ASM_INPUTS (expr), bsi, false, fns);
  sra_walk_tree_list (ASM_OUTPUTS (expr), bsi, true, fns);
}
/* Walk a MODIFY_EXPR and categorize the assignment appropriately.  */

static void
sra_walk_modify_expr (tree expr, block_stmt_iterator *bsi,
		      const struct sra_walk_fns *fns)
{
  struct sra_elt *lhs_elt, *rhs_elt;
  tree lhs, rhs;

  lhs = TREE_OPERAND (expr, 0);
  rhs = TREE_OPERAND (expr, 1);
  lhs_elt = maybe_lookup_element_for_expr (lhs);
  rhs_elt = maybe_lookup_element_for_expr (rhs);

  /* If both sides are scalarizable, this is a COPY operation.  */
  if (lhs_elt && rhs_elt)
    {
      fns->copy (lhs_elt, rhs_elt, bsi);
      return;
    }

  /* If the RHS is scalarizable, handle it.  There are only two cases.  */
  if (rhs_elt)
    {
      if (!rhs_elt->is_scalar && !TREE_SIDE_EFFECTS (lhs))
	fns->ldst (rhs_elt, lhs, bsi, false);
      else
	fns->use (rhs_elt, &TREE_OPERAND (expr, 1), bsi, false, false);
    }

  /* If it isn't scalarizable, there may be scalarizable variables within, so
     check for a call or else walk the RHS to see if we need to do any
     copy-in operations.  We need to do it before the LHS is scalarized so
     that the statements get inserted in the proper place, before any
     copy-out operations.  */
  else
    {
      tree call = get_call_expr_in (rhs);
      if (call)
	sra_walk_call_expr (call, bsi, fns);
      else
	sra_walk_expr (&TREE_OPERAND (expr, 1), bsi, false, fns);
    }

  /* Likewise, handle the LHS being scalarizable.  We have cases similar
     to those above, but also want to handle RHS being constant.  */
  if (lhs_elt)
    {
      /* If this is an assignment from a constant, or constructor, then
	 we have access to all of the elements individually.  Invoke INIT.  */
      if (TREE_CODE (rhs) == COMPLEX_EXPR
	  || TREE_CODE (rhs) == COMPLEX_CST
	  || TREE_CODE (rhs) == CONSTRUCTOR)
	fns->init (lhs_elt, rhs, bsi);

      /* If this is an assignment from read-only memory, treat this as if
	 we'd been passed the constructor directly.  Invoke INIT.  */
      else if (TREE_CODE (rhs) == VAR_DECL
	       && TREE_STATIC (rhs)
	       && TREE_READONLY (rhs)
	       && targetm.binds_local_p (rhs))
	fns->init (lhs_elt, DECL_INITIAL (rhs), bsi);

      /* If this is a copy from a non-scalarizable lvalue, invoke LDST.
	 The lvalue requirement prevents us from trying to directly scalarize
	 the result of a function call.  Which would result in trying to call
	 the function multiple times, and other evil things.  */
      else if (!lhs_elt->is_scalar
	       && !TREE_SIDE_EFFECTS (rhs) && is_gimple_addressable (rhs))
	fns->ldst (lhs_elt, rhs, bsi, true);

      /* Otherwise we're being used in some context that requires the
	 aggregate to be seen as a whole.  Invoke USE.  */
      else
	fns->use (lhs_elt, &TREE_OPERAND (expr, 0), bsi, true, false);
    }

  /* Similarly to above, LHS_ELT being null only means that the LHS as a
     whole is not a scalarizable reference.  There may be occurrences of
     scalarizable variables within, which implies a USE.  */
  else
    sra_walk_expr (&TREE_OPERAND (expr, 0), bsi, true, fns);
}
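/* Summarizing the categorization by example: when A and B are both
   scalarizable aggregates, "a = b" is a COPY; "a = { 0, 1 }" (a
   CONSTRUCTOR on the RHS) is an INIT; "a = *p", where *P is not
   scalarizable, is an LDST with IS_OUTPUT true; and passing A whole to
   a function is a USE of the entire aggregate.  */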
/* Entry point to the walk functions.  Search the entire function,
   invoking the callbacks in FNS on each of the references to
   scalarizable variables.  */

static void
sra_walk_function (const struct sra_walk_fns *fns)
{
  basic_block bb;
  block_stmt_iterator si, ni;

  /* ??? Phase 4 could derive some benefit to walking the function in
     dominator tree order.  */

  FOR_EACH_BB (bb)
    for (si = bsi_start (bb); !bsi_end_p (si); si = ni)
      {
	tree stmt, t;
	stmt_ann_t ann;

	stmt = bsi_stmt (si);
	ann = stmt_ann (stmt);

	ni = si;
	bsi_next (&ni);

	/* If the statement has no virtual operands, then it doesn't
	   make any structure references that we care about.  */
	if (ZERO_SSA_OPERANDS (stmt, (SSA_OP_VIRTUAL_DEFS | SSA_OP_VUSE)))
	  continue;

	switch (TREE_CODE (stmt))
	  {
	  case RETURN_EXPR:
	    /* If we have "return <retval>" then the return value is
	       already exposed for our pleasure.  Walk it as a USE to
	       force all the components back in place for the return.

	       If we have an embedded assignment, then <retval> is of
	       a type that gets returned in registers in this ABI, and
	       we do not wish to extend their lifetimes.  Treat this
	       as a USE of the variable on the RHS of this assignment.  */

	    t = TREE_OPERAND (stmt, 0);
	    if (TREE_CODE (t) == MODIFY_EXPR)
	      sra_walk_expr (&TREE_OPERAND (t, 1), &si, false, fns);
	    else
	      sra_walk_expr (&TREE_OPERAND (stmt, 0), &si, false, fns);
	    break;

	  case MODIFY_EXPR:
	    sra_walk_modify_expr (stmt, &si, fns);
	    break;
	  case CALL_EXPR:
	    sra_walk_call_expr (stmt, &si, fns);
	    break;
	  case ASM_EXPR:
	    sra_walk_asm_expr (stmt, &si, fns);
	    break;

	  default:
	    break;
	  }
      }
}
/* Phase One: Scan all referenced variables in the program looking for
   structures that could be decomposed.  */

static bool
find_candidates_for_sra (void)
{
  bool any_set = false;
  tree var;
  referenced_var_iterator rvi;

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (decl_can_be_decomposed_p (var))
	{
	  bitmap_set_bit (sra_candidates, DECL_UID (var));
	  any_set = true;
	}
    }

  return any_set;
}


/* Phase Two: Scan all references to scalarizable variables.  Count the
   number of times they are used or copied respectively.  */

/* Callbacks to fill in SRA_WALK_FNS.  Everything but USE is
   considered a copy, because we can decompose the reference such that
   the sub-elements needn't be contiguous.  */

static void
scan_use (struct sra_elt *elt, tree *expr_p ATTRIBUTE_UNUSED,
	  block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
	  bool is_output ATTRIBUTE_UNUSED, bool use_all ATTRIBUTE_UNUSED)
{
  elt->n_uses += 1;
}

static void
scan_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
	   block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
{
  lhs_elt->n_copies += 1;
  rhs_elt->n_copies += 1;
}

static void
scan_init (struct sra_elt *lhs_elt, tree rhs ATTRIBUTE_UNUSED,
	   block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
{
  lhs_elt->n_copies += 1;
}

static void
scan_ldst (struct sra_elt *elt, tree other ATTRIBUTE_UNUSED,
	   block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
	   bool is_output ATTRIBUTE_UNUSED)
{
  elt->n_copies += 1;
}
/* Dump the values we collected during the scanning phase.  */

static void
scan_dump (struct sra_elt *elt)
{
  struct sra_elt *c;

  dump_sra_elt_name (dump_file, elt);
  fprintf (dump_file, ": n_uses=%u n_copies=%u\n", elt->n_uses, elt->n_copies);

  for (c = elt->children; c ; c = c->sibling)
    scan_dump (c);

  for (c = elt->groups; c ; c = c->sibling)
    scan_dump (c);
}

/* Entry point to phase 2.  Scan the entire function, building up
   scalarization data structures, recording copies and uses.  */

static void
scan_function (void)
{
  static const struct sra_walk_fns fns = {
    scan_use, scan_copy, scan_init, scan_ldst, true
  };
  bitmap_iterator bi;

  sra_walk_function (&fns);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      unsigned i;

      fputs ("\nScan results:\n", dump_file);
      EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
	{
	  tree var = referenced_var (i);
	  struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
	  if (elt)
	    scan_dump (elt);
	}
      fputc ('\n', dump_file);
    }
}
/* Phase Three: Make decisions about which variables to scalarize, if any.
   All elements to be scalarized have replacement variables made for them.  */

/* A subroutine of build_element_name.  Recursively build the element
   name on the obstack.  */

static void
build_element_name_1 (struct sra_elt *elt)
{
  tree t;
  char buffer[32];

  if (elt->parent)
    {
      build_element_name_1 (elt->parent);
      obstack_1grow (&sra_obstack, '$');

      if (TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
	{
	  if (elt->element == integer_zero_node)
	    obstack_grow (&sra_obstack, "real", 4);
	  else
	    obstack_grow (&sra_obstack, "imag", 4);
	  return;
	}
    }

  t = elt->element;
  if (TREE_CODE (t) == INTEGER_CST)
    {
      /* ??? Eh.  Don't bother doing double-wide printing.  */
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (t));
      obstack_grow (&sra_obstack, buffer, strlen (buffer));
    }
  else
    {
      tree name = DECL_NAME (t);
      if (name)
	obstack_grow (&sra_obstack, IDENTIFIER_POINTER (name),
		      IDENTIFIER_LENGTH (name));
      else
	{
	  sprintf (buffer, "D%u", DECL_UID (t));
	  obstack_grow (&sra_obstack, buffer, strlen (buffer));
	}
    }
}

/* Construct a pretty variable name for an element's replacement variable.
   The name is built on the obstack.  */

static char *
build_element_name (struct sra_elt *elt)
{
  build_element_name_1 (elt);
  obstack_1grow (&sra_obstack, '\0');
  return XOBFINISH (&sra_obstack, char *);
}
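/* For example, the replacement for field F of member M of variable V
   is named "v$m$f", and the real part of a complex field C of V is
   named "v$c$real"; an unnamed base declaration falls back to the
   "D<uid>" form built above.  */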
/* Instantiate an element as an independent variable.  */

static void
instantiate_element (struct sra_elt *elt)
{
  struct sra_elt *base_elt;
  tree var, base;

  for (base_elt = elt; base_elt->parent; base_elt = base_elt->parent)
    continue;
  base = base_elt->element;

  elt->replacement = var = make_rename_temp (elt->type, "SR");
  DECL_SOURCE_LOCATION (var) = DECL_SOURCE_LOCATION (base);
  DECL_ARTIFICIAL (var) = 1;

  if (TREE_THIS_VOLATILE (elt->type))
    {
      TREE_THIS_VOLATILE (var) = 1;
      TREE_SIDE_EFFECTS (var) = 1;
    }

  if (DECL_NAME (base) && !DECL_IGNORED_P (base))
    {
      char *pretty_name = build_element_name (elt);
      DECL_NAME (var) = get_identifier (pretty_name);
      obstack_free (&sra_obstack, pretty_name);

      SET_DECL_DEBUG_EXPR (var, generate_element_ref (elt));
      DECL_DEBUG_EXPR_IS_FROM (var) = 1;

      DECL_IGNORED_P (var) = 0;
      TREE_NO_WARNING (var) = TREE_NO_WARNING (base);
    }
  else
    {
      DECL_IGNORED_P (var) = 1;
      /* ??? We can't generate any warning that would be meaningful.  */
      TREE_NO_WARNING (var) = 1;
    }

  if (dump_file)
    {
      fputs ("  ", dump_file);
      dump_sra_elt_name (dump_file, elt);
      fputs (" -> ", dump_file);
      print_generic_expr (dump_file, var, dump_flags);
      fputc ('\n', dump_file);
    }
}
/* Make one pass across an element tree deciding whether or not it's
   profitable to instantiate individual leaf scalars.

   PARENT_USES and PARENT_COPIES are the sum of the N_USES and N_COPIES
   fields all the way up the tree.  */

static void
decide_instantiation_1 (struct sra_elt *elt, unsigned int parent_uses,
			unsigned int parent_copies)
{
  if (dump_file && !elt->parent)
    {
      fputs ("Initial instantiation for ", dump_file);
      dump_sra_elt_name (dump_file, elt);
      fputc ('\n', dump_file);
    }

  if (elt->cannot_scalarize)
    return;

  if (elt->is_scalar)
    {
      /* The decision is simple: instantiate if we're used more frequently
	 than the parent needs to be seen as a complete unit.  */
      if (elt->n_uses + elt->n_copies + parent_copies > parent_uses)
	instantiate_element (elt);
    }
  else
    {
      struct sra_elt *c, *group;
      unsigned int this_uses = elt->n_uses + parent_uses;
      unsigned int this_copies = elt->n_copies + parent_copies;

      /* Consider groups of sub-elements as weighing in favour of
	 instantiation whatever their size.  */
      for (group = elt->groups; group ; group = group->sibling)
	FOR_EACH_ACTUAL_CHILD (c, group)
	  {
	    c->n_uses += group->n_uses;
	    c->n_copies += group->n_copies;
	  }

      for (c = elt->children; c ; c = c->sibling)
	decide_instantiation_1 (c, this_uses, this_copies);
    }
}
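/* A small worked example of the heuristic: for "a.b.c", suppose A is
   copied once (so PARENT_COPIES is 1 at C), A is never needed as a
   complete unit (PARENT_USES is 0), and C itself is read twice.  Then
   at C we test 2 + 0 + 1 > 0 and instantiate C's replacement.  Had A
   been used as a whole more often than its members, PARENT_USES would
   dominate and C would remain part of the aggregate.  */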
/* Compute the size and number of all instantiated elements below ELT.
   We will only care about this if the size of the complete structure
   fits in a HOST_WIDE_INT, so we don't have to worry about overflow.  */

static unsigned int
sum_instantiated_sizes (struct sra_elt *elt, unsigned HOST_WIDE_INT *sizep)
{
  if (elt->replacement)
    {
      *sizep += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (elt->type));
      return 1;
    }
  else
    {
      struct sra_elt *c;
      unsigned int count = 0;

      for (c = elt->children; c ; c = c->sibling)
	count += sum_instantiated_sizes (c, sizep);

      return count;
    }
}

/* Instantiate fields in ELT->TYPE that are not currently present as
   children of ELT.  */

static void instantiate_missing_elements (struct sra_elt *elt);

static void
instantiate_missing_elements_1 (struct sra_elt *elt, tree child, tree type)
{
  struct sra_elt *sub = lookup_element (elt, child, type, INSERT);
  if (sub->is_scalar)
    {
      if (sub->replacement == NULL)
	instantiate_element (sub);
    }
  else
    instantiate_missing_elements (sub);
}

static void
instantiate_missing_elements (struct sra_elt *elt)
{
  tree type = elt->type;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
	tree f;
	for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      tree field_type = TREE_TYPE (f);

	      /* canonicalize_component_ref() unwidens some bit-field
		 types (not marked as DECL_BIT_FIELD in C++), so we
		 must do the same, lest we may introduce type
		 mismatches.  */
	      if (INTEGRAL_TYPE_P (field_type)
		  && DECL_MODE (f) != TYPE_MODE (field_type))
		field_type = TREE_TYPE (get_unwidened (build3 (COMPONENT_REF,
							       field_type,
							       elt->element,
							       f, NULL_TREE),
						       NULL_TREE));

	      instantiate_missing_elements_1 (elt, f, field_type);
	    }
	break;
      }

    case ARRAY_TYPE:
      {
	tree i, max, subtype;

	i = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	max = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	subtype = TREE_TYPE (type);

	while (1)
	  {
	    instantiate_missing_elements_1 (elt, i, subtype);
	    if (tree_int_cst_equal (i, max))
	      break;
	    i = int_const_binop (PLUS_EXPR, i, integer_one_node, true);
	  }

	break;
      }

    case COMPLEX_TYPE:
      type = TREE_TYPE (type);
      instantiate_missing_elements_1 (elt, integer_zero_node, type);
      instantiate_missing_elements_1 (elt, integer_one_node, type);
      break;

    default:
      gcc_unreachable ();
    }
}
/* Make one pass across an element tree deciding whether to perform block
   or element copies.  If we decide on element copies, instantiate all
   elements.  Return true if there are any instantiated sub-elements.  */

static bool
decide_block_copy (struct sra_elt *elt)
{
  struct sra_elt *c;
  bool any_inst;

  /* We shouldn't be invoked on groups of sub-elements as they must
     behave like their parent as far as block copy is concerned.  */
  gcc_assert (!elt->is_group);

  /* If scalarization is disabled, respect it.  */
  if (elt->cannot_scalarize)
    {
      elt->use_block_copy = 1;

      if (dump_file)
	{
	  fputs ("Scalarization disabled for ", dump_file);
	  dump_sra_elt_name (dump_file, elt);
	  fputc ('\n', dump_file);
	}

      /* Disable scalarization of sub-elements */
      for (c = elt->children; c; c = c->sibling)
	{
	  c->cannot_scalarize = 1;
	  decide_block_copy (c);
	}

      /* Groups behave like their parent.  */
      for (c = elt->groups; c; c = c->sibling)
	{
	  c->cannot_scalarize = 1;
	  c->use_block_copy = 1;
	}

      return false;
    }

  /* Don't decide if we've no uses.  */
  if (elt->n_uses == 0 && elt->n_copies == 0)
    ;

  else if (!elt->is_scalar)
    {
      tree size_tree = TYPE_SIZE_UNIT (elt->type);
      bool use_block_copy = true;

      /* Tradeoffs for COMPLEX types pretty much always make it better
	 to go ahead and split the components.  */
      if (TREE_CODE (elt->type) == COMPLEX_TYPE)
	use_block_copy = false;

      /* Don't bother trying to figure out the rest if the structure is
	 so large we can't do easy arithmetic.  This also forces block
	 copies for variable sized structures.  */
      else if (host_integerp (size_tree, 1))
	{
	  unsigned HOST_WIDE_INT full_size, inst_size = 0;
	  unsigned int max_size, max_count, inst_count, full_count;

	  /* If the sra-max-structure-size parameter is 0, then the
	     user has not overridden the parameter and we can choose a
	     sensible default.  */
	  max_size = SRA_MAX_STRUCTURE_SIZE
	    ? SRA_MAX_STRUCTURE_SIZE
	    : MOVE_RATIO * UNITS_PER_WORD;
	  max_count = SRA_MAX_STRUCTURE_COUNT
	    ? SRA_MAX_STRUCTURE_COUNT
	    : MOVE_RATIO;

	  full_size = tree_low_cst (size_tree, 1);
	  full_count = count_type_elements (elt->type, false);
	  inst_count = sum_instantiated_sizes (elt, &inst_size);

	  /* ??? What to do here.  If there are two fields, and we've only
	     instantiated one, then instantiating the other is clearly a win.
	     If there are a large number of fields then the size of the copy
	     is much more of a factor.  */

	  /* If the structure is small, and we've made copies, go ahead
	     and instantiate, hoping that the copies will go away.  */
	  if (full_size <= max_size
	      && (full_count - inst_count) <= max_count
	      && elt->n_copies > elt->n_uses)
	    use_block_copy = false;
	  else if (inst_count * 100 >= full_count * SRA_FIELD_STRUCTURE_RATIO
		   && inst_size * 100 >= full_size * SRA_FIELD_STRUCTURE_RATIO)
	    use_block_copy = false;

	  /* In order to avoid block copy, we have to be able to instantiate
	     all elements of the type.  See if this is possible.  */
	  if (!use_block_copy
	      && (!can_completely_scalarize_p (elt)
		  || !type_can_instantiate_all_elements (elt->type)))
	    use_block_copy = true;
	}

      elt->use_block_copy = use_block_copy;

      /* Groups behave like their parent.  */
      for (c = elt->groups; c; c = c->sibling)
	c->use_block_copy = use_block_copy;

      if (dump_file)
	{
	  fprintf (dump_file, "Using %s for ",
		   use_block_copy ? "block-copy" : "element-copy");
	  dump_sra_elt_name (dump_file, elt);
	  fputc ('\n', dump_file);
	}

      if (!use_block_copy)
	{
	  instantiate_missing_elements (elt);
	  return true;
	}
    }

  any_inst = elt->replacement != NULL;

  for (c = elt->children; c ; c = c->sibling)
    any_inst |= decide_block_copy (c);

  return any_inst;
}
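/* A worked example of the size heuristic, assuming a 32-bit target
   where MOVE_RATIO is 4 and both params are left at their 0 default:
   MAX_SIZE becomes 4 * 4 = 16 bytes and MAX_COUNT becomes 4.  A
   12-byte struct of three ints that is copied more often than it is
   used then gets element copies; a 64-byte struct instead needs
   SRA_FIELD_STRUCTURE_RATIO percent of its fields and bytes already
   instantiated before the block copy is abandoned.  */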
/* Entry point to phase 3.  Instantiate scalar replacement variables.  */

static void
decide_instantiations (void)
{
  unsigned int i;
  bool cleared_any;
  bitmap_head done_head;
  bitmap_iterator bi;

  /* We cannot clear bits from a bitmap we're iterating over,
     so save up all the bits to clear until the end.  */
  bitmap_initialize (&done_head, &bitmap_default_obstack);
  cleared_any = false;

  EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
    {
      tree var = referenced_var (i);
      struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
      if (elt)
	{
	  decide_instantiation_1 (elt, 0, 0);
	  if (!decide_block_copy (elt))
	    elt = NULL;
	}
      if (!elt)
	{
	  bitmap_set_bit (&done_head, i);
	  cleared_any = true;
	}
    }

  if (cleared_any)
    {
      bitmap_and_compl_into (sra_candidates, &done_head);
      bitmap_and_compl_into (needs_copy_in, &done_head);
    }
  bitmap_clear (&done_head);

  if (!bitmap_empty_p (sra_candidates))
    todoflags |= TODO_update_smt_usage;

  mark_set_for_renaming (sra_candidates);

  if (dump_file)
    fputc ('\n', dump_file);
}
/* Phase Four: Update the function to match the replacements created.  */

/* Mark all the variables in V_MAY_DEF or V_MUST_DEF operands for STMT for
   renaming.  This becomes necessary when we modify all of a non-scalar.  */

static void
mark_all_v_defs_1 (tree stmt)
{
  tree sym;
  ssa_op_iter iter;

  update_stmt_if_modified (stmt);

  FOR_EACH_SSA_TREE_OPERAND (sym, stmt, iter, SSA_OP_ALL_VIRTUALS)
    {
      if (TREE_CODE (sym) == SSA_NAME)
	sym = SSA_NAME_VAR (sym);
      mark_sym_for_renaming (sym);
    }
}

/* Mark all the variables in virtual operands in all the statements in
   LIST for renaming.  */

static void
mark_all_v_defs (tree list)
{
  if (TREE_CODE (list) != STATEMENT_LIST)
    mark_all_v_defs_1 (list);
  else
    {
      tree_stmt_iterator i;
      for (i = tsi_start (list); !tsi_end_p (i); tsi_next (&i))
	mark_all_v_defs_1 (tsi_stmt (i));
    }
}

/* Mark every replacement under ELT with TREE_NO_WARNING.  */

static void
mark_no_warning (struct sra_elt *elt)
{
  if (!elt->all_no_warning)
    {
      if (elt->replacement)
	TREE_NO_WARNING (elt->replacement) = 1;
      else
	{
	  struct sra_elt *c;
	  FOR_EACH_ACTUAL_CHILD (c, elt)
	    mark_no_warning (c);
	}
      elt->all_no_warning = true;
    }
}
/* Build a single level component reference to ELT rooted at BASE.  */

static tree
generate_one_element_ref (struct sra_elt *elt, tree base)
{
  switch (TREE_CODE (TREE_TYPE (base)))
    {
    case RECORD_TYPE:
      {
	tree field = elt->element;

	/* Watch out for compatible records with differing field lists.  */
	if (DECL_FIELD_CONTEXT (field) != TYPE_MAIN_VARIANT (TREE_TYPE (base)))
	  field = find_compatible_field (TREE_TYPE (base), field);

	return build3 (COMPONENT_REF, elt->type, base, field, NULL);
      }

    case ARRAY_TYPE:
      todoflags |= TODO_update_smt_usage;
      if (TREE_CODE (elt->element) == RANGE_EXPR)
	return build4 (ARRAY_RANGE_REF, elt->type, base,
		       TREE_OPERAND (elt->element, 0), NULL, NULL);
      else
	return build4 (ARRAY_REF, elt->type, base, elt->element, NULL, NULL);

    case COMPLEX_TYPE:
      if (elt->element == integer_zero_node)
	return build1 (REALPART_EXPR, elt->type, base);
      else
	return build1 (IMAGPART_EXPR, elt->type, base);

    default:
      gcc_unreachable ();
    }
}

/* Build a full component reference to ELT rooted at its native variable.  */

static tree
generate_element_ref (struct sra_elt *elt)
{
  if (elt->parent)
    return generate_one_element_ref (elt, generate_element_ref (elt->parent));
  else
    return elt->element;
}

static tree
sra_build_assignment (tree dst, tree src)
{
  /* We need TYPE_CANONICAL to compare the types of dst and src
     efficiently, but that's only introduced in GCC 4.3.  */
  return build2 (MODIFY_EXPR, void_type_node, dst, src);
}
/* Generate a set of assignment statements in *LIST_P to copy all
   instantiated elements under ELT to or from the equivalent structure
   rooted at EXPR.  COPY_OUT controls the direction of the copy, with
   true meaning to copy out of EXPR into ELT.  */

static void
generate_copy_inout (struct sra_elt *elt, bool copy_out, tree expr,
		     tree *list_p)
{
  struct sra_elt *c;
  tree t;

  if (!copy_out && TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == COMPLEX_TYPE)
    {
      tree r, i;

      c = lookup_element (elt, integer_zero_node, NULL, NO_INSERT);
      r = c->replacement;
      c = lookup_element (elt, integer_one_node, NULL, NO_INSERT);
      i = c->replacement;

      t = build2 (COMPLEX_EXPR, elt->type, r, i);
      t = sra_build_assignment (expr, t);
      SSA_NAME_DEF_STMT (expr) = t;
      append_to_statement_list (t, list_p);
    }
  else if (elt->replacement)
    {
      if (copy_out)
	t = sra_build_assignment (elt->replacement, expr);
      else
	t = sra_build_assignment (expr, elt->replacement);
      append_to_statement_list (t, list_p);
    }
  else
    {
      FOR_EACH_ACTUAL_CHILD (c, elt)
	{
	  t = generate_one_element_ref (c, unshare_expr (expr));
	  generate_copy_inout (c, copy_out, t, list_p);
	}
    }
}

/* Generate a set of assignment statements in *LIST_P to copy all instantiated
   elements under SRC to their counterparts under DST.  There must be a 1-1
   correspondence of instantiated elements.  */

static void
generate_element_copy (struct sra_elt *dst, struct sra_elt *src, tree *list_p)
{
  struct sra_elt *dc, *sc;

  FOR_EACH_ACTUAL_CHILD (dc, dst)
    {
      sc = lookup_element (src, dc->element, NULL, NO_INSERT);
      gcc_assert (sc);
      generate_element_copy (dc, sc, list_p);
    }

  if (dst->replacement)
    {
      tree t;

      gcc_assert (src->replacement);

      t = sra_build_assignment (dst->replacement, src->replacement);
      append_to_statement_list (t, list_p);
    }
}
/* Generate a set of assignment statements in *LIST_P to zero all instantiated
   elements under ELT.  In addition, do not assign to elements that have been
   marked VISITED but do reset the visited flag; this allows easy coordination
   with generate_element_init.  */

static void
generate_element_zero (struct sra_elt *elt, tree *list_p)
{
  struct sra_elt *c;

  if (elt->visited)
    {
      elt->visited = false;
      return;
    }

  FOR_EACH_ACTUAL_CHILD (c, elt)
    generate_element_zero (c, list_p);

  if (elt->replacement)
    {
      tree t;

      gcc_assert (elt->is_scalar);
      t = fold_convert (elt->type, integer_zero_node);

      t = sra_build_assignment (elt->replacement, t);
      append_to_statement_list (t, list_p);
    }
}

/* Generate an assignment VAR = INIT, where INIT may need gimplification.
   Add the result to *LIST_P.  */

static void
generate_one_element_init (tree var, tree init, tree *list_p)
{
  /* The replacement can be almost arbitrarily complex.  Gimplify.  */
  tree stmt = sra_build_assignment (var, init);
  gimplify_and_add (stmt, list_p);
}
/* Generate a set of assignment statements in *LIST_P to set all instantiated
   elements under ELT with the contents of the initializer INIT.  In addition,
   mark all assigned elements VISITED; this allows easy coordination with
   generate_element_zero.  Return false if we found a case we couldn't
   handle.  */

static bool
generate_element_init_1 (struct sra_elt *elt, tree init, tree *list_p)
{
  bool result = true;
  enum tree_code init_code;
  struct sra_elt *sub;
  tree t;
  unsigned HOST_WIDE_INT idx;
  tree value, purpose;

  /* We can be passed DECL_INITIAL of a static variable.  It might have a
     conversion, which we strip off here.  */
  STRIP_USELESS_TYPE_CONVERSION (init);
  init_code = TREE_CODE (init);

  if (elt->is_scalar)
    {
      if (elt->replacement)
	{
	  generate_one_element_init (elt->replacement, init, list_p);
	  elt->visited = true;
	}
      return result;
    }

  switch (init_code)
    {
    case COMPLEX_CST:
    case COMPLEX_EXPR:
      FOR_EACH_ACTUAL_CHILD (sub, elt)
	{
	  if (sub->element == integer_zero_node)
	    t = (init_code == COMPLEX_EXPR
		 ? TREE_OPERAND (init, 0) : TREE_REALPART (init));
	  else
	    t = (init_code == COMPLEX_EXPR
		 ? TREE_OPERAND (init, 1) : TREE_IMAGPART (init));
	  result &= generate_element_init_1 (sub, t, list_p);
	}
      break;

    case CONSTRUCTOR:
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), idx, purpose, value)
	{
	  if (TREE_CODE (purpose) == RANGE_EXPR)
	    {
	      tree lower = TREE_OPERAND (purpose, 0);
	      tree upper = TREE_OPERAND (purpose, 1);

	      while (1)
		{
		  sub = lookup_element (elt, lower, NULL, NO_INSERT);
		  if (sub != NULL)
		    result &= generate_element_init_1 (sub, value, list_p);
		  if (tree_int_cst_equal (lower, upper))
		    break;
		  lower = int_const_binop (PLUS_EXPR, lower,
					   integer_one_node, true);
		}
	    }
	  else
	    {
	      sub = lookup_element (elt, purpose, NULL, NO_INSERT);
	      if (sub != NULL)
		result &= generate_element_init_1 (sub, value, list_p);
	    }
	}
      break;

    default:
      elt->visited = true;
      result = false;
    }

  return result;
}

/* A wrapper function for generate_element_init_1 that handles cleanup after
   gimplification.  */

static bool
generate_element_init (struct sra_elt *elt, tree init, tree *list_p)
{
  bool ret;

  push_gimplify_context ();
  ret = generate_element_init_1 (elt, init, list_p);
  pop_gimplify_context (NULL);

  /* The replacement can expose previously unreferenced variables.  */
  if (ret && *list_p)
    {
      tree_stmt_iterator i;

      for (i = tsi_start (*list_p); !tsi_end_p (i); tsi_next (&i))
	find_new_referenced_vars (tsi_stmt_ptr (i));
    }

  return ret;
}
/* Insert STMT on all the outgoing edges out of BB.  Note that if BB
   has more than one edge, STMT will be replicated for each edge.  Also,
   abnormal edges will be ignored.  */

void
insert_edge_copies (tree stmt, basic_block bb)
{
  edge e;
  edge_iterator ei;
  bool first_copy;

  first_copy = true;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      /* We don't need to insert copies on abnormal edges.  The
	 value of the scalar replacement is not guaranteed to
	 be valid through an abnormal edge.  */
      if (!(e->flags & EDGE_ABNORMAL))
	{
	  if (first_copy)
	    {
	      bsi_insert_on_edge (e, stmt);
	      first_copy = false;
	    }
	  else
	    bsi_insert_on_edge (e, unsave_expr_now (stmt));
	}
    }
}

/* Helper function to insert LIST before BSI, and set up line number info.  */

void
sra_insert_before (block_stmt_iterator *bsi, tree list)
{
  tree stmt = bsi_stmt (*bsi);

  if (EXPR_HAS_LOCATION (stmt))
    annotate_all_with_locus (&list, EXPR_LOCATION (stmt));
  bsi_insert_before (bsi, list, BSI_SAME_STMT);
}

/* Similarly, but insert after BSI.  Handles insertion onto edges as well.  */

void
sra_insert_after (block_stmt_iterator *bsi, tree list)
{
  tree stmt = bsi_stmt (*bsi);

  if (EXPR_HAS_LOCATION (stmt))
    annotate_all_with_locus (&list, EXPR_LOCATION (stmt));

  if (stmt_ends_bb_p (stmt))
    insert_edge_copies (list, bsi->bb);
  else
    bsi_insert_after (bsi, list, BSI_SAME_STMT);
}

/* Similarly, but replace the statement at BSI.  */

static void
sra_replace (block_stmt_iterator *bsi, tree list)
{
  sra_insert_before (bsi, list);
  bsi_remove (bsi, false);
  if (bsi_end_p (*bsi))
    *bsi = bsi_last (bsi->bb);
  else
    bsi_prev (bsi);
}
/* Scalarize a USE.  To recap, this is either a simple reference to ELT,
   if elt is scalar, or some occurrence of ELT that requires a complete
   aggregate.  IS_OUTPUT is true if ELT is being modified.  */

static void
scalarize_use (struct sra_elt *elt, tree *expr_p, block_stmt_iterator *bsi,
	       bool is_output, bool use_all)
{
  tree list = NULL, stmt = bsi_stmt (*bsi);

  if (elt->replacement)
    {
      /* If we have a replacement, then updating the reference is as
	 simple as modifying the existing statement in place.  */
      if (is_output)
	mark_all_v_defs (stmt);
      *expr_p = elt->replacement;
      update_stmt (stmt);
    }
  else
    {
      /* Otherwise we need some copies.  If ELT is being read, then we want
	 to store all (modified) sub-elements back into the structure before
	 the reference takes place.  If ELT is being written, then we want to
	 load the changed values back into our shadow variables.  */
      /* ??? We don't check modified for reads, we just always write all of
	 the values.  We should be able to record the SSA number of the VOP
	 for which the values were last read.  If that number matches the
	 SSA number of the VOP in the current statement, then we needn't
	 emit an assignment.  This would also eliminate double writes when
	 a structure is passed as more than one argument to a function call.
	 This optimization would be most effective if sra_walk_function
	 processed the blocks in dominator order.  */

      generate_copy_inout (elt, is_output, generate_element_ref (elt), &list);
      if (list == NULL)
	return;
      mark_all_v_defs (list);
      if (is_output)
	sra_insert_after (bsi, list);
      else
	{
	  sra_insert_before (bsi, list);
	  if (use_all)
	    mark_no_warning (elt);
	}
    }
}
/* Scalarize a COPY.  To recap, this is an assignment statement between
   two scalarizable references, LHS_ELT and RHS_ELT.  */

static void
scalarize_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
		block_stmt_iterator *bsi)
{
  tree list, stmt;

  if (lhs_elt->replacement && rhs_elt->replacement)
    {
      /* If we have two scalar operands, modify the existing statement.  */
      stmt = bsi_stmt (*bsi);

      /* See the commentary in sra_walk_function concerning
	 RETURN_EXPR, and why we should never see one here.  */
      gcc_assert (TREE_CODE (stmt) == MODIFY_EXPR);

      TREE_OPERAND (stmt, 0) = lhs_elt->replacement;
      TREE_OPERAND (stmt, 1) = rhs_elt->replacement;
      update_stmt (stmt);
    }
  else if (lhs_elt->use_block_copy || rhs_elt->use_block_copy)
    {
      /* If either side requires a block copy, then sync the RHS back
	 to the original structure, leave the original assignment
	 statement (which will perform the block copy), then load the
	 LHS values out of its now-updated original structure.  */
      /* ??? Could perform a modified pair-wise element copy.  That
	 would at least allow those elements that are instantiated in
	 both structures to be optimized well.  */

      list = NULL;
      generate_copy_inout (rhs_elt, false,
			   generate_element_ref (rhs_elt), &list);
      if (list)
	{
	  mark_all_v_defs (list);
	  sra_insert_before (bsi, list);
	}

      list = NULL;
      generate_copy_inout (lhs_elt, true,
			   generate_element_ref (lhs_elt), &list);
      if (list)
	{
	  mark_all_v_defs (list);
	  sra_insert_after (bsi, list);
	}
    }
  else
    {
      /* Otherwise both sides must be fully instantiated.  In which
	 case perform pair-wise element assignments and replace the
	 original block copy statement.  */

      stmt = bsi_stmt (*bsi);
      mark_all_v_defs (stmt);

      list = NULL;
      generate_element_copy (lhs_elt, rhs_elt, &list);
      gcc_assert (list);
      mark_all_v_defs (list);
      sra_replace (bsi, list);
    }
}
/* Scalarize an INIT.  To recap, this is an assignment to a scalarizable
   reference from some form of constructor: CONSTRUCTOR, COMPLEX_CST or
   COMPLEX_EXPR.  If RHS is NULL, it should be treated as an empty
   CONSTRUCTOR.  */

static void
scalarize_init (struct sra_elt *lhs_elt, tree rhs, block_stmt_iterator *bsi)
{
  bool result = true;
  tree list = NULL;

  /* Generate initialization statements for all members extant in the RHS.  */
  if (rhs)
    {
      /* Unshare the expression just in case this is from a decl's initial.  */
      rhs = unshare_expr (rhs);
      result = generate_element_init (lhs_elt, rhs, &list);
    }

  /* CONSTRUCTOR is defined such that any member not mentioned is assigned
     a zero value.  Initialize the rest of the instantiated elements.  */
  generate_element_zero (lhs_elt, &list);

  if (!result)
    {
      /* If we failed to convert the entire initializer, then we must
	 leave the structure assignment in place and must load values
	 from the structure into the slots for which we did not find
	 constants.  The easiest way to do this is to generate a complete
	 copy-out, and then follow that with the constant assignments
	 that we were able to build.  DCE will clean things up.  */
      tree list0 = NULL;
      generate_copy_inout (lhs_elt, true, generate_element_ref (lhs_elt),
			   &list0);
      append_to_statement_list (list, &list0);
      list = list0;
    }

  if (lhs_elt->use_block_copy || !result)
    {
      /* Since LHS is not fully instantiated, we must leave the structure
	 assignment in place.  Treating this case differently from a USE
	 exposes constants to later optimizations.  */
      if (list)
	{
	  mark_all_v_defs (list);
	  sra_insert_after (bsi, list);
	}
    }
  else
    {
      /* The LHS is fully instantiated.  The list of initializations
	 replaces the original structure assignment.  */
      gcc_assert (list);
      mark_all_v_defs (bsi_stmt (*bsi));
      mark_all_v_defs (list);
      sra_replace (bsi, list);
    }
}

/* A subroutine of scalarize_ldst called via walk_tree.  Set TREE_NO_TRAP
   on all INDIRECT_REFs.  */

static tree
mark_notrap (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (TREE_CODE (t) == INDIRECT_REF)
    {
      TREE_THIS_NOTRAP (t) = 1;
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;

  return NULL;
}
/* Scalarize a LDST.  To recap, this is an assignment between one scalarizable
   reference ELT and one non-scalarizable reference OTHER.  IS_OUTPUT is true
   if ELT is on the left-hand side.  */

static void
scalarize_ldst (struct sra_elt *elt, tree other,
		block_stmt_iterator *bsi, bool is_output)
{
  /* Shouldn't have gotten called for a scalar.  */
  gcc_assert (!elt->replacement);

  if (elt->use_block_copy)
    {
      /* Since ELT is not fully instantiated, we have to leave the
	 block copy in place.  Treat this as a USE.  */
      scalarize_use (elt, NULL, bsi, is_output, false);
    }
  else
    {
      /* The interesting case is when ELT is fully instantiated.  In this
	 case we can have each element stored/loaded directly to/from the
	 corresponding slot in OTHER.  This avoids a block copy.  */

      tree list = NULL, stmt = bsi_stmt (*bsi);

      mark_all_v_defs (stmt);
      generate_copy_inout (elt, is_output, other, &list);
      mark_all_v_defs (list);
      gcc_assert (list);

      /* Preserve EH semantics.  */
      if (stmt_ends_bb_p (stmt))
	{
	  tree_stmt_iterator tsi;
	  tree first;

	  /* Extract the first statement from LIST.  */
	  tsi = tsi_start (list);
	  first = tsi_stmt (tsi);
	  tsi_delink (&tsi);

	  /* Replace the old statement with this new representative.  */
	  bsi_replace (bsi, first, true);

	  if (!tsi_end_p (tsi))
	    {
	      /* If any reference would trap, then they all would.  And more
		 to the point, the first would.  Therefore none of the rest
		 will trap since the first didn't.  Indicate this by
		 iterating over the remaining statements and set
		 TREE_THIS_NOTRAP in all INDIRECT_REFs.  */
	      do
		{
		  walk_tree (tsi_stmt_ptr (tsi), mark_notrap, NULL, NULL);
		  tsi_next (&tsi);
		}
	      while (!tsi_end_p (tsi));

	      insert_edge_copies (list, bsi->bb);
	    }
	}
      else
	sra_replace (bsi, list);
    }
}
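/* For example, given "*q = s" with S fully instantiated as s$x and s$y,
   the single block store is replaced by the element stores
   "(*q).x = s$x; (*q).y = s$y", and when the original statement could
   throw, only the first generated store keeps the EH edge; the rest are
   marked TREE_THIS_NOTRAP as described above.  */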
/* Generate initializations for all scalarizable parameters.  */

static void
scalarize_parms (void)
{
  tree list = NULL;
  unsigned i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (needs_copy_in, 0, i, bi)
    {
      tree var = referenced_var (i);
      struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
      generate_copy_inout (elt, true, var, &list);
    }

  if (list)
    {
      insert_edge_copies (list, ENTRY_BLOCK_PTR);
      mark_all_v_defs (list);
    }
}

/* Entry point to phase 4.  Update the function to match replacements.  */

static void
scalarize_function (void)
{
  static const struct sra_walk_fns fns = {
    scalarize_use, scalarize_copy, scalarize_init, scalarize_ldst, false
  };

  sra_walk_function (&fns);
  scalarize_parms ();
  bsi_commit_edge_inserts ();
}
/* Debug helper function.  Print ELT in a nice human-readable format.  */

static void
dump_sra_elt_name (FILE *f, struct sra_elt *elt)
{
  if (elt->parent && TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
    {
      fputs (elt->element == integer_zero_node ? "__real__ " : "__imag__ ", f);
      dump_sra_elt_name (f, elt->parent);
    }
  else
    {
      if (elt->parent)
	dump_sra_elt_name (f, elt->parent);
      if (DECL_P (elt->element))
	{
	  if (TREE_CODE (elt->element) == FIELD_DECL)
	    fputc ('.', f);
	  print_generic_expr (f, elt->element, dump_flags);
	}
      else if (TREE_CODE (elt->element) == RANGE_EXPR)
	fprintf (f, "[" HOST_WIDE_INT_PRINT_DEC ".." HOST_WIDE_INT_PRINT_DEC "]",
		 TREE_INT_CST_LOW (TREE_OPERAND (elt->element, 0)),
		 TREE_INT_CST_LOW (TREE_OPERAND (elt->element, 1)));
      else
	fprintf (f, "[" HOST_WIDE_INT_PRINT_DEC "]",
		 TREE_INT_CST_LOW (elt->element));
    }
}

/* Likewise, but callable from the debugger.  */

void
debug_sra_elt_name (struct sra_elt *elt)
{
  dump_sra_elt_name (stderr, elt);
  fputc ('\n', stderr);
}

void
sra_init_cache (void)
{
  if (sra_type_decomp_cache)
    return;

  sra_type_decomp_cache = BITMAP_ALLOC (NULL);
  sra_type_inst_cache = BITMAP_ALLOC (NULL);
}
/* Main entry point.  */

static unsigned int
tree_sra (void)
{
  /* Initialize local variables.  */
  todoflags = 0;
  gcc_obstack_init (&sra_obstack);
  sra_candidates = BITMAP_ALLOC (NULL);
  needs_copy_in = BITMAP_ALLOC (NULL);
  sra_init_cache ();
  sra_map = htab_create (101, sra_elt_hash, sra_elt_eq, NULL);

  /* Scan.  If we find anything, instantiate and scalarize.  */
  if (find_candidates_for_sra ())
    {
      scan_function ();
      decide_instantiations ();
      scalarize_function ();
    }

  /* Free allocated memory.  */
  htab_delete (sra_map);
  sra_map = NULL;
  BITMAP_FREE (sra_candidates);
  BITMAP_FREE (needs_copy_in);
  BITMAP_FREE (sra_type_decomp_cache);
  BITMAP_FREE (sra_type_inst_cache);
  obstack_free (&sra_obstack, NULL);

  return todoflags;
}

static bool
gate_sra (void)
{
  return flag_tree_sra != 0;
}
struct tree_opt_pass pass_sra =
{
  "sra",				/* name */
  gate_sra,				/* gate */
  tree_sra,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_SRA,				/* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
  0,					/* properties_provided */
  PROP_smt_usage,			/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func /* todo_flags_finish */
  | TODO_update_ssa
  | TODO_ggc_collect | TODO_verify_ssa,
  0					/* letter */
};