/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"

/* These RTL headers are needed for basic-block.h.  */
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "langhooks.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "tree-gimple.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "bitmap.h"
#include "obstack.h"
#include "target.h"
/* expr.h is needed for MOVE_RATIO.  */
#include "expr.h"
#include "params.h"
/* The objective of this pass is to replace a non-addressable aggregate with a
   set of independent variables.  Most of the time, all of these variables
   will be scalars.  But a secondary objective is to break up larger
   aggregates into smaller aggregates.  In the process we may find that some
   bits of the larger aggregate can be deleted as unreferenced.

   This substitution is done globally.  More localized substitutions would
   be the purview of a load-store motion pass.

   The optimization proceeds in phases:

   (1) Identify variables that have types that are candidates for
       decomposition.

   (2) Scan the function looking for the ways these variables are used.
       In particular we're interested in the number of times a variable
       (or member) is needed as a complete unit, and the number of times
       a variable (or member) is copied.

   (3) Based on the usage profile, instantiate substitution variables.

   (4) Scan the function making replacements.  */
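
/* As a hedged illustration only (the exact heuristics and replacement
   names are decided later in this file): given

     struct point { int x; int y; };

     int
     sum_point (void)
     {
       struct point p;
       p.x = 1;
       p.y = 2;
       return p.x + p.y;
     }

   the pass may replace the non-addressable aggregate P with two
   independent scalar variables (conceptually "p$x" and "p$y"), which
   the scalar optimizers can then keep in registers and simplify.  */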
/* The set of todo flags to return from tree_sra.  */
static unsigned int todoflags;

/* The set of aggregate variables that are candidates for scalarization.  */
static bitmap sra_candidates;

/* Set of scalarizable PARM_DECLs that need copy-in operations at the
   beginning of the function.  */
static bitmap needs_copy_in;

/* Sets of bit pairs that cache type decomposition and instantiation.  */
static bitmap sra_type_decomp_cache;
static bitmap sra_type_inst_cache;
/* One of these structures is created for each candidate aggregate and
   each (accessed) member or group of members of such an aggregate.  */
struct sra_elt
{
  /* A tree of the elements.  Used when we want to traverse everything.  */
  struct sra_elt *parent;
  struct sra_elt *groups;
  struct sra_elt *children;
  struct sra_elt *sibling;

  /* If this element is a root, then this is the VAR_DECL.  If this is
     a sub-element, this is some token used to identify the reference.
     In the case of COMPONENT_REF, this is the FIELD_DECL.  In the case
     of an ARRAY_REF, this is the (constant) index.  In the case of an
     ARRAY_RANGE_REF, this is the (constant) RANGE_EXPR.  In the case
     of a complex number, this is a zero or one.  */
  tree element;

  /* The type of the element.  */
  tree type;

  /* A VAR_DECL, for any sub-element we've decided to replace.  */
  tree replacement;

  /* The number of times the element is referenced as a whole.  I.e.
     given "a.b.c", this would be incremented for C, but not for A or B.  */
  unsigned int n_uses;

  /* The number of times the element is copied to or from another
     scalarizable element.  */
  unsigned int n_copies;

  /* True if TYPE is scalar.  */
  bool is_scalar;

  /* True if this element is a group of members of its parent.  */
  bool is_group;

  /* True if we saw something about this element that prevents scalarization,
     such as non-constant indexing.  */
  bool cannot_scalarize;

  /* True if we've decided that structure-to-structure assignment
     should happen via memcpy and not per-element.  */
  bool use_block_copy;

  /* True if everything under this element has been marked TREE_NO_WARNING.  */
  bool all_no_warning;

  /* A flag for use with/after random access traversals.  */
  bool visited;

  /* True if there is BIT_FIELD_REF on the lhs with a vector.  */
  bool is_vector_lhs;
};
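
/* Illustrative example (not used by the code itself): for a reference
   such as "a.f[3]", the element chain is a root whose ELEMENT is the
   VAR_DECL "a", a child whose ELEMENT is the FIELD_DECL "f", and a
   grandchild whose ELEMENT is the INTEGER_CST 3.  */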
#define IS_ELEMENT_FOR_GROUP(ELEMENT) (TREE_CODE (ELEMENT) == RANGE_EXPR)

#define FOR_EACH_ACTUAL_CHILD(CHILD, ELT)			\
  for ((CHILD) = (ELT)->is_group				\
		 ? next_child_for_group (NULL, (ELT))		\
		 : (ELT)->children;				\
       (CHILD);							\
       (CHILD) = (ELT)->is_group				\
		 ? next_child_for_group ((CHILD), (ELT))	\
		 : (CHILD)->sibling)
/* Helper function for above macro.  Return next child in group.  */
static struct sra_elt *
next_child_for_group (struct sra_elt *child, struct sra_elt *group)
{
  gcc_assert (group->is_group);

  /* Find the next child in the parent.  */
  if (child)
    child = child->sibling;
  else
    child = group->parent->children;

  /* Skip siblings that do not belong to the group.  */
  while (child)
    {
      tree g_elt = group->element;
      if (TREE_CODE (g_elt) == RANGE_EXPR)
	{
	  if (!tree_int_cst_lt (child->element, TREE_OPERAND (g_elt, 0))
	      && !tree_int_cst_lt (TREE_OPERAND (g_elt, 1), child->element))
	    break;
	}
      else
	gcc_unreachable ();

      child = child->sibling;
    }

  return child;
}
/* Random access to the child of a parent is performed by hashing.
   This prevents quadratic behavior, and allows SRA to function
   reasonably on larger records.  */
static htab_t sra_map;

/* All structures are allocated out of the following obstack.  */
static struct obstack sra_obstack;

/* Debugging functions.  */
static void dump_sra_elt_name (FILE *, struct sra_elt *);
extern void debug_sra_elt_name (struct sra_elt *);

/* Forward declarations.  */
static tree generate_element_ref (struct sra_elt *);
/* Return true if DECL is an SRA candidate.  */

static bool
is_sra_candidate_decl (tree decl)
{
  return DECL_P (decl) && bitmap_bit_p (sra_candidates, DECL_UID (decl));
}

/* Return true if TYPE is a scalar type.  */

static bool
is_sra_scalar_type (tree type)
{
  enum tree_code code = TREE_CODE (type);
  return (code == INTEGER_TYPE || code == REAL_TYPE || code == VECTOR_TYPE
	  || code == ENUMERAL_TYPE || code == BOOLEAN_TYPE
	  || code == POINTER_TYPE || code == OFFSET_TYPE
	  || code == REFERENCE_TYPE);
}
/* Return true if TYPE can be decomposed into a set of independent variables.

   Note that this doesn't imply that all elements of TYPE can be
   instantiated, just that if we decide to break up the type into
   separate pieces that it can be done.  */

static bool
sra_type_can_be_decomposed_p (tree type)
{
  unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
  tree t;

  /* Avoid searching the same type twice.  */
  if (bitmap_bit_p (sra_type_decomp_cache, cache+0))
    return true;
  if (bitmap_bit_p (sra_type_decomp_cache, cache+1))
    return false;

  /* The type must have a definite nonzero size.  */
  if (TYPE_SIZE (type) == NULL || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
      || integer_zerop (TYPE_SIZE (type)))
    goto fail;

  /* The type must be a non-union aggregate.  */
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
	bool saw_one_field = false;

	for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
	  if (TREE_CODE (t) == FIELD_DECL)
	    {
	      /* Reject incorrectly represented bit fields.  */
	      if (DECL_BIT_FIELD (t)
		  && (tree_low_cst (DECL_SIZE (t), 1)
		      != TYPE_PRECISION (TREE_TYPE (t))))
		goto fail;

	      saw_one_field = true;
	    }

	/* Record types must have at least one field.  */
	if (!saw_one_field)
	  goto fail;
      }
      break;

    case ARRAY_TYPE:
      /* Array types must have a fixed lower and upper bound.  */
      t = TYPE_DOMAIN (type);
      if (t == NULL)
	goto fail;
      if (TYPE_MIN_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MIN_VALUE (t)))
	goto fail;
      if (TYPE_MAX_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MAX_VALUE (t)))
	goto fail;
      break;

    case COMPLEX_TYPE:
      break;

    default:
      goto fail;
    }

  bitmap_set_bit (sra_type_decomp_cache, cache+0);
  return true;

 fail:
  bitmap_set_bit (sra_type_decomp_cache, cache+1);
  return false;
}
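
/* For illustration (a hedged sketch, not used by the pass itself): a
   type like "struct { int i; double d; }" can be decomposed, while a
   union, a record containing an oddly represented bit field, or an
   array without constant bounds cannot, per the checks above.  */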
/* Return true if DECL can be decomposed into a set of independent
   (though not necessarily scalar) variables.  */

static bool
decl_can_be_decomposed_p (tree var)
{
  /* Early out for scalars.  */
  if (is_sra_scalar_type (TREE_TYPE (var)))
    return false;

  /* The variable must not be aliased.  */
  if (!is_gimple_non_addressable (var))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Cannot scalarize variable ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, " because it must live in memory\n");
	}
      return false;
    }

  /* The variable must not be volatile.  */
  if (TREE_THIS_VOLATILE (var))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Cannot scalarize variable ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, " because it is declared volatile\n");
	}
      return false;
    }

  /* We must be able to decompose the variable's type.  */
  if (!sra_type_can_be_decomposed_p (TREE_TYPE (var)))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Cannot scalarize variable ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, " because its type cannot be decomposed\n");
	}
      return false;
    }

  return true;
}
/* Return true if TYPE can be *completely* decomposed into scalars.  */

static bool
type_can_instantiate_all_elements (tree type)
{
  if (is_sra_scalar_type (type))
    return true;
  if (!sra_type_can_be_decomposed_p (type))
    return false;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
	unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
	tree f;

	if (bitmap_bit_p (sra_type_inst_cache, cache+0))
	  return true;
	if (bitmap_bit_p (sra_type_inst_cache, cache+1))
	  return false;

	for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      if (!type_can_instantiate_all_elements (TREE_TYPE (f)))
		{
		  bitmap_set_bit (sra_type_inst_cache, cache+1);
		  return false;
		}
	    }

	bitmap_set_bit (sra_type_inst_cache, cache+0);
	return true;
      }

    case ARRAY_TYPE:
      return type_can_instantiate_all_elements (TREE_TYPE (type));

    case COMPLEX_TYPE:
      return true;

    default:
      gcc_unreachable ();
    }
}
/* Test whether ELT or some sub-element cannot be scalarized.  */

static bool
can_completely_scalarize_p (struct sra_elt *elt)
{
  struct sra_elt *c;

  if (elt->cannot_scalarize)
    return false;

  for (c = elt->children; c; c = c->sibling)
    if (!can_completely_scalarize_p (c))
      return false;

  for (c = elt->groups; c; c = c->sibling)
    if (!can_completely_scalarize_p (c))
      return false;

  return true;
}
/* A simplified tree hashing algorithm that only handles the types of
   trees we expect to find in sra_elt->element.  */

static hashval_t
sra_hash_tree (tree t)
{
  hashval_t h;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      h = DECL_UID (t);
      break;

    case INTEGER_CST:
      h = TREE_INT_CST_LOW (t) ^ TREE_INT_CST_HIGH (t);
      break;

    case RANGE_EXPR:
      h = iterative_hash_expr (TREE_OPERAND (t, 0), 0);
      h = iterative_hash_expr (TREE_OPERAND (t, 1), h);
      break;

    case FIELD_DECL:
      /* We can have types that are compatible, but have different member
	 lists, so we can't hash fields by ID.  Use offsets instead.  */
      h = iterative_hash_expr (DECL_FIELD_OFFSET (t), 0);
      h = iterative_hash_expr (DECL_FIELD_BIT_OFFSET (t), h);
      break;

    default:
      gcc_unreachable ();
    }

  return h;
}
/* Hash function for type SRA_PAIR.  */

static hashval_t
sra_elt_hash (const void *x)
{
  const struct sra_elt *e = x;
  const struct sra_elt *p;
  hashval_t h;

  h = sra_hash_tree (e->element);

  /* Take into account everything back up the chain.  Given that chain
     lengths are rarely very long, this should be acceptable.  If we
     truly identify this as a performance problem, it should work to
     hash the pointer value "e->parent".  */
  for (p = e->parent; p ; p = p->parent)
    h = (h * 65521) ^ sra_hash_tree (p->element);

  return h;
}
/* Equality function for type SRA_PAIR.  */

static int
sra_elt_eq (const void *x, const void *y)
{
  const struct sra_elt *a = x;
  const struct sra_elt *b = y;
  tree ae, be;

  if (a->parent != b->parent)
    return false;

  ae = a->element;
  be = b->element;

  if (ae == be)
    return true;
  if (TREE_CODE (ae) != TREE_CODE (be))
    return false;

  switch (TREE_CODE (ae))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* These are all pointer unique.  */
      return false;

    case INTEGER_CST:
      /* Integers are not pointer unique, so compare their values.  */
      return tree_int_cst_equal (ae, be);

    case RANGE_EXPR:
      return
	tree_int_cst_equal (TREE_OPERAND (ae, 0), TREE_OPERAND (be, 0))
	&& tree_int_cst_equal (TREE_OPERAND (ae, 1), TREE_OPERAND (be, 1));

    case FIELD_DECL:
      /* Fields are unique within a record, but not between
	 compatible records.  */
      if (DECL_FIELD_CONTEXT (ae) == DECL_FIELD_CONTEXT (be))
	return false;
      return fields_compatible_p (ae, be);

    default:
      gcc_unreachable ();
    }
}
/* Create or return the SRA_ELT structure for CHILD in PARENT.  PARENT
   may be null, in which case CHILD must be a DECL.  */

static struct sra_elt *
lookup_element (struct sra_elt *parent, tree child, tree type,
		enum insert_option insert)
{
  struct sra_elt dummy;
  struct sra_elt **slot;
  struct sra_elt *elt;

  if (parent)
    dummy.parent = parent->is_group ? parent->parent : parent;
  else
    dummy.parent = NULL;
  dummy.element = child;

  slot = (struct sra_elt **) htab_find_slot (sra_map, &dummy, insert);
  if (!slot && insert == NO_INSERT)
    return NULL;

  elt = *slot;
  if (!elt && insert == INSERT)
    {
      *slot = elt = obstack_alloc (&sra_obstack, sizeof (*elt));
      memset (elt, 0, sizeof (*elt));

      elt->parent = parent;
      elt->element = child;
      elt->type = type;
      elt->is_scalar = is_sra_scalar_type (type);

      if (parent)
	{
	  if (IS_ELEMENT_FOR_GROUP (elt->element))
	    {
	      elt->is_group = true;
	      elt->sibling = parent->groups;
	      parent->groups = elt;
	    }
	  else
	    {
	      elt->sibling = parent->children;
	      parent->children = elt;
	    }
	}

      /* If this is a parameter, then if we want to scalarize, we have
	 one copy from the true function parameter.  Count it now.  */
      if (TREE_CODE (child) == PARM_DECL)
	{
	  elt->n_copies = 1;
	  bitmap_set_bit (needs_copy_in, DECL_UID (child));
	}
    }

  return elt;
}
/* Create or return the SRA_ELT structure for EXPR if the expression
   refers to a scalarizable variable.  */

static struct sra_elt *
maybe_lookup_element_for_expr (tree expr)
{
  struct sra_elt *elt;
  tree child;

  switch (TREE_CODE (expr))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      if (is_sra_candidate_decl (expr))
	return lookup_element (NULL, expr, TREE_TYPE (expr), INSERT);
      return NULL;

    case ARRAY_REF:
      /* We can't scalarize variable array indices.  */
      if (in_array_bounds_p (expr))
	child = TREE_OPERAND (expr, 1);
      else
	return NULL;
      break;

    case ARRAY_RANGE_REF:
      /* We can't scalarize variable array indices.  */
      if (range_in_array_bounds_p (expr))
	{
	  tree domain = TYPE_DOMAIN (TREE_TYPE (expr));
	  child = build2 (RANGE_EXPR, integer_type_node,
			  TYPE_MIN_VALUE (domain), TYPE_MAX_VALUE (domain));
	}
      else
	return NULL;
      break;

    case COMPONENT_REF:
      /* Don't look through unions.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) != RECORD_TYPE)
	return NULL;
      child = TREE_OPERAND (expr, 1);
      break;

    case REALPART_EXPR:
      child = integer_zero_node;
      break;
    case IMAGPART_EXPR:
      child = integer_one_node;
      break;

    default:
      return NULL;
    }

  elt = maybe_lookup_element_for_expr (TREE_OPERAND (expr, 0));
  if (elt)
    return lookup_element (elt, child, TREE_TYPE (expr), INSERT);
  return NULL;
}
/* Functions to walk just enough of the tree to see all scalarizable
   references, and categorize them.  */

/* A set of callbacks for phases 2 and 4.  They'll be invoked for the
   various kinds of references seen.  In all cases, *BSI is an iterator
   pointing to the statement being processed.  */
struct sra_walk_fns
{
  /* Invoked when ELT is required as a unit.  Note that ELT might refer to
     a leaf node, in which case this is a simple scalar reference.  *EXPR_P
     points to the location of the expression.  IS_OUTPUT is true if this
     is a left-hand-side reference.  USE_ALL is true if we saw something we
     couldn't quite identify and had to force the use of the entire object.  */
  void (*use) (struct sra_elt *elt, tree *expr_p,
	       block_stmt_iterator *bsi, bool is_output, bool use_all);

  /* Invoked when we have a copy between two scalarizable references.  */
  void (*copy) (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
		block_stmt_iterator *bsi);

  /* Invoked when ELT is initialized from a constant.  VALUE may be NULL,
     in which case it should be treated as an empty CONSTRUCTOR.  */
  void (*init) (struct sra_elt *elt, tree value, block_stmt_iterator *bsi);

  /* Invoked when we have a copy between one scalarizable reference ELT
     and one non-scalarizable reference OTHER.  IS_OUTPUT is true if ELT
     is on the left-hand side.  */
  void (*ldst) (struct sra_elt *elt, tree other,
		block_stmt_iterator *bsi, bool is_output);

  /* True during phase 2, false during phase 4.  */
  /* ??? This is a hack.  */
  bool initial_scan;
};
#ifdef ENABLE_CHECKING
/* Invoked via walk_tree, if *TP contains a candidate decl, return it.  */

static tree
sra_find_candidate_decl (tree *tp, int *walk_subtrees,
			 void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
    {
      *walk_subtrees = 0;
      if (is_sra_candidate_decl (t))
	return t;
    }

  return NULL;
}
#endif
/* Walk most expressions looking for a scalarizable aggregate.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_expr (tree *expr_p, block_stmt_iterator *bsi, bool is_output,
	       const struct sra_walk_fns *fns)
{
  tree expr = *expr_p;
  tree inner = expr;
  bool disable_scalarization = false;
  bool use_all_p = false;

  /* We're looking to collect a reference expression between EXPR and INNER,
     such that INNER is a scalarizable decl and all other nodes through EXPR
     are references that we can scalarize.  If we come across something that
     we can't scalarize, we reset EXPR.  This has the effect of making it
     appear that we're referring to the larger expression as a whole.  */

  while (1)
    switch (TREE_CODE (inner))
      {
      case VAR_DECL:
      case PARM_DECL:
      case RESULT_DECL:
	/* If there is a scalarizable decl at the bottom, then process it.  */
	if (is_sra_candidate_decl (inner))
	  {
	    struct sra_elt *elt = maybe_lookup_element_for_expr (expr);
	    if (disable_scalarization)
	      elt->cannot_scalarize = true;
	    else
	      fns->use (elt, expr_p, bsi, is_output, use_all_p);
	  }
	return;

      case ARRAY_REF:
	/* Non-constant index means any member may be accessed.  Prevent the
	   expression from being scalarized.  If we were to treat this as a
	   reference to the whole array, we can wind up with a single dynamic
	   index reference inside a loop being overridden by several constant
	   index references during loop setup.  It's possible that this could
	   be avoided by using dynamic usage counts based on BB trip counts
	   (based on loop analysis or profiling), but that hardly seems worth
	   the effort.  */
	/* ??? Hack.  Figure out how to push this into the scan routines
	   without duplicating too much code.  */
	if (!in_array_bounds_p (inner))
	  {
	    disable_scalarization = true;
	    goto use_all;
	  }
	/* ??? Are we assured that non-constant bounds and stride will have
	   the same value everywhere?  I don't think Fortran will...  */
	if (TREE_OPERAND (inner, 2) || TREE_OPERAND (inner, 3))
	  goto use_all;
	inner = TREE_OPERAND (inner, 0);
	break;

      case ARRAY_RANGE_REF:
	if (!range_in_array_bounds_p (inner))
	  {
	    disable_scalarization = true;
	    goto use_all;
	  }
	/* ??? See above re non-constant bounds and stride.  */
	if (TREE_OPERAND (inner, 2) || TREE_OPERAND (inner, 3))
	  goto use_all;
	inner = TREE_OPERAND (inner, 0);
	break;

      case COMPONENT_REF:
	/* A reference to a union member constitutes a reference to the
	   entire union.  */
	if (TREE_CODE (TREE_TYPE (TREE_OPERAND (inner, 0))) != RECORD_TYPE)
	  goto use_all;
	/* ??? See above re non-constant stride.  */
	if (TREE_OPERAND (inner, 2))
	  goto use_all;
	inner = TREE_OPERAND (inner, 0);
	break;

      case REALPART_EXPR:
      case IMAGPART_EXPR:
	inner = TREE_OPERAND (inner, 0);
	break;

      case BIT_FIELD_REF:
	/* A bit field reference to a specific vector is scalarized, but ones
	   used as outputs need to be marked as used on the left hand side so
	   that when we scalarize them, we can mark that variable as not
	   renamable.  */
	if (is_output
	    && TREE_CODE (TREE_TYPE (TREE_OPERAND (inner, 0))) == VECTOR_TYPE)
	  {
	    struct sra_elt *elt
	      = maybe_lookup_element_for_expr (TREE_OPERAND (inner, 0));
	    elt->is_vector_lhs = true;
	  }
	/* A bit field reference (access to *multiple* fields simultaneously)
	   is not currently scalarized.  Consider this an access to the
	   complete outer element, to which walk_tree will bring us next.  */
	goto use_all;

      case VIEW_CONVERT_EXPR:
      case NOP_EXPR:
	/* Similarly, a view/nop explicitly wants to look at an object in a
	   type other than the one we've scalarized.  */
	goto use_all;

      case WITH_SIZE_EXPR:
	/* This is a transparent wrapper.  The entire inner expression really
	   is being used.  */
	goto use_all;

      use_all:
	expr_p = &TREE_OPERAND (inner, 0);
	inner = expr = *expr_p;
	use_all_p = true;
	break;

      default:
#ifdef ENABLE_CHECKING
	/* Validate that we're not missing any references.  */
	gcc_assert (!walk_tree (&inner, sra_find_candidate_decl, NULL, NULL));
#endif
	return;
      }
}
/* Walk a TREE_LIST of values looking for scalarizable aggregates.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_tree_list (tree list, block_stmt_iterator *bsi, bool is_output,
		    const struct sra_walk_fns *fns)
{
  tree op;
  for (op = list; op ; op = TREE_CHAIN (op))
    sra_walk_expr (&TREE_VALUE (op), bsi, is_output, fns);
}
/* Walk the arguments of a CALL_EXPR looking for scalarizable aggregates.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_call_expr (tree expr, block_stmt_iterator *bsi,
		    const struct sra_walk_fns *fns)
{
  sra_walk_tree_list (TREE_OPERAND (expr, 1), bsi, false, fns);
}
/* Walk the inputs and outputs of an ASM_EXPR looking for scalarizable
   aggregates.  If we find one, invoke FNS->USE.  */

static void
sra_walk_asm_expr (tree expr, block_stmt_iterator *bsi,
		   const struct sra_walk_fns *fns)
{
  sra_walk_tree_list (ASM_INPUTS (expr), bsi, false, fns);
  sra_walk_tree_list (ASM_OUTPUTS (expr), bsi, true, fns);
}
/* Walk a GIMPLE_MODIFY_STMT and categorize the assignment appropriately.  */

static void
sra_walk_gimple_modify_stmt (tree expr, block_stmt_iterator *bsi,
			     const struct sra_walk_fns *fns)
{
  struct sra_elt *lhs_elt, *rhs_elt;
  tree lhs, rhs;

  lhs = GIMPLE_STMT_OPERAND (expr, 0);
  rhs = GIMPLE_STMT_OPERAND (expr, 1);
  lhs_elt = maybe_lookup_element_for_expr (lhs);
  rhs_elt = maybe_lookup_element_for_expr (rhs);

  /* If both sides are scalarizable, this is a COPY operation.  */
  if (lhs_elt && rhs_elt)
    {
      fns->copy (lhs_elt, rhs_elt, bsi);
      return;
    }

  /* If the RHS is scalarizable, handle it.  There are only two cases.  */
  if (rhs_elt)
    {
      if (!rhs_elt->is_scalar)
	fns->ldst (rhs_elt, lhs, bsi, false);
      else
	fns->use (rhs_elt, &GIMPLE_STMT_OPERAND (expr, 1), bsi, false, false);
    }

  /* If it isn't scalarizable, there may be scalarizable variables within, so
     check for a call or else walk the RHS to see if we need to do any
     copy-in operations.  We need to do it before the LHS is scalarized so
     that the statements get inserted in the proper place, before any
     copy-out operations.  */
  else
    {
      tree call = get_call_expr_in (rhs);
      if (call)
	sra_walk_call_expr (call, bsi, fns);
      else
	sra_walk_expr (&GIMPLE_STMT_OPERAND (expr, 1), bsi, false, fns);
    }

  /* Likewise, handle the LHS being scalarizable.  We have cases similar
     to those above, but also want to handle RHS being constant.  */
  if (lhs_elt)
    {
      /* If this is an assignment from a constant, or constructor, then
	 we have access to all of the elements individually.  Invoke INIT.  */
      if (TREE_CODE (rhs) == COMPLEX_EXPR
	  || TREE_CODE (rhs) == COMPLEX_CST
	  || TREE_CODE (rhs) == CONSTRUCTOR)
	fns->init (lhs_elt, rhs, bsi);

      /* If this is an assignment from read-only memory, treat this as if
	 we'd been passed the constructor directly.  Invoke INIT.  */
      else if (TREE_CODE (rhs) == VAR_DECL
	       && TREE_READONLY (rhs)
	       && targetm.binds_local_p (rhs))
	fns->init (lhs_elt, DECL_INITIAL (rhs), bsi);

      /* If this is a copy from a non-scalarizable lvalue, invoke LDST.
	 The lvalue requirement prevents us from trying to directly scalarize
	 the result of a function call.  Which would result in trying to call
	 the function multiple times, and other evil things.  */
      else if (!lhs_elt->is_scalar && is_gimple_addressable (rhs))
	fns->ldst (lhs_elt, rhs, bsi, true);

      /* Otherwise we're being used in some context that requires the
	 aggregate to be seen as a whole.  Invoke USE.  */
      else
	fns->use (lhs_elt, &GIMPLE_STMT_OPERAND (expr, 0), bsi, true, false);
    }

  /* Similarly to above, LHS_ELT being null only means that the LHS as a
     whole is not a scalarizable reference.  There may be occurrences of
     scalarizable variables within, which implies a USE.  */
  else
    sra_walk_expr (&GIMPLE_STMT_OPERAND (expr, 0), bsi, true, fns);
}
/* Entry point to the walk functions.  Search the entire function,
   invoking the callbacks in FNS on each of the references to
   scalarizable variables.  */

static void
sra_walk_function (const struct sra_walk_fns *fns)
{
  basic_block bb;
  block_stmt_iterator si, ni;

  /* ??? Phase 4 could derive some benefit to walking the function in
     dominator tree order.  */

  FOR_EACH_BB (bb)
    for (si = bsi_start (bb); !bsi_end_p (si); si = ni)
      {
	tree stmt, t;
	stmt_ann_t ann;

	stmt = bsi_stmt (si);
	ann = stmt_ann (stmt);

	ni = si;
	bsi_next (&ni);

	/* If the statement has no virtual operands, then it doesn't
	   make any structure references that we care about.  */
	if (ZERO_SSA_OPERANDS (stmt, (SSA_OP_VIRTUAL_DEFS | SSA_OP_VUSE)))
	  continue;

	switch (TREE_CODE (stmt))
	  {
	  case RETURN_EXPR:
	    /* If we have "return <retval>" then the return value is
	       already exposed for our pleasure.  Walk it as a USE to
	       force all the components back in place for the return.

	       If we have an embedded assignment, then <retval> is of
	       a type that gets returned in registers in this ABI, and
	       we do not wish to extend their lifetimes.  Treat this
	       as a USE of the variable on the RHS of this assignment.  */

	    t = TREE_OPERAND (stmt, 0);
	    if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
	      sra_walk_expr (&GIMPLE_STMT_OPERAND (t, 1), &si, false, fns);
	    else
	      sra_walk_expr (&TREE_OPERAND (stmt, 0), &si, false, fns);
	    break;

	  case GIMPLE_MODIFY_STMT:
	    sra_walk_gimple_modify_stmt (stmt, &si, fns);
	    break;
	  case CALL_EXPR:
	    sra_walk_call_expr (stmt, &si, fns);
	    break;
	  case ASM_EXPR:
	    sra_walk_asm_expr (stmt, &si, fns);
	    break;

	  default:
	    break;
	  }
      }
}
/* Phase One: Scan all referenced variables in the program looking for
   structures that could be decomposed.  */

static bool
find_candidates_for_sra (void)
{
  bool any_set = false;
  tree var;
  referenced_var_iterator rvi;

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (decl_can_be_decomposed_p (var))
	{
	  bitmap_set_bit (sra_candidates, DECL_UID (var));
	  any_set = true;
	}
    }

  return any_set;
}
/* Phase Two: Scan all references to scalarizable variables.  Count the
   number of times they are used or copied respectively.  */

/* Callbacks to fill in SRA_WALK_FNS.  Everything but USE is
   considered a copy, because we can decompose the reference such that
   the sub-elements needn't be contiguous.  */

static void
scan_use (struct sra_elt *elt, tree *expr_p ATTRIBUTE_UNUSED,
	  block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
	  bool is_output ATTRIBUTE_UNUSED, bool use_all ATTRIBUTE_UNUSED)
{
  elt->n_uses += 1;
}

static void
scan_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
	   block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
{
  lhs_elt->n_copies += 1;
  rhs_elt->n_copies += 1;
}

static void
scan_init (struct sra_elt *lhs_elt, tree rhs ATTRIBUTE_UNUSED,
	   block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
{
  lhs_elt->n_copies += 1;
}

static void
scan_ldst (struct sra_elt *elt, tree other ATTRIBUTE_UNUSED,
	   block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
	   bool is_output ATTRIBUTE_UNUSED)
{
  elt->n_uses += 1;
}
/* Dump the values we collected during the scanning phase.  */

static void
scan_dump (struct sra_elt *elt)
{
  struct sra_elt *c;

  dump_sra_elt_name (dump_file, elt);
  fprintf (dump_file, ": n_uses=%u n_copies=%u\n", elt->n_uses, elt->n_copies);

  for (c = elt->children; c ; c = c->sibling)
    scan_dump (c);

  for (c = elt->groups; c ; c = c->sibling)
    scan_dump (c);
}
/* Entry point to phase 2.  Scan the entire function, building up
   scalarization data structures, recording copies and uses.  */

static void
scan_function (void)
{
  static const struct sra_walk_fns fns = {
    scan_use, scan_copy, scan_init, scan_ldst, true
  };

  sra_walk_function (&fns);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      unsigned i;
      bitmap_iterator bi;

      fputs ("\nScan results:\n", dump_file);
      EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
	{
	  tree var = referenced_var (i);
	  struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
	  if (elt)
	    scan_dump (elt);
	}
      fputc ('\n', dump_file);
    }
}
/* Phase Three: Make decisions about which variables to scalarize, if any.
   All elements to be scalarized have replacement variables made for them.  */

/* A subroutine of build_element_name.  Recursively build the element
   name on the obstack.  */

static void
build_element_name_1 (struct sra_elt *elt)
{
  tree t;
  char buffer[32];

  if (elt->parent)
    {
      build_element_name_1 (elt->parent);
      obstack_1grow (&sra_obstack, '$');

      if (TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
	{
	  if (elt->element == integer_zero_node)
	    obstack_grow (&sra_obstack, "real", 4);
	  else
	    obstack_grow (&sra_obstack, "imag", 4);
	  return;
	}
    }

  t = elt->element;
  if (TREE_CODE (t) == INTEGER_CST)
    {
      /* ??? Eh.  Don't bother doing double-wide printing.  */
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (t));
      obstack_grow (&sra_obstack, buffer, strlen (buffer));
    }
  else
    {
      tree name = DECL_NAME (t);
      if (name)
	obstack_grow (&sra_obstack, IDENTIFIER_POINTER (name),
		      IDENTIFIER_LENGTH (name));
      else
	{
	  sprintf (buffer, "D%u", DECL_UID (t));
	  obstack_grow (&sra_obstack, buffer, strlen (buffer));
	}
    }
}
/* Construct a pretty variable name for an element's replacement variable.
   The name is built on the obstack.  */

static char *
build_element_name (struct sra_elt *elt)
{
  build_element_name_1 (elt);
  obstack_1grow (&sra_obstack, '\0');
  return XOBFINISH (&sra_obstack, char *);
}
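
/* Illustrative examples of the names built above (hedged; the exact
   spelling depends on the declarations involved): the replacement for
   "a.b.c" is named "a$b$c", the real part of a complex variable "z"
   becomes "z$real", and element 3 of an array "v" becomes "v$3".  */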
/* Instantiate an element as an independent variable.  */

static void
instantiate_element (struct sra_elt *elt)
{
  struct sra_elt *base_elt;
  tree var, base;

  for (base_elt = elt; base_elt->parent; base_elt = base_elt->parent)
    continue;
  base = base_elt->element;

  elt->replacement = var = make_rename_temp (elt->type, "SR");

  /* For vectors, if used on the left hand side with BIT_FIELD_REF,
     the replacement is not a gimple register.  */
  if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE && elt->is_vector_lhs)
    DECL_GIMPLE_REG_P (var) = 0;

  DECL_SOURCE_LOCATION (var) = DECL_SOURCE_LOCATION (base);
  DECL_ARTIFICIAL (var) = 1;

  if (TREE_THIS_VOLATILE (elt->type))
    {
      TREE_THIS_VOLATILE (var) = 1;
      TREE_SIDE_EFFECTS (var) = 1;
    }

  if (DECL_NAME (base) && !DECL_IGNORED_P (base))
    {
      char *pretty_name = build_element_name (elt);
      DECL_NAME (var) = get_identifier (pretty_name);
      obstack_free (&sra_obstack, pretty_name);

      SET_DECL_DEBUG_EXPR (var, generate_element_ref (elt));
      DECL_DEBUG_EXPR_IS_FROM (var) = 1;

      DECL_IGNORED_P (var) = 0;
      TREE_NO_WARNING (var) = TREE_NO_WARNING (base);
    }
  else
    {
      DECL_IGNORED_P (var) = 1;
      /* ??? We can't generate any warning that would be meaningful.  */
      TREE_NO_WARNING (var) = 1;
    }

  if (dump_file)
    {
      fputs (" ", dump_file);
      dump_sra_elt_name (dump_file, elt);
      fputs (" -> ", dump_file);
      print_generic_expr (dump_file, var, dump_flags);
      fputc ('\n', dump_file);
    }
}
/* Make one pass across an element tree deciding whether or not it's
   profitable to instantiate individual leaf scalars.

   PARENT_USES and PARENT_COPIES are the sum of the N_USES and N_COPIES
   fields all the way up the tree.  */

static void
decide_instantiation_1 (struct sra_elt *elt, unsigned int parent_uses,
			unsigned int parent_copies)
{
  if (dump_file && !elt->parent)
    {
      fputs ("Initial instantiation for ", dump_file);
      dump_sra_elt_name (dump_file, elt);
      fputc ('\n', dump_file);
    }

  if (elt->cannot_scalarize)
    return;

  if (elt->is_scalar)
    {
      /* The decision is simple: instantiate if we're used more frequently
	 than the parent needs to be seen as a complete unit.  */
      if (elt->n_uses + elt->n_copies + parent_copies > parent_uses)
	instantiate_element (elt);
    }
  else
    {
      struct sra_elt *c, *group;
      unsigned int this_uses = elt->n_uses + parent_uses;
      unsigned int this_copies = elt->n_copies + parent_copies;

      /* Consider groups of sub-elements as weighing in favour of
	 instantiation whatever their size.  */
      for (group = elt->groups; group ; group = group->sibling)
	FOR_EACH_ACTUAL_CHILD (c, group)
	  {
	    c->n_uses += group->n_uses;
	    c->n_copies += group->n_copies;
	  }

      for (c = elt->children; c ; c = c->sibling)
	decide_instantiation_1 (c, this_uses, this_copies);
    }
}
);
1284 /* Compute the size and number of all instantiated elements below ELT.
1285 We will only care about this if the size of the complete structure
1286 fits in a HOST_WIDE_INT, so we don't have to worry about overflow. */
1289 sum_instantiated_sizes (struct sra_elt
*elt
, unsigned HOST_WIDE_INT
*sizep
)
1291 if (elt
->replacement
)
1293 *sizep
+= TREE_INT_CST_LOW (TYPE_SIZE_UNIT (elt
->type
));
1299 unsigned int count
= 0;
1301 for (c
= elt
->children
; c
; c
= c
->sibling
)
1302 count
+= sum_instantiated_sizes (c
, sizep
);
1308 /* Instantiate fields in ELT->TYPE that are not currently present as
1311 static void instantiate_missing_elements (struct sra_elt
*elt
);
1314 instantiate_missing_elements_1 (struct sra_elt
*elt
, tree child
, tree type
)
1316 struct sra_elt
*sub
= lookup_element (elt
, child
, type
, INSERT
);
1319 if (sub
->replacement
== NULL
)
1320 instantiate_element (sub
);
1323 instantiate_missing_elements (sub
);
1327 instantiate_missing_elements (struct sra_elt
*elt
)
1329 tree type
= elt
->type
;
1331 switch (TREE_CODE (type
))
1336 for (f
= TYPE_FIELDS (type
); f
; f
= TREE_CHAIN (f
))
1337 if (TREE_CODE (f
) == FIELD_DECL
)
1338 instantiate_missing_elements_1 (elt
, f
, TREE_TYPE (f
));
1344 tree i
, max
, subtype
;
1346 i
= TYPE_MIN_VALUE (TYPE_DOMAIN (type
));
1347 max
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
1348 subtype
= TREE_TYPE (type
);
1352 instantiate_missing_elements_1 (elt
, i
, subtype
);
1353 if (tree_int_cst_equal (i
, max
))
1355 i
= int_const_binop (PLUS_EXPR
, i
, integer_one_node
, true);
1362 type
= TREE_TYPE (type
);
1363 instantiate_missing_elements_1 (elt
, integer_zero_node
, type
);
1364 instantiate_missing_elements_1 (elt
, integer_one_node
, type
);
1372 /* Return true if there is only one non aggregate field in the record, TYPE.
1373 Return false otherwise. */
1376 single_scalar_field_in_record_p (tree type
)
1380 if (TREE_CODE (type
) != RECORD_TYPE
)
1383 for (field
= TYPE_FIELDS (type
); field
; field
= TREE_CHAIN (field
))
1384 if (TREE_CODE (field
) == FIELD_DECL
)
1388 if (num_fields
== 2)
1391 if (AGGREGATE_TYPE_P (TREE_TYPE (field
)))
1398 /* Make one pass across an element tree deciding whether to perform block
1399 or element copies. If we decide on element copies, instantiate all
1400 elements. Return true if there are any instantiated sub-elements. */
1403 decide_block_copy (struct sra_elt
*elt
)
1408 /* We shouldn't be invoked on groups of sub-elements as they must
1409 behave like their parent as far as block copy is concerned. */
1410 gcc_assert (!elt
->is_group
);
1412 /* If scalarization is disabled, respect it. */
1413 if (elt
->cannot_scalarize
)
1415 elt
->use_block_copy
= 1;
1419 fputs ("Scalarization disabled for ", dump_file
);
1420 dump_sra_elt_name (dump_file
, elt
);
1421 fputc ('\n', dump_file
);
1424 /* Disable scalarization of sub-elements */
1425 for (c
= elt
->children
; c
; c
= c
->sibling
)
1427 c
->cannot_scalarize
= 1;
1428 decide_block_copy (c
);
1431 /* Groups behave like their parent. */
1432 for (c
= elt
->groups
; c
; c
= c
->sibling
)
1434 c
->cannot_scalarize
= 1;
1435 c
->use_block_copy
= 1;
1441 /* Don't decide if we've no uses. */
1442 if (elt
->n_uses
== 0 && elt
->n_copies
== 0)
1445 else if (!elt
->is_scalar
)
1447 tree size_tree
= TYPE_SIZE_UNIT (elt
->type
);
1448 bool use_block_copy
= true;
1450 /* Tradeoffs for COMPLEX types pretty much always make it better
1451 to go ahead and split the components. */
1452 if (TREE_CODE (elt
->type
) == COMPLEX_TYPE
)
1453 use_block_copy
= false;
1455 /* Don't bother trying to figure out the rest if the structure is
1456 so large we can't do easy arithmetic. This also forces block
1457 copies for variable sized structures. */
1458 else if (host_integerp (size_tree
, 1))
1460 unsigned HOST_WIDE_INT full_size
, inst_size
= 0;
1461 unsigned int max_size
, max_count
, inst_count
, full_count
;
1463 /* If the sra-max-structure-size parameter is 0, then the
1464 user has not overridden the parameter and we can choose a
1465 sensible default. */
1466 max_size
= SRA_MAX_STRUCTURE_SIZE
1467 ? SRA_MAX_STRUCTURE_SIZE
1468 : MOVE_RATIO
* UNITS_PER_WORD
;
1469 max_count
= SRA_MAX_STRUCTURE_COUNT
1470 ? SRA_MAX_STRUCTURE_COUNT
1473 full_size
= tree_low_cst (size_tree
, 1);
1474 full_count
= count_type_elements (elt
->type
, false);
1475 inst_count
= sum_instantiated_sizes (elt
, &inst_size
);
1477 /* If there is only one scalar field in the record, don't block copy. */
1478 if (single_scalar_field_in_record_p (elt
->type
))
1479 use_block_copy
= false;
1481 /* ??? What to do here. If there are two fields, and we've only
1482 instantiated one, then instantiating the other is clearly a win.
1483 If there are a large number of fields then the size of the copy
1484 is much more of a factor. */
1486 /* If the structure is small, and we've made copies, go ahead
1487 and instantiate, hoping that the copies will go away. */
1488 if (full_size
<= max_size
1489 && (full_count
- inst_count
) <= max_count
1490 && elt
->n_copies
> elt
->n_uses
)
1491 use_block_copy
= false;
1492 else if (inst_count
* 100 >= full_count
* SRA_FIELD_STRUCTURE_RATIO
1493 && inst_size
* 100 >= full_size
* SRA_FIELD_STRUCTURE_RATIO
)
1494 use_block_copy
= false;
1496 /* In order to avoid block copy, we have to be able to instantiate
1497 all elements of the type. See if this is possible. */
1499 && (!can_completely_scalarize_p (elt
)
1500 || !type_can_instantiate_all_elements (elt
->type
)))
1501 use_block_copy
= true;
1504 elt
->use_block_copy
= use_block_copy
;
1506 /* Groups behave like their parent. */
1507 for (c
= elt
->groups
; c
; c
= c
->sibling
)
1508 c
->use_block_copy
= use_block_copy
;
1512 fprintf (dump_file
, "Using %s for ",
1513 use_block_copy
? "block-copy" : "element-copy");
1514 dump_sra_elt_name (dump_file
, elt
);
1515 fputc ('\n', dump_file
);
1518 if (!use_block_copy
)
1520 instantiate_missing_elements (elt
);
1525 any_inst
= elt
->replacement
!= NULL
;
1527 for (c
= elt
->children
; c
; c
= c
->sibling
)
1528 any_inst
|= decide_block_copy (c
);
/* Entry point to phase 3.  Instantiate scalar replacement variables.  */

static void
decide_instantiations (void)
{
  unsigned int i;
  bool cleared_any;
  bitmap_head done_head;
  bitmap_iterator bi;

  /* We cannot clear bits from a bitmap we're iterating over,
     so save up all the bits to clear until the end.  */
  bitmap_initialize (&done_head, &bitmap_default_obstack);
  cleared_any = false;

  EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
    {
      tree var = referenced_var (i);
      struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
      if (elt)
	{
	  decide_instantiation_1 (elt, 0, 0);
	  if (!decide_block_copy (elt))
	    elt = NULL;
	}
      if (!elt)
	{
	  bitmap_set_bit (&done_head, i);
	  cleared_any = true;
	}
    }

  if (cleared_any)
    {
      bitmap_and_compl_into (sra_candidates, &done_head);
      bitmap_and_compl_into (needs_copy_in, &done_head);
    }
  bitmap_clear (&done_head);

  if (!bitmap_empty_p (sra_candidates))
    todoflags |= TODO_update_smt_usage;

  mark_set_for_renaming (sra_candidates);

  if (dump_file)
    fputc ('\n', dump_file);
}

/* Phase Four: Update the function to match the replacements created.  */
/* Mark all the variables in VDEF/VUSE operators for STMT for
   renaming.  This becomes necessary when we modify all of a
   non-scalar.  */

static void
mark_all_v_defs_1 (tree stmt)
{
  tree sym;
  ssa_op_iter iter;

  update_stmt_if_modified (stmt);

  FOR_EACH_SSA_TREE_OPERAND (sym, stmt, iter, SSA_OP_ALL_VIRTUALS)
    {
      if (TREE_CODE (sym) == SSA_NAME)
	sym = SSA_NAME_VAR (sym);
      mark_sym_for_renaming (sym);
    }
}


/* Mark all the variables in virtual operands in all the statements in
   LIST for renaming.  */

static void
mark_all_v_defs (tree list)
{
  if (TREE_CODE (list) != STATEMENT_LIST)
    mark_all_v_defs_1 (list);
  else
    {
      tree_stmt_iterator i;
      for (i = tsi_start (list); !tsi_end_p (i); tsi_next (&i))
	mark_all_v_defs_1 (tsi_stmt (i));
    }
}
1625 mark_no_warning (struct sra_elt
*elt
)
1627 if (!elt
->all_no_warning
)
1629 if (elt
->replacement
)
1630 TREE_NO_WARNING (elt
->replacement
) = 1;
1634 FOR_EACH_ACTUAL_CHILD (c
, elt
)
1635 mark_no_warning (c
);
1637 elt
->all_no_warning
= true;
/* Build a single level component reference to ELT rooted at BASE.  */

static tree
generate_one_element_ref (struct sra_elt *elt, tree base)
{
  switch (TREE_CODE (TREE_TYPE (base)))
    {
    case RECORD_TYPE:
      {
	tree field = elt->element;

	/* Watch out for compatible records with differing field lists.  */
	if (DECL_FIELD_CONTEXT (field) != TYPE_MAIN_VARIANT (TREE_TYPE (base)))
	  field = find_compatible_field (TREE_TYPE (base), field);

	return build3 (COMPONENT_REF, elt->type, base, field, NULL);
      }

    case ARRAY_TYPE:
      todoflags |= TODO_update_smt_usage;
      if (TREE_CODE (elt->element) == RANGE_EXPR)
	return build4 (ARRAY_RANGE_REF, elt->type, base,
		       TREE_OPERAND (elt->element, 0), NULL, NULL);
      else
	return build4 (ARRAY_REF, elt->type, base, elt->element, NULL, NULL);

    case COMPLEX_TYPE:
      if (elt->element == integer_zero_node)
	return build1 (REALPART_EXPR, elt->type, base);
      else
	return build1 (IMAGPART_EXPR, elt->type, base);

    default:
      gcc_unreachable ();
    }
}

/* Build a full component reference to ELT rooted at its native variable.  */

static tree
generate_element_ref (struct sra_elt *elt)
{
  if (elt->parent)
    return generate_one_element_ref (elt, generate_element_ref (elt->parent));
  else
    return elt->element;
}
/* Generate a set of assignment statements in *LIST_P to copy all
   instantiated elements under ELT to or from the equivalent structure
   rooted at EXPR.  COPY_OUT controls the direction of the copy, with
   true meaning to copy out of EXPR into ELT.  */

static void
generate_copy_inout (struct sra_elt *elt, bool copy_out, tree expr,
		     tree *list_p)
{
  struct sra_elt *c;
  tree t;

  if (!copy_out && TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == COMPLEX_TYPE)
    {
      tree r, i;

      c = lookup_element (elt, integer_zero_node, NULL, NO_INSERT);
      r = c->replacement;
      c = lookup_element (elt, integer_one_node, NULL, NO_INSERT);
      i = c->replacement;

      t = build2 (COMPLEX_EXPR, elt->type, r, i);
      t = build2 (GIMPLE_MODIFY_STMT, void_type_node, expr, t);
      SSA_NAME_DEF_STMT (expr) = t;
      append_to_statement_list (t, list_p);
    }
  else if (elt->replacement)
    {
      if (copy_out)
	t = build2 (GIMPLE_MODIFY_STMT, void_type_node, elt->replacement, expr);
      else
	t = build2 (GIMPLE_MODIFY_STMT, void_type_node, expr, elt->replacement);
      append_to_statement_list (t, list_p);
    }
  else
    {
      FOR_EACH_ACTUAL_CHILD (c, elt)
	{
	  t = generate_one_element_ref (c, unshare_expr (expr));
	  generate_copy_inout (c, copy_out, t, list_p);
	}
    }
}
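
/* For illustration (hedged): for a candidate "struct point p" with
   replacements p$x and p$y, a copy-in generated here looks like
     p$x = p.x;
     p$y = p.y;
   and a copy-out reverses the two operands.  */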
/* Generate a set of assignment statements in *LIST_P to copy all instantiated
   elements under SRC to their counterparts under DST.  There must be a 1-1
   correspondence of instantiated elements.  */

static void
generate_element_copy (struct sra_elt *dst, struct sra_elt *src, tree *list_p)
{
  struct sra_elt *dc, *sc;

  FOR_EACH_ACTUAL_CHILD (dc, dst)
    {
      sc = lookup_element (src, dc->element, NULL, NO_INSERT);
      gcc_assert (sc);
      generate_element_copy (dc, sc, list_p);
    }

  if (dst->replacement)
    {
      tree t;

      gcc_assert (src->replacement);

      t = build2 (GIMPLE_MODIFY_STMT, void_type_node, dst->replacement,
		  src->replacement);
      append_to_statement_list (t, list_p);
    }
}
/* Generate a set of assignment statements in *LIST_P to zero all instantiated
   elements under ELT.  In addition, do not assign to elements that have been
   marked VISITED but do reset the visited flag; this allows easy coordination
   with generate_element_init.  */

static void
generate_element_zero (struct sra_elt *elt, tree *list_p)
{
  struct sra_elt *c;

  if (elt->visited)
    {
      elt->visited = false;
      return;
    }

  FOR_EACH_ACTUAL_CHILD (c, elt)
    generate_element_zero (c, list_p);

  if (elt->replacement)
    {
      tree t;

      gcc_assert (elt->is_scalar);
      t = fold_convert (elt->type, integer_zero_node);

      t = build2 (GIMPLE_MODIFY_STMT, void_type_node, elt->replacement, t);
      append_to_statement_list (t, list_p);
    }
}
/* Generate an assignment VAR = INIT, where INIT may need gimplification.
   Add the result to *LIST_P.  */

static void
generate_one_element_init (tree var, tree init, tree *list_p)
{
  /* The replacement can be almost arbitrarily complex.  Gimplify.  */
  tree stmt = build2 (GIMPLE_MODIFY_STMT, void_type_node, var, init);
  gimplify_and_add (stmt, list_p);
}
/* Generate a set of assignment statements in *LIST_P to set all instantiated
   elements under ELT with the contents of the initializer INIT.  In addition,
   mark all assigned elements VISITED; this allows easy coordination with
   generate_element_zero.  Return false if we found a case we couldn't
   handle.  */

static bool
generate_element_init_1 (struct sra_elt *elt, tree init, tree *list_p)
{
  bool result = true;
  enum tree_code init_code;
  struct sra_elt *sub;
  tree t;
  unsigned HOST_WIDE_INT idx;
  tree value, purpose;

  /* We can be passed DECL_INITIAL of a static variable.  It might have a
     conversion, which we strip off here.  */
  STRIP_USELESS_TYPE_CONVERSION (init);
  init_code = TREE_CODE (init);

  if (elt->is_scalar)
    {
      if (elt->replacement)
	{
	  generate_one_element_init (elt->replacement, init, list_p);
	  elt->visited = true;
	}
      return result;
    }

  switch (init_code)
    {
    case COMPLEX_CST:
    case COMPLEX_EXPR:
      FOR_EACH_ACTUAL_CHILD (sub, elt)
	{
	  if (sub->element == integer_zero_node)
	    t = (init_code == COMPLEX_EXPR
		 ? TREE_OPERAND (init, 0) : TREE_REALPART (init));
	  else
	    t = (init_code == COMPLEX_EXPR
		 ? TREE_OPERAND (init, 1) : TREE_IMAGPART (init));
	  result &= generate_element_init_1 (sub, t, list_p);
	}
      break;

    case CONSTRUCTOR:
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), idx, purpose, value)
	{
	  if (TREE_CODE (purpose) == RANGE_EXPR)
	    {
	      tree lower = TREE_OPERAND (purpose, 0);
	      tree upper = TREE_OPERAND (purpose, 1);

	      while (1)
		{
		  sub = lookup_element (elt, lower, NULL, NO_INSERT);
		  if (sub != NULL)
		    result &= generate_element_init_1 (sub, value, list_p);
		  if (tree_int_cst_equal (lower, upper))
		    break;
		  lower = int_const_binop (PLUS_EXPR, lower,
					   integer_one_node, true);
		}
	    }
	  else
	    {
	      sub = lookup_element (elt, purpose, NULL, NO_INSERT);
	      if (sub != NULL)
		result &= generate_element_init_1 (sub, value, list_p);
	    }
	}
      break;

    default:
      elt->visited = true;
      result = false;
    }

  return result;
}
/* A wrapper function for generate_element_init_1 that handles cleanup after
   gimplification.  */

static bool
generate_element_init (struct sra_elt *elt, tree init, tree *list_p)
{
  bool ret;

  push_gimplify_context ();
  ret = generate_element_init_1 (elt, init, list_p);
  pop_gimplify_context (NULL);

  /* The replacement can expose previously unreferenced variables.  */
  if (ret && *list_p)
    {
      tree_stmt_iterator i;

      for (i = tsi_start (*list_p); !tsi_end_p (i); tsi_next (&i))
	find_new_referenced_vars (tsi_stmt_ptr (i));
    }

  return ret;
}
/* Insert STMT on all the outgoing edges out of BB.  Note that if BB
   has more than one edge, STMT will be replicated for each edge.  Also,
   abnormal edges will be ignored.  */

void
insert_edge_copies (tree stmt, basic_block bb)
{
  edge e;
  edge_iterator ei;
  bool first_copy;

  first_copy = true;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      /* We don't need to insert copies on abnormal edges.  The
	 value of the scalar replacement is not guaranteed to
	 be valid through an abnormal edge.  */
      if (!(e->flags & EDGE_ABNORMAL))
	{
	  if (first_copy)
	    {
	      bsi_insert_on_edge (e, stmt);
	      first_copy = false;
	    }
	  else
	    bsi_insert_on_edge (e, unsave_expr_now (stmt));
	}
    }
}
/* Helper function to insert LIST before BSI, and set up line number info.  */

static void
sra_insert_before (block_stmt_iterator *bsi, tree list)
{
  tree stmt = bsi_stmt (*bsi);

  if (EXPR_HAS_LOCATION (stmt))
    annotate_all_with_locus (&list, EXPR_LOCATION (stmt));
  bsi_insert_before (bsi, list, BSI_SAME_STMT);
}

/* Similarly, but insert after BSI.  Handles insertion onto edges as well.  */

static void
sra_insert_after (block_stmt_iterator *bsi, tree list)
{
  tree stmt = bsi_stmt (*bsi);

  if (EXPR_HAS_LOCATION (stmt))
    annotate_all_with_locus (&list, EXPR_LOCATION (stmt));

  if (stmt_ends_bb_p (stmt))
    insert_edge_copies (list, bsi->bb);
  else
    bsi_insert_after (bsi, list, BSI_SAME_STMT);
}

/* Similarly, but replace the statement at BSI.  */

static void
sra_replace (block_stmt_iterator *bsi, tree list)
{
  sra_insert_before (bsi, list);
  bsi_remove (bsi, false);
  if (bsi_end_p (*bsi))
    *bsi = bsi_last (bsi->bb);
}
/* Scalarize a USE.  To recap, this is either a simple reference to ELT,
   if elt is scalar, or some occurrence of ELT that requires a complete
   aggregate.  IS_OUTPUT is true if ELT is being modified.  */

static void
scalarize_use (struct sra_elt *elt, tree *expr_p, block_stmt_iterator *bsi,
	       bool is_output, bool use_all)
{
  tree list = NULL, stmt = bsi_stmt (*bsi);

  if (elt->replacement)
    {
      /* If we have a replacement, then updating the reference is as
	 simple as modifying the existing statement in place.  */
      if (is_output)
	mark_all_v_defs (stmt);
      *expr_p = elt->replacement;
    }
  else
    {
      /* Otherwise we need some copies.  If ELT is being read, then we want
	 to store all (modified) sub-elements back into the structure before
	 the reference takes place.  If ELT is being written, then we want to
	 load the changed values back into our shadow variables.  */
      /* ??? We don't check modified for reads, we just always write all of
	 the values.  We should be able to record the SSA number of the VOP
	 for which the values were last read.  If that number matches the
	 SSA number of the VOP in the current statement, then we needn't
	 emit an assignment.  This would also eliminate double writes when
	 a structure is passed as more than one argument to a function call.
	 This optimization would be most effective if sra_walk_function
	 processed the blocks in dominator order.  */

      generate_copy_inout (elt, is_output, generate_element_ref (elt), &list);
      if (list == NULL)
	return;
      mark_all_v_defs (list);
      if (is_output)
	sra_insert_after (bsi, list);
      else
	{
	  sra_insert_before (bsi, list);
	  if (use_all)
	    mark_no_warning (elt);
	}
    }
}
/* Scalarize a COPY.  To recap, this is an assignment statement between
   two scalarizable references, LHS_ELT and RHS_ELT.  */

static void
scalarize_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
		block_stmt_iterator *bsi)
{
  tree list, stmt;

  if (lhs_elt->replacement && rhs_elt->replacement)
    {
      /* If we have two scalar operands, modify the existing statement.  */
      stmt = bsi_stmt (*bsi);

      /* See the commentary in sra_walk_function concerning
	 RETURN_EXPR, and why we should never see one here.  */
      gcc_assert (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT);

      GIMPLE_STMT_OPERAND (stmt, 0) = lhs_elt->replacement;
      GIMPLE_STMT_OPERAND (stmt, 1) = rhs_elt->replacement;
    }
  else if (lhs_elt->use_block_copy || rhs_elt->use_block_copy)
    {
      /* If either side requires a block copy, then sync the RHS back
	 to the original structure, leave the original assignment
	 statement (which will perform the block copy), then load the
	 LHS values out of its now-updated original structure.  */
      /* ??? Could perform a modified pair-wise element copy.  That
	 would at least allow those elements that are instantiated in
	 both structures to be optimized well.  */

      list = NULL;
      generate_copy_inout (rhs_elt, false,
			   generate_element_ref (rhs_elt), &list);
      if (list)
	{
	  mark_all_v_defs (list);
	  sra_insert_before (bsi, list);
	}

      list = NULL;
      generate_copy_inout (lhs_elt, true,
			   generate_element_ref (lhs_elt), &list);
      if (list)
	{
	  mark_all_v_defs (list);
	  sra_insert_after (bsi, list);
	}
    }
  else
    {
      /* Otherwise both sides must be fully instantiated.  In which
	 case perform pair-wise element assignments and replace the
	 original block copy statement.  */

      stmt = bsi_stmt (*bsi);
      mark_all_v_defs (stmt);

      list = NULL;
      generate_element_copy (lhs_elt, rhs_elt, &list);
      gcc_assert (list);
      mark_all_v_defs (list);
      sra_replace (bsi, list);
    }
}
/* Scalarize an INIT.  To recap, this is an assignment to a scalarizable
   reference from some form of constructor: CONSTRUCTOR, COMPLEX_CST or
   COMPLEX_EXPR.  If RHS is NULL, it should be treated as an empty
   CONSTRUCTOR.  */

static void
scalarize_init (struct sra_elt *lhs_elt, tree rhs, block_stmt_iterator *bsi)
{
  bool result = true;
  tree list = NULL, list0 = NULL;

  /* Generate initialization statements for all members extant in the RHS.  */
  if (rhs)
    {
      /* Unshare the expression just in case this is from a decl's initial.  */
      rhs = unshare_expr (rhs);
      result = generate_element_init (lhs_elt, rhs, &list);
    }

  /* CONSTRUCTOR is defined such that any member not mentioned is assigned
     a zero value.  Initialize the rest of the instantiated elements.  */
  generate_element_zero (lhs_elt, &list);

  if (!result)
    {
      /* If we failed to convert the entire initializer, then we must
	 leave the structure assignment in place and must load values
	 from the structure into the slots for which we did not find
	 constants.  The easiest way to do this is to generate a complete
	 copy-out, and then follow that with the constant assignments
	 that we were able to build.  DCE will clean things up.  */
      generate_copy_inout (lhs_elt, true, generate_element_ref (lhs_elt),
			   &list0);
      append_to_statement_list (list, &list0);
      list = list0;
    }

  if (lhs_elt->use_block_copy || !result)
    {
      /* Since LHS is not fully instantiated, we must leave the structure
	 assignment in place.  Treating this case differently from a USE
	 exposes constants to later optimizations.  */
      if (list)
	{
	  mark_all_v_defs (list);
	  sra_insert_after (bsi, list);
	}
    }
  else
    {
      /* The LHS is fully instantiated.  The list of initializations
	 replaces the original structure assignment.  */
      gcc_assert (list);
      mark_all_v_defs (bsi_stmt (*bsi));
      mark_all_v_defs (list);
      sra_replace (bsi, list);
    }
}
/* A subroutine of scalarize_ldst called via walk_tree.  Set TREE_THIS_NOTRAP
   on all INDIRECT_REFs.  */

static tree
mark_notrap (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (TREE_CODE (t) == INDIRECT_REF)
    {
      TREE_THIS_NOTRAP (t) = 1;
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;

  return NULL;
}
/* Scalarize a LDST.  To recap, this is an assignment between one scalarizable
   reference ELT and one non-scalarizable reference OTHER.  IS_OUTPUT is true
   if ELT is on the left-hand side.  */

static void
scalarize_ldst (struct sra_elt *elt, tree other,
		block_stmt_iterator *bsi, bool is_output)
{
  /* Shouldn't have gotten called for a scalar.  */
  gcc_assert (!elt->replacement);

  if (elt->use_block_copy)
    {
      /* Since ELT is not fully instantiated, we have to leave the
	 block copy in place.  Treat this as a USE.  */
      scalarize_use (elt, NULL, bsi, is_output, false);
    }
  else
    {
      /* The interesting case is when ELT is fully instantiated.  In this
	 case we can have each element stored/loaded directly to/from the
	 corresponding slot in OTHER.  This avoids a block copy.  */

      tree list = NULL, stmt = bsi_stmt (*bsi);

      mark_all_v_defs (stmt);
      generate_copy_inout (elt, is_output, other, &list);
      mark_all_v_defs (list);
      gcc_assert (list);

      /* Preserve EH semantics.  */
      if (stmt_ends_bb_p (stmt))
	{
	  tree_stmt_iterator tsi;
	  tree first;

	  /* Extract the first statement from LIST.  */
	  tsi = tsi_start (list);
	  first = tsi_stmt (tsi);
	  tsi_delink (&tsi);

	  /* Replace the old statement with this new representative.  */
	  bsi_replace (bsi, first, true);

	  if (!tsi_end_p (tsi))
	    {
	      /* If any reference would trap, then they all would.  And more
		 to the point, the first would.  Therefore none of the rest
		 will trap since the first didn't.  Indicate this by
		 iterating over the remaining statements and set
		 TREE_THIS_NOTRAP in all INDIRECT_REFs.  */
	      do
		{
		  walk_tree (tsi_stmt_ptr (tsi), mark_notrap, NULL, NULL);
		  tsi_next (&tsi);
		}
	      while (!tsi_end_p (tsi));

	      insert_edge_copies (list, bsi->bb);
	    }
	}
      else
	sra_replace (bsi, list);
    }
}
/* Generate initializations for all scalarizable parameters.  */

static void
scalarize_parms (void)
{
  tree list = NULL;
  unsigned i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (needs_copy_in, 0, i, bi)
    {
      tree var = referenced_var (i);
      struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
      generate_copy_inout (elt, true, var, &list);
    }

  if (list)
    {
      insert_edge_copies (list, ENTRY_BLOCK_PTR);
      mark_all_v_defs (list);
    }
}
/* Entry point to phase 4.  Update the function to match replacements.  */

static void
scalarize_function (void)
{
  static const struct sra_walk_fns fns = {
    scalarize_use, scalarize_copy, scalarize_init, scalarize_ldst, false
  };

  sra_walk_function (&fns);
  scalarize_parms ();
  bsi_commit_edge_inserts ();
}
/* Debug helper function.  Print ELT in a nice human-readable format.  */

static void
dump_sra_elt_name (FILE *f, struct sra_elt *elt)
{
  if (elt->parent && TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
    {
      fputs (elt->element == integer_zero_node ? "__real__ " : "__imag__ ", f);
      dump_sra_elt_name (f, elt->parent);
    }
  else
    {
      if (elt->parent)
	dump_sra_elt_name (f, elt->parent);
      if (DECL_P (elt->element))
	{
	  if (TREE_CODE (elt->element) == FIELD_DECL)
	    fputc ('.', f);
	  print_generic_expr (f, elt->element, dump_flags);
	}
      else if (TREE_CODE (elt->element) == RANGE_EXPR)
	fprintf (f, "["HOST_WIDE_INT_PRINT_DEC".."HOST_WIDE_INT_PRINT_DEC"]",
		 TREE_INT_CST_LOW (TREE_OPERAND (elt->element, 0)),
		 TREE_INT_CST_LOW (TREE_OPERAND (elt->element, 1)));
      else
	fprintf (f, "[" HOST_WIDE_INT_PRINT_DEC "]",
		 TREE_INT_CST_LOW (elt->element));
    }
}

/* Likewise, but callable from the debugger.  */

void
debug_sra_elt_name (struct sra_elt *elt)
{
  dump_sra_elt_name (stderr, elt);
  fputc ('\n', stderr);
}
/* Initialize the type decomposition and instantiation caches.  */

static void
sra_init_cache (void)
{
  if (sra_type_decomp_cache)
    return;

  sra_type_decomp_cache = BITMAP_ALLOC (NULL);
  sra_type_inst_cache = BITMAP_ALLOC (NULL);
}
/* Main entry point.  */

static unsigned int
tree_sra (void)
{
  /* Initialize local variables.  */
  todoflags = 0;
  gcc_obstack_init (&sra_obstack);
  sra_candidates = BITMAP_ALLOC (NULL);
  needs_copy_in = BITMAP_ALLOC (NULL);
  sra_init_cache ();
  sra_map = htab_create (101, sra_elt_hash, sra_elt_eq, NULL);

  /* Scan.  If we find anything, instantiate and scalarize.  */
  if (find_candidates_for_sra ())
    {
      scan_function ();
      decide_instantiations ();
      scalarize_function ();
    }

  /* Free allocated memory.  */
  htab_delete (sra_map);
  sra_map = NULL;
  BITMAP_FREE (sra_candidates);
  BITMAP_FREE (needs_copy_in);
  BITMAP_FREE (sra_type_decomp_cache);
  BITMAP_FREE (sra_type_inst_cache);
  obstack_free (&sra_obstack, NULL);
  return todoflags;
}

static bool
gate_sra (void)
{
  return flag_tree_sra != 0;
}
struct tree_opt_pass pass_sra =
{
  "sra",				/* name */
  gate_sra,				/* gate */
  tree_sra,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_SRA,				/* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
  | TODO_update_ssa
  | TODO_ggc_collect
  | TODO_verify_ssa,			/* todo_flags_finish */
  0					/* letter */
};