/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "ggc.h"
#include "tree.h"

/* These RTL headers are needed for basic-block.h.  */
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "langhooks.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "tree-gimple.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "timevar.h"
#include "flags.h"
#include "bitmap.h"
#include "obstack.h"
#include "target.h"
/* expr.h is needed for MOVE_RATIO.  */
#include "expr.h"
#include "params.h"
/* The objective of this pass is to replace a non-addressable aggregate with a
   set of independent variables.  Most of the time, all of these variables
   will be scalars.  But a secondary objective is to break up larger
   aggregates into smaller aggregates.  In the process we may find that some
   bits of the larger aggregate can be deleted as unreferenced.

   This substitution is done globally.  More localized substitutions would
   be the purview of a load-store motion pass.

   The optimization proceeds in phases:

     (1) Identify variables that have types that are candidates for
	 decomposition.

     (2) Scan the function looking for the ways these variables are used.
	 In particular we're interested in the number of times a variable
	 (or member) is needed as a complete unit, and the number of times
	 a variable (or member) is copied.

     (3) Based on the usage profile, instantiate substitution variables.

     (4) Scan the function making replacements.  */
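
/* As an illustrative example (not part of the original sources): given

       struct point { int x; int y; } p;
       p.x = 3;
       p.y = 4;
       use (p.x + p.y);

   the pass may replace the member references with independent scalar
   temporaries, here written p$x and p$y following the naming scheme
   used below:

       int p$x, p$y;
       p$x = 3;
       p$y = 4;
       use (p$x + p$y);

   after which "p" itself may be found unreferenced and deleted.  */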
/* The set of aggregate variables that are candidates for scalarization.  */
static bitmap sra_candidates;

/* Set of scalarizable PARM_DECLs that need copy-in operations at the
   beginning of the function.  */
static bitmap needs_copy_in;

/* Sets of bit pairs that cache type decomposition and instantiation.  */
static bitmap sra_type_decomp_cache;
static bitmap sra_type_inst_cache;
/* One of these structures is created for each candidate aggregate
   and each (accessed) member of such an aggregate.  */
struct sra_elt
{
  /* A tree of the elements.  Used when we want to traverse everything.  */
  struct sra_elt *parent;
  struct sra_elt *children;
  struct sra_elt *sibling;

  /* If this element is a root, then this is the VAR_DECL.  If this is
     a sub-element, this is some token used to identify the reference.
     In the case of COMPONENT_REF, this is the FIELD_DECL.  In the case
     of an ARRAY_REF, this is the (constant) index.  In the case of a
     complex number, this is a zero or one.  */
  tree element;

  /* The type of the element.  */
  tree type;

  /* A VAR_DECL, for any sub-element we've decided to replace.  */
  tree replacement;

  /* The number of times the element is referenced as a whole.  I.e.
     given "a.b.c", this would be incremented for C, but not for A or B.  */
  unsigned int n_uses;

  /* The number of times the element is copied to or from another
     scalarizable element.  */
  unsigned int n_copies;

  /* True if TYPE is scalar.  */
  bool is_scalar;

  /* True if we saw something about this element that prevents scalarization,
     such as non-constant indexing.  */
  bool cannot_scalarize;

  /* True if we've decided that structure-to-structure assignment
     should happen via memcpy and not per-element.  */
  bool use_block_copy;

  /* A flag for use with/after random access traversals.  */
  bool visited;
};

/* Random access to the child of a parent is performed by hashing.
   This prevents quadratic behavior, and allows SRA to function
   reasonably on larger records.  */
static htab_t sra_map;

/* All structures are allocated out of the following obstack.  */
static struct obstack sra_obstack;

/* Debugging functions.  */
static void dump_sra_elt_name (FILE *, struct sra_elt *);
extern void debug_sra_elt_name (struct sra_elt *);

/* Forward declarations.  */
static tree generate_element_ref (struct sra_elt *);
/* Return true if DECL is an SRA candidate.  */

static bool
is_sra_candidate_decl (tree decl)
{
  return DECL_P (decl) && bitmap_bit_p (sra_candidates, var_ann (decl)->uid);
}
/* Return true if TYPE is a scalar type.  */

static bool
is_sra_scalar_type (tree type)
{
  enum tree_code code = TREE_CODE (type);
  return (code == INTEGER_TYPE || code == REAL_TYPE || code == VECTOR_TYPE
	  || code == ENUMERAL_TYPE || code == BOOLEAN_TYPE
	  || code == CHAR_TYPE || code == POINTER_TYPE || code == OFFSET_TYPE
	  || code == REFERENCE_TYPE);
}
/* Return true if TYPE can be decomposed into a set of independent variables.

   Note that this doesn't imply that all elements of TYPE can be
   instantiated, just that if we decide to break up the type into
   separate pieces that it can be done.  */

static bool
type_can_be_decomposed_p (tree type)
{
  unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
  tree t;

  /* Avoid searching the same type twice.  */
  if (bitmap_bit_p (sra_type_decomp_cache, cache+0))
    return true;
  if (bitmap_bit_p (sra_type_decomp_cache, cache+1))
    return false;

  /* The type must have a definite nonzero size.  */
  if (TYPE_SIZE (type) == NULL || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
      || integer_zerop (TYPE_SIZE (type)))
    goto fail;

  /* The type must be a non-union aggregate.  */
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
	bool saw_one_field = false;

	for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
	  if (TREE_CODE (t) == FIELD_DECL)
	    {
	      /* Reject incorrectly represented bit fields.  */
	      if (DECL_BIT_FIELD (t)
		  && (tree_low_cst (DECL_SIZE (t), 1)
		      != TYPE_PRECISION (TREE_TYPE (t))))
		goto fail;

	      saw_one_field = true;
	    }

	/* Record types must have at least one field.  */
	if (!saw_one_field)
	  goto fail;
      }
      break;

    case ARRAY_TYPE:
      /* Array types must have a fixed lower and upper bound.  */
      t = TYPE_DOMAIN (type);
      if (t == NULL)
	goto fail;
      if (TYPE_MIN_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MIN_VALUE (t)))
	goto fail;
      if (TYPE_MAX_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MAX_VALUE (t)))
	goto fail;
      break;

    case COMPLEX_TYPE:
      break;

    default:
      goto fail;
    }

  bitmap_set_bit (sra_type_decomp_cache, cache+0);
  return true;

 fail:
  bitmap_set_bit (sra_type_decomp_cache, cache+1);
  return false;
}
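
/* Illustrative instances of the checks above (not from the original
   sources): "struct { int a; float b; }" and "int[4]" can be decomposed;
   a union, a zero-sized struct, a variable-length array, and a struct
   whose bit-field size differs from its declared precision cannot.  */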
/* Return true if DECL can be decomposed into a set of independent
   (though not necessarily scalar) variables.  */

static bool
decl_can_be_decomposed_p (tree var)
{
  /* Early out for scalars.  */
  if (is_sra_scalar_type (TREE_TYPE (var)))
    return false;

  /* The variable must not be aliased.  */
  if (!is_gimple_non_addressable (var))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Cannot scalarize variable ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, " because it must live in memory\n");
	}
      return false;
    }

  /* The variable must not be volatile.  */
  if (TREE_THIS_VOLATILE (var))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Cannot scalarize variable ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, " because it is declared volatile\n");
	}
      return false;
    }

  /* We must be able to decompose the variable's type.  */
  if (!type_can_be_decomposed_p (TREE_TYPE (var)))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Cannot scalarize variable ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, " because its type cannot be decomposed\n");
	}
      return false;
    }

  return true;
}
/* Return true if TYPE can be *completely* decomposed into scalars.  */

static bool
type_can_instantiate_all_elements (tree type)
{
  if (is_sra_scalar_type (type))
    return true;
  if (!type_can_be_decomposed_p (type))
    return false;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
	unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
	tree f;

	if (bitmap_bit_p (sra_type_inst_cache, cache+0))
	  return true;
	if (bitmap_bit_p (sra_type_inst_cache, cache+1))
	  return false;

	for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      if (!type_can_instantiate_all_elements (TREE_TYPE (f)))
		{
		  bitmap_set_bit (sra_type_inst_cache, cache+1);
		  return false;
		}
	    }

	bitmap_set_bit (sra_type_inst_cache, cache+0);
	return true;
      }

    case ARRAY_TYPE:
      return type_can_instantiate_all_elements (TREE_TYPE (type));

    case COMPLEX_TYPE:
      return true;

    default:
      gcc_unreachable ();
    }
}
/* Test whether ELT or some sub-element cannot be scalarized.  */

static bool
can_completely_scalarize_p (struct sra_elt *elt)
{
  struct sra_elt *c;

  if (elt->cannot_scalarize)
    return false;

  for (c = elt->children; c ; c = c->sibling)
    if (!can_completely_scalarize_p (c))
      return false;

  return true;
}
/* A simplified tree hashing algorithm that only handles the types of
   trees we expect to find in sra_elt->element.  */

static hashval_t
sra_hash_tree (tree t)
{
  hashval_t h;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      h = DECL_UID (t);
      break;

    case INTEGER_CST:
      h = TREE_INT_CST_LOW (t) ^ TREE_INT_CST_HIGH (t);
      break;

    case FIELD_DECL:
      /* We can have types that are compatible, but have different member
	 lists, so we can't hash fields by ID.  Use offsets instead.  */
      h = iterative_hash_expr (DECL_FIELD_OFFSET (t), 0);
      h = iterative_hash_expr (DECL_FIELD_BIT_OFFSET (t), h);
      break;

    default:
      gcc_unreachable ();
    }

  return h;
}
/* Hash function for type SRA_PAIR.  */

static hashval_t
sra_elt_hash (const void *x)
{
  const struct sra_elt *e = x;
  const struct sra_elt *p;
  hashval_t h;

  h = sra_hash_tree (e->element);

  /* Take into account everything back up the chain.  Given that chain
     lengths are rarely very long, this should be acceptable.  If we
     truly identify this as a performance problem, it should work to
     hash the pointer value "e->parent".  */
  for (p = e->parent; p ; p = p->parent)
    h = (h * 65521) ^ sra_hash_tree (p->element);

  return h;
}
/* Equality function for type SRA_PAIR.  */

static int
sra_elt_eq (const void *x, const void *y)
{
  const struct sra_elt *a = x;
  const struct sra_elt *b = y;
  tree ae, be;

  if (a->parent != b->parent)
    return false;

  ae = a->element;
  be = b->element;

  if (ae == be)
    return true;
  if (TREE_CODE (ae) != TREE_CODE (be))
    return false;

  switch (TREE_CODE (ae))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* These are all pointer unique.  */
      return false;

    case INTEGER_CST:
      /* Integers are not pointer unique, so compare their values.  */
      return tree_int_cst_equal (ae, be);

    case FIELD_DECL:
      /* Fields are unique within a record, but not between
	 compatible records.  */
      if (DECL_FIELD_CONTEXT (ae) == DECL_FIELD_CONTEXT (be))
	return false;
      return fields_compatible_p (ae, be);

    default:
      gcc_unreachable ();
    }
}
/* Create or return the SRA_ELT structure for CHILD in PARENT.  PARENT
   may be null, in which case CHILD must be a DECL.  */

static struct sra_elt *
lookup_element (struct sra_elt *parent, tree child, tree type,
		enum insert_option insert)
{
  struct sra_elt dummy;
  struct sra_elt **slot;
  struct sra_elt *elt;

  dummy.parent = parent;
  dummy.element = child;

  slot = (struct sra_elt **) htab_find_slot (sra_map, &dummy, insert);
  if (!slot && insert == NO_INSERT)
    return NULL;

  elt = *slot;
  if (!elt && insert == INSERT)
    {
      *slot = elt = obstack_alloc (&sra_obstack, sizeof (*elt));
      memset (elt, 0, sizeof (*elt));

      elt->parent = parent;
      elt->element = child;
      elt->type = type;
      elt->is_scalar = is_sra_scalar_type (type);

      if (parent)
	{
	  elt->sibling = parent->children;
	  parent->children = elt;
	}

      /* If this is a parameter, then if we want to scalarize, we have
	 one copy from the true function parameter.  Count it now.  */
      if (TREE_CODE (child) == PARM_DECL)
	{
	  elt->n_copies = 1;
	  bitmap_set_bit (needs_copy_in, var_ann (child)->uid);
	}
    }

  return elt;
}
/* Return true if the ARRAY_REF in EXPR is a constant, in bounds access.  */

static bool
is_valid_const_index (tree expr)
{
  tree dom, t, index = TREE_OPERAND (expr, 1);

  if (TREE_CODE (index) != INTEGER_CST)
    return false;

  /* Watch out for stupid user tricks, indexing outside the array.

     Careful, we're not called only on scalarizable types, so do not
     assume constant array bounds.  We needn't do anything with such
     cases, since they'll be referring to objects that we should have
     already rejected for scalarization, so returning false is fine.  */

  dom = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (expr, 0)));
  if (dom == NULL)
    return false;

  t = TYPE_MIN_VALUE (dom);
  if (!t || TREE_CODE (t) != INTEGER_CST)
    return false;
  if (tree_int_cst_lt (index, t))
    return false;

  t = TYPE_MAX_VALUE (dom);
  if (!t || TREE_CODE (t) != INTEGER_CST)
    return false;
  if (tree_int_cst_lt (t, index))
    return false;

  return true;
}
/* Create or return the SRA_ELT structure for EXPR if the expression
   refers to a scalarizable variable.  */

static struct sra_elt *
maybe_lookup_element_for_expr (tree expr)
{
  struct sra_elt *elt;
  tree child;

  switch (TREE_CODE (expr))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      if (is_sra_candidate_decl (expr))
	return lookup_element (NULL, expr, TREE_TYPE (expr), INSERT);
      return NULL;

    case ARRAY_REF:
      /* We can't scalarize variable array indices.  */
      if (is_valid_const_index (expr))
	child = TREE_OPERAND (expr, 1);
      else
	return NULL;
      break;

    case COMPONENT_REF:
      /* Don't look through unions.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) != RECORD_TYPE)
	return NULL;
      child = TREE_OPERAND (expr, 1);
      break;

    case REALPART_EXPR:
      child = integer_zero_node;
      break;
    case IMAGPART_EXPR:
      child = integer_one_node;
      break;

    default:
      return NULL;
    }

  elt = maybe_lookup_element_for_expr (TREE_OPERAND (expr, 0));
  if (elt)
    return lookup_element (elt, child, TREE_TYPE (expr), INSERT);
  else
    return NULL;
}
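
/* For example (illustrative): given the reference "x.f[2]", the recursion
   above builds the chain of elements x -> f -> 2, creating any missing
   links via lookup_element, provided "x" is an SRA candidate and the
   array index is a valid constant.  */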
/* Functions to walk just enough of the tree to see all scalarizable
   references, and categorize them.  */

/* A set of callbacks for phases 2 and 4.  They'll be invoked for the
   various kinds of references seen.  In all cases, *BSI is an iterator
   pointing to the statement being processed.  */
struct sra_walk_fns
{
  /* Invoked when ELT is required as a unit.  Note that ELT might refer to
     a leaf node, in which case this is a simple scalar reference.  *EXPR_P
     points to the location of the expression.  IS_OUTPUT is true if this
     is a left-hand-side reference.  */
  void (*use) (struct sra_elt *elt, tree *expr_p,
	       block_stmt_iterator *bsi, bool is_output);

  /* Invoked when we have a copy between two scalarizable references.  */
  void (*copy) (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
		block_stmt_iterator *bsi);

  /* Invoked when ELT is initialized from a constant.  VALUE may be NULL,
     in which case it should be treated as an empty CONSTRUCTOR.  */
  void (*init) (struct sra_elt *elt, tree value, block_stmt_iterator *bsi);

  /* Invoked when we have a copy between one scalarizable reference ELT
     and one non-scalarizable reference OTHER.  IS_OUTPUT is true if ELT
     is on the left-hand side.  */
  void (*ldst) (struct sra_elt *elt, tree other,
		block_stmt_iterator *bsi, bool is_output);

  /* True during phase 2, false during phase 4.  */
  /* ??? This is a hack.  */
  bool initial_scan;
};
#ifdef ENABLE_CHECKING
/* Invoked via walk_tree, if *TP contains a candidate decl, return it.  */

static tree
sra_find_candidate_decl (tree *tp, int *walk_subtrees,
			 void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
    {
      *walk_subtrees = 0;
      if (is_sra_candidate_decl (t))
	return t;
    }

  return NULL;
}
#endif
/* Walk most expressions looking for a scalarizable aggregate.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_expr (tree *expr_p, block_stmt_iterator *bsi, bool is_output,
	       const struct sra_walk_fns *fns)
{
  tree expr = *expr_p;
  tree inner = expr;
  bool disable_scalarization = false;

  /* We're looking to collect a reference expression between EXPR and INNER,
     such that INNER is a scalarizable decl and all other nodes through EXPR
     are references that we can scalarize.  If we come across something that
     we can't scalarize, we reset EXPR.  This has the effect of making it
     appear that we're referring to the larger expression as a whole.  */

  while (1)
    switch (TREE_CODE (inner))
      {
      case VAR_DECL:
      case PARM_DECL:
      case RESULT_DECL:
	/* If there is a scalarizable decl at the bottom, then process it.  */
	if (is_sra_candidate_decl (inner))
	  {
	    struct sra_elt *elt = maybe_lookup_element_for_expr (expr);
	    if (disable_scalarization)
	      elt->cannot_scalarize = true;
	    else
	      fns->use (elt, expr_p, bsi, is_output);
	  }
	return;

      case ARRAY_REF:
	/* Non-constant index means any member may be accessed.  Prevent the
	   expression from being scalarized.  If we were to treat this as a
	   reference to the whole array, we can wind up with a single dynamic
	   index reference inside a loop being overridden by several constant
	   index references during loop setup.  It's possible that this could
	   be avoided by using dynamic usage counts based on BB trip counts
	   (based on loop analysis or profiling), but that hardly seems worth
	   the effort.  */
	/* ??? Hack.  Figure out how to push this into the scan routines
	   without duplicating too much code.  */
	if (!is_valid_const_index (inner))
	  {
	    disable_scalarization = true;
	    goto use_all;
	  }
	/* ??? Are we assured that non-constant bounds and stride will have
	   the same value everywhere?  I don't think Fortran will...  */
	if (TREE_OPERAND (inner, 2) || TREE_OPERAND (inner, 3))
	  goto use_all;
	inner = TREE_OPERAND (inner, 0);
	break;

      case COMPONENT_REF:
	/* A reference to a union member constitutes a reference to the
	   entire union.  */
	if (TREE_CODE (TREE_TYPE (TREE_OPERAND (inner, 0))) != RECORD_TYPE)
	  goto use_all;
	/* ??? See above re non-constant stride.  */
	if (TREE_OPERAND (inner, 2))
	  goto use_all;
	inner = TREE_OPERAND (inner, 0);
	break;

      case REALPART_EXPR:
      case IMAGPART_EXPR:
	inner = TREE_OPERAND (inner, 0);
	break;

      case BIT_FIELD_REF:
	/* A bit field reference (access to *multiple* fields simultaneously)
	   is not currently scalarized.  Consider this an access to the
	   complete outer element, to which walk_tree will bring us next.  */
	goto use_all;

      case ARRAY_RANGE_REF:
	/* Similarly, a subrange reference is used to modify indexing.  Which
	   means that the canonical element names that we have won't work.  */
	goto use_all;

      case VIEW_CONVERT_EXPR:
      case NOP_EXPR:
	/* Similarly, a view/nop explicitly wants to look at an object in a
	   type other than the one we've scalarized.  */
	goto use_all;

      case WITH_SIZE_EXPR:
	/* This is a transparent wrapper.  The entire inner expression really
	   is being used.  */
	goto use_all;

      use_all:
	expr_p = &TREE_OPERAND (inner, 0);
	inner = expr = *expr_p;
	break;

      default:
#ifdef ENABLE_CHECKING
	/* Validate that we're not missing any references.  */
	gcc_assert (!walk_tree (&inner, sra_find_candidate_decl, NULL, NULL));
#endif
	return;
      }
}
/* Walk a TREE_LIST of values looking for scalarizable aggregates.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_tree_list (tree list, block_stmt_iterator *bsi, bool is_output,
		    const struct sra_walk_fns *fns)
{
  tree op;
  for (op = list; op ; op = TREE_CHAIN (op))
    sra_walk_expr (&TREE_VALUE (op), bsi, is_output, fns);
}
/* Walk the arguments of a CALL_EXPR looking for scalarizable aggregates.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_call_expr (tree expr, block_stmt_iterator *bsi,
		    const struct sra_walk_fns *fns)
{
  sra_walk_tree_list (TREE_OPERAND (expr, 1), bsi, false, fns);
}
/* Walk the inputs and outputs of an ASM_EXPR looking for scalarizable
   aggregates.  If we find one, invoke FNS->USE.  */

static void
sra_walk_asm_expr (tree expr, block_stmt_iterator *bsi,
		   const struct sra_walk_fns *fns)
{
  sra_walk_tree_list (ASM_INPUTS (expr), bsi, false, fns);
  sra_walk_tree_list (ASM_OUTPUTS (expr), bsi, true, fns);
}
/* Walk a MODIFY_EXPR and categorize the assignment appropriately.  */

static void
sra_walk_modify_expr (tree expr, block_stmt_iterator *bsi,
		      const struct sra_walk_fns *fns)
{
  struct sra_elt *lhs_elt, *rhs_elt;
  tree lhs, rhs;

  lhs = TREE_OPERAND (expr, 0);
  rhs = TREE_OPERAND (expr, 1);
  lhs_elt = maybe_lookup_element_for_expr (lhs);
  rhs_elt = maybe_lookup_element_for_expr (rhs);

  /* If both sides are scalarizable, this is a COPY operation.  */
  if (lhs_elt && rhs_elt)
    {
      fns->copy (lhs_elt, rhs_elt, bsi);
      return;
    }

  /* If the RHS is scalarizable, handle it.  There are only two cases.  */
  if (rhs_elt)
    {
      if (!rhs_elt->is_scalar)
	fns->ldst (rhs_elt, lhs, bsi, false);
      else
	fns->use (rhs_elt, &TREE_OPERAND (expr, 1), bsi, false);
    }

  /* If it isn't scalarizable, there may be scalarizable variables within, so
     check for a call or else walk the RHS to see if we need to do any
     copy-in operations.  We need to do it before the LHS is scalarized so
     that the statements get inserted in the proper place, before any
     copy-out operations.  */
  else
    {
      tree call = get_call_expr_in (rhs);
      if (call)
	sra_walk_call_expr (call, bsi, fns);
      else
	sra_walk_expr (&TREE_OPERAND (expr, 1), bsi, false, fns);
    }

  /* Likewise, handle the LHS being scalarizable.  We have cases similar
     to those above, but also want to handle RHS being constant.  */
  if (lhs_elt)
    {
      /* If this is an assignment from a constant, or constructor, then
	 we have access to all of the elements individually.  Invoke INIT.  */
      if (TREE_CODE (rhs) == COMPLEX_EXPR
	  || TREE_CODE (rhs) == COMPLEX_CST
	  || TREE_CODE (rhs) == CONSTRUCTOR)
	fns->init (lhs_elt, rhs, bsi);

      /* If this is an assignment from read-only memory, treat this as if
	 we'd been passed the constructor directly.  Invoke INIT.  */
      else if (TREE_CODE (rhs) == VAR_DECL
	       && TREE_STATIC (rhs)
	       && TREE_READONLY (rhs)
	       && targetm.binds_local_p (rhs))
	fns->init (lhs_elt, DECL_INITIAL (rhs), bsi);

      /* If this is a copy from a non-scalarizable lvalue, invoke LDST.
	 The lvalue requirement prevents us from trying to directly scalarize
	 the result of a function call.  Which would result in trying to call
	 the function multiple times, and other evil things.  */
      else if (!lhs_elt->is_scalar && is_gimple_addressable (rhs))
	fns->ldst (lhs_elt, rhs, bsi, true);

      /* Otherwise we're being used in some context that requires the
	 aggregate to be seen as a whole.  Invoke USE.  */
      else
	fns->use (lhs_elt, &TREE_OPERAND (expr, 0), bsi, true);
    }

  /* Similarly to above, LHS_ELT being null only means that the LHS as a
     whole is not a scalarizable reference.  There may be occurrences of
     scalarizable variables within, which implies a USE.  */
  else
    sra_walk_expr (&TREE_OPERAND (expr, 0), bsi, true, fns);
}
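
/* A summary of the dispatch above (illustrative):

     s1 = s2;            both sides scalarizable      -> COPY
     s = <constructor>;  constant/constructor RHS     -> INIT
     s = *p;             only the LHS scalarizable    -> LDST, IS_OUTPUT true
     *p = s;             only the RHS scalarizable    -> LDST, IS_OUTPUT false
     anything else                                    -> USE on the reference  */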
/* Entry point to the walk functions.  Search the entire function,
   invoking the callbacks in FNS on each of the references to
   scalarizable variables.  */

static void
sra_walk_function (const struct sra_walk_fns *fns)
{
  basic_block bb;
  block_stmt_iterator si, ni;

  /* ??? Phase 4 could derive some benefit to walking the function in
     dominator tree order.  */

  FOR_EACH_BB (bb)
    for (si = bsi_start (bb); !bsi_end_p (si); si = ni)
      {
	tree stmt, t;
	stmt_ann_t ann;

	stmt = bsi_stmt (si);
	ann = stmt_ann (stmt);

	ni = si;
	bsi_next (&ni);

	/* If the statement has no virtual operands, then it doesn't
	   make any structure references that we care about.  */
	if (NUM_V_MAY_DEFS (V_MAY_DEF_OPS (ann)) == 0
	    && NUM_VUSES (VUSE_OPS (ann)) == 0
	    && NUM_V_MUST_DEFS (V_MUST_DEF_OPS (ann)) == 0)
	  continue;

	switch (TREE_CODE (stmt))
	  {
	  case RETURN_EXPR:
	    /* If we have "return <retval>" then the return value is
	       already exposed for our pleasure.  Walk it as a USE to
	       force all the components back in place for the return.

	       If we have an embedded assignment, then <retval> is of
	       a type that gets returned in registers in this ABI, and
	       we do not wish to extend their lifetimes.  Treat this
	       as a USE of the variable on the RHS of this assignment.  */

	    t = TREE_OPERAND (stmt, 0);
	    if (TREE_CODE (t) == MODIFY_EXPR)
	      sra_walk_expr (&TREE_OPERAND (t, 1), &si, false, fns);
	    else
	      sra_walk_expr (&TREE_OPERAND (stmt, 0), &si, false, fns);
	    break;

	  case MODIFY_EXPR:
	    sra_walk_modify_expr (stmt, &si, fns);
	    break;
	  case CALL_EXPR:
	    sra_walk_call_expr (stmt, &si, fns);
	    break;
	  case ASM_EXPR:
	    sra_walk_asm_expr (stmt, &si, fns);
	    break;

	  default:
	    break;
	  }
      }
}
/* Phase One: Scan all referenced variables in the program looking for
   structures that could be decomposed.  */

static bool
find_candidates_for_sra (void)
{
  size_t i;
  bool any_set = false;

  for (i = 0; i < num_referenced_vars; i++)
    {
      tree var = referenced_var (i);
      if (decl_can_be_decomposed_p (var))
	{
	  bitmap_set_bit (sra_candidates, var_ann (var)->uid);
	  any_set = true;
	}
    }

  return any_set;
}
/* Phase Two: Scan all references to scalarizable variables.  Count the
   number of times they are used or copied respectively.  */

/* Callbacks to fill in SRA_WALK_FNS.  Everything but USE is
   considered a copy, because we can decompose the reference such that
   the sub-elements needn't be contiguous.  */

static void
scan_use (struct sra_elt *elt, tree *expr_p ATTRIBUTE_UNUSED,
	  block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
	  bool is_output ATTRIBUTE_UNUSED)
{
  elt->n_uses += 1;
}

static void
scan_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
	   block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
{
  lhs_elt->n_copies += 1;
  rhs_elt->n_copies += 1;
}

static void
scan_init (struct sra_elt *lhs_elt, tree rhs ATTRIBUTE_UNUSED,
	   block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
{
  lhs_elt->n_copies += 1;
}

static void
scan_ldst (struct sra_elt *elt, tree other ATTRIBUTE_UNUSED,
	   block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
	   bool is_output ATTRIBUTE_UNUSED)
{
  elt->n_copies += 1;
}
/* Dump the values we collected during the scanning phase.  */

static void
scan_dump (struct sra_elt *elt)
{
  struct sra_elt *c;

  dump_sra_elt_name (dump_file, elt);
  fprintf (dump_file, ": n_uses=%u n_copies=%u\n", elt->n_uses, elt->n_copies);

  for (c = elt->children; c ; c = c->sibling)
    scan_dump (c);
}
/* Entry point to phase 2.  Scan the entire function, building up
   scalarization data structures, recording copies and uses.  */

static void
scan_function (void)
{
  static const struct sra_walk_fns fns = {
    scan_use, scan_copy, scan_init, scan_ldst, true
  };
  bitmap_iterator bi;

  sra_walk_function (&fns);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      unsigned i;

      fputs ("\nScan results:\n", dump_file);
      EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
	{
	  tree var = referenced_var (i);
	  struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
	  if (elt)
	    scan_dump (elt);
	}
      fputc ('\n', dump_file);
    }
}
/* Phase Three: Make decisions about which variables to scalarize, if any.
   All elements to be scalarized have replacement variables made for them.  */

/* A subroutine of build_element_name.  Recursively build the element
   name on the obstack.  */

static void
build_element_name_1 (struct sra_elt *elt)
{
  tree t;
  char buffer[32];

  if (elt->parent)
    {
      build_element_name_1 (elt->parent);
      obstack_1grow (&sra_obstack, '$');

      if (TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
	{
	  if (elt->element == integer_zero_node)
	    obstack_grow (&sra_obstack, "real", 4);
	  else
	    obstack_grow (&sra_obstack, "imag", 4);
	  return;
	}
    }

  t = elt->element;
  if (TREE_CODE (t) == INTEGER_CST)
    {
      /* ??? Eh.  Don't bother doing double-wide printing.  */
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (t));
      obstack_grow (&sra_obstack, buffer, strlen (buffer));
    }
  else
    {
      tree name = DECL_NAME (t);
      if (name)
	obstack_grow (&sra_obstack, IDENTIFIER_POINTER (name),
		      IDENTIFIER_LENGTH (name));
      else
	{
	  sprintf (buffer, "D%u", DECL_UID (t));
	  obstack_grow (&sra_obstack, buffer, strlen (buffer));
	}
    }
}

/* Construct a pretty variable name for an element's replacement variable.
   The name is built on the obstack.  */

static char *
build_element_name (struct sra_elt *elt)
{
  build_element_name_1 (elt);
  obstack_1grow (&sra_obstack, '\0');
  return obstack_finish (&sra_obstack);
}
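
/* For example (illustrative): with the scheme above, the replacement for
   field "b" within field "a" of variable "x" is named "x$a$b", the real
   part of a complex variable "c" is named "c$real", and array element 2
   of "v" is named "v$2".  */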
/* Instantiate an element as an independent variable.  */

static void
instantiate_element (struct sra_elt *elt)
{
  struct sra_elt *base_elt;
  tree var, base;

  for (base_elt = elt; base_elt->parent; base_elt = base_elt->parent)
    continue;
  base = base_elt->element;

  elt->replacement = var = make_rename_temp (elt->type, "SR");
  DECL_SOURCE_LOCATION (var) = DECL_SOURCE_LOCATION (base);
  DECL_ARTIFICIAL (var) = 1;

  if (TREE_THIS_VOLATILE (elt->type))
    {
      TREE_THIS_VOLATILE (var) = 1;
      TREE_SIDE_EFFECTS (var) = 1;
    }

  if (DECL_NAME (base) && !DECL_IGNORED_P (base))
    {
      char *pretty_name = build_element_name (elt);
      DECL_NAME (var) = get_identifier (pretty_name);
      obstack_free (&sra_obstack, pretty_name);

      DECL_DEBUG_EXPR (var) = generate_element_ref (elt);
      DECL_DEBUG_EXPR_IS_FROM (var) = 1;
      DECL_IGNORED_P (var) = 0;
      TREE_NO_WARNING (var) = TREE_NO_WARNING (base);
    }
  else
    {
      DECL_IGNORED_P (var) = 1;
      /* ??? We can't generate any warning that would be meaningful.  */
      TREE_NO_WARNING (var) = 1;
    }

  if (dump_file)
    {
      fputs ("  ", dump_file);
      dump_sra_elt_name (dump_file, elt);
      fputs (" -> ", dump_file);
      print_generic_expr (dump_file, var, dump_flags);
      fputc ('\n', dump_file);
    }
}
/* Make one pass across an element tree deciding whether or not it's
   profitable to instantiate individual leaf scalars.

   PARENT_USES and PARENT_COPIES are the sum of the N_USES and N_COPIES
   fields all the way up the tree.  */

static void
decide_instantiation_1 (struct sra_elt *elt, unsigned int parent_uses,
			unsigned int parent_copies)
{
  if (dump_file && !elt->parent)
    {
      fputs ("Initial instantiation for ", dump_file);
      dump_sra_elt_name (dump_file, elt);
      fputc ('\n', dump_file);
    }

  if (elt->cannot_scalarize)
    return;

  if (elt->is_scalar)
    {
      /* The decision is simple: instantiate if we're used more frequently
	 than the parent needs to be seen as a complete unit.  */
      if (elt->n_uses + elt->n_copies + parent_copies > parent_uses)
	instantiate_element (elt);
    }
  else
    {
      struct sra_elt *c;
      unsigned int this_uses = elt->n_uses + parent_uses;
      unsigned int this_copies = elt->n_copies + parent_copies;

      for (c = elt->children; c ; c = c->sibling)
	decide_instantiation_1 (c, this_uses, this_copies);
    }
}
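
/* An illustrative instance of the heuristic above: if "s" is never used
   as a whole (parent_uses == 0) and "s.a" has n_uses == 2, then
   2 + 0 + 0 > 0 holds and "s.a" is instantiated; if instead "s" were
   used whole three times and "s.a" referenced only once, 1 > 3 fails
   and "s.a" stays inside the aggregate.  */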
/* Compute the size and number of all instantiated elements below ELT.
   We will only care about this if the size of the complete structure
   fits in a HOST_WIDE_INT, so we don't have to worry about overflow.  */

static unsigned int
sum_instantiated_sizes (struct sra_elt *elt, unsigned HOST_WIDE_INT *sizep)
{
  if (elt->replacement)
    {
      *sizep += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (elt->type));
      return 1;
    }
  else
    {
      struct sra_elt *c;
      unsigned int count = 0;

      for (c = elt->children; c ; c = c->sibling)
	count += sum_instantiated_sizes (c, sizep);

      return count;
    }
}
/* Instantiate fields in ELT->TYPE that are not currently present as
   children of ELT.  */

static void instantiate_missing_elements (struct sra_elt *elt);

static void
instantiate_missing_elements_1 (struct sra_elt *elt, tree child, tree type)
{
  struct sra_elt *sub = lookup_element (elt, child, type, INSERT);
  if (sub->is_scalar)
    {
      if (sub->replacement == NULL)
	instantiate_element (sub);
    }
  else
    instantiate_missing_elements (sub);
}

static void
instantiate_missing_elements (struct sra_elt *elt)
{
  tree type = elt->type;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
	tree f;
	for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    instantiate_missing_elements_1 (elt, f, TREE_TYPE (f));
	break;
      }

    case ARRAY_TYPE:
      {
	tree i, max, subtype;

	i = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	max = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	subtype = TREE_TYPE (type);

	while (1)
	  {
	    instantiate_missing_elements_1 (elt, i, subtype);
	    if (tree_int_cst_equal (i, max))
	      break;
	    i = int_const_binop (PLUS_EXPR, i, integer_one_node, true);
	  }

	break;
      }

    case COMPLEX_TYPE:
      type = TREE_TYPE (type);
      instantiate_missing_elements_1 (elt, integer_zero_node, type);
      instantiate_missing_elements_1 (elt, integer_one_node, type);
      break;

    default:
      gcc_unreachable ();
    }
}
/* Make one pass across an element tree deciding whether to perform block
   or element copies.  If we decide on element copies, instantiate all
   elements.  Return true if there are any instantiated sub-elements.  */

static bool
decide_block_copy (struct sra_elt *elt)
{
  struct sra_elt *c;
  bool any_inst;

  /* If scalarization is disabled, respect it.  */
  if (elt->cannot_scalarize)
    {
      elt->use_block_copy = 1;

      if (dump_file)
	{
	  fputs ("Scalarization disabled for ", dump_file);
	  dump_sra_elt_name (dump_file, elt);
	  fputc ('\n', dump_file);
	}

      /* Disable scalarization of sub-elements */
      for (c = elt->children; c ; c = c->sibling)
	{
	  c->cannot_scalarize = 1;
	  decide_block_copy (c);
	}
      return false;
    }

  /* Don't decide if we've no uses.  */
  if (elt->n_uses == 0 && elt->n_copies == 0)
    ;

  else if (!elt->is_scalar)
    {
      tree size_tree = TYPE_SIZE_UNIT (elt->type);
      bool use_block_copy = true;

      /* Tradeoffs for COMPLEX types pretty much always make it better
	 to go ahead and split the components.  */
      if (TREE_CODE (elt->type) == COMPLEX_TYPE)
	use_block_copy = false;

      /* Don't bother trying to figure out the rest if the structure is
	 so large we can't do easy arithmetic.  This also forces block
	 copies for variable sized structures.  */
      else if (host_integerp (size_tree, 1))
	{
	  unsigned HOST_WIDE_INT full_size, inst_size = 0;
	  unsigned int max_size, max_count, inst_count, full_count;

	  /* If the sra-max-structure-size parameter is 0, then the
	     user has not overridden the parameter and we can choose a
	     sensible default.  */
	  max_size = SRA_MAX_STRUCTURE_SIZE
	    ? SRA_MAX_STRUCTURE_SIZE
	    : MOVE_RATIO * UNITS_PER_WORD;
	  max_count = SRA_MAX_STRUCTURE_COUNT
	    ? SRA_MAX_STRUCTURE_COUNT
	    : MOVE_RATIO;

	  full_size = tree_low_cst (size_tree, 1);
	  full_count = count_type_elements (elt->type);
	  inst_count = sum_instantiated_sizes (elt, &inst_size);

	  /* ??? What to do here.  If there are two fields, and we've only
	     instantiated one, then instantiating the other is clearly a win.
	     If there are a large number of fields then the size of the copy
	     is much more of a factor.  */

	  /* If the structure is small, and we've made copies, go ahead
	     and instantiate, hoping that the copies will go away.  */
	  if (full_size <= max_size
	      && (full_count - inst_count) <= max_count
	      && elt->n_copies > elt->n_uses)
	    use_block_copy = false;
	  else if (inst_count * 100 >= full_count * SRA_FIELD_STRUCTURE_RATIO
		   && inst_size * 100 >= full_size * SRA_FIELD_STRUCTURE_RATIO)
	    use_block_copy = false;

	  /* In order to avoid block copy, we have to be able to instantiate
	     all elements of the type.  See if this is possible.  */
	  if (!use_block_copy
	      && (!can_completely_scalarize_p (elt)
		  || !type_can_instantiate_all_elements (elt->type)))
	    use_block_copy = true;
	}
      elt->use_block_copy = use_block_copy;

      if (dump_file)
	{
	  fprintf (dump_file, "Using %s for ",
		   use_block_copy ? "block-copy" : "element-copy");
	  dump_sra_elt_name (dump_file, elt);
	  fputc ('\n', dump_file);
	}

      if (!use_block_copy)
	{
	  instantiate_missing_elements (elt);
	  return true;
	}
    }

  any_inst = elt->replacement != NULL;

  for (c = elt->children; c ; c = c->sibling)
    any_inst |= decide_block_copy (c);

  return any_inst;
}
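
/* An illustrative instance of the heuristics above: on a target where
   MOVE_RATIO * UNITS_PER_WORD yields, say, 16 bytes, a two-field 8-byte
   struct that is copied more often than it is used whole gets element
   copies (and all of its fields instantiated), while a 100-byte struct
   with only a few of its fields instantiated remains a block copy unless
   the instantiated share reaches SRA_FIELD_STRUCTURE_RATIO percent of
   both the field count and the size.  */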
/* Entry point to phase 3.  Instantiate scalar replacement variables.  */

static void
decide_instantiations (void)
{
  unsigned int i;
  bool cleared_any;
  bitmap_head done_head;
  bitmap_iterator bi;

  /* We cannot clear bits from a bitmap we're iterating over,
     so save up all the bits to clear until the end.  */
  bitmap_initialize (&done_head, &bitmap_default_obstack);
  cleared_any = false;

  EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
    {
      tree var = referenced_var (i);
      struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
      if (elt)
	{
	  decide_instantiation_1 (elt, 0, 0);
	  if (!decide_block_copy (elt))
	    elt = NULL;
	}
      if (!elt)
	{
	  bitmap_set_bit (&done_head, i);
	  cleared_any = true;
	}
    }

  if (cleared_any)
    {
      bitmap_and_compl_into (sra_candidates, &done_head);
      bitmap_and_compl_into (needs_copy_in, &done_head);
    }
  bitmap_clear (&done_head);

  if (dump_file)
    fputc ('\n', dump_file);
}
/* Phase Four: Update the function to match the replacements created.  */

/* Mark all the variables in V_MAY_DEF or V_MUST_DEF operands for STMT for
   renaming.  This becomes necessary when we modify all of a non-scalar.  */

static void
mark_all_v_defs (tree stmt)
{
  ssa_op_iter iter;
  tree sym;

  get_stmt_operands (stmt);

  FOR_EACH_SSA_TREE_OPERAND (sym, stmt, iter, SSA_OP_ALL_VIRTUALS)
    {
      if (TREE_CODE (sym) == SSA_NAME)
	sym = SSA_NAME_VAR (sym);
      bitmap_set_bit (vars_to_rename, var_ann (sym)->uid);
    }
}
/* Build a single level component reference to ELT rooted at BASE.  */

static tree
generate_one_element_ref (struct sra_elt *elt, tree base)
{
  switch (TREE_CODE (TREE_TYPE (base)))
    {
    case RECORD_TYPE:
      {
	tree field = elt->element;

	/* Watch out for compatible records with differing field lists.  */
	if (DECL_FIELD_CONTEXT (field) != TYPE_MAIN_VARIANT (TREE_TYPE (base)))
	  field = find_compatible_field (TREE_TYPE (base), field);

	return build (COMPONENT_REF, elt->type, base, field, NULL);
      }

    case ARRAY_TYPE:
      return build (ARRAY_REF, elt->type, base, elt->element, NULL, NULL);

    case COMPLEX_TYPE:
      if (elt->element == integer_zero_node)
	return build (REALPART_EXPR, elt->type, base);
      else
	return build (IMAGPART_EXPR, elt->type, base);

    default:
      gcc_unreachable ();
    }
}
/* Build a full component reference to ELT rooted at its native variable.  */

static tree
generate_element_ref (struct sra_elt *elt)
{
  if (elt->parent)
    return generate_one_element_ref (elt, generate_element_ref (elt->parent));
  else
    return elt->element;
}
/* Generate a set of assignment statements in *LIST_P to copy all
   instantiated elements under ELT to or from the equivalent structure
   rooted at EXPR.  COPY_OUT controls the direction of the copy, with
   true meaning to copy out of EXPR into ELT.  */

static void
generate_copy_inout (struct sra_elt *elt, bool copy_out, tree expr,
		     tree *list_p)
{
  struct sra_elt *c;
  tree t;

  if (elt->replacement)
    {
      if (copy_out)
	t = build (MODIFY_EXPR, void_type_node, elt->replacement, expr);
      else
	t = build (MODIFY_EXPR, void_type_node, expr, elt->replacement);
      append_to_statement_list (t, list_p);
    }
  else
    {
      for (c = elt->children; c ; c = c->sibling)
	{
	  t = generate_one_element_ref (c, unshare_expr (expr));
	  generate_copy_inout (c, copy_out, t, list_p);
	}
    }
}
/* Generate a set of assignment statements in *LIST_P to copy all instantiated
   elements under SRC to their counterparts under DST.  There must be a 1-1
   correspondence of instantiated elements.  */

static void
generate_element_copy (struct sra_elt *dst, struct sra_elt *src, tree *list_p)
{
  struct sra_elt *dc, *sc;

  for (dc = dst->children; dc ; dc = dc->sibling)
    {
      sc = lookup_element (src, dc->element, NULL, NO_INSERT);
      gcc_assert (sc);
      generate_element_copy (dc, sc, list_p);
    }

  if (dst->replacement)
    {
      tree t;

      gcc_assert (src->replacement);

      t = build (MODIFY_EXPR, void_type_node, dst->replacement,
		 src->replacement);
      append_to_statement_list (t, list_p);
    }
}
/* Generate a set of assignment statements in *LIST_P to zero all instantiated
   elements under ELT.  In addition, do not assign to elements that have been
   marked VISITED but do reset the visited flag; this allows easy coordination
   with generate_element_init.  */

static void
generate_element_zero (struct sra_elt *elt, tree *list_p)
{
  struct sra_elt *c;

  if (elt->visited)
    {
      elt->visited = false;
      return;
    }

  for (c = elt->children; c ; c = c->sibling)
    generate_element_zero (c, list_p);

  if (elt->replacement)
    {
      tree t;

      gcc_assert (elt->is_scalar);
      t = fold_convert (elt->type, integer_zero_node);

      t = build (MODIFY_EXPR, void_type_node, elt->replacement, t);
      append_to_statement_list (t, list_p);
    }
}
/* Generate an assignment VAR = INIT, where INIT may need gimplification.
   Add the result to *LIST_P.  */

static void
generate_one_element_init (tree var, tree init, tree *list_p)
{
  /* The replacement can be almost arbitrarily complex.  Gimplify.  */
  tree stmt = build (MODIFY_EXPR, void_type_node, var, init);
  gimplify_and_add (stmt, list_p);
}
/* Generate a set of assignment statements in *LIST_P to set all instantiated
   elements under ELT with the contents of the initializer INIT.  In addition,
   mark all assigned elements VISITED; this allows easy coordination with
   generate_element_zero.  Return false if we found a case we couldn't
   handle.  */

static bool
generate_element_init_1 (struct sra_elt *elt, tree init, tree *list_p)
{
  bool result = true;
  enum tree_code init_code;
  struct sra_elt *sub;
  tree t;

  /* We can be passed DECL_INITIAL of a static variable.  It might have a
     conversion, which we strip off here.  */
  STRIP_USELESS_TYPE_CONVERSION (init);
  init_code = TREE_CODE (init);

  if (elt->is_scalar)
    {
      if (elt->replacement)
	{
	  generate_one_element_init (elt->replacement, init, list_p);
	  elt->visited = true;
	}
      return result;
    }

  switch (init_code)
    {
    case COMPLEX_CST:
    case COMPLEX_EXPR:
      for (sub = elt->children; sub ; sub = sub->sibling)
	{
	  if (sub->element == integer_zero_node)
	    t = (init_code == COMPLEX_EXPR
		 ? TREE_OPERAND (init, 0) : TREE_REALPART (init));
	  else
	    t = (init_code == COMPLEX_EXPR
		 ? TREE_OPERAND (init, 1) : TREE_IMAGPART (init));
	  result &= generate_element_init_1 (sub, t, list_p);
	}
      break;

    case CONSTRUCTOR:
      for (t = CONSTRUCTOR_ELTS (init); t ; t = TREE_CHAIN (t))
	{
	  tree purpose = TREE_PURPOSE (t);
	  tree value = TREE_VALUE (t);

	  if (TREE_CODE (purpose) == RANGE_EXPR)
	    {
	      tree lower = TREE_OPERAND (purpose, 0);
	      tree upper = TREE_OPERAND (purpose, 1);

	      while (1)
		{
		  sub = lookup_element (elt, lower, NULL, NO_INSERT);
		  if (sub != NULL)
		    result &= generate_element_init_1 (sub, value, list_p);
		  if (tree_int_cst_equal (lower, upper))
		    break;
		  lower = int_const_binop (PLUS_EXPR, lower,
					   integer_one_node, true);
		}
	    }
	  else
	    {
	      sub = lookup_element (elt, purpose, NULL, NO_INSERT);
	      if (sub != NULL)
		result &= generate_element_init_1 (sub, value, list_p);
	    }
	}
      break;

    default:
      elt->visited = true;
      result = false;
    }

  return result;
}
/* A wrapper function for generate_element_init_1 that handles cleanup after
   gimplification.  */

static bool
generate_element_init (struct sra_elt *elt, tree init, tree *list_p)
{
  bool ret;

  push_gimplify_context ();
  ret = generate_element_init_1 (elt, init, list_p);
  pop_gimplify_context (NULL);

  /* The replacement can expose previously unreferenced variables.  */
  if (ret && *list_p)
    {
      tree_stmt_iterator i;
      size_t old, new, j;

      old = num_referenced_vars;

      for (i = tsi_start (*list_p); !tsi_end_p (i); tsi_next (&i))
	find_new_referenced_vars (tsi_stmt_ptr (i));

      new = num_referenced_vars;
      for (j = old; j < new; ++j)
	bitmap_set_bit (vars_to_rename, j);
    }

  return ret;
}
/* Insert STMT on all the outgoing edges out of BB.  Note that if BB
   has more than one edge, STMT will be replicated for each edge.  Also,
   abnormal edges will be ignored.  */

void
insert_edge_copies (tree stmt, basic_block bb)
{
  edge e;
  edge_iterator ei;
  bool first_copy;

  first_copy = true;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      /* We don't need to insert copies on abnormal edges.  The
	 value of the scalar replacement is not guaranteed to
	 be valid through an abnormal edge.  */
      if (!(e->flags & EDGE_ABNORMAL))
	{
	  if (first_copy)
	    {
	      bsi_insert_on_edge (e, stmt);
	      first_copy = false;
	    }
	  else
	    bsi_insert_on_edge (e, unsave_expr_now (stmt));
	}
    }
}
/* Helper function to insert LIST before BSI, and set up line number info.  */

static void
sra_insert_before (block_stmt_iterator *bsi, tree list)
{
  tree stmt = bsi_stmt (*bsi);

  if (EXPR_HAS_LOCATION (stmt))
    annotate_all_with_locus (&list, EXPR_LOCATION (stmt));
  bsi_insert_before (bsi, list, BSI_SAME_STMT);
}
/* Similarly, but insert after BSI.  Handles insertion onto edges as well.  */

static void
sra_insert_after (block_stmt_iterator *bsi, tree list)
{
  tree stmt = bsi_stmt (*bsi);

  if (EXPR_HAS_LOCATION (stmt))
    annotate_all_with_locus (&list, EXPR_LOCATION (stmt));

  if (stmt_ends_bb_p (stmt))
    insert_edge_copies (list, bsi->bb);
  else
    bsi_insert_after (bsi, list, BSI_SAME_STMT);
}
/* Similarly, but replace the statement at BSI.  */

static void
sra_replace (block_stmt_iterator *bsi, tree list)
{
  sra_insert_before (bsi, list);
  bsi_remove (bsi);
  if (bsi_end_p (*bsi))
    *bsi = bsi_last (bsi->bb);
  else
    bsi_prev (bsi);
}
/* Scalarize a USE.  To recap, this is either a simple reference to ELT,
   if elt is scalar, or some occurrence of ELT that requires a complete
   aggregate.  IS_OUTPUT is true if ELT is being modified.  */

static void
scalarize_use (struct sra_elt *elt, tree *expr_p, block_stmt_iterator *bsi,
	       bool is_output)
{
  tree list = NULL, stmt = bsi_stmt (*bsi);

  if (elt->replacement)
    {
      /* If we have a replacement, then updating the reference is as
	 simple as modifying the existing statement in place.  */
      if (is_output)
	mark_all_v_defs (stmt);
      *expr_p = elt->replacement;
      modify_stmt (stmt);
    }
  else
    {
      /* Otherwise we need some copies.  If ELT is being read, then we want
	 to store all (modified) sub-elements back into the structure before
	 the reference takes place.  If ELT is being written, then we want to
	 load the changed values back into our shadow variables.  */
      /* ??? We don't check modified for reads, we just always write all of
	 the values.  We should be able to record the SSA number of the VOP
	 for which the values were last read.  If that number matches the
	 SSA number of the VOP in the current statement, then we needn't
	 emit an assignment.  This would also eliminate double writes when
	 a structure is passed as more than one argument to a function call.
	 This optimization would be most effective if sra_walk_function
	 processed the blocks in dominator order.  */

      generate_copy_inout (elt, is_output, generate_element_ref (elt), &list);
      if (list == NULL)
	return;
      mark_all_v_defs (expr_first (list));
      if (is_output)
	sra_insert_after (bsi, list);
      else
	sra_insert_before (bsi, list);
    }
}
/* Scalarize a COPY.  To recap, this is an assignment statement between
   two scalarizable references, LHS_ELT and RHS_ELT.  */

static void
scalarize_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
		block_stmt_iterator *bsi)
{
  tree list, stmt;

  if (lhs_elt->replacement && rhs_elt->replacement)
    {
      /* If we have two scalar operands, modify the existing statement.  */
      stmt = bsi_stmt (*bsi);

      /* See the commentary in sra_walk_function concerning
	 RETURN_EXPR, and why we should never see one here.  */
      gcc_assert (TREE_CODE (stmt) == MODIFY_EXPR);

      TREE_OPERAND (stmt, 0) = lhs_elt->replacement;
      TREE_OPERAND (stmt, 1) = rhs_elt->replacement;
      modify_stmt (stmt);
    }
  else if (lhs_elt->use_block_copy || rhs_elt->use_block_copy)
    {
      /* If either side requires a block copy, then sync the RHS back
	 to the original structure, leave the original assignment
	 statement (which will perform the block copy), then load the
	 LHS values out of its now-updated original structure.  */
      /* ??? Could perform a modified pair-wise element copy.  That
	 would at least allow those elements that are instantiated in
	 both structures to be optimized well.  */

      list = NULL;
      generate_copy_inout (rhs_elt, false,
			   generate_element_ref (rhs_elt), &list);
      if (list)
	{
	  mark_all_v_defs (expr_first (list));
	  sra_insert_before (bsi, list);
	}

      list = NULL;
      generate_copy_inout (lhs_elt, true,
			   generate_element_ref (lhs_elt), &list);
      if (list)
	sra_insert_after (bsi, list);
    }
  else
    {
      /* Otherwise both sides must be fully instantiated.  In which
	 case perform pair-wise element assignments and replace the
	 original block copy statement.  */

      stmt = bsi_stmt (*bsi);
      mark_all_v_defs (stmt);

      list = NULL;
      generate_element_copy (lhs_elt, rhs_elt, &list);
      gcc_assert (list);
      sra_replace (bsi, list);
    }
}
/* Scalarize an INIT.  To recap, this is an assignment to a scalarizable
   reference from some form of constructor: CONSTRUCTOR, COMPLEX_CST or
   COMPLEX_EXPR.  If RHS is NULL, it should be treated as an empty
   CONSTRUCTOR.  */

static void
scalarize_init (struct sra_elt *lhs_elt, tree rhs, block_stmt_iterator *bsi)
{
  bool result = true;
  tree list = NULL;

  /* Generate initialization statements for all members extant in the RHS.  */
  if (rhs)
    {
      /* Unshare the expression just in case this is from a decl's initial.  */
      rhs = unshare_expr (rhs);
      result = generate_element_init (lhs_elt, rhs, &list);
    }

  /* CONSTRUCTOR is defined such that any member not mentioned is assigned
     a zero value.  Initialize the rest of the instantiated elements.  */
  generate_element_zero (lhs_elt, &list);

  if (!result)
    {
      /* If we failed to convert the entire initializer, then we must
	 leave the structure assignment in place and must load values
	 from the structure into the slots for which we did not find
	 constants.  The easiest way to do this is to generate a complete
	 copy-out, and then follow that with the constant assignments
	 that we were able to build.  DCE will clean things up.  */
      tree list0 = NULL;
      generate_copy_inout (lhs_elt, true, generate_element_ref (lhs_elt),
			   &list0);
      append_to_statement_list (list, &list0);
      list = list0;
    }

  if (lhs_elt->use_block_copy || !result)
    {
      /* Since LHS is not fully instantiated, we must leave the structure
	 assignment in place.  Treating this case differently from a USE
	 exposes constants to later optimizations.  */
      if (list)
	{
	  mark_all_v_defs (expr_first (list));
	  sra_insert_after (bsi, list);
	}
    }
  else
    {
      /* The LHS is fully instantiated.  The list of initializations
	 replaces the original structure assignment.  */
      gcc_assert (list);
      mark_all_v_defs (bsi_stmt (*bsi));
      sra_replace (bsi, list);
    }
}
/* A subroutine of scalarize_ldst called via walk_tree.  Set TREE_NO_TRAP
   on all INDIRECT_REFs.  */

static tree
mark_notrap (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (TREE_CODE (t) == INDIRECT_REF)
    {
      TREE_THIS_NOTRAP (t) = 1;
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;

  return NULL;
}
/* Scalarize a LDST.  To recap, this is an assignment between one scalarizable
   reference ELT and one non-scalarizable reference OTHER.  IS_OUTPUT is true
   if ELT is on the left-hand side.  */

static void
scalarize_ldst (struct sra_elt *elt, tree other,
		block_stmt_iterator *bsi, bool is_output)
{
  /* Shouldn't have gotten called for a scalar.  */
  gcc_assert (!elt->replacement);

  if (elt->use_block_copy)
    {
      /* Since ELT is not fully instantiated, we have to leave the
	 block copy in place.  Treat this as a USE.  */
      scalarize_use (elt, NULL, bsi, is_output);
    }
  else
    {
      /* The interesting case is when ELT is fully instantiated.  In this
	 case we can have each element stored/loaded directly to/from the
	 corresponding slot in OTHER.  This avoids a block copy.  */

      tree list = NULL, stmt = bsi_stmt (*bsi);

      mark_all_v_defs (stmt);
      generate_copy_inout (elt, is_output, other, &list);
      gcc_assert (list);

      /* Preserve EH semantics.  */
      if (stmt_ends_bb_p (stmt))
	{
	  tree_stmt_iterator tsi;
	  tree first;

	  /* Extract the first statement from LIST.  */
	  tsi = tsi_start (list);
	  first = tsi_stmt (tsi);
	  tsi_delink (&tsi);

	  /* Replace the old statement with this new representative.  */
	  bsi_replace (bsi, first, true);

	  if (!tsi_end_p (tsi))
	    {
	      /* If any reference would trap, then they all would.  And more
		 to the point, the first would.  Therefore none of the rest
		 will trap since the first didn't.  Indicate this by
		 iterating over the remaining statements and set
		 TREE_THIS_NOTRAP in all INDIRECT_REFs.  */
	      do
		{
		  walk_tree (tsi_stmt_ptr (tsi), mark_notrap, NULL, NULL);
		  tsi_next (&tsi);
		}
	      while (!tsi_end_p (tsi));

	      insert_edge_copies (list, bsi->bb);
	    }
	}
      else
	sra_replace (bsi, list);
    }
}
/* Generate initializations for all scalarizable parameters.  */

static void
scalarize_parms (void)
{
  tree list = NULL;
  unsigned i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (needs_copy_in, 0, i, bi)
    {
      tree var = referenced_var (i);
      struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
      generate_copy_inout (elt, true, var, &list);
    }

  if (list)
    insert_edge_copies (list, ENTRY_BLOCK_PTR);
}
/* Entry point to phase 4.  Update the function to match replacements.  */

static void
scalarize_function (void)
{
  static const struct sra_walk_fns fns = {
    scalarize_use, scalarize_copy, scalarize_init, scalarize_ldst, false
  };

  sra_walk_function (&fns);
  scalarize_parms ();
  bsi_commit_edge_inserts ();
}
/* Debug helper function.  Print ELT in a nice human-readable format.  */

static void
dump_sra_elt_name (FILE *f, struct sra_elt *elt)
{
  if (elt->parent && TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
    {
      fputs (elt->element == integer_zero_node ? "__real__ " : "__imag__ ", f);
      dump_sra_elt_name (f, elt->parent);
    }
  else
    {
      if (elt->parent)
	dump_sra_elt_name (f, elt->parent);
      if (DECL_P (elt->element))
	{
	  if (TREE_CODE (elt->element) == FIELD_DECL)
	    fputc ('.', f);
	  print_generic_expr (f, elt->element, dump_flags);
	}
      else
	fprintf (f, "[" HOST_WIDE_INT_PRINT_DEC "]",
		 TREE_INT_CST_LOW (elt->element));
    }
}

/* Likewise, but callable from the debugger.  */

void
debug_sra_elt_name (struct sra_elt *elt)
{
  dump_sra_elt_name (stderr, elt);
  fputc ('\n', stderr);
}
/* Main entry point.  */

static void
tree_sra (void)
{
  /* Initialize local variables.  */
  gcc_obstack_init (&sra_obstack);
  sra_candidates = BITMAP_ALLOC (NULL);
  needs_copy_in = BITMAP_ALLOC (NULL);
  sra_type_decomp_cache = BITMAP_ALLOC (NULL);
  sra_type_inst_cache = BITMAP_ALLOC (NULL);
  sra_map = htab_create (101, sra_elt_hash, sra_elt_eq, NULL);

  /* Scan.  If we find anything, instantiate and scalarize.  */
  if (find_candidates_for_sra ())
    {
      scan_function ();
      decide_instantiations ();
      scalarize_function ();
    }

  /* Free allocated memory.  */
  htab_delete (sra_map);
  sra_map = NULL;
  BITMAP_FREE (sra_candidates);
  BITMAP_FREE (needs_copy_in);
  BITMAP_FREE (sra_type_decomp_cache);
  BITMAP_FREE (sra_type_inst_cache);
  obstack_free (&sra_obstack, NULL);
}

static bool
gate_sra (void)
{
  return flag_tree_sra != 0;
}
struct tree_opt_pass pass_sra =
{
  "sra",				/* name */
  gate_sra,				/* gate */
  tree_sra,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_SRA,				/* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_rename_vars
    | TODO_ggc_collect | TODO_verify_ssa,  /* todo_flags_finish */
  0					/* letter */
};