/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "ggc.h"
#include "tree.h"

/* These RTL headers are needed for basic-block.h.  */
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "langhooks.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "tree-gimple.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "timevar.h"
#include "flags.h"
#include "bitmap.h"
#include "obstack.h"
#include "target.h"
/* expr.h is needed for MOVE_RATIO.  */
#include "expr.h"
#include "params.h"
/* The object of this pass is to replace a non-addressable aggregate with a
   set of independent variables.  Most of the time, all of these variables
   will be scalars.  But a secondary objective is to break up larger
   aggregates into smaller aggregates.  In the process we may find that some
   bits of the larger aggregate can be deleted as unreferenced.

   This substitution is done globally.  More localized substitutions would
   be the purview of a load-store motion pass.

   The optimization proceeds in phases:

     (1) Identify variables that have types that are candidates for
	 decomposition.

     (2) Scan the function looking for the ways these variables are used.
	 In particular we're interested in the number of times a variable
	 (or member) is needed as a complete unit, and the number of times
	 a variable (or member) is copied.

     (3) Based on the usage profile, instantiate substitution variables.

     (4) Scan the function making replacements.  */
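
/* As a concrete illustration of the phases above (a sketch, not the exact
   output of any particular GCC version): given

       struct point { int x; int y; } p;
       p.x = 3;
       p.y = 4;
       foo (p.x + p.y);

   phase (1) finds that P's type can be decomposed, phase (2) sees only
   scalar member references, phase (3) creates replacement variables named
   along the lines of p$x and p$y (see build_element_name below), and
   phase (4) rewrites the body roughly as

       p$x = 3;
       p$y = 4;
       foo (p$x + p$y);

   leaving P itself unreferenced and removable.  */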
/* The set of todo flags to return from tree_sra.  */
static unsigned int todoflags;

/* The set of aggregate variables that are candidates for scalarization.  */
static bitmap sra_candidates;

/* Set of scalarizable PARM_DECLs that need copy-in operations at the
   beginning of the function.  */
static bitmap needs_copy_in;

/* Sets of bit pairs that cache type decomposition and instantiation.  */
static bitmap sra_type_decomp_cache;
static bitmap sra_type_inst_cache;
/* One of these structures is created for each candidate aggregate and
   each (accessed) member or group of members of such an aggregate.  */
struct sra_elt
{
  /* A tree of the elements.  Used when we want to traverse everything.  */
  struct sra_elt *parent;
  struct sra_elt *groups;
  struct sra_elt *children;
  struct sra_elt *sibling;

  /* If this element is a root, then this is the VAR_DECL.  If this is
     a sub-element, this is some token used to identify the reference.
     In the case of COMPONENT_REF, this is the FIELD_DECL.  In the case
     of an ARRAY_REF, this is the (constant) index.  In the case of an
     ARRAY_RANGE_REF, this is the (constant) RANGE_EXPR.  In the case
     of a complex number, this is a zero or one.  */
  tree element;

  /* The type of the element.  */
  tree type;

  /* A VAR_DECL, for any sub-element we've decided to replace.  */
  tree replacement;

  /* The number of times the element is referenced as a whole.  I.e.
     given "a.b.c", this would be incremented for C, but not for A or B.  */
  unsigned int n_uses;

  /* The number of times the element is copied to or from another
     scalarizable element.  */
  unsigned int n_copies;

  /* True if TYPE is scalar.  */
  bool is_scalar;

  /* True if this element is a group of members of its parent.  */
  bool is_group;

  /* True if we saw something about this element that prevents scalarization,
     such as non-constant indexing.  */
  bool cannot_scalarize;

  /* True if we've decided that structure-to-structure assignment
     should happen via memcpy and not per-element.  */
  bool use_block_copy;

  /* True if everything under this element has been marked TREE_NO_WARNING.  */
  bool all_no_warning;

  /* A flag for use with/after random access traversals.  */
  bool visited;
};
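
/* Illustrative sketch of the tree this builds: for a reference "a.b.c",
   there is one sra_elt per level, linked via PARENT/CHILDREN/SIBLING:

       elt(a): element = VAR_DECL A,      parent = NULL
	 elt(b): element = FIELD_DECL B,  parent = elt(a)
	   elt(c): element = FIELD_DECL C, parent = elt(b)

   Per the N_USES comment above, only elt(c) has its use count bumped by
   "a.b.c"; RANGE_EXPR group elements hang off GROUPS rather than
   CHILDREN.  */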
#define IS_ELEMENT_FOR_GROUP(ELEMENT) (TREE_CODE (ELEMENT) == RANGE_EXPR)

#define FOR_EACH_ACTUAL_CHILD(CHILD, ELT)			\
  for ((CHILD) = (ELT)->is_group				\
		 ? next_child_for_group (NULL, (ELT))		\
		 : (ELT)->children;				\
       (CHILD);							\
       (CHILD) = (ELT)->is_group				\
		 ? next_child_for_group ((CHILD), (ELT))	\
		 : (CHILD)->sibling)
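
/* Typical usage of the iterator above (a sketch; "process" is a
   placeholder, not a function in this file):

       struct sra_elt *c;
       FOR_EACH_ACTUAL_CHILD (c, elt)
	 process (c);

   For group elements this routes through next_child_for_group, which
   filters the parent's children down to those inside the group's
   RANGE_EXPR; for ordinary elements it is a plain sibling-list walk.  */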
/* Helper function for above macro.  Return next child in group.  */
static struct sra_elt *
next_child_for_group (struct sra_elt *child, struct sra_elt *group)
{
  gcc_assert (group->is_group);

  /* Find the next child in the parent.  */
  if (child)
    child = child->sibling;
  else
    child = group->parent->children;

  /* Skip siblings that do not belong to the group.  */
  while (child)
    {
      tree g_elt = group->element;
      if (TREE_CODE (g_elt) == RANGE_EXPR)
	{
	  if (!tree_int_cst_lt (child->element, TREE_OPERAND (g_elt, 0))
	      && !tree_int_cst_lt (TREE_OPERAND (g_elt, 1), child->element))
	    break;
	}
      else
	gcc_unreachable ();

      child = child->sibling;
    }

  return child;
}
/* Random access to the child of a parent is performed by hashing.
   This prevents quadratic behavior, and allows SRA to function
   reasonably on larger records.  */
static htab_t sra_map;

/* All structures are allocated out of the following obstack.  */
static struct obstack sra_obstack;

/* Debugging functions.  */
static void dump_sra_elt_name (FILE *, struct sra_elt *);
extern void debug_sra_elt_name (struct sra_elt *);

/* Forward declarations.  */
static tree generate_element_ref (struct sra_elt *);

/* Return true if DECL is an SRA candidate.  */

static bool
is_sra_candidate_decl (tree decl)
{
  return DECL_P (decl) && bitmap_bit_p (sra_candidates, DECL_UID (decl));
}

/* Return true if TYPE is a scalar type.  */

static bool
is_sra_scalar_type (tree type)
{
  enum tree_code code = TREE_CODE (type);
  return (code == INTEGER_TYPE || code == REAL_TYPE || code == VECTOR_TYPE
	  || code == ENUMERAL_TYPE || code == BOOLEAN_TYPE
	  || code == POINTER_TYPE || code == OFFSET_TYPE
	  || code == REFERENCE_TYPE);
}
/* Return true if TYPE can be decomposed into a set of independent variables.

   Note that this doesn't imply that all elements of TYPE can be
   instantiated, just that if we decide to break up the type into
   separate pieces that it can be done.  */

bool
sra_type_can_be_decomposed_p (tree type)
{
  unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
  tree t;

  /* Avoid searching the same type twice.  */
  if (bitmap_bit_p (sra_type_decomp_cache, cache+0))
    return true;
  if (bitmap_bit_p (sra_type_decomp_cache, cache+1))
    return false;

  /* The type must have a definite nonzero size.  */
  if (TYPE_SIZE (type) == NULL || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
      || integer_zerop (TYPE_SIZE (type)))
    goto fail;

  /* The type must be a non-union aggregate.  */
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
	bool saw_one_field = false;

	for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
	  if (TREE_CODE (t) == FIELD_DECL)
	    {
	      /* Reject incorrectly represented bit fields.  */
	      if (DECL_BIT_FIELD (t)
		  && (tree_low_cst (DECL_SIZE (t), 1)
		      != TYPE_PRECISION (TREE_TYPE (t))))
		goto fail;

	      saw_one_field = true;
	    }

	/* Record types must have at least one field.  */
	if (!saw_one_field)
	  goto fail;
      }
      break;

    case ARRAY_TYPE:
      /* Array types must have a fixed lower and upper bound.  */
      t = TYPE_DOMAIN (type);
      if (t == NULL)
	goto fail;
      if (TYPE_MIN_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MIN_VALUE (t)))
	goto fail;
      if (TYPE_MAX_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MAX_VALUE (t)))
	goto fail;
      break;

    case COMPLEX_TYPE:
      break;

    default:
      goto fail;
    }

  bitmap_set_bit (sra_type_decomp_cache, cache+0);
  return true;

 fail:
  bitmap_set_bit (sra_type_decomp_cache, cache+1);
  return false;
}
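
/* Examples of the checks above (illustrative): "struct { int a; float b; }"
   or "int[4]" can be decomposed; a union, a zero-sized type, an array
   without constant bounds, or a record whose bit field size disagrees with
   its type's precision cannot.  The result is cached per main variant as a
   bit pair: bit 2*UID set means "yes", bit 2*UID+1 set means "no".  */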
/* Return true if DECL can be decomposed into a set of independent
   (though not necessarily scalar) variables.  */

static bool
decl_can_be_decomposed_p (tree var)
{
  /* Early out for scalars.  */
  if (is_sra_scalar_type (TREE_TYPE (var)))
    return false;

  /* The variable must not be aliased.  */
  if (!is_gimple_non_addressable (var))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Cannot scalarize variable ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, " because it must live in memory\n");
	}
      return false;
    }

  /* The variable must not be volatile.  */
  if (TREE_THIS_VOLATILE (var))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Cannot scalarize variable ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, " because it is declared volatile\n");
	}
      return false;
    }

  /* We must be able to decompose the variable's type.  */
  if (!sra_type_can_be_decomposed_p (TREE_TYPE (var)))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Cannot scalarize variable ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, " because its type cannot be decomposed\n");
	}
      return false;
    }

  return true;
}
/* Return true if TYPE can be *completely* decomposed into scalars.  */

static bool
type_can_instantiate_all_elements (tree type)
{
  if (is_sra_scalar_type (type))
    return true;
  if (!sra_type_can_be_decomposed_p (type))
    return false;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
	unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
	tree f;

	if (bitmap_bit_p (sra_type_inst_cache, cache+0))
	  return true;
	if (bitmap_bit_p (sra_type_inst_cache, cache+1))
	  return false;

	for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      if (!type_can_instantiate_all_elements (TREE_TYPE (f)))
		{
		  bitmap_set_bit (sra_type_inst_cache, cache+1);
		  return false;
		}
	    }

	bitmap_set_bit (sra_type_inst_cache, cache+0);
	return true;
      }

    case ARRAY_TYPE:
      return type_can_instantiate_all_elements (TREE_TYPE (type));

    case COMPLEX_TYPE:
      return true;

    default:
      gcc_unreachable ();
    }
}
/* Test whether ELT or some sub-element cannot be scalarized.  */

static bool
can_completely_scalarize_p (struct sra_elt *elt)
{
  struct sra_elt *c;

  if (elt->cannot_scalarize)
    return false;

  for (c = elt->children; c; c = c->sibling)
    if (!can_completely_scalarize_p (c))
      return false;

  for (c = elt->groups; c; c = c->sibling)
    if (!can_completely_scalarize_p (c))
      return false;

  return true;
}
/* A simplified tree hashing algorithm that only handles the types of
   trees we expect to find in sra_elt->element.  */

static hashval_t
sra_hash_tree (tree t)
{
  hashval_t h;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      h = DECL_UID (t);
      break;

    case INTEGER_CST:
      h = TREE_INT_CST_LOW (t) ^ TREE_INT_CST_HIGH (t);
      break;

    case RANGE_EXPR:
      h = iterative_hash_expr (TREE_OPERAND (t, 0), 0);
      h = iterative_hash_expr (TREE_OPERAND (t, 1), h);
      break;

    case FIELD_DECL:
      /* We can have types that are compatible, but have different member
	 lists, so we can't hash fields by ID.  Use offsets instead.  */
      h = iterative_hash_expr (DECL_FIELD_OFFSET (t), 0);
      h = iterative_hash_expr (DECL_FIELD_BIT_OFFSET (t), h);
      break;

    default:
      gcc_unreachable ();
    }

  return h;
}
/* Hash function for type SRA_PAIR.  */

static hashval_t
sra_elt_hash (const void *x)
{
  const struct sra_elt *e = x;
  const struct sra_elt *p;
  hashval_t h;

  h = sra_hash_tree (e->element);

  /* Take into account everything back up the chain.  Given that chain
     lengths are rarely very long, this should be acceptable.  If we
     truly identify this as a performance problem, it should work to
     hash the pointer value "e->parent".  */
  for (p = e->parent; p ; p = p->parent)
    h = (h * 65521) ^ sra_hash_tree (p->element);

  return h;
}

/* Equality function for type SRA_PAIR.  */

static int
sra_elt_eq (const void *x, const void *y)
{
  const struct sra_elt *a = x;
  const struct sra_elt *b = y;
  tree ae, be;

  if (a->parent != b->parent)
    return false;

  ae = a->element;
  be = b->element;

  if (ae == be)
    return true;
  if (TREE_CODE (ae) != TREE_CODE (be))
    return false;

  switch (TREE_CODE (ae))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* These are all pointer unique.  */
      return false;

    case INTEGER_CST:
      /* Integers are not pointer unique, so compare their values.  */
      return tree_int_cst_equal (ae, be);

    case RANGE_EXPR:
      return
	tree_int_cst_equal (TREE_OPERAND (ae, 0), TREE_OPERAND (be, 0))
	&& tree_int_cst_equal (TREE_OPERAND (ae, 1), TREE_OPERAND (be, 1));

    case FIELD_DECL:
      /* Fields are unique within a record, but not between
	 compatible records.  */
      if (DECL_FIELD_CONTEXT (ae) == DECL_FIELD_CONTEXT (be))
	return false;
      return fields_compatible_p (ae, be);

    default:
      gcc_unreachable ();
    }
}
/* Create or return the SRA_ELT structure for CHILD in PARENT.  PARENT
   may be null, in which case CHILD must be a DECL.  */

static struct sra_elt *
lookup_element (struct sra_elt *parent, tree child, tree type,
		enum insert_option insert)
{
  struct sra_elt dummy;
  struct sra_elt **slot;
  struct sra_elt *elt;

  if (parent)
    dummy.parent = parent->is_group ? parent->parent : parent;
  else
    dummy.parent = NULL;
  dummy.element = child;

  slot = (struct sra_elt **) htab_find_slot (sra_map, &dummy, insert);
  if (!slot && insert == NO_INSERT)
    return NULL;

  elt = *slot;
  if (!elt && insert == INSERT)
    {
      *slot = elt = obstack_alloc (&sra_obstack, sizeof (*elt));
      memset (elt, 0, sizeof (*elt));

      elt->parent = parent;
      elt->element = child;
      elt->type = type;
      elt->is_scalar = is_sra_scalar_type (type);

      if (parent)
	{
	  if (IS_ELEMENT_FOR_GROUP (elt->element))
	    {
	      elt->is_group = true;
	      elt->sibling = parent->groups;
	      parent->groups = elt;
	    }
	  else
	    {
	      elt->sibling = parent->children;
	      parent->children = elt;
	    }
	}

      /* If this is a parameter, then if we want to scalarize, we have
	 one copy from the true function parameter.  Count it now.  */
      if (TREE_CODE (child) == PARM_DECL)
	{
	  elt->n_copies = 1;
	  bitmap_set_bit (needs_copy_in, DECL_UID (child));
	}
    }

  return elt;
}
/* Create or return the SRA_ELT structure for EXPR if the expression
   refers to a scalarizable variable.  */

static struct sra_elt *
maybe_lookup_element_for_expr (tree expr)
{
  struct sra_elt *elt;
  tree child;

  switch (TREE_CODE (expr))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      if (is_sra_candidate_decl (expr))
	return lookup_element (NULL, expr, TREE_TYPE (expr), INSERT);
      return NULL;

    case ARRAY_REF:
      /* We can't scalarize variable array indices.  */
      if (in_array_bounds_p (expr))
	child = TREE_OPERAND (expr, 1);
      else
	return NULL;
      break;

    case ARRAY_RANGE_REF:
      /* We can't scalarize variable array indices.  */
      if (range_in_array_bounds_p (expr))
	{
	  tree domain = TYPE_DOMAIN (TREE_TYPE (expr));
	  child = build2 (RANGE_EXPR, integer_type_node,
			  TYPE_MIN_VALUE (domain), TYPE_MAX_VALUE (domain));
	}
      else
	return NULL;
      break;

    case COMPONENT_REF:
      /* Don't look through unions.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) != RECORD_TYPE)
	return NULL;
      child = TREE_OPERAND (expr, 1);
      break;

    case REALPART_EXPR:
      child = integer_zero_node;
      break;
    case IMAGPART_EXPR:
      child = integer_one_node;
      break;

    default:
      return NULL;
    }

  elt = maybe_lookup_element_for_expr (TREE_OPERAND (expr, 0));
  if (elt)
    return lookup_element (elt, child, TREE_TYPE (expr), INSERT);
  return NULL;
}
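
/* Sketch of the recursion above: for "a.b[2]", the ARRAY_REF case picks the
   constant index 2 as CHILD, the recursive call on "a.b" picks FIELD_DECL B,
   and the recursion bottoms out at VAR_DECL A.  Each level then calls
   lookup_element with INSERT, so the sra_elt tree is built lazily as
   references are encountered.  */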
/* Functions to walk just enough of the tree to see all scalarizable
   references, and categorize them.  */

/* A set of callbacks for phases 2 and 4.  They'll be invoked for the
   various kinds of references seen.  In all cases, *BSI is an iterator
   pointing to the statement being processed.  */
struct sra_walk_fns
{
  /* Invoked when ELT is required as a unit.  Note that ELT might refer to
     a leaf node, in which case this is a simple scalar reference.  *EXPR_P
     points to the location of the expression.  IS_OUTPUT is true if this
     is a left-hand-side reference.  USE_ALL is true if we saw something we
     couldn't quite identify and had to force the use of the entire object.  */
  void (*use) (struct sra_elt *elt, tree *expr_p,
	       block_stmt_iterator *bsi, bool is_output, bool use_all);

  /* Invoked when we have a copy between two scalarizable references.  */
  void (*copy) (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
		block_stmt_iterator *bsi);

  /* Invoked when ELT is initialized from a constant.  VALUE may be NULL,
     in which case it should be treated as an empty CONSTRUCTOR.  */
  void (*init) (struct sra_elt *elt, tree value, block_stmt_iterator *bsi);

  /* Invoked when we have a copy between one scalarizable reference ELT
     and one non-scalarizable reference OTHER.  IS_OUTPUT is true if ELT
     is on the left-hand side.  */
  void (*ldst) (struct sra_elt *elt, tree other,
		block_stmt_iterator *bsi, bool is_output);

  /* True during phase 2, false during phase 4.  */
  /* ??? This is a hack.  */
  bool initial_scan;
};

#ifdef ENABLE_CHECKING
/* Invoked via walk_tree, if *TP contains a candidate decl, return it.  */

static tree
sra_find_candidate_decl (tree *tp, int *walk_subtrees,
			 void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
    {
      *walk_subtrees = 0;
      if (is_sra_candidate_decl (t))
	return t;
    }
  else if (TYPE_P (t))
    *walk_subtrees = 0;

  return NULL;
}
#endif
/* Walk most expressions looking for a scalarizable aggregate.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_expr (tree *expr_p, block_stmt_iterator *bsi, bool is_output,
	       const struct sra_walk_fns *fns)
{
  tree expr = *expr_p;
  tree inner = expr;
  bool disable_scalarization = false;
  bool use_all_p = false;

  /* We're looking to collect a reference expression between EXPR and INNER,
     such that INNER is a scalarizable decl and all other nodes through EXPR
     are references that we can scalarize.  If we come across something that
     we can't scalarize, we reset EXPR.  This has the effect of making it
     appear that we're referring to the larger expression as a whole.  */

  while (1)
    switch (TREE_CODE (inner))
      {
      case VAR_DECL:
      case PARM_DECL:
      case RESULT_DECL:
	/* If there is a scalarizable decl at the bottom, then process it.  */
	if (is_sra_candidate_decl (inner))
	  {
	    struct sra_elt *elt = maybe_lookup_element_for_expr (expr);
	    if (disable_scalarization)
	      elt->cannot_scalarize = true;
	    else
	      fns->use (elt, expr_p, bsi, is_output, use_all_p);
	  }
	return;

      case ARRAY_REF:
	/* Non-constant index means any member may be accessed.  Prevent the
	   expression from being scalarized.  If we were to treat this as a
	   reference to the whole array, we can wind up with a single dynamic
	   index reference inside a loop being overridden by several constant
	   index references during loop setup.  It's possible that this could
	   be avoided by using dynamic usage counts based on BB trip counts
	   (based on loop analysis or profiling), but that hardly seems worth
	   the effort.  */
	/* ??? Hack.  Figure out how to push this into the scan routines
	   without duplicating too much code.  */
	if (!in_array_bounds_p (inner))
	  {
	    disable_scalarization = true;
	    goto use_all;
	  }
	/* ??? Are we assured that non-constant bounds and stride will have
	   the same value everywhere?  I don't think Fortran will...  */
	if (TREE_OPERAND (inner, 2) || TREE_OPERAND (inner, 3))
	  goto use_all;
	inner = TREE_OPERAND (inner, 0);
	break;

      case ARRAY_RANGE_REF:
	if (!range_in_array_bounds_p (inner))
	  {
	    disable_scalarization = true;
	    goto use_all;
	  }
	/* ??? See above re non-constant bounds and stride.  */
	if (TREE_OPERAND (inner, 2) || TREE_OPERAND (inner, 3))
	  goto use_all;
	inner = TREE_OPERAND (inner, 0);
	break;

      case COMPONENT_REF:
	/* A reference to a union member constitutes a reference to the
	   entire union.  */
	if (TREE_CODE (TREE_TYPE (TREE_OPERAND (inner, 0))) != RECORD_TYPE)
	  goto use_all;
	/* ??? See above re non-constant stride.  */
	if (TREE_OPERAND (inner, 2))
	  goto use_all;
	inner = TREE_OPERAND (inner, 0);
	break;

      case REALPART_EXPR:
      case IMAGPART_EXPR:
	inner = TREE_OPERAND (inner, 0);
	break;

      case BIT_FIELD_REF:
	/* A bit field reference (access to *multiple* fields simultaneously)
	   is not currently scalarized.  Consider this an access to the
	   complete outer element, to which walk_tree will bring us next.  */
	goto use_all;

      case VIEW_CONVERT_EXPR:
      case NOP_EXPR:
	/* Similarly, a view/nop explicitly wants to look at an object in a
	   type other than the one we've scalarized.  */
	goto use_all;

      case WITH_SIZE_EXPR:
	/* This is a transparent wrapper.  The entire inner expression really
	   is being used.  */
	goto use_all;

      use_all:
	expr_p = &TREE_OPERAND (inner, 0);
	inner = expr = *expr_p;
	use_all_p = true;
	break;

      default:
#ifdef ENABLE_CHECKING
	/* Validate that we're not missing any references.  */
	gcc_assert (!walk_tree (&inner, sra_find_candidate_decl, NULL, NULL));
#endif
	return;
      }
}
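
/* Example of the walk above (illustrative): for "u.f" where U is a union,
   the COMPONENT_REF case fails the RECORD_TYPE test and falls through to
   use_all, so any candidate decl found deeper is reported to FNS->USE with
   USE_ALL true.  For "a.v[i]" with a non-constant I, the ARRAY_REF case
   sets DISABLE_SCALARIZATION instead, which permanently marks the root
   element CANNOT_SCALARIZE.  */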
/* Walk a TREE_LIST of values looking for scalarizable aggregates.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_tree_list (tree list, block_stmt_iterator *bsi, bool is_output,
		    const struct sra_walk_fns *fns)
{
  tree op;
  for (op = list; op ; op = TREE_CHAIN (op))
    sra_walk_expr (&TREE_VALUE (op), bsi, is_output, fns);
}

/* Walk the arguments of a CALL_EXPR looking for scalarizable aggregates.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_call_expr (tree expr, block_stmt_iterator *bsi,
		    const struct sra_walk_fns *fns)
{
  sra_walk_tree_list (TREE_OPERAND (expr, 1), bsi, false, fns);
}

/* Walk the inputs and outputs of an ASM_EXPR looking for scalarizable
   aggregates.  If we find one, invoke FNS->USE.  */

static void
sra_walk_asm_expr (tree expr, block_stmt_iterator *bsi,
		   const struct sra_walk_fns *fns)
{
  sra_walk_tree_list (ASM_INPUTS (expr), bsi, false, fns);
  sra_walk_tree_list (ASM_OUTPUTS (expr), bsi, true, fns);
}
/* Walk a GIMPLE_MODIFY_STMT and categorize the assignment appropriately.  */

static void
sra_walk_gimple_modify_stmt (tree expr, block_stmt_iterator *bsi,
			     const struct sra_walk_fns *fns)
{
  struct sra_elt *lhs_elt, *rhs_elt;
  tree lhs, rhs;

  lhs = GIMPLE_STMT_OPERAND (expr, 0);
  rhs = GIMPLE_STMT_OPERAND (expr, 1);
  lhs_elt = maybe_lookup_element_for_expr (lhs);
  rhs_elt = maybe_lookup_element_for_expr (rhs);

  /* If both sides are scalarizable, this is a COPY operation.  */
  if (lhs_elt && rhs_elt)
    {
      fns->copy (lhs_elt, rhs_elt, bsi);
      return;
    }

  /* If the RHS is scalarizable, handle it.  There are only two cases.  */
  if (rhs_elt)
    {
      if (!rhs_elt->is_scalar)
	fns->ldst (rhs_elt, lhs, bsi, false);
      else
	fns->use (rhs_elt, &GIMPLE_STMT_OPERAND (expr, 1), bsi, false, false);
    }

  /* If it isn't scalarizable, there may be scalarizable variables within, so
     check for a call or else walk the RHS to see if we need to do any
     copy-in operations.  We need to do it before the LHS is scalarized so
     that the statements get inserted in the proper place, before any
     copy-out operations.  */
  else
    {
      tree call = get_call_expr_in (rhs);
      if (call)
	sra_walk_call_expr (call, bsi, fns);
      else
	sra_walk_expr (&GIMPLE_STMT_OPERAND (expr, 1), bsi, false, fns);
    }

  /* Likewise, handle the LHS being scalarizable.  We have cases similar
     to those above, but also want to handle RHS being constant.  */
  if (lhs_elt)
    {
      /* If this is an assignment from a constant, or constructor, then
	 we have access to all of the elements individually.  Invoke INIT.  */
      if (TREE_CODE (rhs) == COMPLEX_EXPR
	  || TREE_CODE (rhs) == COMPLEX_CST
	  || TREE_CODE (rhs) == CONSTRUCTOR)
	fns->init (lhs_elt, rhs, bsi);

      /* If this is an assignment from read-only memory, treat this as if
	 we'd been passed the constructor directly.  Invoke INIT.  */
      else if (TREE_CODE (rhs) == VAR_DECL
	       && TREE_STATIC (rhs)
	       && TREE_READONLY (rhs)
	       && targetm.binds_local_p (rhs))
	fns->init (lhs_elt, DECL_INITIAL (rhs), bsi);

      /* If this is a copy from a non-scalarizable lvalue, invoke LDST.
	 The lvalue requirement prevents us from trying to directly scalarize
	 the result of a function call.  Which would result in trying to call
	 the function multiple times, and other evil things.  */
      else if (!lhs_elt->is_scalar && is_gimple_addressable (rhs))
	fns->ldst (lhs_elt, rhs, bsi, true);

      /* Otherwise we're being used in some context that requires the
	 aggregate to be seen as a whole.  Invoke USE.  */
      else
	fns->use (lhs_elt, &GIMPLE_STMT_OPERAND (expr, 0), bsi, true, false);
    }

  /* Similarly to above, LHS_ELT being null only means that the LHS as a
     whole is not a scalarizable reference.  There may be occurrences of
     scalarizable variables within, which implies a USE.  */
  else
    sra_walk_expr (&GIMPLE_STMT_OPERAND (expr, 0), bsi, true, fns);
}
/* Entry point to the walk functions.  Search the entire function,
   invoking the callbacks in FNS on each of the references to
   scalarizable variables.  */

static void
sra_walk_function (const struct sra_walk_fns *fns)
{
  basic_block bb;
  block_stmt_iterator si, ni;

  /* ??? Phase 4 could derive some benefit from walking the function in
     dominator tree order.  */

  FOR_EACH_BB (bb)
    for (si = bsi_start (bb); !bsi_end_p (si); si = ni)
      {
	tree stmt, t;
	stmt_ann_t ann;

	stmt = bsi_stmt (si);
	ann = stmt_ann (stmt);

	ni = si;
	bsi_next (&ni);

	/* If the statement has no virtual operands, then it doesn't
	   make any structure references that we care about.  */
	if (ZERO_SSA_OPERANDS (stmt, (SSA_OP_VIRTUAL_DEFS | SSA_OP_VUSE)))
	  continue;

	switch (TREE_CODE (stmt))
	  {
	  case RETURN_EXPR:
	    /* If we have "return <retval>" then the return value is
	       already exposed for our pleasure.  Walk it as a USE to
	       force all the components back in place for the return.

	       If we have an embedded assignment, then <retval> is of
	       a type that gets returned in registers in this ABI, and
	       we do not wish to extend their lifetimes.  Treat this
	       as a USE of the variable on the RHS of this assignment.  */

	    t = TREE_OPERAND (stmt, 0);
	    if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
	      sra_walk_expr (&GIMPLE_STMT_OPERAND (t, 1), &si, false, fns);
	    else
	      sra_walk_expr (&TREE_OPERAND (stmt, 0), &si, false, fns);
	    break;

	  case GIMPLE_MODIFY_STMT:
	    sra_walk_gimple_modify_stmt (stmt, &si, fns);
	    break;
	  case CALL_EXPR:
	    sra_walk_call_expr (stmt, &si, fns);
	    break;
	  case ASM_EXPR:
	    sra_walk_asm_expr (stmt, &si, fns);
	    break;

	  default:
	    break;
	  }
      }
}
/* Phase One: Scan all referenced variables in the program looking for
   structures that could be decomposed.  */

static bool
find_candidates_for_sra (void)
{
  bool any_set = false;
  tree var;
  referenced_var_iterator rvi;

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (decl_can_be_decomposed_p (var))
	{
	  bitmap_set_bit (sra_candidates, DECL_UID (var));
	  any_set = true;
	}
    }

  return any_set;
}
/* Phase Two: Scan all references to scalarizable variables.  Count the
   number of times they are used or copied respectively.  */

/* Callbacks to fill in SRA_WALK_FNS.  Everything but USE is
   considered a copy, because we can decompose the reference such that
   the sub-elements needn't be contiguous.  */

static void
scan_use (struct sra_elt *elt, tree *expr_p ATTRIBUTE_UNUSED,
	  block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
	  bool is_output ATTRIBUTE_UNUSED, bool use_all ATTRIBUTE_UNUSED)
{
  elt->n_uses += 1;
}

static void
scan_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
	   block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
{
  lhs_elt->n_copies += 1;
  rhs_elt->n_copies += 1;
}

static void
scan_init (struct sra_elt *lhs_elt, tree rhs ATTRIBUTE_UNUSED,
	   block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
{
  lhs_elt->n_copies += 1;
}

static void
scan_ldst (struct sra_elt *elt, tree other ATTRIBUTE_UNUSED,
	   block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
	   bool is_output ATTRIBUTE_UNUSED)
{
  elt->n_copies += 1;
}

/* Dump the values we collected during the scanning phase.  */

static void
scan_dump (struct sra_elt *elt)
{
  struct sra_elt *c;

  dump_sra_elt_name (dump_file, elt);
  fprintf (dump_file, ": n_uses=%u n_copies=%u\n", elt->n_uses, elt->n_copies);

  for (c = elt->children; c ; c = c->sibling)
    scan_dump (c);

  for (c = elt->groups; c ; c = c->sibling)
    scan_dump (c);
}
/* Entry point to phase 2.  Scan the entire function, building up
   scalarization data structures, recording copies and uses.  */

static void
scan_function (void)
{
  static const struct sra_walk_fns fns = {
    scan_use, scan_copy, scan_init, scan_ldst, true
  };
  bitmap_iterator bi;

  sra_walk_function (&fns);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      unsigned i;

      fputs ("\nScan results:\n", dump_file);
      EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
	{
	  tree var = referenced_var (i);
	  struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
	  if (elt)
	    scan_dump (elt);
	}
      fputc ('\n', dump_file);
    }
}
/* Phase Three: Make decisions about which variables to scalarize, if any.
   All elements to be scalarized have replacement variables made for them.  */

/* A subroutine of build_element_name.  Recursively build the element
   name on the obstack.  */

static void
build_element_name_1 (struct sra_elt *elt)
{
  tree t;
  char buffer[32];

  if (elt->parent)
    {
      build_element_name_1 (elt->parent);
      obstack_1grow (&sra_obstack, '$');

      if (TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
	{
	  if (elt->element == integer_zero_node)
	    obstack_grow (&sra_obstack, "real", 4);
	  else
	    obstack_grow (&sra_obstack, "imag", 4);
	  return;
	}
    }

  t = elt->element;
  if (TREE_CODE (t) == INTEGER_CST)
    {
      /* ??? Eh.  Don't bother doing double-wide printing.  */
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (t));
      obstack_grow (&sra_obstack, buffer, strlen (buffer));
    }
  else
    {
      tree name = DECL_NAME (t);
      if (name)
	obstack_grow (&sra_obstack, IDENTIFIER_POINTER (name),
		      IDENTIFIER_LENGTH (name));
      else
	{
	  sprintf (buffer, "D%u", DECL_UID (t));
	  obstack_grow (&sra_obstack, buffer, strlen (buffer));
	}
    }
}

/* Construct a pretty variable name for an element's replacement variable.
   The name is built on the obstack.  */

static char *
build_element_name (struct sra_elt *elt)
{
  build_element_name_1 (elt);
  obstack_1grow (&sra_obstack, '\0');
  return XOBFINISH (&sra_obstack, char *);
}
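
/* Illustrative names produced by the routines above: the real part of a
   complex variable "c" is named "c$real", field Y of variable P is "p$y",
   and element 3 of array field V of D is "d$v$3".  These become the
   DECL_NAMEs of the replacement variables created by
   instantiate_element.  */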
/* Instantiate an element as an independent variable.  */

static void
instantiate_element (struct sra_elt *elt)
{
  struct sra_elt *base_elt;
  tree var, base;

  for (base_elt = elt; base_elt->parent; base_elt = base_elt->parent)
    continue;
  base = base_elt->element;

  elt->replacement = var = make_rename_temp (elt->type, "SR");
  DECL_SOURCE_LOCATION (var) = DECL_SOURCE_LOCATION (base);
  DECL_ARTIFICIAL (var) = 1;

  if (TREE_THIS_VOLATILE (elt->type))
    {
      TREE_THIS_VOLATILE (var) = 1;
      TREE_SIDE_EFFECTS (var) = 1;
    }

  if (DECL_NAME (base) && !DECL_IGNORED_P (base))
    {
      char *pretty_name = build_element_name (elt);
      DECL_NAME (var) = get_identifier (pretty_name);
      obstack_free (&sra_obstack, pretty_name);

      SET_DECL_DEBUG_EXPR (var, generate_element_ref (elt));
      DECL_DEBUG_EXPR_IS_FROM (var) = 1;

      DECL_IGNORED_P (var) = 0;
      TREE_NO_WARNING (var) = TREE_NO_WARNING (base);
    }
  else
    {
      DECL_IGNORED_P (var) = 1;
      /* ??? We can't generate any warning that would be meaningful.  */
      TREE_NO_WARNING (var) = 1;
    }

  if (dump_file)
    {
      fputs ("  ", dump_file);
      dump_sra_elt_name (dump_file, elt);
      fputs (" -> ", dump_file);
      print_generic_expr (dump_file, var, dump_flags);
      fputc ('\n', dump_file);
    }
}
/* Make one pass across an element tree deciding whether or not it's
   profitable to instantiate individual leaf scalars.

   PARENT_USES and PARENT_COPIES are the sum of the N_USES and N_COPIES
   fields all the way up the tree.  */

static void
decide_instantiation_1 (struct sra_elt *elt, unsigned int parent_uses,
			unsigned int parent_copies)
{
  if (dump_file && !elt->parent)
    {
      fputs ("Initial instantiation for ", dump_file);
      dump_sra_elt_name (dump_file, elt);
      fputc ('\n', dump_file);
    }

  if (elt->cannot_scalarize)
    return;

  if (elt->is_scalar)
    {
      /* The decision is simple: instantiate if we're used more frequently
	 than the parent needs to be seen as a complete unit.  */
      if (elt->n_uses + elt->n_copies + parent_copies > parent_uses)
	instantiate_element (elt);
    }
  else
    {
      struct sra_elt *c, *group;
      unsigned int this_uses = elt->n_uses + parent_uses;
      unsigned int this_copies = elt->n_copies + parent_copies;

      /* Consider groups of sub-elements as weighing in favour of
	 instantiation whatever their size.  */
      for (group = elt->groups; group ; group = group->sibling)
	FOR_EACH_ACTUAL_CHILD (c, group)
	  {
	    c->n_uses += group->n_uses;
	    c->n_copies += group->n_copies;
	  }

      for (c = elt->children; c ; c = c->sibling)
	decide_instantiation_1 (c, this_uses, this_copies);
    }
}
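
/* Worked example of the heuristic above (illustrative numbers): for "a.b"
   with N_USES(B) = 3 and no copies, where A itself is used as a whole
   twice (PARENT_USES = 2, PARENT_COPIES = 0), we have 3 + 0 + 0 > 2 and B
   is instantiated.  Were A used whole five times, the copy-in/out traffic
   for B would outweigh the benefit and B would be left in place.  */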
/* Compute the size and number of all instantiated elements below ELT.
   We will only care about this if the size of the complete structure
   fits in a HOST_WIDE_INT, so we don't have to worry about overflow.  */

static unsigned int
sum_instantiated_sizes (struct sra_elt *elt, unsigned HOST_WIDE_INT *sizep)
{
  if (elt->replacement)
    {
      *sizep += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (elt->type));
      return 1;
    }
  else
    {
      struct sra_elt *c;
      unsigned int count = 0;

      for (c = elt->children; c ; c = c->sibling)
	count += sum_instantiated_sizes (c, sizep);

      return count;
    }
}
/* Instantiate fields in ELT->TYPE that are not currently present as
   children of ELT.  */

static void instantiate_missing_elements (struct sra_elt *elt);

static void
instantiate_missing_elements_1 (struct sra_elt *elt, tree child, tree type)
{
  struct sra_elt *sub = lookup_element (elt, child, type, INSERT);
  if (sub->is_scalar)
    {
      if (sub->replacement == NULL)
	instantiate_element (sub);
    }
  else
    instantiate_missing_elements (sub);
}

static void
instantiate_missing_elements (struct sra_elt *elt)
{
  tree type = elt->type;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
	tree f;
	for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    instantiate_missing_elements_1 (elt, f, TREE_TYPE (f));
	break;
      }

    case ARRAY_TYPE:
      {
	tree i, max, subtype;

	i = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	max = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	subtype = TREE_TYPE (type);

	while (1)
	  {
	    instantiate_missing_elements_1 (elt, i, subtype);
	    if (tree_int_cst_equal (i, max))
	      break;
	    i = int_const_binop (PLUS_EXPR, i, integer_one_node, true);
	  }

	break;
      }

    case COMPLEX_TYPE:
      type = TREE_TYPE (type);
      instantiate_missing_elements_1 (elt, integer_zero_node, type);
      instantiate_missing_elements_1 (elt, integer_one_node, type);
      break;

    default:
      gcc_unreachable ();
    }
}
/* Return true if there is only one non-aggregate field in the record, TYPE.
   Return false otherwise.  */

static bool
single_scalar_field_in_record_p (tree type)
{
  int num_fields = 0;
  tree field;
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      {
	num_fields++;

	if (num_fields == 2)
	  return false;

	if (AGGREGATE_TYPE_P (TREE_TYPE (field)))
	  return false;
      }

  return true;
}
/* Make one pass across an element tree deciding whether to perform block
   or element copies.  If we decide on element copies, instantiate all
   elements.  Return true if there are any instantiated sub-elements.  */

static bool
decide_block_copy (struct sra_elt *elt)
{
  struct sra_elt *c;
  bool any_inst;

  /* We shouldn't be invoked on groups of sub-elements as they must
     behave like their parent as far as block copy is concerned.  */
  gcc_assert (!elt->is_group);

  /* If scalarization is disabled, respect it.  */
  if (elt->cannot_scalarize)
    {
      elt->use_block_copy = 1;

      if (dump_file)
	{
	  fputs ("Scalarization disabled for ", dump_file);
	  dump_sra_elt_name (dump_file, elt);
	  fputc ('\n', dump_file);
	}

      /* Disable scalarization of sub-elements.  */
      for (c = elt->children; c; c = c->sibling)
	{
	  c->cannot_scalarize = 1;
	  decide_block_copy (c);
	}

      /* Groups behave like their parent.  */
      for (c = elt->groups; c; c = c->sibling)
	{
	  c->cannot_scalarize = 1;
	  c->use_block_copy = 1;
	}

      return false;
    }

  /* Don't decide if we've no uses.  */
  if (elt->n_uses == 0 && elt->n_copies == 0)
    ;

  else if (!elt->is_scalar)
    {
      tree size_tree = TYPE_SIZE_UNIT (elt->type);
      bool use_block_copy = true;

      /* Tradeoffs for COMPLEX types pretty much always make it better
	 to go ahead and split the components.  */
      if (TREE_CODE (elt->type) == COMPLEX_TYPE)
	use_block_copy = false;

      /* Don't bother trying to figure out the rest if the structure is
	 so large we can't do easy arithmetic.  This also forces block
	 copies for variable sized structures.  */
      else if (host_integerp (size_tree, 1))
	{
	  unsigned HOST_WIDE_INT full_size, inst_size = 0;
	  unsigned int max_size, max_count, inst_count, full_count;

	  /* If the sra-max-structure-size parameter is 0, then the
	     user has not overridden the parameter and we can choose a
	     sensible default.  */
	  max_size = SRA_MAX_STRUCTURE_SIZE
		     ? SRA_MAX_STRUCTURE_SIZE
		     : MOVE_RATIO * UNITS_PER_WORD;
	  max_count = SRA_MAX_STRUCTURE_COUNT
		      ? SRA_MAX_STRUCTURE_COUNT
		      : MOVE_RATIO;

	  full_size = tree_low_cst (size_tree, 1);
	  full_count = count_type_elements (elt->type, false);
	  inst_count = sum_instantiated_sizes (elt, &inst_size);

	  /* If there is only one scalar field in the record, don't block copy.  */
	  if (single_scalar_field_in_record_p (elt->type))
	    use_block_copy = false;

	  /* ??? What to do here.  If there are two fields, and we've only
	     instantiated one, then instantiating the other is clearly a win.
	     If there are a large number of fields then the size of the copy
	     is much more of a factor.  */

	  /* If the structure is small, and we've made copies, go ahead
	     and instantiate, hoping that the copies will go away.  */
	  if (full_size <= max_size
	      && (full_count - inst_count) <= max_count
	      && elt->n_copies > elt->n_uses)
	    use_block_copy = false;
	  else if (inst_count * 100 >= full_count * SRA_FIELD_STRUCTURE_RATIO
		   && inst_size * 100 >= full_size * SRA_FIELD_STRUCTURE_RATIO)
	    use_block_copy = false;

	  /* In order to avoid block copy, we have to be able to instantiate
	     all elements of the type.  See if this is possible.  */
	  if (!use_block_copy
	      && (!can_completely_scalarize_p (elt)
		  || !type_can_instantiate_all_elements (elt->type)))
	    use_block_copy = true;
	}

      elt->use_block_copy = use_block_copy;

      /* Groups behave like their parent.  */
      for (c = elt->groups; c; c = c->sibling)
	c->use_block_copy = use_block_copy;

      if (dump_file)
	{
	  fprintf (dump_file, "Using %s for ",
		   use_block_copy ? "block-copy" : "element-copy");
	  dump_sra_elt_name (dump_file, elt);
	  fputc ('\n', dump_file);
	}

      if (!use_block_copy)
	{
	  instantiate_missing_elements (elt);
	  return true;
	}
    }

  any_inst = elt->replacement != NULL;

  for (c = elt->children; c ; c = c->sibling)
    any_inst |= decide_block_copy (c);

  return any_inst;
}
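
/* Worked example of the size test above (illustrative; assumes the
   parameters are at their defaults and MOVE_RATIO is 4 on a target with
   4-byte words, giving MAX_SIZE = 16 and MAX_COUNT = 4): a 12-byte,
   3-field struct that is copied more often than it is used whole passes
   both limits and is fully instantiated; a 64-byte, 16-field struct keeps
   block copy unless the already-instantiated portion passes the
   SRA_FIELD_STRUCTURE_RATIO comparison.  */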
/* Entry point to phase 3.  Instantiate scalar replacement variables.  */

static void
decide_instantiations (void)
{
  unsigned int i;
  bool cleared_any;
  bitmap_head done_head;
  bitmap_iterator bi;

  /* We cannot clear bits from a bitmap we're iterating over,
     so save up all the bits to clear until the end.  */
  bitmap_initialize (&done_head, &bitmap_default_obstack);
  cleared_any = false;

  EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
    {
      tree var = referenced_var (i);
      struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
      if (elt)
	{
	  decide_instantiation_1 (elt, 0, 0);
	  if (!decide_block_copy (elt))
	    elt = NULL;
	}
      if (!elt)
	{
	  bitmap_set_bit (&done_head, i);
	  cleared_any = true;
	}
    }

  if (cleared_any)
    {
      bitmap_and_compl_into (sra_candidates, &done_head);
      bitmap_and_compl_into (needs_copy_in, &done_head);
    }
  bitmap_clear (&done_head);

  if (!bitmap_empty_p (sra_candidates))
    todoflags |= TODO_update_smt_usage;

  mark_set_for_renaming (sra_candidates);

  if (dump_file)
    fputc ('\n', dump_file);
}
/* Phase Four: Update the function to match the replacements created.  */

/* Mark all the variables in VDEF/VUSE operators for STMT for
   renaming.  This becomes necessary when we modify all of a
   non-scalar.  */

static void
mark_all_v_defs_1 (tree stmt)
{
  tree sym;
  ssa_op_iter iter;

  update_stmt_if_modified (stmt);

  FOR_EACH_SSA_TREE_OPERAND (sym, stmt, iter, SSA_OP_ALL_VIRTUALS)
    {
      if (TREE_CODE (sym) == SSA_NAME)
	sym = SSA_NAME_VAR (sym);
      mark_sym_for_renaming (sym);
    }
}

/* Mark all the variables in virtual operands in all the statements in
   LIST for renaming.  */

static void
mark_all_v_defs (tree list)
{
  if (TREE_CODE (list) != STATEMENT_LIST)
    mark_all_v_defs_1 (list);
  else
    {
      tree_stmt_iterator i;
      for (i = tsi_start (list); !tsi_end_p (i); tsi_next (&i))
	mark_all_v_defs_1 (tsi_stmt (i));
    }
}
/* Mark every replacement under ELT with TREE_NO_WARNING.  */

static void
mark_no_warning (struct sra_elt *elt)
{
  if (!elt->all_no_warning)
    {
      if (elt->replacement)
	TREE_NO_WARNING (elt->replacement) = 1;
      else
	{
	  struct sra_elt *c;
	  FOR_EACH_ACTUAL_CHILD (c, elt)
	    mark_no_warning (c);
	}
      elt->all_no_warning = true;
    }
}
/* Build a single level component reference to ELT rooted at BASE.  */

static tree
generate_one_element_ref (struct sra_elt *elt, tree base)
{
  switch (TREE_CODE (TREE_TYPE (base)))
    {
    case RECORD_TYPE:
      {
	tree field = elt->element;

	/* Watch out for compatible records with differing field lists.  */
	if (DECL_FIELD_CONTEXT (field) != TYPE_MAIN_VARIANT (TREE_TYPE (base)))
	  field = find_compatible_field (TREE_TYPE (base), field);

	return build3 (COMPONENT_REF, elt->type, base, field, NULL);
      }

    case ARRAY_TYPE:
      todoflags |= TODO_update_smt_usage;
      if (TREE_CODE (elt->element) == RANGE_EXPR)
	return build4 (ARRAY_RANGE_REF, elt->type, base,
		       TREE_OPERAND (elt->element, 0), NULL, NULL);
      else
	return build4 (ARRAY_REF, elt->type, base, elt->element, NULL, NULL);

    case COMPLEX_TYPE:
      if (elt->element == integer_zero_node)
	return build1 (REALPART_EXPR, elt->type, base);
      else
	return build1 (IMAGPART_EXPR, elt->type, base);

    default:
      gcc_unreachable ();
    }
}

/* Build a full component reference to ELT rooted at its native variable.  */

static tree
generate_element_ref (struct sra_elt *elt)
{
  if (elt->parent)
    return generate_one_element_ref (elt, generate_element_ref (elt->parent));
  else
    return elt->element;
}
/* Generate a set of assignment statements in *LIST_P to copy all
   instantiated elements under ELT to or from the equivalent structure
   rooted at EXPR.  COPY_OUT controls the direction of the copy, with
   true meaning to copy out of EXPR into ELT.  */

static void
generate_copy_inout (struct sra_elt *elt, bool copy_out, tree expr,
		     tree *list_p)
{
  struct sra_elt *c;
  tree t;

  if (!copy_out && TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == COMPLEX_TYPE)
    {
      tree r, i;

      c = lookup_element (elt, integer_zero_node, NULL, NO_INSERT);
      r = c->replacement;
      c = lookup_element (elt, integer_one_node, NULL, NO_INSERT);
      i = c->replacement;

      t = build2 (COMPLEX_EXPR, elt->type, r, i);
      t = build2 (GIMPLE_MODIFY_STMT, void_type_node, expr, t);
      SSA_NAME_DEF_STMT (expr) = t;
      append_to_statement_list (t, list_p);
    }
  else if (elt->replacement)
    {
      if (copy_out)
	t = build2 (GIMPLE_MODIFY_STMT, void_type_node, elt->replacement, expr);
      else
	t = build2 (GIMPLE_MODIFY_STMT, void_type_node, expr, elt->replacement);
      append_to_statement_list (t, list_p);
    }
  else
    {
      FOR_EACH_ACTUAL_CHILD (c, elt)
	{
	  t = generate_one_element_ref (c, unshare_expr (expr));
	  generate_copy_inout (c, copy_out, t, list_p);
	}
    }
}
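
/* Sketch of the generated statements (illustrative): for a "struct point p"
   whose fields have replacements p$x and p$y, COPY_OUT = true appends

       p$x = p.x;
       p$y = p.y;

   and COPY_OUT = false appends the reverse assignments, storing the
   scalars back into the aggregate.  The SSA_NAME/COMPLEX_TYPE special
   case above instead emits a single COMPLEX_EXPR assignment.  */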
/* Generate a set of assignment statements in *LIST_P to copy all instantiated
   elements under SRC to their counterparts under DST.  There must be a 1-1
   correspondence of instantiated elements.  */

static void
generate_element_copy (struct sra_elt *dst, struct sra_elt *src, tree *list_p)
{
  struct sra_elt *dc, *sc;

  FOR_EACH_ACTUAL_CHILD (dc, dst)
    {
      sc = lookup_element (src, dc->element, NULL, NO_INSERT);
      gcc_assert (sc);
      generate_element_copy (dc, sc, list_p);
    }

  if (dst->replacement)
    {
      tree t;

      gcc_assert (src->replacement);

      t = build2 (GIMPLE_MODIFY_STMT, void_type_node, dst->replacement,
		  src->replacement);
      append_to_statement_list (t, list_p);
    }
}

/* Generate a set of assignment statements in *LIST_P to zero all instantiated
   elements under ELT.  In addition, do not assign to elements that have been
   marked VISITED but do reset the visited flag; this allows easy coordination
   with generate_element_init.  */

static void
generate_element_zero (struct sra_elt *elt, tree *list_p)
{
  struct sra_elt *c;

  if (elt->visited)
    {
      elt->visited = false;
      return;
    }

  FOR_EACH_ACTUAL_CHILD (c, elt)
    generate_element_zero (c, list_p);

  if (elt->replacement)
    {
      tree t;

      gcc_assert (elt->is_scalar);
      t = fold_convert (elt->type, integer_zero_node);

      t = build2 (GIMPLE_MODIFY_STMT, void_type_node, elt->replacement, t);
      append_to_statement_list (t, list_p);
    }
}
/* Generate an assignment VAR = INIT, where INIT may need gimplification.
   Add the result to *LIST_P.  */

static void
generate_one_element_init (tree var, tree init, tree *list_p)
{
  /* The replacement can be almost arbitrarily complex.  Gimplify.  */
  tree stmt = build2 (GIMPLE_MODIFY_STMT, void_type_node, var, init);
  gimplify_and_add (stmt, list_p);
}

/* Generate a set of assignment statements in *LIST_P to set all instantiated
   elements under ELT with the contents of the initializer INIT.  In addition,
   mark all assigned elements VISITED; this allows easy coordination with
   generate_element_zero.  Return false if we found a case we couldn't
   handle.  */

static bool
generate_element_init_1 (struct sra_elt *elt, tree init, tree *list_p)
{
  bool result = true;
  enum tree_code init_code;
  struct sra_elt *sub;
  tree t;
  unsigned HOST_WIDE_INT idx;
  tree value, purpose;

  /* We can be passed DECL_INITIAL of a static variable.  It might have a
     conversion, which we strip off here.  */
  STRIP_USELESS_TYPE_CONVERSION (init);
  init_code = TREE_CODE (init);

  if (elt->is_scalar)
    {
      if (elt->replacement)
	{
	  generate_one_element_init (elt->replacement, init, list_p);
	  elt->visited = true;
	}
      return result;
    }

  switch (init_code)
    {
    case COMPLEX_CST:
    case COMPLEX_EXPR:
      FOR_EACH_ACTUAL_CHILD (sub, elt)
	{
	  if (sub->element == integer_zero_node)
	    t = (init_code == COMPLEX_EXPR
		 ? TREE_OPERAND (init, 0) : TREE_REALPART (init));
	  else
	    t = (init_code == COMPLEX_EXPR
		 ? TREE_OPERAND (init, 1) : TREE_IMAGPART (init));
	  result &= generate_element_init_1 (sub, t, list_p);
	}
      break;

    case CONSTRUCTOR:
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), idx, purpose, value)
	{
	  if (TREE_CODE (purpose) == RANGE_EXPR)
	    {
	      tree lower = TREE_OPERAND (purpose, 0);
	      tree upper = TREE_OPERAND (purpose, 1);

	      while (1)
		{
		  sub = lookup_element (elt, lower, NULL, NO_INSERT);
		  if (sub != NULL)
		    result &= generate_element_init_1 (sub, value, list_p);
		  if (tree_int_cst_equal (lower, upper))
		    break;
		  lower = int_const_binop (PLUS_EXPR, lower,
					   integer_one_node, true);
		}
	    }
	  else
	    {
	      sub = lookup_element (elt, purpose, NULL, NO_INSERT);
	      if (sub != NULL)
		result &= generate_element_init_1 (sub, value, list_p);
	    }
	}
      break;

    default:
      elt->visited = true;
      result = false;
    }

  return result;
}
/* A wrapper function for generate_element_init_1 that handles cleanup after
   gimplification.  */

static bool
generate_element_init (struct sra_elt *elt, tree init, tree *list_p)
{
  bool ret;

  push_gimplify_context ();
  ret = generate_element_init_1 (elt, init, list_p);
  pop_gimplify_context (NULL);

  /* The replacement can expose previously unreferenced variables.  */
  if (ret && *list_p)
    {
      tree_stmt_iterator i;

      for (i = tsi_start (*list_p); !tsi_end_p (i); tsi_next (&i))
	find_new_referenced_vars (tsi_stmt_ptr (i));
    }

  return ret;
}
/* Insert STMT on all the outgoing edges out of BB.  Note that if BB
   has more than one edge, STMT will be replicated for each edge.  Also,
   abnormal edges will be ignored.  */

void
insert_edge_copies (tree stmt, basic_block bb)
{
  edge e;
  edge_iterator ei;
  bool first_copy;

  first_copy = true;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      /* We don't need to insert copies on abnormal edges.  The
	 value of the scalar replacement is not guaranteed to
	 be valid through an abnormal edge.  */
      if (!(e->flags & EDGE_ABNORMAL))
	{
	  if (first_copy)
	    {
	      bsi_insert_on_edge (e, stmt);
	      first_copy = false;
	    }
	  else
	    bsi_insert_on_edge (e, unsave_expr_now (stmt));
	}
    }
}

/* Helper function to insert LIST before BSI, and set up line number info.  */

void
sra_insert_before (block_stmt_iterator *bsi, tree list)
{
  tree stmt = bsi_stmt (*bsi);

  if (EXPR_HAS_LOCATION (stmt))
    annotate_all_with_locus (&list, EXPR_LOCATION (stmt));
  bsi_insert_before (bsi, list, BSI_SAME_STMT);
}

/* Similarly, but insert after BSI.  Handles insertion onto edges as well.  */

void
sra_insert_after (block_stmt_iterator *bsi, tree list)
{
  tree stmt = bsi_stmt (*bsi);

  if (EXPR_HAS_LOCATION (stmt))
    annotate_all_with_locus (&list, EXPR_LOCATION (stmt));

  if (stmt_ends_bb_p (stmt))
    insert_edge_copies (list, bsi->bb);
  else
    bsi_insert_after (bsi, list, BSI_SAME_STMT);
}

/* Similarly, but replace the statement at BSI.  */

static void
sra_replace (block_stmt_iterator *bsi, tree list)
{
  sra_insert_before (bsi, list);
  bsi_remove (bsi, false);
  if (bsi_end_p (*bsi))
    *bsi = bsi_last (bsi->bb);
  else
    bsi_prev (bsi);
}
/* Scalarize a USE.  To recap, this is either a simple reference to ELT,
   if elt is scalar, or some occurrence of ELT that requires a complete
   aggregate.  IS_OUTPUT is true if ELT is being modified.  */

static void
scalarize_use (struct sra_elt *elt, tree *expr_p, block_stmt_iterator *bsi,
	       bool is_output, bool use_all)
{
  tree list = NULL, stmt = bsi_stmt (*bsi);

  if (elt->replacement)
    {
      /* If we have a replacement, then updating the reference is as
	 simple as modifying the existing statement in place.  */
      if (is_output)
	mark_all_v_defs (stmt);
      *expr_p = elt->replacement;
      update_stmt (stmt);
    }
  else
    {
      /* Otherwise we need some copies.  If ELT is being read, then we want
	 to store all (modified) sub-elements back into the structure before
	 the reference takes place.  If ELT is being written, then we want to
	 load the changed values back into our shadow variables.  */
      /* ??? We don't check modified for reads, we just always write all of
	 the values.  We should be able to record the SSA number of the VOP
	 for which the values were last read.  If that number matches the
	 SSA number of the VOP in the current statement, then we needn't
	 emit an assignment.  This would also eliminate double writes when
	 a structure is passed as more than one argument to a function call.
	 This optimization would be most effective if sra_walk_function
	 processed the blocks in dominator order.  */

      generate_copy_inout (elt, is_output, generate_element_ref (elt), &list);
      if (list == NULL)
	return;
      mark_all_v_defs (list);
      if (is_output)
	sra_insert_after (bsi, list);
      else
	{
	  sra_insert_before (bsi, list);
	  if (use_all)
	    mark_no_warning (elt);
	}
    }
}
/* Scalarize a COPY.  To recap, this is an assignment statement between
   two scalarizable references, LHS_ELT and RHS_ELT.  */

static void
scalarize_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
                block_stmt_iterator *bsi)
{
  tree list, stmt;

  if (lhs_elt->replacement && rhs_elt->replacement)
    {
      /* If we have two scalar operands, modify the existing statement.  */
      stmt = bsi_stmt (*bsi);

      /* See the commentary in sra_walk_function concerning
         RETURN_EXPR, and why we should never see one here.  */
      gcc_assert (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT);

      GIMPLE_STMT_OPERAND (stmt, 0) = lhs_elt->replacement;
      GIMPLE_STMT_OPERAND (stmt, 1) = rhs_elt->replacement;
      update_stmt (stmt);
    }
  else if (lhs_elt->use_block_copy || rhs_elt->use_block_copy)
    {
      /* If either side requires a block copy, then sync the RHS back
         to the original structure, leave the original assignment
         statement (which will perform the block copy), then load the
         LHS values out of its now-updated original structure.  */
      /* ??? Could perform a modified pair-wise element copy.  That
         would at least allow those elements that are instantiated in
         both structures to be optimized well.  */

      list = NULL;
      generate_copy_inout (rhs_elt, false,
                           generate_element_ref (rhs_elt), &list);
      if (list)
        {
          mark_all_v_defs (list);
          sra_insert_before (bsi, list);
        }

      list = NULL;
      generate_copy_inout (lhs_elt, true,
                           generate_element_ref (lhs_elt), &list);
      if (list)
        {
          mark_all_v_defs (list);
          sra_insert_after (bsi, list);
        }
    }
  else
    {
      /* Otherwise both sides must be fully instantiated, in which case
         we perform pair-wise element assignments and replace the
         original block copy statement.  */

      stmt = bsi_stmt (*bsi);
      mark_all_v_defs (stmt);

      list = NULL;
      generate_element_copy (lhs_elt, rhs_elt, &list);
      gcc_assert (list);
      mark_all_v_defs (list);
      sra_replace (bsi, list);
    }
}
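
/* For illustration only (made-up names): when both `a' and `b' are
   fully instantiated, the block copy

     a = b;

   is replaced by pair-wise element assignments such as

     a$x = b$x;
     a$y = b$y;

   If either side uses a block copy instead, the original assignment is
   kept and only the instantiated elements are synced around it.  */
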
/* Scalarize an INIT.  To recap, this is an assignment to a scalarizable
   reference from some form of constructor: CONSTRUCTOR, COMPLEX_CST or
   COMPLEX_EXPR.  If RHS is NULL, it should be treated as an empty
   CONSTRUCTOR.  */

static void
scalarize_init (struct sra_elt *lhs_elt, tree rhs, block_stmt_iterator *bsi)
{
  bool result = true;
  tree list = NULL;

  /* Generate initialization statements for all members extant in the RHS.  */
  if (rhs)
    {
      /* Unshare the expression just in case this came from a decl's
         DECL_INITIAL.  */
      rhs = unshare_expr (rhs);
      result = generate_element_init (lhs_elt, rhs, &list);
    }

  /* CONSTRUCTOR is defined such that any member not mentioned is assigned
     a zero value.  Initialize the rest of the instantiated elements.  */
  generate_element_zero (lhs_elt, &list);

  if (!result)
    {
      /* If we failed to convert the entire initializer, then we must
         leave the structure assignment in place and must load values
         from the structure into the slots for which we did not find
         constants.  The easiest way to do this is to generate a complete
         copy-out, and then follow that with the constant assignments
         that we were able to build.  DCE will clean things up.  */
      tree list0 = NULL;
      generate_copy_inout (lhs_elt, true, generate_element_ref (lhs_elt),
                           &list0);
      append_to_statement_list (list, &list0);
      list = list0;
    }

  if (lhs_elt->use_block_copy || !result)
    {
      /* Since LHS is not fully instantiated, we must leave the structure
         assignment in place.  Treating this case differently from a USE
         exposes constants to later optimizations.  */
      if (list)
        {
          mark_all_v_defs (list);
          sra_insert_after (bsi, list);
        }
    }
  else
    {
      /* The LHS is fully instantiated.  The list of initializations
         replaces the original structure assignment.  */
      gcc_assert (list);
      mark_all_v_defs (bsi_stmt (*bsi));
      mark_all_v_defs (list);
      sra_replace (bsi, list);
    }
}
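
/* For illustration only (made-up names): if `s' is fully instantiated,
   an initialization from a CONSTRUCTOR such as

     s = {.x = 1};

   is replaced by

     s$x = 1;
     s$y = 0;

   where the zero store comes from generate_element_zero, since members
   not mentioned in a CONSTRUCTOR are implicitly zero-initialized.  */
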
/* A subroutine of scalarize_ldst called via walk_tree.  Set
   TREE_THIS_NOTRAP on all INDIRECT_REFs.  */

static tree
mark_notrap (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (TREE_CODE (t) == INDIRECT_REF)
    {
      TREE_THIS_NOTRAP (t) = 1;
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;

  return NULL;
}
/* Scalarize a LDST.  To recap, this is an assignment between one
   scalarizable reference ELT and one non-scalarizable reference OTHER.
   IS_OUTPUT is true if ELT is on the left-hand side.  */

static void
scalarize_ldst (struct sra_elt *elt, tree other,
                block_stmt_iterator *bsi, bool is_output)
{
  /* Shouldn't have gotten called for a scalar.  */
  gcc_assert (!elt->replacement);

  if (elt->use_block_copy)
    {
      /* Since ELT is not fully instantiated, we have to leave the
         block copy in place.  Treat this as a USE.  */
      scalarize_use (elt, NULL, bsi, is_output, false);
    }
  else
    {
      /* The interesting case is when ELT is fully instantiated.  In this
         case we can have each element stored/loaded directly to/from the
         corresponding slot in OTHER.  This avoids a block copy.  */

      tree list = NULL, stmt = bsi_stmt (*bsi);

      mark_all_v_defs (stmt);
      generate_copy_inout (elt, is_output, other, &list);
      gcc_assert (list);
      mark_all_v_defs (list);

      /* Preserve EH semantics.  */
      if (stmt_ends_bb_p (stmt))
        {
          tree_stmt_iterator tsi;
          tree first;

          /* Extract the first statement from LIST.  */
          tsi = tsi_start (list);
          first = tsi_stmt (tsi);
          tsi_delink (&tsi);

          /* Replace the old statement with this new representative.  */
          bsi_replace (bsi, first, true);

          if (!tsi_end_p (tsi))
            {
              /* If any reference would trap, then they all would.  And more
                 to the point, the first would.  Therefore none of the rest
                 will trap since the first didn't.  Indicate this by
                 iterating over the remaining statements and setting
                 TREE_THIS_NOTRAP in all INDIRECT_REFs.  */
              do
                {
                  walk_tree (tsi_stmt_ptr (tsi), mark_notrap, NULL, NULL);
                  tsi_next (&tsi);
                }
              while (!tsi_end_p (tsi));

              insert_edge_copies (list, bsi->bb);
            }
        }
      else
        sra_replace (bsi, list);
    }
}
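
/* For illustration only (made-up names): if `s' is fully instantiated
   and `p' points to a non-scalarizable aggregate of the same type, the
   load

     s = *p;

   is replaced by direct element loads, avoiding the block copy:

     s$x = p->x;
     s$y = p->y;  */
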
/* Generate initializations for all scalarizable parameters.  */

static void
scalarize_parms (void)
{
  tree list = NULL;
  unsigned i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (needs_copy_in, 0, i, bi)
    {
      tree var = referenced_var (i);
      struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
      generate_copy_inout (elt, true, var, &list);
    }

  if (list)
    {
      insert_edge_copies (list, ENTRY_BLOCK_PTR);
      mark_all_v_defs (list);
    }
}
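
/* For illustration only (made-up names): if the parameter `s' was
   scalarized, the copy-in emitted on the edge out of the entry block
   looks like

     s$x = s.x;
     s$y = s.y;

   so that the replacement variables start out with the values the
   caller passed in.  */
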
/* Entry point to phase 4.  Update the function to match replacements.  */

static void
scalarize_function (void)
{
  static const struct sra_walk_fns fns = {
    scalarize_use, scalarize_copy, scalarize_init, scalarize_ldst, false
  };

  sra_walk_function (&fns);
  scalarize_parms ();
  bsi_commit_edge_inserts ();
}
/* Debug helper function.  Print ELT in a human-readable format.  */

static void
dump_sra_elt_name (FILE *f, struct sra_elt *elt)
{
  if (elt->parent && TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
    {
      fputs (elt->element == integer_zero_node ? "__real__ " : "__imag__ ", f);
      dump_sra_elt_name (f, elt->parent);
    }
  else
    {
      if (elt->parent)
        dump_sra_elt_name (f, elt->parent);
      if (DECL_P (elt->element))
        {
          if (TREE_CODE (elt->element) == FIELD_DECL)
            fputc ('.', f);
          print_generic_expr (f, elt->element, dump_flags);
        }
      else if (TREE_CODE (elt->element) == RANGE_EXPR)
        fprintf (f, "[" HOST_WIDE_INT_PRINT_DEC ".." HOST_WIDE_INT_PRINT_DEC "]",
                 TREE_INT_CST_LOW (TREE_OPERAND (elt->element, 0)),
                 TREE_INT_CST_LOW (TREE_OPERAND (elt->element, 1)));
      else
        fprintf (f, "[" HOST_WIDE_INT_PRINT_DEC "]",
                 TREE_INT_CST_LOW (elt->element));
    }
}
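
/* For example (illustrative): the element for field `x' of field
   `inner' of variable `s' prints as `s.inner.x', an array element
   prints as `a[3]', a RANGE_EXPR element as `a[0..7]', and the real
   part of a complex variable `c' prints as `__real__ c'.  */
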
/* Likewise, but callable from the debugger.  */

void
debug_sra_elt_name (struct sra_elt *elt)
{
  dump_sra_elt_name (stderr, elt);
  fputc ('\n', stderr);
}
/* Lazily allocate the type caches; calling this more than once is safe
   and cheap.  */

void
sra_init_cache (void)
{
  if (sra_type_decomp_cache)
    return;

  sra_type_decomp_cache = BITMAP_ALLOC (NULL);
  sra_type_inst_cache = BITMAP_ALLOC (NULL);
}
/* Main entry point.  */

static unsigned int
tree_sra (void)
{
  /* Initialize local variables.  */
  todoflags = 0;
  gcc_obstack_init (&sra_obstack);
  sra_candidates = BITMAP_ALLOC (NULL);
  needs_copy_in = BITMAP_ALLOC (NULL);
  sra_init_cache ();
  sra_map = htab_create (101, sra_elt_hash, sra_elt_eq, NULL);

  /* Scan.  If we find anything, instantiate and scalarize.  */
  if (find_candidates_for_sra ())
    {
      scan_function ();
      decide_instantiations ();
      scalarize_function ();
    }

  /* Free allocated memory.  */
  htab_delete (sra_map);
  sra_map = NULL;
  BITMAP_FREE (sra_candidates);
  BITMAP_FREE (needs_copy_in);
  BITMAP_FREE (sra_type_decomp_cache);
  BITMAP_FREE (sra_type_inst_cache);
  obstack_free (&sra_obstack, NULL);

  return todoflags;
}
/* Gate function: run the pass only when -ftree-sra is enabled.  */

static bool
gate_sra (void)
{
  return flag_tree_sra != 0;
}
struct tree_opt_pass pass_sra =
{
  "sra",				/* name */
  gate_sra,				/* gate */
  tree_sra,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_SRA,				/* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
  | TODO_update_ssa
  | TODO_ggc_collect
  | TODO_verify_ssa,			/* todo_flags_finish */
  0					/* letter */
};