/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "ggc.h"
#include "tree.h"

/* These RTL headers are needed for basic-block.h.  */
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "langhooks.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "tree-gimple.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "timevar.h"
#include "flags.h"
#include "bitmap.h"
#include "obstack.h"
#include "target.h"
/* expr.h is needed for MOVE_RATIO.  */
#include "expr.h"
#include "params.h"

/* The objective of this pass is to replace a non-addressable aggregate with a
   set of independent variables.  Most of the time, all of these variables
   will be scalars.  But a secondary objective is to break up larger
   aggregates into smaller aggregates.  In the process we may find that some
   bits of the larger aggregate can be deleted as unreferenced.

   This substitution is done globally.  More localized substitutions would
   be the purview of a load-store motion pass.

   The optimization proceeds in phases:

     (1) Identify variables that have types that are candidates for
	 decomposition.

     (2) Scan the function looking for the ways these variables are used.
	 In particular we're interested in the number of times a variable
	 (or member) is needed as a complete unit, and the number of times
	 a variable (or member) is copied.

     (3) Based on the usage profile, instantiate substitution variables.

     (4) Scan the function making replacements.  */
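
/* As a rough illustration (variable names are hypothetical), given

       struct { float x, y; } p;
       p.x = 3.0;
       p.y = 4.0;
       d = p.x * p.x + p.y * p.y;

   the pass would replace the members with scalar temporaries, yielding
   something like

       p$x = 3.0;
       p$y = 4.0;
       d = p$x * p$x + p$y * p$y;

   after which "p" itself may become dead.  */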

/* The set of todo flags to return from tree_sra.  */
static unsigned int todoflags;

/* The set of aggregate variables that are candidates for scalarization.  */
static bitmap sra_candidates;

/* Set of scalarizable PARM_DECLs that need copy-in operations at the
   beginning of the function.  */
static bitmap needs_copy_in;

/* Sets of bit pairs that cache type decomposition and instantiation.  */
static bitmap sra_type_decomp_cache;
static bitmap sra_type_inst_cache;

/* One of these structures is created for each candidate aggregate and
   each (accessed) member or group of members of such an aggregate.  */
struct sra_elt
{
  /* A tree of the elements.  Used when we want to traverse everything.  */
  struct sra_elt *parent;
  struct sra_elt *groups;
  struct sra_elt *children;
  struct sra_elt *sibling;

  /* If this element is a root, then this is the VAR_DECL.  If this is
     a sub-element, this is some token used to identify the reference.
     In the case of COMPONENT_REF, this is the FIELD_DECL.  In the case
     of an ARRAY_REF, this is the (constant) index.  In the case of an
     ARRAY_RANGE_REF, this is the (constant) RANGE_EXPR.  In the case
     of a complex number, this is a zero or one.  */
  tree element;

  /* The type of the element.  */
  tree type;

  /* A VAR_DECL, for any sub-element we've decided to replace.  */
  tree replacement;

  /* The number of times the element is referenced as a whole.  I.e.
     given "a.b.c", this would be incremented for C, but not for A or B.  */
  unsigned int n_uses;

  /* The number of times the element is copied to or from another
     scalarizable element.  */
  unsigned int n_copies;

  /* True if TYPE is scalar.  */
  bool is_scalar;

  /* True if this element is a group of members of its parent.  */
  bool is_group;

  /* True if we saw something about this element that prevents scalarization,
     such as non-constant indexing.  */
  bool cannot_scalarize;

  /* True if we've decided that structure-to-structure assignment
     should happen via memcpy and not per-element.  */
  bool use_block_copy;

  /* True if everything under this element has been marked TREE_NO_WARNING.  */
  bool all_no_warning;

  /* A flag for use with/after random access traversals.  */
  bool visited;

  /* True if there is a BIT_FIELD_REF on the LHS with a vector.  */
  bool is_vector_lhs;
};

#define IS_ELEMENT_FOR_GROUP(ELEMENT) (TREE_CODE (ELEMENT) == RANGE_EXPR)

#define FOR_EACH_ACTUAL_CHILD(CHILD, ELT)			\
  for ((CHILD) = (ELT)->is_group				\
		 ? next_child_for_group (NULL, (ELT))		\
		 : (ELT)->children;				\
       (CHILD);							\
       (CHILD) = (ELT)->is_group				\
		 ? next_child_for_group ((CHILD), (ELT))	\
		 : (CHILD)->sibling)
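
/* For illustration, a typical traversal over the actual children of an
   element looks like this (hypothetical names):

       struct sra_elt *c;
       FOR_EACH_ACTUAL_CHILD (c, elt)
	 process (c);

   For a group element this visits only the parent's children that fall
   within the group's RANGE_EXPR; otherwise it walks the plain sibling
   list.  */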

/* Helper function for above macro.  Return next child in group.  */
static struct sra_elt *
next_child_for_group (struct sra_elt *child, struct sra_elt *group)
{
  gcc_assert (group->is_group);

  /* Find the next child in the parent.  */
  if (child)
    child = child->sibling;
  else
    child = group->parent->children;

  /* Skip siblings that do not belong to the group.  */
  while (child)
    {
      tree g_elt = group->element;
      if (TREE_CODE (g_elt) == RANGE_EXPR)
	{
	  if (!tree_int_cst_lt (child->element, TREE_OPERAND (g_elt, 0))
	      && !tree_int_cst_lt (TREE_OPERAND (g_elt, 1), child->element))
	    break;
	}
      else
	gcc_unreachable ();

      child = child->sibling;
    }

  return child;
}

/* Random access to the child of a parent is performed by hashing.
   This prevents quadratic behavior, and allows SRA to function
   reasonably on larger records.  */
static htab_t sra_map;

/* All structures are allocated out of the following obstack.  */
static struct obstack sra_obstack;

/* Debugging functions.  */
static void dump_sra_elt_name (FILE *, struct sra_elt *);
extern void debug_sra_elt_name (struct sra_elt *);

/* Forward declarations.  */
static tree generate_element_ref (struct sra_elt *);

/* Return true if DECL is an SRA candidate.  */

static bool
is_sra_candidate_decl (tree decl)
{
  return DECL_P (decl) && bitmap_bit_p (sra_candidates, DECL_UID (decl));
}

/* Return true if TYPE is a scalar type.  */

static bool
is_sra_scalar_type (tree type)
{
  enum tree_code code = TREE_CODE (type);
  return (code == INTEGER_TYPE || code == REAL_TYPE || code == VECTOR_TYPE
	  || code == ENUMERAL_TYPE || code == BOOLEAN_TYPE
	  || code == POINTER_TYPE || code == OFFSET_TYPE
	  || code == REFERENCE_TYPE);
}

/* Return true if TYPE can be decomposed into a set of independent variables.

   Note that this doesn't imply that all elements of TYPE can be
   instantiated, just that if we decide to break up the type into
   separate pieces that it can be done.  */

bool
sra_type_can_be_decomposed_p (tree type)
{
  unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
  tree t;

  /* Avoid searching the same type twice.  */
  if (bitmap_bit_p (sra_type_decomp_cache, cache+0))
    return true;
  if (bitmap_bit_p (sra_type_decomp_cache, cache+1))
    return false;

  /* The type must have a definite nonzero size.  */
  if (TYPE_SIZE (type) == NULL || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
      || integer_zerop (TYPE_SIZE (type)))
    goto fail;

  /* The type must be a non-union aggregate.  */
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
	bool saw_one_field = false;

	for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
	  if (TREE_CODE (t) == FIELD_DECL)
	    {
	      /* Reject incorrectly represented bit fields.  */
	      if (DECL_BIT_FIELD (t)
		  && (tree_low_cst (DECL_SIZE (t), 1)
		      != TYPE_PRECISION (TREE_TYPE (t))))
		goto fail;

	      saw_one_field = true;
	    }

	/* Record types must have at least one field.  */
	if (!saw_one_field)
	  goto fail;
      }
      break;

    case ARRAY_TYPE:
      /* Array types must have a fixed lower and upper bound.  */
      t = TYPE_DOMAIN (type);
      if (t == NULL)
	goto fail;
      if (TYPE_MIN_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MIN_VALUE (t)))
	goto fail;
      if (TYPE_MAX_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MAX_VALUE (t)))
	goto fail;
      break;

    case COMPLEX_TYPE:
      break;

    default:
      goto fail;
    }

  bitmap_set_bit (sra_type_decomp_cache, cache+0);
  return true;

 fail:
  bitmap_set_bit (sra_type_decomp_cache, cache+1);
  return false;
}

/* Return true if DECL can be decomposed into a set of independent
   (though not necessarily scalar) variables.  */

static bool
decl_can_be_decomposed_p (tree var)
{
  /* Early out for scalars.  */
  if (is_sra_scalar_type (TREE_TYPE (var)))
    return false;

  /* The variable must not be aliased.  */
  if (!is_gimple_non_addressable (var))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Cannot scalarize variable ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, " because it must live in memory\n");
	}
      return false;
    }

  /* The variable must not be volatile.  */
  if (TREE_THIS_VOLATILE (var))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Cannot scalarize variable ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, " because it is declared volatile\n");
	}
      return false;
    }

  /* We must be able to decompose the variable's type.  */
  if (!sra_type_can_be_decomposed_p (TREE_TYPE (var)))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Cannot scalarize variable ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, " because its type cannot be decomposed\n");
	}
      return false;
    }

  return true;
}

/* Return true if TYPE can be *completely* decomposed into scalars.  */

static bool
type_can_instantiate_all_elements (tree type)
{
  if (is_sra_scalar_type (type))
    return true;
  if (!sra_type_can_be_decomposed_p (type))
    return false;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
	unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
	tree f;

	if (bitmap_bit_p (sra_type_inst_cache, cache+0))
	  return true;
	if (bitmap_bit_p (sra_type_inst_cache, cache+1))
	  return false;

	for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      if (!type_can_instantiate_all_elements (TREE_TYPE (f)))
		{
		  bitmap_set_bit (sra_type_inst_cache, cache+1);
		  return false;
		}
	    }

	bitmap_set_bit (sra_type_inst_cache, cache+0);
	return true;
      }

    case ARRAY_TYPE:
      return type_can_instantiate_all_elements (TREE_TYPE (type));

    case COMPLEX_TYPE:
      return true;

    default:
      gcc_unreachable ();
    }
}

/* Return true unless ELT or some sub-element cannot be scalarized.  */

static bool
can_completely_scalarize_p (struct sra_elt *elt)
{
  struct sra_elt *c;

  if (elt->cannot_scalarize)
    return false;

  for (c = elt->children; c; c = c->sibling)
    if (!can_completely_scalarize_p (c))
      return false;

  for (c = elt->groups; c; c = c->sibling)
    if (!can_completely_scalarize_p (c))
      return false;

  return true;
}

/* A simplified tree hashing algorithm that only handles the types of
   trees we expect to find in sra_elt->element.  */

static hashval_t
sra_hash_tree (tree t)
{
  hashval_t h;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      h = DECL_UID (t);
      break;

    case INTEGER_CST:
      h = TREE_INT_CST_LOW (t) ^ TREE_INT_CST_HIGH (t);
      break;

    case RANGE_EXPR:
      h = iterative_hash_expr (TREE_OPERAND (t, 0), 0);
      h = iterative_hash_expr (TREE_OPERAND (t, 1), h);
      break;

    case FIELD_DECL:
      /* We can have types that are compatible, but have different member
	 lists, so we can't hash fields by ID.  Use offsets instead.  */
      h = iterative_hash_expr (DECL_FIELD_OFFSET (t), 0);
      h = iterative_hash_expr (DECL_FIELD_BIT_OFFSET (t), h);
      break;

    default:
      gcc_unreachable ();
    }

  return h;
}

/* Hash function for type SRA_PAIR.  */

static hashval_t
sra_elt_hash (const void *x)
{
  const struct sra_elt *e = x;
  const struct sra_elt *p;
  hashval_t h;

  h = sra_hash_tree (e->element);

  /* Take into account everything back up the chain.  Given that chain
     lengths are rarely very long, this should be acceptable.  If we
     truly identify this as a performance problem, it should work to
     hash the pointer value "e->parent".  */
  for (p = e->parent; p ; p = p->parent)
    h = (h * 65521) ^ sra_hash_tree (p->element);

  return h;
}

/* Equality function for type SRA_PAIR.  */

static int
sra_elt_eq (const void *x, const void *y)
{
  const struct sra_elt *a = x;
  const struct sra_elt *b = y;
  tree ae, be;

  if (a->parent != b->parent)
    return false;

  ae = a->element;
  be = b->element;

  if (ae == be)
    return true;
  if (TREE_CODE (ae) != TREE_CODE (be))
    return false;

  switch (TREE_CODE (ae))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* These are all pointer unique.  */
      return false;

    case INTEGER_CST:
      /* Integers are not pointer unique, so compare their values.  */
      return tree_int_cst_equal (ae, be);

    case RANGE_EXPR:
      return
	tree_int_cst_equal (TREE_OPERAND (ae, 0), TREE_OPERAND (be, 0))
	&& tree_int_cst_equal (TREE_OPERAND (ae, 1), TREE_OPERAND (be, 1));

    case FIELD_DECL:
      /* Fields are unique within a record, but not between
	 compatible records.  */
      if (DECL_FIELD_CONTEXT (ae) == DECL_FIELD_CONTEXT (be))
	return false;
      return fields_compatible_p (ae, be);

    default:
      gcc_unreachable ();
    }
}

/* Create or return the SRA_ELT structure for CHILD in PARENT.  PARENT
   may be null, in which case CHILD must be a DECL.  */

static struct sra_elt *
lookup_element (struct sra_elt *parent, tree child, tree type,
		enum insert_option insert)
{
  struct sra_elt dummy;
  struct sra_elt **slot;
  struct sra_elt *elt;

  if (parent)
    dummy.parent = parent->is_group ? parent->parent : parent;
  else
    dummy.parent = NULL;
  dummy.element = child;

  slot = (struct sra_elt **) htab_find_slot (sra_map, &dummy, insert);
  if (!slot && insert == NO_INSERT)
    return NULL;

  elt = *slot;
  if (!elt && insert == INSERT)
    {
      *slot = elt = obstack_alloc (&sra_obstack, sizeof (*elt));
      memset (elt, 0, sizeof (*elt));

      elt->parent = parent;
      elt->element = child;
      elt->type = type;
      elt->is_scalar = is_sra_scalar_type (type);

      if (parent)
	{
	  if (IS_ELEMENT_FOR_GROUP (elt->element))
	    {
	      elt->is_group = true;
	      elt->sibling = parent->groups;
	      parent->groups = elt;
	    }
	  else
	    {
	      elt->sibling = parent->children;
	      parent->children = elt;
	    }
	}

      /* If this is a parameter, then if we want to scalarize, we have
	 one copy from the true function parameter.  Count it now.  */
      if (TREE_CODE (child) == PARM_DECL)
	{
	  elt->n_copies = 1;
	  bitmap_set_bit (needs_copy_in, DECL_UID (child));
	}
    }

  return elt;
}

/* Create or return the SRA_ELT structure for EXPR if the expression
   refers to a scalarizable variable.  */

static struct sra_elt *
maybe_lookup_element_for_expr (tree expr)
{
  struct sra_elt *elt;
  tree child;

  switch (TREE_CODE (expr))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      if (is_sra_candidate_decl (expr))
	return lookup_element (NULL, expr, TREE_TYPE (expr), INSERT);
      return NULL;

    case ARRAY_REF:
      /* We can't scalarize variable array indices.  */
      if (in_array_bounds_p (expr))
	child = TREE_OPERAND (expr, 1);
      else
	return NULL;
      break;

    case ARRAY_RANGE_REF:
      /* We can't scalarize variable array indices.  */
      if (range_in_array_bounds_p (expr))
	{
	  tree domain = TYPE_DOMAIN (TREE_TYPE (expr));
	  child = build2 (RANGE_EXPR, integer_type_node,
			  TYPE_MIN_VALUE (domain), TYPE_MAX_VALUE (domain));
	}
      else
	return NULL;
      break;

    case COMPONENT_REF:
      /* Don't look through unions.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) != RECORD_TYPE)
	return NULL;
      child = TREE_OPERAND (expr, 1);
      break;

    case REALPART_EXPR:
      child = integer_zero_node;
      break;
    case IMAGPART_EXPR:
      child = integer_one_node;
      break;

    default:
      return NULL;
    }

  elt = maybe_lookup_element_for_expr (TREE_OPERAND (expr, 0));
  if (elt)
    return lookup_element (elt, child, TREE_TYPE (expr), INSERT);
  return NULL;
}
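
/* For illustration (hypothetical names): given a candidate
   "struct S { int a; int b[4]; } s", the reference "s.b[2]" recurses
   through COMPONENT_REF and ARRAY_REF above, producing the element chain

       s  ->  s.b (FIELD_DECL "b")  ->  s.b[2] (INTEGER_CST 2)

   with each link created by lookup_element.  */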

/* Functions to walk just enough of the tree to see all scalarizable
   references, and categorize them.  */

/* A set of callbacks for phases 2 and 4.  They'll be invoked for the
   various kinds of references seen.  In all cases, *BSI is an iterator
   pointing to the statement being processed.  */
struct sra_walk_fns
{
  /* Invoked when ELT is required as a unit.  Note that ELT might refer to
     a leaf node, in which case this is a simple scalar reference.  *EXPR_P
     points to the location of the expression.  IS_OUTPUT is true if this
     is a left-hand-side reference.  USE_ALL is true if we saw something we
     couldn't quite identify and had to force the use of the entire object.  */
  void (*use) (struct sra_elt *elt, tree *expr_p,
	       block_stmt_iterator *bsi, bool is_output, bool use_all);

  /* Invoked when we have a copy between two scalarizable references.  */
  void (*copy) (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
		block_stmt_iterator *bsi);

  /* Invoked when ELT is initialized from a constant.  VALUE may be NULL,
     in which case it should be treated as an empty CONSTRUCTOR.  */
  void (*init) (struct sra_elt *elt, tree value, block_stmt_iterator *bsi);

  /* Invoked when we have a copy between one scalarizable reference ELT
     and one non-scalarizable reference OTHER.  IS_OUTPUT is true if ELT
     is on the left-hand side.  */
  void (*ldst) (struct sra_elt *elt, tree other,
		block_stmt_iterator *bsi, bool is_output);

  /* True during phase 2, false during phase 4.  */
  /* ??? This is a hack.  */
  bool initial_scan;
};

#ifdef ENABLE_CHECKING
/* Invoked via walk_tree, if *TP contains a candidate decl, return it.  */

static tree
sra_find_candidate_decl (tree *tp, int *walk_subtrees,
			 void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
    {
      *walk_subtrees = 0;
      if (is_sra_candidate_decl (t))
	return t;
    }
  else if (TYPE_P (t))
    *walk_subtrees = 0;

  return NULL;
}
#endif

/* Walk most expressions looking for a scalarizable aggregate.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_expr (tree *expr_p, block_stmt_iterator *bsi, bool is_output,
	       const struct sra_walk_fns *fns)
{
  tree expr = *expr_p;
  tree inner = expr;
  bool disable_scalarization = false;
  bool use_all_p = false;

  /* We're looking to collect a reference expression between EXPR and INNER,
     such that INNER is a scalarizable decl and all other nodes through EXPR
     are references that we can scalarize.  If we come across something that
     we can't scalarize, we reset EXPR.  This has the effect of making it
     appear that we're referring to the larger expression as a whole.  */

  while (1)
    switch (TREE_CODE (inner))
      {
      case VAR_DECL:
      case PARM_DECL:
      case RESULT_DECL:
	/* If there is a scalarizable decl at the bottom, then process it.  */
	if (is_sra_candidate_decl (inner))
	  {
	    struct sra_elt *elt = maybe_lookup_element_for_expr (expr);
	    if (disable_scalarization)
	      elt->cannot_scalarize = true;
	    else
	      fns->use (elt, expr_p, bsi, is_output, use_all_p);
	  }
	return;

      case ARRAY_REF:
	/* Non-constant index means any member may be accessed.  Prevent the
	   expression from being scalarized.  If we were to treat this as a
	   reference to the whole array, we can wind up with a single dynamic
	   index reference inside a loop being overridden by several constant
	   index references during loop setup.  It's possible that this could
	   be avoided by using dynamic usage counts based on BB trip counts
	   (based on loop analysis or profiling), but that hardly seems worth
	   the effort.  */
	/* ??? Hack.  Figure out how to push this into the scan routines
	   without duplicating too much code.  */
	if (!in_array_bounds_p (inner))
	  {
	    disable_scalarization = true;
	    goto use_all;
	  }
	/* ??? Are we assured that non-constant bounds and stride will have
	   the same value everywhere?  I don't think Fortran will...  */
	if (TREE_OPERAND (inner, 2) || TREE_OPERAND (inner, 3))
	  goto use_all;
	inner = TREE_OPERAND (inner, 0);
	break;

      case ARRAY_RANGE_REF:
	if (!range_in_array_bounds_p (inner))
	  {
	    disable_scalarization = true;
	    goto use_all;
	  }
	/* ??? See above re non-constant bounds and stride.  */
	if (TREE_OPERAND (inner, 2) || TREE_OPERAND (inner, 3))
	  goto use_all;
	inner = TREE_OPERAND (inner, 0);
	break;

      case COMPONENT_REF:
	/* A reference to a union member constitutes a reference to the
	   entire union.  */
	if (TREE_CODE (TREE_TYPE (TREE_OPERAND (inner, 0))) != RECORD_TYPE)
	  goto use_all;
	/* ??? See above re non-constant stride.  */
	if (TREE_OPERAND (inner, 2))
	  goto use_all;
	inner = TREE_OPERAND (inner, 0);
	break;

      case REALPART_EXPR:
      case IMAGPART_EXPR:
	inner = TREE_OPERAND (inner, 0);
	break;

      case BIT_FIELD_REF:
	/* A bit field reference to a specific vector is scalarized, but
	   ones used as outputs must be marked on the left-hand side, so
	   that when we scalarize them we can mark the variable as not
	   renamable.  */
	if (is_output
	    && TREE_CODE (TREE_TYPE (TREE_OPERAND (inner, 0))) == VECTOR_TYPE)
	  {
	    struct sra_elt *elt
	      = maybe_lookup_element_for_expr (TREE_OPERAND (inner, 0));
	    if (elt)
	      elt->is_vector_lhs = true;
	  }
	/* A bit field reference (access to *multiple* fields simultaneously)
	   is not currently scalarized.  Consider this an access to the
	   complete outer element, to which walk_tree will bring us next.  */

	goto use_all;

      case VIEW_CONVERT_EXPR:
      case NOP_EXPR:
	/* Similarly, a view/nop explicitly wants to look at an object in a
	   type other than the one we've scalarized.  */
	goto use_all;

      case WITH_SIZE_EXPR:
	/* This is a transparent wrapper.  The entire inner expression really
	   is being used.  */
	goto use_all;

      use_all:
	expr_p = &TREE_OPERAND (inner, 0);
	inner = expr = *expr_p;
	use_all_p = true;
	break;

      default:
#ifdef ENABLE_CHECKING
	/* Validate that we're not missing any references.  */
	gcc_assert (!walk_tree (&inner, sra_find_candidate_decl, NULL, NULL));
#endif
	return;
      }
}

/* Walk a TREE_LIST of values looking for scalarizable aggregates.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_tree_list (tree list, block_stmt_iterator *bsi, bool is_output,
		    const struct sra_walk_fns *fns)
{
  tree op;
  for (op = list; op ; op = TREE_CHAIN (op))
    sra_walk_expr (&TREE_VALUE (op), bsi, is_output, fns);
}

/* Walk the arguments of a CALL_EXPR looking for scalarizable aggregates.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_call_expr (tree expr, block_stmt_iterator *bsi,
		    const struct sra_walk_fns *fns)
{
  sra_walk_tree_list (TREE_OPERAND (expr, 1), bsi, false, fns);
}

/* Walk the inputs and outputs of an ASM_EXPR looking for scalarizable
   aggregates.  If we find one, invoke FNS->USE.  */

static void
sra_walk_asm_expr (tree expr, block_stmt_iterator *bsi,
		   const struct sra_walk_fns *fns)
{
  sra_walk_tree_list (ASM_INPUTS (expr), bsi, false, fns);
  sra_walk_tree_list (ASM_OUTPUTS (expr), bsi, true, fns);
}

/* Walk a GIMPLE_MODIFY_STMT and categorize the assignment appropriately.  */

static void
sra_walk_gimple_modify_stmt (tree expr, block_stmt_iterator *bsi,
			     const struct sra_walk_fns *fns)
{
  struct sra_elt *lhs_elt, *rhs_elt;
  tree lhs, rhs;

  lhs = GIMPLE_STMT_OPERAND (expr, 0);
  rhs = GIMPLE_STMT_OPERAND (expr, 1);
  lhs_elt = maybe_lookup_element_for_expr (lhs);
  rhs_elt = maybe_lookup_element_for_expr (rhs);

  /* If both sides are scalarizable, this is a COPY operation.  */
  if (lhs_elt && rhs_elt)
    {
      fns->copy (lhs_elt, rhs_elt, bsi);
      return;
    }

  /* If the RHS is scalarizable, handle it.  There are only two cases.  */
  if (rhs_elt)
    {
      if (!rhs_elt->is_scalar)
	fns->ldst (rhs_elt, lhs, bsi, false);
      else
	fns->use (rhs_elt, &GIMPLE_STMT_OPERAND (expr, 1), bsi, false, false);
    }

  /* If it isn't scalarizable, there may be scalarizable variables within, so
     check for a call or else walk the RHS to see if we need to do any
     copy-in operations.  We need to do it before the LHS is scalarized so
     that the statements get inserted in the proper place, before any
     copy-out operations.  */
  else
    {
      tree call = get_call_expr_in (rhs);
      if (call)
	sra_walk_call_expr (call, bsi, fns);
      else
	sra_walk_expr (&GIMPLE_STMT_OPERAND (expr, 1), bsi, false, fns);
    }

  /* Likewise, handle the LHS being scalarizable.  We have cases similar
     to those above, but also want to handle RHS being constant.  */
  if (lhs_elt)
    {
      /* If this is an assignment from a constant, or constructor, then
	 we have access to all of the elements individually.  Invoke INIT.  */
      if (TREE_CODE (rhs) == COMPLEX_EXPR
	  || TREE_CODE (rhs) == COMPLEX_CST
	  || TREE_CODE (rhs) == CONSTRUCTOR)
	fns->init (lhs_elt, rhs, bsi);

      /* If this is an assignment from read-only memory, treat this as if
	 we'd been passed the constructor directly.  Invoke INIT.  */
      else if (TREE_CODE (rhs) == VAR_DECL
	       && TREE_STATIC (rhs)
	       && TREE_READONLY (rhs)
	       && targetm.binds_local_p (rhs))
	fns->init (lhs_elt, DECL_INITIAL (rhs), bsi);

      /* If this is a copy from a non-scalarizable lvalue, invoke LDST.
	 The lvalue requirement prevents us from trying to directly scalarize
	 the result of a function call.  Which would result in trying to call
	 the function multiple times, and other evil things.  */
      else if (!lhs_elt->is_scalar && is_gimple_addressable (rhs))
	fns->ldst (lhs_elt, rhs, bsi, true);

      /* Otherwise we're being used in some context that requires the
	 aggregate to be seen as a whole.  Invoke USE.  */
      else
	fns->use (lhs_elt, &GIMPLE_STMT_OPERAND (expr, 0), bsi, true, false);
    }

  /* Similarly to above, LHS_ELT being null only means that the LHS as a
     whole is not a scalarizable reference.  There may be occurrences of
     scalarizable variables within, which implies a USE.  */
  else
    sra_walk_expr (&GIMPLE_STMT_OPERAND (expr, 0), bsi, true, fns);
}

/* Entry point to the walk functions.  Search the entire function,
   invoking the callbacks in FNS on each of the references to
   scalarizable variables.  */

static void
sra_walk_function (const struct sra_walk_fns *fns)
{
  basic_block bb;
  block_stmt_iterator si, ni;

  /* ??? Phase 4 could derive some benefit from walking the function in
     dominator tree order.  */

  FOR_EACH_BB (bb)
    for (si = bsi_start (bb); !bsi_end_p (si); si = ni)
      {
	tree stmt, t;
	stmt_ann_t ann;

	stmt = bsi_stmt (si);
	ann = stmt_ann (stmt);

	ni = si;
	bsi_next (&ni);

	/* If the statement has no virtual operands, then it doesn't
	   make any structure references that we care about.  */
	if (gimple_aliases_computed_p (cfun)
	    && ZERO_SSA_OPERANDS (stmt, (SSA_OP_VIRTUAL_DEFS | SSA_OP_VUSE)))
	  continue;

	switch (TREE_CODE (stmt))
	  {
	  case RETURN_EXPR:
	    /* If we have "return <retval>" then the return value is
	       already exposed for our pleasure.  Walk it as a USE to
	       force all the components back in place for the return.

	       If we have an embedded assignment, then <retval> is of
	       a type that gets returned in registers in this ABI, and
	       we do not wish to extend their lifetimes.  Treat this
	       as a USE of the variable on the RHS of this assignment.  */

	    t = TREE_OPERAND (stmt, 0);
	    if (t == NULL_TREE)
	      ;
	    else if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
	      sra_walk_expr (&GIMPLE_STMT_OPERAND (t, 1), &si, false, fns);
	    else
	      sra_walk_expr (&TREE_OPERAND (stmt, 0), &si, false, fns);
	    break;

	  case GIMPLE_MODIFY_STMT:
	    sra_walk_gimple_modify_stmt (stmt, &si, fns);
	    break;
	  case CALL_EXPR:
	    sra_walk_call_expr (stmt, &si, fns);
	    break;
	  case ASM_EXPR:
	    sra_walk_asm_expr (stmt, &si, fns);
	    break;

	  default:
	    break;
	  }
      }
}

/* Phase One: Scan all referenced variables in the program looking for
   structures that could be decomposed.  */

static bool
find_candidates_for_sra (void)
{
  bool any_set = false;
  tree var;
  referenced_var_iterator rvi;

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (decl_can_be_decomposed_p (var))
	{
	  bitmap_set_bit (sra_candidates, DECL_UID (var));
	  any_set = true;
	}
    }

  return any_set;
}

/* Phase Two: Scan all references to scalarizable variables.  Count the
   number of times they are used or copied respectively.  */

/* Callbacks to fill in SRA_WALK_FNS.  Everything but USE is
   considered a copy, because we can decompose the reference such that
   the sub-elements needn't be contiguous.  */

static void
scan_use (struct sra_elt *elt, tree *expr_p ATTRIBUTE_UNUSED,
	  block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
	  bool is_output ATTRIBUTE_UNUSED, bool use_all ATTRIBUTE_UNUSED)
{
  elt->n_uses += 1;
}

static void
scan_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
	   block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
{
  lhs_elt->n_copies += 1;
  rhs_elt->n_copies += 1;
}

static void
scan_init (struct sra_elt *lhs_elt, tree rhs ATTRIBUTE_UNUSED,
	   block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
{
  lhs_elt->n_copies += 1;
}

static void
scan_ldst (struct sra_elt *elt, tree other ATTRIBUTE_UNUSED,
	   block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
	   bool is_output ATTRIBUTE_UNUSED)
{
  elt->n_copies += 1;
}
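
/* For illustration (hypothetical names): with candidates
   "struct { int a, b; } s, t", the statements

       s.a = 1;   -> scan_use   (element s.a: n_uses becomes 1)
       t = s;     -> scan_copy  (roots s and t: n_copies become 1)
       s = {};    -> scan_init  (root s: n_copies incremented)

   These counts feed the heuristics in decide_instantiation_1 and
   decide_block_copy below.  */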

/* Dump the values we collected during the scanning phase.  */

static void
scan_dump (struct sra_elt *elt)
{
  struct sra_elt *c;

  dump_sra_elt_name (dump_file, elt);
  fprintf (dump_file, ": n_uses=%u n_copies=%u\n", elt->n_uses, elt->n_copies);

  for (c = elt->children; c ; c = c->sibling)
    scan_dump (c);

  for (c = elt->groups; c ; c = c->sibling)
    scan_dump (c);
}

/* Entry point to phase 2.  Scan the entire function, building up
   scalarization data structures, recording copies and uses.  */

static void
scan_function (void)
{
  static const struct sra_walk_fns fns = {
    scan_use, scan_copy, scan_init, scan_ldst, true
  };
  bitmap_iterator bi;

  sra_walk_function (&fns);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      unsigned i;

      fputs ("\nScan results:\n", dump_file);
      EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
	{
	  tree var = referenced_var (i);
	  struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
	  if (elt)
	    scan_dump (elt);
	}
      fputc ('\n', dump_file);
    }
}

/* Phase Three: Make decisions about which variables to scalarize, if any.
   All elements to be scalarized have replacement variables made for them.  */

/* A subroutine of build_element_name.  Recursively build the element
   name on the obstack.  */

static void
build_element_name_1 (struct sra_elt *elt)
{
  tree t;
  char buffer[32];

  if (elt->parent)
    {
      build_element_name_1 (elt->parent);
      obstack_1grow (&sra_obstack, '$');

      if (TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
	{
	  if (elt->element == integer_zero_node)
	    obstack_grow (&sra_obstack, "real", 4);
	  else
	    obstack_grow (&sra_obstack, "imag", 4);
	  return;
	}
    }

  t = elt->element;
  if (TREE_CODE (t) == INTEGER_CST)
    {
      /* ??? Eh.  Don't bother doing double-wide printing.  */
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (t));
      obstack_grow (&sra_obstack, buffer, strlen (buffer));
    }
  else
    {
      tree name = DECL_NAME (t);
      if (name)
	obstack_grow (&sra_obstack, IDENTIFIER_POINTER (name),
		      IDENTIFIER_LENGTH (name));
      else
	{
	  sprintf (buffer, "D%u", DECL_UID (t));
	  obstack_grow (&sra_obstack, buffer, strlen (buffer));
	}
    }
}

/* Construct a pretty variable name for an element's replacement variable.
   The name is built on the obstack.  */

static char *
build_element_name (struct sra_elt *elt)
{
  build_element_name_1 (elt);
  obstack_1grow (&sra_obstack, '\0');
  return XOBFINISH (&sra_obstack, char *);
}

/* Instantiate an element as an independent variable.  */

static void
instantiate_element (struct sra_elt *elt)
{
  struct sra_elt *base_elt;
  tree var, base;

  for (base_elt = elt; base_elt->parent; base_elt = base_elt->parent)
    continue;
  base = base_elt->element;

  elt->replacement = var = make_rename_temp (elt->type, "SR");

  /* For vectors, if used on the left hand side with BIT_FIELD_REF,
     they are not a gimple register.  */
  if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE && elt->is_vector_lhs)
    DECL_GIMPLE_REG_P (var) = 0;

  DECL_SOURCE_LOCATION (var) = DECL_SOURCE_LOCATION (base);
  DECL_ARTIFICIAL (var) = 1;

  if (TREE_THIS_VOLATILE (elt->type))
    {
      TREE_THIS_VOLATILE (var) = 1;
      TREE_SIDE_EFFECTS (var) = 1;
    }

  if (DECL_NAME (base) && !DECL_IGNORED_P (base))
    {
      char *pretty_name = build_element_name (elt);
      DECL_NAME (var) = get_identifier (pretty_name);
      obstack_free (&sra_obstack, pretty_name);

      SET_DECL_DEBUG_EXPR (var, generate_element_ref (elt));
      DECL_DEBUG_EXPR_IS_FROM (var) = 1;

      DECL_IGNORED_P (var) = 0;
      TREE_NO_WARNING (var) = TREE_NO_WARNING (base);
    }
  else
    {
      DECL_IGNORED_P (var) = 1;
      /* ??? We can't generate any warning that would be meaningful.  */
      TREE_NO_WARNING (var) = 1;
    }

  if (dump_file)
    {
      fputs ("  ", dump_file);
      dump_sra_elt_name (dump_file, elt);
      fputs (" -> ", dump_file);
      print_generic_expr (dump_file, var, dump_flags);
      fputc ('\n', dump_file);
    }
}

/* Make one pass across an element tree deciding whether or not it's
   profitable to instantiate individual leaf scalars.

   PARENT_USES and PARENT_COPIES are the sum of the N_USES and N_COPIES
   fields all the way up the tree.  */

static void
decide_instantiation_1 (struct sra_elt *elt, unsigned int parent_uses,
			unsigned int parent_copies)
{
  if (dump_file && !elt->parent)
    {
      fputs ("Initial instantiation for ", dump_file);
      dump_sra_elt_name (dump_file, elt);
      fputc ('\n', dump_file);
    }

  if (elt->cannot_scalarize)
    return;

  if (elt->is_scalar)
    {
      /* The decision is simple: instantiate if we're used more frequently
	 than the parent needs to be seen as a complete unit.  */
      if (elt->n_uses + elt->n_copies + parent_copies > parent_uses)
	instantiate_element (elt);
    }
  else
    {
      struct sra_elt *c, *group;
      unsigned int this_uses = elt->n_uses + parent_uses;
      unsigned int this_copies = elt->n_copies + parent_copies;

      /* Consider groups of sub-elements as weighing in favour of
	 instantiation whatever their size.  */
      for (group = elt->groups; group ; group = group->sibling)
	FOR_EACH_ACTUAL_CHILD (c, group)
	  {
	    c->n_uses += group->n_uses;
	    c->n_copies += group->n_copies;
	  }

      for (c = elt->children; c ; c = c->sibling)
	decide_instantiation_1 (c, this_uses, this_copies);
    }
}
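
/* A worked example of the scalar test above (hypothetical numbers): if
   element s.a has n_uses == 3 and n_copies == 1, while its parents
   accumulate parent_uses == 2 and parent_copies == 0, then
   3 + 1 + 0 > 2 holds and s.a is instantiated.  Had "s" been used whole
   five times instead, 4 > 5 would fail and s.a would stay put.  */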

/* Compute the size and number of all instantiated elements below ELT.
   We will only care about this if the size of the complete structure
   fits in a HOST_WIDE_INT, so we don't have to worry about overflow.  */

static unsigned int
sum_instantiated_sizes (struct sra_elt *elt, unsigned HOST_WIDE_INT *sizep)
{
  if (elt->replacement)
    {
      *sizep += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (elt->type));
      return 1;
    }
  else
    {
      struct sra_elt *c;
      unsigned int count = 0;

      for (c = elt->children; c ; c = c->sibling)
	count += sum_instantiated_sizes (c, sizep);

      return count;
    }
}

/* Instantiate fields in ELT->TYPE that are not currently present as
   children of ELT.  */

static void instantiate_missing_elements (struct sra_elt *elt);

static void
instantiate_missing_elements_1 (struct sra_elt *elt, tree child, tree type)
{
  struct sra_elt *sub = lookup_element (elt, child, type, INSERT);
  if (sub->is_scalar)
    {
      if (sub->replacement == NULL)
	instantiate_element (sub);
    }
  else
    instantiate_missing_elements (sub);
}

static void
instantiate_missing_elements (struct sra_elt *elt)
{
  tree type = elt->type;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
	tree f;
	for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    instantiate_missing_elements_1 (elt, f, TREE_TYPE (f));
	break;
      }

    case ARRAY_TYPE:
      {
	tree i, max, subtype;

	i = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	max = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	subtype = TREE_TYPE (type);

	while (1)
	  {
	    instantiate_missing_elements_1 (elt, i, subtype);
	    if (tree_int_cst_equal (i, max))
	      break;
	    i = int_const_binop (PLUS_EXPR, i, integer_one_node, true);
	  }

	break;
      }

    case COMPLEX_TYPE:
      type = TREE_TYPE (type);
      instantiate_missing_elements_1 (elt, integer_zero_node, type);
      instantiate_missing_elements_1 (elt, integer_one_node, type);
      break;

    default:
      gcc_unreachable ();
    }
}

/* Return true if there is only one non-aggregate field in the record, TYPE.
   Return false otherwise.  */

static bool
single_scalar_field_in_record_p (tree type)
{
  int num_fields = 0;
  tree field;
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      {
	num_fields++;

	if (num_fields == 2)
	  return false;

	if (AGGREGATE_TYPE_P (TREE_TYPE (field)))
	  return false;
      }

  return true;
}

/* Make one pass across an element tree deciding whether to perform block
   or element copies.  If we decide on element copies, instantiate all
   elements.  Return true if there are any instantiated sub-elements.  */

static bool
decide_block_copy (struct sra_elt *elt)
{
  struct sra_elt *c;
  bool any_inst;

  /* We shouldn't be invoked on groups of sub-elements as they must
     behave like their parent as far as block copy is concerned.  */
  gcc_assert (!elt->is_group);

  /* If scalarization is disabled, respect it.  */
  if (elt->cannot_scalarize)
    {
      elt->use_block_copy = 1;

      if (dump_file)
	{
	  fputs ("Scalarization disabled for ", dump_file);
	  dump_sra_elt_name (dump_file, elt);
	  fputc ('\n', dump_file);
	}

      /* Disable scalarization of sub-elements.  */
      for (c = elt->children; c; c = c->sibling)
	{
	  c->cannot_scalarize = 1;
	  decide_block_copy (c);
	}

      /* Groups behave like their parent.  */
      for (c = elt->groups; c; c = c->sibling)
	{
	  c->cannot_scalarize = 1;
	  c->use_block_copy = 1;
	}

      return false;
    }

  /* Don't decide if we've no uses.  */
  if (elt->n_uses == 0 && elt->n_copies == 0)
    ;

  else if (!elt->is_scalar)
    {
      tree size_tree = TYPE_SIZE_UNIT (elt->type);
      bool use_block_copy = true;

      /* Tradeoffs for COMPLEX types pretty much always make it better
	 to go ahead and split the components.  */
      if (TREE_CODE (elt->type) == COMPLEX_TYPE)
	use_block_copy = false;

      /* Don't bother trying to figure out the rest if the structure is
	 so large we can't do easy arithmetic.  This also forces block
	 copies for variable sized structures.  */
      else if (host_integerp (size_tree, 1))
	{
	  unsigned HOST_WIDE_INT full_size, inst_size = 0;
	  unsigned int max_size, max_count, inst_count, full_count;

	  /* If the sra-max-structure-size parameter is 0, then the
	     user has not overridden the parameter and we can choose a
	     sensible default.  */
	  max_size = SRA_MAX_STRUCTURE_SIZE
		     ? SRA_MAX_STRUCTURE_SIZE
		     : MOVE_RATIO * UNITS_PER_WORD;
	  max_count = SRA_MAX_STRUCTURE_COUNT
		      ? SRA_MAX_STRUCTURE_COUNT
		      : MOVE_RATIO;

	  full_size = tree_low_cst (size_tree, 1);
	  full_count = count_type_elements (elt->type, false);
	  inst_count = sum_instantiated_sizes (elt, &inst_size);

	  /* If there is only one scalar field in the record, don't block copy.  */
	  if (single_scalar_field_in_record_p (elt->type))
	    use_block_copy = false;

	  /* ??? What to do here.  If there are two fields, and we've only
	     instantiated one, then instantiating the other is clearly a win.
	     If there are a large number of fields then the size of the copy
	     is much more of a factor.  */

	  /* If the structure is small, and we've made copies, go ahead
	     and instantiate, hoping that the copies will go away.  */
	  if (full_size <= max_size
	      && (full_count - inst_count) <= max_count
	      && elt->n_copies > elt->n_uses)
	    use_block_copy = false;
	  else if (inst_count * 100 >= full_count * SRA_FIELD_STRUCTURE_RATIO
		   && inst_size * 100 >= full_size * SRA_FIELD_STRUCTURE_RATIO)
	    use_block_copy = false;

	  /* In order to avoid block copy, we have to be able to instantiate
	     all elements of the type.  See if this is possible.  */
	  if (!use_block_copy
	      && (!can_completely_scalarize_p (elt)
		  || !type_can_instantiate_all_elements (elt->type)))
	    use_block_copy = true;
	}

      elt->use_block_copy = use_block_copy;

      /* Groups behave like their parent.  */
      for (c = elt->groups; c; c = c->sibling)
	c->use_block_copy = use_block_copy;

      if (dump_file)
	{
	  fprintf (dump_file, "Using %s for ",
		   use_block_copy ? "block-copy" : "element-copy");
	  dump_sra_elt_name (dump_file, elt);
	  fputc ('\n', dump_file);
	}

      if (!use_block_copy)
	{
	  instantiate_missing_elements (elt);
	  return true;
	}
    }

  any_inst = elt->replacement != NULL;

  for (c = elt->children; c ; c = c->sibling)
    any_inst |= decide_block_copy (c);

  return any_inst;
}
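
/* To make the ratio test above concrete (hypothetical numbers, assuming
   SRA_FIELD_STRUCTURE_RATIO is 75): for a 16-byte structure of four
   4-byte fields with three fields instantiated,
   inst_count * 100 = 300 >= full_count * 75 = 300 and
   inst_size * 100 = 1200 >= full_size * 75 = 1200, so element copies
   are chosen; with only two fields instantiated, both comparisons fail
   and the structure falls back to block copy.  */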

/* Entry point to phase 3.  Instantiate scalar replacement variables.  */

static void
decide_instantiations (void)
{
  unsigned int i;
  bool cleared_any;
  bitmap_head done_head;
  bitmap_iterator bi;

  /* We cannot clear bits from a bitmap we're iterating over,
     so save up all the bits to clear until the end.  */
  bitmap_initialize (&done_head, &bitmap_default_obstack);
  cleared_any = false;

  EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
    {
      tree var = referenced_var (i);
      struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
      if (elt)
	{
	  decide_instantiation_1 (elt, 0, 0);
	  if (!decide_block_copy (elt))
	    elt = NULL;
	}
      if (!elt)
	{
	  bitmap_set_bit (&done_head, i);
	  cleared_any = true;
	}
    }

  if (cleared_any)
    {
      bitmap_and_compl_into (sra_candidates, &done_head);
      bitmap_and_compl_into (needs_copy_in, &done_head);
    }
  bitmap_clear (&done_head);

  if (!bitmap_empty_p (sra_candidates))
    todoflags |= TODO_update_smt_usage;

  mark_set_for_renaming (sra_candidates);

  if (dump_file)
    fputc ('\n', dump_file);
}

/* Phase Four: Update the function to match the replacements created.  */

/* Mark all the variables in VDEF/VUSE operators for STMT for
   renaming.  This becomes necessary when we modify all of a
   non-scalar.  */

static void
mark_all_v_defs_1 (tree stmt)
{
  tree sym;
  ssa_op_iter iter;

  update_stmt_if_modified (stmt);

  FOR_EACH_SSA_TREE_OPERAND (sym, stmt, iter, SSA_OP_ALL_VIRTUALS)
    {
      if (TREE_CODE (sym) == SSA_NAME)
	sym = SSA_NAME_VAR (sym);
      mark_sym_for_renaming (sym);
    }
}

/* Mark all the variables in virtual operands in all the statements in
   LIST for renaming.  */

static void
mark_all_v_defs (tree list)
{
  if (TREE_CODE (list) != STATEMENT_LIST)
    mark_all_v_defs_1 (list);
  else
    {
      tree_stmt_iterator i;
      for (i = tsi_start (list); !tsi_end_p (i); tsi_next (&i))
	mark_all_v_defs_1 (tsi_stmt (i));
    }
}

/* Mark every replacement under ELT with TREE_NO_WARNING.  */

static void
mark_no_warning (struct sra_elt *elt)
{
  if (!elt->all_no_warning)
    {
      if (elt->replacement)
	TREE_NO_WARNING (elt->replacement) = 1;
      else
	{
	  struct sra_elt *c;
	  FOR_EACH_ACTUAL_CHILD (c, elt)
	    mark_no_warning (c);
	}
      elt->all_no_warning = true;
    }
}

/* Build a single level component reference to ELT rooted at BASE.  */

static tree
generate_one_element_ref (struct sra_elt *elt, tree base)
{
  switch (TREE_CODE (TREE_TYPE (base)))
    {
    case RECORD_TYPE:
      {
	tree field = elt->element;

	/* Watch out for compatible records with differing field lists.  */
	if (DECL_FIELD_CONTEXT (field) != TYPE_MAIN_VARIANT (TREE_TYPE (base)))
	  field = find_compatible_field (TREE_TYPE (base), field);

	return build3 (COMPONENT_REF, elt->type, base, field, NULL);
      }

    case ARRAY_TYPE:
      todoflags |= TODO_update_smt_usage;
      if (TREE_CODE (elt->element) == RANGE_EXPR)
	return build4 (ARRAY_RANGE_REF, elt->type, base,
		       TREE_OPERAND (elt->element, 0), NULL, NULL);
      else
	return build4 (ARRAY_REF, elt->type, base, elt->element, NULL, NULL);

    case COMPLEX_TYPE:
      if (elt->element == integer_zero_node)
	return build1 (REALPART_EXPR, elt->type, base);
      else
	return build1 (IMAGPART_EXPR, elt->type, base);

    default:
      gcc_unreachable ();
    }
}

/* Build a full component reference to ELT rooted at its native variable.  */

static tree
generate_element_ref (struct sra_elt *elt)
{
  if (elt->parent)
    return generate_one_element_ref (elt, generate_element_ref (elt->parent));
  else
    return elt->element;
}

/* Generate a set of assignment statements in *LIST_P to copy all
   instantiated elements under ELT to or from the equivalent structure
   rooted at EXPR.  COPY_OUT controls the direction of the copy, with
   true meaning to copy out of EXPR into ELT.  */

static void
generate_copy_inout (struct sra_elt *elt, bool copy_out, tree expr,
		     tree *list_p)
{
  struct sra_elt *c;
  tree t;

  if (!copy_out && TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == COMPLEX_TYPE)
    {
      tree r, i;

      c = lookup_element (elt, integer_zero_node, NULL, NO_INSERT);
      r = c->replacement;
      c = lookup_element (elt, integer_one_node, NULL, NO_INSERT);
      i = c->replacement;

      t = build2 (COMPLEX_EXPR, elt->type, r, i);
      t = build2 (GIMPLE_MODIFY_STMT, void_type_node, expr, t);
      SSA_NAME_DEF_STMT (expr) = t;
      append_to_statement_list (t, list_p);
    }
  else if (elt->replacement)
    {
      if (copy_out)
	t = build2 (GIMPLE_MODIFY_STMT, void_type_node, elt->replacement, expr);
      else
	t = build2 (GIMPLE_MODIFY_STMT, void_type_node, expr, elt->replacement);
      append_to_statement_list (t, list_p);
    }
  else
    {
      FOR_EACH_ACTUAL_CHILD (c, elt)
	{
	  t = generate_one_element_ref (c, unshare_expr (expr));
	  generate_copy_inout (c, copy_out, t, list_p);
	}
    }
}
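
/* For illustration (hypothetical names): with a fully instantiated
   "struct { int a, b; } s" whose replacements are s$a and s$b, calling
   generate_copy_inout with COPY_OUT true appends roughly

       s$a = s.a;
       s$b = s.b;

   and with COPY_OUT false the mirror-image assignments back into the
   structure.  */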

/* Generate a set of assignment statements in *LIST_P to copy all instantiated
   elements under SRC to their counterparts under DST.  There must be a 1-1
   correspondence of instantiated elements.  */

static void
generate_element_copy (struct sra_elt *dst, struct sra_elt *src, tree *list_p)
{
  struct sra_elt *dc, *sc;

  FOR_EACH_ACTUAL_CHILD (dc, dst)
    {
      sc = lookup_element (src, dc->element, NULL, NO_INSERT);
      gcc_assert (sc);
      generate_element_copy (dc, sc, list_p);
    }

  if (dst->replacement)
    {
      tree t;

      gcc_assert (src->replacement);

      t = build2 (GIMPLE_MODIFY_STMT, void_type_node, dst->replacement,
		  src->replacement);
      append_to_statement_list (t, list_p);
    }
}

/* Generate a set of assignment statements in *LIST_P to zero all instantiated
   elements under ELT.  In addition, do not assign to elements that have been
   marked VISITED but do reset the visited flag; this allows easy coordination
   with generate_element_init.  */

static void
generate_element_zero (struct sra_elt *elt, tree *list_p)
{
  struct sra_elt *c;

  if (elt->visited)
    {
      elt->visited = false;
      return;
    }

  FOR_EACH_ACTUAL_CHILD (c, elt)
    generate_element_zero (c, list_p);

  if (elt->replacement)
    {
      tree t;

      gcc_assert (elt->is_scalar);
      t = fold_convert (elt->type, integer_zero_node);

      t = build2 (GIMPLE_MODIFY_STMT, void_type_node, elt->replacement, t);
      append_to_statement_list (t, list_p);
    }
}

/* Generate an assignment VAR = INIT, where INIT may need gimplification.
   Add the result to *LIST_P.  */

static void
generate_one_element_init (tree var, tree init, tree *list_p)
{
  /* The replacement can be almost arbitrarily complex.  Gimplify.  */
  tree stmt = build2 (GIMPLE_MODIFY_STMT, void_type_node, var, init);
  gimplify_and_add (stmt, list_p);
}

/* Generate a set of assignment statements in *LIST_P to set all instantiated
   elements under ELT with the contents of the initializer INIT.  In addition,
   mark all assigned elements VISITED; this allows easy coordination with
   generate_element_zero.  Return false if we found a case we couldn't
   handle.  */

static bool
generate_element_init_1 (struct sra_elt *elt, tree init, tree *list_p)
{
  bool result = true;
  enum tree_code init_code;
  struct sra_elt *sub;
  tree t;
  unsigned HOST_WIDE_INT idx;
  tree value, purpose;

  /* We can be passed DECL_INITIAL of a static variable.  It might have a
     conversion, which we strip off here.  */
  STRIP_USELESS_TYPE_CONVERSION (init);
  init_code = TREE_CODE (init);

  if (elt->is_scalar)
    {
      if (elt->replacement)
	{
	  generate_one_element_init (elt->replacement, init, list_p);
	  elt->visited = true;
	}
      return result;
    }

  switch (init_code)
    {
    case COMPLEX_CST:
    case COMPLEX_EXPR:
      FOR_EACH_ACTUAL_CHILD (sub, elt)
	{
	  if (sub->element == integer_zero_node)
	    t = (init_code == COMPLEX_EXPR
		 ? TREE_OPERAND (init, 0) : TREE_REALPART (init));
	  else
	    t = (init_code == COMPLEX_EXPR
		 ? TREE_OPERAND (init, 1) : TREE_IMAGPART (init));
	  result &= generate_element_init_1 (sub, t, list_p);
	}
      break;

    case CONSTRUCTOR:
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), idx, purpose, value)
	{
	  if (TREE_CODE (purpose) == RANGE_EXPR)
	    {
	      tree lower = TREE_OPERAND (purpose, 0);
	      tree upper = TREE_OPERAND (purpose, 1);

	      while (1)
		{
		  sub = lookup_element (elt, lower, NULL, NO_INSERT);
		  if (sub != NULL)
		    result &= generate_element_init_1 (sub, value, list_p);
		  if (tree_int_cst_equal (lower, upper))
		    break;
		  lower = int_const_binop (PLUS_EXPR, lower,
					   integer_one_node, true);
		}
	    }
	  else
	    {
	      sub = lookup_element (elt, purpose, NULL, NO_INSERT);
	      if (sub != NULL)
		result &= generate_element_init_1 (sub, value, list_p);
	    }
	}
      break;

    default:
      elt->visited = true;
      result = false;
    }

  return result;
}

/* A wrapper function for generate_element_init_1 that handles cleanup after
   gimplification.  */

static bool
generate_element_init (struct sra_elt *elt, tree init, tree *list_p)
{
  bool ret;

  push_gimplify_context ();
  ret = generate_element_init_1 (elt, init, list_p);
  pop_gimplify_context (NULL);

  /* The replacement can expose previously unreferenced variables.  */
  if (ret && *list_p)
    {
      tree_stmt_iterator i;

      for (i = tsi_start (*list_p); !tsi_end_p (i); tsi_next (&i))
	find_new_referenced_vars (tsi_stmt_ptr (i));
    }

  return ret;
}

/* Insert STMT on all the outgoing edges out of BB.  Note that if BB
   has more than one edge, STMT will be replicated for each edge.  Also,
   abnormal edges will be ignored.  */

void
insert_edge_copies (tree stmt, basic_block bb)
{
  edge e;
  edge_iterator ei;
  bool first_copy;

  first_copy = true;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      /* We don't need to insert copies on abnormal edges.  The
	 value of the scalar replacement is not guaranteed to
	 be valid through an abnormal edge.  */
      if (!(e->flags & EDGE_ABNORMAL))
	{
	  if (first_copy)
	    {
	      bsi_insert_on_edge (e, stmt);
	      first_copy = false;
	    }
	  else
	    bsi_insert_on_edge (e, unsave_expr_now (stmt));
	}
    }
}

/* Helper function to insert LIST before BSI, and set up line number info.  */

void
sra_insert_before (block_stmt_iterator *bsi, tree list)
{
  tree stmt = bsi_stmt (*bsi);

  if (EXPR_HAS_LOCATION (stmt))
    annotate_all_with_locus (&list, EXPR_LOCATION (stmt));
  bsi_insert_before (bsi, list, BSI_SAME_STMT);
}

/* Similarly, but insert after BSI.  Handles insertion onto edges as well.  */

void
sra_insert_after (block_stmt_iterator *bsi, tree list)
{
  tree stmt = bsi_stmt (*bsi);

  if (EXPR_HAS_LOCATION (stmt))
    annotate_all_with_locus (&list, EXPR_LOCATION (stmt));

  if (stmt_ends_bb_p (stmt))
    insert_edge_copies (list, bsi->bb);
  else
    bsi_insert_after (bsi, list, BSI_SAME_STMT);
}

/* Similarly, but replace the statement at BSI.  */

static void
sra_replace (block_stmt_iterator *bsi, tree list)
{
  sra_insert_before (bsi, list);
  bsi_remove (bsi, false);
  if (bsi_end_p (*bsi))
    *bsi = bsi_last (bsi->bb);
  else
    bsi_prev (bsi);
}

/* Scalarize a USE.  To recap, this is either a simple reference to ELT,
   if elt is scalar, or some occurrence of ELT that requires a complete
   aggregate.  IS_OUTPUT is true if ELT is being modified.  */

static void
scalarize_use (struct sra_elt *elt, tree *expr_p, block_stmt_iterator *bsi,
	       bool is_output, bool use_all)
{
  tree list = NULL, stmt = bsi_stmt (*bsi);

  if (elt->replacement)
    {
      /* If we have a replacement, then updating the reference is as
	 simple as modifying the existing statement in place.  */
      if (is_output)
	mark_all_v_defs (stmt);
      *expr_p = elt->replacement;
      update_stmt (stmt);
    }
  else
    {
      /* Otherwise we need some copies.  If ELT is being read, then we want
	 to store all (modified) sub-elements back into the structure before
	 the reference takes place.  If ELT is being written, then we want to
	 load the changed values back into our shadow variables.  */
      /* ??? We don't check modified for reads, we just always write all of
	 the values.  We should be able to record the SSA number of the VOP
	 for which the values were last read.  If that number matches the
	 SSA number of the VOP in the current statement, then we needn't
	 emit an assignment.  This would also eliminate double writes when
	 a structure is passed as more than one argument to a function call.
	 This optimization would be most effective if sra_walk_function
	 processed the blocks in dominator order.  */

      generate_copy_inout (elt, is_output, generate_element_ref (elt), &list);
      if (list == NULL)
	return;
      mark_all_v_defs (list);
      if (is_output)
	sra_insert_after (bsi, list);
      else
	{
	  sra_insert_before (bsi, list);
	  if (use_all)
	    mark_no_warning (elt);
	}
    }
}
/* Scalarize a COPY.  To recap, this is an assignment statement between
   two scalarizable references, LHS_ELT and RHS_ELT.  */

static void
scalarize_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
		block_stmt_iterator *bsi)
{
  tree list, stmt;

  if (lhs_elt->replacement && rhs_elt->replacement)
    {
      /* If we have two scalar operands, modify the existing statement.  */
      stmt = bsi_stmt (*bsi);

      /* See the commentary in sra_walk_function concerning
	 RETURN_EXPR, and why we should never see one here.  */
      gcc_assert (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT);

      GIMPLE_STMT_OPERAND (stmt, 0) = lhs_elt->replacement;
      GIMPLE_STMT_OPERAND (stmt, 1) = rhs_elt->replacement;
      update_stmt (stmt);
    }
  else if (lhs_elt->use_block_copy || rhs_elt->use_block_copy)
    {
      /* If either side requires a block copy, then sync the RHS back
	 to the original structure, leave the original assignment
	 statement (which will perform the block copy), then load the
	 LHS values out of its now-updated original structure.  */
      /* ??? Could perform a modified pair-wise element copy.  That
	 would at least allow those elements that are instantiated in
	 both structures to be optimized well.  */

      list = NULL;
      generate_copy_inout (rhs_elt, false,
			   generate_element_ref (rhs_elt), &list);
      if (list)
	{
	  mark_all_v_defs (list);
	  sra_insert_before (bsi, list);
	}

      list = NULL;
      generate_copy_inout (lhs_elt, true,
			   generate_element_ref (lhs_elt), &list);
      if (list)
	{
	  mark_all_v_defs (list);
	  sra_insert_after (bsi, list);
	}
    }
  else
    {
      /* Otherwise both sides must be fully instantiated; in that case,
	 perform pair-wise element assignments and replace the original
	 block copy statement.  */

      stmt = bsi_stmt (*bsi);
      mark_all_v_defs (stmt);

      list = NULL;
      generate_element_copy (lhs_elt, rhs_elt, &list);
      gcc_assert (list);
      mark_all_v_defs (list);
      sra_replace (bsi, list);
    }
}

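/* For example (hypothetical aggregates and replacement names): with
   both sides fully instantiated, the block copy

     a = b;

   is replaced outright by the pair-wise scalar assignments

     a$x = b$x;
     a$y = b$y;  */
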
/* Scalarize an INIT.  To recap, this is an assignment to a scalarizable
   reference from some form of constructor: CONSTRUCTOR, COMPLEX_CST or
   COMPLEX_EXPR.  If RHS is NULL, it should be treated as an empty
   CONSTRUCTOR.  */

static void
scalarize_init (struct sra_elt *lhs_elt, tree rhs, block_stmt_iterator *bsi)
{
  bool result = true;
  tree list = NULL;

  /* Generate initialization statements for all members extant in the RHS.  */
  if (rhs)
    {
      /* Unshare the expression just in case this is from a decl's initial.  */
      rhs = unshare_expr (rhs);
      result = generate_element_init (lhs_elt, rhs, &list);
    }

  /* CONSTRUCTOR is defined such that any member not mentioned is assigned
     a zero value.  Initialize the rest of the instantiated elements.  */
  generate_element_zero (lhs_elt, &list);

  if (!result)
    {
      /* If we failed to convert the entire initializer, then we must
	 leave the structure assignment in place and must load values
	 from the structure into the slots for which we did not find
	 constants.  The easiest way to do this is to generate a complete
	 copy-out, and then follow that with the constant assignments
	 that we were able to build.  DCE will clean things up.  */
      tree list0 = NULL;
      generate_copy_inout (lhs_elt, true, generate_element_ref (lhs_elt),
			   &list0);
      append_to_statement_list (list, &list0);
      list = list0;
    }

  if (lhs_elt->use_block_copy || !result)
    {
      /* Since LHS is not fully instantiated, we must leave the structure
	 assignment in place.  Treating this case differently from a USE
	 exposes constants to later optimizations.  */
      if (list)
	{
	  mark_all_v_defs (list);
	  sra_insert_after (bsi, list);
	}
    }
  else
    {
      /* The LHS is fully instantiated.  The list of initializations
	 replaces the original structure assignment.  */
      gcc_assert (list);
      mark_all_v_defs (bsi_stmt (*bsi));
      mark_all_v_defs (list);
      sra_replace (bsi, list);
    }
}

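/* For example (hypothetical): for

     struct pair p = { 1 };

   generate_element_init produces p$x = 1, and generate_element_zero
   then supplies p$y = 0 for the member the CONSTRUCTOR leaves
   implicit.  */
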
/* A subroutine of scalarize_ldst called via walk_tree.  Set
   TREE_THIS_NOTRAP on all INDIRECT_REFs.  */

static tree
mark_notrap (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (TREE_CODE (t) == INDIRECT_REF)
    {
      TREE_THIS_NOTRAP (t) = 1;
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;

  return NULL;
}

/* Scalarize a LDST.  To recap, this is an assignment between one
   scalarizable reference ELT and one non-scalarizable reference OTHER.
   IS_OUTPUT is true if ELT is on the left-hand side.  */

static void
scalarize_ldst (struct sra_elt *elt, tree other,
		block_stmt_iterator *bsi, bool is_output)
{
  /* Shouldn't have gotten called for a scalar.  */
  gcc_assert (!elt->replacement);

  if (elt->use_block_copy)
    {
      /* Since ELT is not fully instantiated, we have to leave the
	 block copy in place.  Treat this as a USE.  */
      scalarize_use (elt, NULL, bsi, is_output, false);
    }
  else
    {
      /* The interesting case is when ELT is fully instantiated.  In this
	 case we can have each element stored/loaded directly to/from the
	 corresponding slot in OTHER.  This avoids a block copy.  */

      tree list = NULL, stmt = bsi_stmt (*bsi);

      mark_all_v_defs (stmt);
      generate_copy_inout (elt, is_output, other, &list);
      gcc_assert (list);
      mark_all_v_defs (list);

      /* Preserve EH semantics.  */
      if (stmt_ends_bb_p (stmt))
	{
	  tree_stmt_iterator tsi;
	  tree first;

	  /* Extract the first statement from LIST.  */
	  tsi = tsi_start (list);
	  first = tsi_stmt (tsi);
	  tsi_delink (&tsi);

	  /* Replace the old statement with this new representative.  */
	  bsi_replace (bsi, first, true);

	  if (!tsi_end_p (tsi))
	    {
	      /* If any reference would trap, then they all would.  And
		 more to the point, the first would.  Therefore none of
		 the rest will trap since the first didn't.  Indicate this
		 by iterating over the remaining statements and setting
		 TREE_THIS_NOTRAP in all INDIRECT_REFs.  */
	      do
		{
		  walk_tree (tsi_stmt_ptr (tsi), mark_notrap, NULL, NULL);
		  tsi_next (&tsi);
		}
	      while (!tsi_end_p (tsi));

	      insert_edge_copies (list, bsi->bb);
	    }
	}
      else
	sra_replace (bsi, list);
    }
}

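/* For example (hypothetical): when P is fully instantiated, the load

     p = *q;

   is replaced by the direct element loads

     p$x = q->x;
     p$y = q->y;

   and if *q may trap, only the first replacement statement keeps the
   EH edge; mark_notrap records that the remaining loads cannot trap.  */
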
/* Generate initializations for all scalarizable parameters.  */

static void
scalarize_parms (void)
{
  tree list = NULL;
  unsigned i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (needs_copy_in, 0, i, bi)
    {
      tree var = referenced_var (i);
      struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
      generate_copy_inout (elt, true, var, &list);
    }

  if (list)
    {
      insert_edge_copies (list, ENTRY_BLOCK_PTR);
      mark_all_v_defs (list);
    }
}

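/* For example (hypothetical): for a scalarized parameter P, copy-in
   assignments such as

     p$x = p.x;
     p$y = p.y;

   are placed on the edge out of the entry block, so the replacement
   variables carry the incoming values from function entry onward.  */
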
/* Entry point to phase 4.  Update the function to match replacements.  */

static void
scalarize_function (void)
{
  static const struct sra_walk_fns fns = {
    scalarize_use, scalarize_copy, scalarize_init, scalarize_ldst, false
  };

  sra_walk_function (&fns);
  scalarize_parms ();
  bsi_commit_edge_inserts ();
}

/* Debug helper function.  Print ELT in a nice human-readable format.  */

static void
dump_sra_elt_name (FILE *f, struct sra_elt *elt)
{
  if (elt->parent && TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
    {
      fputs (elt->element == integer_zero_node ? "__real__ " : "__imag__ ", f);
      dump_sra_elt_name (f, elt->parent);
    }
  else
    {
      if (elt->parent)
	dump_sra_elt_name (f, elt->parent);
      if (DECL_P (elt->element))
	{
	  if (TREE_CODE (elt->element) == FIELD_DECL)
	    fputc ('.', f);
	  print_generic_expr (f, elt->element, dump_flags);
	}
      else if (TREE_CODE (elt->element) == RANGE_EXPR)
	fprintf (f, "[" HOST_WIDE_INT_PRINT_DEC ".." HOST_WIDE_INT_PRINT_DEC "]",
		 TREE_INT_CST_LOW (TREE_OPERAND (elt->element, 0)),
		 TREE_INT_CST_LOW (TREE_OPERAND (elt->element, 1)));
      else
	fprintf (f, "[" HOST_WIDE_INT_PRINT_DEC "]",
		 TREE_INT_CST_LOW (elt->element));
    }
}

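/* Example output (hypothetical element trees): a field prints as "p.x",
   an array element as "a[3]", an array range as "a[0..7]", and a part
   of a complex as "__real__ c" or "__imag__ c".  */
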
/* Likewise, but callable from the debugger.  */

void
debug_sra_elt_name (struct sra_elt *elt)
{
  dump_sra_elt_name (stderr, elt);
  fputc ('\n', stderr);
}

/* Allocate the shared type decomposition and instantiation caches, if
   they have not been created yet.  */

void
sra_init_cache (void)
{
  if (sra_type_decomp_cache)
    return;

  sra_type_decomp_cache = BITMAP_ALLOC (NULL);
  sra_type_inst_cache = BITMAP_ALLOC (NULL);
}

/* Main entry point.  */

static unsigned int
tree_sra (void)
{
  /* Initialize local variables.  */
  todoflags = 0;
  gcc_obstack_init (&sra_obstack);
  sra_candidates = BITMAP_ALLOC (NULL);
  needs_copy_in = BITMAP_ALLOC (NULL);
  sra_init_cache ();
  sra_map = htab_create (101, sra_elt_hash, sra_elt_eq, NULL);

  /* Scan.  If we find anything, instantiate and scalarize.  */
  if (find_candidates_for_sra ())
    {
      scan_function ();
      decide_instantiations ();
      scalarize_function ();
    }

  /* Free allocated memory.  */
  htab_delete (sra_map);
  sra_map = NULL;
  BITMAP_FREE (sra_candidates);
  BITMAP_FREE (needs_copy_in);
  BITMAP_FREE (sra_type_decomp_cache);
  BITMAP_FREE (sra_type_inst_cache);
  obstack_free (&sra_obstack, NULL);
  return todoflags;
}

/* Gate: run the pass only when -ftree-sra is enabled.  */

static bool
gate_sra (void)
{
  return flag_tree_sra != 0;
}

struct tree_opt_pass pass_sra =
{
  "sra",				/* name */
  gate_sra,				/* gate */
  tree_sra,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_SRA,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
  | TODO_update_ssa
  | TODO_ggc_collect
  | TODO_verify_ssa,			/* todo_flags_finish */
  0					/* letter */
};