/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "ggc.h"
#include "tree.h"

/* These RTL headers are needed for basic-block.h.  */
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "langhooks.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "tree-gimple.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "timevar.h"
#include "flags.h"
#include "bitmap.h"
#include "obstack.h"
#include "target.h"
/* expr.h is needed for MOVE_RATIO.  */
#include "expr.h"
#include "params.h"
/* The objective of this pass is to replace a non-addressable aggregate with a
   set of independent variables.  Most of the time, all of these variables
   will be scalars.  But a secondary objective is to break up larger
   aggregates into smaller aggregates.  In the process we may find that some
   bits of the larger aggregate can be deleted as unreferenced.

   This substitution is done globally.  More localized substitutions would
   be the purview of a load-store motion pass.

   The optimization proceeds in phases:

     (1) Identify variables that have types that are candidates for
	 decomposition.

     (2) Scan the function looking for the ways these variables are used.
	 In particular we're interested in the number of times a variable
	 (or member) is needed as a complete unit, and the number of times
	 a variable (or member) is copied.

     (3) Based on the usage profile, instantiate substitution variables.

     (4) Scan the function making replacements.
*/
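
/* For illustration only (a sketch, not literal compiler output): given

	struct pair { int x; int y; } p;
	p.x = 1;
	p.y = 2;
	return p.x + p.y;

   the pass replaces p.x and p.y with independent scalar temporaries,
   yielding approximately

	SR_x = 1;
	SR_y = 2;
	return SR_x + SR_y;

   after which "p" itself is unreferenced.  The names SR_x and SR_y are
   made up for this sketch; see build_element_name below for how real
   replacement names are formed.  */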

/* True if this is the "early" pass, before inlining.  */
static bool early_sra;

/* The set of todo flags to return from tree_sra.  */
static unsigned int todoflags;

/* The set of aggregate variables that are candidates for scalarization.  */
static bitmap sra_candidates;

/* Set of scalarizable PARM_DECLs that need copy-in operations at the
   beginning of the function.  */
static bitmap needs_copy_in;

/* Sets of bit pairs that cache type decomposition and instantiation.  */
static bitmap sra_type_decomp_cache;
static bitmap sra_type_inst_cache;

/* One of these structures is created for each candidate aggregate and
   each (accessed) member or group of members of such an aggregate.  */
struct sra_elt
{
  /* A tree of the elements.  Used when we want to traverse everything.  */
  struct sra_elt *parent;
  struct sra_elt *groups;
  struct sra_elt *children;
  struct sra_elt *sibling;

  /* If this element is a root, then this is the VAR_DECL.  If this is
     a sub-element, this is some token used to identify the reference.
     In the case of COMPONENT_REF, this is the FIELD_DECL.  In the case
     of an ARRAY_REF, this is the (constant) index.  In the case of an
     ARRAY_RANGE_REF, this is the (constant) RANGE_EXPR.  In the case
     of a complex number, this is a zero or one.  */
  tree element;

  /* The type of the element.  */
  tree type;

  /* A VAR_DECL, for any sub-element we've decided to replace.  */
  tree replacement;

  /* The number of times the element is referenced as a whole.  I.e.
     given "a.b.c", this would be incremented for C, but not for A or B.  */
  unsigned int n_uses;

  /* The number of times the element is copied to or from another
     scalarizable element.  */
  unsigned int n_copies;

  /* True if TYPE is scalar.  */
  bool is_scalar;

  /* True if this element is a group of members of its parent.  */
  bool is_group;

  /* True if we saw something about this element that prevents scalarization,
     such as non-constant indexing.  */
  bool cannot_scalarize;

  /* True if we've decided that structure-to-structure assignment
     should happen via memcpy and not per-element.  */
  bool use_block_copy;

  /* True if everything under this element has been marked TREE_NO_WARNING.  */
  bool all_no_warning;

  /* A flag for use with/after random access traversals.  */
  bool visited;

  /* True if a BIT_FIELD_REF of this element's vector appears on the
     left-hand side of an assignment.  */
  bool is_vector_lhs;
};
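
/* An illustrative sketch of the element tree these fields describe:
   for a candidate declared as

	struct { float r; struct { int a[2]; } s; } x;

   the root sra_elt has ELEMENT == the VAR_DECL for "x", with children
   keyed by the FIELD_DECLs for "r" and "s"; the "s" element in turn
   has children keyed by the INTEGER_CSTs 0 and 1 for the two array
   members.  */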

#define IS_ELEMENT_FOR_GROUP(ELEMENT) (TREE_CODE (ELEMENT) == RANGE_EXPR)

#define FOR_EACH_ACTUAL_CHILD(CHILD, ELT)			\
  for ((CHILD) = (ELT)->is_group				\
		 ? next_child_for_group (NULL, (ELT))		\
		 : (ELT)->children;				\
       (CHILD);							\
       (CHILD) = (ELT)->is_group				\
		 ? next_child_for_group ((CHILD), (ELT))	\
		 : (CHILD)->sibling)

/* Helper function for above macro.  Return next child in group.  */
static struct sra_elt *
next_child_for_group (struct sra_elt *child, struct sra_elt *group)
{
  gcc_assert (group->is_group);

  /* Find the next child in the parent.  */
  if (child)
    child = child->sibling;
  else
    child = group->parent->children;

  /* Skip siblings that do not belong to the group.  */
  while (child)
    {
      tree g_elt = group->element;
      if (TREE_CODE (g_elt) == RANGE_EXPR)
	{
	  if (!tree_int_cst_lt (child->element, TREE_OPERAND (g_elt, 0))
	      && !tree_int_cst_lt (TREE_OPERAND (g_elt, 1), child->element))
	    break;
	}
      else
	gcc_unreachable ();

      child = child->sibling;
    }

  return child;
}

/* Random access to the child of a parent is performed by hashing.
   This prevents quadratic behavior, and allows SRA to function
   reasonably on larger records.  */
static htab_t sra_map;

/* All structures are allocated out of the following obstack.  */
static struct obstack sra_obstack;

/* Debugging functions.  */
static void dump_sra_elt_name (FILE *, struct sra_elt *);
extern void debug_sra_elt_name (struct sra_elt *);

/* Forward declarations.  */
static tree generate_element_ref (struct sra_elt *);

/* Return true if DECL is an SRA candidate.  */

static bool
is_sra_candidate_decl (tree decl)
{
  return DECL_P (decl) && bitmap_bit_p (sra_candidates, DECL_UID (decl));
}

/* Return true if TYPE is a scalar type.  */

static bool
is_sra_scalar_type (tree type)
{
  enum tree_code code = TREE_CODE (type);
  return (code == INTEGER_TYPE || code == REAL_TYPE || code == VECTOR_TYPE
	  || code == ENUMERAL_TYPE || code == BOOLEAN_TYPE
	  || code == POINTER_TYPE || code == OFFSET_TYPE
	  || code == REFERENCE_TYPE);
}

/* Return true if TYPE can be decomposed into a set of independent variables.

   Note that this doesn't imply that all elements of TYPE can be
   instantiated, just that if we decide to break up the type into
   separate pieces that it can be done.  */

bool
sra_type_can_be_decomposed_p (tree type)
{
  unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
  tree t;

  /* Avoid searching the same type twice.  */
  if (bitmap_bit_p (sra_type_decomp_cache, cache+0))
    return true;
  if (bitmap_bit_p (sra_type_decomp_cache, cache+1))
    return false;

  /* The type must have a definite nonzero size.  */
  if (TYPE_SIZE (type) == NULL || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
      || integer_zerop (TYPE_SIZE (type)))
    goto fail;

  /* The type must be a non-union aggregate.  */
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
	bool saw_one_field = false;

	for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
	  if (TREE_CODE (t) == FIELD_DECL)
	    {
	      /* Reject incorrectly represented bit fields.  */
	      if (DECL_BIT_FIELD (t)
		  && (tree_low_cst (DECL_SIZE (t), 1)
		      != TYPE_PRECISION (TREE_TYPE (t))))
		goto fail;

	      saw_one_field = true;
	    }

	/* Record types must have at least one field.  */
	if (!saw_one_field)
	  goto fail;
      }
      break;

    case ARRAY_TYPE:
      /* Array types must have a fixed lower and upper bound.  */
      t = TYPE_DOMAIN (type);
      if (t == NULL)
	goto fail;
      if (TYPE_MIN_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MIN_VALUE (t)))
	goto fail;
      if (TYPE_MAX_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MAX_VALUE (t)))
	goto fail;
      break;

    case COMPLEX_TYPE:
      break;

    default:
      goto fail;
    }

  bitmap_set_bit (sra_type_decomp_cache, cache+0);
  return true;

 fail:
  bitmap_set_bit (sra_type_decomp_cache, cache+1);
  return false;
}
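
/* By way of example: a RECORD_TYPE such as "struct { int a; float b; }"
   is decomposable, while a UNION_TYPE, a zero-sized record, or an array
   with a non-constant bound (e.g. a C99 variable-length array) takes
   the "fail" path above.  */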

/* Return true if DECL can be decomposed into a set of independent
   (though not necessarily scalar) variables.  */

static bool
decl_can_be_decomposed_p (tree var)
{
  /* Early out for scalars.  */
  if (is_sra_scalar_type (TREE_TYPE (var)))
    return false;

  /* The variable must not be aliased.  */
  if (!is_gimple_non_addressable (var))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Cannot scalarize variable ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, " because it must live in memory\n");
	}
      return false;
    }

  /* The variable must not be volatile.  */
  if (TREE_THIS_VOLATILE (var))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Cannot scalarize variable ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, " because it is declared volatile\n");
	}
      return false;
    }

  /* We must be able to decompose the variable's type.  */
  if (!sra_type_can_be_decomposed_p (TREE_TYPE (var)))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Cannot scalarize variable ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, " because its type cannot be decomposed\n");
	}
      return false;
    }

  /* HACK: if we decompose a va_list_type_node before inlining, then we'll
     confuse tree-stdarg.c, and we won't be able to figure out which and
     how many arguments are accessed.  This really should be improved in
     tree-stdarg.c, as the decomposition is truly a win.  This could also
     be fixed if the stdarg pass ran early, but this can't be done until
     we have aliasing information early too.  See PR 30791.  */
  if (early_sra
      && TYPE_MAIN_VARIANT (TREE_TYPE (var))
	 == TYPE_MAIN_VARIANT (va_list_type_node))
    return false;

  return true;
}

/* Return true if TYPE can be *completely* decomposed into scalars.  */

static bool
type_can_instantiate_all_elements (tree type)
{
  if (is_sra_scalar_type (type))
    return true;
  if (!sra_type_can_be_decomposed_p (type))
    return false;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
	unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
	tree f;

	if (bitmap_bit_p (sra_type_inst_cache, cache+0))
	  return true;
	if (bitmap_bit_p (sra_type_inst_cache, cache+1))
	  return false;

	for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      if (!type_can_instantiate_all_elements (TREE_TYPE (f)))
		{
		  bitmap_set_bit (sra_type_inst_cache, cache+1);
		  return false;
		}
	    }

	bitmap_set_bit (sra_type_inst_cache, cache+0);
	return true;
      }

    case ARRAY_TYPE:
      return type_can_instantiate_all_elements (TREE_TYPE (type));

    case COMPLEX_TYPE:
      return true;

    default:
      gcc_unreachable ();
    }
}

/* Test whether ELT or some sub-element cannot be scalarized.  */

static bool
can_completely_scalarize_p (struct sra_elt *elt)
{
  struct sra_elt *c;

  if (elt->cannot_scalarize)
    return false;

  for (c = elt->children; c; c = c->sibling)
    if (!can_completely_scalarize_p (c))
      return false;

  for (c = elt->groups; c; c = c->sibling)
    if (!can_completely_scalarize_p (c))
      return false;

  return true;
}

/* A simplified tree hashing algorithm that only handles the types of
   trees we expect to find in sra_elt->element.  */

static hashval_t
sra_hash_tree (tree t)
{
  hashval_t h;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      h = DECL_UID (t);
      break;

    case INTEGER_CST:
      h = TREE_INT_CST_LOW (t) ^ TREE_INT_CST_HIGH (t);
      break;

    case RANGE_EXPR:
      h = iterative_hash_expr (TREE_OPERAND (t, 0), 0);
      h = iterative_hash_expr (TREE_OPERAND (t, 1), h);
      break;

    case FIELD_DECL:
      /* We can have types that are compatible, but have different member
	 lists, so we can't hash fields by ID.  Use offsets instead.  */
      h = iterative_hash_expr (DECL_FIELD_OFFSET (t), 0);
      h = iterative_hash_expr (DECL_FIELD_BIT_OFFSET (t), h);
      break;

    default:
      gcc_unreachable ();
    }

  return h;
}

/* Hash function for type SRA_PAIR.  */

static hashval_t
sra_elt_hash (const void *x)
{
  const struct sra_elt *e = x;
  const struct sra_elt *p;
  hashval_t h;

  h = sra_hash_tree (e->element);

  /* Take into account everything back up the chain.  Given that chain
     lengths are rarely very long, this should be acceptable.  If we
     truly identify this as a performance problem, it should work to
     hash the pointer value "e->parent".  */
  for (p = e->parent; p ; p = p->parent)
    h = (h * 65521) ^ sra_hash_tree (p->element);

  return h;
}

/* Equality function for type SRA_PAIR.  */

static int
sra_elt_eq (const void *x, const void *y)
{
  const struct sra_elt *a = x;
  const struct sra_elt *b = y;
  tree ae, be;

  if (a->parent != b->parent)
    return false;

  ae = a->element;
  be = b->element;

  if (ae == be)
    return true;
  if (TREE_CODE (ae) != TREE_CODE (be))
    return false;

  switch (TREE_CODE (ae))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* These are all pointer unique.  */
      return false;

    case INTEGER_CST:
      /* Integers are not pointer unique, so compare their values.  */
      return tree_int_cst_equal (ae, be);

    case RANGE_EXPR:
      return
	tree_int_cst_equal (TREE_OPERAND (ae, 0), TREE_OPERAND (be, 0))
	&& tree_int_cst_equal (TREE_OPERAND (ae, 1), TREE_OPERAND (be, 1));

    case FIELD_DECL:
      /* Fields are unique within a record, but not between
	 compatible records.  */
      if (DECL_FIELD_CONTEXT (ae) == DECL_FIELD_CONTEXT (be))
	return false;
      return fields_compatible_p (ae, be);

    default:
      gcc_unreachable ();
    }
}

/* Create or return the SRA_ELT structure for CHILD in PARENT.  PARENT
   may be null, in which case CHILD must be a DECL.  */

static struct sra_elt *
lookup_element (struct sra_elt *parent, tree child, tree type,
		enum insert_option insert)
{
  struct sra_elt dummy;
  struct sra_elt **slot;
  struct sra_elt *elt;

  if (parent)
    dummy.parent = parent->is_group ? parent->parent : parent;
  else
    dummy.parent = NULL;
  dummy.element = child;

  slot = (struct sra_elt **) htab_find_slot (sra_map, &dummy, insert);
  if (!slot && insert == NO_INSERT)
    return NULL;

  elt = *slot;
  if (!elt && insert == INSERT)
    {
      *slot = elt = obstack_alloc (&sra_obstack, sizeof (*elt));
      memset (elt, 0, sizeof (*elt));

      elt->parent = parent;
      elt->element = child;
      elt->type = type;
      elt->is_scalar = is_sra_scalar_type (type);

      if (parent)
	{
	  if (IS_ELEMENT_FOR_GROUP (elt->element))
	    {
	      elt->is_group = true;
	      elt->sibling = parent->groups;
	      parent->groups = elt;
	    }
	  else
	    {
	      elt->sibling = parent->children;
	      parent->children = elt;
	    }
	}

      /* If this is a parameter, then if we want to scalarize, we have
	 one copy from the true function parameter.  Count it now.  */
      if (TREE_CODE (child) == PARM_DECL)
	{
	  elt->n_copies = 1;
	  bitmap_set_bit (needs_copy_in, DECL_UID (child));
	}
    }

  return elt;
}

/* Create or return the SRA_ELT structure for EXPR if the expression
   refers to a scalarizable variable.  */

static struct sra_elt *
maybe_lookup_element_for_expr (tree expr)
{
  struct sra_elt *elt;
  tree child;

  switch (TREE_CODE (expr))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      if (is_sra_candidate_decl (expr))
	return lookup_element (NULL, expr, TREE_TYPE (expr), INSERT);
      return NULL;

    case ARRAY_REF:
      /* We can't scalarize variable array indices.  */
      if (in_array_bounds_p (expr))
	child = TREE_OPERAND (expr, 1);
      else
	return NULL;
      break;

    case ARRAY_RANGE_REF:
      /* We can't scalarize variable array indices.  */
      if (range_in_array_bounds_p (expr))
	{
	  tree domain = TYPE_DOMAIN (TREE_TYPE (expr));
	  child = build2 (RANGE_EXPR, integer_type_node,
			  TYPE_MIN_VALUE (domain), TYPE_MAX_VALUE (domain));
	}
      else
	return NULL;
      break;

    case COMPONENT_REF:
      /* Don't look through unions.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) != RECORD_TYPE)
	return NULL;
      child = TREE_OPERAND (expr, 1);
      break;

    case REALPART_EXPR:
      child = integer_zero_node;
      break;
    case IMAGPART_EXPR:
      child = integer_one_node;
      break;

    default:
      return NULL;
    }

  elt = maybe_lookup_element_for_expr (TREE_OPERAND (expr, 0));
  if (elt)
    return lookup_element (elt, child, TREE_TYPE (expr), INSERT);
  return NULL;
}
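
/* For example (a sketch): for a candidate "x" and the expression
   "x.s.a[1]", the recursion bottoms out at the VAR_DECL for "x" and
   then, unwinding, looks up elements keyed by the FIELD_DECLs for "s"
   and "a" and the INTEGER_CST 1, returning the element for the
   innermost reference.  */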

/* Functions to walk just enough of the tree to see all scalarizable
   references, and categorize them.  */

/* A set of callbacks for phases 2 and 4.  They'll be invoked for the
   various kinds of references seen.  In all cases, *BSI is an iterator
   pointing to the statement being processed.  */
struct sra_walk_fns
{
  /* Invoked when ELT is required as a unit.  Note that ELT might refer to
     a leaf node, in which case this is a simple scalar reference.  *EXPR_P
     points to the location of the expression.  IS_OUTPUT is true if this
     is a left-hand-side reference.  USE_ALL is true if we saw something we
     couldn't quite identify and had to force the use of the entire object.  */
  void (*use) (struct sra_elt *elt, tree *expr_p,
	       block_stmt_iterator *bsi, bool is_output, bool use_all);

  /* Invoked when we have a copy between two scalarizable references.  */
  void (*copy) (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
		block_stmt_iterator *bsi);

  /* Invoked when ELT is initialized from a constant.  VALUE may be NULL,
     in which case it should be treated as an empty CONSTRUCTOR.  */
  void (*init) (struct sra_elt *elt, tree value, block_stmt_iterator *bsi);

  /* Invoked when we have a copy between one scalarizable reference ELT
     and one non-scalarizable reference OTHER.  IS_OUTPUT is true if ELT
     is on the left-hand side.  */
  void (*ldst) (struct sra_elt *elt, tree other,
		block_stmt_iterator *bsi, bool is_output);

  /* True during phase 2, false during phase 4.  */
  /* ??? This is a hack.  */
  bool initial_scan;
};
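
/* A sketch of how these callbacks map onto statements, for a candidate
   "a" of structure type with scalar field "f" (all names illustrative):

	a.f = x;	=> USE (with IS_OUTPUT true)
	a = b;		=> COPY (both sides scalarizable)
	a = {};		=> INIT (VALUE an empty CONSTRUCTOR)
	a = *p;		=> LDST (with IS_OUTPUT true, OTHER == *p)  */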

#ifdef ENABLE_CHECKING
/* Invoked via walk_tree, if *TP contains a candidate decl, return it.  */

static tree
sra_find_candidate_decl (tree *tp, int *walk_subtrees,
			 void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
    {
      *walk_subtrees = 0;
      if (is_sra_candidate_decl (t))
	return t;
    }
  else if (TYPE_P (t))
    *walk_subtrees = 0;

  return NULL;
}
#endif

/* Walk most expressions looking for a scalarizable aggregate.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_expr (tree *expr_p, block_stmt_iterator *bsi, bool is_output,
	       const struct sra_walk_fns *fns)
{
  tree expr = *expr_p;
  tree inner = expr;
  bool disable_scalarization = false;
  bool use_all_p = false;

  /* We're looking to collect a reference expression between EXPR and INNER,
     such that INNER is a scalarizable decl and all other nodes through EXPR
     are references that we can scalarize.  If we come across something that
     we can't scalarize, we reset EXPR.  This has the effect of making it
     appear that we're referring to the larger expression as a whole.  */

  while (1)
    switch (TREE_CODE (inner))
      {
      case VAR_DECL:
      case PARM_DECL:
      case RESULT_DECL:
	/* If there is a scalarizable decl at the bottom, then process it.  */
	if (is_sra_candidate_decl (inner))
	  {
	    struct sra_elt *elt = maybe_lookup_element_for_expr (expr);
	    if (disable_scalarization)
	      elt->cannot_scalarize = true;
	    else
	      fns->use (elt, expr_p, bsi, is_output, use_all_p);
	  }
	return;

      case ARRAY_REF:
	/* Non-constant index means any member may be accessed.  Prevent the
	   expression from being scalarized.  If we were to treat this as a
	   reference to the whole array, we can wind up with a single dynamic
	   index reference inside a loop being overridden by several constant
	   index references during loop setup.  It's possible that this could
	   be avoided by using dynamic usage counts based on BB trip counts
	   (based on loop analysis or profiling), but that hardly seems worth
	   the effort.  */
	/* ??? Hack.  Figure out how to push this into the scan routines
	   without duplicating too much code.  */
	if (!in_array_bounds_p (inner))
	  {
	    disable_scalarization = true;
	    goto use_all;
	  }
	/* ??? Are we assured that non-constant bounds and stride will have
	   the same value everywhere?  I don't think Fortran will...  */
	if (TREE_OPERAND (inner, 2) || TREE_OPERAND (inner, 3))
	  goto use_all;
	inner = TREE_OPERAND (inner, 0);
	break;

      case ARRAY_RANGE_REF:
	if (!range_in_array_bounds_p (inner))
	  {
	    disable_scalarization = true;
	    goto use_all;
	  }
	/* ??? See above re non-constant bounds and stride.  */
	if (TREE_OPERAND (inner, 2) || TREE_OPERAND (inner, 3))
	  goto use_all;
	inner = TREE_OPERAND (inner, 0);
	break;

      case COMPONENT_REF:
	/* A reference to a union member constitutes a reference to the
	   entire union.  */
	if (TREE_CODE (TREE_TYPE (TREE_OPERAND (inner, 0))) != RECORD_TYPE)
	  goto use_all;
	/* ??? See above re non-constant stride.  */
	if (TREE_OPERAND (inner, 2))
	  goto use_all;
	inner = TREE_OPERAND (inner, 0);
	break;

      case REALPART_EXPR:
      case IMAGPART_EXPR:
	inner = TREE_OPERAND (inner, 0);
	break;

      case BIT_FIELD_REF:
	/* A bit field reference to a vector is scalarized, but ones that
	   appear on the left-hand side must be noted, so that when we
	   scalarize the vector we can mark the replacement variable as
	   not renamable.  */
	if (is_output
	    && TREE_CODE (TREE_TYPE (TREE_OPERAND (inner, 0))) == VECTOR_TYPE)
	  {
	    struct sra_elt *elt
	      = maybe_lookup_element_for_expr (TREE_OPERAND (inner, 0));
	    if (elt)
	      elt->is_vector_lhs = true;
	  }
	/* A bit field reference (access to *multiple* fields simultaneously)
	   is not currently scalarized.  Consider this an access to the
	   complete outer element, to which walk_tree will bring us next.  */
	goto use_all;

      case VIEW_CONVERT_EXPR:
      case NOP_EXPR:
	/* Similarly, a view/nop explicitly wants to look at an object in a
	   type other than the one we've scalarized.  */
	goto use_all;

      case WITH_SIZE_EXPR:
	/* This is a transparent wrapper.  The entire inner expression really
	   is being used.  */
	goto use_all;

      use_all:
	expr_p = &TREE_OPERAND (inner, 0);
	inner = expr = *expr_p;
	use_all_p = true;
	break;

      default:
#ifdef ENABLE_CHECKING
	/* Validate that we're not missing any references.  */
	gcc_assert (!walk_tree (&inner, sra_find_candidate_decl, NULL, NULL));
#endif
	return;
      }
}

/* Walk a TREE_LIST of values looking for scalarizable aggregates.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_tree_list (tree list, block_stmt_iterator *bsi, bool is_output,
		    const struct sra_walk_fns *fns)
{
  tree op;
  for (op = list; op ; op = TREE_CHAIN (op))
    sra_walk_expr (&TREE_VALUE (op), bsi, is_output, fns);
}

/* Walk the arguments of a CALL_EXPR looking for scalarizable aggregates.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_call_expr (tree expr, block_stmt_iterator *bsi,
		    const struct sra_walk_fns *fns)
{
  int i;
  int nargs = call_expr_nargs (expr);
  for (i = 0; i < nargs; i++)
    sra_walk_expr (&CALL_EXPR_ARG (expr, i), bsi, false, fns);
}

/* Walk the inputs and outputs of an ASM_EXPR looking for scalarizable
   aggregates.  If we find one, invoke FNS->USE.  */

static void
sra_walk_asm_expr (tree expr, block_stmt_iterator *bsi,
		   const struct sra_walk_fns *fns)
{
  sra_walk_tree_list (ASM_INPUTS (expr), bsi, false, fns);
  sra_walk_tree_list (ASM_OUTPUTS (expr), bsi, true, fns);
}

/* Walk a GIMPLE_MODIFY_STMT and categorize the assignment appropriately.  */

static void
sra_walk_gimple_modify_stmt (tree expr, block_stmt_iterator *bsi,
			     const struct sra_walk_fns *fns)
{
  struct sra_elt *lhs_elt, *rhs_elt;
  tree lhs, rhs;

  lhs = GIMPLE_STMT_OPERAND (expr, 0);
  rhs = GIMPLE_STMT_OPERAND (expr, 1);
  lhs_elt = maybe_lookup_element_for_expr (lhs);
  rhs_elt = maybe_lookup_element_for_expr (rhs);

  /* If both sides are scalarizable, this is a COPY operation.  */
  if (lhs_elt && rhs_elt)
    {
      fns->copy (lhs_elt, rhs_elt, bsi);
      return;
    }

  /* If the RHS is scalarizable, handle it.  There are only two cases.  */
  if (rhs_elt)
    {
      if (!rhs_elt->is_scalar)
	fns->ldst (rhs_elt, lhs, bsi, false);
      else
	fns->use (rhs_elt, &GIMPLE_STMT_OPERAND (expr, 1), bsi, false, false);
    }

  /* If it isn't scalarizable, there may be scalarizable variables within, so
     check for a call or else walk the RHS to see if we need to do any
     copy-in operations.  We need to do it before the LHS is scalarized so
     that the statements get inserted in the proper place, before any
     copy-out operations.  */
  else
    {
      tree call = get_call_expr_in (rhs);
      if (call)
	sra_walk_call_expr (call, bsi, fns);
      else
	sra_walk_expr (&GIMPLE_STMT_OPERAND (expr, 1), bsi, false, fns);
    }

  /* Likewise, handle the LHS being scalarizable.  We have cases similar
     to those above, but also want to handle RHS being constant.  */
  if (lhs_elt)
    {
      /* If this is an assignment from a constant, or constructor, then
	 we have access to all of the elements individually.  Invoke INIT.  */
      if (TREE_CODE (rhs) == COMPLEX_EXPR
	  || TREE_CODE (rhs) == COMPLEX_CST
	  || TREE_CODE (rhs) == CONSTRUCTOR)
	fns->init (lhs_elt, rhs, bsi);

      /* If this is an assignment from read-only memory, treat this as if
	 we'd been passed the constructor directly.  Invoke INIT.  */
      else if (TREE_CODE (rhs) == VAR_DECL
	       && TREE_STATIC (rhs)
	       && TREE_READONLY (rhs)
	       && targetm.binds_local_p (rhs))
	fns->init (lhs_elt, DECL_INITIAL (rhs), bsi);

      /* If this is a copy from a non-scalarizable lvalue, invoke LDST.
	 The lvalue requirement prevents us from trying to directly scalarize
	 the result of a function call.  Which would result in trying to call
	 the function multiple times, and other evil things.  */
      else if (!lhs_elt->is_scalar && is_gimple_addressable (rhs))
	fns->ldst (lhs_elt, rhs, bsi, true);

      /* Otherwise we're being used in some context that requires the
	 aggregate to be seen as a whole.  Invoke USE.  */
      else
	fns->use (lhs_elt, &GIMPLE_STMT_OPERAND (expr, 0), bsi, true, false);
    }

  /* Similarly to above, LHS_ELT being null only means that the LHS as a
     whole is not a scalarizable reference.  There may be occurrences of
     scalarizable variables within, which implies a USE.  */
  else
    sra_walk_expr (&GIMPLE_STMT_OPERAND (expr, 0), bsi, true, fns);
}

/* Entry point to the walk functions.  Search the entire function,
   invoking the callbacks in FNS on each of the references to
   scalarizable variables.  */

static void
sra_walk_function (const struct sra_walk_fns *fns)
{
  basic_block bb;
  block_stmt_iterator si, ni;

  /* ??? Phase 4 could derive some benefit to walking the function in
     dominator tree order.  */

  FOR_EACH_BB (bb)
    for (si = bsi_start (bb); !bsi_end_p (si); si = ni)
      {
	tree stmt, t;
	stmt_ann_t ann;

	stmt = bsi_stmt (si);
	ann = stmt_ann (stmt);

	ni = si;
	bsi_next (&ni);

	/* If the statement has no virtual operands, then it doesn't
	   make any structure references that we care about.  */
	if (gimple_aliases_computed_p (cfun)
	    && ZERO_SSA_OPERANDS (stmt, (SSA_OP_VIRTUAL_DEFS | SSA_OP_VUSE)))
	  continue;

	switch (TREE_CODE (stmt))
	  {
	  case RETURN_EXPR:
	    /* If we have "return <retval>" then the return value is
	       already exposed for our pleasure.  Walk it as a USE to
	       force all the components back in place for the return.

	       If we have an embedded assignment, then <retval> is of
	       a type that gets returned in registers in this ABI, and
	       we do not wish to extend their lifetimes.  Treat this
	       as a USE of the variable on the RHS of this assignment.  */

	    t = TREE_OPERAND (stmt, 0);
	    if (t == NULL_TREE)
	      ;
	    else if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
	      sra_walk_expr (&GIMPLE_STMT_OPERAND (t, 1), &si, false, fns);
	    else
	      sra_walk_expr (&TREE_OPERAND (stmt, 0), &si, false, fns);
	    break;

	  case GIMPLE_MODIFY_STMT:
	    sra_walk_gimple_modify_stmt (stmt, &si, fns);
	    break;
	  case CALL_EXPR:
	    sra_walk_call_expr (stmt, &si, fns);
	    break;
	  case ASM_EXPR:
	    sra_walk_asm_expr (stmt, &si, fns);
	    break;

	  default:
	    break;
	  }
      }
}

/* Phase One: Scan all referenced variables in the program looking for
   structures that could be decomposed.  */

static bool
find_candidates_for_sra (void)
{
  bool any_set = false;
  tree var;
  referenced_var_iterator rvi;

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (decl_can_be_decomposed_p (var))
	{
	  bitmap_set_bit (sra_candidates, DECL_UID (var));
	  any_set = true;
	}
    }

  return any_set;
}

/* Phase Two: Scan all references to scalarizable variables.  Count the
   number of times they are used or copied respectively.  */

/* Callbacks to fill in SRA_WALK_FNS.  Everything but USE is
   considered a copy, because we can decompose the reference such that
   the sub-elements needn't be contiguous.  */

static void
scan_use (struct sra_elt *elt, tree *expr_p ATTRIBUTE_UNUSED,
	  block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
	  bool is_output ATTRIBUTE_UNUSED, bool use_all ATTRIBUTE_UNUSED)
{
  elt->n_uses += 1;
}

static void
scan_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
	   block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
{
  lhs_elt->n_copies += 1;
  rhs_elt->n_copies += 1;
}

static void
scan_init (struct sra_elt *lhs_elt, tree rhs ATTRIBUTE_UNUSED,
	   block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
{
  lhs_elt->n_copies += 1;
}

static void
scan_ldst (struct sra_elt *elt, tree other ATTRIBUTE_UNUSED,
	   block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
	   bool is_output ATTRIBUTE_UNUSED)
{
  elt->n_copies += 1;
}
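
/* To illustrate the counting (a sketch): for

	a.f = 1;	=> n_uses of the a.f element += 1
	b = a;		=> n_copies of both a and b += 1
	return a;	=> n_uses of a += 1

   and recall from lookup_element that each PARM_DECL starts with
   n_copies == 1 to account for its incoming copy.  */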

/* Dump the values we collected during the scanning phase.  */

static void
scan_dump (struct sra_elt *elt)
{
  struct sra_elt *c;

  dump_sra_elt_name (dump_file, elt);
  fprintf (dump_file, ": n_uses=%u n_copies=%u\n", elt->n_uses, elt->n_copies);

  for (c = elt->children; c ; c = c->sibling)
    scan_dump (c);

  for (c = elt->groups; c ; c = c->sibling)
    scan_dump (c);
}

/* Entry point to phase 2.  Scan the entire function, building up
   scalarization data structures, recording copies and uses.  */

static void
scan_function (void)
{
  static const struct sra_walk_fns fns = {
    scan_use, scan_copy, scan_init, scan_ldst, true
  };
  bitmap_iterator bi;

  sra_walk_function (&fns);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      unsigned i;

      fputs ("\nScan results:\n", dump_file);
      EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
	{
	  tree var = referenced_var (i);
	  struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
	  if (elt)
	    scan_dump (elt);
	}
      fputc ('\n', dump_file);
    }
}

/* Phase Three: Make decisions about which variables to scalarize, if any.
   All elements to be scalarized have replacement variables made for them.  */

/* A subroutine of build_element_name.  Recursively build the element
   name on the obstack.  */

static void
build_element_name_1 (struct sra_elt *elt)
{
  tree t;
  char buffer[32];

  if (elt->parent)
    {
      build_element_name_1 (elt->parent);
      obstack_1grow (&sra_obstack, '$');

      if (TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
	{
	  if (elt->element == integer_zero_node)
	    obstack_grow (&sra_obstack, "real", 4);
	  else
	    obstack_grow (&sra_obstack, "imag", 4);
	  return;
	}
    }

  t = elt->element;
  if (TREE_CODE (t) == INTEGER_CST)
    {
      /* ??? Eh.  Don't bother doing double-wide printing.  */
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (t));
      obstack_grow (&sra_obstack, buffer, strlen (buffer));
    }
  else
    {
      tree name = DECL_NAME (t);
      if (name)
	obstack_grow (&sra_obstack, IDENTIFIER_POINTER (name),
		      IDENTIFIER_LENGTH (name));
      else
	{
	  sprintf (buffer, "D%u", DECL_UID (t));
	  obstack_grow (&sra_obstack, buffer, strlen (buffer));
	}
    }
}

/* Construct a pretty variable name for an element's replacement variable.
   The name is built on the obstack.  */

static char *
build_element_name (struct sra_elt *elt)
{
  build_element_name_1 (elt);
  obstack_1grow (&sra_obstack, '\0');
  return XOBFINISH (&sra_obstack, char *);
}
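
/* For example (names illustrative): the replacement for field "b" of
   member "a" of variable "x" is named "x$a$b"; the real part of a
   complex variable "c" becomes "c$real"; array members use their index
   digits, and unnamed sub-elements fall back to "D<uid>".  */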

/* Instantiate an element as an independent variable.  */

static void
instantiate_element (struct sra_elt *elt)
{
  struct sra_elt *base_elt;
  tree var, base;

  for (base_elt = elt; base_elt->parent; base_elt = base_elt->parent)
    continue;
  base = base_elt->element;

  elt->replacement = var = make_rename_temp (elt->type, "SR");

  /* For vectors, if used on the left hand side with BIT_FIELD_REF,
     they are not a gimple register.  */
  if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE && elt->is_vector_lhs)
    DECL_GIMPLE_REG_P (var) = 0;

  DECL_SOURCE_LOCATION (var) = DECL_SOURCE_LOCATION (base);
  DECL_ARTIFICIAL (var) = 1;

  if (TREE_THIS_VOLATILE (elt->type))
    {
      TREE_THIS_VOLATILE (var) = 1;
      TREE_SIDE_EFFECTS (var) = 1;
    }

  if (DECL_NAME (base) && !DECL_IGNORED_P (base))
    {
      char *pretty_name = build_element_name (elt);
      DECL_NAME (var) = get_identifier (pretty_name);
      obstack_free (&sra_obstack, pretty_name);

      SET_DECL_DEBUG_EXPR (var, generate_element_ref (elt));
      DECL_DEBUG_EXPR_IS_FROM (var) = 1;

      DECL_IGNORED_P (var) = 0;
      TREE_NO_WARNING (var) = TREE_NO_WARNING (base);
    }
  else
    {
      DECL_IGNORED_P (var) = 1;
      /* ??? We can't generate any warning that would be meaningful.  */
      TREE_NO_WARNING (var) = 1;
    }

  if (dump_file)
    {
      fputs ("  ", dump_file);
      dump_sra_elt_name (dump_file, elt);
      fputs (" -> ", dump_file);
      print_generic_expr (dump_file, var, dump_flags);
      fputc ('\n', dump_file);
    }
}

/* Make one pass across an element tree deciding whether or not it's
   profitable to instantiate individual leaf scalars.

   PARENT_USES and PARENT_COPIES are the sum of the N_USES and N_COPIES
   fields all the way up the tree.  */

static void
decide_instantiation_1 (struct sra_elt *elt, unsigned int parent_uses,
			unsigned int parent_copies)
{
  if (dump_file && !elt->parent)
    {
      fputs ("Initial instantiation for ", dump_file);
      dump_sra_elt_name (dump_file, elt);
      fputc ('\n', dump_file);
    }

  if (elt->cannot_scalarize)
    return;

  if (elt->is_scalar)
    {
      /* The decision is simple: instantiate if we're used more frequently
	 than the parent needs to be seen as a complete unit.  */
      if (elt->n_uses + elt->n_copies + parent_copies > parent_uses)
	instantiate_element (elt);
    }
  else
    {
      struct sra_elt *c, *group;
      unsigned int this_uses = elt->n_uses + parent_uses;
      unsigned int this_copies = elt->n_copies + parent_copies;

      /* Consider groups of sub-elements as weighing in favour of
	 instantiation whatever their size.  */
      for (group = elt->groups; group ; group = group->sibling)
	FOR_EACH_ACTUAL_CHILD (c, group)
	  {
	    c->n_uses += group->n_uses;
	    c->n_copies += group->n_copies;
	  }

      for (c = elt->children; c ; c = c->sibling)
	decide_instantiation_1 (c, this_uses, this_copies);
    }
}
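
/* A worked example of the test above (a sketch): if "a.f" has
   n_uses == 2 and n_copies == 0 while "a" itself was seen whole once
   (parent_uses == 1, parent_copies == 0), then 2 + 0 + 0 > 1 holds and
   "a.f" gets a replacement.  Had "a" been used whole three times,
   2 > 3 fails and "a.f" is left in place.  */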

/* Compute the size and number of all instantiated elements below ELT.
   We will only care about this if the size of the complete structure
   fits in a HOST_WIDE_INT, so we don't have to worry about overflow.  */

static unsigned int
sum_instantiated_sizes (struct sra_elt *elt, unsigned HOST_WIDE_INT *sizep)
{
  if (elt->replacement)
    {
      *sizep += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (elt->type));
      return 1;
    }
  else
    {
      struct sra_elt *c;
      unsigned int count = 0;

      for (c = elt->children; c ; c = c->sibling)
	count += sum_instantiated_sizes (c, sizep);

      return count;
    }
}

/* Instantiate fields in ELT->TYPE that are not currently present as
   children of ELT.  */

static void instantiate_missing_elements (struct sra_elt *elt);

static void
instantiate_missing_elements_1 (struct sra_elt *elt, tree child, tree type)
{
  struct sra_elt *sub = lookup_element (elt, child, type, INSERT);
  if (sub->is_scalar)
    {
      if (sub->replacement == NULL)
	instantiate_element (sub);
    }
  else
    instantiate_missing_elements (sub);
}

static void
instantiate_missing_elements (struct sra_elt *elt)
{
  tree type = elt->type;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
	tree f;
	for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      tree field_type = TREE_TYPE (f);

	      /* canonicalize_component_ref() unwidens some bit-field
		 types (not marked as DECL_BIT_FIELD in C++), so we
		 must do the same, lest we may introduce type
		 mismatches.  */
	      if (INTEGRAL_TYPE_P (field_type)
		  && DECL_MODE (f) != TYPE_MODE (field_type))
		field_type = TREE_TYPE (get_unwidened (build3 (COMPONENT_REF,
							       field_type,
							       elt->element,
							       f, NULL_TREE),
						       NULL_TREE));

	      instantiate_missing_elements_1 (elt, f, field_type);
	    }
	break;
      }

    case ARRAY_TYPE:
      {
	tree i, max, subtype;

	i = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	max = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	subtype = TREE_TYPE (type);

	while (1)
	  {
	    instantiate_missing_elements_1 (elt, i, subtype);
	    if (tree_int_cst_equal (i, max))
	      break;
	    i = int_const_binop (PLUS_EXPR, i, integer_one_node, true);
	  }

	break;
      }

    case COMPLEX_TYPE:
      type = TREE_TYPE (type);
      instantiate_missing_elements_1 (elt, integer_zero_node, type);
      instantiate_missing_elements_1 (elt, integer_one_node, type);
      break;

    default:
      gcc_unreachable ();
    }
}

/* Return true if there is only one non-aggregate field in the record, TYPE.
   Return false otherwise.  */

static bool
single_scalar_field_in_record_p (tree type)
{
  int num_fields = 0;
  tree field;
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      {
	num_fields++;

	if (num_fields == 2)
	  return false;

	if (AGGREGATE_TYPE_P (TREE_TYPE (field)))
	  return false;
      }

  return true;
}

/* Make one pass across an element tree deciding whether to perform block
   or element copies.  If we decide on element copies, instantiate all
   elements.  Return true if there are any instantiated sub-elements.  */

static bool
decide_block_copy (struct sra_elt *elt)
{
  struct sra_elt *c;
  bool any_inst;

  /* We shouldn't be invoked on groups of sub-elements as they must
     behave like their parent as far as block copy is concerned.  */
  gcc_assert (!elt->is_group);

  /* If scalarization is disabled, respect it.  */
  if (elt->cannot_scalarize)
    {
      elt->use_block_copy = 1;

      if (dump_file)
	{
	  fputs ("Scalarization disabled for ", dump_file);
	  dump_sra_elt_name (dump_file, elt);
	  fputc ('\n', dump_file);
	}

      /* Disable scalarization of sub-elements.  */
      for (c = elt->children; c; c = c->sibling)
	{
	  c->cannot_scalarize = 1;
	  decide_block_copy (c);
	}

      /* Groups behave like their parent.  */
      for (c = elt->groups; c; c = c->sibling)
	{
	  c->cannot_scalarize = 1;
	  c->use_block_copy = 1;
	}

      return false;
    }

  /* Don't decide if we've no uses.  */
  if (elt->n_uses == 0 && elt->n_copies == 0)
    ;

  else if (!elt->is_scalar)
    {
      tree size_tree = TYPE_SIZE_UNIT (elt->type);
      bool use_block_copy = true;

      /* Tradeoffs for COMPLEX types pretty much always make it better
	 to go ahead and split the components.  */
      if (TREE_CODE (elt->type) == COMPLEX_TYPE)
	use_block_copy = false;

      /* Don't bother trying to figure out the rest if the structure is
	 so large we can't do easy arithmetic.  This also forces block
	 copies for variable sized structures.  */
      else if (host_integerp (size_tree, 1))
	{
	  unsigned HOST_WIDE_INT full_size, inst_size = 0;
	  unsigned int max_size, max_count, inst_count, full_count;

	  /* If the sra-max-structure-size parameter is 0, then the
	     user has not overridden the parameter and we can choose a
	     sensible default.  */
	  max_size = SRA_MAX_STRUCTURE_SIZE
		     ? SRA_MAX_STRUCTURE_SIZE
		     : MOVE_RATIO * UNITS_PER_WORD;
	  max_count = SRA_MAX_STRUCTURE_COUNT
		      ? SRA_MAX_STRUCTURE_COUNT
		      : MOVE_RATIO;

	  full_size = tree_low_cst (size_tree, 1);
	  full_count = count_type_elements (elt->type, false);
	  inst_count = sum_instantiated_sizes (elt, &inst_size);

	  /* If there is only one scalar field in the record, don't block copy.  */
	  if (single_scalar_field_in_record_p (elt->type))
	    use_block_copy = false;

	  /* ??? What to do here.  If there are two fields, and we've only
	     instantiated one, then instantiating the other is clearly a win.
	     If there are a large number of fields then the size of the copy
	     is much more of a factor.  */

	  /* If the structure is small, and we've made copies, go ahead
	     and instantiate, hoping that the copies will go away.  */
	  if (full_size <= max_size
	      && (full_count - inst_count) <= max_count
	      && elt->n_copies > elt->n_uses)
	    use_block_copy = false;
	  else if (inst_count * 100 >= full_count * SRA_FIELD_STRUCTURE_RATIO
		   && inst_size * 100 >= full_size * SRA_FIELD_STRUCTURE_RATIO)
	    use_block_copy = false;

	  /* In order to avoid block copy, we have to be able to instantiate
	     all elements of the type.  See if this is possible.  */
	  if (!use_block_copy
	      && (!can_completely_scalarize_p (elt)
		  || !type_can_instantiate_all_elements (elt->type)))
	    use_block_copy = true;
	}

      elt->use_block_copy = use_block_copy;

      /* Groups behave like their parent.  */
      for (c = elt->groups; c; c = c->sibling)
	c->use_block_copy = use_block_copy;

      if (dump_file)
	{
	  fprintf (dump_file, "Using %s for ",
		   use_block_copy ? "block-copy" : "element-copy");
	  dump_sra_elt_name (dump_file, elt);
	  fputc ('\n', dump_file);
	}

      if (!use_block_copy)
	{
	  instantiate_missing_elements (elt);
	  return true;
	}
    }

  any_inst = elt->replacement != NULL;

  for (c = elt->children; c ; c = c->sibling)
    any_inst |= decide_block_copy (c);

  return any_inst;
}
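
/* A numeric sketch of the size test above: on a target where
   MOVE_RATIO is 4 and UNITS_PER_WORD is 4, with both params left at
   their 0 defaults, max_size is 16 bytes and max_count is 4.  A
   12-byte record of three int fields that is copied more often than
   it is used whole then satisfies both bounds and is element-copied
   rather than block-copied.  The actual MOVE_RATIO is target
   dependent, so these numbers are illustrative only.  */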

/* Entry point to phase 3.  Instantiate scalar replacement variables.  */

static void
decide_instantiations (void)
{
  unsigned int i;
  bool cleared_any;
  bitmap_head done_head;
  bitmap_iterator bi;

  /* We cannot clear bits from a bitmap we're iterating over,
     so save up all the bits to clear until the end.  */
  bitmap_initialize (&done_head, &bitmap_default_obstack);
  cleared_any = false;

  EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
    {
      tree var = referenced_var (i);
      struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
      if (elt)
	{
	  decide_instantiation_1 (elt, 0, 0);
	  if (!decide_block_copy (elt))
	    elt = NULL;
	}
      if (!elt)
	{
	  bitmap_set_bit (&done_head, i);
	  cleared_any = true;
	}
    }

  if (cleared_any)
    {
      bitmap_and_compl_into (sra_candidates, &done_head);
      bitmap_and_compl_into (needs_copy_in, &done_head);
    }
  bitmap_clear (&done_head);

  if (!bitmap_empty_p (sra_candidates))
    todoflags |= TODO_update_smt_usage;

  mark_set_for_renaming (sra_candidates);

  if (dump_file)
    fputc ('\n', dump_file);
}

/* Phase Four: Update the function to match the replacements created.  */

/* Mark all the variables in VDEF/VUSE operators for STMT for
   renaming.  This becomes necessary when we modify all of a
   non-scalar.  */

static void
mark_all_v_defs_1 (tree stmt)
{
  tree sym;
  ssa_op_iter iter;

  update_stmt_if_modified (stmt);

  FOR_EACH_SSA_TREE_OPERAND (sym, stmt, iter, SSA_OP_ALL_VIRTUALS)
    {
      if (TREE_CODE (sym) == SSA_NAME)
	sym = SSA_NAME_VAR (sym);
      mark_sym_for_renaming (sym);
    }
}

/* Mark all the variables in virtual operands in all the statements in
   LIST for renaming.  */

static void
mark_all_v_defs (tree list)
{
  if (TREE_CODE (list) != STATEMENT_LIST)
    mark_all_v_defs_1 (list);
  else
    {
      tree_stmt_iterator i;
      for (i = tsi_start (list); !tsi_end_p (i); tsi_next (&i))
	mark_all_v_defs_1 (tsi_stmt (i));
    }
}

/* Mark every replacement under ELT with TREE_NO_WARNING.  */

static void
mark_no_warning (struct sra_elt *elt)
{
  if (!elt->all_no_warning)
    {
      if (elt->replacement)
	TREE_NO_WARNING (elt->replacement) = 1;
      else
	{
	  struct sra_elt *c;
	  FOR_EACH_ACTUAL_CHILD (c, elt)
	    mark_no_warning (c);
	}
      elt->all_no_warning = true;
    }
}

/* Build a single level component reference to ELT rooted at BASE.  */

static tree
generate_one_element_ref (struct sra_elt *elt, tree base)
{
  switch (TREE_CODE (TREE_TYPE (base)))
    {
    case RECORD_TYPE:
      {
	tree field = elt->element;

	/* Watch out for compatible records with differing field lists.  */
	if (DECL_FIELD_CONTEXT (field) != TYPE_MAIN_VARIANT (TREE_TYPE (base)))
	  field = find_compatible_field (TREE_TYPE (base), field);

	return build3 (COMPONENT_REF, elt->type, base, field, NULL);
      }

    case ARRAY_TYPE:
      todoflags |= TODO_update_smt_usage;
      if (TREE_CODE (elt->element) == RANGE_EXPR)
	return build4 (ARRAY_RANGE_REF, elt->type, base,
		       TREE_OPERAND (elt->element, 0), NULL, NULL);
      else
	return build4 (ARRAY_REF, elt->type, base, elt->element, NULL, NULL);

    case COMPLEX_TYPE:
      if (elt->element == integer_zero_node)
	return build1 (REALPART_EXPR, elt->type, base);
      else
	return build1 (IMAGPART_EXPR, elt->type, base);

    default:
      gcc_unreachable ();
    }
}

/* Build a full component reference to ELT rooted at its native variable.  */

static tree
generate_element_ref (struct sra_elt *elt)
{
  if (elt->parent)
    return generate_one_element_ref (elt, generate_element_ref (elt->parent));
  else
    return elt->element;
}

/* Create an assignment statement from SRC to DST.  */

static tree
sra_build_assignment (tree dst, tree src)
{
  /* It was hoped that we could perform some type sanity checking
     here, but since front-ends can emit accesses of fields in types
     different from their nominal types and copy structures containing
     them as a whole, we'd have to handle such differences here.
     Since such accesses under different types require compatibility
     anyway, there's little point in making tests and/or adding
     conversions to ensure the types of src and dst are the same.
     So we just assume type differences at this point are ok.  */
  return build2 (GIMPLE_MODIFY_STMT, void_type_node, dst, src);
}

/* Generate a set of assignment statements in *LIST_P to copy all
   instantiated elements under ELT to or from the equivalent structure
   rooted at EXPR.  COPY_OUT controls the direction of the copy, with
   true meaning to copy out of EXPR into ELT.  */

static void
generate_copy_inout (struct sra_elt *elt, bool copy_out, tree expr,
		     tree *list_p)
{
  struct sra_elt *c;
  tree t;

  if (!copy_out && TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == COMPLEX_TYPE)
    {
      tree r, i;

      c = lookup_element (elt, integer_zero_node, NULL, NO_INSERT);
      r = c->replacement;
      c = lookup_element (elt, integer_one_node, NULL, NO_INSERT);
      i = c->replacement;

      t = build2 (COMPLEX_EXPR, elt->type, r, i);
      t = sra_build_assignment (expr, t);
      SSA_NAME_DEF_STMT (expr) = t;
      append_to_statement_list (t, list_p);
    }
  else if (elt->replacement)
    {
      if (copy_out)
	t = sra_build_assignment (elt->replacement, expr);
      else
	t = sra_build_assignment (expr, elt->replacement);
      append_to_statement_list (t, list_p);
    }
  else
    {
      FOR_EACH_ACTUAL_CHILD (c, elt)
	{
	  t = generate_one_element_ref (c, unshare_expr (expr));
	  generate_copy_inout (c, copy_out, t, list_p);
	}
    }
}
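
/* For instance (a sketch): for a fully instantiated parameter
   "struct pair p" with replacements SR_x and SR_y (names made up),
   COPY_OUT true appends approximately

	SR_x = p.x;
	SR_y = p.y;

   while COPY_OUT false appends the mirror-image stores back into "p".  */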

/* Generate a set of assignment statements in *LIST_P to copy all instantiated
   elements under SRC to their counterparts under DST.  There must be a 1-1
   correspondence of instantiated elements.  */

static void
generate_element_copy (struct sra_elt *dst, struct sra_elt *src, tree *list_p)
{
  struct sra_elt *dc, *sc;

  FOR_EACH_ACTUAL_CHILD (dc, dst)
    {
      sc = lookup_element (src, dc->element, NULL, NO_INSERT);
      gcc_assert (sc);
      generate_element_copy (dc, sc, list_p);
    }

  if (dst->replacement)
    {
      tree t;

      gcc_assert (src->replacement);

      t = sra_build_assignment (dst->replacement, src->replacement);
      append_to_statement_list (t, list_p);
    }
}

/* Generate a set of assignment statements in *LIST_P to zero all instantiated
   elements under ELT.  In addition, do not assign to elements that have been
   marked VISITED but do reset the visited flag; this allows easy coordination
   with generate_element_init.  */

static void
generate_element_zero (struct sra_elt *elt, tree *list_p)
{
  struct sra_elt *c;

  if (elt->visited)
    {
      elt->visited = false;
      return;
    }

  FOR_EACH_ACTUAL_CHILD (c, elt)
    generate_element_zero (c, list_p);

  if (elt->replacement)
    {
      tree t;

      gcc_assert (elt->is_scalar);
      t = fold_convert (elt->type, integer_zero_node);

      t = sra_build_assignment (elt->replacement, t);
      append_to_statement_list (t, list_p);
    }
}

/* Generate an assignment VAR = INIT, where INIT may need gimplification.
   Add the result to *LIST_P.  */

static void
generate_one_element_init (tree var, tree init, tree *list_p)
{
  /* The replacement can be almost arbitrarily complex.  Gimplify.  */
  tree stmt = sra_build_assignment (var, init);
  gimplify_and_add (stmt, list_p);
}

/* Generate a set of assignment statements in *LIST_P to set all instantiated
   elements under ELT with the contents of the initializer INIT.  In addition,
   mark all assigned elements VISITED; this allows easy coordination with
   generate_element_zero.  Return false if we found a case we couldn't
   handle.  */

static bool
generate_element_init_1 (struct sra_elt *elt, tree init, tree *list_p)
{
  bool result = true;
  enum tree_code init_code;
  struct sra_elt *sub;
  tree t;
  unsigned HOST_WIDE_INT idx;
  tree value, purpose;

  /* We can be passed DECL_INITIAL of a static variable.  It might have a
     conversion, which we strip off here.  */
  STRIP_USELESS_TYPE_CONVERSION (init);
  init_code = TREE_CODE (init);

  if (elt->is_scalar)
    {
      if (elt->replacement)
	{
	  generate_one_element_init (elt->replacement, init, list_p);
	  elt->visited = true;
	}
      return result;
    }

  switch (init_code)
    {
    case COMPLEX_CST:
    case COMPLEX_EXPR:
      FOR_EACH_ACTUAL_CHILD (sub, elt)
	{
	  if (sub->element == integer_zero_node)
	    t = (init_code == COMPLEX_EXPR
		 ? TREE_OPERAND (init, 0) : TREE_REALPART (init));
	  else
	    t = (init_code == COMPLEX_EXPR
		 ? TREE_OPERAND (init, 1) : TREE_IMAGPART (init));
	  result &= generate_element_init_1 (sub, t, list_p);
	}
      break;

    case CONSTRUCTOR:
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), idx, purpose, value)
	{
	  if (TREE_CODE (purpose) == RANGE_EXPR)
	    {
	      tree lower = TREE_OPERAND (purpose, 0);
	      tree upper = TREE_OPERAND (purpose, 1);

	      while (1)
		{
		  sub = lookup_element (elt, lower, NULL, NO_INSERT);
		  if (sub != NULL)
		    result &= generate_element_init_1 (sub, value, list_p);
		  if (tree_int_cst_equal (lower, upper))
		    break;
		  lower = int_const_binop (PLUS_EXPR, lower,
					   integer_one_node, true);
		}
	    }
	  else
	    {
	      sub = lookup_element (elt, purpose, NULL, NO_INSERT);
	      if (sub != NULL)
		result &= generate_element_init_1 (sub, value, list_p);
	    }
	}
      break;

    default:
      elt->visited = true;
      result = false;
    }

  return result;
}
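
/* For example (a sketch): initializing a candidate from the
   constructor "{ .x = 1, .y = 2 }" falls into the CONSTRUCTOR case
   above and emits one assignment per instantiated element; elements
   the initializer does not mention are zeroed afterwards by
   generate_element_zero, which uses the VISITED flag to skip the ones
   assigned here.  */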

/* A wrapper function for generate_element_init_1 that handles cleanup after
   gimplification.  */

static bool
generate_element_init (struct sra_elt *elt, tree init, tree *list_p)
{
  bool ret;

  push_gimplify_context ();
  ret = generate_element_init_1 (elt, init, list_p);
  pop_gimplify_context (NULL);

  /* The replacement can expose previously unreferenced variables.  */
  if (ret && *list_p)
    {
      tree_stmt_iterator i;

      for (i = tsi_start (*list_p); !tsi_end_p (i); tsi_next (&i))
	find_new_referenced_vars (tsi_stmt_ptr (i));
    }

  return ret;
}

/* Insert STMT on all the outgoing edges out of BB.  Note that if BB
   has more than one edge, STMT will be replicated for each edge.  Also,
   abnormal edges will be ignored.  */

void
insert_edge_copies (tree stmt, basic_block bb)
{
  edge e;
  edge_iterator ei;
  bool first_copy;

  first_copy = true;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      /* We don't need to insert copies on abnormal edges.  The
	 value of the scalar replacement is not guaranteed to
	 be valid through an abnormal edge.  */
      if (!(e->flags & EDGE_ABNORMAL))
	{
	  if (first_copy)
	    {
	      bsi_insert_on_edge (e, stmt);
	      first_copy = false;
	    }
	  else
	    bsi_insert_on_edge (e, unsave_expr_now (stmt));
	}
    }
}

/* Helper function to insert LIST before BSI, and set up line number info.  */

void
sra_insert_before (block_stmt_iterator *bsi, tree list)
{
  tree stmt = bsi_stmt (*bsi);

  if (EXPR_HAS_LOCATION (stmt))
    annotate_all_with_locus (&list, EXPR_LOCATION (stmt));
  bsi_insert_before (bsi, list, BSI_SAME_STMT);
}

/* Similarly, but insert after BSI.  Handles insertion onto edges as well.  */

void
sra_insert_after (block_stmt_iterator *bsi, tree list)
{
  tree stmt = bsi_stmt (*bsi);

  if (EXPR_HAS_LOCATION (stmt))
    annotate_all_with_locus (&list, EXPR_LOCATION (stmt));

  if (stmt_ends_bb_p (stmt))
    insert_edge_copies (list, bsi->bb);
  else
    bsi_insert_after (bsi, list, BSI_SAME_STMT);
}

/* Similarly, but replace the statement at BSI.  */

static void
sra_replace (block_stmt_iterator *bsi, tree list)
{
  sra_insert_before (bsi, list);
  bsi_remove (bsi, false);
  if (bsi_end_p (*bsi))
    *bsi = bsi_last (bsi->bb);
  else
    bsi_prev (bsi);
}
/* Scalarize a USE.  To recap, this is either a simple reference to ELT,
   if ELT is scalar, or some occurrence of ELT that requires a complete
   aggregate.  IS_OUTPUT is true if ELT is being modified.  */

static void
scalarize_use (struct sra_elt *elt, tree *expr_p, block_stmt_iterator *bsi,
	       bool is_output, bool use_all)
{
  tree list = NULL, stmt = bsi_stmt (*bsi);

  if (elt->replacement)
    {
      /* If we have a replacement, then updating the reference is as
	 simple as modifying the existing statement in place.  */
      if (is_output)
	mark_all_v_defs (stmt);
      *expr_p = elt->replacement;
      update_stmt (stmt);
    }
  else
    {
      /* Otherwise we need some copies.  If ELT is being read, then we want
	 to store all (modified) sub-elements back into the structure before
	 the reference takes place.  If ELT is being written, then we want to
	 load the changed values back into our shadow variables.  */
      /* ??? We don't check modified for reads, we just always write all of
	 the values.  We should be able to record the SSA number of the VOP
	 for which the values were last read.  If that number matches the
	 SSA number of the VOP in the current statement, then we needn't
	 emit an assignment.  This would also eliminate double writes when
	 a structure is passed as more than one argument to a function call.
	 This optimization would be most effective if sra_walk_function
	 processed the blocks in dominator order.  */

      generate_copy_inout (elt, is_output, generate_element_ref (elt), &list);
      if (list == NULL)
	return;
      mark_all_v_defs (list);
      if (is_output)
	sra_insert_after (bsi, list);
      else
	{
	  sra_insert_before (bsi, list);
	  if (use_all)
	    mark_no_warning (elt);
	}
    }
}
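
/* For illustration (names invented for exposition): let

       struct pair { int x; int y; } p;

   be a candidate whose fields were instantiated as p$x and p$y.  A use of
   a single scalar field, p.x, is rewritten in place to p$x.  A use that
   needs the complete aggregate, such as the call foo (p), instead has the
   copy-in sequence

       p.x = p$x;
       p.y = p$y;

   inserted before it, so the structure is up to date when the call reads
   it as a unit.  */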
/* Scalarize a COPY.  To recap, this is an assignment statement between
   two scalarizable references, LHS_ELT and RHS_ELT.  */

static void
scalarize_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
		block_stmt_iterator *bsi)
{
  tree list, stmt;

  if (lhs_elt->replacement && rhs_elt->replacement)
    {
      /* If we have two scalar operands, modify the existing statement.  */
      stmt = bsi_stmt (*bsi);

      /* See the commentary in sra_walk_function concerning
	 RETURN_EXPR, and why we should never see one here.  */
      gcc_assert (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT);

      GIMPLE_STMT_OPERAND (stmt, 0) = lhs_elt->replacement;
      GIMPLE_STMT_OPERAND (stmt, 1) = rhs_elt->replacement;
      update_stmt (stmt);
    }
  else if (lhs_elt->use_block_copy || rhs_elt->use_block_copy)
    {
      /* If either side requires a block copy, then sync the RHS back
	 to the original structure, leave the original assignment
	 statement (which will perform the block copy), then load the
	 LHS values out of its now-updated original structure.  */
      /* ??? Could perform a modified pair-wise element copy.  That
	 would at least allow those elements that are instantiated in
	 both structures to be optimized well.  */

      list = NULL;
      generate_copy_inout (rhs_elt, false,
			   generate_element_ref (rhs_elt), &list);
      if (list)
	{
	  mark_all_v_defs (list);
	  sra_insert_before (bsi, list);
	}

      list = NULL;
      generate_copy_inout (lhs_elt, true,
			   generate_element_ref (lhs_elt), &list);
      if (list)
	{
	  mark_all_v_defs (list);
	  sra_insert_after (bsi, list);
	}
    }
  else
    {
      /* Otherwise both sides must be fully instantiated, in which case
	 we perform pair-wise element assignments and replace the
	 original block copy statement.  */

      stmt = bsi_stmt (*bsi);
      mark_all_v_defs (stmt);

      list = NULL;
      generate_element_copy (lhs_elt, rhs_elt, &list);
      gcc_assert (list);
      mark_all_v_defs (list);
      sra_replace (bsi, list);
    }
}
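
/* For illustration (names as in the example after scalarize_use): for two
   fully instantiated candidates

       struct pair { int x; int y; } p, q;

   the block copy p = q is replaced by the pair-wise element assignments

       p$x = q$x;
       p$y = q$y;

   If instead either side required a block copy, the statement p = q would
   be kept, preceded by stores of q$x and q$y back into q and followed by
   loads of p's instantiated elements out of p.  */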
/* Scalarize an INIT.  To recap, this is an assignment to a scalarizable
   reference from some form of constructor: CONSTRUCTOR, COMPLEX_CST or
   COMPLEX_EXPR.  If RHS is NULL, it should be treated as an empty
   CONSTRUCTOR.  */

static void
scalarize_init (struct sra_elt *lhs_elt, tree rhs, block_stmt_iterator *bsi)
{
  bool result = true;
  tree list = NULL;

  /* Generate initialization statements for all members extant in the RHS.  */
  if (rhs)
    {
      /* Unshare the expression just in case this is from a decl's initial.  */
      rhs = unshare_expr (rhs);
      result = generate_element_init (lhs_elt, rhs, &list);
    }

  /* CONSTRUCTOR is defined such that any member not mentioned is assigned
     a zero value.  Initialize the rest of the instantiated elements.  */
  generate_element_zero (lhs_elt, &list);

  if (!result)
    {
      /* If we failed to convert the entire initializer, then we must
	 leave the structure assignment in place and must load values
	 from the structure into the slots for which we did not find
	 constants.  The easiest way to do this is to generate a complete
	 copy-out, and then follow that with the constant assignments
	 that we were able to build.  DCE will clean things up.  */
      tree list0 = NULL;
      generate_copy_inout (lhs_elt, true, generate_element_ref (lhs_elt),
			   &list0);
      append_to_statement_list (list, &list0);
      list = list0;
    }

  if (lhs_elt->use_block_copy || !result)
    {
      /* Since LHS is not fully instantiated, we must leave the structure
	 assignment in place.  Treating this case differently from a USE
	 exposes constants to later optimizations.  */
      if (list)
	{
	  mark_all_v_defs (list);
	  sra_insert_after (bsi, list);
	}
    }
  else
    {
      /* The LHS is fully instantiated.  The list of initializations
	 replaces the original structure assignment.  */
      gcc_assert (list);
      mark_all_v_defs (bsi_stmt (*bsi));
      mark_all_v_defs (list);
      sra_replace (bsi, list);
    }
}
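
/* For illustration (names as above): for a fully instantiated candidate,
   the initialization

       struct pair p = { .x = 5 };

   is replaced by direct assignments to the replacements, zeroing the
   member the CONSTRUCTOR leaves unmentioned:

       p$x = 5;
       p$y = 0;  */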
/* A subroutine of scalarize_ldst called via walk_tree.  Set TREE_THIS_NOTRAP
   on all INDIRECT_REFs.  */

static tree
mark_notrap (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (TREE_CODE (t) == INDIRECT_REF)
    {
      TREE_THIS_NOTRAP (t) = 1;
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;

  return NULL;
}
/* Scalarize a LDST.  To recap, this is an assignment between one scalarizable
   reference ELT and one non-scalarizable reference OTHER.  IS_OUTPUT is true
   if ELT is on the left-hand side.  */

static void
scalarize_ldst (struct sra_elt *elt, tree other,
		block_stmt_iterator *bsi, bool is_output)
{
  /* Shouldn't have gotten called for a scalar.  */
  gcc_assert (!elt->replacement);

  if (elt->use_block_copy)
    {
      /* Since ELT is not fully instantiated, we have to leave the
	 block copy in place.  Treat this as a USE.  */
      scalarize_use (elt, NULL, bsi, is_output, false);
    }
  else
    {
      /* The interesting case is when ELT is fully instantiated.  In this
	 case we can have each element stored/loaded directly to/from the
	 corresponding slot in OTHER.  This avoids a block copy.  */

      tree list = NULL, stmt = bsi_stmt (*bsi);

      mark_all_v_defs (stmt);
      generate_copy_inout (elt, is_output, other, &list);
      gcc_assert (list);
      mark_all_v_defs (list);

      /* Preserve EH semantics.  */
      if (stmt_ends_bb_p (stmt))
	{
	  tree_stmt_iterator tsi;
	  tree first;

	  /* Extract the first statement from LIST.  */
	  tsi = tsi_start (list);
	  first = tsi_stmt (tsi);
	  tsi_delink (&tsi);

	  /* Replace the old statement with this new representative.  */
	  bsi_replace (bsi, first, true);

	  if (!tsi_end_p (tsi))
	    {
	      /* If any reference would trap, then they all would.  And more
		 to the point, the first would.  Therefore none of the rest
		 will trap since the first didn't.  Indicate this by
		 iterating over the remaining statements and setting
		 TREE_THIS_NOTRAP in all INDIRECT_REFs.  */
	      do
		{
		  walk_tree (tsi_stmt_ptr (tsi), mark_notrap, NULL, NULL);
		  tsi_next (&tsi);
		}
	      while (!tsi_end_p (tsi));

	      insert_edge_copies (list, bsi->bb);
	    }
	}
      else
	sra_replace (bsi, list);
    }
}
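
/* For illustration (names as above, ptr invented): for a fully
   instantiated candidate p and a non-scalarizable reference *ptr, the
   load

       p = *ptr;

   becomes the element-wise loads

       p$x = ptr->x;
       p$y = ptr->y;

   If the original statement could throw, only the first load replaces it
   in place; the rest are marked TREE_THIS_NOTRAP (had any of them been
   going to trap, the first would have trapped already) and are inserted
   on the outgoing edges.  */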
/* Generate initializations for all scalarizable parameters.  */

static void
scalarize_parms (void)
{
  tree list = NULL;
  unsigned i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (needs_copy_in, 0, i, bi)
    {
      tree var = referenced_var (i);
      struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
      generate_copy_inout (elt, true, var, &list);
    }

  if (list)
    {
      insert_edge_copies (list, ENTRY_BLOCK_PTR);
      mark_all_v_defs (list);
    }
}
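
/* For illustration: for a scalarized PARM_DECL p recorded in
   needs_copy_in, the copy-in emitted on the edge out of the entry block
   amounts to

       p$x = p.x;
       p$y = p.y;

   so the replacement variables start out holding the values the caller
   passed in (field names again invented for exposition).  */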
/* Entry point to phase 4.  Update the function to match replacements.  */

static void
scalarize_function (void)
{
  static const struct sra_walk_fns fns = {
    scalarize_use, scalarize_copy, scalarize_init, scalarize_ldst, false
  };

  sra_walk_function (&fns);
  scalarize_parms ();
  bsi_commit_edge_inserts ();
}
/* Debug helper function.  Print ELT in a nice human-readable format.  */

static void
dump_sra_elt_name (FILE *f, struct sra_elt *elt)
{
  if (elt->parent && TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
    {
      fputs (elt->element == integer_zero_node ? "__real__ " : "__imag__ ", f);
      dump_sra_elt_name (f, elt->parent);
    }
  else
    {
      if (elt->parent)
	dump_sra_elt_name (f, elt->parent);
      if (DECL_P (elt->element))
	{
	  if (TREE_CODE (elt->element) == FIELD_DECL)
	    fputc ('.', f);
	  print_generic_expr (f, elt->element, dump_flags);
	}
      else if (TREE_CODE (elt->element) == RANGE_EXPR)
	fprintf (f, "[" HOST_WIDE_INT_PRINT_DEC ".." HOST_WIDE_INT_PRINT_DEC "]",
		 TREE_INT_CST_LOW (TREE_OPERAND (elt->element, 0)),
		 TREE_INT_CST_LOW (TREE_OPERAND (elt->element, 1)));
      else
	fprintf (f, "[" HOST_WIDE_INT_PRINT_DEC "]",
		 TREE_INT_CST_LOW (elt->element));
    }
}
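
/* For illustration: an element for field b of member a of candidate x
   prints as "x.a.b", an array element as "x[3]", a range as "x[0..3]",
   and the components of a complex candidate c as "__real__ c" and
   "__imag__ c".  */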
/* Likewise, but callable from the debugger.  */

void
debug_sra_elt_name (struct sra_elt *elt)
{
  dump_sra_elt_name (stderr, elt);
  fputc ('\n', stderr);
}
/* Allocate the type decomposition and instantiation caches on first use.  */

void
sra_init_cache (void)
{
  if (sra_type_decomp_cache)
    return;

  sra_type_decomp_cache = BITMAP_ALLOC (NULL);
  sra_type_inst_cache = BITMAP_ALLOC (NULL);
}
/* Main entry point.  */

static unsigned int
tree_sra (void)
{
  /* Initialize local variables.  */
  todoflags = 0;
  gcc_obstack_init (&sra_obstack);
  sra_candidates = BITMAP_ALLOC (NULL);
  needs_copy_in = BITMAP_ALLOC (NULL);
  sra_init_cache ();
  sra_map = htab_create (101, sra_elt_hash, sra_elt_eq, NULL);

  /* Scan.  If we find anything, instantiate and scalarize.  */
  if (find_candidates_for_sra ())
    {
      scan_function ();
      decide_instantiations ();
      scalarize_function ();
    }

  /* Free allocated memory.  */
  htab_delete (sra_map);
  sra_map = NULL;
  BITMAP_FREE (sra_candidates);
  BITMAP_FREE (needs_copy_in);
  BITMAP_FREE (sra_type_decomp_cache);
  BITMAP_FREE (sra_type_inst_cache);
  obstack_free (&sra_obstack, NULL);
  return todoflags;
}
/* Wrapper for the early SRA pass, which runs before inlining.  */

static unsigned int
tree_sra_early (void)
{
  unsigned int ret;

  early_sra = true;
  ret = tree_sra ();
  early_sra = false;

  return ret;
}

/* Gate: run the SRA passes only when -ftree-sra is enabled.  */

static bool
gate_sra (void)
{
  return flag_tree_sra != 0;
}
struct tree_opt_pass pass_sra_early =
{
  "esra",				/* name */
  gate_sra,				/* gate */
  tree_sra_early,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_SRA,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
  | TODO_update_ssa
  | TODO_ggc_collect
  | TODO_verify_ssa,			/* todo_flags_finish */
  0					/* letter */
};
struct tree_opt_pass pass_sra =
{
  "sra",				/* name */
  gate_sra,				/* gate */
  tree_sra,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_SRA,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
  | TODO_update_ssa
  | TODO_ggc_collect
  | TODO_verify_ssa,			/* todo_flags_finish */
  0					/* letter */
};