/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "errors.h"
#include "ggc.h"
#include "tree.h"

/* These RTL headers are needed for basic-block.h.  */
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "langhooks.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "tree-gimple.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "timevar.h"
#include "flags.h"
#include "bitmap.h"
#include "obstack.h"
#include "target.h"
/* expr.h is needed for MOVE_RATIO.  */
#include "expr.h"
#include "params.h"
/* The object of this pass is to replace a non-addressable aggregate with a
   set of independent variables.  Most of the time, all of these variables
   will be scalars.  But a secondary objective is to break up larger
   aggregates into smaller aggregates.  In the process we may find that some
   bits of the larger aggregate can be deleted as unreferenced.

   This substitution is done globally.  More localized substitutions would
   be the purview of a load-store motion pass.

   The optimization proceeds in phases:

     (1) Identify variables that have types that are candidates for
	 decomposition.

     (2) Scan the function looking for the ways these variables are used.
	 In particular we're interested in the number of times a variable
	 (or member) is needed as a complete unit, and the number of times
	 a variable (or member) is copied.

     (3) Based on the usage profile, instantiate substitution variables.

     (4) Scan the function making replacements.  */
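
/* For illustration (an assumed example, not part of the original sources):
   given

	struct point { int x; int y; } p;
	p.x = 3;
	p.y = 4;
	return p.x + p.y;

   the member references can be rewritten in terms of two independent
   replacement scalars, named p$x and p$y by the code below, which the
   later scalar optimizers can then propagate and fold:

	p$x = 3;
	p$y = 4;
	return p$x + p$y;  */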

/* The set of aggregate variables that are candidates for scalarization.  */
static bitmap sra_candidates;

/* Set of scalarizable PARM_DECLs that need copy-in operations at the
   beginning of the function.  */
static bitmap needs_copy_in;

/* Sets of bit pairs that cache type decomposition and instantiation.  */
static bitmap sra_type_decomp_cache;
static bitmap sra_type_inst_cache;

/* One of these structures is created for each candidate aggregate
   and each (accessed) member of such an aggregate.  */
struct sra_elt
{
  /* A tree of the elements.  Used when we want to traverse everything.  */
  struct sra_elt *parent;
  struct sra_elt *children;
  struct sra_elt *sibling;

  /* If this element is a root, then this is the VAR_DECL.  If this is
     a sub-element, this is some token used to identify the reference.
     In the case of COMPONENT_REF, this is the FIELD_DECL.  In the case
     of an ARRAY_REF, this is the (constant) index.  In the case of a
     complex number, this is a zero or one.  */
  tree element;

  /* The type of the element.  */
  tree type;

  /* A VAR_DECL, for any sub-element we've decided to replace.  */
  tree replacement;

  /* The number of times the element is referenced as a whole.  I.e.
     given "a.b.c", this would be incremented for C, but not for A or B.  */
  unsigned int n_uses;

  /* The number of times the element is copied to or from another
     scalarizable element.  */
  unsigned int n_copies;

  /* True if TYPE is scalar.  */
  bool is_scalar;

  /* True if we saw something about this element that prevents scalarization,
     such as non-constant indexing.  */
  bool cannot_scalarize;

  /* True if we've decided that structure-to-structure assignment
     should happen via memcpy and not per-element.  */
  bool use_block_copy;

  /* A flag for use with/after random access traversals.  */
  bool visited;
};

/* Random access to the child of a parent is performed by hashing.
   This prevents quadratic behavior, and allows SRA to function
   reasonably on larger records.  */
static htab_t sra_map;

/* All structures are allocated out of the following obstack.  */
static struct obstack sra_obstack;

/* Debugging functions.  */
static void dump_sra_elt_name (FILE *, struct sra_elt *);
extern void debug_sra_elt_name (struct sra_elt *);

/* Return true if DECL is an SRA candidate.  */

static bool
is_sra_candidate_decl (tree decl)
{
  return DECL_P (decl) && bitmap_bit_p (sra_candidates, var_ann (decl)->uid);
}

/* Return true if TYPE is a scalar type.  */

static bool
is_sra_scalar_type (tree type)
{
  enum tree_code code = TREE_CODE (type);
  return (code == INTEGER_TYPE || code == REAL_TYPE || code == VECTOR_TYPE
	  || code == ENUMERAL_TYPE || code == BOOLEAN_TYPE
	  || code == CHAR_TYPE || code == POINTER_TYPE || code == OFFSET_TYPE
	  || code == REFERENCE_TYPE);
}

/* Return true if TYPE can be decomposed into a set of independent variables.

   Note that this doesn't imply that all elements of TYPE can be
   instantiated, just that if we decide to break up the type into
   separate pieces that it can be done.  */

static bool
type_can_be_decomposed_p (tree type)
{
  unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
  tree t;

  /* Avoid searching the same type twice.  */
  if (bitmap_bit_p (sra_type_decomp_cache, cache+0))
    return true;
  if (bitmap_bit_p (sra_type_decomp_cache, cache+1))
    return false;

  /* The type must have a definite nonzero size.  */
  if (TYPE_SIZE (type) == NULL || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
      || integer_zerop (TYPE_SIZE (type)))
    goto fail;

  /* The type must be a non-union aggregate.  */
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
	bool saw_one_field = false;

	for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
	  if (TREE_CODE (t) == FIELD_DECL)
	    {
	      /* Reject incorrectly represented bit fields.  */
	      if (DECL_BIT_FIELD (t)
		  && (tree_low_cst (DECL_SIZE (t), 1)
		      != TYPE_PRECISION (TREE_TYPE (t))))
		goto fail;

	      saw_one_field = true;
	    }

	/* Record types must have at least one field.  */
	if (!saw_one_field)
	  goto fail;
      }
      break;

    case ARRAY_TYPE:
      /* Array types must have a fixed lower and upper bound.  */
      t = TYPE_DOMAIN (type);
      if (t == NULL)
	goto fail;
      if (TYPE_MIN_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MIN_VALUE (t)))
	goto fail;
      if (TYPE_MAX_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MAX_VALUE (t)))
	goto fail;
      break;

    case COMPLEX_TYPE:
      break;

    default:
      goto fail;
    }

  bitmap_set_bit (sra_type_decomp_cache, cache+0);
  return true;

 fail:
  bitmap_set_bit (sra_type_decomp_cache, cache+1);
  return false;
}

/* Return true if DECL can be decomposed into a set of independent
   (though not necessarily scalar) variables.  */

static bool
decl_can_be_decomposed_p (tree var)
{
  /* Early out for scalars.  */
  if (is_sra_scalar_type (TREE_TYPE (var)))
    return false;

  /* The variable must not be aliased.  */
  if (!is_gimple_non_addressable (var))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Cannot scalarize variable ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, " because it must live in memory\n");
	}
      return false;
    }

  /* The variable must not be volatile.  */
  if (TREE_THIS_VOLATILE (var))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Cannot scalarize variable ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, " because it is declared volatile\n");
	}
      return false;
    }

  /* We must be able to decompose the variable's type.  */
  if (!type_can_be_decomposed_p (TREE_TYPE (var)))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Cannot scalarize variable ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, " because its type cannot be decomposed\n");
	}
      return false;
    }

  return true;
}

/* Return true if TYPE can be *completely* decomposed into scalars.  */

static bool
type_can_instantiate_all_elements (tree type)
{
  if (is_sra_scalar_type (type))
    return true;
  if (!type_can_be_decomposed_p (type))
    return false;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
	unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
	tree f;

	if (bitmap_bit_p (sra_type_inst_cache, cache+0))
	  return true;
	if (bitmap_bit_p (sra_type_inst_cache, cache+1))
	  return false;

	for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      if (!type_can_instantiate_all_elements (TREE_TYPE (f)))
		{
		  bitmap_set_bit (sra_type_inst_cache, cache+1);
		  return false;
		}
	    }

	bitmap_set_bit (sra_type_inst_cache, cache+0);
	return true;
      }

    case ARRAY_TYPE:
      return type_can_instantiate_all_elements (TREE_TYPE (type));

    case COMPLEX_TYPE:
      return true;

    default:
      gcc_unreachable ();
    }
}

/* Test whether ELT or some sub-element cannot be scalarized.  Return
   true only if ELT and all of its sub-elements can be scalarized.  */

static bool
can_completely_scalarize_p (struct sra_elt *elt)
{
  struct sra_elt *c;

  if (elt->cannot_scalarize)
    return false;

  for (c = elt->children; c ; c = c->sibling)
    if (!can_completely_scalarize_p (c))
      return false;

  return true;
}

/* A simplified tree hashing algorithm that only handles the types of
   trees we expect to find in sra_elt->element.  */

static hashval_t
sra_hash_tree (tree t)
{
  hashval_t h;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      h = DECL_UID (t);
      break;

    case INTEGER_CST:
      h = TREE_INT_CST_LOW (t) ^ TREE_INT_CST_HIGH (t);
      break;

    case FIELD_DECL:
      /* We can have types that are compatible, but have different member
	 lists, so we can't hash fields by ID.  Use offsets instead.  */
      h = iterative_hash_expr (DECL_FIELD_OFFSET (t), 0);
      h = iterative_hash_expr (DECL_FIELD_BIT_OFFSET (t), h);
      break;

    default:
      gcc_unreachable ();
    }

  return h;
}

/* Hash function for type SRA_PAIR.  */

static hashval_t
sra_elt_hash (const void *x)
{
  const struct sra_elt *e = x;
  const struct sra_elt *p;
  hashval_t h;

  h = sra_hash_tree (e->element);

  /* Take into account everything back up the chain.  Given that chain
     lengths are rarely very long, this should be acceptable.  If we
     truly identify this as a performance problem, it should work to
     hash the pointer value "e->parent".  */
  for (p = e->parent; p ; p = p->parent)
    h = (h * 65521) ^ sra_hash_tree (p->element);

  return h;
}

/* Equality function for type SRA_PAIR.  */

static int
sra_elt_eq (const void *x, const void *y)
{
  const struct sra_elt *a = x;
  const struct sra_elt *b = y;
  tree ae, be;

  if (a->parent != b->parent)
    return false;

  ae = a->element;
  be = b->element;

  if (ae == be)
    return true;
  if (TREE_CODE (ae) != TREE_CODE (be))
    return false;

  switch (TREE_CODE (ae))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* These are all pointer unique.  */
      return false;

    case INTEGER_CST:
      /* Integers are not pointer unique, so compare their values.  */
      return tree_int_cst_equal (ae, be);

    case FIELD_DECL:
      /* Fields are unique within a record, but not between
	 compatible records.  */
      if (DECL_FIELD_CONTEXT (ae) == DECL_FIELD_CONTEXT (be))
	return false;
      return fields_compatible_p (ae, be);

    default:
      gcc_unreachable ();
    }
}

/* Create or return the SRA_ELT structure for CHILD in PARENT.  PARENT
   may be null, in which case CHILD must be a DECL.  */

static struct sra_elt *
lookup_element (struct sra_elt *parent, tree child, tree type,
		enum insert_option insert)
{
  struct sra_elt dummy;
  struct sra_elt **slot;
  struct sra_elt *elt;

  dummy.parent = parent;
  dummy.element = child;

  slot = (struct sra_elt **) htab_find_slot (sra_map, &dummy, insert);
  if (!slot && insert == NO_INSERT)
    return NULL;

  elt = *slot;
  if (!elt && insert == INSERT)
    {
      *slot = elt = obstack_alloc (&sra_obstack, sizeof (*elt));
      memset (elt, 0, sizeof (*elt));

      elt->parent = parent;
      elt->element = child;
      elt->type = type;
      elt->is_scalar = is_sra_scalar_type (type);

      if (parent)
	{
	  elt->sibling = parent->children;
	  parent->children = elt;
	}

      /* If this is a parameter, then if we want to scalarize, we have
	 one copy from the true function parameter.  Count it now.  */
      if (TREE_CODE (child) == PARM_DECL)
	{
	  elt->n_copies = 1;
	  bitmap_set_bit (needs_copy_in, var_ann (child)->uid);
	}
    }

  return elt;
}

/* Return true if the ARRAY_REF in EXPR is a constant, in-bounds access.  */

static bool
is_valid_const_index (tree expr)
{
  tree dom, t, index = TREE_OPERAND (expr, 1);

  if (TREE_CODE (index) != INTEGER_CST)
    return false;

  /* Watch out for stupid user tricks, indexing outside the array.

     Careful, we're not called only on scalarizable types, so do not
     assume constant array bounds.  We needn't do anything with such
     cases, since they'll be referring to objects that we should have
     already rejected for scalarization, so returning false is fine.  */

  dom = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (expr, 0)));
  if (dom == NULL)
    return false;

  t = TYPE_MIN_VALUE (dom);
  if (!t || TREE_CODE (t) != INTEGER_CST)
    return false;
  if (tree_int_cst_lt (index, t))
    return false;

  t = TYPE_MAX_VALUE (dom);
  if (!t || TREE_CODE (t) != INTEGER_CST)
    return false;
  if (tree_int_cst_lt (t, index))
    return false;

  return true;
}
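
/* For illustration (an assumed example, not from the original sources):
   with "int a[4]" the reference a[2] passes the checks above, while
   a[i] fails the INTEGER_CST test and a[7] fails the comparison
   against TYPE_MAX_VALUE.  */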

/* Create or return the SRA_ELT structure for EXPR if the expression
   refers to a scalarizable variable.  */

static struct sra_elt *
maybe_lookup_element_for_expr (tree expr)
{
  struct sra_elt *elt;
  tree child;

  switch (TREE_CODE (expr))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      if (is_sra_candidate_decl (expr))
	return lookup_element (NULL, expr, TREE_TYPE (expr), INSERT);
      return NULL;

    case ARRAY_REF:
      /* We can't scalarize variable array indices.  */
      if (is_valid_const_index (expr))
	child = TREE_OPERAND (expr, 1);
      else
	return NULL;
      break;

    case COMPONENT_REF:
      /* Don't look through unions.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) != RECORD_TYPE)
	return NULL;
      child = TREE_OPERAND (expr, 1);
      break;

    case REALPART_EXPR:
      child = integer_zero_node;
      break;
    case IMAGPART_EXPR:
      child = integer_one_node;
      break;

    default:
      return NULL;
    }

  elt = maybe_lookup_element_for_expr (TREE_OPERAND (expr, 0));
  if (elt)
    return lookup_element (elt, child, TREE_TYPE (expr), INSERT);
  return NULL;
}
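
/* For illustration (an assumed example, not from the original sources):
   for the expression a.b[3], the walk above recurses down to the
   VAR_DECL a and builds the element chain

	a (VAR_DECL)  ->  b (FIELD_DECL)  ->  3 (INTEGER_CST)

   while a.b[i] yields NULL, since the variable index is rejected by
   is_valid_const_index.  */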

/* Functions to walk just enough of the tree to see all scalarizable
   references, and categorize them.  */

/* A set of callbacks for phases 2 and 4.  They'll be invoked for the
   various kinds of references seen.  In all cases, *BSI is an iterator
   pointing to the statement being processed.  */
struct sra_walk_fns
{
  /* Invoked when ELT is required as a unit.  Note that ELT might refer to
     a leaf node, in which case this is a simple scalar reference.  *EXPR_P
     points to the location of the expression.  IS_OUTPUT is true if this
     is a left-hand-side reference.  */
  void (*use) (struct sra_elt *elt, tree *expr_p,
	       block_stmt_iterator *bsi, bool is_output);

  /* Invoked when we have a copy between two scalarizable references.  */
  void (*copy) (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
		block_stmt_iterator *bsi);

  /* Invoked when ELT is initialized from a constant.  VALUE may be NULL,
     in which case it should be treated as an empty CONSTRUCTOR.  */
  void (*init) (struct sra_elt *elt, tree value, block_stmt_iterator *bsi);

  /* Invoked when we have a copy between one scalarizable reference ELT
     and one non-scalarizable reference OTHER.  IS_OUTPUT is true if ELT
     is on the left-hand side.  */
  void (*ldst) (struct sra_elt *elt, tree other,
		block_stmt_iterator *bsi, bool is_output);

  /* True during phase 2, false during phase 4.  */
  /* ??? This is a hack.  */
  bool initial_scan;
};
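
/* For illustration (assumed examples, not from the original sources),
   with candidate aggregates a and b and a non-candidate *p, the walkers
   below dispatch roughly as follows:

	a = b;			=> copy (a, b)
	a.x = i;		=> use (a.x, is_output = true)
	a = (struct s){0};	=> init (a, CONSTRUCTOR)
	a = *p;			=> ldst (a, *p, is_output = true)
	foo (a);		=> use (a, is_output = false)  */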

#ifdef ENABLE_CHECKING
/* Invoked via walk_tree, if *TP contains a candidate decl, return it.  */

static tree
sra_find_candidate_decl (tree *tp, int *walk_subtrees,
			 void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
    {
      *walk_subtrees = 0;
      if (is_sra_candidate_decl (t))
	return t;
    }
  else if (TYPE_P (t))
    *walk_subtrees = 0;

  return NULL;
}
#endif

/* Walk most expressions looking for a scalarizable aggregate.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_expr (tree *expr_p, block_stmt_iterator *bsi, bool is_output,
	       const struct sra_walk_fns *fns)
{
  tree expr = *expr_p;
  tree inner = expr;
  bool disable_scalarization = false;

  /* We're looking to collect a reference expression between EXPR and INNER,
     such that INNER is a scalarizable decl and all other nodes through EXPR
     are references that we can scalarize.  If we come across something that
     we can't scalarize, we reset EXPR.  This has the effect of making it
     appear that we're referring to the larger expression as a whole.  */

  while (1)
    switch (TREE_CODE (inner))
      {
      case VAR_DECL:
      case PARM_DECL:
      case RESULT_DECL:
	/* If there is a scalarizable decl at the bottom, then process it.  */
	if (is_sra_candidate_decl (inner))
	  {
	    struct sra_elt *elt = maybe_lookup_element_for_expr (expr);
	    if (disable_scalarization)
	      elt->cannot_scalarize = true;
	    else
	      fns->use (elt, expr_p, bsi, is_output);
	  }
	return;

      case ARRAY_REF:
	/* Non-constant index means any member may be accessed.  Prevent the
	   expression from being scalarized.  If we were to treat this as a
	   reference to the whole array, we can wind up with a single dynamic
	   index reference inside a loop being overridden by several constant
	   index references during loop setup.  It's possible that this could
	   be avoided by using dynamic usage counts based on BB trip counts
	   (based on loop analysis or profiling), but that hardly seems worth
	   the effort.  */
	/* ??? Hack.  Figure out how to push this into the scan routines
	   without duplicating too much code.  */
	if (!is_valid_const_index (inner))
	  {
	    disable_scalarization = true;
	    goto use_all;
	  }
	/* ??? Are we assured that non-constant bounds and stride will have
	   the same value everywhere?  I don't think Fortran will...  */
	if (TREE_OPERAND (inner, 2) || TREE_OPERAND (inner, 3))
	  goto use_all;
	inner = TREE_OPERAND (inner, 0);
	break;

      case COMPONENT_REF:
	/* A reference to a union member constitutes a reference to the
	   entire union.  */
	if (TREE_CODE (TREE_TYPE (TREE_OPERAND (inner, 0))) != RECORD_TYPE)
	  goto use_all;
	/* ??? See above re non-constant stride.  */
	if (TREE_OPERAND (inner, 2))
	  goto use_all;
	inner = TREE_OPERAND (inner, 0);
	break;

      case REALPART_EXPR:
      case IMAGPART_EXPR:
	inner = TREE_OPERAND (inner, 0);
	break;

      case BIT_FIELD_REF:
	/* A bit field reference (access to *multiple* fields simultaneously)
	   is not currently scalarized.  Consider this an access to the
	   complete outer element, to which walk_tree will bring us next.  */
	goto use_all;

      case ARRAY_RANGE_REF:
	/* Similarly, a subrange reference is used to modify indexing.  Which
	   means that the canonical element names that we have won't work.  */
	goto use_all;

      case VIEW_CONVERT_EXPR:
      case NOP_EXPR:
	/* Similarly, a view/nop explicitly wants to look at an object in a
	   type other than the one we've scalarized.  */
	goto use_all;

      case WITH_SIZE_EXPR:
	/* This is a transparent wrapper.  The entire inner expression really
	   is being used.  */
	goto use_all;

      use_all:
	expr_p = &TREE_OPERAND (inner, 0);
	inner = expr = *expr_p;
	break;

      default:
#ifdef ENABLE_CHECKING
	/* Validate that we're not missing any references.  */
	gcc_assert (!walk_tree (&inner, sra_find_candidate_decl, NULL, NULL));
#endif
	return;
      }
}

/* Walk a TREE_LIST of values looking for scalarizable aggregates.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_tree_list (tree list, block_stmt_iterator *bsi, bool is_output,
		    const struct sra_walk_fns *fns)
{
  tree op;
  for (op = list; op ; op = TREE_CHAIN (op))
    sra_walk_expr (&TREE_VALUE (op), bsi, is_output, fns);
}

/* Walk the arguments of a CALL_EXPR looking for scalarizable aggregates.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_call_expr (tree expr, block_stmt_iterator *bsi,
		    const struct sra_walk_fns *fns)
{
  sra_walk_tree_list (TREE_OPERAND (expr, 1), bsi, false, fns);
}

/* Walk the inputs and outputs of an ASM_EXPR looking for scalarizable
   aggregates.  If we find one, invoke FNS->USE.  */

static void
sra_walk_asm_expr (tree expr, block_stmt_iterator *bsi,
		   const struct sra_walk_fns *fns)
{
  sra_walk_tree_list (ASM_INPUTS (expr), bsi, false, fns);
  sra_walk_tree_list (ASM_OUTPUTS (expr), bsi, true, fns);
}

/* Walk a MODIFY_EXPR and categorize the assignment appropriately.  */

static void
sra_walk_modify_expr (tree expr, block_stmt_iterator *bsi,
		      const struct sra_walk_fns *fns)
{
  struct sra_elt *lhs_elt, *rhs_elt;
  tree lhs, rhs;

  lhs = TREE_OPERAND (expr, 0);
  rhs = TREE_OPERAND (expr, 1);
  lhs_elt = maybe_lookup_element_for_expr (lhs);
  rhs_elt = maybe_lookup_element_for_expr (rhs);

  /* If both sides are scalarizable, this is a COPY operation.  */
  if (lhs_elt && rhs_elt)
    {
      fns->copy (lhs_elt, rhs_elt, bsi);
      return;
    }

  /* If the RHS is scalarizable, handle it.  There are only two cases.  */
  if (rhs_elt)
    {
      if (!rhs_elt->is_scalar)
	fns->ldst (rhs_elt, lhs, bsi, false);
      else
	fns->use (rhs_elt, &TREE_OPERAND (expr, 1), bsi, false);
    }

  /* If it isn't scalarizable, there may be scalarizable variables within, so
     check for a call or else walk the RHS to see if we need to do any
     copy-in operations.  We need to do it before the LHS is scalarized so
     that the statements get inserted in the proper place, before any
     copy-out operations.  */
  else
    {
      tree call = get_call_expr_in (rhs);
      if (call)
	sra_walk_call_expr (call, bsi, fns);
      else
	sra_walk_expr (&TREE_OPERAND (expr, 1), bsi, false, fns);
    }

  /* Likewise, handle the LHS being scalarizable.  We have cases similar
     to those above, but also want to handle RHS being constant.  */
  if (lhs_elt)
    {
      /* If this is an assignment from a constant, or constructor, then
	 we have access to all of the elements individually.  Invoke INIT.  */
      if (TREE_CODE (rhs) == COMPLEX_EXPR
	  || TREE_CODE (rhs) == COMPLEX_CST
	  || TREE_CODE (rhs) == CONSTRUCTOR)
	fns->init (lhs_elt, rhs, bsi);

      /* If this is an assignment from read-only memory, treat this as if
	 we'd been passed the constructor directly.  Invoke INIT.  */
      else if (TREE_CODE (rhs) == VAR_DECL
	       && TREE_STATIC (rhs)
	       && TREE_READONLY (rhs)
	       && targetm.binds_local_p (rhs))
	fns->init (lhs_elt, DECL_INITIAL (rhs), bsi);

      /* If this is a copy from a non-scalarizable lvalue, invoke LDST.
	 The lvalue requirement prevents us from trying to directly scalarize
	 the result of a function call.  Which would result in trying to call
	 the function multiple times, and other evil things.  */
      else if (!lhs_elt->is_scalar && is_gimple_addressable (rhs))
	fns->ldst (lhs_elt, rhs, bsi, true);

      /* Otherwise we're being used in some context that requires the
	 aggregate to be seen as a whole.  Invoke USE.  */
      else
	fns->use (lhs_elt, &TREE_OPERAND (expr, 0), bsi, true);
    }

  /* Similarly to above, LHS_ELT being null only means that the LHS as a
     whole is not a scalarizable reference.  There may be occurrences of
     scalarizable variables within, which implies a USE.  */
  else
    sra_walk_expr (&TREE_OPERAND (expr, 0), bsi, true, fns);
}

/* Entry point to the walk functions.  Search the entire function,
   invoking the callbacks in FNS on each of the references to
   scalarizable variables.  */

static void
sra_walk_function (const struct sra_walk_fns *fns)
{
  basic_block bb;
  block_stmt_iterator si, ni;

  /* ??? Phase 4 could derive some benefit from walking the function in
     dominator tree order.  */

  FOR_EACH_BB (bb)
    for (si = bsi_start (bb); !bsi_end_p (si); si = ni)
      {
	tree stmt, t;
	stmt_ann_t ann;

	stmt = bsi_stmt (si);
	ann = stmt_ann (stmt);

	ni = si;
	bsi_next (&ni);

	/* If the statement has no virtual operands, then it doesn't
	   make any structure references that we care about.  */
	if (NUM_V_MAY_DEFS (V_MAY_DEF_OPS (ann)) == 0
	    && NUM_VUSES (VUSE_OPS (ann)) == 0
	    && NUM_V_MUST_DEFS (V_MUST_DEF_OPS (ann)) == 0)
	  continue;

	switch (TREE_CODE (stmt))
	  {
	  case RETURN_EXPR:
	    /* If we have "return <retval>" then the return value is
	       already exposed for our pleasure.  Walk it as a USE to
	       force all the components back in place for the return.

	       If we have an embedded assignment, then <retval> is of
	       a type that gets returned in registers in this ABI, and
	       we do not wish to extend their lifetimes.  Treat this
	       as a USE of the variable on the RHS of this assignment.  */

	    t = TREE_OPERAND (stmt, 0);
	    if (TREE_CODE (t) == MODIFY_EXPR)
	      sra_walk_expr (&TREE_OPERAND (t, 1), &si, false, fns);
	    else
	      sra_walk_expr (&TREE_OPERAND (stmt, 0), &si, false, fns);
	    break;

	  case MODIFY_EXPR:
	    sra_walk_modify_expr (stmt, &si, fns);
	    break;
	  case CALL_EXPR:
	    sra_walk_call_expr (stmt, &si, fns);
	    break;
	  case ASM_EXPR:
	    sra_walk_asm_expr (stmt, &si, fns);
	    break;

	  default:
	    break;
	  }
      }
}

/* Phase One: Scan all referenced variables in the program looking for
   structures that could be decomposed.  */

static bool
find_candidates_for_sra (void)
{
  size_t i;
  bool any_set = false;

  for (i = 0; i < num_referenced_vars; i++)
    {
      tree var = referenced_var (i);
      if (decl_can_be_decomposed_p (var))
	{
	  bitmap_set_bit (sra_candidates, var_ann (var)->uid);
	  any_set = true;
	}
    }

  return any_set;
}

/* Phase Two: Scan all references to scalarizable variables.  Count the
   number of times they are used or copied respectively.  */

/* Callbacks to fill in SRA_WALK_FNS.  Everything but USE is
   considered a copy, because we can decompose the reference such that
   the sub-elements needn't be contiguous.  */

static void
scan_use (struct sra_elt *elt, tree *expr_p ATTRIBUTE_UNUSED,
	  block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
	  bool is_output ATTRIBUTE_UNUSED)
{
  elt->n_uses += 1;
}

static void
scan_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
	   block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
{
  lhs_elt->n_copies += 1;
  rhs_elt->n_copies += 1;
}

static void
scan_init (struct sra_elt *lhs_elt, tree rhs ATTRIBUTE_UNUSED,
	   block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
{
  lhs_elt->n_copies += 1;
}

static void
scan_ldst (struct sra_elt *elt, tree other ATTRIBUTE_UNUSED,
	   block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
	   bool is_output ATTRIBUTE_UNUSED)
{
  elt->n_copies += 1;
}

/* Dump the values we collected during the scanning phase.  */

static void
scan_dump (struct sra_elt *elt)
{
  struct sra_elt *c;

  dump_sra_elt_name (dump_file, elt);
  fprintf (dump_file, ": n_uses=%u n_copies=%u\n", elt->n_uses, elt->n_copies);

  for (c = elt->children; c ; c = c->sibling)
    scan_dump (c);
}

/* Entry point to phase 2.  Scan the entire function, building up
   scalarization data structures, recording copies and uses.  */

static void
scan_function (void)
{
  static const struct sra_walk_fns fns = {
    scan_use, scan_copy, scan_init, scan_ldst, true
  };
  bitmap_iterator bi;

  sra_walk_function (&fns);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      unsigned i;

      fputs ("\nScan results:\n", dump_file);
      EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
	{
	  tree var = referenced_var (i);
	  struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
	  if (elt)
	    scan_dump (elt);
	}
      fputc ('\n', dump_file);
    }
}

/* Phase Three: Make decisions about which variables to scalarize, if any.
   All elements to be scalarized have replacement variables made for them.  */

/* A subroutine of build_element_name.  Recursively build the element
   name on the obstack.  */

static void
build_element_name_1 (struct sra_elt *elt)
{
  tree t;
  char buffer[32];

  if (elt->parent)
    {
      build_element_name_1 (elt->parent);
      obstack_1grow (&sra_obstack, '$');

      if (TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
	{
	  if (elt->element == integer_zero_node)
	    obstack_grow (&sra_obstack, "real", 4);
	  else
	    obstack_grow (&sra_obstack, "imag", 4);
	  return;
	}
    }

  t = elt->element;
  if (TREE_CODE (t) == INTEGER_CST)
    {
      /* ??? Eh.  Don't bother doing double-wide printing.  */
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (t));
      obstack_grow (&sra_obstack, buffer, strlen (buffer));
    }
  else
    {
      tree name = DECL_NAME (t);
      if (name)
	obstack_grow (&sra_obstack, IDENTIFIER_POINTER (name),
		      IDENTIFIER_LENGTH (name));
      else
	{
	  sprintf (buffer, "D%u", DECL_UID (t));
	  obstack_grow (&sra_obstack, buffer, strlen (buffer));
	}
    }
}

/* Construct a pretty variable name for an element's replacement variable.
   The name is built on the obstack.  */

static char *
build_element_name (struct sra_elt *elt)
{
  build_element_name_1 (elt);
  obstack_1grow (&sra_obstack, '\0');
  return obstack_finish (&sra_obstack);
}
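
/* For illustration (an assumed example, not from the original sources):
   the element reached via a.b[2] is named "a$b$2" by the routines above,
   and the real part of a complex member c.z is named "c$z$real".  */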

/* Instantiate an element as an independent variable.  */

static void
instantiate_element (struct sra_elt *elt)
{
  struct sra_elt *base_elt;
  tree var, base;

  for (base_elt = elt; base_elt->parent; base_elt = base_elt->parent)
    continue;
  base = base_elt->element;

  elt->replacement = var = make_rename_temp (elt->type, "SR");
  DECL_SOURCE_LOCATION (var) = DECL_SOURCE_LOCATION (base);
  TREE_NO_WARNING (var) = TREE_NO_WARNING (base);
  DECL_ARTIFICIAL (var) = DECL_ARTIFICIAL (base);
  DECL_IGNORED_P (var) = DECL_IGNORED_P (base);

  if (DECL_NAME (base) && !DECL_IGNORED_P (base))
    {
      char *pretty_name = build_element_name (elt);
      DECL_NAME (var) = get_identifier (pretty_name);
      obstack_free (&sra_obstack, pretty_name);
    }

  if (dump_file)
    {
      fputs ("  ", dump_file);
      dump_sra_elt_name (dump_file, elt);
      fputs (" -> ", dump_file);
      print_generic_expr (dump_file, var, dump_flags);
      fputc ('\n', dump_file);
    }
}

/* Make one pass across an element tree deciding whether or not it's
   profitable to instantiate individual leaf scalars.

   PARENT_USES and PARENT_COPIES are the sum of the N_USES and N_COPIES
   fields all the way up the tree.  */

static void
decide_instantiation_1 (struct sra_elt *elt, unsigned int parent_uses,
			unsigned int parent_copies)
{
  if (dump_file && !elt->parent)
    {
      fputs ("Initial instantiation for ", dump_file);
      dump_sra_elt_name (dump_file, elt);
      fputc ('\n', dump_file);
    }

  if (elt->cannot_scalarize)
    return;

  if (elt->is_scalar)
    {
      /* The decision is simple: instantiate if we're used more frequently
	 than the parent needs to be seen as a complete unit.  */
      if (elt->n_uses + elt->n_copies + parent_copies > parent_uses)
	instantiate_element (elt);
    }
  else
    {
      struct sra_elt *c;
      unsigned int this_uses = elt->n_uses + parent_uses;
      unsigned int this_copies = elt->n_copies + parent_copies;

      for (c = elt->children; c ; c = c->sibling)
	decide_instantiation_1 (c, this_uses, this_copies);
    }
}
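
/* A worked example of the heuristic above (assumed counts, not from the
   original sources): let the leaf a.x have n_uses = 3, and let the root
   a have n_uses = 2 and n_copies = 1.  At the leaf the test is
   3 + 0 + 1 > 2, which holds, so a.x is instantiated.  If instead a
   were used as a complete unit five times, 3 + 0 + 1 > 5 would fail
   and a.x would be left in the aggregate.  */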

/* Compute the size and number of all instantiated elements below ELT.
   We will only care about this if the size of the complete structure
   fits in a HOST_WIDE_INT, so we don't have to worry about overflow.  */

static unsigned int
sum_instantiated_sizes (struct sra_elt *elt, unsigned HOST_WIDE_INT *sizep)
{
  if (elt->replacement)
    {
      *sizep += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (elt->type));
      return 1;
    }
  else
    {
      struct sra_elt *c;
      unsigned int count = 0;

      for (c = elt->children; c ; c = c->sibling)
	count += sum_instantiated_sizes (c, sizep);

      return count;
    }
}

/* Instantiate fields in ELT->TYPE that are not currently present as
   children of ELT.  */

static void instantiate_missing_elements (struct sra_elt *elt);

static void
instantiate_missing_elements_1 (struct sra_elt *elt, tree child, tree type)
{
  struct sra_elt *sub = lookup_element (elt, child, type, INSERT);
  if (sub->is_scalar)
    {
      if (sub->replacement == NULL)
	instantiate_element (sub);
    }
  else
    instantiate_missing_elements (sub);
}

static void
instantiate_missing_elements (struct sra_elt *elt)
{
  tree type = elt->type;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
	tree f;
	for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    instantiate_missing_elements_1 (elt, f, TREE_TYPE (f));
	break;
      }

    case ARRAY_TYPE:
      {
	tree i, max, subtype;

	i = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	max = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	subtype = TREE_TYPE (type);

	while (1)
	  {
	    instantiate_missing_elements_1 (elt, i, subtype);
	    if (tree_int_cst_equal (i, max))
	      break;
	    i = int_const_binop (PLUS_EXPR, i, integer_one_node, true);
	  }

	break;
      }

    case COMPLEX_TYPE:
      type = TREE_TYPE (type);
      instantiate_missing_elements_1 (elt, integer_zero_node, type);
      instantiate_missing_elements_1 (elt, integer_one_node, type);
      break;

    default:
      gcc_unreachable ();
    }
}

/* Make one pass across an element tree deciding whether to perform block
   or element copies.  If we decide on element copies, instantiate all
   elements.  Return true if there are any instantiated sub-elements.  */

static bool
decide_block_copy (struct sra_elt *elt)
{
  struct sra_elt *c;
  bool any_inst;

  /* If scalarization is disabled, respect it.  */
  if (elt->cannot_scalarize)
    {
      elt->use_block_copy = 1;

      if (dump_file)
	{
	  fputs ("Scalarization disabled for ", dump_file);
	  dump_sra_elt_name (dump_file, elt);
	  fputc ('\n', dump_file);
	}

      return false;
    }

  /* Don't decide if we've no uses.  */
  if (elt->n_uses == 0 && elt->n_copies == 0)
    ;

  else if (!elt->is_scalar)
    {
      tree size_tree = TYPE_SIZE_UNIT (elt->type);
      bool use_block_copy = true;

      /* Tradeoffs for COMPLEX types pretty much always make it better
	 to go ahead and split the components.  */
      if (TREE_CODE (elt->type) == COMPLEX_TYPE)
	use_block_copy = false;

      /* Don't bother trying to figure out the rest if the structure is
	 so large we can't do easy arithmetic.  This also forces block
	 copies for variable sized structures.  */
      else if (host_integerp (size_tree, 1))
	{
	  unsigned HOST_WIDE_INT full_size, inst_size = 0;
	  unsigned int inst_count;
	  unsigned int max_size;

	  /* If the sra-max-structure-size parameter is 0, then the
	     user has not overridden the parameter and we can choose a
	     sensible default.  */
	  max_size = SRA_MAX_STRUCTURE_SIZE
		     ? SRA_MAX_STRUCTURE_SIZE
		     : MOVE_RATIO * UNITS_PER_WORD;

	  full_size = tree_low_cst (size_tree, 1);

	  /* ??? What to do here.  If there are two fields, and we've only
	     instantiated one, then instantiating the other is clearly a win.
	     If there are a large number of fields then the size of the copy
	     is much more of a factor.  */

	  /* If the structure is small, and we've made copies, go ahead
	     and instantiate, hoping that the copies will go away.  */
	  if (full_size <= max_size
	      && elt->n_copies > elt->n_uses)
	    use_block_copy = false;
	  else
	    {
	      inst_count = sum_instantiated_sizes (elt, &inst_size);

	      if (inst_size * 100 >= full_size * SRA_FIELD_STRUCTURE_RATIO)
		use_block_copy = false;
	    }

	  /* In order to avoid block copy, we have to be able to instantiate
	     all elements of the type.  See if this is possible.  */
	  if (!use_block_copy
	      && (!can_completely_scalarize_p (elt)
		  || !type_can_instantiate_all_elements (elt->type)))
	    use_block_copy = true;
	}
      elt->use_block_copy = use_block_copy;

      if (dump_file)
	{
	  fprintf (dump_file, "Using %s for ",
		   use_block_copy ? "block-copy" : "element-copy");
	  dump_sra_elt_name (dump_file, elt);
	  fputc ('\n', dump_file);
	}

      if (!use_block_copy)
	{
	  instantiate_missing_elements (elt);
	  return true;
	}
    }

  any_inst = elt->replacement != NULL;

  for (c = elt->children; c ; c = c->sibling)
    any_inst |= decide_block_copy (c);

  return any_inst;
}
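
/* A worked example of the sizing logic above (assumed numbers, not from
   the original sources): take a 16-byte structure with two 8-byte
   fields, only one of them instantiated, and assume a
   sra-field-structure-ratio of 75.  Then inst_size * 100 == 800 is less
   than full_size * 75 == 1200, so block copy is retained.  With both
   fields instantiated, 1600 >= 1200 and element copies are used, after
   instantiate_missing_elements fills in the remaining leaves.  */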

/* Entry point to phase 3.  Instantiate scalar replacement variables.  */

static void
decide_instantiations (void)
{
  unsigned int i;
  bool cleared_any;
  bitmap_head done_head;
  bitmap_iterator bi;

  /* We cannot clear bits from a bitmap we're iterating over,
     so save up all the bits to clear until the end.  */
  bitmap_initialize (&done_head, &bitmap_default_obstack);
  cleared_any = false;

  EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
    {
      tree var = referenced_var (i);
      struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
      if (elt)
	{
	  decide_instantiation_1 (elt, 0, 0);
	  if (!decide_block_copy (elt))
	    elt = NULL;
	}
      if (!elt)
	{
	  bitmap_set_bit (&done_head, i);
	  cleared_any = true;
	}
    }

  if (cleared_any)
    {
      bitmap_and_compl_into (sra_candidates, &done_head);
      bitmap_and_compl_into (needs_copy_in, &done_head);
    }
  bitmap_clear (&done_head);

  if (dump_file)
    fputc ('\n', dump_file);
}

/* Phase Four: Update the function to match the replacements created.  */

/* Mark all the variables in V_MAY_DEF or V_MUST_DEF operands for STMT for
   renaming.  This becomes necessary when we modify all of a non-scalar.  */

static void
mark_all_v_defs (tree stmt)
{
  tree sym;
  ssa_op_iter iter;

  get_stmt_operands (stmt);

  FOR_EACH_SSA_TREE_OPERAND (sym, stmt, iter, SSA_OP_ALL_VIRTUALS)
    {
      if (TREE_CODE (sym) == SSA_NAME)
	sym = SSA_NAME_VAR (sym);
      bitmap_set_bit (vars_to_rename, var_ann (sym)->uid);
    }
}

/* Build a single level component reference to ELT rooted at BASE.  */

static tree
generate_one_element_ref (struct sra_elt *elt, tree base)
{
  switch (TREE_CODE (TREE_TYPE (base)))
    {
    case RECORD_TYPE:
      {
	tree field = elt->element;

	/* Watch out for compatible records with differing field lists.  */
	if (DECL_FIELD_CONTEXT (field) != TYPE_MAIN_VARIANT (TREE_TYPE (base)))
	  field = find_compatible_field (TREE_TYPE (base), field);

	return build (COMPONENT_REF, elt->type, base, field, NULL);
      }

    case ARRAY_TYPE:
      return build (ARRAY_REF, elt->type, base, elt->element, NULL, NULL);

    case COMPLEX_TYPE:
      if (elt->element == integer_zero_node)
	return build (REALPART_EXPR, elt->type, base);
      else
	return build (IMAGPART_EXPR, elt->type, base);

    default:
      gcc_unreachable ();
    }
}

/* Build a full component reference to ELT rooted at its native variable.  */

static tree
generate_element_ref (struct sra_elt *elt)
{
  if (elt->parent)
    return generate_one_element_ref (elt, generate_element_ref (elt->parent));
  else
    return elt->element;
}

/* Generate a set of assignment statements in *LIST_P to copy all
   instantiated elements under ELT to or from the equivalent structure
   rooted at EXPR.  COPY_OUT controls the direction of the copy, with
   true meaning to copy out of EXPR into ELT.  */

static void
generate_copy_inout (struct sra_elt *elt, bool copy_out, tree expr,
		     tree *list_p)
{
  struct sra_elt *c;
  tree t;

  if (elt->replacement)
    {
      if (copy_out)
	t = build (MODIFY_EXPR, void_type_node, elt->replacement, expr);
      else
	t = build (MODIFY_EXPR, void_type_node, expr, elt->replacement);
      append_to_statement_list (t, list_p);
    }
  else
    {
      for (c = elt->children; c ; c = c->sibling)
	{
	  t = generate_one_element_ref (c, unshare_expr (expr));
	  generate_copy_inout (c, copy_out, t, list_p);
	}
    }
}

/* Generate a set of assignment statements in *LIST_P to copy all instantiated
   elements under SRC to their counterparts under DST.  There must be a 1-1
   correspondence of instantiated elements.  */

static void
generate_element_copy (struct sra_elt *dst, struct sra_elt *src, tree *list_p)
{
  struct sra_elt *dc, *sc;

  for (dc = dst->children; dc ; dc = dc->sibling)
    {
      sc = lookup_element (src, dc->element, NULL, NO_INSERT);
      gcc_assert (sc);
      generate_element_copy (dc, sc, list_p);
    }

  if (dst->replacement)
    {
      tree t;

      gcc_assert (src->replacement);

      t = build (MODIFY_EXPR, void_type_node, dst->replacement,
		 src->replacement);
      append_to_statement_list (t, list_p);
    }
}

/* Generate a set of assignment statements in *LIST_P to zero all instantiated
   elements under ELT.  In addition, do not assign to elements that have been
   marked VISITED but do reset the visited flag; this allows easy coordination
   with generate_element_init.  */

static void
generate_element_zero (struct sra_elt *elt, tree *list_p)
{
  struct sra_elt *c;

  if (elt->visited)
    {
      elt->visited = false;
      return;
    }

  for (c = elt->children; c ; c = c->sibling)
    generate_element_zero (c, list_p);

  if (elt->replacement)
    {
      tree t;

      gcc_assert (elt->is_scalar);
      t = fold_convert (elt->type, integer_zero_node);

      t = build (MODIFY_EXPR, void_type_node, elt->replacement, t);
      append_to_statement_list (t, list_p);
    }
}

/* Generate an assignment VAR = INIT, where INIT may need gimplification.
   Add the result to *LIST_P.  */

static void
generate_one_element_init (tree var, tree init, tree *list_p)
{
  /* The replacement can be almost arbitrarily complex.  Gimplify.  */
  tree stmt = build (MODIFY_EXPR, void_type_node, var, init);
  gimplify_and_add (stmt, list_p);
}

/* Generate a set of assignment statements in *LIST_P to set all instantiated
   elements under ELT with the contents of the initializer INIT.  In addition,
   mark all assigned elements VISITED; this allows easy coordination with
   generate_element_zero.  Return false if we found a case we couldn't
   handle.  */

static bool
generate_element_init_1 (struct sra_elt *elt, tree init, tree *list_p)
{
  bool result = true;
  enum tree_code init_code;
  struct sra_elt *sub;
  tree t;

  /* We can be passed DECL_INITIAL of a static variable.  It might have a
     conversion, which we strip off here.  */
  STRIP_USELESS_TYPE_CONVERSION (init);
  init_code = TREE_CODE (init);

  if (elt->is_scalar)
    {
      if (elt->replacement)
	{
	  generate_one_element_init (elt->replacement, init, list_p);
	  elt->visited = true;
	}
      return result;
    }

  switch (init_code)
    {
    case COMPLEX_CST:
    case COMPLEX_EXPR:
      for (sub = elt->children; sub ; sub = sub->sibling)
	{
	  if (sub->element == integer_zero_node)
	    t = (init_code == COMPLEX_EXPR
		 ? TREE_OPERAND (init, 0) : TREE_REALPART (init));
	  else
	    t = (init_code == COMPLEX_EXPR
		 ? TREE_OPERAND (init, 1) : TREE_IMAGPART (init));
	  result &= generate_element_init_1 (sub, t, list_p);
	}
      break;

    case CONSTRUCTOR:
      for (t = CONSTRUCTOR_ELTS (init); t ; t = TREE_CHAIN (t))
	{
	  sub = lookup_element (elt, TREE_PURPOSE (t), NULL, NO_INSERT);
	  if (sub == NULL)
	    continue;
	  result &= generate_element_init_1 (sub, TREE_VALUE (t), list_p);
	}
      break;

    default:
      elt->visited = true;
      result = false;
    }

  return result;
}

/* A wrapper function for generate_element_init_1 that handles cleanup after
   gimplification.  */

static bool
generate_element_init (struct sra_elt *elt, tree init, tree *list_p)
{
  bool ret;

  push_gimplify_context ();
  ret = generate_element_init_1 (elt, init, list_p);
  pop_gimplify_context (NULL);

  /* The replacement can expose previously unreferenced variables.  */
  if (ret && *list_p)
    {
      tree_stmt_iterator i;
      size_t old, new, j;

      old = num_referenced_vars;

      for (i = tsi_start (*list_p); !tsi_end_p (i); tsi_next (&i))
	find_new_referenced_vars (tsi_stmt_ptr (i));

      new = num_referenced_vars;
      for (j = old; j < new; ++j)
	bitmap_set_bit (vars_to_rename, j);
    }

  return ret;
}

/* Insert STMT on all the outgoing edges out of BB.  Note that if BB
   has more than one edge, STMT will be replicated for each edge.  Also,
   abnormal edges will be ignored.  */

void
insert_edge_copies (tree stmt, basic_block bb)
{
  edge e;
  edge_iterator ei;
  bool first_copy;

  first_copy = true;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      /* We don't need to insert copies on abnormal edges.  The
	 value of the scalar replacement is not guaranteed to
	 be valid through an abnormal edge.  */
      if (!(e->flags & EDGE_ABNORMAL))
	{
	  if (first_copy)
	    {
	      bsi_insert_on_edge (e, stmt);
	      first_copy = false;
	    }
	  else
	    bsi_insert_on_edge (e, unsave_expr_now (stmt));
	}
    }
}

/* Helper function to insert LIST before BSI, and set up line number info.  */

static void
sra_insert_before (block_stmt_iterator *bsi, tree list)
{
  tree stmt = bsi_stmt (*bsi);

  if (EXPR_HAS_LOCATION (stmt))
    annotate_all_with_locus (&list, EXPR_LOCATION (stmt));
  bsi_insert_before (bsi, list, BSI_SAME_STMT);
}

/* Similarly, but insert after BSI.  Handles insertion onto edges as well.  */

static void
sra_insert_after (block_stmt_iterator *bsi, tree list)
{
  tree stmt = bsi_stmt (*bsi);

  if (EXPR_HAS_LOCATION (stmt))
    annotate_all_with_locus (&list, EXPR_LOCATION (stmt));

  if (stmt_ends_bb_p (stmt))
    insert_edge_copies (list, bsi->bb);
  else
    bsi_insert_after (bsi, list, BSI_SAME_STMT);
}

/* Similarly, but replace the statement at BSI.  */

static void
sra_replace (block_stmt_iterator *bsi, tree list)
{
  sra_insert_before (bsi, list);
  bsi_remove (bsi);
  if (bsi_end_p (*bsi))
    *bsi = bsi_last (bsi->bb);
  else
    bsi_prev (bsi);
}

/* Scalarize a USE.  To recap, this is either a simple reference to ELT,
   if elt is scalar, or some occurrence of ELT that requires a complete
   aggregate.  IS_OUTPUT is true if ELT is being modified.  */

static void
scalarize_use (struct sra_elt *elt, tree *expr_p, block_stmt_iterator *bsi,
	       bool is_output)
{
  tree list = NULL, stmt = bsi_stmt (*bsi);

  if (elt->replacement)
    {
      /* If we have a replacement, then updating the reference is as
	 simple as modifying the existing statement in place.  */
      if (is_output)
	mark_all_v_defs (stmt);
      *expr_p = elt->replacement;
      modify_stmt (stmt);
    }
  else
    {
      /* Otherwise we need some copies.  If ELT is being read, then we want
	 to store all (modified) sub-elements back into the structure before
	 the reference takes place.  If ELT is being written, then we want to
	 load the changed values back into our shadow variables.  */
      /* ??? We don't check modified for reads, we just always write all of
	 the values.  We should be able to record the SSA number of the VOP
	 for which the values were last read.  If that number matches the
	 SSA number of the VOP in the current statement, then we needn't
	 emit an assignment.  This would also eliminate double writes when
	 a structure is passed as more than one argument to a function call.
	 This optimization would be most effective if sra_walk_function
	 processed the blocks in dominator order.  */

      generate_copy_inout (elt, is_output, generate_element_ref (elt), &list);
      if (list == NULL)
	return;
      mark_all_v_defs (expr_first (list));
      if (is_output)
	sra_insert_after (bsi, list);
      else
	sra_insert_before (bsi, list);
    }
}
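
/* For illustration (an assumed example, not from the original sources):
   if a's members are instantiated as a$x and a$y, a whole-aggregate
   read such as "foo (a);" has the stores

	a.x = a$x;
	a.y = a$y;

   inserted before the call, while an output use such as "a = bar ();"
   has the loads a$x = a.x and a$y = a.y inserted after it.  */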

/* Scalarize a COPY.  To recap, this is an assignment statement between
   two scalarizable references, LHS_ELT and RHS_ELT.  */

static void
scalarize_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
		block_stmt_iterator *bsi)
{
  tree list, stmt;

  if (lhs_elt->replacement && rhs_elt->replacement)
    {
      /* If we have two scalar operands, modify the existing statement.  */
      stmt = bsi_stmt (*bsi);

      /* See the commentary in sra_walk_function concerning
	 RETURN_EXPR, and why we should never see one here.  */
      gcc_assert (TREE_CODE (stmt) == MODIFY_EXPR);

      TREE_OPERAND (stmt, 0) = lhs_elt->replacement;
      TREE_OPERAND (stmt, 1) = rhs_elt->replacement;
      modify_stmt (stmt);
    }
  else if (lhs_elt->use_block_copy || rhs_elt->use_block_copy)
    {
      /* If either side requires a block copy, then sync the RHS back
	 to the original structure, leave the original assignment
	 statement (which will perform the block copy), then load the
	 LHS values out of its now-updated original structure.  */
      /* ??? Could perform a modified pair-wise element copy.  That
	 would at least allow those elements that are instantiated in
	 both structures to be optimized well.  */

      list = NULL;
      generate_copy_inout (rhs_elt, false,
			   generate_element_ref (rhs_elt), &list);
      if (list)
	{
	  mark_all_v_defs (expr_first (list));
	  sra_insert_before (bsi, list);
	}

      list = NULL;
      generate_copy_inout (lhs_elt, true,
			   generate_element_ref (lhs_elt), &list);
      if (list)
	sra_insert_after (bsi, list);
    }
  else
    {
      /* Otherwise both sides must be fully instantiated.  In which
	 case perform pair-wise element assignments and replace the
	 original block copy statement.  */

      stmt = bsi_stmt (*bsi);
      mark_all_v_defs (stmt);

      list = NULL;
      generate_element_copy (lhs_elt, rhs_elt, &list);
      gcc_assert (list);
      sra_replace (bsi, list);
    }
}

/* Scalarize an INIT.  To recap, this is an assignment to a scalarizable
   reference from some form of constructor: CONSTRUCTOR, COMPLEX_CST or
   COMPLEX_EXPR.  If RHS is NULL, it should be treated as an empty
   CONSTRUCTOR.  */

static void
scalarize_init (struct sra_elt *lhs_elt, tree rhs, block_stmt_iterator *bsi)
{
  bool result = true;
  tree list = NULL;

  /* Generate initialization statements for all members extant in the RHS.  */
  if (rhs)
    {
      /* Unshare the expression just in case this is from a decl's initial.  */
      rhs = unshare_expr (rhs);
      result = generate_element_init (lhs_elt, rhs, &list);
    }

  /* CONSTRUCTOR is defined such that any member not mentioned is assigned
     a zero value.  Initialize the rest of the instantiated elements.  */
  generate_element_zero (lhs_elt, &list);

  if (!result)
    {
      /* If we failed to convert the entire initializer, then we must
	 leave the structure assignment in place and must load values
	 from the structure into the slots for which we did not find
	 constants.  The easiest way to do this is to generate a complete
	 copy-out, and then follow that with the constant assignments
	 that we were able to build.  DCE will clean things up.  */
      tree list0 = NULL;
      generate_copy_inout (lhs_elt, true, generate_element_ref (lhs_elt),
			   &list0);
      append_to_statement_list (list, &list0);
      list = list0;
    }

  if (lhs_elt->use_block_copy || !result)
    {
      /* Since LHS is not fully instantiated, we must leave the structure
	 assignment in place.  Treating this case differently from a USE
	 exposes constants to later optimizations.  */
      if (list)
	{
	  mark_all_v_defs (expr_first (list));
	  sra_insert_after (bsi, list);
	}
    }
  else
    {
      /* The LHS is fully instantiated.  The list of initializations
	 replaces the original structure assignment.  */
      gcc_assert (list);
      mark_all_v_defs (bsi_stmt (*bsi));
      sra_replace (bsi, list);
    }
}
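
/* For illustration (an assumed example, not from the original sources):
   with struct point { int x; int y; } fully instantiated, the statement
   "p = (struct point){ .x = 1 };" is replaced by

	p$x = 1;
	p$y = 0;

   generate_element_init marks x VISITED when emitting the first store,
   so generate_element_zero zeroes only the unmentioned member y.  */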

/* A subroutine of scalarize_ldst called via walk_tree.  Set TREE_THIS_NOTRAP
   on all INDIRECT_REFs.  */

static tree
mark_notrap (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (TREE_CODE (t) == INDIRECT_REF)
    {
      TREE_THIS_NOTRAP (t) = 1;
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;

  return NULL;
}

/* Scalarize a LDST.  To recap, this is an assignment between one scalarizable
   reference ELT and one non-scalarizable reference OTHER.  IS_OUTPUT is true
   if ELT is on the left-hand side.  */

static void
scalarize_ldst (struct sra_elt *elt, tree other,
		block_stmt_iterator *bsi, bool is_output)
{
  /* Shouldn't have gotten called for a scalar.  */
  gcc_assert (!elt->replacement);

  if (elt->use_block_copy)
    {
      /* Since ELT is not fully instantiated, we have to leave the
	 block copy in place.  Treat this as a USE.  */
      scalarize_use (elt, NULL, bsi, is_output);
    }
  else
    {
      /* The interesting case is when ELT is fully instantiated.  In this
	 case we can have each element stored/loaded directly to/from the
	 corresponding slot in OTHER.  This avoids a block copy.  */

      tree list = NULL, stmt = bsi_stmt (*bsi);

      mark_all_v_defs (stmt);
      generate_copy_inout (elt, is_output, other, &list);
      gcc_assert (list);

      /* Preserve EH semantics.  */
      if (stmt_ends_bb_p (stmt))
	{
	  tree_stmt_iterator tsi;
	  tree first;

	  /* Extract the first statement from LIST.  */
	  tsi = tsi_start (list);
	  first = tsi_stmt (tsi);
	  tsi_delink (&tsi);

	  /* Replace the old statement with this new representative.  */
	  bsi_replace (bsi, first, true);

	  if (!tsi_end_p (tsi))
	    {
	      /* If any reference would trap, then they all would.  And more
		 to the point, the first would.  Therefore none of the rest
		 will trap since the first didn't.  Indicate this by
		 iterating over the remaining statements and set
		 TREE_THIS_NOTRAP in all INDIRECT_REFs.  */
	      do
		{
		  walk_tree (tsi_stmt_ptr (tsi), mark_notrap, NULL, NULL);
		  tsi_next (&tsi);
		}
	      while (!tsi_end_p (tsi));

	      insert_edge_copies (list, bsi->bb);
	    }
	}
      else
	sra_replace (bsi, list);
    }
}

/* Generate initializations for all scalarizable parameters.  */

static void
scalarize_parms (void)
{
  tree list = NULL;
  unsigned i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (needs_copy_in, 0, i, bi)
    {
      tree var = referenced_var (i);
      struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
      generate_copy_inout (elt, true, var, &list);
    }

  if (list)
    insert_edge_copies (list, ENTRY_BLOCK_PTR);
}

/* Entry point to phase 4.  Update the function to match replacements.  */

static void
scalarize_function (void)
{
  static const struct sra_walk_fns fns = {
    scalarize_use, scalarize_copy, scalarize_init, scalarize_ldst, false
  };

  sra_walk_function (&fns);
  scalarize_parms ();
  bsi_commit_edge_inserts ();
}

/* Debug helper function.  Print ELT in a nice human-readable format.  */

static void
dump_sra_elt_name (FILE *f, struct sra_elt *elt)
{
  if (elt->parent && TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
    {
      fputs (elt->element == integer_zero_node ? "__real__ " : "__imag__ ", f);
      dump_sra_elt_name (f, elt->parent);
    }
  else
    {
      if (elt->parent)
	dump_sra_elt_name (f, elt->parent);
      if (DECL_P (elt->element))
	{
	  if (TREE_CODE (elt->element) == FIELD_DECL)
	    fputc ('.', f);
	  print_generic_expr (f, elt->element, dump_flags);
	}
      else
	fprintf (f, "[" HOST_WIDE_INT_PRINT_DEC "]",
		 TREE_INT_CST_LOW (elt->element));
    }
}

/* Likewise, but callable from the debugger.  */

void
debug_sra_elt_name (struct sra_elt *elt)
{
  dump_sra_elt_name (stderr, elt);
  fputc ('\n', stderr);
}

/* Main entry point.  */

static void
tree_sra (void)
{
  /* Initialize local variables.  */
  gcc_obstack_init (&sra_obstack);
  sra_candidates = BITMAP_XMALLOC ();
  needs_copy_in = BITMAP_XMALLOC ();
  sra_type_decomp_cache = BITMAP_XMALLOC ();
  sra_type_inst_cache = BITMAP_XMALLOC ();
  sra_map = htab_create (101, sra_elt_hash, sra_elt_eq, NULL);

  /* Scan.  If we find anything, instantiate and scalarize.  */
  if (find_candidates_for_sra ())
    {
      scan_function ();
      decide_instantiations ();
      scalarize_function ();
    }

  /* Free allocated memory.  */
  htab_delete (sra_map);
  sra_map = NULL;
  BITMAP_XFREE (sra_candidates);
  BITMAP_XFREE (needs_copy_in);
  BITMAP_XFREE (sra_type_decomp_cache);
  BITMAP_XFREE (sra_type_inst_cache);
  obstack_free (&sra_obstack, NULL);
}

static bool
gate_sra (void)
{
  return flag_tree_sra != 0;
}

struct tree_opt_pass pass_sra =
{
  "sra",				/* name */
  gate_sra,				/* gate */
  tree_sra,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_SRA,				/* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_rename_vars
    | TODO_ggc_collect | TODO_verify_ssa,  /* todo_flags_finish */
  0					/* letter */
};