Relax constraints on Machine_Attribute argument types:
[official-gcc.git] / gcc / tree-sra.c
bloba2f783afaede53acc6d40dc96c8bda40ddbbd24a
1 /* Scalar Replacement of Aggregates (SRA) converts some structure
2 references into scalar references, exposing them to the scalar
3 optimizers.
4 Copyright (C) 2008, 2009 Free Software Foundation, Inc.
5 Contributed by Martin Jambor <mjambor@suse.cz>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* This file implements Scalar Reduction of Aggregates (SRA). SRA is run
24 twice, once in the early stages of compilation (early SRA) and once in the
25 late stages (late SRA). The aim of both is to turn references to scalar
26 parts of aggregates into uses of independent scalar variables.
28 The two passes are nearly identical, the only difference is that early SRA
29 does not scalarize unions which are used as the result in a GIMPLE_RETURN
30 statement because together with inlining this can lead to weird type
31 conversions.
33 Both passes operate in four stages:
35 1. The declarations that have properties which make them candidates for
36 scalarization are identified in function find_var_candidates(). The
37 candidates are stored in candidate_bitmap.
39 2. The function body is scanned. In the process, declarations which are
40 used in a manner that prevent their scalarization are removed from the
41 candidate bitmap. More importantly, for every access into an aggregate,
42 an access structure (struct access) is created by create_access() and
43 stored in a vector associated with the aggregate. Among other
44 information, the aggregate declaration, the offset and size of the access
45 and its type are stored in the structure.
47 On a related note, assign_link structures are created for every assign
48 statement between candidate aggregates and attached to the related
49 accesses.
51 3. The vectors of accesses are analyzed. They are first sorted according to
52 their offset and size and then scanned for partially overlapping accesses
53 (i.e. those which overlap but one is not entirely within another). Such
54 an access disqualifies the whole aggregate from being scalarized.
56 If there is no such inhibiting overlap, a representative access structure
57 is chosen for every unique combination of offset and size. Afterwards,
58 the pass builds a set of trees from these structures, in which children
59 of an access are within their parent (in terms of offset and size).
61 Then accesses are propagated whenever possible (i.e. in cases when it
62 does not create a partially overlapping access) across assign_links from
63 the right hand side to the left hand side.
65 Then the set of trees for each declaration is traversed again and those
66 accesses which should be replaced by a scalar are identified.
68 4. The function is traversed again, and for every reference into an
69 aggregate that has some component which is about to be scalarized,
70 statements are amended and new statements are created as necessary.
71 Finally, if a parameter got scalarized, the scalar replacements are
72 initialized with values from respective parameter aggregates. */
74 #include "config.h"
75 #include "system.h"
76 #include "coretypes.h"
77 #include "alloc-pool.h"
78 #include "tm.h"
79 #include "tree.h"
80 #include "gimple.h"
81 #include "tree-flow.h"
82 #include "diagnostic.h"
83 #include "tree-dump.h"
84 #include "timevar.h"
85 #include "params.h"
86 #include "target.h"
87 #include "flags.h"
89 /* Enumeration of all aggregate reductions we can do. */
90 enum sra_mode { SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
91 SRA_MODE_INTRA }; /* late intraprocedural SRA */
93 /* Global variable describing which aggregate reduction we are performing at
94 the moment. */
95 static enum sra_mode sra_mode;
97 struct assign_link;
99 /* ACCESS represents each access to an aggregate variable (as a whole or a
100 part). It can also represent a group of accesses that refer to exactly the
101 same fragment of an aggregate (i.e. those that have exactly the same offset
102 and size). Such representatives for a single aggregate, once determined,
103 are linked in a linked list and have the group fields set.
105 Moreover, when doing intraprocedural SRA, a tree is built from those
106 representatives (by the means of first_child and next_sibling pointers), in
107 which all items in a subtree are "within" the root, i.e. their offset is
108 greater or equal to offset of the root and offset+size is smaller or equal
109 to offset+size of the root. Children of an access are sorted by offset.
111 Note that accesses to parts of vector and complex number types always
112 represented by an access to the whole complex number or a vector. It is a
113 duty of the modifying functions to replace them appropriately. */
115 struct access
117 /* Values returned by `get_ref_base_and_extent' for each component reference
118 If EXPR isn't a component reference just set `BASE = EXPR', `OFFSET = 0',
119 `SIZE = TREE_SIZE (TREE_TYPE (expr))'. */
120 HOST_WIDE_INT offset;
121 HOST_WIDE_INT size;
122 tree base;
124 /* Expression. */
125 tree expr;
126 /* Type. */
127 tree type;
129 /* Next group representative for this aggregate. */
130 struct access *next_grp;
132 /* Pointer to the group representative. Pointer to itself if the struct is
133 the representative. */
134 struct access *group_representative;
136 /* If this access has any children (in terms of the definition above), this
137 points to the first one. */
138 struct access *first_child;
140 /* Pointer to the next sibling in the access tree as described above. */
141 struct access *next_sibling;
143 /* Pointers to the first and last element in the linked list of assign
144 links. */
145 struct assign_link *first_link, *last_link;
147 /* Pointer to the next access in the work queue. */
148 struct access *next_queued;
150 /* Replacement variable for this access "region." Never to be accessed
151 directly, always only by the means of get_access_replacement() and only
152 when grp_to_be_replaced flag is set. */
153 tree replacement_decl;
155 /* Is this particular access write access? */
156 unsigned write : 1;
158 /* Is this access currently in the work queue? */
159 unsigned grp_queued : 1;
160 /* Does this group contain a write access? This flag is propagated down the
161 access tree. */
162 unsigned grp_write : 1;
163 /* Does this group contain a read access? This flag is propagated down the
164 access tree. */
165 unsigned grp_read : 1;
166 /* Is the subtree rooted in this access fully covered by scalar
167 replacements? */
168 unsigned grp_covered : 1;
169 /* If set to true, this access and all below it in an access tree must not be
170 scalarized. */
171 unsigned grp_unscalarizable_region : 1;
172 /* Whether data have been written to parts of the aggregate covered by this
173 access which is not to be scalarized. This flag is propagated up in the
174 access tree. */
175 unsigned grp_unscalarized_data : 1;
176 /* Does this access and/or group contain a write access through a
177 BIT_FIELD_REF? */
178 unsigned grp_partial_lhs : 1;
180 /* Set when a scalar replacement should be created for this variable. We do
181 the decision and creation at different places because create_tmp_var
182 cannot be called from within FOR_EACH_REFERENCED_VAR. */
183 unsigned grp_to_be_replaced : 1;
186 typedef struct access *access_p;
188 DEF_VEC_P (access_p);
189 DEF_VEC_ALLOC_P (access_p, heap);
191 /* Alloc pool for allocating access structures. */
192 static alloc_pool access_pool;
194 /* A structure linking lhs and rhs accesses from an aggregate assignment. They
195 are used to propagate subaccesses from rhs to lhs as long as they don't
196 conflict with what is already there. */
197 struct assign_link
199 struct access *lacc, *racc;
200 struct assign_link *next;
203 /* Alloc pool for allocating assign link structures. */
204 static alloc_pool link_pool;
206 /* Base (tree) -> Vector (VEC(access_p,heap) *) map. */
207 static struct pointer_map_t *base_access_vec;
209 /* Bitmap of bases (candidates). */
210 static bitmap candidate_bitmap;
211 /* Obstack for creation of fancy names. */
212 static struct obstack name_obstack;
214 /* Head of a linked list of accesses that need to have its subaccesses
215 propagated to their assignment counterparts. */
216 static struct access *work_queue_head;
218 /* Dump contents of ACCESS to file F in a human friendly way. If GRP is true,
219 representative fields are dumped, otherwise those which only describe the
220 individual access are. */
222 static void
223 dump_access (FILE *f, struct access *access, bool grp)
225 fprintf (f, "access { ");
226 fprintf (f, "base = (%d)'", DECL_UID (access->base));
227 print_generic_expr (f, access->base, 0);
228 fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
229 fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
230 fprintf (f, ", expr = ");
231 print_generic_expr (f, access->expr, 0);
232 fprintf (f, ", type = ");
233 print_generic_expr (f, access->type, 0);
234 if (grp)
235 fprintf (f, ", grp_write = %d, grp_read = %d, grp_covered = %d, "
236 "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
237 "grp_partial_lhs = %d, grp_to_be_replaced = %d\n",
238 access->grp_write, access->grp_read, access->grp_covered,
239 access->grp_unscalarizable_region, access->grp_unscalarized_data,
240 access->grp_partial_lhs, access->grp_to_be_replaced);
241 else
242 fprintf (f, ", write = %d, grp_partial_lhs = %d\n", access->write,
243 access->grp_partial_lhs);
246 /* Dump a subtree rooted in ACCESS to file F, indent by LEVEL. */
248 static void
249 dump_access_tree_1 (FILE *f, struct access *access, int level)
253 int i;
255 for (i = 0; i < level; i++)
256 fputs ("* ", dump_file);
258 dump_access (f, access, true);
260 if (access->first_child)
261 dump_access_tree_1 (f, access->first_child, level + 1);
263 access = access->next_sibling;
265 while (access);
268 /* Dump all access trees for a variable, given the pointer to the first root in
269 ACCESS. */
271 static void
272 dump_access_tree (FILE *f, struct access *access)
274 for (; access; access = access->next_grp)
275 dump_access_tree_1 (f, access, 0);
278 /* Return true iff ACC is non-NULL and has subaccesses. */
280 static inline bool
281 access_has_children_p (struct access *acc)
283 return acc && acc->first_child;
286 /* Return a vector of pointers to accesses for the variable given in BASE or
287 NULL if there is none. */
289 static VEC (access_p, heap) *
290 get_base_access_vector (tree base)
292 void **slot;
294 slot = pointer_map_contains (base_access_vec, base);
295 if (!slot)
296 return NULL;
297 else
298 return *(VEC (access_p, heap) **) slot;
301 /* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
302 in ACCESS. Return NULL if it cannot be found. */
304 static struct access *
305 find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
306 HOST_WIDE_INT size)
308 while (access && (access->offset != offset || access->size != size))
310 struct access *child = access->first_child;
312 while (child && (child->offset + child->size <= offset))
313 child = child->next_sibling;
314 access = child;
317 return access;
320 /* Return the first group representative for DECL or NULL if none exists. */
322 static struct access *
323 get_first_repr_for_decl (tree base)
325 VEC (access_p, heap) *access_vec;
327 access_vec = get_base_access_vector (base);
328 if (!access_vec)
329 return NULL;
331 return VEC_index (access_p, access_vec, 0);
334 /* Find an access representative for the variable BASE and given OFFSET and
335 SIZE. Requires that access trees have already been built. Return NULL if
336 it cannot be found. */
338 static struct access *
339 get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
340 HOST_WIDE_INT size)
342 struct access *access;
344 access = get_first_repr_for_decl (base);
345 while (access && (access->offset + access->size <= offset))
346 access = access->next_grp;
347 if (!access)
348 return NULL;
350 return find_access_in_subtree (access, offset, size);
353 /* Add LINK to the linked list of assign links of RACC. */
354 static void
355 add_link_to_rhs (struct access *racc, struct assign_link *link)
357 gcc_assert (link->racc == racc);
359 if (!racc->first_link)
361 gcc_assert (!racc->last_link);
362 racc->first_link = link;
364 else
365 racc->last_link->next = link;
367 racc->last_link = link;
368 link->next = NULL;
371 /* Move all link structures in their linked list in OLD_RACC to the linked list
372 in NEW_RACC. */
373 static void
374 relink_to_new_repr (struct access *new_racc, struct access *old_racc)
376 if (!old_racc->first_link)
378 gcc_assert (!old_racc->last_link);
379 return;
382 if (new_racc->first_link)
384 gcc_assert (!new_racc->last_link->next);
385 gcc_assert (!old_racc->last_link || !old_racc->last_link->next);
387 new_racc->last_link->next = old_racc->first_link;
388 new_racc->last_link = old_racc->last_link;
390 else
392 gcc_assert (!new_racc->last_link);
394 new_racc->first_link = old_racc->first_link;
395 new_racc->last_link = old_racc->last_link;
397 old_racc->first_link = old_racc->last_link = NULL;
400 /* Add ACCESS to the work queue (which is actually a stack). */
402 static void
403 add_access_to_work_queue (struct access *access)
405 if (!access->grp_queued)
407 gcc_assert (!access->next_queued);
408 access->next_queued = work_queue_head;
409 access->grp_queued = 1;
410 work_queue_head = access;
414 /* Pop an access from the work queue, and return it, assuming there is one. */
416 static struct access *
417 pop_access_from_work_queue (void)
419 struct access *access = work_queue_head;
421 work_queue_head = access->next_queued;
422 access->next_queued = NULL;
423 access->grp_queued = 0;
424 return access;
428 /* Allocate necessary structures. */
430 static void
431 sra_initialize (void)
433 candidate_bitmap = BITMAP_ALLOC (NULL);
434 gcc_obstack_init (&name_obstack);
435 access_pool = create_alloc_pool ("SRA accesses", sizeof (struct access), 16);
436 link_pool = create_alloc_pool ("SRA links", sizeof (struct assign_link), 16);
437 base_access_vec = pointer_map_create ();
440 /* Hook fed to pointer_map_traverse, deallocate stored vectors. */
442 static bool
443 delete_base_accesses (const void *key ATTRIBUTE_UNUSED, void **value,
444 void *data ATTRIBUTE_UNUSED)
446 VEC (access_p, heap) *access_vec;
447 access_vec = (VEC (access_p, heap) *) *value;
448 VEC_free (access_p, heap, access_vec);
450 return true;
453 /* Deallocate all general structures. */
455 static void
456 sra_deinitialize (void)
458 BITMAP_FREE (candidate_bitmap);
459 free_alloc_pool (access_pool);
460 free_alloc_pool (link_pool);
461 obstack_free (&name_obstack, NULL);
463 pointer_map_traverse (base_access_vec, delete_base_accesses, NULL);
464 pointer_map_destroy (base_access_vec);
467 /* Remove DECL from candidates for SRA and write REASON to the dump file if
468 there is one. */
469 static void
470 disqualify_candidate (tree decl, const char *reason)
472 bitmap_clear_bit (candidate_bitmap, DECL_UID (decl));
474 if (dump_file && (dump_flags & TDF_DETAILS))
476 fprintf (dump_file, "! Disqualifying ");
477 print_generic_expr (dump_file, decl, 0);
478 fprintf (dump_file, " - %s\n", reason);
482 /* Return true iff the type contains a field or an element which does not allow
483 scalarization. */
485 static bool
486 type_internals_preclude_sra_p (tree type)
488 tree fld;
489 tree et;
491 switch (TREE_CODE (type))
493 case RECORD_TYPE:
494 case UNION_TYPE:
495 case QUAL_UNION_TYPE:
496 for (fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
497 if (TREE_CODE (fld) == FIELD_DECL)
499 tree ft = TREE_TYPE (fld);
501 if (TREE_THIS_VOLATILE (fld)
502 || !DECL_FIELD_OFFSET (fld) || !DECL_SIZE (fld)
503 || !host_integerp (DECL_FIELD_OFFSET (fld), 1)
504 || !host_integerp (DECL_SIZE (fld), 1))
505 return true;
507 if (AGGREGATE_TYPE_P (ft)
508 && type_internals_preclude_sra_p (ft))
509 return true;
512 return false;
514 case ARRAY_TYPE:
515 et = TREE_TYPE (type);
517 if (AGGREGATE_TYPE_P (et))
518 return type_internals_preclude_sra_p (et);
519 else
520 return false;
522 default:
523 return false;
527 /* Create and insert access for EXPR. Return created access, or NULL if it is
528 not possible. */
530 static struct access *
531 create_access (tree expr, bool write)
533 struct access *access;
534 void **slot;
535 VEC (access_p,heap) *vec;
536 HOST_WIDE_INT offset, size, max_size;
537 tree base = expr;
538 bool unscalarizable_region = false;
540 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
542 if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
543 return NULL;
545 if (size != max_size)
547 size = max_size;
548 unscalarizable_region = true;
551 if (size < 0)
553 disqualify_candidate (base, "Encountered an unconstrained access.");
554 return NULL;
557 access = (struct access *) pool_alloc (access_pool);
558 memset (access, 0, sizeof (struct access));
560 access->base = base;
561 access->offset = offset;
562 access->size = size;
563 access->expr = expr;
564 access->type = TREE_TYPE (expr);
565 access->write = write;
566 access->grp_unscalarizable_region = unscalarizable_region;
568 slot = pointer_map_contains (base_access_vec, base);
569 if (slot)
570 vec = (VEC (access_p, heap) *) *slot;
571 else
572 vec = VEC_alloc (access_p, heap, 32);
574 VEC_safe_push (access_p, heap, vec, access);
576 *((struct VEC (access_p,heap) **)
577 pointer_map_insert (base_access_vec, base)) = vec;
579 return access;
583 /* Search the given tree for a declaration by skipping handled components and
584 exclude it from the candidates. */
586 static void
587 disqualify_base_of_expr (tree t, const char *reason)
589 while (handled_component_p (t))
590 t = TREE_OPERAND (t, 0);
592 if (DECL_P (t))
593 disqualify_candidate (t, reason);
596 /* Scan expression EXPR and create access structures for all accesses to
597 candidates for scalarization. Return the created access or NULL if none is
598 created. */
600 static struct access *
601 build_access_from_expr_1 (tree *expr_ptr, bool write)
603 struct access *ret = NULL;
604 tree expr = *expr_ptr;
605 bool partial_ref;
607 if (TREE_CODE (expr) == BIT_FIELD_REF
608 || TREE_CODE (expr) == IMAGPART_EXPR
609 || TREE_CODE (expr) == REALPART_EXPR)
611 expr = TREE_OPERAND (expr, 0);
612 partial_ref = true;
614 else
615 partial_ref = false;
617 /* We need to dive through V_C_Es in order to get the size of its parameter
618 and not the result type. Ada produces such statements. We are also
619 capable of handling the topmost V_C_E but not any of those buried in other
620 handled components. */
621 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
622 expr = TREE_OPERAND (expr, 0);
624 if (contains_view_convert_expr_p (expr))
626 disqualify_base_of_expr (expr, "V_C_E under a different handled "
627 "component.");
628 return NULL;
631 switch (TREE_CODE (expr))
633 case VAR_DECL:
634 case PARM_DECL:
635 case RESULT_DECL:
636 case COMPONENT_REF:
637 case ARRAY_REF:
638 case ARRAY_RANGE_REF:
639 ret = create_access (expr, write);
640 break;
642 default:
643 break;
646 if (write && partial_ref && ret)
647 ret->grp_partial_lhs = 1;
649 return ret;
652 /* Callback of scan_function. Scan expression EXPR and create access
653 structures for all accesses to candidates for scalarization. Return true if
654 any access has been inserted. */
656 static bool
657 build_access_from_expr (tree *expr_ptr,
658 gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED, bool write,
659 void *data ATTRIBUTE_UNUSED)
661 return build_access_from_expr_1 (expr_ptr, write) != NULL;
664 /* Disqualify LHS and RHS for scalarization if STMT must end its basic block in
665 modes in which it matters, return true iff they have been disqualified. RHS
666 may be NULL, in that case ignore it. If we scalarize an aggregate in
667 intra-SRA we may need to add statements after each statement. This is not
668 possible if a statement unconditionally has to end the basic block. */
669 static bool
670 disqualify_ops_if_throwing_stmt (gimple stmt, tree lhs, tree rhs)
672 if (stmt_can_throw_internal (stmt) || stmt_ends_bb_p (stmt))
674 disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
675 if (rhs)
676 disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
677 return true;
679 return false;
683 /* Result code for scan_assign callback for scan_function. */
684 enum scan_assign_result { SRA_SA_NONE, /* nothing done for the stmt */
685 SRA_SA_PROCESSED, /* stmt analyzed/changed */
686 SRA_SA_REMOVED }; /* stmt redundant and eliminated */
689 /* Callback of scan_function. Scan expressions occuring in the statement
690 pointed to by STMT_EXPR, create access structures for all accesses to
691 candidates for scalarization and remove those candidates which occur in
692 statements or expressions that prevent them from being split apart. Return
693 true if any access has been inserted. */
695 static enum scan_assign_result
696 build_accesses_from_assign (gimple *stmt_ptr,
697 gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED,
698 void *data ATTRIBUTE_UNUSED)
700 gimple stmt = *stmt_ptr;
701 tree *lhs_ptr, *rhs_ptr;
702 struct access *lacc, *racc;
704 if (!gimple_assign_single_p (stmt))
705 return SRA_SA_NONE;
707 lhs_ptr = gimple_assign_lhs_ptr (stmt);
708 rhs_ptr = gimple_assign_rhs1_ptr (stmt);
710 if (disqualify_ops_if_throwing_stmt (stmt, *lhs_ptr, *rhs_ptr))
711 return SRA_SA_NONE;
713 racc = build_access_from_expr_1 (rhs_ptr, false);
714 lacc = build_access_from_expr_1 (lhs_ptr, true);
716 if (lacc && racc
717 && !lacc->grp_unscalarizable_region
718 && !racc->grp_unscalarizable_region
719 && AGGREGATE_TYPE_P (TREE_TYPE (*lhs_ptr))
720 /* FIXME: Turn the following line into an assert after PR 40058 is
721 fixed. */
722 && lacc->size == racc->size
723 && useless_type_conversion_p (lacc->type, racc->type))
725 struct assign_link *link;
727 link = (struct assign_link *) pool_alloc (link_pool);
728 memset (link, 0, sizeof (struct assign_link));
730 link->lacc = lacc;
731 link->racc = racc;
733 add_link_to_rhs (racc, link);
736 return (lacc || racc) ? SRA_SA_PROCESSED : SRA_SA_NONE;
739 /* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
740 GIMPLE_ASM operands with memory constrains which cannot be scalarized. */
742 static bool
743 asm_visit_addr (gimple stmt ATTRIBUTE_UNUSED, tree op,
744 void *data ATTRIBUTE_UNUSED)
746 if (DECL_P (op))
747 disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");
749 return false;
/* Scan function and look for interesting statements.  Return true if any has
   been found or processed, as indicated by callbacks.  SCAN_EXPR is a callback
   called on all expressions within statements except assign statements and
   those deemed entirely unsuitable for some reason (all operands in such
   statements and expression are removed from candidate_bitmap).  SCAN_ASSIGN
   is a callback called on all assign statements, HANDLE_SSA_DEFS is a callback
   called on assign statements and those call statements which have a lhs and
   it is the only callback which can be NULL.  ANALYSIS_STAGE is true when
   running in the analysis stage of a pass and thus no statement is being
   modified.  DATA is a pointer passed to all callbacks.  If any single
   callback returns true, this function also returns true, otherwise it returns
   false.

   NOTE(review): this copy was obtained from a web code browser which dropped
   brace-only lines; the original bracing (notably around the GIMPLE_ASM case
   and the trailing if/else) must be consulted before modifying this code.  */
766 static bool
767 scan_function (bool (*scan_expr) (tree *, gimple_stmt_iterator *, bool, void *),
768 enum scan_assign_result (*scan_assign) (gimple *,
769 gimple_stmt_iterator *,
770 void *),
771 bool (*handle_ssa_defs)(gimple, void *),
772 bool analysis_stage, void *data)
774 gimple_stmt_iterator gsi;
775 basic_block bb;
776 unsigned i;
777 tree *t;
778 bool ret = false;
/* Walk every statement of every basic block; dispatch on statement kind.  */
780 FOR_EACH_BB (bb)
782 bool bb_changed = false;
784 gsi = gsi_start_bb (bb);
785 while (!gsi_end_p (gsi))
787 gimple stmt = gsi_stmt (gsi);
788 enum scan_assign_result assign_result;
/* ANY - some callback reported doing work for this stmt; DELETED - the
   assign callback removed the statement, so the iterator must not be
   advanced past it.  */
789 bool any = false, deleted = false;
791 switch (gimple_code (stmt))
793 case GIMPLE_RETURN:
794 t = gimple_return_retval_ptr (stmt);
795 if (*t != NULL_TREE)
796 any |= scan_expr (t, &gsi, false, data);
797 break;
799 case GIMPLE_ASSIGN:
800 assign_result = scan_assign (&stmt, &gsi, data);
801 any |= assign_result == SRA_SA_PROCESSED;
802 deleted = assign_result == SRA_SA_REMOVED;
/* Deleted statements have no SSA defs left to handle.  */
803 if (handle_ssa_defs && assign_result != SRA_SA_REMOVED)
804 any |= handle_ssa_defs (stmt, data);
805 break;
807 case GIMPLE_CALL:
808 /* Operands must be processed before the lhs.  */
809 for (i = 0; i < gimple_call_num_args (stmt); i++)
811 tree *argp = gimple_call_arg_ptr (stmt, i);
812 any |= scan_expr (argp, &gsi, false, data);
/* The lhs of a throwing call is disqualified during analysis; in the
   modification stage it is scanned unconditionally.  */
815 if (gimple_call_lhs (stmt))
817 tree *lhs_ptr = gimple_call_lhs_ptr (stmt);
818 if (!analysis_stage
819 || !disqualify_ops_if_throwing_stmt (stmt,
820 *lhs_ptr, NULL))
822 any |= scan_expr (lhs_ptr, &gsi, true, data);
823 if (handle_ssa_defs)
824 any |= handle_ssa_defs (stmt, data);
827 break;
829 case GIMPLE_ASM:
/* During analysis, disqualify anything whose address is taken by the
   asm; memory-constrained operands cannot be scalarized.  */
831 if (analysis_stage)
832 walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
833 asm_visit_addr);
834 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
836 tree *op = &TREE_VALUE (gimple_asm_input_op (stmt, i));
837 any |= scan_expr (op, &gsi, false, data);
839 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
841 tree *op = &TREE_VALUE (gimple_asm_output_op (stmt, i));
842 any |= scan_expr (op, &gsi, true, data);
845 default:
846 break;
/* When a callback changed the statement outside of the analysis stage,
   update it and drop it from its EH region if it can no longer throw.  */
849 if (any)
851 ret = true;
852 bb_changed = true;
854 if (!analysis_stage)
856 update_stmt (stmt);
857 if (!stmt_could_throw_p (stmt))
858 remove_stmt_from_eh_region (stmt);
/* A removed statement leaves GSI pointing at its successor already, so
   only advance for surviving statements.  */
861 if (deleted)
862 bb_changed = true;
863 else
865 gsi_next (&gsi);
/* NOTE(review): as rendered here, `ret = true' appears to run for every
   non-deleted statement, which would make this function always return
   true; the dropped brace lines make the intended scope ambiguous --
   verify against the original before relying on the return value.  */
866 ret = true;
/* Throwing statements may have been modified or removed; clean up EH
   edges for the block.  */
869 if (!analysis_stage && bb_changed)
870 gimple_purge_dead_eh_edges (bb);
873 return ret;
876 /* Helper of QSORT function. There are pointers to accesses in the array. An
877 access is considered smaller than another if it has smaller offset or if the
878 offsets are the same but is size is bigger. */
880 static int
881 compare_access_positions (const void *a, const void *b)
883 const access_p *fp1 = (const access_p *) a;
884 const access_p *fp2 = (const access_p *) b;
885 const access_p f1 = *fp1;
886 const access_p f2 = *fp2;
888 if (f1->offset != f2->offset)
889 return f1->offset < f2->offset ? -1 : 1;
891 if (f1->size == f2->size)
893 /* Put any non-aggregate type before any aggregate type. */
894 if (!is_gimple_reg_type (f1->type)
895 && is_gimple_reg_type (f2->type))
896 return 1;
897 else if (is_gimple_reg_type (f1->type)
898 && !is_gimple_reg_type (f2->type))
899 return -1;
900 /* Put the integral type with the bigger precision first. */
901 else if (INTEGRAL_TYPE_P (f1->type)
902 && INTEGRAL_TYPE_P (f2->type))
903 return TYPE_PRECISION (f1->type) > TYPE_PRECISION (f2->type) ? -1 : 1;
904 /* Put any integral type with non-full precision last. */
905 else if (INTEGRAL_TYPE_P (f1->type)
906 && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
907 != TYPE_PRECISION (f1->type)))
908 return 1;
909 else if (INTEGRAL_TYPE_P (f2->type)
910 && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
911 != TYPE_PRECISION (f2->type)))
912 return -1;
913 /* Stabilize the sort. */
914 return TYPE_UID (f1->type) - TYPE_UID (f2->type);
917 /* We want the bigger accesses first, thus the opposite operator in the next
918 line: */
919 return f1->size > f2->size ? -1 : 1;
923 /* Append a name of the declaration to the name obstack. A helper function for
924 make_fancy_name. */
926 static void
927 make_fancy_decl_name (tree decl)
929 char buffer[32];
931 tree name = DECL_NAME (decl);
932 if (name)
933 obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
934 IDENTIFIER_LENGTH (name));
935 else
937 sprintf (buffer, "D%u", DECL_UID (decl));
938 obstack_grow (&name_obstack, buffer, strlen (buffer));
942 /* Helper for make_fancy_name. */
944 static void
945 make_fancy_name_1 (tree expr)
947 char buffer[32];
948 tree index;
950 if (DECL_P (expr))
952 make_fancy_decl_name (expr);
953 return;
956 switch (TREE_CODE (expr))
958 case COMPONENT_REF:
959 make_fancy_name_1 (TREE_OPERAND (expr, 0));
960 obstack_1grow (&name_obstack, '$');
961 make_fancy_decl_name (TREE_OPERAND (expr, 1));
962 break;
964 case ARRAY_REF:
965 make_fancy_name_1 (TREE_OPERAND (expr, 0));
966 obstack_1grow (&name_obstack, '$');
967 /* Arrays with only one element may not have a constant as their
968 index. */
969 index = TREE_OPERAND (expr, 1);
970 if (TREE_CODE (index) != INTEGER_CST)
971 break;
972 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
973 obstack_grow (&name_obstack, buffer, strlen (buffer));
975 break;
977 case BIT_FIELD_REF:
978 case REALPART_EXPR:
979 case IMAGPART_EXPR:
980 gcc_unreachable (); /* we treat these as scalars. */
981 break;
982 default:
983 break;
/* Create a human readable name describing EXPR, to serve as the name of a
   replacement variable.  The string is built on name_obstack and finished
   there; the caller owns the obstack-allocated result.  */
989 static char *
990 make_fancy_name (tree expr)
992 make_fancy_name_1 (expr);
993 obstack_1grow (&name_obstack, '\0');
994 return XOBFINISH (&name_obstack, char *);
/* Helper function for build_ref_for_offset.  Descend through TYPE looking for
   a component at bit position OFFSET which is compatible with EXP_TYPE; if RES
   is non-NULL, wrap *RES in the corresponding COMPONENT_REF/ARRAY_REF on the
   way down.  Return true iff such a reference can be built.

   NOTE(review): this copy came through a web code browser that dropped some
   lines (e.g. the final argument and closing paren of the int_const_binop
   call below, original line 1063); consult the original before editing.  */
999 static bool
1000 build_ref_for_offset_1 (tree *res, tree type, HOST_WIDE_INT offset,
1001 tree exp_type)
1003 while (1)
1005 tree fld;
1006 tree tr_size, index;
1007 HOST_WIDE_INT el_size;
/* Success: we have drilled down to OFFSET zero with a matching type.  */
1009 if (offset == 0 && exp_type
1010 && useless_type_conversion_p (exp_type, type))
1011 return true;
1013 switch (TREE_CODE (type))
1015 case UNION_TYPE:
1016 case QUAL_UNION_TYPE:
1017 case RECORD_TYPE:
1018 /* Some ADA records are half-unions, treat all of them the same.  */
1019 for (fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
1021 HOST_WIDE_INT pos, size;
1022 tree expr, *expr_ptr;
1024 if (TREE_CODE (fld) != FIELD_DECL)
1025 continue;
1027 pos = int_bit_position (fld);
/* In unions every field must start at bit position zero.  */
1028 gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
1029 size = tree_low_cst (DECL_SIZE (fld), 1);
/* Skip fields that do not contain OFFSET.  */
1030 if (pos > offset || (pos + size) <= offset)
1031 continue;
/* Build the COMPONENT_REF only when the caller wants the expression.  */
1033 if (res)
1035 expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
1036 NULL_TREE);
1037 expr_ptr = &expr;
1039 else
1040 expr_ptr = NULL;
/* Recurse into the field; commit the new expression only on success so
   *RES is untouched when the field does not work out.  */
1041 if (build_ref_for_offset_1 (expr_ptr, TREE_TYPE (fld),
1042 offset - pos, exp_type))
1044 if (res)
1045 *res = expr;
1046 return true;
1049 return false;
1051 case ARRAY_TYPE:
/* The element size must be a known host integer to index into it.  */
1052 tr_size = TYPE_SIZE (TREE_TYPE (type));
1053 if (!tr_size || !host_integerp (tr_size, 1))
1054 return false;
1055 el_size = tree_low_cst (tr_size, 1);
1057 if (res)
1059 index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
/* Bias the index by the lower bound of domains not starting at zero.  */
1060 if (!integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
1061 index = int_const_binop (PLUS_EXPR, index,
1062 TYPE_MIN_VALUE (TYPE_DOMAIN (type)),
1064 *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
1065 NULL_TREE, NULL_TREE);
/* Iterate (rather than recurse) into the element type.  */
1067 offset = offset % el_size;
1068 type = TREE_TYPE (type);
1069 break;
1071 default:
/* A scalar: succeed only when OFFSET is zero and the caller placed no
   constraint on the resulting type.  */
1072 if (offset != 0)
1073 return false;
1075 if (exp_type)
1076 return false;
1077 else
1078 return true;
1083 /* Construct an expression that would reference a part of aggregate *EXPR of
1084 type TYPE at the given OFFSET of the type EXP_TYPE. If EXPR is NULL, the
1085 function only determines whether it can build such a reference without
1086 actually doing it.
1088 FIXME: Eventually this should be replaced with
1089 maybe_fold_offset_to_reference() from tree-ssa-ccp.c but that requires a
1090 minor rewrite of fold_stmt.
1093 static bool
1094 build_ref_for_offset (tree *expr, tree type, HOST_WIDE_INT offset,
1095 tree exp_type, bool allow_ptr)
/* If ALLOW_PTR is set and TYPE is a pointer, look through it: build the
   reference into the pointed-to type and wrap *EXPR in an INDIRECT_REF.  */
1097 if (allow_ptr && POINTER_TYPE_P (type))
1099 type = TREE_TYPE (type);
1100 if (expr)
1101 *expr = fold_build1 (INDIRECT_REF, type, *expr);
/* The recursive worker walks the type structure and does the real work.  */
1104 return build_ref_for_offset_1 (expr, type, offset, exp_type);
1107 /* The very first phase of intraprocedural SRA. It marks in candidate_bitmap
1108 those variables whose type makes them suitable for scalarization.  Return
true iff at least one candidate was found.  */
1110 static bool
1111 find_var_candidates (void)
1113 tree var, type;
1114 referenced_var_iterator rvi;
1115 bool ret = false;
1117 FOR_EACH_REFERENCED_VAR (var, rvi)
/* Only ordinary variables and parameters can be scalarized.  */
1119 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (var) != PARM_DECL)
1120 continue;
1121 type = TREE_TYPE (var);
/* Disqualify non-aggregates, variables that must live in memory or are
   volatile, incomplete or zero-sized types, types whose size does not fit
   a host integer, and types whose internals preclude SRA.  */
1123 if (!AGGREGATE_TYPE_P (type)
1124 || needs_to_live_in_memory (var)
1125 || TREE_THIS_VOLATILE (var)
1126 || !COMPLETE_TYPE_P (type)
1127 || !host_integerp (TYPE_SIZE (type), 1)
1128 || tree_low_cst (TYPE_SIZE (type), 1) == 0
1129 || type_internals_preclude_sra_p (type))
1130 continue;
1132 bitmap_set_bit (candidate_bitmap, DECL_UID (var));
1134 if (dump_file && (dump_flags & TDF_DETAILS))
1136 fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
1137 print_generic_expr (dump_file, var, 0);
1138 fprintf (dump_file, "\n");
1140 ret = true;
1143 return ret;
1146 /* Sort all accesses for the given variable, check for partial overlaps and
1147 return NULL if there are any. If there are none, pick a representative for
1148 each combination of offset and size and create a linked list out of them.
1149 Return the pointer to the first representative and make sure it is the first
1150 one in the vector of accesses. */
1152 static struct access *
1153 sort_and_splice_var_accesses (tree var)
1155 int i, j, access_count;
1156 struct access *res, **prev_acc_ptr = &res;
1157 VEC (access_p, heap) *access_vec;
1158 bool first = true;
1159 HOST_WIDE_INT low = -1, high = 0;
1161 access_vec = get_base_access_vector (var);
1162 if (!access_vec)
1163 return NULL;
1164 access_count = VEC_length (access_p, access_vec);
1166 /* Sort by <OFFSET, SIZE>. */
1167 qsort (VEC_address (access_p, access_vec), access_count, sizeof (access_p),
1168 compare_access_positions);
1170 i = 0;
1171 while (i < access_count)
1173 struct access *access = VEC_index (access_p, access_vec, i);
1174 bool modification = access->write;
1175 bool grp_read = !access->write;
1176 bool grp_partial_lhs = access->grp_partial_lhs;
1177 bool first_scalar = is_gimple_reg_type (access->type);
1178 bool unscalarizable_region = access->grp_unscalarizable_region;
/* [low, high) is the span of the current outermost access.  Each new
   access must either start a fresh span, lie entirely within the current
   one, or it partially overlaps and the whole variable is rejected.  */
1180 if (first || access->offset >= high)
1182 first = false;
1183 low = access->offset;
1184 high = access->offset + access->size;
1186 else if (access->offset > low && access->offset + access->size > high)
1187 return NULL;
1188 else
1189 gcc_assert (access->offset >= low
1190 && access->offset + access->size <= high);
/* Merge all following accesses with the same <offset, size> into this
   representative, OR-ing their flags together.  */
1192 j = i + 1;
1193 while (j < access_count)
1195 struct access *ac2 = VEC_index (access_p, access_vec, j);
1196 if (ac2->offset != access->offset || ac2->size != access->size)
1197 break;
1198 modification |= ac2->write;
1199 grp_read |= !ac2->write;
1200 grp_partial_lhs |= ac2->grp_partial_lhs;
1201 unscalarizable_region |= ac2->grp_unscalarizable_region;
1202 relink_to_new_repr (access, ac2);
1204 /* If there are both aggregate-type and scalar-type accesses with
1205 this combination of size and offset, the comparison function
1206 should have put the scalars first. */
1207 gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
1208 ac2->group_representative = access;
1209 j++;
1212 i = j;
1214 access->group_representative = access;
1215 access->grp_write = modification;
1216 access->grp_read = grp_read;
1217 access->grp_partial_lhs = grp_partial_lhs;
1218 access->grp_unscalarizable_region = unscalarizable_region;
/* Representatives with assignment links take part in subaccess
   propagation later.  */
1219 if (access->first_link)
1220 add_access_to_work_queue (access);
1222 *prev_acc_ptr = access;
1223 prev_acc_ptr = &access->next_grp;
1226 gcc_assert (res == VEC_index (access_p, access_vec, 0));
1227 return res;
1230 /* Create a variable for the given ACCESS which determines the type, name and a
1231 few other properties. Return the variable declaration; the caller
(get_access_replacement) stores it into ACCESS->replacement_decl.  */
1234 static tree
1235 create_access_replacement (struct access *access)
1237 tree repl;
1239 repl = create_tmp_var (access->type, "SR");
1240 get_var_ann (repl);
1241 add_referenced_var (repl);
1242 mark_sym_for_renaming (repl);
/* Complex and vector replacements can be treated as GIMPLE registers only
   when they are never written to in parts (no grp_partial_lhs).  */
1244 if (!access->grp_partial_lhs
1245 && (TREE_CODE (access->type) == COMPLEX_TYPE
1246 || TREE_CODE (access->type) == VECTOR_TYPE))
1247 DECL_GIMPLE_REG_P (repl) = 1;
1249 DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
1250 DECL_ARTIFICIAL (repl) = 1;
/* For user-visible bases, construct a pretty name and a debug expression
   so the replacement shows up sensibly in debug information.  */
1252 if (DECL_NAME (access->base)
1253 && !DECL_IGNORED_P (access->base)
1254 && !DECL_ARTIFICIAL (access->base))
1256 char *pretty_name = make_fancy_name (access->expr);
1258 DECL_NAME (repl) = get_identifier (pretty_name);
1259 obstack_free (&name_obstack, pretty_name);
1261 SET_DECL_DEBUG_EXPR (repl, access->expr);
1262 DECL_DEBUG_EXPR_IS_FROM (repl) = 1;
1263 DECL_IGNORED_P (repl) = 0;
1266 DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
1267 TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
1269 if (dump_file)
1271 fprintf (dump_file, "Created a replacement for ");
1272 print_generic_expr (dump_file, access->base, 0);
1273 fprintf (dump_file, " offset: %u, size: %u: ",
1274 (unsigned) access->offset, (unsigned) access->size);
1275 print_generic_expr (dump_file, repl, 0);
1276 fprintf (dump_file, "\n");
1279 return repl;
1282 /* Return the scalar replacement of ACCESS, lazily creating it on first
request.  ACCESS must have been marked for replacement.  */
1284 static inline tree
1285 get_access_replacement (struct access *access)
1287 gcc_assert (access->grp_to_be_replaced);
1289 if (access->replacement_decl)
1290 return access->replacement_decl;
1292 access->replacement_decl = create_access_replacement (access);
1293 return access->replacement_decl;
1296 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
1297 linked list along the way. Stop when *ACCESS is NULL or the access pointed
1298 to it is not "within" the root. */
1300 static void
1301 build_access_subtree (struct access **access)
1303 struct access *root = *access, *last_child = NULL;
/* An access from the sorted next_grp list becomes a child of ROOT iff it
   ends at or before LIMIT, i.e. it is fully contained in ROOT.  */
1304 HOST_WIDE_INT limit = root->offset + root->size;
1306 *access = (*access)->next_grp;
1307 while (*access && (*access)->offset + (*access)->size <= limit)
1309 if (!last_child)
1310 root->first_child = *access;
1311 else
1312 last_child->next_sibling = *access;
1313 last_child = *access;
/* Recurse: the child may itself contain further accesses.  */
1315 build_access_subtree (access);
1319 /* Build a tree of access representatives, ACCESS is the pointer to the first
1320 one, others are linked in a list by the next_grp field.  After this, the
next_grp links of tree roots chain the individual trees together.  */
1323 static void
1324 build_access_trees (struct access *access)
1326 while (access)
1328 struct access *root = access;
/* build_access_subtree advances ACCESS past everything contained in
   ROOT; relink ROOT to the next tree root.  */
1330 build_access_subtree (&access);
1331 root->next_grp = access;
1335 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
1336 both seeming beneficial and when ALLOW_REPLACEMENTS allows it.  Also set
1337 all sorts of access flags appropriately along the way, notably always set
1338 grp_read when MARK_READ is true and grp_write when MARK_WRITE is true. */
1340 static bool
1341 analyze_access_subtree (struct access *root, bool allow_replacements,
1342 bool mark_read, bool mark_write)
1344 struct access *child;
1345 HOST_WIDE_INT limit = root->offset + root->size;
1346 HOST_WIDE_INT covered_to = root->offset;
1347 bool scalar = is_gimple_reg_type (root->type);
1348 bool hole = false, sth_created = false;
/* Read/write marks propagate both down from callers and up from ROOT.  */
1350 if (mark_read)
1351 root->grp_read = true;
1352 else if (root->grp_read)
1353 mark_read = true;
1355 if (mark_write)
1356 root->grp_write = true;
1357 else if (root->grp_write)
1358 mark_write = true;
1360 if (root->grp_unscalarizable_region)
1361 allow_replacements = false;
/* Walk the children, tracking how much of ROOT they cover (COVERED_TO)
   and whether any gaps (holes) remain.  */
1363 for (child = root->first_child; child; child = child->next_sibling)
1365 if (!hole && child->offset < covered_to)
1366 hole = true;
1367 else
1368 covered_to += child->size;
1370 sth_created |= analyze_access_subtree (child, allow_replacements,
1371 mark_read, mark_write);
1373 root->grp_unscalarized_data |= child->grp_unscalarized_data;
1374 hole |= !child->grp_covered;
/* A childless scalar access is the unit we actually replace.  */
1377 if (allow_replacements && scalar && !root->first_child)
1379 if (dump_file && (dump_flags & TDF_DETAILS))
1381 fprintf (dump_file, "Marking ");
1382 print_generic_expr (dump_file, root->base, 0);
1383 fprintf (dump_file, " offset: %u, size: %u: ",
1384 (unsigned) root->offset, (unsigned) root->size);
1385 fprintf (dump_file, " to be replaced.\n");
1388 root->grp_to_be_replaced = 1;
1389 sth_created = true;
1390 hole = false;
1392 else if (covered_to < limit)
1393 hole = true;
1395 if (sth_created && !hole)
1397 root->grp_covered = 1;
1398 return true;
1400 if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
1401 root->grp_unscalarized_data = 1; /* not covered and written to */
1402 if (sth_created)
1403 return true;
1404 return false;
1407 /* Analyze all access trees linked by next_grp by the means of
1408 analyze_access_subtree.  Return true iff any tree scheduled at least one
scalar replacement.  */
1409 static bool
1410 analyze_access_trees (struct access *access)
1412 bool ret = false;
1414 while (access)
1416 if (analyze_access_subtree (access, true, false, false))
1417 ret = true;
1418 access = access->next_grp;
1421 return ret;
1424 /* Return true iff a potential new child of LACC at offset OFFSET and with size
1425 SIZE would conflict with an already existing one. If exactly such a child
1426 already exists in LACC, store a pointer to it in EXACT_MATCH. */
1428 static bool
1429 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
1430 HOST_WIDE_INT size, struct access **exact_match)
1432 struct access *child;
1434 for (child = lacc->first_child; child; child = child->next_sibling)
/* An identical <offset, size> child is both a conflict (we must not
   create a duplicate) and an exact match reported to the caller.  */
1436 if (child->offset == norm_offset && child->size == size)
1438 *exact_match = child;
1439 return true;
/* A partial overlap is a conflict with no exact match.  */
1442 if (child->offset < norm_offset + size
1443 && child->offset + child->size > norm_offset)
1444 return true;
1447 return false;
1450 /* Set the expr of TARGET to one just like MODEL but with its own base at the
1451 bottom of the handled components. */
1453 static void
1454 duplicate_expr_for_different_base (struct access *target,
1455 struct access *model)
1457 tree t, expr = unshare_expr (model->expr);
1459 gcc_assert (handled_component_p (expr));
/* Walk to the innermost handled component, whose operand 0 is the base,
   and swap MODEL's base for TARGET's.  */
1460 t = expr;
1461 while (handled_component_p (TREE_OPERAND (t, 0)))
1462 t = TREE_OPERAND (t, 0);
1463 gcc_assert (TREE_OPERAND (t, 0) == model->base);
1464 TREE_OPERAND (t, 0) = target->base;
1466 target->expr = expr;
1470 /* Create a new child access of PARENT, with all properties just like MODEL
1471 except for its offset, and with its grp_write set and grp_read clear (it
1472 is propagated to the LHS of an assignment, so it starts out written-to).
1473 Return the new access. Note that this access is created long after all
1474 splicing and sorting, it's not located in any access vector and is
1475 automatically a representative of its group. */
1476 static struct access *
1477 create_artificial_child_access (struct access *parent, struct access *model,
1478 HOST_WIDE_INT new_offset)
1480 struct access *access;
1481 struct access **child;
1483 gcc_assert (!model->grp_unscalarizable_region);
1485 access = (struct access *) pool_alloc (access_pool);
1486 memset (access, 0, sizeof (struct access));
1487 access->base = parent->base;
1488 access->offset = new_offset;
1489 access->size = model->size;
1490 duplicate_expr_for_different_base (access, model);
1491 access->type = model->type;
1492 access->grp_write = true;
1493 access->grp_read = false;
/* Keep PARENT's child list sorted by offset when inserting.  */
1495 child = &parent->first_child;
1496 while (*child && (*child)->offset < new_offset)
1497 child = &(*child)->next_sibling;
1499 access->next_sibling = *child;
1500 *child = access;
1502 return access;
1506 /* Propagate all subaccesses of RACC across an assignment link to LACC. Return
1507 true if any new subaccess was created. Additionally, if RACC is a scalar
1508 access but LACC is not, change the type of the latter. */
1510 static bool
1511 propagate_subacesses_accross_link (struct access *lacc, struct access *racc)
1513 struct access *rchild;
1514 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
1516 bool ret = false;
/* Nothing can be propagated into a scalar or an unscalarizable region.  */
1518 if (is_gimple_reg_type (lacc->type)
1519 || lacc->grp_unscalarizable_region
1520 || racc->grp_unscalarizable_region)
1521 return false;
/* If both sides are childless and the RHS is scalar, just adopt the RHS
   expression and type on the LHS side.  */
1523 if (!lacc->first_child && !racc->first_child
1524 && is_gimple_reg_type (racc->type))
1526 duplicate_expr_for_different_base (lacc, racc);
1527 lacc->type = racc->type;
1528 return false;
1531 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
1533 struct access *new_acc = NULL;
1534 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
1536 if (rchild->grp_unscalarizable_region)
1537 continue;
/* If an exactly matching child already exists, recurse into it instead
   of creating a duplicate; other conflicts simply skip the child.  */
1539 if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
1540 &new_acc))
1542 if (new_acc && rchild->first_child)
1543 ret |= propagate_subacesses_accross_link (new_acc, rchild);
1544 continue;
1547 /* If a (part of) a union field is on the RHS of an assignment, it can
1548 have sub-accesses which do not make sense on the LHS (PR 40351).
1549 Check that this is not the case. */
1550 if (!build_ref_for_offset (NULL, TREE_TYPE (lacc->base), norm_offset,
1551 rchild->type, false))
1552 continue;
1554 new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
1555 if (racc->first_child)
1556 propagate_subacesses_accross_link (new_acc, rchild);
1558 ret = true;
1561 return ret;
1564 /* Propagate all subaccesses across assignment links.  Worklist algorithm:
whenever propagation changes an LHS representative that itself has links,
it is re-queued so its own links are processed too.  */
1566 static void
1567 propagate_all_subaccesses (void)
1569 while (work_queue_head)
1571 struct access *racc = pop_access_from_work_queue ();
1572 struct assign_link *link;
1574 gcc_assert (racc->first_link);
1576 for (link = racc->first_link; link; link = link->next)
1578 struct access *lacc = link->lacc;
/* The LHS base may have been disqualified in the meantime.  */
1580 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
1581 continue;
1582 lacc = lacc->group_representative;
1583 if (propagate_subacesses_accross_link (lacc, racc)
1584 && lacc->first_link)
1585 add_access_to_work_queue (lacc);
1590 /* Go through all accesses collected throughout the (intraprocedural) analysis
1591 stage, exclude overlapping ones, identify representatives and build trees
1592 out of them, making decisions about scalarization on the way. Return true
1593 iff there are any to-be-scalarized variables after this stage. */
1595 static bool
1596 analyze_all_variable_accesses (void)
1598 tree var;
1599 referenced_var_iterator rvi;
1600 bool res = false;
/* Pass 1: splice accesses of each candidate and build access trees;
   partial overlaps disqualify the whole variable.  */
1602 FOR_EACH_REFERENCED_VAR (var, rvi)
1603 if (bitmap_bit_p (candidate_bitmap, DECL_UID (var)))
1605 struct access *access;
1607 access = sort_and_splice_var_accesses (var);
1608 if (access)
1609 build_access_trees (access);
1610 else
1611 disqualify_candidate (var,
1612 "No or inhibitingly overlapping accesses.");
1615 propagate_all_subaccesses ();
/* Pass 2: analyze the trees and schedule replacements; variables that
   gained no replacements are disqualified.  */
1617 FOR_EACH_REFERENCED_VAR (var, rvi)
1618 if (bitmap_bit_p (candidate_bitmap, DECL_UID (var)))
1620 struct access *access = get_first_repr_for_decl (var);
1622 if (analyze_access_trees (access))
1624 res = true;
1625 if (dump_file && (dump_flags & TDF_DETAILS))
1627 fprintf (dump_file, "\nAccess trees for ");
1628 print_generic_expr (dump_file, var, 0);
1629 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
1630 dump_access_tree (dump_file, access);
1631 fprintf (dump_file, "\n");
1634 else
1635 disqualify_candidate (var, "No scalar replacements to be created.");
1638 return res;
1641 /* Return true iff a reference statement into aggregate AGG can be built for
1642 every single to-be-replaced access that is a child of ACCESS, its sibling
1643 or a child of its sibling. TOP_OFFSET is the offset from the processed
1644 access subtree that has to be subtracted from offset of each access. */
1646 static bool
1647 ref_expr_for_all_replacements_p (struct access *access, tree agg,
1648 HOST_WIDE_INT top_offset)
/* Probe build_ref_for_offset with a NULL expression pointer: this only
   checks feasibility, no tree is actually built.  */
1652 if (access->grp_to_be_replaced
1653 && !build_ref_for_offset (NULL, TREE_TYPE (agg),
1654 access->offset - top_offset,
1655 access->type, false))
1656 return false;
1658 if (access->first_child
1659 && !ref_expr_for_all_replacements_p (access->first_child, agg,
1660 top_offset))
1661 return false;
1663 access = access->next_sibling;
1665 while (access);
1667 return true;
1670 /* Generate statements copying scalar replacements of accesses within a subtree
1671 into or out of AGG. ACCESS is the first child of the root of the subtree to
1672 be processed. AGG is an aggregate type expression (can be a declaration but
1673 does not have to be, it can for example also be an indirect_ref).
1674 TOP_OFFSET is the offset of the processed subtree which has to be subtracted
1675 from offsets of individual accesses to get corresponding offsets for AGG.
1676 If CHUNK_SIZE is non-null, copy only replacements in the interval
1677 <start_offset, start_offset + chunk_size>, otherwise copy all. GSI is a
1678 statement iterator used to place the new statements. WRITE should be true
1679 when the statements should write from AGG to the replacement and false if
1680 vice versa. if INSERT_AFTER is true, new statements will be added after the
1681 current statement in GSI, they will be added before the statement
1682 otherwise. */
1684 static void
1685 generate_subtree_copies (struct access *access, tree agg,
1686 HOST_WIDE_INT top_offset,
1687 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
1688 gimple_stmt_iterator *gsi, bool write,
1689 bool insert_after)
1693 tree expr = unshare_expr (agg);
/* Accesses are sorted by offset, so once past the chunk we are done.  */
1695 if (chunk_size && access->offset >= start_offset + chunk_size)
1696 return;
1698 if (access->grp_to_be_replaced
1699 && (chunk_size == 0
1700 || access->offset + access->size > start_offset))
1702 tree repl = get_access_replacement (access);
1703 bool ref_found;
1704 gimple stmt;
/* Feasibility was verified earlier, so the reference must build.  */
1706 ref_found = build_ref_for_offset (&expr, TREE_TYPE (agg),
1707 access->offset - top_offset,
1708 access->type, false);
1709 gcc_assert (ref_found);
1711 if (write)
/* Partial-LHS replacements must have the RHS gimplified first.  */
1713 if (access->grp_partial_lhs)
1714 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
1715 !insert_after,
1716 insert_after ? GSI_NEW_STMT
1717 : GSI_SAME_STMT);
1718 stmt = gimple_build_assign (repl, expr);
1720 else
1722 TREE_NO_WARNING (repl) = 1;
1723 if (access->grp_partial_lhs)
1724 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
1725 !insert_after,
1726 insert_after ? GSI_NEW_STMT
1727 : GSI_SAME_STMT);
1728 stmt = gimple_build_assign (expr, repl);
1731 if (insert_after)
1732 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
1733 else
1734 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1735 update_stmt (stmt);
1738 if (access->first_child)
1739 generate_subtree_copies (access->first_child, agg, top_offset,
1740 start_offset, chunk_size, gsi,
1741 write, insert_after);
1743 access = access->next_sibling;
1745 while (access);
1748 /* Assign zero to all scalar replacements in an access subtree. ACCESS is
1749 the root of the subtree to be processed. GSI is the statement iterator used
1750 for inserting statements which are added after the current statement if
1751 INSERT_AFTER is true or before it otherwise. */
1753 static void
1754 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
1755 bool insert_after)
1758 struct access *child;
1760 if (access->grp_to_be_replaced)
1762 gimple stmt;
/* Zero converted to the access type; works for any scalar type.  */
1764 stmt = gimple_build_assign (get_access_replacement (access),
1765 fold_convert (access->type,
1766 integer_zero_node));
1767 if (insert_after)
1768 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
1769 else
1770 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1771 update_stmt (stmt);
1774 for (child = access->first_child; child; child = child->next_sibling)
1775 init_subtree_with_zero (child, gsi, insert_after);
1778 /* Search for an access representative for the given expression EXPR and
1779 return it or NULL if it cannot be found. */
1781 static struct access *
1782 get_access_for_expr (tree expr)
1784 HOST_WIDE_INT offset, size, max_size;
1785 tree base;
1787 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
1788 a different size than the size of its argument and we need the latter
1789 one. */
1790 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
1791 expr = TREE_OPERAND (expr, 0);
/* Only fixed-size references into declared candidates can match.  */
1793 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
1794 if (max_size == -1 || !DECL_P (base))
1795 return NULL;
1797 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
1798 return NULL;
1800 return get_var_base_offset_size_access (base, offset, max_size);
1803 /* Callback for scan_function. Replace the expression EXPR with a scalar
1804 replacement if there is one and generate other statements to do type
1805 conversion or subtree copying if necessary. GSI is used to place newly
1806 created statements, WRITE is true if the expression is being written to (it
1807 is on a LHS of a statement or output in an assembly statement). */
1809 static bool
1810 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write,
1811 void *data ATTRIBUTE_UNUSED)
1813 struct access *access;
1814 tree type, bfr;
/* Strip a BIT_FIELD_REF wrapper but remember it: the referenced chunk
   determines which part of the subtree must be copied below.  */
1816 if (TREE_CODE (*expr) == BIT_FIELD_REF)
1818 bfr = *expr;
1819 expr = &TREE_OPERAND (*expr, 0);
1821 else
1822 bfr = NULL_TREE;
1824 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
1825 expr = &TREE_OPERAND (*expr, 0);
1826 access = get_access_for_expr (*expr);
1827 if (!access)
1828 return false;
1829 type = TREE_TYPE (*expr);
1831 if (access->grp_to_be_replaced)
1833 tree repl = get_access_replacement (access);
1834 /* If we replace a non-register typed access simply use the original
1835 access expression to extract the scalar component afterwards.
1836 This happens if scalarizing a function return value or parameter
1837 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
1838 gcc.c-torture/compile/20011217-1.c. */
1839 if (!is_gimple_reg_type (type))
1841 gimple stmt;
1842 if (write)
1844 tree ref = unshare_expr (access->expr);
1845 if (access->grp_partial_lhs)
1846 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
1847 false, GSI_NEW_STMT);
1848 stmt = gimple_build_assign (repl, ref);
1849 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
1851 else
1853 if (access->grp_partial_lhs)
1854 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
1855 true, GSI_SAME_STMT);
1856 stmt = gimple_build_assign (unshare_expr (access->expr), repl);
1857 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1860 else
/* Register-typed access: substitute the replacement directly.  */
1862 gcc_assert (useless_type_conversion_p (type, access->type));
1863 *expr = repl;
/* Keep replacements of subaccesses coherent with the aggregate: copy
   the relevant chunk in (for reads) or out (for writes).  */
1867 if (access->first_child)
1869 HOST_WIDE_INT start_offset, chunk_size;
1870 if (bfr
1871 && host_integerp (TREE_OPERAND (bfr, 1), 1)
1872 && host_integerp (TREE_OPERAND (bfr, 2), 1))
1874 start_offset = tree_low_cst (TREE_OPERAND (bfr, 1), 1);
1875 chunk_size = tree_low_cst (TREE_OPERAND (bfr, 2), 1);
1877 else
1878 start_offset = chunk_size = 0;
1880 generate_subtree_copies (access->first_child, access->base, 0,
1881 start_offset, chunk_size, gsi, write, write);
1883 return true;
1886 /* Store all replacements in the access tree rooted in TOP_RACC either to their
1887 base aggregate if there are unscalarized data or directly to LHS
1888 otherwise. */
1890 static void
1891 handle_unscalarized_data_in_subtree (struct access *top_racc, tree lhs,
1892 gimple_stmt_iterator *gsi)
/* With unscalarized data the aggregate must be refreshed first so that the
   original assignment copies everything; otherwise the replacements can be
   stored straight into LHS.  */
1894 if (top_racc->grp_unscalarized_data)
1895 generate_subtree_copies (top_racc->first_child, top_racc->base, 0, 0, 0,
1896 gsi, false, false);
1897 else
1898 generate_subtree_copies (top_racc->first_child, lhs, top_racc->offset,
1899 0, 0, gsi, false, false);
1903 /* Try to generate statements to load all sub-replacements in an access
1904 (sub)tree (LACC is the first child) from scalar replacements in the TOP_RACC
1905 (sub)tree. If that is not possible, refresh the TOP_RACC base aggregate and
1906 load the accesses from it. LEFT_OFFSET is the offset of the left whole
1907 subtree being copied, RIGHT_OFFSET is the same thing for the right subtree.
1908 GSI is stmt iterator used for statement insertions. *REFRESHED is true iff
1909 the rhs top aggregate has already been refreshed by contents of its scalar
1910 reductions and is set to true if this function has to do it. */
1912 static void
1913 load_assign_lhs_subreplacements (struct access *lacc, struct access *top_racc,
1914 HOST_WIDE_INT left_offset,
1915 HOST_WIDE_INT right_offset,
1916 gimple_stmt_iterator *old_gsi,
1917 gimple_stmt_iterator *new_gsi,
1918 bool *refreshed, tree lhs)
1922 if (lacc->grp_to_be_replaced)
1924 struct access *racc;
/* Translate the LHS offset into the RHS subtree's coordinates.  */
1925 HOST_WIDE_INT offset = lacc->offset - left_offset + right_offset;
1926 gimple stmt;
1927 tree rhs;
1929 racc = find_access_in_subtree (top_racc, offset, lacc->size);
1930 if (racc && racc->grp_to_be_replaced)
1932 rhs = get_access_replacement (racc);
1933 if (!useless_type_conversion_p (lacc->type, racc->type))
1934 rhs = fold_build1 (VIEW_CONVERT_EXPR, lacc->type, rhs);
1936 else
1938 bool repl_found;
1940 /* No suitable access on the right hand side, need to load from
1941 the aggregate. See if we have to update it first... */
1942 if (!*refreshed)
1944 gcc_assert (top_racc->first_child);
1945 handle_unscalarized_data_in_subtree (top_racc, lhs, old_gsi);
1946 *refreshed = true;
1949 rhs = unshare_expr (top_racc->base);
1950 repl_found = build_ref_for_offset (&rhs,
1951 TREE_TYPE (top_racc->base),
1952 offset, lacc->type, false);
1953 gcc_assert (repl_found);
1956 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
1957 gsi_insert_after (new_gsi, stmt, GSI_NEW_STMT);
1958 update_stmt (stmt);
/* A read but not fully covered LHS access also forces a refresh so the
   original aggregate copy supplies the uncovered parts.  */
1960 else if (lacc->grp_read && !lacc->grp_covered && !*refreshed)
1962 handle_unscalarized_data_in_subtree (top_racc, lhs, old_gsi);
1963 *refreshed = true;
1966 if (lacc->first_child)
1967 load_assign_lhs_subreplacements (lacc->first_child, top_racc,
1968 left_offset, right_offset,
1969 old_gsi, new_gsi, refreshed, lhs);
1970 lacc = lacc->next_sibling;
1972 while (lacc);
1975 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT contains a pointer
1976 to the assignment and GSI is the statement iterator pointing at it. Returns
1977 the same values as sra_modify_assign. */
1979 static enum scan_assign_result
1980 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
1982 tree lhs = gimple_assign_lhs (*stmt);
1983 struct access *acc;
1985 acc = get_access_for_expr (lhs);
1986 if (!acc)
1987 return SRA_SA_NONE;
/* A non-empty constructor: keep the statement and reload replacements
   from the aggregate afterwards.  */
1989 if (VEC_length (constructor_elt,
1990 CONSTRUCTOR_ELTS (gimple_assign_rhs1 (*stmt))) > 0)
1992 /* I have never seen this code path trigger but if it can happen the
1993 following should handle it gracefully. */
1994 if (access_has_children_p (acc))
1995 generate_subtree_copies (acc->first_child, acc->base, 0, 0, 0, gsi,
1996 true, true);
1997 return SRA_SA_PROCESSED;
/* An empty constructor zeroes the LHS: if the replacements cover it
   completely, the original statement can go away entirely.  */
2000 if (acc->grp_covered)
2002 init_subtree_with_zero (acc, gsi, false);
2003 unlink_stmt_vdef (*stmt);
2004 gsi_remove (gsi, true);
2005 return SRA_SA_REMOVED;
2007 else
2009 init_subtree_with_zero (acc, gsi, true);
2010 return SRA_SA_PROCESSED;
2015 /* Callback of scan_function to process assign statements. It examines both
2016 sides of the statement, replaces them with a scalar replacement if there is
2017 one and generating copying of replacements if scalarized aggregates have been
2018 used in the assignment. STMT is a pointer to the assign statement, GSI is
2019 used to hold generated statements for type conversions and subtree
2020 copying. */
2022 static enum scan_assign_result
2023 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi,
2024 void *data ATTRIBUTE_UNUSED)
2026 struct access *lacc, *racc;
2027 tree lhs, rhs;
2028 bool modify_this_stmt = false;
2029 bool force_gimple_rhs = false;
2031 if (!gimple_assign_single_p (*stmt))
2032 return SRA_SA_NONE;
2033 lhs = gimple_assign_lhs (*stmt);
2034 rhs = gimple_assign_rhs1 (*stmt);
2036 if (TREE_CODE (rhs) == CONSTRUCTOR)
2037 return sra_modify_constructor_assign (stmt, gsi);
2039 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
2040 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
2041 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
2043 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (*stmt),
2044 gsi, false, data);
2045 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (*stmt),
2046 gsi, true, data);
2047 return modify_this_stmt ? SRA_SA_PROCESSED : SRA_SA_NONE;
2050 lacc = get_access_for_expr (lhs);
2051 racc = get_access_for_expr (rhs);
2052 if (!lacc && !racc)
2053 return SRA_SA_NONE;
2055 if (lacc && lacc->grp_to_be_replaced)
2057 lhs = get_access_replacement (lacc);
2058 gimple_assign_set_lhs (*stmt, lhs);
2059 modify_this_stmt = true;
2060 if (lacc->grp_partial_lhs)
2061 force_gimple_rhs = true;
2064 if (racc && racc->grp_to_be_replaced)
2066 rhs = get_access_replacement (racc);
2067 modify_this_stmt = true;
2068 if (racc->grp_partial_lhs)
2069 force_gimple_rhs = true;
2072 if (modify_this_stmt)
2074 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
2076 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
2077 ??? This should move to fold_stmt which we simply should
2078 call after building a VIEW_CONVERT_EXPR here. */
2079 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
2080 && !access_has_children_p (lacc))
2082 tree expr = unshare_expr (lhs);
2083 if (build_ref_for_offset (&expr, TREE_TYPE (lhs), racc->offset,
2084 TREE_TYPE (rhs), false))
2086 lhs = expr;
2087 gimple_assign_set_lhs (*stmt, expr);
2090 else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
2091 && !access_has_children_p (racc))
2093 tree expr = unshare_expr (rhs);
2094 if (build_ref_for_offset (&expr, TREE_TYPE (rhs), lacc->offset,
2095 TREE_TYPE (lhs), false))
2096 rhs = expr;
2098 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
2100 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
2101 if (!is_gimple_reg (lhs))
2102 force_gimple_rhs = true;
2106 if (force_gimple_rhs)
2107 rhs = force_gimple_operand_gsi (gsi, rhs, true, NULL_TREE,
2108 true, GSI_SAME_STMT);
2109 if (gimple_assign_rhs1 (*stmt) != rhs)
2111 gimple_assign_set_rhs_from_tree (gsi, rhs);
2112 gcc_assert (*stmt == gsi_stmt (*gsi));
2116 /* From this point on, the function deals with assignments in between
2117 aggregates when at least one has scalar reductions of some of its
2118 components. There are three possible scenarios: Both the LHS and RHS have
2119 to-be-scalarized components, 2) only the RHS has or 3) only the LHS has.
2121 In the first case, we would like to load the LHS components from RHS
2122 components whenever possible. If that is not possible, we would like to
2123 read it directly from the RHS (after updating it by storing in it its own
2124 components). If there are some necessary unscalarized data in the LHS,
2125 those will be loaded by the original assignment too. If neither of these
2126 cases happen, the original statement can be removed. Most of this is done
2127 by load_assign_lhs_subreplacements.
2129 In the second case, we would like to store all RHS scalarized components
2130 directly into LHS and if they cover the aggregate completely, remove the
2131 statement too. In the third case, we want the LHS components to be loaded
2132 directly from the RHS (DSE will remove the original statement if it
2133 becomes redundant).
2135 This is a bit complex but manageable when types match and when unions do
2136 not cause confusion in a way that we cannot really load a component of LHS
2137 from the RHS or vice versa (the access representing this level can have
2138 subaccesses that are accessible only through a different union field at a
2139 higher level - different from the one used in the examined expression).
2140 Unions are fun.
2142 Therefore, I specially handle a fourth case, happening when there is a
2143 specific type cast or it is impossible to locate a scalarized subaccess on
2144 the other side of the expression. If that happens, I simply "refresh" the
2145 RHS by storing in it its scalarized components, leave the original statement
2146 there to do the copying and then load the scalar replacements of the LHS.
2147 This is what the first branch does. */
2149 if (contains_view_convert_expr_p (rhs) || contains_view_convert_expr_p (lhs)
2150 || (access_has_children_p (racc)
2151 && !ref_expr_for_all_replacements_p (racc, lhs, racc->offset))
2152 || (access_has_children_p (lacc)
2153 && !ref_expr_for_all_replacements_p (lacc, rhs, lacc->offset)))
2155 if (access_has_children_p (racc))
2156 generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
2157 gsi, false, false);
2158 if (access_has_children_p (lacc))
2159 generate_subtree_copies (lacc->first_child, lacc->base, 0, 0, 0,
2160 gsi, true, true);
2162 else
2164 if (access_has_children_p (lacc) && access_has_children_p (racc))
2166 gimple_stmt_iterator orig_gsi = *gsi;
2167 bool refreshed;
2169 if (lacc->grp_read && !lacc->grp_covered)
2171 handle_unscalarized_data_in_subtree (racc, lhs, gsi);
2172 refreshed = true;
2174 else
2175 refreshed = false;
2177 load_assign_lhs_subreplacements (lacc->first_child, racc,
2178 lacc->offset, racc->offset,
2179 &orig_gsi, gsi, &refreshed, lhs);
2180 if (!refreshed || !racc->grp_unscalarized_data)
2182 if (*stmt == gsi_stmt (*gsi))
2183 gsi_next (gsi);
2185 unlink_stmt_vdef (*stmt);
2186 gsi_remove (&orig_gsi, true);
2187 return SRA_SA_REMOVED;
2190 else
2192 if (access_has_children_p (racc))
2194 if (!racc->grp_unscalarized_data)
2196 generate_subtree_copies (racc->first_child, lhs,
2197 racc->offset, 0, 0, gsi,
2198 false, false);
2199 gcc_assert (*stmt == gsi_stmt (*gsi));
2200 unlink_stmt_vdef (*stmt);
2201 gsi_remove (gsi, true);
2202 return SRA_SA_REMOVED;
2204 else
2205 generate_subtree_copies (racc->first_child, lhs,
2206 racc->offset, 0, 0, gsi, false, true);
2208 else if (access_has_children_p (lacc))
2209 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
2210 0, 0, gsi, true, true);
2213 return modify_this_stmt ? SRA_SA_PROCESSED : SRA_SA_NONE;
2216 /* Generate statements initializing scalar replacements of parts of function
2217 parameters. */
2219 static void
2220 initialize_parameter_reductions (void)
2222 gimple_stmt_iterator gsi;
2223 gimple_seq seq = NULL;
2224 tree parm;
2226 for (parm = DECL_ARGUMENTS (current_function_decl);
2227 parm;
2228 parm = TREE_CHAIN (parm))
2230 VEC (access_p, heap) *access_vec;
2231 struct access *access;
2233 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
2234 continue;
2235 access_vec = get_base_access_vector (parm);
2236 if (!access_vec)
2237 continue;
2239 if (!seq)
2241 seq = gimple_seq_alloc ();
2242 gsi = gsi_start (seq);
2245 for (access = VEC_index (access_p, access_vec, 0);
2246 access;
2247 access = access->next_grp)
2248 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true);
2251 if (seq)
2252 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR), seq);
2255 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
2256 it reveals there are components of some aggregates to be scalarized, it runs
2257 the required transformations. */
2258 static unsigned int
2259 perform_intra_sra (void)
2261 int ret = 0;
2262 sra_initialize ();
2264 if (!find_var_candidates ())
2265 goto out;
2267 if (!scan_function (build_access_from_expr, build_accesses_from_assign, NULL,
2268 true, NULL))
2269 goto out;
2271 if (!analyze_all_variable_accesses ())
2272 goto out;
2274 scan_function (sra_modify_expr, sra_modify_assign, NULL,
2275 false, NULL);
2276 initialize_parameter_reductions ();
2277 ret = TODO_update_ssa;
2279 out:
2280 sra_deinitialize ();
2281 return ret;
2284 /* Perform early intraprocedural SRA. */
2285 static unsigned int
2286 early_intra_sra (void)
2288 sra_mode = SRA_MODE_EARLY_INTRA;
2289 return perform_intra_sra ();
2292 /* Perform "late" intraprocedural SRA. */
2293 static unsigned int
2294 late_intra_sra (void)
2296 sra_mode = SRA_MODE_INTRA;
2297 return perform_intra_sra ();
2301 static bool
2302 gate_intra_sra (void)
2304 return flag_tree_sra != 0;
2308 struct gimple_opt_pass pass_sra_early =
2311 GIMPLE_PASS,
2312 "esra", /* name */
2313 gate_intra_sra, /* gate */
2314 early_intra_sra, /* execute */
2315 NULL, /* sub */
2316 NULL, /* next */
2317 0, /* static_pass_number */
2318 TV_TREE_SRA, /* tv_id */
2319 PROP_cfg | PROP_ssa, /* properties_required */
2320 0, /* properties_provided */
2321 0, /* properties_destroyed */
2322 0, /* todo_flags_start */
2323 TODO_dump_func
2324 | TODO_update_ssa
2325 | TODO_ggc_collect
2326 | TODO_verify_ssa /* todo_flags_finish */
2331 struct gimple_opt_pass pass_sra =
2334 GIMPLE_PASS,
2335 "sra", /* name */
2336 gate_intra_sra, /* gate */
2337 late_intra_sra, /* execute */
2338 NULL, /* sub */
2339 NULL, /* next */
2340 0, /* static_pass_number */
2341 TV_TREE_SRA, /* tv_id */
2342 PROP_cfg | PROP_ssa, /* properties_required */
2343 0, /* properties_provided */
2344 0, /* properties_destroyed */
2345 TODO_update_address_taken, /* todo_flags_start */
2346 TODO_dump_func
2347 | TODO_update_ssa
2348 | TODO_ggc_collect
2349 | TODO_verify_ssa /* todo_flags_finish */